diff --git a/.github/workflows/Unit-Test.yml b/.github/workflows/Unit-Test.yml index ead3b36f..d10314eb 100644 --- a/.github/workflows/Unit-Test.yml +++ b/.github/workflows/Unit-Test.yml @@ -13,17 +13,22 @@ jobs: deploy: runs-on: ubuntu-latest - + services: + postgres: + image: postgis/postgis:14-3.3 + env: + POSTGRES_PASSWORD: admin + POSTGRES_DB: insights + ports: + - 5434:5432 + options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 2 steps: - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v1 with: python-version: 3.8 - - name: Check postgresql version - run: | - psql -V - - name: Remove postgresql version 14 + - name: Clean up PSQL run: | sudo apt-get --purge remove postgresql sudo apt-get purge postgresql* @@ -36,17 +41,31 @@ jobs: run: | sudo apt-get update sudo apt install postgis postgresql-12-postgis-3 - - name: Install gdal - run: | - sudo apt-add-repository ppa:ubuntugis/ubuntugis-unstable - sudo apt-get update - sudo apt-get install gdal-bin libgdal-dev - + + - name: Create Databases + run : | + export PGPASSWORD='admin'; + psql -U postgres -h localhost -p 5434 -c "CREATE DATABASE underpass;" + psql -U postgres -h localhost -p 5434 -c "CREATE DATABASE tm;" + psql -U postgres -h localhost -p 5434 -c "CREATE DATABASE raw;" + + - name: Insert sample db data + run : | + export PGPASSWORD='admin'; + psql -U postgres -h localhost -p 5434 insights < tests/src/fixtures/insights.sql + psql -U postgres -h localhost -p 5434 raw < tests/src/fixtures/raw_data.sql + psql -U postgres -h localhost -p 5434 underpass < tests/src/fixtures/underpass.sql + wget https://raw.githubusercontent.com/hotosm/tasking-manager/develop/tests/database/tasking-manager.sql + psql -U postgres -h localhost -p 5434 tm < tasking-manager.sql + - name: Install Dependencies run: | python -m pip install --upgrade pip pip install -r requirements.txt pip install -e . 
+ - name: Creating config.txt + run: | + mv src/config.txt.sample src/config.txt - name: Run Tests run: | - py.test -v -s + py.test -v -s \ No newline at end of file diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 00000000..7ae123ef --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,86 @@ +name: Check Build +on: + push: + branches: + - master + - develop + pull_request: + branches: + - master + - develop + +jobs: + build: + timeout-minutes: 4 + runs-on: ubuntu-latest + services: + postgres: + image: postgis/postgis:14-3.3 + env: + POSTGRES_PASSWORD: admin + POSTGRES_DB: insights + ports: + - 5434:5432 + options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 2 + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v1 + with: + python-version: 3.8 + + - name: Create Databases + run: | + export PGPASSWORD='admin'; + psql -U postgres -h localhost -p 5434 -c "CREATE DATABASE underpass;" + psql -U postgres -h localhost -p 5434 -c "CREATE DATABASE tm;" + psql -U postgres -h localhost -p 5434 -c "CREATE DATABASE raw;" + + - name: Insert sample db data + run: | + export PGPASSWORD='admin'; + psql -U postgres -h localhost -p 5434 insights < tests/src/fixtures/insights.sql + psql -U postgres -h localhost -p 5434 insights < tests/src/fixtures/mapathon_summary.sql + psql -U postgres -h localhost -p 5434 raw < tests/src/fixtures/raw_data.sql + psql -U postgres -h localhost -p 5434 underpass < tests/src/fixtures/underpass.sql + wget https://raw.githubusercontent.com/hotosm/tasking-manager/develop/tests/database/tasking-manager.sql + psql -U postgres -h localhost -p 5434 tm < tasking-manager.sql + + - name: Install gdal + run: | + sudo apt-get -y install gdal-bin python3-gdal && sudo apt-get -y autoremove && sudo apt-get clean + + - name: Install redis + run: | + sudo apt install lsb-release + curl -fsSL https://packages.redis.io/gpg | sudo gpg 
--dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg + echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/redis.list + sudo apt-get update + sudo apt-get install redis + redis-cli ping + + - name: Install Dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + pip install -e . + - name: Creating config.txt + run: | + mv src/config.txt.sample src/config.txt + - name: Run uvicorn server + run: | + uvicorn API.main:app & + env: + PORT: 8000 + - name: Run celery server + run: | + celery --app API.api_worker worker --loglevel=INFO & + - name: Run flower dashboard + run: | + celery --app API.api_worker flower --port=5555 --broker=redis://localhost:6379/ & + - name: Run mapathon summary endpoint + run: | + curl -d '{"project_ids": [11224, 10042, 9906, 1381, 11203, 10681, 8055, 8732, 11193, 7305,11210, 10985, 10988, 11190, 6658, 5644, 10913, 6495, 4229],"fromTimestamp":"2021-08-27T9:00:00","toTimestamp":"2021-08-27T11:00:00","hashtags": ["mapandchathour2021"]}' -H 'Content-Type: application/json' http://127.0.0.1:8000/v1/mapathon/summary/ + - name: Run rawdata current snapshot + run: | + curl -d '{"geometry":{"type":"Polygon","coordinates":[[[83.96919250488281,28.194446860487773],[83.99751663208006,28.194446860487773],[83.99751663208006,28.214869548073377],[83.96919250488281,28.214869548073377],[83.96919250488281,28.194446860487773]]]}}' -H 'Content-Type: application/json' http://127.0.0.1:8000/v2/raw-data/current-snapshot/ diff --git a/.gitignore b/.gitignore index ad2d58ab..1c238ea4 100644 --- a/.gitignore +++ b/.gitignore @@ -12,4 +12,6 @@ build newrelic.ini newrelic.ini_backup exports -nohup.out \ No newline at end of file +postgres-data +*.out +*.log diff --git a/API/api_worker.py b/API/api_worker.py new file mode 100644 index 00000000..a6809e18 --- /dev/null +++ b/API/api_worker.py @@ -0,0 +1,121 @@ +import os 
+import pathlib +import orjson +import shutil +import time +import requests +from datetime import datetime as dt +import zipfile +from celery import Celery +from src.galaxy.config import config +from fastapi.responses import JSONResponse +from src.galaxy.query_builder.builder import format_file_name_str +from src.galaxy.validation.models import RawDataOutputType +from src.galaxy.app import RawData, S3FileTransfer +from src.galaxy.config import use_s3_to_upload, logger as logging, config, allow_bind_zip_filter + +celery = Celery(__name__) +celery.conf.broker_url = config.get( + "CELERY", "CELERY_BROKER_URL", fallback="redis://localhost:6379" +) +celery.conf.result_backend = config.get( + "CELERY", "CELERY_RESULT_BACKEND", fallback="redis://localhost:6379" +) # using redis as backend , make sure you have redis server started on your system on port 6379 + +celery.conf.task_serializer = 'pickle' +celery.conf.result_serializer = 'pickle' +celery.conf.accept_content = ['application/json', 'application/x-python-serialize'] + + +@celery.task(bind=True, name="process_raw_data") +def process_raw_data(self, params): + try: + start_time = dt.now() + bind_zip=params.bind_zip if allow_bind_zip_filter else True + # unique id for zip file and geojson for each export + params.output_type = params.output_type if params.output_type else RawDataOutputType.GEOJSON.value + params.file_name=format_file_name_str(params.file_name) if params.file_name else 'Galaxy_export' + exportname = f"{params.file_name}_{str(self.request.id)}_{params.output_type}" + + logging.info("Request %s received", exportname) + + geom_area, working_dir = RawData(params).extract_current_data(exportname) + inside_file_size = 0 + if bind_zip: + logging.debug('Zip Binding Started !') + # saving file in temp directory instead of memory so that zipping file will not eat memory + upload_file_path = os.path.join(working_dir,os.pardir,f"{exportname}.zip") + + zf = zipfile.ZipFile(upload_file_path, "w", 
zipfile.ZIP_DEFLATED) + for file_path in pathlib.Path(working_dir).iterdir(): + zf.write(file_path, arcname=file_path.name) + inside_file_size += os.path.getsize(file_path) + + # Compressing geojson file + zf.writestr("clipping_boundary.geojson", + orjson.dumps(dict(params.geometry))) + + zf.close() + logging.debug('Zip Binding Done !') + else: + for file_path in pathlib.Path(working_dir).iterdir(): + upload_file_path=file_path + inside_file_size += os.path.getsize(file_path) + break # only take one file inside dir , if contains many it should be inside zip + # check if download url will be generated from s3 or not from config + if use_s3_to_upload: + file_transfer_obj = S3FileTransfer() + download_url = file_transfer_obj.upload(upload_file_path, exportname, file_suffix='zip' if bind_zip else params.output_type.lower()) + else: + download_url = str(upload_file_path) # give the static file download url back to user served from fastapi static export path + + # getting file size of zip , units are in bytes converted to mb in response + zip_file_size = os.path.getsize(upload_file_path) + # watches the status code of the link provided and deletes the file if it is 200 + if use_s3_to_upload: + watch_s3_upload(download_url, upload_file_path) + if use_s3_to_upload or bind_zip: + #remove working dir from the machine , if its inside zip / uploaded we no longer need it + remove_file(working_dir) + response_time = dt.now() - start_time + response_time_str = str(response_time) + logging.info(f"Done Export : {exportname} of {round(inside_file_size/1000000)} MB / {geom_area} sqkm in {response_time_str}") + return {"download_url": download_url, "file_name": params.file_name, "process_time": response_time_str, "query_area": f"{round(geom_area,2)} Sq Km", "binded_file_size": f"{round(inside_file_size/1000000,2)} MB", "zip_file_size_bytes": zip_file_size} + + except Exception as ex: + raise ex + + +def remove_file(path: str) -> None: + """Used for removing temp file dir and its all 
content after zip file is delivered to user""" + try: + shutil.rmtree(path) + except OSError as ex: + logging.error("Error: %s - %s.", ex.filename, ex.strerror) + + +def watch_s3_upload(url: str, path: str) -> None: + """Watches upload of s3 either it is completed or not and removes the temp file after completion + + Args: + url (_type_): url generated by the script where data will be available + path (_type_): path where temp file is located at + """ + start_time = time.time() + remove_temp_file = True + check_call = requests.head(url).status_code + if check_call != 200: + logging.debug("Upload is not done yet waiting ...") + while check_call != 200: # check until status is not green + check_call = requests.head(url).status_code + if time.time() - start_time > 300: + logging.error( + "Upload time took more than 5 min , Killing watch : %s , URL : %s", path, url) + remove_temp_file = False # don't remove the file if upload fails + break + time.sleep(3) # check each 3 second + # once it is verfied file is uploaded finally remove the file + if remove_temp_file: + logging.debug( + "File is uploaded at %s , flushing out from %s", url, path) + os.unlink(path) diff --git a/API/download_export.py b/API/download_export.py deleted file mode 100644 index d8645c63..00000000 --- a/API/download_export.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (C) 2021 Humanitarian OpenStreetmap Team - -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. 
- -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -# Humanitarian OpenStreetmap Team -# 1100 13th Street NW Suite 800 Washington, D.C. 20005 -# - -"""[Router Responsible for downloading exports ] -""" -from fastapi import APIRouter -from fastapi_versioning import version -from src.galaxy.config import export_path -from fastapi.responses import FileResponse -from os.path import exists - - -router = APIRouter(prefix="") - - -@router.get("/exports/{file_name}") -@version(1) -def download_export(file_name: str): - """Used for Delivering our export to user. - Returns zip file if it is present on our server if not returns error - """ - zip_temp_path = f"""{export_path}{file_name}""" - if exists(zip_temp_path): - response = FileResponse(zip_temp_path, media_type="application/zip") - response.headers["Content-Disposition"] = f"attachment; filename={file_name}" - return response - else: - raise ValueError( - "File Doesn't Exist or has been cleared up from system") diff --git a/API/main.py b/API/main.py index 3935356c..4e398b60 100644 --- a/API/main.py +++ b/API/main.py @@ -33,12 +33,14 @@ from .hashtag_stats import router as hashtag_router from .tasking_manager import router as tm_router from .raw_data import router as raw_data_router -from .download_export import router as download_router -# from .test_router import router as test_router +from .tasks import router as tasks_router from .status import router as status_router from src.galaxy.db_session import database_instance -from src.galaxy.config import use_connection_pooling, use_s3_to_upload, logger as logging, config +from src.galaxy.config import limiter, export_path, use_connection_pooling, use_s3_to_upload, logger as logging, config from fastapi_versioning import VersionedFastAPI +from slowapi import _rate_limit_exceeded_handler +from slowapi.errors import RateLimitExceeded +from fastapi.staticfiles import StaticFiles # only use sentry if it is 
specified in config blocks if config.get("SENTRY", "dsn", fallback=None): @@ -50,12 +52,15 @@ traces_sample_rate=config.get("SENTRY", "rate") ) + run_env = config.get("API_CONFIG", "env", fallback='prod') if run_env.lower() == 'dev': # This is used for local setup for auth login import os os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1' + + app = FastAPI(title="Galaxy API") # app.include_router(test_router) @@ -71,15 +76,20 @@ app.include_router(tm_router) app.include_router(status_router) app.include_router(raw_data_router) +app.include_router(tasks_router) + -if use_s3_to_upload is False: - # only mount the disk if config is set to disk - app.include_router(download_router) app = VersionedFastAPI(app, enable_latest=True, version_format='{major}', prefix_format='/v{major}') +if use_s3_to_upload is False: + # only mount the disk if config is set to disk + app.mount("/exports", StaticFiles(directory=export_path), name="exports") + +app.state.limiter = limiter +app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler) origins = ["*"] diff --git a/API/raw_data.py b/API/raw_data.py index 6b8d4fb5..9ad0a746 100644 --- a/API/raw_data.py +++ b/API/raw_data.py @@ -38,8 +38,8 @@ from src.galaxy.query_builder.builder import format_file_name_str from src.galaxy.validation.models import RawDataCurrentParams, RawDataOutputType from src.galaxy.app import RawData, S3FileTransfer - -from src.galaxy.config import use_s3_to_upload, logger as logging, config +from .api_worker import process_raw_data +from src.galaxy.config import export_rate_limit, use_s3_to_upload, logger as logging, config, limiter, allow_bind_zip_filter router = APIRouter(prefix="/raw-data") @@ -52,14 +52,14 @@ @router.post("/current-snapshot/") @version(1) -def get_current_data(params: RawDataCurrentParams, background_tasks: BackgroundTasks, request: Request): +def get_current_snapshot_osm_data(params: RawDataCurrentParams, background_tasks: BackgroundTasks, request: Request): """Generates the 
current raw OpenStreetMap data available on database based on the input geometry, query and spatial features Args: params (RawDataCurrentParams): { - "outputType": "GeoJSON", + "outputType": "GeoJSON", # supported are : kml,shp,(FLATGEOBUF)fgb "fileName": "string", "geometry": { # only polygon is supported ** required field ** "coordinates": [ @@ -90,7 +90,8 @@ def get_current_data(params: RawDataCurrentParams, background_tasks: BackgroundT }, "geometryType": [ "point","line","polygon" - ] + ], + joinFilterType:"OR" # options are and / or . 'or' by default -- applies condition for filters **optional } background_tasks (BackgroundTasks): task to cleanup the files produced during export request (Request): request instance @@ -209,91 +210,64 @@ def get_current_data(params: RawDataCurrentParams, background_tasks: BackgroundT """ # def get_current_data(params:RawDataCurrentParams,background_tasks: BackgroundTasks, user_data=Depends(login_required)): # this will use osm login makes it restrict login start_time = dt.now() - if params.output_type is None: # if no ouput type is supplied default is geojson output - params.output_type = RawDataOutputType.GEOJSON.value - + bind_zip=params.bind_zip if allow_bind_zip_filter else True # unique id for zip file and geojson for each export - if params.file_name: - # need to format string from space to _ because it is filename , may be we need to filter special character as well later on - formatted_file_name = format_file_name_str(params.file_name) - # exportname = f"{formatted_file_name}_{datetime.now().isoformat()}_{str(uuid4())}" - exportname = f"""{formatted_file_name}_{str(uuid4())}_{params.output_type}""" # disabled date for now - - else: - # exportname = f"Raw_Export_{datetime.now().isoformat()}_{str(uuid4())}" - exportname = f"Raw_Export_{str(uuid4())}_{params.output_type}" + params.file_name=format_file_name_str(params.file_name) if params.file_name else 'Export' + exportname = 
f"{params.file_name}_{str(str(uuid4()))}_{params.output_type}" logging.info("Request %s received", exportname) - dump_temp_file, geom_area, root_dir_file = RawData( - params).extract_current_data(exportname) - path = f"""{root_dir_file}{exportname}/""" - - if os.path.exists(path) is False: - return JSONResponse( - status_code=400, - content={"Error": "Request went too big"} - ) - - logging.debug('Zip Binding Started !') - # saving file in temp directory instead of memory so that zipping file will not eat memory - zip_temp_path = f"""{root_dir_file}{exportname}.zip""" - zf = zipfile.ZipFile(zip_temp_path, "w", zipfile.ZIP_DEFLATED) - - directory = pathlib.Path(path) - for file_path in directory.iterdir(): - zf.write(file_path, arcname=file_path.name) - - # Compressing geojson file - zf.writestr("clipping_boundary.geojson", - orjson.dumps(dict(params.geometry))) - - zf.close() - logging.debug('Zip Binding Done !') + geom_area, working_dir = RawData(params).extract_current_data(exportname) inside_file_size = 0 - for temp_file in dump_temp_file: - # clearing tmp geojson file since it is already dumped to zip file we don't need it anymore - if os.path.exists(temp_file): - inside_file_size += os.path.getsize(temp_file) - - # remove the file that are just binded to zip file , we no longer need to store it - background_tasks.add_task(remove_file, path) - + if bind_zip: + logging.debug('Zip Binding Started !') + # saving file in temp directory instead of memory so that zipping file will not eat memory + upload_file_path = os.path.join(working_dir,os.pardir,f"{exportname}.zip") + + zf = zipfile.ZipFile(upload_file_path, "w", zipfile.ZIP_DEFLATED) + for file_path in pathlib.Path(working_dir).iterdir(): + zf.write(file_path, arcname=file_path.name) + inside_file_size += os.path.getsize(file_path) + + # Compressing geojson file + zf.writestr("clipping_boundary.geojson", + orjson.dumps(dict(params.geometry))) + + zf.close() + logging.debug('Zip Binding Done !') + else: + for 
file_path in pathlib.Path(working_dir).iterdir(): + upload_file_path=file_path + inside_file_size += os.path.getsize(file_path) + break # only take one file inside dir , if contains many it should be inside zip # check if download url will be generated from s3 or not from config if use_s3_to_upload: file_transfer_obj = S3FileTransfer() - download_url = file_transfer_obj.upload(zip_temp_path, exportname) - # watches the status code of the link provided and deletes the file if it is 200 - background_tasks.add_task(watch_s3_upload, download_url, zip_temp_path) + download_url = file_transfer_obj.upload(upload_file_path, exportname, file_suffix='zip' if bind_zip else params.output_type.lower()) else: - - # getting from config in case api and frontend is not hosted on same url - client_host = config.get( - "API_CONFIG", "api_host", fallback=f"""{request.url.scheme}://{request.client.host}""") - client_port = config.get("API_CONFIG", "api_port", fallback=8000) - - if client_port: - download_url = f"""{client_host}:{client_port}/v1/exports/{exportname}.zip""" # disconnected download portion from this endpoint because when there will be multiple hits at a same time we don't want function to get stuck waiting for user to download the file and deliver the response , we want to reduce waiting time and free function ! - else: - download_url = f"""{client_host}/v1/exports/{exportname}.zip""" # disconnected download portion from this endpoint because when there will be multiple hits at a same time we don't want function to get stuck waiting for user to download the file and deliver the response , we want to reduce waiting time and free function ! 
+ download_url = str(upload_file_path) # give the static file download url back to user served from fastapi static export path # getting file size of zip , units are in bytes converted to mb in response - zip_file_size = os.path.getsize(zip_temp_path) + zip_file_size = os.path.getsize(upload_file_path) + # watches the status code of the link provided and deletes the file if it is 200 + if use_s3_to_upload: + background_tasks.add_task(watch_s3_upload,download_url, upload_file_path) + if use_s3_to_upload or bind_zip: + #remove working dir from the machine , if its inside zip / uploaded we no longer need it + background_tasks.add_task(remove_file,working_dir) response_time = dt.now() - start_time response_time_str = str(response_time) - logging.info( - f"Done Export : {exportname} of {round(inside_file_size/1000000)} MB / {geom_area} sqkm in {response_time_str}") + logging.info(f"Done Export : {exportname} of {round(inside_file_size/1000000)} MB / {geom_area} sqkm in {response_time_str}") - return {"download_url": download_url, "file_name": exportname, "response_time": response_time_str, "query_area": f"""{geom_area} Sq Km """, "binded_file_size": f"""{round(inside_file_size/1000000)} MB""", "zip_file_size_bytes": {zip_file_size}} + return {"download_url": download_url, "file_name": exportname, "response_time": response_time_str, "query_area": f"""{geom_area} Sq Km """, "binded_file_size": f"""{round(inside_file_size/1000000,2)} MB""", "zip_file_size_bytes": {zip_file_size}} @router.get("/status/") @version(1) def check_current_db_status(): - """Gives status about DB update, Substracts with current time and last db update time""" + """Gives status about how recent the osm data is , it will give the last time that database was updated completely""" result = RawData().check_status() - response = f"{result} ago" - return {"last_updated": response} + return {"last_updated": result} def remove_file(path: str) -> None: @@ -330,3 +304,172 @@ def watch_s3_upload(url: str, 
path: str) -> None: logging.debug( "File is uploaded at %s , flushing out from %s", url, path) os.unlink(path) + + +@router.post("/current-snapshot/") +@limiter.limit(f"{export_rate_limit}/minute") +@version(2) +def get_current_snapshot_of_osm_data( + params: RawDataCurrentParams, request: Request): + """Generates the current raw OpenStreetMap data available on database based on the input geometry, query and spatial features. + + Steps to Run Snapshot : + + 1. Post the your request here and your request will be on queue, endpoint will return as following : + { + "task_id": "your task_id", + "track_link": "/tasks/task_id/" + } + 2. Now navigate to /tasks/ with your task id to track progress and result + + Args: + + params (RawDataCurrentParams): + { + "outputType": "GeoJSON", # supports kml,(FLATGEOBUF)fgb,shp + "fileName": "string", + "geometry": { # only polygon is supported ** required field ** + "coordinates": [ + [ + [ + 1,0 + ], + [ + 2,0 + ] + ] + ], + "type": "Polygon" + }, + "filters" : { + "tags": { # tags filter controls no of rows returned + "point" : {"amenity":["shop"]}, + "line" : {}, + "polygon" : {"key":["value1","value2"],"key2":["value1"]}, + "all_geometry" : {"building":['yes']} # master filter applied to all of the geometries selected on geometryType + }, + "attributes": { # attribute column controls associated k-v pairs returned + "point": [], column + "line" : [], + "polygon" : [], + "all_geometry" : ["name","address"], # master field applied to all geometries selected on geometryType + } + }, + "geometryType": [ + "point","line","polygon" + ], + joinFilterType:"OR" # options are and / or , 'or' by default -- applies condition for filters **optional + } + background_tasks (BackgroundTasks): task to cleanup the files produced during export + request (Request): request instance + + Returns : + { + "task_id": "7d241e47-ffd6-405c-9312-614593f77b14", + "track_link": "/current-snapshot/tasks/7d241e47-ffd6-405c-9312-614593f77b14/" + } + + Sample 
Query : + 1. Sample query to extract point and polygon features that are marked building=* with name attribute + { + "outputType": "GeoJSON", + "fileName": "Pokhara_buildings", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 83.96919250488281, + 28.194446860487773 + ], + [ + 83.99751663208006, + 28.194446860487773 + ], + [ + 83.99751663208006, + 28.214869548073377 + ], + [ + 83.96919250488281, + 28.214869548073377 + ], + [ + 83.96919250488281, + 28.194446860487773 + ] + ] + ] + }, + "filters": {"tags":{"all_geometry":{"building":[]}},"attributes":{"all_geometry":["name"]}}, + "geometryType": [ + "point","polygon" + ] + } + 2. Query to extract all OpenStreetMap features in a polygon in shapefile format: + { + "outputType": "shp", + "fileName": "Pokhara_all_features", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 83.96919250488281, + 28.194446860487773 + ], + [ + 83.99751663208006, + 28.194446860487773 + ], + [ + 83.99751663208006, + 28.214869548073377 + ], + [ + 83.96919250488281, + 28.214869548073377 + ], + [ + 83.96919250488281, + 28.194446860487773 + ] + ] + ] + } + } + 3. 
Clean query to extract all features by deafult; output will be same as 2nd query but in GeoJSON format and output name will be `default` + { + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 83.96919250488281, + 28.194446860487773 + ], + [ + 83.99751663208006, + 28.194446860487773 + ], + [ + 83.99751663208006, + 28.214869548073377 + ], + [ + 83.96919250488281, + 28.214869548073377 + ], + [ + 83.96919250488281, + 28.194446860487773 + ] + ] + ] + } + } + + """ + # def get_current_data(params:RawDataCurrentParams,background_tasks: BackgroundTasks, user_data=Depends(login_required)): # this will use osm login makes it restrict login + task = process_raw_data.delay(params) + return JSONResponse({"task_id": task.id, "track_link": f"/tasks/status/{task.id}/"}) \ No newline at end of file diff --git a/API/tasks.py b/API/tasks.py new file mode 100644 index 00000000..04c87cbc --- /dev/null +++ b/API/tasks.py @@ -0,0 +1,46 @@ +from celery.result import AsyncResult +from .api_worker import celery +from fastapi import APIRouter +from fastapi_versioning import version +from fastapi.responses import JSONResponse + + +router = APIRouter(prefix="/tasks") + + +@router.get("/status/{task_id}/") +@version(2) +def get_task_status(task_id): + """Tracks the request from the task id provided by galaxy api for the request + + Args: + + task_id ([type]): [Unique id provided on response from /current-snapshot/] + + Returns: + + id: Id of the task + status : SUCCESS / PENDING + result : Result of task + + Successful task will have additional nested json inside row as following : + Example response of rawdata current snapshot response : + + + { + "id": "3fded368-456f-4ef4-a1b8-c099a7f77ca4", + "status": "SUCCESS", + "result": { + "download_url": "https://s3.us-east-1.amazonaws.com/exports-stage.hotosm.org/Raw_Export_3fded368-456f-4ef4-a1b8-c099a7f77ca4_GeoJSON.zip", + "file_name": "Raw_Export_3fded368-456f-4ef4-a1b8-c099a7f77ca4_GeoJSON", + "response_time": "0:00:12.175976", + 
"query_area": "6 Sq Km ", + "binded_file_size": "7 MB", + "zip_file_size_bytes": 1331601 + + } + + """ + task_result = AsyncResult(task_id, app=celery) + result = { "id": task_id, "status": task_result.state, "result": task_result.result if task_result.status == 'SUCCESS' else None } + return JSONResponse(result) \ No newline at end of file diff --git a/API/test_router.py b/API/test_router.py deleted file mode 100644 index 95badfda..00000000 --- a/API/test_router.py +++ /dev/null @@ -1,16 +0,0 @@ -from fastapi import APIRouter -from fastapi_versioning import version - -router = APIRouter(prefix="/test") - - -@router.get("/galaxy/") -@version(1) -def galaxy_says_v1(): - return "Hello" - - -@router.get("/galaxy/") -@version(2) -def galaxy_says_v2(): - return "Hi" diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..f85cbb27 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,26 @@ +FROM python:3.9-bullseye + +ENV PIP_NO_CACHE_DIR=1 +RUN apt-get update && apt-get -y upgrade && \ + apt-get -y install gdal-bin python3-gdal && \ + apt-get -y autoremove && \ + apt-get clean + +RUN mkdir /app +COPY requirements.docker.txt /app/requirements.docker.txt + +RUN chmod +x ./docker-multiple-db.sh +RUN chmod +x ./populate-docker-db.sh + +COPY setup.py /app/setup.py + +WORKDIR /app + +RUN pip install --upgrade pip +RUN pip install -r requirements.docker.txt + +COPY . /app + +RUN pip install -e . + +HEALTHCHECK CMD curl -f http://localhost:8000/latest/docs || exit 1 diff --git a/README.md b/README.md index 16a09abf..c4a23973 100644 --- a/README.md +++ b/README.md @@ -4,171 +4,128 @@ ## Getting Started +API Can be installed through docker or manually to local machine . +To get started with docker follow [GETTING_STARTED_WITH_DOCKER](/docs/GETTING_STARTED_WITH_DOCKER.md) ### 1. Install requirements. 
-Install gdal on your machine , for example on Ubuntu +- Install [gdal](https://gdal.org/index.html) on your machine , for example on Ubuntu ``` -sudo apt-add-repository ppa:ubuntugis/ubuntugis-unstable -sudo apt-get update -sudo apt-get install gdal-bin libgdal-dev -``` - -Clone the Repo to your machine - -``` git clone https://github.com/hotosm/galaxy-api.git ``` - -Navigate to repo - -``` cd galaxy-api ``` +sudo apt-get update && sudo apt-get -y install gdal-bin python3-gdal && sudo apt-get -y autoremove && sudo apt-get clean -Install python dependencies - -```pip install -r requirements.txt``` - -Install gdal python ( Include your gdal version , if you are using different version ) - -```pip install gdal==3.0.2``` +``` +- Install [redis](https://redis.io/docs/getting-started/installation/) on your system +``` +sudo apt-get install redis +``` +- Check Redis server -### 2. Create ```config.txt``` inside src directory. -![image](https://user-images.githubusercontent.com/36752999/188402566-80dc9633-5d4e-479c-97dc-9e8a4999b385.png) +Check redis is running on your machine +Login to redis cli -### 3. Setup Underpass - Run underpass from [here](https://github.com/hotosm/underpass/blob/master/doc/getting-started.md) OR Create database "underpass" in your local postgres and insert sample dump from ```/tests/src/fixtures/underpass.sql ``` +``` +redis-cli +``` -```psql -U postgres -h localhost underpass < underpass.sql``` -### 4. Setup Insights -Setup insights from [here](https://github.com/hotosm/insights) OR Create database "insights" in your local postgres and insert sample dump from ```/tests/src/fixtures/insights.sql ``` +Hit ```ping``` it should return pong -```psql -U postgres -h localhost insights < insights.sql``` +If REDIS is not running check out its [documentation](https://redis.io/docs/getting-started/) -### 5. 
Setup Raw Data -Initialize rawdata from [here](https://github.com/hotosm/underpass/tree/master/raw) OR Create database "raw" in your local postgres and insert sample dump from ```/tests/src/fixtures/raw_data.sql ``` +- Clone the Repo to your machine -```psql -U postgres -h localhost raw < raw_data.sql``` +``` +git clone https://github.com/hotosm/galaxy-api.git +``` +Navigate to repo -### 6. Setup Oauth -Login to [OSM](https://www.openstreetmap.org/) , Click on My Settings and register your local galaxy app to Oauth2applications +``` +cd galaxy-api +``` -![image](https://user-images.githubusercontent.com/36752999/188452619-aababf28-b685-4141-b381-9c25d0367b57.png) +- Install python dependencies +``` +pip install -r requirements.txt +``` -Check on read user preferences and Enter redirect URI as following -```http://127.0.0.1:8000/latest/auth/callback/``` +### 2. Setup required config for API -Grab Client ID and Client Secret and put it inside config.txt as OAUTH Block , you can generate secret key for your application by yourself +Make sure you have https://www.postgresql.org/ setup in your machine or you can use docker +Setup necessary config for API from [docs/CONFIG.DOC](/docs/CONFIG_DOC.md) -### 7. Put your credentials inside config.txt -Insert your config blocks with the database credentials where you have underpass ,insight and tm in your database +### 3. Run server ``` -[INSIGHTS] -host=localhost -user=postgres -password=admin -database=insights -port=5432 - -[UNDERPASS] -host=localhost -user=postgres -password=admin -database=underpass -port=5432 - -[RAW_DATA] -host=localhost -user=postgres -password=admin -database=raw -port=5432 - -[OAUTH] -client_id= your client id -client_secret= your client secret -url=https://www.openstreetmap.org -scope=read_prefs -login_redirect_uri=http://127.0.0.1:8000/latest/auth/callback/ -secret_key=jnfdsjkfndsjkfnsdkjfnskfn +uvicorn API.main:app --reload +``` -[API_CONFIG] -env=dev +### 4. 
Start Celery Worker +You should be able to start [celery](https://docs.celeryq.dev/en/stable/getting-started/first-steps-with-celery.html#running-the-celery-worker-server) worker by running following command on different shell +``` +celery --app API.api_worker worker --loglevel=INFO ``` -#### Optional Configuration +### 5 . [OPTIONAL] Start flower for monitoring queue -You can further customize API if you wish with API_CONFIG Block +API uses flower for monitoring the Celery distributed queue. Run this command on different shell , if you are running redis on same machine your broker could be ```redis://localhost:6379/``` ``` -[API_CONFIG] -export_path=exports/ # used to store export path -api_host=http://127.0.0.1 # you can define this if you have different host -api_port=8000 -max_area=100000 # max area to support for rawdata input -use_connection_pooling=True # default it will not use connection pooling but you can configure api to use to for psycopg2 connections -log_level=info #options are info,debug,warning,error -env=dev # default is dev , supported values are dev and prod -shp_limit=6000 # in mb default is 4096 +celery --app API.api_worker flower --port=4000 --broker=redis://redis:6379/ ``` -Based on your requirement you can also customize rawdata exports parameter using EXPORT_UPLOAD block + +### 6. 
Navigate to Fast API Docs to get details about API Endpoint + +After sucessfully running server , hit [this](http://127.0.0.1:8000/latest/docs) URL on your browser ``` -[EXPORT_UPLOAD] -FILE_UPLOAD_METHOD=disk # options are s3,disk , default disk -AWS_ACCESS_KEY_ID= your id -AWS_SECRET_ACCESS_KEY= yourkey -BUCKET_NAME= your bucket name +http://127.0.0.1:8000/latest/docs ``` -##### Setup Tasking Manager Database for TM related development -Setup Tasking manager from [here](https://github.com/hotosm/tasking-manager/blob/develop/docs/developers/development-setup.md#backend) OR Create database "tm" in your local postgres and insert sample dump from [TM test dump](https://github.com/hotosm/tasking-manager/blob/develop/tests/database/tasking-manager.sql). -(```wget https://raw.githubusercontent.com/hotosm/tasking-manager/develop/tests/database/tasking-manager.sql```) +Flower dashboard should be available on 4000 localhost port. -```psql -U postgres -h localhost tm < tasking-manager.sql``` - -Add those block to config.txt with the value you use in the tasking manager configuration. ``` -[TM] -host=localhost -user=postgres -password=admin -database=tm -port=5432 +http://127.0.0.1:4000/ ``` -You can test it with the `/mapathon/detail/` endpoint and with the following input: -`{"fromTimestamp":"2019-04-08 10:00:00.000000","toTimestamp":"2019-04-08 11:00:00.000000","projectIds":[1],"hashtags":[]}` +## Check API Installation +- Check Mapathon Summary : -### 8. Run server + ``` + curl -d '{"project_ids": [11224, 10042, 9906, 1381, 11203, 10681, 8055, 8732, 11193, 7305,11210, 10985, 10988, 11190, 6658, 5644, 10913, 6495, 4229],"fromTimestamp":"2021-08-27T9:00:00","toTimestamp":"2021-08-27T11:00:00","hashtags": ["mapandchathour2021"]}' -H 'Content-Type: application/json' http://127.0.0.1:8000/v1/mapathon/summary/ + ``` + It should return some stats -```uvicorn API.main:app --reload``` +- Check Authetication : -### 9. 
Navigate to Fast API Docs to get details about API Endpoint + 1. Hit /auth/login/ + 2. Hit Url returned on response + 3. You will get access_token + 4. You can use that access_token in all endpoints that requires authentication , To check token pass token in /auth/me/ It should return your osm profile -After sucessfully running server , hit [this](http://127.0.0.1:8000/latest/docs) URL on your browser + If you get a 401 response with the detail "User is not staff member", get your OSM id using https://galaxy-api.hotosm.org/v1/docs#/default/get_user_id_osm_users_ids__post, then run the following SQL on underpass database replacing ID: -```http://127.0.0.1:8000/latest/docs``` + ```sql + INSERT INTO users_roles VALUES (ID, 1); + ``` -### Check Authetication + Repeat the steps to get a new access_token. -1. Hit /auth/login/ -2. Hit Url returned on response -3. You will get access_token -4. You can use that access_token in all endpoints that requires authentication , To check token pass token in /auth/me/ It should return your osm profile -If you get a 401 response with the detail "User is not staff member", get your OSM id using https://galaxy-api.hotosm.org/v1/docs#/default/get_user_id_osm_users_ids__post, then run the following SQL on underpass database replacing ID: +- Check Mapathon detailed report : -```sql -INSERT INTO users_roles VALUES (ID, 1); -``` + You can test with the `/mapathon/detail/` endpoint with the following input to check both authentication , database connection and visualize the above summary result + + ``` + {"project_ids": [11224, 10042, 9906, 1381, 11203, 10681, 8055, 8732, 11193, 7305,11210, 10985, 10988, 11190, 6658, 5644, 10913, 6495, 4229],"fromTimestamp":"2021-08-27T9:00:00","toTimestamp":"2021-08-27T11:00:00","hashtags": ["mapandchathour2021"]} + ``` -Repeat the steps to get a new access_token. +Clean Setup of API can be found in github action workflow , You can follow the steps for more [clarity](/.github/workflows/build.yml). 
```/workflows/build.yml``` #### API has been setup successfully ! @@ -178,18 +135,24 @@ Repeat the steps to get a new access_token. Galaxy-API uses pytest for tests ,Navigate to root Dir, Install package in editable mode -```pip install -e .``` +``` +pip install -e . +``` Make sure you have postgresql installed locally with postgis extension enabled , Now Run Pytest -```py.test -v -s``` +``` +py.test -v -s +``` Run Individual tests -```py.test -k test function name``` +``` +py.test -k test function name +``` # Galaxy Package @@ -197,7 +160,9 @@ Run Individual tests ## Local Install -```python setup.py install``` +``` +python setup.py install +``` Now import as : diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..71768db6 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,60 @@ +version: '3.8' + +services: + + postgres: + image: postgis/postgis + container_name: pgsql + environment: + - POSTGRES_MULTIPLE_DATABASES="underpass","tm","raw","insights" + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=admin + ports: + - '5434:5432' + volumes: + - ./tests/src/fixtures/:/sql/ + - ./postgres-data:/var/lib/postgresql/data + - ./docker-multiple-db.sh:/docker-entrypoint-initdb.d/create.sh + - ./populate-docker-db.sh:/docker-entrypoint-initdb.d/insert.sh + + + app: + build: . + container_name: api + command: uvicorn API.main:app --reload --host 0.0.0.0 --port 8000 --no-use-colors --proxy-headers + ports: + - 8000:8000 + volumes: + - .:/app + depends_on: + - redis + - postgres + + + worker: + build: . + container_name: worker + command: celery --app API.api_worker worker --loglevel=INFO + volumes: + - .:/app + depends_on: + - app + - redis + - postgres + + redis: + image: redis:6-alpine + container_name: redis + ports: + - "6379:6379" + + worker-dashboard: + build: . 
+ container_name: flower + command: celery --app API.api_worker flower --port=4000 --broker=redis://redis:6379/ + ports: + - 4000:4000 + depends_on: + - app + - redis + - worker \ No newline at end of file diff --git a/docker-multiple-db.sh b/docker-multiple-db.sh new file mode 100755 index 00000000..18c0f96b --- /dev/null +++ b/docker-multiple-db.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +set -e +set -u + +function create_user_and_database() { + local database=$1 + echo " Creating user and database '$database'" + psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" <<-EOSQL + CREATE USER $database; + CREATE DATABASE $database; + GRANT ALL PRIVILEGES ON DATABASE $database TO $database; +EOSQL +} + +if [ -n "$POSTGRES_MULTIPLE_DATABASES" ]; then + echo "Multiple database creation requested: $POSTGRES_MULTIPLE_DATABASES" + for db in $(echo $POSTGRES_MULTIPLE_DATABASES | tr ',' ' '); do + create_user_and_database $db + done + echo "Multiple databases created" +fi \ No newline at end of file diff --git a/docs/CONFIG_DOC.md b/docs/CONFIG_DOC.md index aa41c0a5..73382a72 100644 --- a/docs/CONFIG_DOC.md +++ b/docs/CONFIG_DOC.md @@ -1 +1,229 @@ -This document describes what are the config values that API can accept , and what do they mean + +Before getting started on config, make sure you have [Postgres](https://www.postgresql.org/) and [Postgis](https://postgis.net/) setup in your machine. + + +## Compulsory Configuration + +### 1. Create ```config.txt``` inside src directory. +![image](https://user-images.githubusercontent.com/36752999/188402566-80dc9633-5d4e-479c-97dc-9e8a4999b385.png) + + +### 2. 
Setup Underpass + Run underpass from [here](https://github.com/hotosm/underpass/blob/master/doc/getting-started.md) OR Create database "underpass" in your local postgres and insert sample dump from +``` +/tests/src/fixtures/underpass.sql +``` + +``` +psql -U postgres -h localhost underpass < underpass.sql +``` +Put your credentials in Underpass block +``` +[UNDERPASS] +host=localhost +user=postgres +password=admin +database=underpass +port=5432 +``` + +### 3. Setup Insights for Historical Data +Setup insights from [here](https://github.com/hotosm/insights) OR Create database "insights" in your local postgres and insert sample dump from +``` +/tests/src/fixtures/insights.sql +``` + +``` +psql -U postgres -h localhost insights < insights.sql +``` +Add a sample data dump for mapathon summary to visualize statistics + +``` +psql -U postgres -h localhost insights < tests/src/fixtures/mapathon_summary.sql +``` + +Put your credentials in insights block +``` +[INSIGHTS] +host=localhost +user=postgres +password=admin +database=insights +port=5432 +``` + +### 4. Setup Raw Data for Current OSM Snapshot +Initialize rawdata from [here](https://github.com/hotosm/underpass/tree/master/raw) OR Create database "raw" in your local postgres and insert sample dump from +``` +/tests/src/fixtures/raw_data.sql +``` + +``` +psql -U postgres -h localhost raw < raw_data.sql +``` +Put your credentials on Rawdata block + +``` +[RAW_DATA] +host=localhost +user=postgres +password=admin +database=raw +port=5432 +``` + +### 5. 
Setup Tasking Manager Database for TM related development + +Setup Tasking manager from [here](https://github.com/hotosm/tasking-manager) OR Create database "tm" in your local postgres and insert sample dump from TM Sample Dump + +``` +/tests/src/fixtures/tasking-manager.sql +``` + +``` +psql -U postgres -h localhost tm < tasking-manager.sql +``` +Put your credentials on TM block +``` +[TM] +host=localhost +user=postgres +password=admin +database=tm +port=5432 +``` + +### 6. Setup Oauth for Authentication +Login to [OSM](https://www.openstreetmap.org/) , Click on My Settings and register your local galaxy app to Oauth2applications + +![image](https://user-images.githubusercontent.com/36752999/188452619-aababf28-b685-4141-b381-9c25d0367b57.png) + + +Check on read user preferences and Enter redirect URI as following +``` +http://127.0.0.1:8000/latest/auth/callback/ +``` + +Grab Client ID and Client Secret and put it inside config.txt as OAUTH Block , you can generate secret key for your application by yourself + +``` +[OAUTH] +client_id= your client id +client_secret= your client secret +url=https://www.openstreetmap.org +scope=read_prefs +login_redirect_uri=http://127.0.0.1:8000/latest/auth/callback/ +secret_key=jnfdsjkfndsjkfnsdkjfnskfn +``` + +### 7. Configure celery and redis + +Galaxy API uses [Celery 5](https://docs.celeryq.dev/en/stable/getting-started/first-steps-with-celery.html) and [Redis 6](https://redis.io/download/#redis-stack-downloads) for task queue management , Currently implemented for Rawdata endpoint. 6379 is the default port . if you are running redis on same machine your broker could be ```redis://localhost:6379/```. You can change the port according to your configuration for the current docker compose use following + +``` +[CELERY] +CELERY_BROKER_URL=redis://redis:6379/0 +CELERY_RESULT_BACKEND=redis://redis:6379/0 +``` + +### 7. 
Finalizing config.txt +Insert your config blocks with the database credentials where you have underpass ,insight and rawdata in your database along with oauth block + +Summary of command : + +Considering You have PSQL-POSTGIS setup with user **postgres** host **localhost** on port **5432** as password **admin** + +``` + export PGPASSWORD='admin'; + psql -U postgres -h localhost -p 5432 -c "CREATE DATABASE underpass;" + psql -U postgres -h localhost -p 5432 -c "CREATE DATABASE tm;" + psql -U postgres -h localhost -p 5432 -c "CREATE DATABASE raw;" + + cd tests/src/fixtures/ + psql -U postgres -h localhost -p 5432 insights < insights.sql + psql -U postgres -h localhost -p 5432 insights < mapathon_summary.sql + psql -U postgres -h localhost -p 5432 raw < raw_data.sql + psql -U postgres -h localhost -p 5432 underpass < underpass.sql + wget https://raw.githubusercontent.com/hotosm/tasking-manager/develop/tests/database/tasking-manager.sql + psql -U postgres -h localhost -p 5432 tm < tasking-manager.sql +``` + +Your config.txt will look like this + +``` +[UNDERPASS] +host=localhost +user=postgres +password=admin +database=underpass +port=5432 + +[INSIGHTS] +host=localhost +user=postgres +password=admin +database=insights +port=5432 + +[RAW_DATA] +host=localhost +user=postgres +password=admin +database=raw +port=5432 + +[TM] +host=localhost +user=postgres +password=admin +database=tm +port=5432 + +[OAUTH] +client_id= your client id +client_secret= your client secret +url=https://www.openstreetmap.org +scope=read_prefs +login_redirect_uri=http://127.0.0.1:8000/latest/auth/callback/ +secret_key=jnfdsjkfndsjkfnsdkjfnskfn + +[API_CONFIG] +env=dev +log_level=debug + +[CELERY] +CELERY_BROKER_URL=redis://redis:6379/0 +CELERY_RESULT_BACKEND=redis://redis:6379/0 + +``` + +**Tips** : Follow .github/workflows/[unit-test](https://github.com/hotosm/galaxy-api/blob/feature/celery/.github/workflows/unit-test.yml) If you have any confusion on implementation of config file . 
+ +## Optional Configuration [ You can skip this part for basic installation ] + +You can further customize API if you wish with API_CONFIG Block + +``` +[API_CONFIG] +export_path=exports # used to store export path +api_host=http://127.0.0.1 # you can define this if you have different host +api_port=8000 +max_area=100000 # max area to support for rawdata input +use_connection_pooling=True # default it will not use connection pooling but you can configure api to use to for psycopg2 connections +log_level=info #options are info,debug,warning,error +env=dev # default is dev , supported values are dev and prod +allow_bind_zip_filter=true # option to configure export output zipped/unzipped Default all output will be zipped +limiter_storage_uri=redis://localhost:6379 # API uses redis as backend for rate limiting +grid_index_threshold=5000 # value in sqkm to apply grid index filter +export_rate_limit=5 # no of requests per minute - default is 5 requests per minute +``` +Based on your requirement you can also customize rawdata exports parameter using EXPORT_UPLOAD block + +``` +[EXPORT_UPLOAD] +FILE_UPLOAD_METHOD=disk # options are s3,disk , default disk +AWS_ACCESS_KEY_ID= your id +AWS_SECRET_ACCESS_KEY= yourkey +BUCKET_NAME= your bucket name +``` + diff --git a/docs/GETTING_STARTED_WITH_DOCKER.md b/docs/GETTING_STARTED_WITH_DOCKER.md new file mode 100644 index 00000000..24d8e68f --- /dev/null +++ b/docs/GETTING_STARTED_WITH_DOCKER.md @@ -0,0 +1,107 @@ +### 1. 
First Checkout the repository and Setup Config + +``` +git clone https://github.com/hotosm/galaxy-api.git +``` + +- Create config.txt inside /src/ + +``` +touch src/config.txt +``` + +- Put these config blocks inside your file + +If you want to use docker postgres Sample data for underpass, insights, taskingmanager, rawdata is included in db itself : +You can use following config to get started with sample data or Setup them by yourself by following [instructions](../docs/CONFIG_DOC.md) +``` +[INSIGHTS] +host=pgsql +user=postgres +password=admin +database=insights +port=5432 + +[UNDERPASS] +host=pgsql +user=postgres +password=admin +database=underpass +port=5432 + +[TM] +host=pgsql +user=postgres +password=admin +database=tm +port=5432 + +[RAW_DATA] +host=pgsql +user=postgres +password=admin +database=raw +port=5432 + +[API_CONFIG] +env=dev + +[CELERY] +CELERY_BROKER_URL=redis://redis:6379/0 +CELERY_RESULT_BACKEND=redis://redis:6379/0 +``` + +- **Setup Authentication** + + Follow this [Setup Oauth Block](../docs/CONFIG_DOC.md#6-setup-oauth-for-authentication) and include it in your config.txt + +### 2. Create the images and spin up the Docker containers: +``` +docker-compose up -d --build +``` + +### 3. Check Servers + +Uvicorn should be running on [8000](http://127.0.0.1:8000/latest/docs) port , Redis on default port , Celery with a worker and Flower on 4000 + +``` +http://127.0.0.1:8000/latest/docs +``` +API Docs will be displayed like this upon uvicorn successful server start +![image](https://user-images.githubusercontent.com/36752999/191813795-fdfd46fe-5e6c-4ecf-be9b-f9f351d3d1d7.png) + +``` +http://127.0.0.1:4000/ +``` + +Flower [dashboard](http://127.0.0.1:4000/) will look like this on successful installation with a worker online +![image](https://user-images.githubusercontent.com/36752999/191813613-3859522b-ea68-4370-87b2-ebd1d8880d80.png) + + +Now, Continue Readme. 
Check installation from [here](../README.md#check-api-installation) + +### [Troubleshoot] If you can't connect to local postgres from API + +Since API is running through container, If you have local postgres installed on your machine that port may not be accessible as localhost from container , Container needs to connect to your local network , In order to do that there are few options +1. Option one : + + - For windows/ Mac docker user + Replace localhost with ```host.docker.internal``` – This resolves to the outside host and lets you connect to your machine's localhost through container , For example if postgres is running on your machine in 5432 , container can connect from ```host.docker.internal:5432``` + - For linux user : + Linux users can enable host.docker.internal too via the --add-host flag for docker run. Start your containers with this flag to expose the host string: + ```docker run -d --add-host host.docker.internal:host-gateway my-container:latest``` + +2. Option two : + + Find your network ip address (for linux/mac you can use ```ifconfig -l | xargs -n1 ipconfig getifaddr``` ) and use your ip as a host instead of localhost in config file . + + If connection still fails : You may need to edit your postgres config file ( ask postgres where it is by this query ```show config_file;``` ) and edit/enable ```listen_addresses = '*'``` inside ```postgresql.conf``` . Also add ```host all all 0.0.0.0/0 trust``` in ```pg_hba.conf``` + +### [Troubleshoot] If you can't run postgresql on docker to execute .sh script provided + +Make your .sh script executable . For eg : In ubuntu/mac + +``` +chmod +x populate-docker-db.sh && chmod +x docker-multiple-db.sh +``` +In windows you can recreate the file and paste the content ! 
diff --git a/populate-docker-db.sh b/populate-docker-db.sh new file mode 100755 index 00000000..89aae5e0 --- /dev/null +++ b/populate-docker-db.sh @@ -0,0 +1,6 @@ + #!/bin/bash + psql -U postgres insights < /sql/insights.sql + psql -U postgres insights < /sql/mapathon_summary.sql + psql -U postgres raw < /sql/raw_data.sql + psql -U postgres underpass < /sql/underpass.sql + psql -U postgres tm < /sql/tasking-manager.sql \ No newline at end of file diff --git a/requirements.docker.txt b/requirements.docker.txt index 8ad35583..6adae62f 100644 --- a/requirements.docker.txt +++ b/requirements.docker.txt @@ -1,6 +1,6 @@ aiofiles==0.7.0 asgiref==3.3.4 -click==8.0.1 +click==8.0.3 fastapi==0.65.2 h11==0.12.0 importlib-metadata==4.5.0 @@ -26,7 +26,9 @@ sphinx==4.2.0 area==1.1.1 orjson==3.6.7 boto3==1.24.38 -Fiona==1.8.21 fastapi-versioning==0.10.0 -#gdal and ogr2ogr is required on the machine to run rawdata endpoint -#gdal == 3.3.2 +redis==4.3.4 +celery==5.2.7 +flower==1.2.0 +slowapi==0.1.6 + diff --git a/requirements.txt b/requirements.txt index 033c0415..6adae62f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ aiofiles==0.7.0 asgiref==3.3.4 -click==8.0.1 +click==8.0.3 fastapi==0.65.2 h11==0.12.0 importlib-metadata==4.5.0 @@ -26,7 +26,9 @@ sphinx==4.2.0 area==1.1.1 orjson==3.6.7 boto3==1.24.38 -Fiona==1.8.21 fastapi-versioning==0.10.0 -#gdal and ogr2ogr is required on the machine to run rawdata endpoint -# gdal == 3.3.2 +redis==4.3.4 +celery==5.2.7 +flower==1.2.0 +slowapi==0.1.6 + diff --git a/src/config.txt.sample b/src/config.txt.sample index 07ac9d20..ff0b1568 100644 --- a/src/config.txt.sample +++ b/src/config.txt.sample @@ -1,27 +1,30 @@ -############# -# MANDATORY # -############# - [INSIGHTS] host=localhost user=postgres password=admin database=insights -port=5432 +port=5434 [UNDERPASS] host=localhost user=postgres password=admin database=underpass -port=5432 +port=5434 [RAW_DATA] host=localhost user=postgres password=admin database=raw 
-port=5432 +port=5434 + +[TM] +host=localhost +user=postgres +password=admin +database=tm +port=5434 [OAUTH] client_id= @@ -34,30 +37,6 @@ secret_key=PutSomethingRandmHere [API_CONFIG] env=dev -############# -# OPTIONNAL # -############# - -# If enable this [API_CONFIG] section, remove the previous one -#[API_CONFIG] -#export_path=exports/ # used to store export path -#api_host=http://127.0.0.1 -#api_port=8000 -#max_area=100000 # max area to support for rawdata input -#use_connection_pooling=True -#log_level=info #options are info,debug,warning,error -#env=dev # default is prod , supported values are dev and prod -#shp_limit=6000 # in mb default is 4096 - -#[EXPORT_UPLOAD] -#FILE_UPLOAD_METHOD=disk # options are s3,disk -#AWS_ACCESS_KEY_ID= your id -#AWS_SECRET_ACCESS_KEY= yourkey -#BUCKET_NAME= your bucket name - -#[TM] -#host=localhost -#user=postgres -#password=admin -#database=tm -#port=5432 +[CELERY] +CELERY_BROKER_URL=redis://localhost:6379/0 +CELERY_RESULT_BACKEND=redis://localhost:6379/0 \ No newline at end of file diff --git a/src/galaxy/app.py b/src/galaxy/app.py index 760ae580..517bf487 100644 --- a/src/galaxy/app.py +++ b/src/galaxy/app.py @@ -20,13 +20,13 @@ import os import sys import threading -from .config import get_db_connection_params, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, BUCKET_NAME, level, logger as logging, export_path, use_connection_pooling, shp_limit -from .validation.models import Source +from src.galaxy.config import get_db_connection_params, grid_index_threshold, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, BUCKET_NAME, level, logger as logging, export_path, use_connection_pooling +from src.galaxy.validation.models import Source from psycopg2 import connect, sql from psycopg2.extras import DictCursor from psycopg2 import OperationalError -from .validation.models import UserRole, TeamMemberFunction, List, RawDataCurrentParams, RawDataOutputType, MapathonRequestParams, MappedFeature, MapathonSummary, MappedFeatureWithUser, 
MapathonContributor, MappedTaskStats, ValidatedTaskStats, TimeSpentMapping, OrganizationHashtagParams, DataRecencyParams, OrganizationHashtag, Trainings, TrainingParams, TrainingOrganisations, User, TimeSpentValidating, TMUserStats, MapathonDetail, UserStatistics, DataQualityHashtagParams, DataQuality_TM_RequestParams, DataQuality_username_RequestParams -from .query_builder.builder import generate_list_teams_metadata, get_grid_id_query, raw_currentdata_extraction_query, check_last_updated_rawdata, extract_geometry_type_query, raw_historical_data_extraction_query, generate_tm_teams_list, generate_tm_validators_stats_query, create_user_time_spent_mapping_and_validating_query, create_user_tasks_mapped_and_validated_query, generate_organization_hashtag_reports, check_last_updated_user_data_quality_underpass, create_changeset_query, create_osm_history_query, create_users_contributions_query, check_last_updated_osm_insights, generate_data_quality_TM_query, generate_data_quality_hashtag_reports, generate_data_quality_username_query, check_last_updated_mapathon_insights, check_last_updated_user_statistics_insights, check_last_updated_osm_underpass, generate_mapathon_summary_underpass_query, generate_training_organisations_query, generate_filter_training_query, generate_training_query, create_UserStats_get_statistics_query, create_userstats_get_statistics_with_hashtags_query +from src.galaxy.validation.models import UserRole, TeamMemberFunction, List, RawDataCurrentParams, RawDataOutputType, MapathonRequestParams, MappedFeature, MapathonSummary, MappedFeatureWithUser, MapathonContributor, MappedTaskStats, ValidatedTaskStats, TimeSpentMapping, OrganizationHashtagParams, DataRecencyParams, OrganizationHashtag, Trainings, TrainingParams, TrainingOrganisations, User, TimeSpentValidating, TMUserStats, MapathonDetail, UserStatistics, DataQualityHashtagParams, DataQuality_TM_RequestParams, DataQuality_username_RequestParams +from src.galaxy.query_builder.builder import 
generate_list_teams_metadata, get_grid_id_query, raw_currentdata_extraction_query, check_last_updated_rawdata, extract_geometry_type_query, raw_historical_data_extraction_query, generate_tm_teams_list, generate_tm_validators_stats_query, create_user_time_spent_mapping_and_validating_query, create_user_tasks_mapped_and_validated_query, generate_organization_hashtag_reports, check_last_updated_user_data_quality_underpass, create_changeset_query, create_osm_history_query, create_users_contributions_query, check_last_updated_osm_insights, generate_data_quality_TM_query, generate_data_quality_hashtag_reports, generate_data_quality_username_query, check_last_updated_mapathon_insights, check_last_updated_user_statistics_insights, check_last_updated_osm_underpass, generate_mapathon_summary_underpass_query, generate_training_organisations_query, generate_filter_training_query, generate_training_query, create_UserStats_get_statistics_query, create_userstats_get_statistics_with_hashtags_query import json import pandas from json import loads as json_loads @@ -37,8 +37,8 @@ from area import area import subprocess from json import dumps -import fiona -from fiona.crs import from_epsg +# import fiona +# from fiona.crs import from_epsg import time import shutil import boto3 @@ -917,90 +917,81 @@ def extract_historical_data(self): return RawData.to_geojson(results) @staticmethod - def ogr_export(outputtype, query=None, export_temp_path=None, point_query=None, line_query=None, poly_query=None, dump_temp_file_path=None, binding_file_dir=None): + def ogr_export_shp(point_query, line_query, poly_query, working_dir, file_name): + """Function written to support ogr type extractions as well , In this way we will be able to support all file formats supported by Ogr , Currently it is slow when dataset gets bigger as compared to our own conversion method but rich in feature and data types even though it is slow""" + db_items = get_db_connection_params("RAW_DATA") + if point_query: + 
query_path=os.path.join(working_dir,'point.sql') + # writing to .sql to pass in ogr2ogr because we don't want to pass too much argument on command with sql + with open(query_path, 'w', encoding="UTF-8") as file: + file.write(point_query) + # standard file path for the generation + point_file_path=os.path.join(working_dir,f"{file_name}_point.shp") + # command for ogr2ogr to generate file + cmd = '''ogr2ogr -overwrite -f "ESRI Shapefile" {export_path} PG:"host={host} port={port} user={username} dbname={db} password={password}" -sql @"{pg_sql_select}" -progress'''.format( + export_path=point_file_path, host=db_items.get('host'), port=db_items.get('port'), username=db_items.get('user'), db=db_items.get('database'), password=db_items.get('password'), pg_sql_select=query_path) + logging.debug("Calling ogr2ogr-Point Shapefile") + run_ogr2ogr_cmd(cmd) + # clear query file we don't need it anymore + os.remove(query_path) + + if line_query: + query_path=os.path.join(working_dir,'line.sql') + # writing to .sql to pass in ogr2ogr because we don't want to pass too much argument on command with sql + with open(query_path, 'w', encoding="UTF-8") as file: + file.write(line_query) + line_file_path=os.path.join(working_dir,f"{file_name}_line.shp") + cmd = '''ogr2ogr -overwrite -f "ESRI Shapefile" {export_path} PG:"host={host} port={port} user={username} dbname={db} password={password}" -sql @"{pg_sql_select}" -progress'''.format( + export_path=line_file_path, host=db_items.get('host'), port=db_items.get('port'), username=db_items.get('user'), db=db_items.get('database'), password=db_items.get('password'), pg_sql_select=query_path) + logging.debug("Calling ogr2ogr-Line Shapefile") + run_ogr2ogr_cmd(cmd) + # clear query file we don't need it anymore + os.remove(query_path) + + if poly_query: + query_path=os.path.join(working_dir,'poly.sql') + poly_file_path=os.path.join(working_dir,f"{file_name}_poly.shp") + # writing to .sql to pass in ogr2ogr because we don't want to pass too much 
argument on command with sql + with open(query_path, 'w', encoding="UTF-8") as file: + file.write(poly_query) + cmd = '''ogr2ogr -overwrite -f "ESRI Shapefile" {export_path} PG:"host={host} port={port} user={username} dbname={db} password={password}" -sql @"{pg_sql_select}" -progress'''.format( + export_path=poly_file_path, host=db_items.get('host'), port=db_items.get('port'), username=db_items.get('user'), db=db_items.get('database'), password=db_items.get('password'), pg_sql_select=query_path) + logging.debug("Calling ogr2ogr-Poly Shapefile") + run_ogr2ogr_cmd(cmd) + # clear query file we don't need it anymore + os.remove(query_path) + + @staticmethod + def ogr_export(query, outputtype, working_dir, dump_temp_path): """Function written to support ogr type extractions as well , In this way we will be able to support all file formats supported by Ogr , Currently it is slow when dataset gets bigger as compared to our own conversion method but rich in feature and data types even though it is slow""" db_items = get_db_connection_params("RAW_DATA") # format query if it has " in string" - formatted_query = '' - if query: - formatted_query = query.replace('"', '\\"') + query_path=os.path.join(working_dir,'export_query.sql') + # writing to .sql to pass in ogr2ogr because we don't want to pass too much argument on command with sql + with open(query_path, 'w', encoding="UTF-8") as file: + file.write(query) # for mbtiles we need additional input as well i.e. 
minzoom and maxzoom , setting default at max=22 and min=10 - if outputtype is RawDataOutputType.MBTILES.value: - cmd = '''ogr2ogr -overwrite -f \"{outputtype}\" -dsco MINZOOM=10 -dsco MAXZOOM=22 {export_path} PG:"host={host} user={username} dbname={db} password={password}" -sql "{pg_sql_select}" -progress'''.format( - outputtype=outputtype, export_path=export_temp_path, host=db_items.get('host'), username=db_items.get('user'), db=db_items.get('database'), password=db_items.get('password'), pg_sql_select=formatted_query) - - elif outputtype is RawDataOutputType.SHAPEFILE.value: - # if it is shapefile it needs different logic for point,line and polygon - file_paths = [] - outputtype = "ESRI Shapefile" - if point_query: - query_path = f"""{dump_temp_file_path}_sql.sql""" - - # writing to .sql to pass in ogr2ogr because we don't want to pass too much argument on command with sql - with open(query_path, 'w') as file: - file.write(point_query) - # standard file path for the generation - point_file_path = f"""{dump_temp_file_path}_point.shp""" - # command for ogr2ogr to generate file - cmd = '''ogr2ogr -overwrite -f \"{outputtype}\" {export_path} PG:"host={host} user={username} dbname={db} password={password}" -sql @"{pg_sql_select}" -progress'''.format( - outputtype=outputtype, export_path=point_file_path, host=db_items.get('host'), username=db_items.get('user'), db=db_items.get('database'), password=db_items.get('password'), pg_sql_select=query_path) - logging.debug("Calling ogr2ogr-Point Shapefile") - run_ogr2ogr_cmd(cmd, binding_file_dir) - # clear query file we don't need it anymore - os.remove(query_path) - - file_paths.append(point_file_path) - # need filepath to zip in to file and clear them after zipping - file_paths.append(f"""{dump_temp_file_path}_point.shx""") - # file_paths.append(f"""{dump_temp_file_path}_point.cpg""") - file_paths.append(f"""{dump_temp_file_path}_point.dbf""") - file_paths.append(f"""{dump_temp_file_path}_point.prj""") - if line_query: - 
query_path = f"""{dump_temp_file_path}_sql.sql""" - - # writing to .sql to pass in ogr2ogr because we don't want to pass too much argument on command with sql - with open(query_path, 'w') as file: - file.write(line_query) - - line_file_path = f"""{dump_temp_file_path}_line.shp""" - cmd = '''ogr2ogr -overwrite -f \"{outputtype}\" {export_path} PG:"host={host} user={username} dbname={db} password={password}" -sql @"{pg_sql_select}" -progress'''.format( - outputtype=outputtype, export_path=line_file_path, host=db_items.get('host'), username=db_items.get('user'), db=db_items.get('database'), password=db_items.get('password'), pg_sql_select=query_path) - logging.debug("Calling ogr2ogr-Line Shapefile") - run_ogr2ogr_cmd(cmd, binding_file_dir) - # clear query file we don't need it anymore - os.remove(query_path) - - file_paths.append(line_file_path) - file_paths.append(f"""{dump_temp_file_path}_line.shx""") - # file_paths.append(f"""{dump_temp_file_path}_line.cpg""") - file_paths.append(f"""{dump_temp_file_path}_line.dbf""") - file_paths.append(f"""{dump_temp_file_path}_line.prj""") - if poly_query: - - poly_file_path = f"""{dump_temp_file_path}_poly.shp""" - poly_query_path = f"""{dump_temp_file_path}_poly_sql.sql""" - - # writing to .sql to pass in ogr2ogr because we don't want to pass too much argument on command with sql - with open(poly_query_path, 'w') as file: - file.write(poly_query) - cmd = '''ogr2ogr -overwrite -f \"{outputtype}\" {export_path} PG:"host={host} user={username} dbname={db} password={password}" -sql @"{pg_sql_select}" -progress'''.format( - outputtype=outputtype, export_path=poly_file_path, host=db_items.get('host'), username=db_items.get('user'), db=db_items.get('database'), password=db_items.get('password'), pg_sql_select=poly_query_path) - logging.debug("Calling ogr2ogr-Poly Shapefile") - run_ogr2ogr_cmd(cmd, binding_file_dir) - # clear query file we don't need it anymore - os.remove(poly_query_path) - file_paths.append(poly_file_path) - 
file_paths.append(f"""{dump_temp_file_path}_poly.shx""") - # file_paths.append(f"""{dump_temp_file_path}_poly.cpg""") - file_paths.append(f"""{dump_temp_file_path}_poly.dbf""") - file_paths.append(f"""{dump_temp_file_path}_poly.prj""") - return file_paths - else: - # if it is not shapefile use standard ogr2ogr with their output format , will be useful for kml - cmd = '''ogr2ogr -overwrite -f \"{outputtype}\" {export_path} PG:"host={host} user={username} dbname={db} password={password}" -sql "{pg_sql_select}" -progress'''.format( - outputtype=outputtype, export_path=export_path, host=db_items.get('host'), username=db_items.get('user'), db=db_items.get('database'), password=db_items.get('password'), pg_sql_select=formatted_query) - run_ogr2ogr_cmd(cmd, binding_file_dir) - return export_path + if outputtype == RawDataOutputType.MBTILES.value: + cmd = '''ogr2ogr -overwrite -f MBTILES -dsco MINZOOM=10 -dsco MAXZOOM=22 {export_path} PG:"host={host} user={username} dbname={db} password={password}" -sql @"{pg_sql_select}" -progress'''.format( + export_path=dump_temp_path, host=db_items.get('host'), username=db_items.get('user'), db=db_items.get('database'), password=db_items.get('password'), pg_sql_select=query_path) + run_ogr2ogr_cmd(cmd) + + if outputtype == RawDataOutputType.FLATGEOBUF.value: + cmd = '''ogr2ogr -overwrite -f FLATGEOBUF {export_path} PG:"host={host} port={port} user={username} dbname={db} password={password}" -sql @"{pg_sql_select}" -progress VERIFY_BUFFERS=NO'''.format( + export_path=dump_temp_path, host=db_items.get('host'), port=db_items.get('port'), username=db_items.get('user'), db=db_items.get('database'), password=db_items.get('password'), pg_sql_select=query_path) + run_ogr2ogr_cmd(cmd) + + if outputtype == RawDataOutputType.KML.value: + cmd = '''ogr2ogr -overwrite -f KML {export_path} PG:"host={host} port={port} user={username} dbname={db} password={password}" -sql @"{pg_sql_select}" -progress'''.format( + export_path=dump_temp_path, 
host=db_items.get('host'), port=db_items.get('port'), username=db_items.get('user'), db=db_items.get('database'), password=db_items.get('password'), pg_sql_select=query_path) + run_ogr2ogr_cmd(cmd) + + if outputtype == RawDataOutputType.GEOPACKAGE.value: + cmd = '''ogr2ogr -overwrite -f GPKG {export_path} PG:"host={host} port={port} user={username} dbname={db} password={password}" -sql @"{pg_sql_select}" -progress'''.format( + export_path=dump_temp_path, host=db_items.get('host'), port=db_items.get('port'), username=db_items.get('user'), db=db_items.get('database'), password=db_items.get('password'), pg_sql_select=query_path) + run_ogr2ogr_cmd(cmd) + # clear query file we don't need it anymore + os.remove(query_path) @staticmethod def query2geojson(con, extraction_query, dump_temp_file_path): @@ -1029,81 +1020,6 @@ def query2geojson(con, extraction_query, dump_temp_file_path): f.write(post_geojson) logging.debug("Server side Query Result Post Processing Done") - @staticmethod - def query2shapefile(con, point_query, line_query, poly_query, point_schema, line_schema, poly_schema, dump_temp_file_path): - """Function that transfer db query to shp""" - # schema: it is a simple dictionary with geometry and properties as keys - # schema = {'geometry': 'LineString','properties': {'test': 'int'}} - file_paths = [] - if point_query: - logging.debug("Writing Point Shapefile") - - schema = {'geometry': 'Point', 'properties': point_schema, } - point_file_path = f"""{dump_temp_file_path}_point.shp""" - # open a fiona object - pointShp = fiona.open(point_file_path, mode='w', driver='ESRI Shapefile', encoding='UTF-8', - schema=schema, crs="EPSG:4326") - - with con.cursor(name='fetch_raw') as cursor: # using server side cursor - cursor.itersize = 1000 # chunk size to get 1000 row at a time in client side - cursor.execute(point_query) - for row in cursor: - pointShp.write(orjson.loads(row[0])) - - cursor.close() # closing connection to avoid memory issues - # close fiona object - 
pointShp.close() - file_paths.append(point_file_path) - file_paths.append(f"""{dump_temp_file_path}_point.shx""") - file_paths.append(f"""{dump_temp_file_path}_point.cpg""") - file_paths.append(f"""{dump_temp_file_path}_point.dbf""") - file_paths.append(f"""{dump_temp_file_path}_point.prj""") - - if line_query: - logging.debug("Writing Line Shapefile") - - schema = {'geometry': 'LineString', 'properties': line_schema, } - # print(schema) - line_file_path = f"""{dump_temp_file_path}_line.shp""" - with fiona.open(line_file_path, 'w', encoding='UTF-8', crs=from_epsg(4326), driver='ESRI Shapefile', schema=schema) as layer: - with con.cursor(name='fetch_raw') as cursor: # using server side cursor - cursor.itersize = 1000 # chunk size to get 1000 row at a time in client side - cursor.execute(line_query) - for row in cursor: - layer.write(orjson.loads(row[0])) - - cursor.close() # closing connection to avoid memory issues - # close fiona object - layer.close() - file_paths.append(line_file_path) - file_paths.append(f"""{dump_temp_file_path}_line.shx""") - file_paths.append(f"""{dump_temp_file_path}_line.cpg""") - file_paths.append(f"""{dump_temp_file_path}_line.dbf""") - file_paths.append(f"""{dump_temp_file_path}_line.prj""") - - if poly_query: - logging.debug("Writing Poly Shapefile") - - poly_file_path = f"""{dump_temp_file_path}_poly.shp""" - schema = {'geometry': 'Polygon', 'properties': poly_schema, } - - with fiona.open(poly_file_path, 'w', encoding='UTF-8', crs=from_epsg(4326), driver='ESRI Shapefile', schema=schema) as layer: - with con.cursor(name='fetch_raw') as cursor: # using server side cursor - cursor.itersize = 1000 # chunk size to get 1000 row at a time in client side - cursor.execute(poly_query) - for row in cursor: - layer.write(orjson.loads(row[0])) - - cursor.close() # closing connection to avoid memory issues - # close fiona object - layer.close() - file_paths.append(poly_file_path) - file_paths.append(f"""{dump_temp_file_path}_poly.shx""") - 
file_paths.append(f"""{dump_temp_file_path}_poly.cpg""") - file_paths.append(f"""{dump_temp_file_path}_poly.dbf""") - file_paths.append(f"""{dump_temp_file_path}_poly.prj""") - return file_paths - @staticmethod def get_grid_id(geom, cur): """Gets the intersecting related grid id for the geometry that is passed @@ -1117,9 +1033,9 @@ def get_grid_id(geom, cur): """ geometry_dump = dumps(dict(geom)) # generating geometry area in sqkm - geom_area = int(area(json.loads(geom.json())) * 1E-6) + geom_area = area(json.loads(geom.json())) * 1E-6 # only apply grid in the logic if it exceeds the 5000 Sqkm - if geom_area > 5000: + if int(geom_area) > grid_index_threshold: # this will be applied only when polygon gets bigger we will be slicing index size to search cur.execute( get_grid_id_query(geometry_dump)) @@ -1135,46 +1051,34 @@ def extract_current_data(self, exportname): exportname: takes filename as argument to create geojson file passed from routers Returns: - _file_path_: geojson file location path + geom_area: area of polygon supplied + working_dir: dir where results are saved """ # first check either geometry needs grid or not for querying grid_id, geometry_dump, geom_area = RawData.get_grid_id( self.params.geometry, self.cur) - if self.params.output_type is None: - # if nothing is supplied then default output type will be geojson - output_type = RawDataOutputType.GEOJSON.value - else: - output_type = self.params.output_type - + output_type = self.params.output_type # Check whether the export path exists or not - isExist = os.path.exists(export_path) - if not isExist: + working_dir=os.path.join(export_path, exportname) + if not os.path.exists(working_dir): # Create a exports directory because it does not exist - os.makedirs(export_path) - root_dir_file = export_path - path = f"""{export_path}{exportname}/""" - os.makedirs(path) + os.makedirs(working_dir) # create file path with respect to of output type - dump_temp_file_path = 
f"""{path}{exportname}.{output_type.lower()}""" + dump_temp_file_path = os.path.join(working_dir, f"{self.params.file_name if self.params.file_name else 'Export'}.{output_type.lower()}") try: # currently we have only geojson binding function written other than that we have depend on ogr - if output_type is RawDataOutputType.GEOJSON.value: + if output_type == RawDataOutputType.GEOJSON.value: RawData.query2geojson(self.con, raw_currentdata_extraction_query( self.params, g_id=grid_id, geometry_dump=geometry_dump), dump_temp_file_path) # uses own conversion class - elif output_type is RawDataOutputType.SHAPEFILE.value: + elif output_type == RawDataOutputType.SHAPEFILE.value: point_query, line_query, poly_query, point_schema, line_schema, poly_schema = extract_geometry_type_query( self.params, ogr_export=True) - # point_query, line_query, poly_query, point_schema, line_schema, poly_schema = extract_geometry_type_query( - # self.params,ogr_export=True) - dump_temp_file_path = f"""{path}{exportname}""" - filepaths = RawData.ogr_export(outputtype=output_type, point_query=point_query, line_query=line_query, - poly_query=poly_query, dump_temp_file_path=dump_temp_file_path, binding_file_dir=path) # using ogr2ogr - # filepaths = RawData.query2shapefile(self.con, point_query, line_query, poly_query, point_schema, line_schema, poly_schema, dump_temp_file_path) #using fiona - return filepaths, geom_area, root_dir_file + RawData.ogr_export_shp(point_query=point_query, line_query=line_query, + poly_query=poly_query, working_dir=working_dir, file_name=self.params.file_name if self.params.file_name else 'Export') # using ogr2ogr else: - filepaths = RawData.ogr_export(query=raw_currentdata_extraction_query(self.params, grid_id, geometry_dump, ogr_export=True), - export_temp_path=dump_temp_file_path, outputtype=output_type, binding_file_dir=path) # uses ogr export to export - return [dump_temp_file_path], geom_area, root_dir_file + 
RawData.ogr_export(query=raw_currentdata_extraction_query(self.params, grid_id, geometry_dump, ogr_export=True), + outputtype=output_type, dump_temp_path=dump_temp_file_path, working_dir=working_dir ) # uses ogr export to export + return geom_area, working_dir except Exception as ex: logging.error(ex) raise ex @@ -1193,7 +1097,7 @@ def check_status(self): return str(behind_time[0][0]) -def run_ogr2ogr_cmd(cmd, binding_file_dir): +def run_ogr2ogr_cmd(cmd): """Runs command and monitors the file size until the process runs Args: @@ -1201,43 +1105,26 @@ def run_ogr2ogr_cmd(cmd, binding_file_dir): binding_file_dir (_type_): _description_ Raises: - ValueError: Shapefile exceed 4GB limit - ValueError: Binding failed + Exception: If process gets failed """ try: # start_time=time.time() - process = subprocess.Popen( + logging.debug("Calling command : %s", cmd) + process = subprocess.check_output( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, + stderr=subprocess.STDOUT, shell=True, - preexec_fn=os.setsid + preexec_fn=os.setsid, + timeout=60*60*2 #setting timeout of 2 hour ) - while process.poll() is None: - # if (time.time()-start_time)/60 > 25 : - # raise ValueError("Shapefile Exceed Limit export") - - size = 0 - for ele in os.scandir(binding_file_dir): - size += os.path.getsize(ele) - # print(size/1000000) # in MB - if size / 1000000 > shp_limit: - logging.warn( - f"Killing ogr2ogr because it exceed {shp_limit} MB...") - # process.kill() - # os.killpg(os.getpgid(process.pid), signal.SIGTERM) # Send the signal to all the process groups - # shutil.rmtree(binding_file_dir) - raise HTTPException( - status_code=404, detail=f"Shapefile Exceed {shp_limit} MB Limit") - - logging.debug(process.stdout.read()) + logging.debug(process) except Exception as ex: logging.error(ex) - process.kill() - # Send the signal to all the process groups - os.killpg(os.getpgid(process.pid), signal.SIGTERM) - if os.path.exists(binding_file_dir): - shutil.rmtree(binding_file_dir) + # 
process.kill() + # # Send the signal to all the process groups + # os.killpg(os.getpgid(process.pid), signal.SIGTERM) + # if os.path.exists(binding_file_dir): + # shutil.rmtree(binding_file_dir) raise ex @@ -1275,13 +1162,13 @@ def get_bucket_location(self, bucket_name): raise ex return bucket_location or 'us-east-1' - def upload(self, file_path, file_prefix): + def upload(self, file_path, file_name, file_suffix='zip'): """Used for transferring file to s3 after reading path from the user , It will wait for the upload to complete Parameters :file_path --- your local file path to upload , file_prefix -- prefix for the filename which is stored sample function call : S3FileTransfer.transfer(file_path="exports",file_prefix="upload_test") """ - file_name = f"{file_prefix}.zip" + file_name = f"{file_name}.{file_suffix}" # instantiate upload start_time = time.time() @@ -1296,7 +1183,7 @@ def upload(self, file_path, file_prefix): logging.error(ex) raise ex logging.debug("Uploaded %s in %s sec", - file_prefix, time.time() - start_time) + file_name, time.time() - start_time) # generate the download url bucket_location = self.get_bucket_location(bucket_name=BUCKET_NAME) object_url = f"""https://s3.{bucket_location}.amazonaws.com/{BUCKET_NAME}/{file_name}""" diff --git a/src/galaxy/config.py b/src/galaxy/config.py index d9cda9bb..5af19c96 100644 --- a/src/galaxy/config.py +++ b/src/galaxy/config.py @@ -21,15 +21,32 @@ from configparser import ConfigParser import logging +import os +from slowapi.util import get_remote_address +from slowapi import Limiter +import errno +import os CONFIG_FILE_PATH = "src/config.txt" +use_s3_to_upload = False + +if os.path.exists(CONFIG_FILE_PATH) is False: + raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), CONFIG_FILE_PATH) config = ConfigParser() config.read(CONFIG_FILE_PATH) +limiter_storage_uri = config.get( + "API_CONFIG", "limiter_storage_uri", fallback="redis://localhost:6379" +) +limiter = 
Limiter(key_func=get_remote_address, storage_uri=limiter_storage_uri) # rate limiter for API requests based on the remote ip address and redis as backend + +export_rate_limit = int(config.get("API_CONFIG", "export_rate_limit", fallback=5)) + +grid_index_threshold = int(config.get("API_CONFIG", "grid_index_threshold", fallback=5000)) + # get log level from config log_level = config.get("API_CONFIG", "log_level", fallback=None) -use_s3_to_upload = False if log_level is None or log_level.lower() == 'debug': # default debug level = logging.DEBUG @@ -44,20 +61,26 @@ "logging config is not supported , Supported fields are : debug,error,warning,info , Logging to default :debug") level = logging.DEBUG -logging.getLogger("fiona").propagate = False # disable fiona logging +# logging.getLogger("fiona").propagate = False # disable fiona logging logging.basicConfig(format='%(asctime)s - %(message)s', level=level) logging.getLogger('boto3').propagate = False # disable boto3 logging +logging.getLogger('botocore').propagate = False # disable boto3 logging +logging.getLogger('s3transfer').propagate = False # disable boto3 logging +logging.getLogger('boto').propagate = False # disable boto3 logging -logger = logging.getLogger('galaxy') -export_path = config.get('API_CONFIG', 'export_path', fallback=None) -if export_path is None: - export_path = "exports/" -if export_path.endswith("/") is False: - export_path = f"""{export_path}/""" -shp_limit = int(config.get('API_CONFIG', 'shp_limit', fallback=4096)) +logger = logging.getLogger('src.galaxy') +export_path = config.get('API_CONFIG', 'export_path', fallback=None) +if export_path is None: + export_path = "exports" +if not os.path.exists(export_path): + # Create a exports directory because it does not exist + os.makedirs(export_path) +allow_bind_zip_filter=config.get('API_CONFIG', 'allow_bind_zip_filter', fallback=None) +if allow_bind_zip_filter: + allow_bind_zip_filter=True if allow_bind_zip_filter.lower()=='true' else False 
AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, BUCKET_NAME = None, None, None # check either to use connection pooling or not diff --git a/src/galaxy/query_builder/builder.py b/src/galaxy/query_builder/builder.py index 3935193c..e00e4705 100644 --- a/src/galaxy/query_builder/builder.py +++ b/src/galaxy/query_builder/builder.py @@ -900,7 +900,7 @@ def create_column_filter(columns, create_schema=False): return """osm_id ,tags::text as tags,changeset,timestamp::text,geom""" # this is default attribute that we will deliver to user if user defines his own attribute column then those will be appended with osm_id only -def generate_tag_filter_query(filter): +def generate_tag_filter_query(filter,params): incoming_filter = [] for key, value in filter.items(): @@ -919,7 +919,10 @@ def generate_tag_filter_query(filter): f"""tags ->> '{key.strip()}' = '{value[0].strip()}'""") else: incoming_filter.append(f"""tags ? '{key.strip()}'""") - tag_filter = " OR ".join(incoming_filter) + if params.join_filter_type: + tag_filter = f" {params.join_filter_type} ".join(incoming_filter) + else: + tag_filter = " OR ".join(incoming_filter) return tag_filter @@ -942,7 +945,7 @@ def extract_geometry_type_query(params, ogr_export=False): select_condition, schema = create_column_filter( master_attribute_filter, create_schema=True) if master_tag_filter: - attribute_filter = generate_tag_filter_query(master_tag_filter) + attribute_filter = generate_tag_filter_query(master_tag_filter, params) if params.geometry_type is None: # fix me params.geometry_type = ['point', 'line', 'polygon'] @@ -958,7 +961,7 @@ def extract_geometry_type_query(params, ogr_export=False): where {geom_filter}""" if point_tag_filter: - attribute_filter = generate_tag_filter_query(point_tag_filter) + attribute_filter = generate_tag_filter_query(point_tag_filter, params) if attribute_filter: query_point += f""" and ({attribute_filter})""" point_schema = schema @@ -984,7 +987,7 @@ def extract_geometry_type_query(params, 
ogr_export=False): where {geom_filter}""" if line_tag_filter: - attribute_filter = generate_tag_filter_query(line_tag_filter) + attribute_filter = generate_tag_filter_query(line_tag_filter, params) if attribute_filter: query_ways_line += f""" and ({attribute_filter})""" query_relations_line += f""" and ({attribute_filter})""" @@ -1013,7 +1016,7 @@ def extract_geometry_type_query(params, ogr_export=False): where {geom_filter}""" if poly_tag_filter: - attribute_filter = generate_tag_filter_query(poly_tag_filter) + attribute_filter = generate_tag_filter_query(poly_tag_filter, params) if attribute_filter: query_ways_poly += f""" and ({attribute_filter})""" query_relations_poly += f""" and ({attribute_filter})""" @@ -1063,7 +1066,6 @@ def extract_attributes_tags(filters): poly_attribute_filter = v if k == SupportedGeometryFilters.ALLGEOM.value: master_attribute_filter = v - return tags, attributes, point_attribute_filter, line_attribute_filter, poly_attribute_filter, master_attribute_filter, point_tag_filter, line_tag_filter, poly_tag_filter, master_tag_filter @@ -1092,7 +1094,6 @@ def raw_currentdata_extraction_query(params, g_id, geometry_dump, ogr_export=Fal point_select_condition = select_condition # initializing default line_select_condition = select_condition poly_select_condition = select_condition - if params.filters: tags, attributes, point_attribute_filter, line_attribute_filter, poly_attribute_filter, master_attribute_filter, point_tag_filter, line_tag_filter, poly_tag_filter, master_tag_filter = extract_attributes_tags( params.filters) @@ -1115,22 +1116,23 @@ def raw_currentdata_extraction_query(params, g_id, geometry_dump, ogr_export=Fal line_select_condition = create_column_filter( line_attribute_filter) if poly_attribute_filter: - if len(line_attribute_filter) > 0: + if len(poly_attribute_filter) > 0: poly_select_condition = create_column_filter( point_attribute_filter) + print(poly_select_condition) if tags: if master_tag_filter: # if master tag is 
supplied then other tags should be ignored and master tag will be used - master_tag = generate_tag_filter_query(master_tag_filter) + master_tag = generate_tag_filter_query(master_tag_filter, params) point_tag = master_tag line_tag = master_tag poly_tag = master_tag else: if point_tag_filter: - point_tag = generate_tag_filter_query(point_tag_filter) + point_tag = generate_tag_filter_query(point_tag_filter, params) if line_tag_filter: - line_tag = generate_tag_filter_query(line_tag_filter) + line_tag = generate_tag_filter_query(line_tag_filter, params) if poly_tag_filter: - poly_tag = generate_tag_filter_query(poly_tag_filter) + poly_tag = generate_tag_filter_query(poly_tag_filter, params) # condition for geometry types if params.geometry_type is None: @@ -1193,7 +1195,7 @@ def raw_currentdata_extraction_query(params, g_id, geometry_dump, ogr_export=Fal query_ways_poly += f""" and ({poly_tag})""" base_query.append(query_ways_poly) query_relations_poly = f"""select - {select_condition} + {poly_select_condition} from relations where @@ -1217,7 +1219,7 @@ def raw_currentdata_extraction_query(params, g_id, geometry_dump, ogr_export=Fal def check_last_updated_rawdata(): - query = """select NOW()-importdate as last_updated from planet_osm_replication_status""" + query = """select importdate as last_updated from planet_osm_replication_status""" return query diff --git a/src/galaxy/validation/models.py b/src/galaxy/validation/models.py index 90c1e50b..6d0e1a2b 100644 --- a/src/galaxy/validation/models.py +++ b/src/galaxy/validation/models.py @@ -30,7 +30,7 @@ from area import area import re -from ..config import config +from src.galaxy.config import config, allow_bind_zip_filter MAX_POLYGON_AREA = 5000 # km^2 @@ -477,10 +477,11 @@ class TeamMemberFunction(Enum): class RawDataOutputType (Enum): GEOJSON = "GeoJSON" - KML = "KML" + KML = "kml" SHAPEFILE = "shp" - MBTILES = "MBTILES" # fully experimental for now - + FLATGEOBUF = "fgb" + MBTILES = "mbtiles" # fully experimental 
for now + GEOPACKAGE = "gpkg" class HashtagParams(BaseModel): hashtags: Optional[List[str]] @@ -579,13 +580,26 @@ def has_value(cls, value): """Checks if the value is supported""" return value in cls._value2member_map_ +class JoinFilterType (Enum): + OR = "OR" + AND ="AND" class RawDataCurrentParams(BaseModel): output_type: Optional[RawDataOutputType] = None file_name: Optional[str] = None geometry: Union[Polygon, MultiPolygon] filters: Optional[dict] = None + join_filter_type: Optional[JoinFilterType]=None geometry_type: Optional[List[SupportedGeometryFilters]] = None + if allow_bind_zip_filter: + bind_zip: Optional[bool] = True + + @validator("bind_zip", allow_reuse=True) + def check_bind_option(cls, value, values): + """checks if shp is selected along with bind to zip file""" + if value is False and values.get("output_type")=='shp': + raise ValueError("Can't deliver Shapefile without zip , Remove bind_zip paramet or set it to True") + return value @validator("filters", allow_reuse=True) def check_value(cls, value, values): @@ -639,7 +653,7 @@ def check_geometry_area(cls, value, values): output_type = values.get("output_type") if output_type: # for mbtiles ogr2ogr does very worst job when area gets bigger we should write owr own or find better approach for larger area - if output_type is RawDataOutputType.MBTILES.value: + if output_type == RawDataOutputType.MBTILES.value: RAWDATA_CURRENT_POLYGON_AREA = 2 # we need to figure out how much tile we are generating before passing request on the basis of bounding box we can restrict user , right now relation contains whole country for now restricted to this area but can not query relation will take ages because that will intersect with country boundary : need to clip it if area_km2 > RAWDATA_CURRENT_POLYGON_AREA: raise ValueError( diff --git a/tests/src/fixtures/raw_data.sql b/tests/src/fixtures/raw_data.sql index 643738aa..14fc17c0 100644 --- a/tests/src/fixtures/raw_data.sql +++ b/tests/src/fixtures/raw_data.sql @@ 
-24,7 +24,7 @@ CREATE EXTENSION IF NOT EXISTS btree_gist WITH SCHEMA public; -- --- Name: EXTENSION btree_gist; Type: COMMENT; Schema: -; Owner: +-- Name: EXTENSION btree_gist; Type: COMMENT; Schema: -; Owner: -- COMMENT ON EXTENSION btree_gist IS 'support for indexing common datatypes in GiST'; @@ -38,7 +38,7 @@ CREATE EXTENSION IF NOT EXISTS hstore WITH SCHEMA public; -- --- Name: EXTENSION hstore; Type: COMMENT; Schema: -; Owner: +-- Name: EXTENSION hstore; Type: COMMENT; Schema: -; Owner: -- COMMENT ON EXTENSION hstore IS 'data type for storing sets of (key, value) pairs'; @@ -52,7 +52,7 @@ CREATE EXTENSION IF NOT EXISTS postgis WITH SCHEMA public; -- --- Name: EXTENSION postgis; Type: COMMENT; Schema: -; Owner: +-- Name: EXTENSION postgis; Type: COMMENT; Schema: -; Owner: -- COMMENT ON EXTENSION postgis IS 'PostGIS geometry and geography spatial types and functions'; @@ -173,6 +173,15 @@ CREATE UNLOGGED TABLE public.ways_poly ( ) WITH (autovacuum_enabled=off); +CREATE TABLE public.planet_osm_replication_status ( + url text NULL, + "sequence" int4 NULL, + importdate timestamptz NULL +); + +INSERT INTO public.planet_osm_replication_status (url,"sequence",importdate) VALUES + ('https://planet.openstreetmap.org/replication/minute',5000271,'2022-04-04 02:44:59+05:45'); + ALTER TABLE public.ways_poly OWNER TO postgres; diff --git a/tests/src/fixtures/tasking-manager.sql b/tests/src/fixtures/tasking-manager.sql new file mode 100644 index 00000000..15d98f61 --- /dev/null +++ b/tests/src/fixtures/tasking-manager.sql @@ -0,0 +1,1503 @@ +-- +-- PostgreSQL database dump +-- + +-- Dumped from database version 11.2 +-- Dumped by pg_dump version 11.2 + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET client_min_messages = warning; +SET row_security = off; + +-- 
+-- Name: topology; Type: SCHEMA; Schema: -; Owner: - +-- + +CREATE SCHEMA topology; + + +-- +-- Name: SCHEMA topology; Type: COMMENT; Schema: -; Owner: - +-- + +COMMENT ON SCHEMA topology IS 'PostGIS Topology schema'; + + +-- +-- Name: postgis; Type: EXTENSION; Schema: -; Owner: - +-- + +CREATE EXTENSION IF NOT EXISTS postgis WITH SCHEMA public; + + +-- +-- Name: EXTENSION postgis; Type: COMMENT; Schema: -; Owner: - +-- + +COMMENT ON EXTENSION postgis IS 'PostGIS geometry, geography, and raster spatial types and functions'; + + +-- +-- Name: postgis_topology; Type: EXTENSION; Schema: -; Owner: - +-- + +CREATE EXTENSION IF NOT EXISTS postgis_topology WITH SCHEMA topology; + + +-- +-- Name: EXTENSION postgis_topology; Type: COMMENT; Schema: -; Owner: - +-- + +COMMENT ON EXTENSION postgis_topology IS 'PostGIS topology spatial types and functions'; + + +SET default_tablespace = ''; + +SET default_with_oids = false; + +-- +-- Name: alembic_version; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.alembic_version ( + version_num character varying(32) NOT NULL +); + + +-- +-- Name: licenses; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.licenses ( + id integer NOT NULL, + name character varying, + description character varying, + plain_text character varying +); + + +-- +-- Name: licenses_id_seq; Type: SEQUENCE; Schema: public; Owner: - +-- + +CREATE SEQUENCE public.licenses_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +-- +-- Name: licenses_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - +-- + +ALTER SEQUENCE public.licenses_id_seq OWNED BY public.licenses.id; + + +-- +-- Name: messages; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.messages ( + id integer NOT NULL, + message character varying, + subject character varying, + from_user_id bigint, + to_user_id bigint, + date timestamp without time zone, + read boolean, + message_type integer, + project_id 
integer, + task_id integer +); + + +-- +-- Name: messages_id_seq; Type: SEQUENCE; Schema: public; Owner: - +-- + +CREATE SEQUENCE public.messages_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +-- +-- Name: messages_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - +-- + +ALTER SEQUENCE public.messages_id_seq OWNED BY public.messages.id; + + +-- +-- Name: priority_areas; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.priority_areas ( + id integer NOT NULL, + geometry public.geometry(Polygon,4326) +); + + +-- +-- Name: priority_areas_id_seq; Type: SEQUENCE; Schema: public; Owner: - +-- + +CREATE SEQUENCE public.priority_areas_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +-- +-- Name: priority_areas_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - +-- + +ALTER SEQUENCE public.priority_areas_id_seq OWNED BY public.priority_areas.id; + + +-- +-- Name: project_allowed_users; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.project_allowed_users ( + project_id integer, + user_id bigint +); + + +-- +-- Name: project_chat; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.project_chat ( + id bigint NOT NULL, + project_id integer NOT NULL, + user_id integer NOT NULL, + time_stamp timestamp without time zone NOT NULL, + message character varying NOT NULL +); + + +-- +-- Name: project_chat_id_seq; Type: SEQUENCE; Schema: public; Owner: - +-- + +CREATE SEQUENCE public.project_chat_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +-- +-- Name: project_chat_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - +-- + +ALTER SEQUENCE public.project_chat_id_seq OWNED BY public.project_chat.id; + + +-- +-- Name: project_info; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.project_info ( + project_id integer NOT NULL, + locale character varying(10) NOT NULL, + name 
character varying(512), + short_description character varying, + description character varying, + instructions character varying, + project_id_str character varying, + text_searchable tsvector, + per_task_instructions character varying +); + + +-- +-- Name: project_priority_areas; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.project_priority_areas ( + project_id integer, + priority_area_id integer +); + + +-- +-- Name: projects; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.projects ( + id integer NOT NULL, + status integer NOT NULL, + created timestamp without time zone NOT NULL, + priority integer, + default_locale character varying(10), + author_id bigint NOT NULL, + mapper_level integer NOT NULL, + enforce_mapper_level boolean, + enforce_validator_role boolean, + private boolean, + entities_to_map character varying, + changeset_comment character varying, + due_date timestamp without time zone, + imagery character varying, + josm_preset character varying, + last_updated timestamp without time zone, + mapping_types integer[], + organisation_tag character varying, + campaign_tag character varying, + total_tasks integer NOT NULL, + tasks_mapped integer NOT NULL, + tasks_validated integer NOT NULL, + tasks_bad_imagery integer NOT NULL, + license_id integer, + centroid public.geometry(Point,4326), + geometry public.geometry(MultiPolygon,4326), + task_creation_mode integer +); + + +-- +-- Name: projects_id_seq; Type: SEQUENCE; Schema: public; Owner: - +-- + +CREATE SEQUENCE public.projects_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +-- +-- Name: projects_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - +-- + +ALTER SEQUENCE public.projects_id_seq OWNED BY public.projects.id; + + +-- +-- Name: tags; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.tags ( + id integer NOT NULL, + organisations character varying, + campaigns character varying +); + + +-- 
+-- Name: tags_id_seq; Type: SEQUENCE; Schema: public; Owner: - +-- + +CREATE SEQUENCE public.tags_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +-- +-- Name: tags_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - +-- + +ALTER SEQUENCE public.tags_id_seq OWNED BY public.tags.id; + + +-- +-- Name: task_history; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.task_history ( + id integer NOT NULL, + project_id integer, + task_id integer NOT NULL, + action character varying NOT NULL, + action_text character varying, + action_date timestamp without time zone NOT NULL, + user_id bigint NOT NULL +); + + +-- +-- Name: task_history_id_seq; Type: SEQUENCE; Schema: public; Owner: - +-- + +CREATE SEQUENCE public.task_history_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +-- +-- Name: task_history_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - +-- + +ALTER SEQUENCE public.task_history_id_seq OWNED BY public.task_history.id; + + +-- +-- Name: task_invalidation_history; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.task_invalidation_history ( + id integer NOT NULL, + project_id integer NOT NULL, + task_id integer NOT NULL, + is_closed boolean, + mapper_id bigint, + mapped_date timestamp without time zone, + invalidator_id bigint, + invalidated_date timestamp without time zone, + invalidation_history_id integer, + validator_id bigint, + validated_date timestamp without time zone, + updated_date timestamp without time zone +); + + +-- +-- Name: task_invalidation_history_id_seq; Type: SEQUENCE; Schema: public; Owner: - +-- + +CREATE SEQUENCE public.task_invalidation_history_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +-- +-- Name: task_invalidation_history_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - +-- + +ALTER SEQUENCE public.task_invalidation_history_id_seq OWNED BY 
public.task_invalidation_history.id; + + +-- +-- Name: tasks; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.tasks ( + id integer NOT NULL, + project_id integer NOT NULL, + x integer, + y integer, + zoom integer, + geometry public.geometry(MultiPolygon,4326), + task_status integer, + locked_by bigint, + mapped_by bigint, + validated_by bigint, + is_square boolean, + extra_properties character varying +); + + +-- +-- Name: users; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.users ( + id bigint NOT NULL, + username character varying, + role integer NOT NULL, + mapping_level integer NOT NULL, + tasks_mapped integer NOT NULL, + tasks_validated integer NOT NULL, + tasks_invalidated integer NOT NULL, + projects_mapped integer[], + email_address character varying, + facebook_id character varying, + is_email_verified boolean, + linkedin_id character varying, + twitter_id character varying, + date_registered timestamp without time zone, + last_validation_date timestamp without time zone, + validation_message boolean DEFAULT true NOT NULL, + is_expert boolean +); + + +-- +-- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: - +-- + +CREATE SEQUENCE public.users_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +-- +-- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - +-- + +ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id; + + +-- +-- Name: users_licenses; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.users_licenses ( + "user" bigint, + license integer +); + + +-- +-- Name: licenses id; Type: DEFAULT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.licenses ALTER COLUMN id SET DEFAULT nextval('public.licenses_id_seq'::regclass); + + +-- +-- Name: messages id; Type: DEFAULT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.messages ALTER COLUMN id SET DEFAULT nextval('public.messages_id_seq'::regclass); + + +-- +-- Name: 
priority_areas id; Type: DEFAULT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.priority_areas ALTER COLUMN id SET DEFAULT nextval('public.priority_areas_id_seq'::regclass); + + +-- +-- Name: project_chat id; Type: DEFAULT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.project_chat ALTER COLUMN id SET DEFAULT nextval('public.project_chat_id_seq'::regclass); + + +-- +-- Name: projects id; Type: DEFAULT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.projects ALTER COLUMN id SET DEFAULT nextval('public.projects_id_seq'::regclass); + + +-- +-- Name: tags id; Type: DEFAULT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.tags ALTER COLUMN id SET DEFAULT nextval('public.tags_id_seq'::regclass); + + +-- +-- Name: task_history id; Type: DEFAULT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.task_history ALTER COLUMN id SET DEFAULT nextval('public.task_history_id_seq'::regclass); + + +-- +-- Name: task_invalidation_history id; Type: DEFAULT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.task_invalidation_history ALTER COLUMN id SET DEFAULT nextval('public.task_invalidation_history_id_seq'::regclass); + + +-- +-- Name: users id; Type: DEFAULT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass); + + +-- +-- Data for Name: alembic_version; Type: TABLE DATA; Schema: public; Owner: - +-- + +COPY public.alembic_version (version_num) FROM stdin; +0a6b82b55983 +\. + + +-- +-- Data for Name: licenses; Type: TABLE DATA; Schema: public; Owner: - +-- + +COPY public.licenses (id, name, description, plain_text) FROM stdin; +\. + + +-- +-- Data for Name: messages; Type: TABLE DATA; Schema: public; Owner: - +-- + +COPY public.messages (id, message, subject, from_user_id, to_user_id, date, read, message_type, project_id, task_id) FROM stdin; +2 Hi ramyaragupathy,
\n
\nWelcome to the HOT Tasking Manager, we hope you will enjoy being part of the community that is helping map the world.
\n
\nIf you would like to be alerted to project updates and feedback on your mapping, please add your email address to your profile by clicking on the link below.
\n
\nUpdate your profile here
\n
\nThank you very much!
\n
\nOn behalf of the Humanitarian OpenStreetMap Team volunteer and staff community we want to welcome you to humanitarian mapping and the wider OpenStreetMap community.
\n
\nFor a much more detailed welcome letter, please visit the OSM Wiki Tasking Manager Welcome page. It has links to great learning resources if you want to learn more right away!\n Welcome to the HOT Tasking Manager \N 2823295 2019-04-09 03:11:54.74308 t 1 \N \N +\. + + +-- +-- Data for Name: priority_areas; Type: TABLE DATA; Schema: public; Owner: - +-- + +COPY public.priority_areas (id, geometry) FROM stdin; +\. + + +-- +-- Data for Name: project_allowed_users; Type: TABLE DATA; Schema: public; Owner: - +-- + +COPY public.project_allowed_users (project_id, user_id) FROM stdin; +\. + + +-- +-- Data for Name: project_chat; Type: TABLE DATA; Schema: public; Owner: - +-- + +COPY public.project_chat (id, project_id, user_id, time_stamp, message) FROM stdin; +\. + + +-- +-- Data for Name: project_info; Type: TABLE DATA; Schema: public; Owner: - +-- + +COPY public.project_info (project_id, locale, name, short_description, description, instructions, project_id_str, text_searchable, per_task_instructions) FROM stdin; +1 en testing ssa testing ssa ahsjhdjshfjsh jhsjdh fsjhdfjs jsfhd jshjf hsjdhf sjhfjsh jshfjsdh jshjf hsjhf jsdhfjs hfjshf jsdhfj sdhfjh jsdhfjshfj shjf hsdj testing ssa ahsjhdjshfjsh jhsjdh fsjhdfjs jsfhd jshjf hsjdhf sjhfjsh jshfjsdh jshjf hsjhf jsdhfjs hfjshf jsdhfj sdhfjh jsdhfjshfj shjf hsdj testing ssa ahsjhdjshfjsh jhsjdh fsjhdfjs jsfhd jshjf hsjdhf sjhfjsh jshfjsdh jshjf hsjhf jsdhfjs hfjshf jsdhfj sdhfjh jsdhfjshfj shjf hsdj 1 \N +2 en arbitrary-project tests tsete tests tsete tests tsete 2 \N +3 en arbitrary-1 arbitrary- test split arbitrary- test split arbitrary- test split 3 \N +\. + + +-- +-- Data for Name: project_priority_areas; Type: TABLE DATA; Schema: public; Owner: - +-- + +COPY public.project_priority_areas (project_id, priority_area_id) FROM stdin; +\. 
+ + +-- +-- Data for Name: projects; Type: TABLE DATA; Schema: public; Owner: - +-- + +COPY public.projects (id, status, created, priority, default_locale, author_id, mapper_level, enforce_mapper_level, enforce_validator_role, private, entities_to_map, changeset_comment, due_date, imagery, josm_preset, last_updated, mapping_types, organisation_tag, campaign_tag, total_tasks, tasks_mapped, tasks_validated, tasks_bad_imagery, license_id, centroid, geometry, task_creation_mode) FROM stdin; +2 1 2019-04-08 12:21:37.425808 1 en 360183 1 f f f \N #hotosm-project-2 \N \N \N 2019-04-09 03:34:52.985812 {} \N \N 16 1 0 1 \N 0101000020E61000004B5FA2C5E5DC3240E03320AF4AF740C0 0106000020E610000001000000010300000001000000050000000100405B40D53240CDE033A57FF540C0010040B6F2D6324010F53EC585F940C00000C02390E23240EDEACD35A0F840C0000080ADC9E63240EBCFC9CBFBF540C00100405B40D53240CDE033A57FF540C0 1 +1 1 2019-04-08 10:54:25.449637 1 en 360183 1 f f f \N #tm-project-1 \N \N \N 2019-04-08 11:40:37.248906 {} \N \N 171 1 1 2 \N 0101000020E61000005A0601152A3543C05A9C94D060CD29C0 0106000020E6100000010000000103000000010000000E00000015A922BEFA3943C0A897377D739529C015A922BE421F43C053173DCE36BD29C015A922BE522D43C0883A6048D8EC29C016A922BE9B2F43C0F948A4C9BBE829C014A9223E5A3C43C0D647DECB9E092AC014A9223ECA4443C05FC6E2FB38072AC015A922BED24343C0988DCFC461F429C015A922BE024143C0B194E08B47E429C015A922BE4B4343C0387210A150D929C016A922BEC73F43C0D1675BBE01CE29C015A922BE7E3D43C0FAB7CBCCD8AF29C016A9223EEC4043C06AD15D4AFE9C29C014A9223E924043C001D02AE3649329C015A922BEFA3943C0A897377D739529C0 0 +3 1 2019-04-08 12:23:34.568831 2 en 360183 1 f f f \N #hotosm-project-3 \N \N \N 2019-04-08 12:24:51.647033 {} \N \N 1 0 0 0 \N 0101000020E61000004B5FA2C5E5DC3240E03320AF4AF740C0 0106000020E610000001000000010300000001000000050000000100405B40D53240CDE033A57FF540C0010040B6F2D6324010F53EC585F940C00000C02390E23240EDEACD35A0F840C0000080ADC9E63240EBCFC9CBFBF540C00100405B40D53240CDE033A57FF540C0 1 +\. 
+ + +-- +-- Data for Name: spatial_ref_sys; Type: TABLE DATA; Schema: public; Owner: - +-- + +COPY public.spatial_ref_sys (srid, auth_name, auth_srid, srtext, proj4text) FROM stdin; +\. + + +-- +-- Data for Name: tags; Type: TABLE DATA; Schema: public; Owner: - +-- + +COPY public.tags (id, organisations, campaigns) FROM stdin; +\. + + +-- +-- Data for Name: task_history; Type: TABLE DATA; Schema: public; Owner: - +-- + +COPY public.task_history (id, project_id, task_id, action, action_text, action_date, user_id) FROM stdin; +3 1 169 STATE_CHANGE SPLIT 2019-04-08 10:56:41.164122 360183 +4 1 169 STATE_CHANGE READY 2019-04-08 10:56:41.165376 360183 +6 1 170 STATE_CHANGE SPLIT 2019-04-08 10:56:41.295043 360183 +7 1 170 STATE_CHANGE READY 2019-04-08 10:56:41.29674 360183 +9 1 171 STATE_CHANGE SPLIT 2019-04-08 10:56:41.420013 360183 +10 1 171 STATE_CHANGE READY 2019-04-08 10:56:41.421315 360183 +12 1 172 STATE_CHANGE SPLIT 2019-04-08 10:56:41.596023 360183 +13 1 172 STATE_CHANGE READY 2019-04-08 10:56:41.597381 360183 +15 1 172 STATE_CHANGE BADIMAGERY 2019-04-08 10:56:46.913498 360183 +14 1 172 LOCKED_FOR_MAPPING 00:00:01.205755 2019-04-08 10:56:45.710522 360183 +17 1 170 STATE_CHANGE MAPPED 2019-04-08 10:56:57.744843 360183 +16 1 170 LOCKED_FOR_MAPPING 00:00:06.556031 2019-04-08 10:56:51.192412 360183 +18 1 169 LOCKED_FOR_MAPPING 00:00:02.350503 2019-04-08 10:57:01.171814 360183 +20 1 129 STATE_CHANGE BADIMAGERY 2019-04-08 11:35:52.948456 360183 +19 1 129 LOCKED_FOR_MAPPING 00:00:03.338244 2019-04-08 11:35:49.613867 360183 +22 1 170 STATE_CHANGE VALIDATED 2019-04-08 11:40:37.256744 360183 +21 1 170 LOCKED_FOR_VALIDATION 00:00:03.416170 2019-04-08 11:40:33.85086 360183 +38 2 6 STATE_CHANGE READY 2019-04-08 12:25:14.405231 360183 +39 2 6 STATE_CHANGE SPLIT 2019-04-08 12:25:14.403753 360183 +40 2 6 STATE_CHANGE SPLIT 2019-04-08 12:25:24.973398 360183 +41 2 6 STATE_CHANGE READY 2019-04-08 12:25:24.97435 360183 +43 2 7 STATE_CHANGE READY 2019-04-08 12:25:14.405231 360183 +44 
2 7 STATE_CHANGE SPLIT 2019-04-08 12:25:14.403753 360183 +45 2 7 STATE_CHANGE SPLIT 2019-04-08 12:25:25.098002 360183 +46 2 7 STATE_CHANGE READY 2019-04-08 12:25:25.098977 360183 +48 2 8 STATE_CHANGE READY 2019-04-08 12:25:14.405231 360183 +49 2 8 STATE_CHANGE SPLIT 2019-04-08 12:25:14.403753 360183 +50 2 8 STATE_CHANGE SPLIT 2019-04-08 12:25:25.761684 360183 +51 2 8 STATE_CHANGE READY 2019-04-08 12:25:25.76252 360183 +53 2 9 STATE_CHANGE READY 2019-04-08 12:25:14.405231 360183 +54 2 9 STATE_CHANGE SPLIT 2019-04-08 12:25:14.403753 360183 +55 2 9 STATE_CHANGE SPLIT 2019-04-08 12:25:25.896845 360183 +56 2 9 STATE_CHANGE READY 2019-04-08 12:25:25.897859 360183 +59 2 10 STATE_CHANGE READY 2019-04-08 12:25:14.871701 360183 +60 2 10 STATE_CHANGE SPLIT 2019-04-08 12:25:14.870823 360183 +61 2 10 STATE_CHANGE SPLIT 2019-04-08 12:25:31.435842 360183 +62 2 10 STATE_CHANGE READY 2019-04-08 12:25:31.437325 360183 +64 2 11 STATE_CHANGE READY 2019-04-08 12:25:14.871701 360183 +65 2 11 STATE_CHANGE SPLIT 2019-04-08 12:25:14.870823 360183 +66 2 11 STATE_CHANGE SPLIT 2019-04-08 12:25:31.565856 360183 +67 2 11 STATE_CHANGE READY 2019-04-08 12:25:31.566573 360183 +69 2 12 STATE_CHANGE READY 2019-04-08 12:25:14.871701 360183 +70 2 12 STATE_CHANGE SPLIT 2019-04-08 12:25:14.870823 360183 +71 2 12 STATE_CHANGE SPLIT 2019-04-08 12:25:31.700419 360183 +72 2 12 STATE_CHANGE READY 2019-04-08 12:25:31.701984 360183 +74 2 13 STATE_CHANGE READY 2019-04-08 12:25:14.871701 360183 +75 2 13 STATE_CHANGE SPLIT 2019-04-08 12:25:14.870823 360183 +76 2 13 STATE_CHANGE SPLIT 2019-04-08 12:25:31.823686 360183 +77 2 13 STATE_CHANGE READY 2019-04-08 12:25:31.82449 360183 +80 2 14 STATE_CHANGE READY 2019-04-08 12:25:14.269767 360183 +81 2 14 STATE_CHANGE SPLIT 2019-04-08 12:25:14.268203 360183 +82 2 14 STATE_CHANGE SPLIT 2019-04-08 12:25:39.346795 360183 +83 2 14 STATE_CHANGE READY 2019-04-08 12:25:39.348197 360183 +85 2 15 STATE_CHANGE READY 2019-04-08 12:25:14.269767 360183 +86 2 15 STATE_CHANGE SPLIT 
2019-04-08 12:25:14.268203 360183 +87 2 15 STATE_CHANGE SPLIT 2019-04-08 12:25:39.491335 360183 +88 2 15 STATE_CHANGE READY 2019-04-08 12:25:39.492087 360183 +90 2 16 STATE_CHANGE READY 2019-04-08 12:25:14.269767 360183 +91 2 16 STATE_CHANGE SPLIT 2019-04-08 12:25:14.268203 360183 +92 2 16 STATE_CHANGE SPLIT 2019-04-08 12:25:39.622269 360183 +93 2 16 STATE_CHANGE READY 2019-04-08 12:25:39.623319 360183 +95 2 17 STATE_CHANGE READY 2019-04-08 12:25:14.269767 360183 +96 2 17 STATE_CHANGE SPLIT 2019-04-08 12:25:14.268203 360183 +97 2 17 STATE_CHANGE SPLIT 2019-04-08 12:25:39.748029 360183 +98 2 17 STATE_CHANGE READY 2019-04-08 12:25:39.749705 360183 +100 2 18 STATE_CHANGE SPLIT 2019-04-08 12:25:14.535811 360183 +101 2 18 STATE_CHANGE READY 2019-04-08 12:25:14.536724 360183 +103 2 18 STATE_CHANGE SPLIT 2019-04-08 12:25:45.646575 360183 +104 2 18 STATE_CHANGE READY 2019-04-08 12:25:45.647648 360183 +105 2 19 STATE_CHANGE SPLIT 2019-04-08 12:25:14.535811 360183 +106 2 19 STATE_CHANGE READY 2019-04-08 12:25:14.536724 360183 +108 2 19 STATE_CHANGE SPLIT 2019-04-08 12:25:45.778525 360183 +109 2 19 STATE_CHANGE READY 2019-04-08 12:25:45.779649 360183 +110 2 20 STATE_CHANGE SPLIT 2019-04-08 12:25:14.535811 360183 +111 2 20 STATE_CHANGE READY 2019-04-08 12:25:14.536724 360183 +113 2 20 STATE_CHANGE SPLIT 2019-04-08 12:25:45.90077 360183 +114 2 20 STATE_CHANGE READY 2019-04-08 12:25:45.901905 360183 +115 2 21 STATE_CHANGE SPLIT 2019-04-08 12:25:14.535811 360183 +116 2 21 STATE_CHANGE READY 2019-04-08 12:25:14.536724 360183 +118 2 21 STATE_CHANGE SPLIT 2019-04-08 12:25:46.025551 360183 +119 2 21 STATE_CHANGE READY 2019-04-08 12:25:46.026555 360183 +121 2 16 STATE_CHANGE BADIMAGERY 2019-04-09 03:27:47.072069 2823295 +120 2 16 LOCKED_FOR_MAPPING 00:00:03.433979 2019-04-09 03:27:43.64278 2823295 +123 2 9 STATE_CHANGE MAPPED 2019-04-09 03:34:52.996621 360183 +122 2 9 LOCKED_FOR_MAPPING 00:02:33.724149 2019-04-09 03:32:19.275216 360183 +\. 
+ + +-- +-- Data for Name: task_invalidation_history; Type: TABLE DATA; Schema: public; Owner: - +-- + +COPY public.task_invalidation_history (id, project_id, task_id, is_closed, mapper_id, mapped_date, invalidator_id, invalidated_date, invalidation_history_id, validator_id, validated_date, updated_date) FROM stdin; +\. + + +-- +-- Data for Name: tasks; Type: TABLE DATA; Schema: public; Owner: - +-- + +COPY public.tasks (id, project_id, x, y, zoom, geometry, task_status, locked_by, mapped_by, validated_by, is_square, extra_properties) FROM stdin; +1 1 6438 7594 14 0106000020E61000000100000001030000000100000005000000BF2CF1FF1F4543C0E020D5674B0D2AC0BF2CF1FF1F4543C080B07A6455022AC0E82EF1FF4F4243C080B07A6455022AC0E82EF1FF4F4243C0E020D5674B0D2AC0BF2CF1FF1F4543C0E020D5674B0D2AC0 0 \N \N \N t \N +2 1 6438 7595 14 0106000020E61000000100000001030000000100000005000000BF2CF1FF1F4543C080B07A6455022AC0BF2CF1FF1F4543C0F0EB21235FF729C0E82EF1FF4F4243C0F0EB21235FF729C0E82EF1FF4F4243C080B07A6455022AC0BF2CF1FF1F4543C080B07A6455022AC0 0 \N \N \N t \N +3 1 6438 7596 14 0106000020E61000000100000001030000000100000005000000BF2CF1FF1F4543C0F0EB21235FF729C0BF2CF1FF1F4543C05023E3A368EC29C0E82EF1FF4F4243C05023E3A368EC29C0E82EF1FF4F4243C0F0EB21235FF729C0BF2CF1FF1F4543C0F0EB21235FF729C0 0 \N \N \N t \N +4 1 6438 7597 14 0106000020E61000000100000001030000000100000005000000BF2CF1FF1F4543C05023E3A368EC29C0BF2CF1FF1F4543C060A9D6E671E129C0E82EF1FF4F4243C060A9D6E671E129C0E82EF1FF4F4243C05023E3A368EC29C0BF2CF1FF1F4543C05023E3A368EC29C0 0 \N \N \N t \N +5 1 6438 7598 14 0106000020E61000000100000001030000000100000005000000BF2CF1FF1F4543C060A9D6E671E129C0BF2CF1FF1F4543C0D0D314EC7AD629C0E82EF1FF4F4243C0D0D314EC7AD629C0E82EF1FF4F4243C060A9D6E671E129C0BF2CF1FF1F4543C060A9D6E671E129C0 0 \N \N \N t \N +6 1 6438 7599 14 
0106000020E61000000100000001030000000100000005000000BF2CF1FF1F4543C0D0D314EC7AD629C0BF2CF1FF1F4543C008FBB5B383CB29C0E82EF1FF4F4243C008FBB5B383CB29C0E82EF1FF4F4243C0D0D314EC7AD629C0BF2CF1FF1F4543C0D0D314EC7AD629C0 0 \N \N \N t \N +7 1 6438 7600 14 0106000020E61000000100000001030000000100000005000000BF2CF1FF1F4543C008FBB5B383CB29C0BF2CF1FF1F4543C0507AD23D8CC029C0E82EF1FF4F4243C0507AD23D8CC029C0E82EF1FF4F4243C008FBB5B383CB29C0BF2CF1FF1F4543C008FBB5B383CB29C0 0 \N \N \N t \N +8 1 6438 7601 14 0106000020E61000000100000001030000000100000005000000BF2CF1FF1F4543C0507AD23D8CC029C0BF2CF1FF1F4543C0A0AF828A94B529C0E82EF1FF4F4243C0A0AF828A94B529C0E82EF1FF4F4243C0507AD23D8CC029C0BF2CF1FF1F4543C0507AD23D8CC029C0 0 \N \N \N t \N +9 1 6438 7602 14 0106000020E61000000100000001030000000100000005000000BF2CF1FF1F4543C0A0AF828A94B529C0BF2CF1FF1F4543C0C8FBDE999CAA29C0E82EF1FF4F4243C0C8FBDE999CAA29C0E82EF1FF4F4243C0A0AF828A94B529C0BF2CF1FF1F4543C0A0AF828A94B529C0 0 \N \N \N t \N +10 1 6438 7603 14 0106000020E61000000100000001030000000100000005000000BF2CF1FF1F4543C0C8FBDE999CAA29C0BF2CF1FF1F4543C070C2FF6BA49F29C0E82EF1FF4F4243C070C2FF6BA49F29C0E82EF1FF4F4243C0C8FBDE999CAA29C0BF2CF1FF1F4543C0C8FBDE999CAA29C0 0 \N \N \N t \N +11 1 6438 7604 14 0106000020E61000000100000001030000000100000005000000BF2CF1FF1F4543C070C2FF6BA49F29C0BF2CF1FF1F4543C0D869FD00AC9429C0E82EF1FF4F4243C0D869FD00AC9429C0E82EF1FF4F4243C070C2FF6BA49F29C0BF2CF1FF1F4543C070C2FF6BA49F29C0 0 \N \N \N t \N +12 1 6438 7605 14 0106000020E61000000100000001030000000100000005000000BF2CF1FF1F4543C0D869FD00AC9429C0BF2CF1FF1F4543C0205BF058B38929C0E82EF1FF4F4243C0205BF058B38929C0E82EF1FF4F4243C0D869FD00AC9429C0BF2CF1FF1F4543C0D869FD00AC9429C0 0 \N \N \N t \N +13 1 6439 7594 14 0106000020E61000000100000001030000000100000005000000E82EF1FF4F4243C0E020D5674B0D2AC0E82EF1FF4F4243C080B07A6455022AC01131F1FF7F3F43C080B07A6455022AC01131F1FF7F3F43C0E020D5674B0D2AC0E82EF1FF4F4243C0E020D5674B0D2AC0 0 \N \N \N t \N +14 1 6439 7595 14 
0106000020E61000000100000001030000000100000005000000E82EF1FF4F4243C080B07A6455022AC0E82EF1FF4F4243C0F0EB21235FF729C01131F1FF7F3F43C0F0EB21235FF729C01131F1FF7F3F43C080B07A6455022AC0E82EF1FF4F4243C080B07A6455022AC0 0 \N \N \N t \N +15 1 6439 7596 14 0106000020E61000000100000001030000000100000005000000E82EF1FF4F4243C0F0EB21235FF729C0E82EF1FF4F4243C05023E3A368EC29C01131F1FF7F3F43C05023E3A368EC29C01131F1FF7F3F43C0F0EB21235FF729C0E82EF1FF4F4243C0F0EB21235FF729C0 0 \N \N \N t \N +16 1 6439 7597 14 0106000020E61000000100000001030000000100000005000000E82EF1FF4F4243C05023E3A368EC29C0E82EF1FF4F4243C060A9D6E671E129C01131F1FF7F3F43C060A9D6E671E129C01131F1FF7F3F43C05023E3A368EC29C0E82EF1FF4F4243C05023E3A368EC29C0 0 \N \N \N t \N +17 1 6439 7598 14 0106000020E61000000100000001030000000100000005000000E82EF1FF4F4243C060A9D6E671E129C0E82EF1FF4F4243C0D0D314EC7AD629C01131F1FF7F3F43C0D0D314EC7AD629C01131F1FF7F3F43C060A9D6E671E129C0E82EF1FF4F4243C060A9D6E671E129C0 0 \N \N \N t \N +18 1 6439 7599 14 0106000020E61000000100000001030000000100000005000000E82EF1FF4F4243C0D0D314EC7AD629C0E82EF1FF4F4243C008FBB5B383CB29C01131F1FF7F3F43C008FBB5B383CB29C01131F1FF7F3F43C0D0D314EC7AD629C0E82EF1FF4F4243C0D0D314EC7AD629C0 0 \N \N \N t \N +19 1 6439 7600 14 0106000020E61000000100000001030000000100000005000000E82EF1FF4F4243C008FBB5B383CB29C0E82EF1FF4F4243C0507AD23D8CC029C01131F1FF7F3F43C0507AD23D8CC029C01131F1FF7F3F43C008FBB5B383CB29C0E82EF1FF4F4243C008FBB5B383CB29C0 0 \N \N \N t \N +20 1 6439 7601 14 0106000020E61000000100000001030000000100000005000000E82EF1FF4F4243C0507AD23D8CC029C0E82EF1FF4F4243C0A0AF828A94B529C01131F1FF7F3F43C0A0AF828A94B529C01131F1FF7F3F43C0507AD23D8CC029C0E82EF1FF4F4243C0507AD23D8CC029C0 0 \N \N \N t \N +21 1 6439 7602 14 0106000020E61000000100000001030000000100000005000000E82EF1FF4F4243C0A0AF828A94B529C0E82EF1FF4F4243C0C8FBDE999CAA29C01131F1FF7F3F43C0C8FBDE999CAA29C01131F1FF7F3F43C0A0AF828A94B529C0E82EF1FF4F4243C0A0AF828A94B529C0 0 \N \N \N t \N +22 1 6439 7603 14 
0106000020E61000000100000001030000000100000005000000E82EF1FF4F4243C0C8FBDE999CAA29C0E82EF1FF4F4243C070C2FF6BA49F29C01131F1FF7F3F43C070C2FF6BA49F29C01131F1FF7F3F43C0C8FBDE999CAA29C0E82EF1FF4F4243C0C8FBDE999CAA29C0 0 \N \N \N t \N +23 1 6439 7604 14 0106000020E61000000100000001030000000100000005000000E82EF1FF4F4243C070C2FF6BA49F29C0E82EF1FF4F4243C0D869FD00AC9429C01131F1FF7F3F43C0D869FD00AC9429C01131F1FF7F3F43C070C2FF6BA49F29C0E82EF1FF4F4243C070C2FF6BA49F29C0 0 \N \N \N t \N +24 1 6439 7605 14 0106000020E61000000100000001030000000100000005000000E82EF1FF4F4243C0D869FD00AC9429C0E82EF1FF4F4243C0205BF058B38929C01131F1FF7F3F43C0205BF058B38929C01131F1FF7F3F43C0D869FD00AC9429C0E82EF1FF4F4243C0D869FD00AC9429C0 0 \N \N \N t \N +25 1 6440 7594 14 0106000020E610000001000000010300000001000000050000001131F1FF7F3F43C0E020D5674B0D2AC01131F1FF7F3F43C080B07A6455022AC03D33F1FFAF3C43C080B07A6455022AC03D33F1FFAF3C43C0E020D5674B0D2AC01131F1FF7F3F43C0E020D5674B0D2AC0 0 \N \N \N t \N +26 1 6440 7595 14 0106000020E610000001000000010300000001000000050000001131F1FF7F3F43C080B07A6455022AC01131F1FF7F3F43C0F0EB21235FF729C03D33F1FFAF3C43C0F0EB21235FF729C03D33F1FFAF3C43C080B07A6455022AC01131F1FF7F3F43C080B07A6455022AC0 0 \N \N \N t \N +27 1 6440 7596 14 0106000020E610000001000000010300000001000000050000001131F1FF7F3F43C0F0EB21235FF729C01131F1FF7F3F43C05023E3A368EC29C03D33F1FFAF3C43C05023E3A368EC29C03D33F1FFAF3C43C0F0EB21235FF729C01131F1FF7F3F43C0F0EB21235FF729C0 0 \N \N \N t \N +28 1 6440 7597 14 0106000020E610000001000000010300000001000000050000001131F1FF7F3F43C05023E3A368EC29C01131F1FF7F3F43C060A9D6E671E129C03D33F1FFAF3C43C060A9D6E671E129C03D33F1FFAF3C43C05023E3A368EC29C01131F1FF7F3F43C05023E3A368EC29C0 0 \N \N \N t \N +29 1 6440 7598 14 0106000020E610000001000000010300000001000000050000001131F1FF7F3F43C060A9D6E671E129C01131F1FF7F3F43C0D0D314EC7AD629C03D33F1FFAF3C43C0D0D314EC7AD629C03D33F1FFAF3C43C060A9D6E671E129C01131F1FF7F3F43C060A9D6E671E129C0 0 \N \N \N t \N +30 1 6440 7599 14 
0106000020E610000001000000010300000001000000050000001131F1FF7F3F43C0D0D314EC7AD629C01131F1FF7F3F43C008FBB5B383CB29C03D33F1FFAF3C43C008FBB5B383CB29C03D33F1FFAF3C43C0D0D314EC7AD629C01131F1FF7F3F43C0D0D314EC7AD629C0 0 \N \N \N t \N +31 1 6440 7600 14 0106000020E610000001000000010300000001000000050000001131F1FF7F3F43C008FBB5B383CB29C01131F1FF7F3F43C0507AD23D8CC029C03D33F1FFAF3C43C0507AD23D8CC029C03D33F1FFAF3C43C008FBB5B383CB29C01131F1FF7F3F43C008FBB5B383CB29C0 0 \N \N \N t \N +32 1 6440 7601 14 0106000020E610000001000000010300000001000000050000001131F1FF7F3F43C0507AD23D8CC029C01131F1FF7F3F43C0A0AF828A94B529C03D33F1FFAF3C43C0A0AF828A94B529C03D33F1FFAF3C43C0507AD23D8CC029C01131F1FF7F3F43C0507AD23D8CC029C0 0 \N \N \N t \N +33 1 6440 7602 14 0106000020E610000001000000010300000001000000050000001131F1FF7F3F43C0A0AF828A94B529C01131F1FF7F3F43C0C8FBDE999CAA29C03D33F1FFAF3C43C0C8FBDE999CAA29C03D33F1FFAF3C43C0A0AF828A94B529C01131F1FF7F3F43C0A0AF828A94B529C0 0 \N \N \N t \N +34 1 6440 7603 14 0106000020E610000001000000010300000001000000050000001131F1FF7F3F43C0C8FBDE999CAA29C01131F1FF7F3F43C070C2FF6BA49F29C03D33F1FFAF3C43C070C2FF6BA49F29C03D33F1FFAF3C43C0C8FBDE999CAA29C01131F1FF7F3F43C0C8FBDE999CAA29C0 0 \N \N \N t \N +35 1 6440 7604 14 0106000020E610000001000000010300000001000000050000001131F1FF7F3F43C070C2FF6BA49F29C01131F1FF7F3F43C0D869FD00AC9429C03D33F1FFAF3C43C0D869FD00AC9429C03D33F1FFAF3C43C070C2FF6BA49F29C01131F1FF7F3F43C070C2FF6BA49F29C0 0 \N \N \N t \N +36 1 6440 7605 14 0106000020E610000001000000010300000001000000050000001131F1FF7F3F43C0D869FD00AC9429C01131F1FF7F3F43C0205BF058B38929C03D33F1FFAF3C43C0205BF058B38929C03D33F1FFAF3C43C0D869FD00AC9429C01131F1FF7F3F43C0D869FD00AC9429C0 0 \N \N \N t \N +37 1 6441 7594 14 0106000020E610000001000000010300000001000000050000003D33F1FFAF3C43C0E020D5674B0D2AC03D33F1FFAF3C43C080B07A6455022AC06735F1FFDF3943C080B07A6455022AC06735F1FFDF3943C0E020D5674B0D2AC03D33F1FFAF3C43C0E020D5674B0D2AC0 0 \N \N \N t \N +38 1 6441 7595 14 
0106000020E610000001000000010300000001000000050000003D33F1FFAF3C43C080B07A6455022AC03D33F1FFAF3C43C0F0EB21235FF729C06735F1FFDF3943C0F0EB21235FF729C06735F1FFDF3943C080B07A6455022AC03D33F1FFAF3C43C080B07A6455022AC0 0 \N \N \N t \N +39 1 6441 7596 14 0106000020E610000001000000010300000001000000050000003D33F1FFAF3C43C0F0EB21235FF729C03D33F1FFAF3C43C05023E3A368EC29C06735F1FFDF3943C05023E3A368EC29C06735F1FFDF3943C0F0EB21235FF729C03D33F1FFAF3C43C0F0EB21235FF729C0 0 \N \N \N t \N +40 1 6441 7597 14 0106000020E610000001000000010300000001000000050000003D33F1FFAF3C43C05023E3A368EC29C03D33F1FFAF3C43C060A9D6E671E129C06735F1FFDF3943C060A9D6E671E129C06735F1FFDF3943C05023E3A368EC29C03D33F1FFAF3C43C05023E3A368EC29C0 0 \N \N \N t \N +41 1 6441 7598 14 0106000020E610000001000000010300000001000000050000003D33F1FFAF3C43C060A9D6E671E129C03D33F1FFAF3C43C0D0D314EC7AD629C06735F1FFDF3943C0D0D314EC7AD629C06735F1FFDF3943C060A9D6E671E129C03D33F1FFAF3C43C060A9D6E671E129C0 0 \N \N \N t \N +42 1 6441 7599 14 0106000020E610000001000000010300000001000000050000003D33F1FFAF3C43C0D0D314EC7AD629C03D33F1FFAF3C43C008FBB5B383CB29C06735F1FFDF3943C008FBB5B383CB29C06735F1FFDF3943C0D0D314EC7AD629C03D33F1FFAF3C43C0D0D314EC7AD629C0 0 \N \N \N t \N +43 1 6441 7600 14 0106000020E610000001000000010300000001000000050000003D33F1FFAF3C43C008FBB5B383CB29C03D33F1FFAF3C43C0507AD23D8CC029C06735F1FFDF3943C0507AD23D8CC029C06735F1FFDF3943C008FBB5B383CB29C03D33F1FFAF3C43C008FBB5B383CB29C0 0 \N \N \N t \N +44 1 6441 7601 14 0106000020E610000001000000010300000001000000050000003D33F1FFAF3C43C0507AD23D8CC029C03D33F1FFAF3C43C0A0AF828A94B529C06735F1FFDF3943C0A0AF828A94B529C06735F1FFDF3943C0507AD23D8CC029C03D33F1FFAF3C43C0507AD23D8CC029C0 0 \N \N \N t \N +45 1 6441 7602 14 0106000020E610000001000000010300000001000000050000003D33F1FFAF3C43C0A0AF828A94B529C03D33F1FFAF3C43C0C8FBDE999CAA29C06735F1FFDF3943C0C8FBDE999CAA29C06735F1FFDF3943C0A0AF828A94B529C03D33F1FFAF3C43C0A0AF828A94B529C0 0 \N \N \N t \N +46 1 6441 7603 14 
0106000020E610000001000000010300000001000000050000003D33F1FFAF3C43C0C8FBDE999CAA29C03D33F1FFAF3C43C070C2FF6BA49F29C06735F1FFDF3943C070C2FF6BA49F29C06735F1FFDF3943C0C8FBDE999CAA29C03D33F1FFAF3C43C0C8FBDE999CAA29C0 0 \N \N \N t \N +47 1 6441 7604 14 0106000020E610000001000000010300000001000000050000003D33F1FFAF3C43C070C2FF6BA49F29C03D33F1FFAF3C43C0D869FD00AC9429C06735F1FFDF3943C0D869FD00AC9429C06735F1FFDF3943C070C2FF6BA49F29C03D33F1FFAF3C43C070C2FF6BA49F29C0 0 \N \N \N t \N +48 1 6441 7605 14 0106000020E610000001000000010300000001000000050000003D33F1FFAF3C43C0D869FD00AC9429C03D33F1FFAF3C43C0205BF058B38929C06735F1FFDF3943C0205BF058B38929C06735F1FFDF3943C0D869FD00AC9429C03D33F1FFAF3C43C0D869FD00AC9429C0 0 \N \N \N t \N +49 1 6442 7594 14 0106000020E610000001000000010300000001000000050000006735F1FFDF3943C0E020D5674B0D2AC06735F1FFDF3943C080B07A6455022AC09037F1FF0F3743C080B07A6455022AC09037F1FF0F3743C0E020D5674B0D2AC06735F1FFDF3943C0E020D5674B0D2AC0 0 \N \N \N t \N +50 1 6442 7595 14 0106000020E610000001000000010300000001000000050000006735F1FFDF3943C080B07A6455022AC06735F1FFDF3943C0F0EB21235FF729C09037F1FF0F3743C0F0EB21235FF729C09037F1FF0F3743C080B07A6455022AC06735F1FFDF3943C080B07A6455022AC0 0 \N \N \N t \N +51 1 6442 7596 14 0106000020E610000001000000010300000001000000050000006735F1FFDF3943C0F0EB21235FF729C06735F1FFDF3943C05023E3A368EC29C09037F1FF0F3743C05023E3A368EC29C09037F1FF0F3743C0F0EB21235FF729C06735F1FFDF3943C0F0EB21235FF729C0 0 \N \N \N t \N +52 1 6442 7597 14 0106000020E610000001000000010300000001000000050000006735F1FFDF3943C05023E3A368EC29C06735F1FFDF3943C060A9D6E671E129C09037F1FF0F3743C060A9D6E671E129C09037F1FF0F3743C05023E3A368EC29C06735F1FFDF3943C05023E3A368EC29C0 0 \N \N \N t \N +53 1 6442 7598 14 0106000020E610000001000000010300000001000000050000006735F1FFDF3943C060A9D6E671E129C06735F1FFDF3943C0D0D314EC7AD629C09037F1FF0F3743C0D0D314EC7AD629C09037F1FF0F3743C060A9D6E671E129C06735F1FFDF3943C060A9D6E671E129C0 0 \N \N \N t \N +54 1 6442 7599 14 
0106000020E610000001000000010300000001000000050000006735F1FFDF3943C0D0D314EC7AD629C06735F1FFDF3943C008FBB5B383CB29C09037F1FF0F3743C008FBB5B383CB29C09037F1FF0F3743C0D0D314EC7AD629C06735F1FFDF3943C0D0D314EC7AD629C0 0 \N \N \N t \N +55 1 6442 7600 14 0106000020E610000001000000010300000001000000050000006735F1FFDF3943C008FBB5B383CB29C06735F1FFDF3943C0507AD23D8CC029C09037F1FF0F3743C0507AD23D8CC029C09037F1FF0F3743C008FBB5B383CB29C06735F1FFDF3943C008FBB5B383CB29C0 0 \N \N \N t \N +56 1 6442 7601 14 0106000020E610000001000000010300000001000000050000006735F1FFDF3943C0507AD23D8CC029C06735F1FFDF3943C0A0AF828A94B529C09037F1FF0F3743C0A0AF828A94B529C09037F1FF0F3743C0507AD23D8CC029C06735F1FFDF3943C0507AD23D8CC029C0 0 \N \N \N t \N +57 1 6442 7602 14 0106000020E610000001000000010300000001000000050000006735F1FFDF3943C0A0AF828A94B529C06735F1FFDF3943C0C8FBDE999CAA29C09037F1FF0F3743C0C8FBDE999CAA29C09037F1FF0F3743C0A0AF828A94B529C06735F1FFDF3943C0A0AF828A94B529C0 0 \N \N \N t \N +58 1 6442 7603 14 0106000020E610000001000000010300000001000000050000006735F1FFDF3943C0C8FBDE999CAA29C06735F1FFDF3943C070C2FF6BA49F29C09037F1FF0F3743C070C2FF6BA49F29C09037F1FF0F3743C0C8FBDE999CAA29C06735F1FFDF3943C0C8FBDE999CAA29C0 0 \N \N \N t \N +59 1 6442 7604 14 0106000020E610000001000000010300000001000000050000006735F1FFDF3943C070C2FF6BA49F29C06735F1FFDF3943C0D869FD00AC9429C09037F1FF0F3743C0D869FD00AC9429C09037F1FF0F3743C070C2FF6BA49F29C06735F1FFDF3943C070C2FF6BA49F29C0 0 \N \N \N t \N +60 1 6442 7605 14 0106000020E610000001000000010300000001000000050000006735F1FFDF3943C0D869FD00AC9429C06735F1FFDF3943C0205BF058B38929C09037F1FF0F3743C0205BF058B38929C09037F1FF0F3743C0D869FD00AC9429C06735F1FFDF3943C0D869FD00AC9429C0 0 \N \N \N t \N +61 1 6443 7594 14 0106000020E610000001000000010300000001000000050000009037F1FF0F3743C0E020D5674B0D2AC09037F1FF0F3743C080B07A6455022AC0B939F1FF3F3443C080B07A6455022AC0B939F1FF3F3443C0E020D5674B0D2AC09037F1FF0F3743C0E020D5674B0D2AC0 0 \N \N \N t \N +62 1 6443 7595 14 
0106000020E610000001000000010300000001000000050000009037F1FF0F3743C080B07A6455022AC09037F1FF0F3743C0F0EB21235FF729C0B939F1FF3F3443C0F0EB21235FF729C0B939F1FF3F3443C080B07A6455022AC09037F1FF0F3743C080B07A6455022AC0 0 \N \N \N t \N +63 1 6443 7596 14 0106000020E610000001000000010300000001000000050000009037F1FF0F3743C0F0EB21235FF729C09037F1FF0F3743C05023E3A368EC29C0B939F1FF3F3443C05023E3A368EC29C0B939F1FF3F3443C0F0EB21235FF729C09037F1FF0F3743C0F0EB21235FF729C0 0 \N \N \N t \N +64 1 6443 7597 14 0106000020E610000001000000010300000001000000050000009037F1FF0F3743C05023E3A368EC29C09037F1FF0F3743C060A9D6E671E129C0B939F1FF3F3443C060A9D6E671E129C0B939F1FF3F3443C05023E3A368EC29C09037F1FF0F3743C05023E3A368EC29C0 0 \N \N \N t \N +65 1 6443 7598 14 0106000020E610000001000000010300000001000000050000009037F1FF0F3743C060A9D6E671E129C09037F1FF0F3743C0D0D314EC7AD629C0B939F1FF3F3443C0D0D314EC7AD629C0B939F1FF3F3443C060A9D6E671E129C09037F1FF0F3743C060A9D6E671E129C0 0 \N \N \N t \N +66 1 6443 7599 14 0106000020E610000001000000010300000001000000050000009037F1FF0F3743C0D0D314EC7AD629C09037F1FF0F3743C008FBB5B383CB29C0B939F1FF3F3443C008FBB5B383CB29C0B939F1FF3F3443C0D0D314EC7AD629C09037F1FF0F3743C0D0D314EC7AD629C0 0 \N \N \N t \N +67 1 6443 7600 14 0106000020E610000001000000010300000001000000050000009037F1FF0F3743C008FBB5B383CB29C09037F1FF0F3743C0507AD23D8CC029C0B939F1FF3F3443C0507AD23D8CC029C0B939F1FF3F3443C008FBB5B383CB29C09037F1FF0F3743C008FBB5B383CB29C0 0 \N \N \N t \N +68 1 6443 7601 14 0106000020E610000001000000010300000001000000050000009037F1FF0F3743C0507AD23D8CC029C09037F1FF0F3743C0A0AF828A94B529C0B939F1FF3F3443C0A0AF828A94B529C0B939F1FF3F3443C0507AD23D8CC029C09037F1FF0F3743C0507AD23D8CC029C0 0 \N \N \N t \N +69 1 6443 7602 14 0106000020E610000001000000010300000001000000050000009037F1FF0F3743C0A0AF828A94B529C09037F1FF0F3743C0C8FBDE999CAA29C0B939F1FF3F3443C0C8FBDE999CAA29C0B939F1FF3F3443C0A0AF828A94B529C09037F1FF0F3743C0A0AF828A94B529C0 0 \N \N \N t \N +70 1 6443 7603 14 
0106000020E610000001000000010300000001000000050000009037F1FF0F3743C0C8FBDE999CAA29C09037F1FF0F3743C070C2FF6BA49F29C0B939F1FF3F3443C070C2FF6BA49F29C0B939F1FF3F3443C0C8FBDE999CAA29C09037F1FF0F3743C0C8FBDE999CAA29C0 0 \N \N \N t \N +71 1 6443 7604 14 0106000020E610000001000000010300000001000000050000009037F1FF0F3743C070C2FF6BA49F29C09037F1FF0F3743C0D869FD00AC9429C0B939F1FF3F3443C0D869FD00AC9429C0B939F1FF3F3443C070C2FF6BA49F29C09037F1FF0F3743C070C2FF6BA49F29C0 0 \N \N \N t \N +72 1 6443 7605 14 0106000020E610000001000000010300000001000000050000009037F1FF0F3743C0D869FD00AC9429C09037F1FF0F3743C0205BF058B38929C0B939F1FF3F3443C0205BF058B38929C0B939F1FF3F3443C0D869FD00AC9429C09037F1FF0F3743C0D869FD00AC9429C0 0 \N \N \N t \N +73 1 6444 7594 14 0106000020E61000000100000001030000000100000005000000B939F1FF3F3443C0E020D5674B0D2AC0B939F1FF3F3443C080B07A6455022AC0E43BF1FF6F3143C080B07A6455022AC0E43BF1FF6F3143C0E020D5674B0D2AC0B939F1FF3F3443C0E020D5674B0D2AC0 0 \N \N \N t \N +74 1 6444 7595 14 0106000020E61000000100000001030000000100000005000000B939F1FF3F3443C080B07A6455022AC0B939F1FF3F3443C0F0EB21235FF729C0E43BF1FF6F3143C0F0EB21235FF729C0E43BF1FF6F3143C080B07A6455022AC0B939F1FF3F3443C080B07A6455022AC0 0 \N \N \N t \N +75 1 6444 7596 14 0106000020E61000000100000001030000000100000005000000B939F1FF3F3443C0F0EB21235FF729C0B939F1FF3F3443C05023E3A368EC29C0E43BF1FF6F3143C05023E3A368EC29C0E43BF1FF6F3143C0F0EB21235FF729C0B939F1FF3F3443C0F0EB21235FF729C0 0 \N \N \N t \N +76 1 6444 7597 14 0106000020E61000000100000001030000000100000005000000B939F1FF3F3443C05023E3A368EC29C0B939F1FF3F3443C060A9D6E671E129C0E43BF1FF6F3143C060A9D6E671E129C0E43BF1FF6F3143C05023E3A368EC29C0B939F1FF3F3443C05023E3A368EC29C0 0 \N \N \N t \N +77 1 6444 7598 14 0106000020E61000000100000001030000000100000005000000B939F1FF3F3443C060A9D6E671E129C0B939F1FF3F3443C0D0D314EC7AD629C0E43BF1FF6F3143C0D0D314EC7AD629C0E43BF1FF6F3143C060A9D6E671E129C0B939F1FF3F3443C060A9D6E671E129C0 0 \N \N \N t \N +78 1 6444 7599 14 
0106000020E61000000100000001030000000100000005000000B939F1FF3F3443C0D0D314EC7AD629C0B939F1FF3F3443C008FBB5B383CB29C0E43BF1FF6F3143C008FBB5B383CB29C0E43BF1FF6F3143C0D0D314EC7AD629C0B939F1FF3F3443C0D0D314EC7AD629C0 0 \N \N \N t \N +79 1 6444 7600 14 0106000020E61000000100000001030000000100000005000000B939F1FF3F3443C008FBB5B383CB29C0B939F1FF3F3443C0507AD23D8CC029C0E43BF1FF6F3143C0507AD23D8CC029C0E43BF1FF6F3143C008FBB5B383CB29C0B939F1FF3F3443C008FBB5B383CB29C0 0 \N \N \N t \N +80 1 6444 7601 14 0106000020E61000000100000001030000000100000005000000B939F1FF3F3443C0507AD23D8CC029C0B939F1FF3F3443C0A0AF828A94B529C0E43BF1FF6F3143C0A0AF828A94B529C0E43BF1FF6F3143C0507AD23D8CC029C0B939F1FF3F3443C0507AD23D8CC029C0 0 \N \N \N t \N +81 1 6444 7602 14 0106000020E61000000100000001030000000100000005000000B939F1FF3F3443C0A0AF828A94B529C0B939F1FF3F3443C0C8FBDE999CAA29C0E43BF1FF6F3143C0C8FBDE999CAA29C0E43BF1FF6F3143C0A0AF828A94B529C0B939F1FF3F3443C0A0AF828A94B529C0 0 \N \N \N t \N +82 1 6444 7603 14 0106000020E61000000100000001030000000100000005000000B939F1FF3F3443C0C8FBDE999CAA29C0B939F1FF3F3443C070C2FF6BA49F29C0E43BF1FF6F3143C070C2FF6BA49F29C0E43BF1FF6F3143C0C8FBDE999CAA29C0B939F1FF3F3443C0C8FBDE999CAA29C0 0 \N \N \N t \N +84 1 6444 7605 14 0106000020E61000000100000001030000000100000005000000B939F1FF3F3443C0D869FD00AC9429C0B939F1FF3F3443C0205BF058B38929C0E43BF1FF6F3143C0205BF058B38929C0E43BF1FF6F3143C0D869FD00AC9429C0B939F1FF3F3443C0D869FD00AC9429C0 0 \N \N \N t \N +85 1 6445 7594 14 0106000020E61000000100000001030000000100000005000000E43BF1FF6F3143C0E020D5674B0D2AC0E43BF1FF6F3143C080B07A6455022AC00D3EF1FF9F2E43C080B07A6455022AC00D3EF1FF9F2E43C0E020D5674B0D2AC0E43BF1FF6F3143C0E020D5674B0D2AC0 0 \N \N \N t \N +86 1 6445 7595 14 0106000020E61000000100000001030000000100000005000000E43BF1FF6F3143C080B07A6455022AC0E43BF1FF6F3143C0F0EB21235FF729C00D3EF1FF9F2E43C0F0EB21235FF729C00D3EF1FF9F2E43C080B07A6455022AC0E43BF1FF6F3143C080B07A6455022AC0 0 \N \N \N t \N +87 1 6445 7596 14 
0106000020E61000000100000001030000000100000005000000E43BF1FF6F3143C0F0EB21235FF729C0E43BF1FF6F3143C05023E3A368EC29C00D3EF1FF9F2E43C05023E3A368EC29C00D3EF1FF9F2E43C0F0EB21235FF729C0E43BF1FF6F3143C0F0EB21235FF729C0 0 \N \N \N t \N +88 1 6445 7597 14 0106000020E61000000100000001030000000100000005000000E43BF1FF6F3143C05023E3A368EC29C0E43BF1FF6F3143C060A9D6E671E129C00D3EF1FF9F2E43C060A9D6E671E129C00D3EF1FF9F2E43C05023E3A368EC29C0E43BF1FF6F3143C05023E3A368EC29C0 0 \N \N \N t \N +89 1 6445 7598 14 0106000020E61000000100000001030000000100000005000000E43BF1FF6F3143C060A9D6E671E129C0E43BF1FF6F3143C0D0D314EC7AD629C00D3EF1FF9F2E43C0D0D314EC7AD629C00D3EF1FF9F2E43C060A9D6E671E129C0E43BF1FF6F3143C060A9D6E671E129C0 0 \N \N \N t \N +90 1 6445 7599 14 0106000020E61000000100000001030000000100000005000000E43BF1FF6F3143C0D0D314EC7AD629C0E43BF1FF6F3143C008FBB5B383CB29C00D3EF1FF9F2E43C008FBB5B383CB29C00D3EF1FF9F2E43C0D0D314EC7AD629C0E43BF1FF6F3143C0D0D314EC7AD629C0 0 \N \N \N t \N +91 1 6445 7600 14 0106000020E61000000100000001030000000100000005000000E43BF1FF6F3143C008FBB5B383CB29C0E43BF1FF6F3143C0507AD23D8CC029C00D3EF1FF9F2E43C0507AD23D8CC029C00D3EF1FF9F2E43C008FBB5B383CB29C0E43BF1FF6F3143C008FBB5B383CB29C0 0 \N \N \N t \N +92 1 6445 7601 14 0106000020E61000000100000001030000000100000005000000E43BF1FF6F3143C0507AD23D8CC029C0E43BF1FF6F3143C0A0AF828A94B529C00D3EF1FF9F2E43C0A0AF828A94B529C00D3EF1FF9F2E43C0507AD23D8CC029C0E43BF1FF6F3143C0507AD23D8CC029C0 0 \N \N \N t \N +93 1 6445 7602 14 0106000020E61000000100000001030000000100000005000000E43BF1FF6F3143C0A0AF828A94B529C0E43BF1FF6F3143C0C8FBDE999CAA29C00D3EF1FF9F2E43C0C8FBDE999CAA29C00D3EF1FF9F2E43C0A0AF828A94B529C0E43BF1FF6F3143C0A0AF828A94B529C0 0 \N \N \N t \N +94 1 6445 7603 14 0106000020E61000000100000001030000000100000005000000E43BF1FF6F3143C0C8FBDE999CAA29C0E43BF1FF6F3143C070C2FF6BA49F29C00D3EF1FF9F2E43C070C2FF6BA49F29C00D3EF1FF9F2E43C0C8FBDE999CAA29C0E43BF1FF6F3143C0C8FBDE999CAA29C0 0 \N \N \N t \N +95 1 6445 7604 14 
0106000020E61000000100000001030000000100000005000000E43BF1FF6F3143C070C2FF6BA49F29C0E43BF1FF6F3143C0D869FD00AC9429C00D3EF1FF9F2E43C0D869FD00AC9429C00D3EF1FF9F2E43C070C2FF6BA49F29C0E43BF1FF6F3143C070C2FF6BA49F29C0 0 \N \N \N t \N +96 1 6445 7605 14 0106000020E61000000100000001030000000100000005000000E43BF1FF6F3143C0D869FD00AC9429C0E43BF1FF6F3143C0205BF058B38929C00D3EF1FF9F2E43C0205BF058B38929C00D3EF1FF9F2E43C0D869FD00AC9429C0E43BF1FF6F3143C0D869FD00AC9429C0 0 \N \N \N t \N +97 1 6446 7594 14 0106000020E610000001000000010300000001000000050000000D3EF1FF9F2E43C0E020D5674B0D2AC00D3EF1FF9F2E43C080B07A6455022AC03940F1FFCF2B43C080B07A6455022AC03940F1FFCF2B43C0E020D5674B0D2AC00D3EF1FF9F2E43C0E020D5674B0D2AC0 0 \N \N \N t \N +98 1 6446 7595 14 0106000020E610000001000000010300000001000000050000000D3EF1FF9F2E43C080B07A6455022AC00D3EF1FF9F2E43C0F0EB21235FF729C03940F1FFCF2B43C0F0EB21235FF729C03940F1FFCF2B43C080B07A6455022AC00D3EF1FF9F2E43C080B07A6455022AC0 0 \N \N \N t \N +99 1 6446 7596 14 0106000020E610000001000000010300000001000000050000000D3EF1FF9F2E43C0F0EB21235FF729C00D3EF1FF9F2E43C05023E3A368EC29C03940F1FFCF2B43C05023E3A368EC29C03940F1FFCF2B43C0F0EB21235FF729C00D3EF1FF9F2E43C0F0EB21235FF729C0 0 \N \N \N t \N +100 1 6446 7597 14 0106000020E610000001000000010300000001000000050000000D3EF1FF9F2E43C05023E3A368EC29C00D3EF1FF9F2E43C060A9D6E671E129C03940F1FFCF2B43C060A9D6E671E129C03940F1FFCF2B43C05023E3A368EC29C00D3EF1FF9F2E43C05023E3A368EC29C0 0 \N \N \N t \N +101 1 6446 7598 14 0106000020E610000001000000010300000001000000050000000D3EF1FF9F2E43C060A9D6E671E129C00D3EF1FF9F2E43C0D0D314EC7AD629C03940F1FFCF2B43C0D0D314EC7AD629C03940F1FFCF2B43C060A9D6E671E129C00D3EF1FF9F2E43C060A9D6E671E129C0 0 \N \N \N t \N +102 1 6446 7599 14 0106000020E610000001000000010300000001000000050000000D3EF1FF9F2E43C0D0D314EC7AD629C00D3EF1FF9F2E43C008FBB5B383CB29C03940F1FFCF2B43C008FBB5B383CB29C03940F1FFCF2B43C0D0D314EC7AD629C00D3EF1FF9F2E43C0D0D314EC7AD629C0 0 \N \N \N t \N +103 1 6446 7600 14 
0106000020E610000001000000010300000001000000050000000D3EF1FF9F2E43C008FBB5B383CB29C00D3EF1FF9F2E43C0507AD23D8CC029C03940F1FFCF2B43C0507AD23D8CC029C03940F1FFCF2B43C008FBB5B383CB29C00D3EF1FF9F2E43C008FBB5B383CB29C0 0 \N \N \N t \N +104 1 6446 7601 14 0106000020E610000001000000010300000001000000050000000D3EF1FF9F2E43C0507AD23D8CC029C00D3EF1FF9F2E43C0A0AF828A94B529C03940F1FFCF2B43C0A0AF828A94B529C03940F1FFCF2B43C0507AD23D8CC029C00D3EF1FF9F2E43C0507AD23D8CC029C0 0 \N \N \N t \N +105 1 6446 7602 14 0106000020E610000001000000010300000001000000050000000D3EF1FF9F2E43C0A0AF828A94B529C00D3EF1FF9F2E43C0C8FBDE999CAA29C03940F1FFCF2B43C0C8FBDE999CAA29C03940F1FFCF2B43C0A0AF828A94B529C00D3EF1FF9F2E43C0A0AF828A94B529C0 0 \N \N \N t \N +106 1 6446 7603 14 0106000020E610000001000000010300000001000000050000000D3EF1FF9F2E43C0C8FBDE999CAA29C00D3EF1FF9F2E43C070C2FF6BA49F29C03940F1FFCF2B43C070C2FF6BA49F29C03940F1FFCF2B43C0C8FBDE999CAA29C00D3EF1FF9F2E43C0C8FBDE999CAA29C0 0 \N \N \N t \N +107 1 6446 7604 14 0106000020E610000001000000010300000001000000050000000D3EF1FF9F2E43C070C2FF6BA49F29C00D3EF1FF9F2E43C0D869FD00AC9429C03940F1FFCF2B43C0D869FD00AC9429C03940F1FFCF2B43C070C2FF6BA49F29C00D3EF1FF9F2E43C070C2FF6BA49F29C0 0 \N \N \N t \N +108 1 6446 7605 14 0106000020E610000001000000010300000001000000050000000D3EF1FF9F2E43C0D869FD00AC9429C00D3EF1FF9F2E43C0205BF058B38929C03940F1FFCF2B43C0205BF058B38929C03940F1FFCF2B43C0D869FD00AC9429C00D3EF1FF9F2E43C0D869FD00AC9429C0 0 \N \N \N t \N +109 1 6447 7594 14 0106000020E610000001000000010300000001000000050000003940F1FFCF2B43C0E020D5674B0D2AC03940F1FFCF2B43C080B07A6455022AC06242F1FFFF2843C080B07A6455022AC06242F1FFFF2843C0E020D5674B0D2AC03940F1FFCF2B43C0E020D5674B0D2AC0 0 \N \N \N t \N +110 1 6447 7595 14 0106000020E610000001000000010300000001000000050000003940F1FFCF2B43C080B07A6455022AC03940F1FFCF2B43C0F0EB21235FF729C06242F1FFFF2843C0F0EB21235FF729C06242F1FFFF2843C080B07A6455022AC03940F1FFCF2B43C080B07A6455022AC0 0 \N \N \N t \N +111 1 6447 7596 14 
0106000020E610000001000000010300000001000000050000003940F1FFCF2B43C0F0EB21235FF729C03940F1FFCF2B43C05023E3A368EC29C06242F1FFFF2843C05023E3A368EC29C06242F1FFFF2843C0F0EB21235FF729C03940F1FFCF2B43C0F0EB21235FF729C0 0 \N \N \N t \N +112 1 6447 7597 14 0106000020E610000001000000010300000001000000050000003940F1FFCF2B43C05023E3A368EC29C03940F1FFCF2B43C060A9D6E671E129C06242F1FFFF2843C060A9D6E671E129C06242F1FFFF2843C05023E3A368EC29C03940F1FFCF2B43C05023E3A368EC29C0 0 \N \N \N t \N +113 1 6447 7598 14 0106000020E610000001000000010300000001000000050000003940F1FFCF2B43C060A9D6E671E129C03940F1FFCF2B43C0D0D314EC7AD629C06242F1FFFF2843C0D0D314EC7AD629C06242F1FFFF2843C060A9D6E671E129C03940F1FFCF2B43C060A9D6E671E129C0 0 \N \N \N t \N +114 1 6447 7599 14 0106000020E610000001000000010300000001000000050000003940F1FFCF2B43C0D0D314EC7AD629C03940F1FFCF2B43C008FBB5B383CB29C06242F1FFFF2843C008FBB5B383CB29C06242F1FFFF2843C0D0D314EC7AD629C03940F1FFCF2B43C0D0D314EC7AD629C0 0 \N \N \N t \N +115 1 6447 7600 14 0106000020E610000001000000010300000001000000050000003940F1FFCF2B43C008FBB5B383CB29C03940F1FFCF2B43C0507AD23D8CC029C06242F1FFFF2843C0507AD23D8CC029C06242F1FFFF2843C008FBB5B383CB29C03940F1FFCF2B43C008FBB5B383CB29C0 0 \N \N \N t \N +116 1 6447 7601 14 0106000020E610000001000000010300000001000000050000003940F1FFCF2B43C0507AD23D8CC029C03940F1FFCF2B43C0A0AF828A94B529C06242F1FFFF2843C0A0AF828A94B529C06242F1FFFF2843C0507AD23D8CC029C03940F1FFCF2B43C0507AD23D8CC029C0 0 \N \N \N t \N +117 1 6447 7602 14 0106000020E610000001000000010300000001000000050000003940F1FFCF2B43C0A0AF828A94B529C03940F1FFCF2B43C0C8FBDE999CAA29C06242F1FFFF2843C0C8FBDE999CAA29C06242F1FFFF2843C0A0AF828A94B529C03940F1FFCF2B43C0A0AF828A94B529C0 0 \N \N \N t \N +118 1 6447 7603 14 0106000020E610000001000000010300000001000000050000003940F1FFCF2B43C0C8FBDE999CAA29C03940F1FFCF2B43C070C2FF6BA49F29C06242F1FFFF2843C070C2FF6BA49F29C06242F1FFFF2843C0C8FBDE999CAA29C03940F1FFCF2B43C0C8FBDE999CAA29C0 0 \N \N \N t \N +119 1 6447 7604 14 
0106000020E610000001000000010300000001000000050000003940F1FFCF2B43C070C2FF6BA49F29C03940F1FFCF2B43C0D869FD00AC9429C06242F1FFFF2843C0D869FD00AC9429C06242F1FFFF2843C070C2FF6BA49F29C03940F1FFCF2B43C070C2FF6BA49F29C0 0 \N \N \N t \N +120 1 6447 7605 14 0106000020E610000001000000010300000001000000050000003940F1FFCF2B43C0D869FD00AC9429C03940F1FFCF2B43C0205BF058B38929C06242F1FFFF2843C0205BF058B38929C06242F1FFFF2843C0D869FD00AC9429C03940F1FFCF2B43C0D869FD00AC9429C0 0 \N \N \N t \N +121 1 6448 7594 14 0106000020E610000001000000010300000001000000050000006242F1FFFF2843C0E020D5674B0D2AC06242F1FFFF2843C080B07A6455022AC08C44F1FF2F2643C080B07A6455022AC08C44F1FF2F2643C0E020D5674B0D2AC06242F1FFFF2843C0E020D5674B0D2AC0 0 \N \N \N t \N +122 1 6448 7595 14 0106000020E610000001000000010300000001000000050000006242F1FFFF2843C080B07A6455022AC06242F1FFFF2843C0F0EB21235FF729C08C44F1FF2F2643C0F0EB21235FF729C08C44F1FF2F2643C080B07A6455022AC06242F1FFFF2843C080B07A6455022AC0 0 \N \N \N t \N +123 1 6448 7596 14 0106000020E610000001000000010300000001000000050000006242F1FFFF2843C0F0EB21235FF729C06242F1FFFF2843C05023E3A368EC29C08C44F1FF2F2643C05023E3A368EC29C08C44F1FF2F2643C0F0EB21235FF729C06242F1FFFF2843C0F0EB21235FF729C0 0 \N \N \N t \N +124 1 6448 7597 14 0106000020E610000001000000010300000001000000050000006242F1FFFF2843C05023E3A368EC29C06242F1FFFF2843C060A9D6E671E129C08C44F1FF2F2643C060A9D6E671E129C08C44F1FF2F2643C05023E3A368EC29C06242F1FFFF2843C05023E3A368EC29C0 0 \N \N \N t \N +125 1 6448 7598 14 0106000020E610000001000000010300000001000000050000006242F1FFFF2843C060A9D6E671E129C06242F1FFFF2843C0D0D314EC7AD629C08C44F1FF2F2643C0D0D314EC7AD629C08C44F1FF2F2643C060A9D6E671E129C06242F1FFFF2843C060A9D6E671E129C0 0 \N \N \N t \N +126 1 6448 7599 14 0106000020E610000001000000010300000001000000050000006242F1FFFF2843C0D0D314EC7AD629C06242F1FFFF2843C008FBB5B383CB29C08C44F1FF2F2643C008FBB5B383CB29C08C44F1FF2F2643C0D0D314EC7AD629C06242F1FFFF2843C0D0D314EC7AD629C0 0 \N \N \N t \N +127 1 6448 7600 14 
0106000020E610000001000000010300000001000000050000006242F1FFFF2843C008FBB5B383CB29C06242F1FFFF2843C0507AD23D8CC029C08C44F1FF2F2643C0507AD23D8CC029C08C44F1FF2F2643C008FBB5B383CB29C06242F1FFFF2843C008FBB5B383CB29C0 0 \N \N \N t \N +128 1 6448 7601 14 0106000020E610000001000000010300000001000000050000006242F1FFFF2843C0507AD23D8CC029C06242F1FFFF2843C0A0AF828A94B529C08C44F1FF2F2643C0A0AF828A94B529C08C44F1FF2F2643C0507AD23D8CC029C06242F1FFFF2843C0507AD23D8CC029C0 0 \N \N \N t \N +130 1 6448 7603 14 0106000020E610000001000000010300000001000000050000006242F1FFFF2843C0C8FBDE999CAA29C06242F1FFFF2843C070C2FF6BA49F29C08C44F1FF2F2643C070C2FF6BA49F29C08C44F1FF2F2643C0C8FBDE999CAA29C06242F1FFFF2843C0C8FBDE999CAA29C0 0 \N \N \N t \N +131 1 6448 7604 14 0106000020E610000001000000010300000001000000050000006242F1FFFF2843C070C2FF6BA49F29C06242F1FFFF2843C0D869FD00AC9429C08C44F1FF2F2643C0D869FD00AC9429C08C44F1FF2F2643C070C2FF6BA49F29C06242F1FFFF2843C070C2FF6BA49F29C0 0 \N \N \N t \N +132 1 6448 7605 14 0106000020E610000001000000010300000001000000050000006242F1FFFF2843C0D869FD00AC9429C06242F1FFFF2843C0205BF058B38929C08C44F1FF2F2643C0205BF058B38929C08C44F1FF2F2643C0D869FD00AC9429C06242F1FFFF2843C0D869FD00AC9429C0 0 \N \N \N t \N +133 1 6449 7594 14 0106000020E610000001000000010300000001000000050000008C44F1FF2F2643C0E020D5674B0D2AC08C44F1FF2F2643C080B07A6455022AC0B546F1FF5F2343C080B07A6455022AC0B546F1FF5F2343C0E020D5674B0D2AC08C44F1FF2F2643C0E020D5674B0D2AC0 0 \N \N \N t \N +134 1 6449 7595 14 0106000020E610000001000000010300000001000000050000008C44F1FF2F2643C080B07A6455022AC08C44F1FF2F2643C0F0EB21235FF729C0B546F1FF5F2343C0F0EB21235FF729C0B546F1FF5F2343C080B07A6455022AC08C44F1FF2F2643C080B07A6455022AC0 0 \N \N \N t \N +135 1 6449 7596 14 0106000020E610000001000000010300000001000000050000008C44F1FF2F2643C0F0EB21235FF729C08C44F1FF2F2643C05023E3A368EC29C0B546F1FF5F2343C05023E3A368EC29C0B546F1FF5F2343C0F0EB21235FF729C08C44F1FF2F2643C0F0EB21235FF729C0 0 \N \N \N t \N +136 1 6449 7597 14 
0106000020E610000001000000010300000001000000050000008C44F1FF2F2643C05023E3A368EC29C08C44F1FF2F2643C060A9D6E671E129C0B546F1FF5F2343C060A9D6E671E129C0B546F1FF5F2343C05023E3A368EC29C08C44F1FF2F2643C05023E3A368EC29C0 0 \N \N \N t \N +137 1 6449 7598 14 0106000020E610000001000000010300000001000000050000008C44F1FF2F2643C060A9D6E671E129C08C44F1FF2F2643C0D0D314EC7AD629C0B546F1FF5F2343C0D0D314EC7AD629C0B546F1FF5F2343C060A9D6E671E129C08C44F1FF2F2643C060A9D6E671E129C0 0 \N \N \N t \N +138 1 6449 7599 14 0106000020E610000001000000010300000001000000050000008C44F1FF2F2643C0D0D314EC7AD629C08C44F1FF2F2643C008FBB5B383CB29C0B546F1FF5F2343C008FBB5B383CB29C0B546F1FF5F2343C0D0D314EC7AD629C08C44F1FF2F2643C0D0D314EC7AD629C0 0 \N \N \N t \N +139 1 6449 7600 14 0106000020E610000001000000010300000001000000050000008C44F1FF2F2643C008FBB5B383CB29C08C44F1FF2F2643C0507AD23D8CC029C0B546F1FF5F2343C0507AD23D8CC029C0B546F1FF5F2343C008FBB5B383CB29C08C44F1FF2F2643C008FBB5B383CB29C0 0 \N \N \N t \N +140 1 6449 7601 14 0106000020E610000001000000010300000001000000050000008C44F1FF2F2643C0507AD23D8CC029C08C44F1FF2F2643C0A0AF828A94B529C0B546F1FF5F2343C0A0AF828A94B529C0B546F1FF5F2343C0507AD23D8CC029C08C44F1FF2F2643C0507AD23D8CC029C0 0 \N \N \N t \N +141 1 6449 7602 14 0106000020E610000001000000010300000001000000050000008C44F1FF2F2643C0A0AF828A94B529C08C44F1FF2F2643C0C8FBDE999CAA29C0B546F1FF5F2343C0C8FBDE999CAA29C0B546F1FF5F2343C0A0AF828A94B529C08C44F1FF2F2643C0A0AF828A94B529C0 0 \N \N \N t \N +142 1 6449 7603 14 0106000020E610000001000000010300000001000000050000008C44F1FF2F2643C0C8FBDE999CAA29C08C44F1FF2F2643C070C2FF6BA49F29C0B546F1FF5F2343C070C2FF6BA49F29C0B546F1FF5F2343C0C8FBDE999CAA29C08C44F1FF2F2643C0C8FBDE999CAA29C0 0 \N \N \N t \N +143 1 6449 7604 14 0106000020E610000001000000010300000001000000050000008C44F1FF2F2643C070C2FF6BA49F29C08C44F1FF2F2643C0D869FD00AC9429C0B546F1FF5F2343C0D869FD00AC9429C0B546F1FF5F2343C070C2FF6BA49F29C08C44F1FF2F2643C070C2FF6BA49F29C0 0 \N \N \N t \N +144 1 6449 7605 14 
0106000020E610000001000000010300000001000000050000008C44F1FF2F2643C0D869FD00AC9429C08C44F1FF2F2643C0205BF058B38929C0B546F1FF5F2343C0205BF058B38929C0B546F1FF5F2343C0D869FD00AC9429C08C44F1FF2F2643C0D869FD00AC9429C0 0 \N \N \N t \N +145 1 6450 7594 14 0106000020E61000000100000001030000000100000005000000B546F1FF5F2343C0E020D5674B0D2AC0B546F1FF5F2343C080B07A6455022AC0DE48F1FF8F2043C080B07A6455022AC0DE48F1FF8F2043C0E020D5674B0D2AC0B546F1FF5F2343C0E020D5674B0D2AC0 0 \N \N \N t \N +146 1 6450 7595 14 0106000020E61000000100000001030000000100000005000000B546F1FF5F2343C080B07A6455022AC0B546F1FF5F2343C0F0EB21235FF729C0DE48F1FF8F2043C0F0EB21235FF729C0DE48F1FF8F2043C080B07A6455022AC0B546F1FF5F2343C080B07A6455022AC0 0 \N \N \N t \N +147 1 6450 7596 14 0106000020E61000000100000001030000000100000005000000B546F1FF5F2343C0F0EB21235FF729C0B546F1FF5F2343C05023E3A368EC29C0DE48F1FF8F2043C05023E3A368EC29C0DE48F1FF8F2043C0F0EB21235FF729C0B546F1FF5F2343C0F0EB21235FF729C0 0 \N \N \N t \N +148 1 6450 7597 14 0106000020E61000000100000001030000000100000005000000B546F1FF5F2343C05023E3A368EC29C0B546F1FF5F2343C060A9D6E671E129C0DE48F1FF8F2043C060A9D6E671E129C0DE48F1FF8F2043C05023E3A368EC29C0B546F1FF5F2343C05023E3A368EC29C0 0 \N \N \N t \N +149 1 6450 7598 14 0106000020E61000000100000001030000000100000005000000B546F1FF5F2343C060A9D6E671E129C0B546F1FF5F2343C0D0D314EC7AD629C0DE48F1FF8F2043C0D0D314EC7AD629C0DE48F1FF8F2043C060A9D6E671E129C0B546F1FF5F2343C060A9D6E671E129C0 0 \N \N \N t \N +150 1 6450 7599 14 0106000020E61000000100000001030000000100000005000000B546F1FF5F2343C0D0D314EC7AD629C0B546F1FF5F2343C008FBB5B383CB29C0DE48F1FF8F2043C008FBB5B383CB29C0DE48F1FF8F2043C0D0D314EC7AD629C0B546F1FF5F2343C0D0D314EC7AD629C0 0 \N \N \N t \N +151 1 6450 7600 14 0106000020E61000000100000001030000000100000005000000B546F1FF5F2343C008FBB5B383CB29C0B546F1FF5F2343C0507AD23D8CC029C0DE48F1FF8F2043C0507AD23D8CC029C0DE48F1FF8F2043C008FBB5B383CB29C0B546F1FF5F2343C008FBB5B383CB29C0 0 \N \N \N t \N +152 1 6450 7601 14 
0106000020E61000000100000001030000000100000005000000B546F1FF5F2343C0507AD23D8CC029C0B546F1FF5F2343C0A0AF828A94B529C0DE48F1FF8F2043C0A0AF828A94B529C0DE48F1FF8F2043C0507AD23D8CC029C0B546F1FF5F2343C0507AD23D8CC029C0 0 \N \N \N t \N +153 1 6450 7602 14 0106000020E61000000100000001030000000100000005000000B546F1FF5F2343C0A0AF828A94B529C0B546F1FF5F2343C0C8FBDE999CAA29C0DE48F1FF8F2043C0C8FBDE999CAA29C0DE48F1FF8F2043C0A0AF828A94B529C0B546F1FF5F2343C0A0AF828A94B529C0 0 \N \N \N t \N +154 1 6450 7603 14 0106000020E61000000100000001030000000100000005000000B546F1FF5F2343C0C8FBDE999CAA29C0B546F1FF5F2343C070C2FF6BA49F29C0DE48F1FF8F2043C070C2FF6BA49F29C0DE48F1FF8F2043C0C8FBDE999CAA29C0B546F1FF5F2343C0C8FBDE999CAA29C0 0 \N \N \N t \N +155 1 6450 7604 14 0106000020E61000000100000001030000000100000005000000B546F1FF5F2343C070C2FF6BA49F29C0B546F1FF5F2343C0D869FD00AC9429C0DE48F1FF8F2043C0D869FD00AC9429C0DE48F1FF8F2043C070C2FF6BA49F29C0B546F1FF5F2343C070C2FF6BA49F29C0 0 \N \N \N t \N +156 1 6450 7605 14 0106000020E61000000100000001030000000100000005000000B546F1FF5F2343C0D869FD00AC9429C0B546F1FF5F2343C0205BF058B38929C0DE48F1FF8F2043C0205BF058B38929C0DE48F1FF8F2043C0D869FD00AC9429C0B546F1FF5F2343C0D869FD00AC9429C0 0 \N \N \N t \N +157 1 6451 7594 14 0106000020E61000000100000001030000000100000005000000DE48F1FF8F2043C0E020D5674B0D2AC0DE48F1FF8F2043C080B07A6455022AC00A4BF1FFBF1D43C080B07A6455022AC00A4BF1FFBF1D43C0E020D5674B0D2AC0DE48F1FF8F2043C0E020D5674B0D2AC0 0 \N \N \N t \N +158 1 6451 7595 14 0106000020E61000000100000001030000000100000005000000DE48F1FF8F2043C080B07A6455022AC0DE48F1FF8F2043C0F0EB21235FF729C00A4BF1FFBF1D43C0F0EB21235FF729C00A4BF1FFBF1D43C080B07A6455022AC0DE48F1FF8F2043C080B07A6455022AC0 0 \N \N \N t \N +159 1 6451 7596 14 0106000020E61000000100000001030000000100000005000000DE48F1FF8F2043C0F0EB21235FF729C0DE48F1FF8F2043C05023E3A368EC29C00A4BF1FFBF1D43C05023E3A368EC29C00A4BF1FFBF1D43C0F0EB21235FF729C0DE48F1FF8F2043C0F0EB21235FF729C0 0 \N \N \N t \N +160 1 6451 7597 14 
0106000020E61000000100000001030000000100000005000000DE48F1FF8F2043C05023E3A368EC29C0DE48F1FF8F2043C060A9D6E671E129C00A4BF1FFBF1D43C060A9D6E671E129C00A4BF1FFBF1D43C05023E3A368EC29C0DE48F1FF8F2043C05023E3A368EC29C0 0 \N \N \N t \N +161 1 6451 7598 14 0106000020E61000000100000001030000000100000005000000DE48F1FF8F2043C060A9D6E671E129C0DE48F1FF8F2043C0D0D314EC7AD629C00A4BF1FFBF1D43C0D0D314EC7AD629C00A4BF1FFBF1D43C060A9D6E671E129C0DE48F1FF8F2043C060A9D6E671E129C0 0 \N \N \N t \N +162 1 6451 7599 14 0106000020E61000000100000001030000000100000005000000DE48F1FF8F2043C0D0D314EC7AD629C0DE48F1FF8F2043C008FBB5B383CB29C00A4BF1FFBF1D43C008FBB5B383CB29C00A4BF1FFBF1D43C0D0D314EC7AD629C0DE48F1FF8F2043C0D0D314EC7AD629C0 0 \N \N \N t \N +163 1 6451 7600 14 0106000020E61000000100000001030000000100000005000000DE48F1FF8F2043C008FBB5B383CB29C0DE48F1FF8F2043C0507AD23D8CC029C00A4BF1FFBF1D43C0507AD23D8CC029C00A4BF1FFBF1D43C008FBB5B383CB29C0DE48F1FF8F2043C008FBB5B383CB29C0 0 \N \N \N t \N +164 1 6451 7601 14 0106000020E61000000100000001030000000100000005000000DE48F1FF8F2043C0507AD23D8CC029C0DE48F1FF8F2043C0A0AF828A94B529C00A4BF1FFBF1D43C0A0AF828A94B529C00A4BF1FFBF1D43C0507AD23D8CC029C0DE48F1FF8F2043C0507AD23D8CC029C0 0 \N \N \N t \N +165 1 6451 7602 14 0106000020E61000000100000001030000000100000005000000DE48F1FF8F2043C0A0AF828A94B529C0DE48F1FF8F2043C0C8FBDE999CAA29C00A4BF1FFBF1D43C0C8FBDE999CAA29C00A4BF1FFBF1D43C0A0AF828A94B529C0DE48F1FF8F2043C0A0AF828A94B529C0 0 \N \N \N t \N +166 1 6451 7603 14 0106000020E61000000100000001030000000100000005000000DE48F1FF8F2043C0C8FBDE999CAA29C0DE48F1FF8F2043C070C2FF6BA49F29C00A4BF1FFBF1D43C070C2FF6BA49F29C00A4BF1FFBF1D43C0C8FBDE999CAA29C0DE48F1FF8F2043C0C8FBDE999CAA29C0 0 \N \N \N t \N +167 1 6451 7604 14 0106000020E61000000100000001030000000100000005000000DE48F1FF8F2043C070C2FF6BA49F29C0DE48F1FF8F2043C0D869FD00AC9429C00A4BF1FFBF1D43C0D869FD00AC9429C00A4BF1FFBF1D43C070C2FF6BA49F29C0DE48F1FF8F2043C070C2FF6BA49F29C0 0 \N \N \N t \N +168 1 6451 7605 14 
0106000020E61000000100000001030000000100000005000000DE48F1FF8F2043C0D869FD00AC9429C0DE48F1FF8F2043C0205BF058B38929C00A4BF1FFBF1D43C0205BF058B38929C00A4BF1FFBF1D43C0D869FD00AC9429C0DE48F1FF8F2043C0D869FD00AC9429C0 0 \N \N \N t \N +171 1 12889 15208 15 0106000020E61000000100000001030000000100000005000000C83AF1FFD73243C08EC2FF6BA49F29C0E13BF1FF6F3143C08EC2FF6BA49F29C0E13BF1FF6F3143C09373213E289A29C0C83AF1FFD73243C09373213E289A29C0C83AF1FFD73243C08EC2FF6BA49F29C0 0 \N \N \N t \N +12 2 \N \N \N 0106000020E61000000100000001030000000100000005000000E49ECD52E7E532406330BC5C89F640C082133037B2E43240E83320AF4AF740C093BB4F6D38E13240E83320AF4AF740C093BB4F6D38E132406330BC5C89F640C0E49ECD52E7E532406330BC5C89F640C0 0 \N \N \N f \N +13 2 \N \N \N 0106000020E6100000010000000103000000010000000500000093BB4F6D38E13240B1629661D4F540C0F7FF7FADC9E63240EFCFC9CBFBF540C0E49ECD52E7E532406330BC5C89F640C093BB4F6D38E132406330BC5C89F640C093BB4F6D38E13240B1629661D4F540C0 0 \N \N \N f \N +172 1 12889 15209 15 0106000020E61000000100000001030000000100000005000000C83AF1FFD73243C09373213E289A29C0E13BF1FF6F3143C09373213E289A29C0E13BF1FF6F3143C0E169FD00AC9429C0C83AF1FFD73243C0E169FD00AC9429C0C83AF1FFD73243C09373213E289A29C0 6 \N 360183 \N t \N +14 2 \N \N \N 0106000020E610000001000000010300000001000000050000005C52664B8DD9324070E0C44E52F940C0F6FF3FB6F2D632400BF53EC585F940C07DF31D1B6CD6324055235A8946F840C05C52664B8DD9324055235A8946F840C05C52664B8DD9324070E0C44E52F940C0 0 \N \N \N f \N +15 2 \N \N \N 0106000020E610000001000000010300000001000000050000007DF31D1B6CD6324055235A8946F840C0D8175CE901D63240E83320AF4AF740C05C52664B8DD93240E83320AF4AF740C05C52664B8DD9324055235A8946F840C07DF31D1B6CD6324055235A8946F840C0 0 \N \N \N f \N +169 1 12888 15208 15 0106000020E61000000100000001030000000100000005000000BC39F1FF3F3443C08EC2FF6BA49F29C0C83AF1FFD73243C08EC2FF6BA49F29C0C83AF1FFD73243C09373213E289A29C0BC39F1FF3F3443C09373213E289A29C0BC39F1FF3F3443C08EC2FF6BA49F29C0 0 \N \N \N t \N +17 2 \N \N \N 
0106000020E610000001000000010300000001000000050000005C52664B8DD9324055235A8946F840C05C52664B8DD93240E83320AF4AF740C0385FA2C5E5DC3240E83320AF4AF740C0385FA2C5E5DC324055235A8946F840C05C52664B8DD9324055235A8946F840C0 0 \N \N \N f \N +129 1 6448 7602 14 0106000020E610000001000000010300000001000000050000006242F1FFFF2843C0A0AF828A94B529C06242F1FFFF2843C0C8FBDE999CAA29C08C44F1FF2F2643C0C8FBDE999CAA29C08C44F1FF2F2643C0A0AF828A94B529C06242F1FFFF2843C0A0AF828A94B529C0 6 \N 360183 \N t \N +18 2 \N \N \N 0106000020E610000001000000010300000001000000050000001C0821F41BE03240112867B5D0F840C0385FA2C5E5DC3240FBB72B2F10F940C0385FA2C5E5DC3240033CFBBF06F840C01C0821F41BE03240033CFBBF06F840C01C0821F41BE03240112867B5D0F840C0 0 \N \N \N f \N +170 1 12888 15209 15 0106000020E61000000100000001030000000100000005000000BC39F1FF3F3443C09373213E289A29C0C83AF1FFD73243C09373213E289A29C0C83AF1FFD73243C0E169FD00AC9429C0BC39F1FF3F3443C0E169FD00AC9429C0BC39F1FF3F3443C09373213E289A29C0 4 \N 360183 360183 t \N +1 3 \N \N \N 0106000020E610000001000000010300000001000000050000000100405B40D53240CEE033A57FF540C0000040B6F2D632400FF53EC585F940C00000C02390E23240EEEACD35A0F840C0010080ADC9E63240EBCFC9CBFBF540C00100405B40D53240CEE033A57FF540C0 0 \N \N \N f {} +19 2 \N \N \N 0106000020E61000000100000001030000000100000005000000385FA2C5E5DC3240033CFBBF06F840C0385FA2C5E5DC3240E83320AF4AF740C01C0821F41BE03240E83320AF4AF740C01C0821F41BE03240033CFBBF06F840C0385FA2C5E5DC3240033CFBBF06F840C0 0 \N \N \N f \N +20 2 \N \N \N 0106000020E6100000010000000103000000010000000500000077C0FC8285E33240033CFBBF06F840C00E00C02390E23240E8EACD35A0F840C01C0821F41BE03240112867B5D0F840C01C0821F41BE03240033CFBBF06F840C077C0FC8285E33240033CFBBF06F840C0 0 \N \N \N f \N +6 2 \N \N \N 0106000020E61000000100000001030000000100000005000000D8175CE901D63240E83320AF4AF740C01B6EF23DA5D532402DE641E86EF640C081542F2F30D932402DE641E86EF640C081542F2F30D93240E83320AF4AF740C0D8175CE901D63240E83320AF4AF740C0 0 \N \N \N f \N +7 2 \N \N \N 
0106000020E610000001000000010300000001000000050000001B6EF23DA5D532402DE641E86EF640C0F6FF3F5B40D53240CFE033A57FF540C081542F2F30D9324029C537849BF540C081542F2F30D932402DE641E86EF640C01B6EF23DA5D532402DE641E86EF640C0 0 \N \N \N f \N +8 2 \N \N \N 0106000020E61000000100000001030000000100000005000000385FA2C5E5DC32402DE641E86EF640C0385FA2C5E5DC3240E83320AF4AF740C081542F2F30D93240E83320AF4AF740C081542F2F30D932402DE641E86EF640C0385FA2C5E5DC32402DE641E86EF640C0 0 \N \N \N f \N +10 2 \N \N \N 0106000020E6100000010000000103000000010000000500000093BB4F6D38E132406330BC5C89F640C093BB4F6D38E13240E83320AF4AF740C0385FA2C5E5DC3240E83320AF4AF740C0385FA2C5E5DC32406330BC5C89F640C093BB4F6D38E132406330BC5C89F640C0 0 \N \N \N f \N +11 2 \N \N \N 0106000020E61000000100000001030000000100000005000000385FA2C5E5DC32404883EAC6B5F540C093BB4F6D38E13240B1629661D4F540C093BB4F6D38E132406330BC5C89F640C0385FA2C5E5DC32406330BC5C89F640C0385FA2C5E5DC32404883EAC6B5F540C0 0 \N \N \N f \N +21 2 \N \N \N 0106000020E6100000010000000103000000010000000500000082133037B2E43240E83320AF4AF740C077C0FC8285E33240033CFBBF06F840C01C0821F41BE03240033CFBBF06F840C01C0821F41BE03240E83320AF4AF740C082133037B2E43240E83320AF4AF740C0 0 \N \N \N f \N +16 2 \N \N \N 0106000020E61000000100000001030000000100000005000000385FA2C5E5DC3240FBB72B2F10F940C05C52664B8DD9324070E0C44E52F940C05C52664B8DD9324055235A8946F840C0385FA2C5E5DC324055235A8946F840C0385FA2C5E5DC3240FBB72B2F10F940C0 6 \N 2823295 \N f \N +9 2 \N \N \N 0106000020E6100000010000000103000000010000000500000081542F2F30D9324029C537849BF540C0385FA2C5E5DC32404883EAC6B5F540C0385FA2C5E5DC32402DE641E86EF640C081542F2F30D932402DE641E86EF640C081542F2F30D9324029C537849BF540C0 2 \N 360183 \N f \N +\. 
+ + +-- +-- Data for Name: users; Type: TABLE DATA; Schema: public; Owner: - +-- + +COPY public.users (id, username, role, mapping_level, tasks_mapped, tasks_validated, tasks_invalidated, projects_mapped, email_address, facebook_id, is_email_verified, linkedin_id, twitter_id, date_registered, last_validation_date, validation_message, is_expert) FROM stdin; +2823295 ramyaragupathy 0 3 0 0 0 {2} \N \N f \N \N 2019-04-09 03:11:54.503164 2019-04-09 03:11:54.503182 t f +360183 wille 1 3 2 1 0 {1,2} \N \N f \N \N 2019-04-08 10:51:26.758678 2019-04-08 11:40:37.239041 t f +94253 xamanu 1 3 0 0 0 \N \N \N f \N \N 2019-04-19 13:44:25.422959 2019-04-19 13:44:25.422967 t f +\. + + +-- +-- Data for Name: users_licenses; Type: TABLE DATA; Schema: public; Owner: - +-- + +COPY public.users_licenses ("user", license) FROM stdin; +\. + + +-- +-- Data for Name: topology; Type: TABLE DATA; Schema: topology; Owner: - +-- + +COPY topology.topology (id, name, srid, "precision", hasz) FROM stdin; +\. + + +-- +-- Data for Name: layer; Type: TABLE DATA; Schema: topology; Owner: - +-- + +COPY topology.layer (topology_id, layer_id, schema_name, table_name, feature_column, feature_type, level, child_id) FROM stdin; +\. 
+ + +-- +-- Name: licenses_id_seq; Type: SEQUENCE SET; Schema: public; Owner: - +-- + +SELECT pg_catalog.setval('public.licenses_id_seq', 1, false); + + +-- +-- Name: messages_id_seq; Type: SEQUENCE SET; Schema: public; Owner: - +-- + +SELECT pg_catalog.setval('public.messages_id_seq', 2, true); + + +-- +-- Name: priority_areas_id_seq; Type: SEQUENCE SET; Schema: public; Owner: - +-- + +SELECT pg_catalog.setval('public.priority_areas_id_seq', 1, false); + + +-- +-- Name: project_chat_id_seq; Type: SEQUENCE SET; Schema: public; Owner: - +-- + +SELECT pg_catalog.setval('public.project_chat_id_seq', 1, false); + + +-- +-- Name: projects_id_seq; Type: SEQUENCE SET; Schema: public; Owner: - +-- + +SELECT pg_catalog.setval('public.projects_id_seq', 3, true); + + +-- +-- Name: tags_id_seq; Type: SEQUENCE SET; Schema: public; Owner: - +-- + +SELECT pg_catalog.setval('public.tags_id_seq', 1, false); + + +-- +-- Name: task_history_id_seq; Type: SEQUENCE SET; Schema: public; Owner: - +-- + +SELECT pg_catalog.setval('public.task_history_id_seq', 123, true); + + +-- +-- Name: task_invalidation_history_id_seq; Type: SEQUENCE SET; Schema: public; Owner: - +-- + +SELECT pg_catalog.setval('public.task_invalidation_history_id_seq', 1, false); + + +-- +-- Name: users_id_seq; Type: SEQUENCE SET; Schema: public; Owner: - +-- + +SELECT pg_catalog.setval('public.users_id_seq', 1, false); + + +-- +-- Name: alembic_version alembic_version_pkc; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.alembic_version + ADD CONSTRAINT alembic_version_pkc PRIMARY KEY (version_num); + + +-- +-- Name: licenses licenses_name_key; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.licenses + ADD CONSTRAINT licenses_name_key UNIQUE (name); + + +-- +-- Name: licenses licenses_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.licenses + ADD CONSTRAINT licenses_pkey PRIMARY KEY (id); + + +-- +-- Name: messages messages_pkey; Type: 
CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.messages + ADD CONSTRAINT messages_pkey PRIMARY KEY (id); + + +-- +-- Name: priority_areas priority_areas_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.priority_areas + ADD CONSTRAINT priority_areas_pkey PRIMARY KEY (id); + + +-- +-- Name: project_chat project_chat_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.project_chat + ADD CONSTRAINT project_chat_pkey PRIMARY KEY (id); + + +-- +-- Name: project_info project_info_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.project_info + ADD CONSTRAINT project_info_pkey PRIMARY KEY (project_id, locale); + + +-- +-- Name: projects projects_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.projects + ADD CONSTRAINT projects_pkey PRIMARY KEY (id); + + +-- +-- Name: tags tags_campaigns_key; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.tags + ADD CONSTRAINT tags_campaigns_key UNIQUE (campaigns); + + +-- +-- Name: tags tags_organisations_key; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.tags + ADD CONSTRAINT tags_organisations_key UNIQUE (organisations); + + +-- +-- Name: tags tags_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.tags + ADD CONSTRAINT tags_pkey PRIMARY KEY (id); + + +-- +-- Name: task_history task_history_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.task_history + ADD CONSTRAINT task_history_pkey PRIMARY KEY (id); + + +-- +-- Name: task_invalidation_history task_invalidation_history_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.task_invalidation_history + ADD CONSTRAINT task_invalidation_history_pkey PRIMARY KEY (id); + + +-- +-- Name: tasks tasks_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.tasks + ADD CONSTRAINT tasks_pkey 
PRIMARY KEY (id, project_id); + + +-- +-- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.users + ADD CONSTRAINT users_pkey PRIMARY KEY (id); + + +-- +-- Name: users users_username_key; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.users + ADD CONSTRAINT users_username_key UNIQUE (username); + + +-- +-- Name: idx_geometry; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX idx_geometry ON public.projects USING gist (geometry); + + +-- +-- Name: idx_project_info composite; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX "idx_project_info composite" ON public.project_info USING btree (locale, project_id); + + +-- +-- Name: idx_task_history_composite; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX idx_task_history_composite ON public.task_history USING btree (task_id, project_id); + + +-- +-- Name: idx_task_validation_history_composite; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX idx_task_validation_history_composite ON public.task_invalidation_history USING btree (task_id, project_id); + + +-- +-- Name: idx_task_validation_mapper_status_composite; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX idx_task_validation_mapper_status_composite ON public.task_invalidation_history USING btree (invalidator_id, is_closed); + + +-- +-- Name: idx_username_lower; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX idx_username_lower ON public.users USING btree (lower((username)::text)); + + +-- +-- Name: ix_messages_message_type; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX ix_messages_message_type ON public.messages USING btree (message_type); + + +-- +-- Name: ix_messages_project_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX ix_messages_project_id ON public.messages USING btree (project_id); + + +-- +-- Name: ix_messages_task_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX ix_messages_task_id ON 
public.messages USING btree (task_id); + + +-- +-- Name: ix_messages_to_user_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX ix_messages_to_user_id ON public.messages USING btree (to_user_id); + + +-- +-- Name: ix_project_chat_project_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX ix_project_chat_project_id ON public.project_chat USING btree (project_id); + + +-- +-- Name: ix_projects_campaign_tag; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX ix_projects_campaign_tag ON public.projects USING btree (campaign_tag); + + +-- +-- Name: ix_projects_mapper_level; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX ix_projects_mapper_level ON public.projects USING btree (mapper_level); + + +-- +-- Name: ix_projects_mapping_types; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX ix_projects_mapping_types ON public.projects USING btree (mapping_types); + + +-- +-- Name: ix_projects_organisation_tag; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX ix_projects_organisation_tag ON public.projects USING btree (organisation_tag); + + +-- +-- Name: ix_task_history_project_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX ix_task_history_project_id ON public.task_history USING btree (project_id); + + +-- +-- Name: ix_tasks_project_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX ix_tasks_project_id ON public.tasks USING btree (project_id); + + +-- +-- Name: ix_users_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX ix_users_id ON public.users USING btree (id); + + +-- +-- Name: task_invalidation_history fk_invalidation_history; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.task_invalidation_history + ADD CONSTRAINT fk_invalidation_history FOREIGN KEY (invalidation_history_id) REFERENCES public.task_history(id); + + +-- +-- Name: task_invalidation_history fk_invalidators; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY 
public.task_invalidation_history + ADD CONSTRAINT fk_invalidators FOREIGN KEY (invalidator_id) REFERENCES public.users(id); + + +-- +-- Name: projects fk_licenses; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.projects + ADD CONSTRAINT fk_licenses FOREIGN KEY (license_id) REFERENCES public.licenses(id); + + +-- +-- Name: task_invalidation_history fk_mappers; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.task_invalidation_history + ADD CONSTRAINT fk_mappers FOREIGN KEY (mapper_id) REFERENCES public.users(id); + + +-- +-- Name: messages fk_message_projects; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.messages + ADD CONSTRAINT fk_message_projects FOREIGN KEY (project_id) REFERENCES public.projects(id); + + +-- +-- Name: task_history fk_tasks; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.task_history + ADD CONSTRAINT fk_tasks FOREIGN KEY (task_id, project_id) REFERENCES public.tasks(id, project_id); + + +-- +-- Name: task_invalidation_history fk_tasks; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.task_invalidation_history + ADD CONSTRAINT fk_tasks FOREIGN KEY (task_id, project_id) REFERENCES public.tasks(id, project_id); + + +-- +-- Name: projects fk_users; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.projects + ADD CONSTRAINT fk_users FOREIGN KEY (author_id) REFERENCES public.users(id); + + +-- +-- Name: task_history fk_users; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.task_history + ADD CONSTRAINT fk_users FOREIGN KEY (user_id) REFERENCES public.users(id); + + +-- +-- Name: tasks fk_users_locked; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.tasks + ADD CONSTRAINT fk_users_locked FOREIGN KEY (locked_by) REFERENCES public.users(id); + + +-- +-- Name: tasks fk_users_mapper; Type: FK CONSTRAINT; Schema: public; Owner: 
- +-- + +ALTER TABLE ONLY public.tasks + ADD CONSTRAINT fk_users_mapper FOREIGN KEY (mapped_by) REFERENCES public.users(id); + + +-- +-- Name: tasks fk_users_validator; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.tasks + ADD CONSTRAINT fk_users_validator FOREIGN KEY (validated_by) REFERENCES public.users(id); + + +-- +-- Name: task_invalidation_history fk_validators; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.task_invalidation_history + ADD CONSTRAINT fk_validators FOREIGN KEY (validator_id) REFERENCES public.users(id); + + +-- +-- Name: messages messages_from_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.messages + ADD CONSTRAINT messages_from_user_id_fkey FOREIGN KEY (from_user_id) REFERENCES public.users(id); + + +-- +-- Name: messages messages_to_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.messages + ADD CONSTRAINT messages_to_user_id_fkey FOREIGN KEY (to_user_id) REFERENCES public.users(id); + + +-- +-- Name: project_allowed_users project_allowed_users_project_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.project_allowed_users + ADD CONSTRAINT project_allowed_users_project_id_fkey FOREIGN KEY (project_id) REFERENCES public.projects(id); + + +-- +-- Name: project_allowed_users project_allowed_users_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.project_allowed_users + ADD CONSTRAINT project_allowed_users_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + + +-- +-- Name: project_chat project_chat_project_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.project_chat + ADD CONSTRAINT project_chat_project_id_fkey FOREIGN KEY (project_id) REFERENCES public.projects(id); + + +-- +-- Name: project_chat project_chat_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- 
+ +ALTER TABLE ONLY public.project_chat + ADD CONSTRAINT project_chat_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + + +-- +-- Name: project_info project_info_project_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.project_info + ADD CONSTRAINT project_info_project_id_fkey FOREIGN KEY (project_id) REFERENCES public.projects(id); + + +-- +-- Name: project_priority_areas project_priority_areas_priority_area_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.project_priority_areas + ADD CONSTRAINT project_priority_areas_priority_area_id_fkey FOREIGN KEY (priority_area_id) REFERENCES public.priority_areas(id); + + +-- +-- Name: project_priority_areas project_priority_areas_project_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.project_priority_areas + ADD CONSTRAINT project_priority_areas_project_id_fkey FOREIGN KEY (project_id) REFERENCES public.projects(id); + + +-- +-- Name: task_history task_history_project_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.task_history + ADD CONSTRAINT task_history_project_id_fkey FOREIGN KEY (project_id) REFERENCES public.projects(id); + + +-- +-- Name: task_invalidation_history task_invalidation_history_project_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.task_invalidation_history + ADD CONSTRAINT task_invalidation_history_project_id_fkey FOREIGN KEY (project_id) REFERENCES public.projects(id); + + +-- +-- Name: tasks tasks_project_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.tasks + ADD CONSTRAINT tasks_project_id_fkey FOREIGN KEY (project_id) REFERENCES public.projects(id); + + +-- +-- Name: users_licenses users_licenses_license_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.users_licenses + ADD CONSTRAINT users_licenses_license_fkey FOREIGN KEY 
(license) REFERENCES public.licenses(id); + + +-- +-- Name: users_licenses users_licenses_user_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.users_licenses + ADD CONSTRAINT users_licenses_user_fkey FOREIGN KEY ("user") REFERENCES public.users(id); + + +-- +-- PostgreSQL database dump complete +--