diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 0f8aa2a..c87806e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -6,6 +6,7 @@ on:
     branches:
       - main
       - dev
+      - feat/fsspec-filecache
     tags:
       - 'v*'
     paths:
@@ -48,13 +49,20 @@ jobs:
           python -m pip install pre-commit
           pre-commit run --all-files
 
+      - name: Run tests without cache
+        if: ${{ matrix.python-version == env.LATEST_PY_VERSION }}
+        env:
+          TITILER_XARRAY_ENABLE_FSSPEC_CACHE: FALSE
+        run: python -m pytest --cov titiler.xarray --cov-report term-missing -s -vv
+
       - name: Run tests
         run: python -m pytest --cov titiler.xarray --cov-report term-missing -s -vv
 
   deploy:
-    needs: [tests]
+    #needs: [tests]
     runs-on: ubuntu-latest
-    if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/dev' || startsWith(github.ref, 'refs/tags/v')
+    #if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/dev' || startsWith(github.ref, 'refs/tags/v')
+    if: github.ref == 'refs/heads/feat/fsspec-filecache'
 
     defaults:
       run:
@@ -88,6 +96,16 @@ jobs:
           python -m pip install --upgrade pip
           python -m pip install -r requirements-cdk.txt
 
+      # Build and deploy to the feature environment whenever there is a push to the feature branch
+      - name: Build & Deploy Feature2
+        if: github.ref == 'refs/heads/feat/fsspec-filecache'
+        run: npm run cdk -- deploy titiler-xarray-feature2 --require-approval never
+        env:
+          TITILER_XARRAY_PYTHONWARNINGS: ignore
+          TITILER_XARRAY_DEBUG: True
+          STACK_ALARM_EMAIL: ${{ secrets.ALARM_EMAIL }}
+          STACK_STAGE: feature2
+
       # Build and deploy to the development environment whenever there is a push to main or dev
       - name: Build & Deploy Development
         if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/dev'
diff --git a/.gitignore b/.gitignore
index ff51c59..7070c57 100644
--- a/.gitignore
+++ b/.gitignore
@@ -108,3 +108,6 @@ cdk.out/
 node_modules
 cdk.context.json
 *.nc
+
+*cache*
+*logs.txt
diff --git a/README.md b/README.md
index 121751a..917b7d9 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,7 @@ virtualenv .venv
 python -m pip install -e . uvicorn
 source .venv/bin/activate
-uvicorn titiler.xarray.main:app --reload
+TITILER_XARRAY_FSSPEC_CACHE_DIRECTORY="fsspec_cache" uvicorn titiler.xarray.main:app --reload
 ```
 
 To access the docs, visit http://127.0.0.1:8000/docs.
diff --git a/infrastructure/aws/cdk/app.py b/infrastructure/aws/cdk/app.py
index 350812f..eb54035 100644
--- a/infrastructure/aws/cdk/app.py
+++ b/infrastructure/aws/cdk/app.py
@@ -3,10 +3,13 @@
 import os
 from typing import Any, Dict, List, Optional
 
+import aws_cdk
 from aws_cdk import App, CfnOutput, Duration, Stack, Tags
 from aws_cdk import aws_apigatewayv2_alpha as apigw
 from aws_cdk import aws_cloudwatch as cloudwatch
 from aws_cdk import aws_cloudwatch_actions as cloudwatch_actions
+from aws_cdk import aws_ec2 as ec2
+from aws_cdk import aws_efs as efs
 from aws_cdk import aws_iam as iam
 from aws_cdk import aws_lambda
 from aws_cdk import aws_logs as logs
@@ -54,6 +57,44 @@ def __init__(
         permissions = permissions or []
         environment = environment or {}
 
+        vpc = ec2.Vpc(
+            self,
+            "titiler-xarray-vpc",
+            max_azs=2,  # Default is all AZs in the region
+            nat_gateways=1,
+            cidr="10.0.0.0/16",
+            # Define custom CIDR range for each subnet type
+            subnet_configuration=[
+                ec2.SubnetConfiguration(
+                    name="Public", subnet_type=ec2.SubnetType.PUBLIC, cidr_mask=24
+                ),
+                ec2.SubnetConfiguration(
+                    name="Private",
+                    subnet_type=ec2.SubnetType.PRIVATE_WITH_NAT,
+                    cidr_mask=24,
+                ),
+            ],
+        )
+
+        # Create and attach a file system
+        file_system = efs.FileSystem(
+            self,
+            "EfsFileSystem",
+            vpc=vpc,
+            lifecycle_policy=efs.LifecyclePolicy.AFTER_7_DAYS,  # Or choose another policy
+            performance_mode=efs.PerformanceMode.GENERAL_PURPOSE,
+        )
+
+        access_point = file_system.add_access_point(
+            "AccessPoint",
+            path="/export/lambda",
+            create_acl={"owner_uid": "1001", "owner_gid": "1001", "permissions": "750"},
+            posix_user={
+                "uid": "1001",
+                "gid": "1001",
+            },
+        )
+
         lambda_function = aws_lambda.Function(
             self,
             f"{id}-lambda",
@@ -69,6 +110,18 @@ def __init__(
             timeout=Duration.seconds(timeout),
             environment={**DEFAULT_ENV, **environment},
             log_retention=logs.RetentionDays.ONE_WEEK,
+            ephemeral_storage_size=aws_cdk.Size.gibibytes(10),
+            vpc=vpc,
+            filesystem=aws_lambda.FileSystem.from_efs_access_point(
+                access_point, "/mnt/efs"
+            ),  # Mounting it to /mnt/efs in Lambda
+        )
+
+        file_system.connections.allow_default_port_from(lambda_function)
+        file_system.grant(
+            lambda_function,
+            "elasticfilesystem:ClientMount",
+            "elasticfilesystem:ClientWrite",
         )
 
         for perm in permissions:
diff --git a/infrastructure/aws/lambda/Dockerfile b/infrastructure/aws/lambda/Dockerfile
index aef3e92..4ca6db9 100644
--- a/infrastructure/aws/lambda/Dockerfile
+++ b/infrastructure/aws/lambda/Dockerfile
@@ -16,7 +16,7 @@ COPY titiler/ titiler/
 # we have to force using old package version that seems `almost` compatible with Lambda env botocore
 # https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html
 RUN pip install --upgrade pip
-RUN pip install . "rio-tiler>=5.0.0" "cftime" "mangum>=0.10.0" "pandas==1.5.3" "botocore==1.29.76" "aiobotocore==2.5.0" "s3fs==2023.4.0" "fsspec==2023.4.0" "zarr==2.14.2" "xarray==0.19.0" -t /asset --no-binary pydantic
+RUN pip install . "mangum>=0.10.0" "botocore==1.29.76" "aiobotocore==2.5.0" -t /asset --no-binary pydantic
 
 # Reduce package size and remove useless files
 RUN cd /asset && find . -type f -name '*.pyc' | while read f; do n=$(echo $f | sed 's/__pycache__\///' | sed 's/.cpython-[0-9]*//'); cp $f $n; done;
@@ -24,7 +24,10 @@ RUN cd /asset && find . -type d -a -name '__pycache__' -print0 | xargs -0 rm -rf
 RUN cd /asset && find . -type f -a -name '*.py' -print0 | xargs -0 rm -f
 RUN find /asset -type d -a -name 'tests' -print0 | xargs -0 rm -rf
 RUN rm -rdf /asset/numpy/doc/ /asset/bin /asset/geos_license /asset/Misc
-RUN rm -rdf /asset/boto3* /asset/botocore*
+RUN rm -rdf /asset/boto3*
+RUN rm -rdf /asset/botocore*
+RUN rm -rdf /asset/dask/tests*
+RUN rm -rdf /asset/dask/dataframe*
 
 COPY infrastructure/aws/lambda/handler.py /asset/handler.py
diff --git a/pyproject.toml b/pyproject.toml
index 590bf0f..eaab1f2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,18 +25,19 @@ classifiers = [
 ]
 dynamic = ["version"]
 dependencies = [
+    "cftime",
     "h5netcdf",
     "xarray",
     "rioxarray",
     "zarr",
     "fsspec",
     "s3fs",
-    "requests",
     "aiohttp",
+    "requests",
     "pydantic==2.0.2",
     "titiler.core>=0.14.1,<0.15",
-    "starlette-cramjam>=0.3,<0.4",
-    "pydantic-settings~=2.0"
+    "pydantic-settings~=2.0",
+    "pandas==1.5.3",
 ]
 
 [project.optional-dependencies]
diff --git a/tests/conftest.py b/tests/conftest.py
index c75e63b..5f644de 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,5 +1,8 @@
 """titiler.xarray tests configuration."""
 
+import os
+import shutil
+
 import pytest
 from fastapi.testclient import TestClient
 
@@ -13,3 +16,16 @@ def app(monkeypatch):
 
     with TestClient(app) as client:
         yield client
+
+
+def pytest_sessionstart(session):
+    """Setup before tests run."""
+    test_cache_dir = "fsspec_test_cache"
+    os.environ["TITILER_XARRAY_FSSPEC_CACHE_DIRECTORY"] = test_cache_dir
+    os.makedirs(test_cache_dir, exist_ok=True)
+
+
+def pytest_sessionfinish(session, exitstatus):
+    """Cleanup step after all tests have been run."""
+    shutil.rmtree(os.environ["TITILER_XARRAY_FSSPEC_CACHE_DIRECTORY"])
+    print("\nAll tests are done! Cleaning up...")
diff --git a/tests/fixtures/3B42_Daily.19980101.7.nc4 b/tests/fixtures/3B42_Daily.19980101.7.nc4
new file mode 100644
index 0000000..6b3e07c
Binary files /dev/null and b/tests/fixtures/3B42_Daily.19980101.7.nc4 differ
diff --git a/tests/fixtures/generate_test_reference.py b/tests/fixtures/generate_test_reference.py
index 10c320f..8254961 100644
--- a/tests/fixtures/generate_test_reference.py
+++ b/tests/fixtures/generate_test_reference.py
@@ -2,12 +2,13 @@
 
 from datetime import datetime
 
-import fsspec
 import netCDF4 as nc
 import numpy as np
 from kerchunk.combine import MultiZarrToZarr
 from kerchunk.hdf import SingleHdf5ToZarr
 
+import fsspec
+
 
 def create_netcdf(filename, date):
     with nc.Dataset(filename, "w", format="NETCDF4") as ds:
diff --git a/tests/test_app.py b/tests/test_app.py
index 2a044eb..9ba9abd 100644
--- a/tests/test_app.py
+++ b/tests/test_app.py
@@ -7,6 +7,7 @@
 test_zarr_store = os.path.join(DATA_DIR, "test_zarr_store.zarr")
 test_reference_store = os.path.join(DATA_DIR, "reference.json")
 test_netcdf_store = os.path.join(DATA_DIR, "testfile.nc")
+test_transposed_netcdf_store = os.path.join(DATA_DIR, "3B42_Daily.19980101.7.nc4")
 test_unconsolidated_store = os.path.join(DATA_DIR, "unconsolidated.zarr")
 test_pyramid_store = os.path.join(DATA_DIR, "pyramid.zarr")
 
@@ -28,6 +29,16 @@
     "params": {"url": test_netcdf_store, "variable": "data", "decode_times": False},
     "variables": ["data"],
 }
+
+test_transposed_netcdf_store_params = {
+    "params": {
+        "url": test_transposed_netcdf_store,
+        "variable": "precipitation",
+        "decode_times": False,
+    },
+    "variables": ["precipitation"],
+}
+
 test_unconsolidated_store_params = {
     "params": {
         "url": test_unconsolidated_store,
@@ -149,7 +160,7 @@ def test_get_tilejson_pyramid(app):
 
 def get_tile_test(app, ds_params, zoom: int = 0):
     response = app.get(
-        f"/tiles/{zoom}/0/0.png",
+        "/tiles/0/0/0.png",
         params=ds_params["params"],
     )
     assert response.status_code == 200
@@ -173,6 +184,17 @@ def test_get_tile_netcdf(app):
     return get_tile_test(app, test_netcdf_store_params)
 
 
+def test_get_tile_transposed_netcdf_error(app):
+    response = app.get(
+        "/tiles/0/0/0.png",
+        params=test_transposed_netcdf_store_params["params"],
+    )
+    assert response.status_code == 422
+    assert response.json() == {
+        "detail": "Invalid dimension order. Expected order: ('y', 'x'). You can use `DataArray.transpose('y', 'x')` to reorder your dimensions. Data variable: precipitation"
+    }
+
+
 def test_get_tile_unconsolidated(app):
     return get_tile_test(app, test_unconsolidated_store_params)
diff --git a/tests/test_remote.py b/tests/test_remote.py
new file mode 100644
index 0000000..d228518
--- /dev/null
+++ b/tests/test_remote.py
@@ -0,0 +1,14 @@
+from test_app import get_variables_test
+
+test_remote_netcdf_store = "https://nex-gddp-cmip6.s3-us-west-2.amazonaws.com/NEX-GDDP-CMIP6/GISS-E2-1-G/historical/r1i1p1f2/pr/pr_day_GISS-E2-1-G_historical_r1i1p1f2_gn_1950.nc"
+test_remote_netcdf_store_params = {
+    "params": {
+        "url": test_remote_netcdf_store,
+        "variable": "pr",
+        "decode_times": False,
+    },
+    "variables": ["pr"],
+}
+
+def test_get_variables_remote_netcdf(app):
+    return get_variables_test(app, test_remote_netcdf_store_params)
diff --git a/titiler/xarray/factory.py b/titiler/xarray/factory.py
index 95861f6..92f75dd 100644
--- a/titiler/xarray/factory.py
+++ b/titiler/xarray/factory.py
@@ -229,6 +229,13 @@ def tiles_endpoint(  # type: ignore
                     description="Whether to expect and open zarr store with consolidated metadata",
                 ),
             ] = True,
+            anon: Annotated[
+                Optional[bool],
+                Query(
+                    title="anon",
+                    description="Access the data anonymously (without AWS credentials) when true; set to false to use credentials.",
+                ),
+            ] = True,
         ) -> Response:
             """Create map tile from a dataset."""
             tms = self.supported_tms.get(tileMatrixSetId)
@@ -242,8 +249,8 @@ def tiles_endpoint(  # type: ignore
                 time_slice=time_slice,
                 tms=tms,
                 consolidated=consolidated,
+                anon=anon,
             ) as src_dst:
-
                 image = src_dst.tile(
                     x, y, z, tilesize=scale * 256, nodata=src_dst.input.rio.nodata
                 )
@@ -387,6 +394,7 @@ def tilejson_endpoint(  # type: ignore
                 decode_times=decode_times,
                 tms=tms,
                 consolidated=consolidated,
+                drop_dim=drop_dim,
             ) as src_dst:
                 # see https://github.com/corteva/rioxarray/issues/645
                 minx, miny, maxx, maxy = zip(
@@ -433,6 +441,10 @@ def histogram(
                     description="Select a specific zarr group from a zarr hierarchy, can be for pyramids or datasets. Can be used to open a dataset in HDF5 files."
                 ),
             ] = None,
+            drop_dim: Annotated[
+                Optional[str],
+                Query(description="Dimension to drop"),
+            ] = None,
         ):
             with self.reader(
                 url,
@@ -440,6 +452,7 @@ def histogram(
                 decode_times=decode_times,
                 reference=reference,
                 consolidated=consolidated,
                 group=group,
+                drop_dim=drop_dim,
             ) as src_dst:
                 boolean_mask = ~np.isnan(src_dst.input)
                 data_values = src_dst.input.values[boolean_mask]
diff --git a/titiler/xarray/main.py b/titiler/xarray/main.py
index 34f679e..0df0405 100644
--- a/titiler/xarray/main.py
+++ b/titiler/xarray/main.py
@@ -1,15 +1,16 @@
 """titiler app."""
 
 import logging
+import os
+import shutil
 
 import rioxarray
-import xarray
 import zarr
 from fastapi import FastAPI
 from starlette import status
 from starlette.middleware.cors import CORSMiddleware
-from starlette_cramjam.middleware import CompressionMiddleware
+import titiler.xarray.reader as reader
 from titiler.core.errors import DEFAULT_STATUS_CODES, add_exception_handlers
 from titiler.core.factory import AlgorithmFactory, TMSFactory
 from titiler.core.middleware import (
@@ -53,6 +54,7 @@
 error_codes = {
     zarr.errors.GroupNotFoundError: status.HTTP_422_UNPROCESSABLE_ENTITY,
+    rioxarray.exceptions.InvalidDimensionOrder: status.HTTP_422_UNPROCESSABLE_ENTITY,
 }
 add_exception_handlers(app, error_codes)
 add_exception_handlers(app, DEFAULT_STATUS_CODES)
@@ -67,18 +69,6 @@
     allow_headers=["*"],
 )
 
-app.add_middleware(
-    CompressionMiddleware,
-    minimum_size=0,
-    exclude_mediatype={
-        "image/jpeg",
-        "image/jpg",
-        "image/png",
-        "image/jp2",
-        "image/webp",
-    },
-)
-
 app.add_middleware(
     CacheControlMiddleware,
     cachecontrol=api_settings.cachecontrol,
@@ -91,7 +81,7 @@
 app.add_middleware(
     ServerTimingMiddleware,
     calls_to_track={
-        "1-xarray-open_dataset": (xarray.open_dataset,),
+        "1-xarray-open_dataset": (reader.xarray_open_dataset,),
         "2-rioxarray-reproject": (rioxarray.raster_array.RasterArray.reproject,),
     },
 )
@@ -107,3 +97,31 @@
 def ping():
     """Health check."""
     return {"ping": "pong!"}
+
+
+@app.get(
+    "/clear_cache",
+    description="Clear Cache",
+    summary="Clear Cache.",
+    operation_id="clear cache",
+    tags=["Clear Cache"],
+)
+def clear_cache():
+    """
+    Clear the cache.
+    """
+    print("Clearing the cache...")
+    cache_dir = os.path.expanduser(api_settings.fsspec_cache_directory)
+    if os.path.exists(cache_dir):
+        # Iterate over each directory and file in the root of the EFS
+        for root_dir, dirs, files in os.walk(cache_dir, topdown=False):
+            for name in files:
+                file_path = os.path.join(root_dir, name)
+                os.remove(file_path)
+                print(f"Deleted file: {file_path}")
+
+            for name in dirs:
+                dir_path = os.path.join(root_dir, name)
+                shutil.rmtree(dir_path)
+                print(f"Deleted directory: {dir_path}")
+    return {"message": "cache cleared"}
diff --git a/titiler/xarray/map-form.html b/titiler/xarray/map-form.html
index 8bd800f..6b73f7a 100644
--- a/titiler/xarray/map-form.html
+++ b/titiler/xarray/map-form.html
@@ -75,6 +75,10 @@
       background-color: #0056b3;
     }
 
+    #other-fields-list li {
+      margin-bottom: 0.5em;
+    }
+
     /* histogram container */
     #histogram-container {
       margin-top: 20px;
@@ -123,17 +127,23 @@

 [The form-markup hunk of titiler/xarray/map-form.html was garbled in extraction and its HTML tags are not
 recoverable here. The surviving text shows the labels "Step 1: Enter the URL of your Zarr store" and
 "Step 2: Define other fields to use when opening the URL with xarray", several removed input-field lines
 including the hint "If the URL is for a zarr hierarchy or HDF5, please specify the group to use when opening
 the dataset.", and new markup added in their place, presumably the #other-fields-list items styled in the
 hunk above.]
diff --git a/titiler/xarray/map.html b/titiler/xarray/map.html
index 631ebe3..36948f3 100644
--- a/titiler/xarray/map.html
+++ b/titiler/xarray/map.html
@@ -160,6 +160,7 @@
             data.tiles[0], {
                 minZoom: data.minzoom,
                 maxNativeZoom: maxZoom,
+                opacity: 0.5,
                 bounds: L.latLngBounds([bottom, left], [top, right]),
             }
         ).addTo(map);
diff --git a/titiler/xarray/reader.py b/titiler/xarray/reader.py
index 6d1fe01..689d5d3 100644
--- a/titiler/xarray/reader.py
+++ b/titiler/xarray/reader.py
@@ -5,7 +5,6 @@
 from typing import Any, Dict, List, Optional
 
 import attr
-import fsspec
 import numpy
 import s3fs
 import xarray
@@ -15,50 +14,106 @@
 from rio_tiler.io.xarray import XarrayReader
 from rio_tiler.types import BBox
 
+import fsspec
+from titiler.xarray.settings import ApiSettings
+
+api_settings = ApiSettings()
+DEFAULT_CACHE_TYPE = "blockcache"
 
-def parse_protocol(src_path: str, reference: Optional[bool] = False):
+
+def parse_protocol(src_path: str, reference: Optional[bool] = False) -> str:
     """
-    Parse protocol from path.
+    Parse the protocol from the source path.
     """
+    if reference:
+        return "reference"
     match = re.match(r"^(s3|https|http)", src_path)
-    protocol = "file"
     if match:
-        protocol = match.group(0)
-    # override protocol if reference
-    if reference:
-        protocol = "reference"
-    return protocol
+        return match.group(0)
+    else:
+        return "file"
 
 
-def xarray_engine(src_path: str):
+def get_cache_args(
+    protocol: str, cache_type: str = DEFAULT_CACHE_TYPE
+) -> Dict[str, Any]:
     """
-    Parse xarray engine from path.
+    Get the cache arguments for the given protocol.
     """
-    H5NETCDF_EXTENSIONS = [".nc", ".nc4"]
-    lower_filename = src_path.lower()
-    if any(lower_filename.endswith(ext) for ext in H5NETCDF_EXTENSIONS):
-        return "h5netcdf"
+    return {
+        "target_protocol": protocol,
+        "cache_storage": f"{api_settings.fsspec_cache_directory}/fsspec-{cache_type}",
+        "remote_options": {"anon": True},
+    }
+
+
+def get_reference_args(src_path: str, protocol: str, anon: Optional[bool]) -> Dict:
+    """
+    Get the reference arguments for the given source path.
+    """
+    base_args = {"remote_options": {"anon": anon}}
+    if api_settings.enable_fsspec_cache:
+        base_args["target_options"] = {"fo": src_path}  # type: ignore
+        base_args.update(get_cache_args(protocol, cache_type="filecache"))
     else:
-        return "zarr"
+        base_args["fo"] = src_path  # type: ignore
+    return base_args
 
 
-def get_file_handler(
-    src_path: str, protocol: str, xr_engine: str, reference: Optional[bool] = False
+def get_filesystem(
+    src_path: str,
+    protocol: str,
+    xr_engine: str,
+    enable_fsspec_cache: bool,
+    reference: Optional[bool],
+    anon: Optional[bool],
 ):
     """
-    Returns the appropriate file handler based on the protocol.
+    Get the filesystem for the given source path.
     """
-    if protocol in ["https", "http"] or xr_engine == "h5netcdf":
-        fs = fsspec.filesystem(protocol)
-        return fs.open(src_path)
-    elif protocol == "s3":
-        fs = s3fs.S3FileSystem()
-        return s3fs.S3Map(root=src_path, s3=fs)
+    if protocol == "s3":
+        s3_filesystem = (
+            fsspec.filesystem(DEFAULT_CACHE_TYPE, **get_cache_args(protocol))  # type: ignore
+            if enable_fsspec_cache
+            else s3fs.S3FileSystem()
+        )
+        return (
+            s3_filesystem.open(src_path)
+            if xr_engine == "h5netcdf"
+            else s3fs.S3Map(root=src_path, s3=s3_filesystem)
+        )
     elif reference:
-        fs = fsspec.filesystem("reference", fo=src_path, remote_options={"anon": True})
-        return fs.get_mapper("")
+        reference_args = get_reference_args(src_path, protocol, anon)
+        return (
+            # using blockcache returns '_io.BytesIO' object has no attribute 'blocksize'
+            fsspec.filesystem("filecache", **reference_args).get_mapper("")  # type: ignore
+            if enable_fsspec_cache
+            else fsspec.filesystem("reference", **reference_args).get_mapper("")  # type: ignore
+        )
+    elif protocol in ["https", "http", "file"]:
+        filesystem = (
+            fsspec.filesystem(DEFAULT_CACHE_TYPE, **get_cache_args(protocol))  # type: ignore
+            if enable_fsspec_cache
+            else fsspec.filesystem(protocol)  # type: ignore
+        )
+        if xr_engine == "h5netcdf":
+            return filesystem.open(src_path)
+        else:
+            return filesystem.get_mapper(src_path)
     else:
-        return src_path
+        raise ValueError(f"Unsupported protocol: {protocol}")
+
+
+def xarray_engine(src_path: str):
+    """
+    Parse xarray engine from path.
+    """
+    H5NETCDF_EXTENSIONS = [".nc", ".nc4"]
+    lower_filename = src_path.lower()
+    if any(lower_filename.endswith(ext) for ext in H5NETCDF_EXTENSIONS):
+        return "h5netcdf"
+    else:
+        return "zarr"
 
 
 def xarray_open_dataset(
@@ -67,18 +122,23 @@ def xarray_open_dataset(
     reference: Optional[bool] = False,
     decode_times: Optional[bool] = True,
     consolidated: Optional[bool] = True,
+    anon: Optional[bool] = True,
 ) -> xarray.Dataset:
-    """Open dataset."""
-
+    """
+    Open dataset using xarray.
+    """
     protocol = parse_protocol(src_path, reference=reference)
     xr_engine = xarray_engine(src_path)
-    file_handler = get_file_handler(src_path, protocol, xr_engine, reference)
+    file_handler = get_filesystem(
+        src_path, protocol, xr_engine, api_settings.enable_fsspec_cache, reference, anon
+    )
 
     # Arguments for xarray.open_dataset
     # Default args
     xr_open_args: Dict[str, Any] = {
         "decode_coords": "all",
         "decode_times": decode_times,
+        "engine": xr_engine,
     }
 
     # Argument if we're opening a datatree
@@ -116,26 +176,24 @@ def arrange_coordinates(da: xarray.DataArray) -> xarray.DataArray:
     if "longitude" in da.dims:
         longitude_var_name = "longitude"
     da = da.rename({latitude_var_name: "y", longitude_var_name: "x"})
-    if "time" in da.dims:
-        da = da.transpose("time", "y", "x")
-    else:
-        da = da.transpose("y", "x")
     return da
 
 
 def get_variable(
     ds: xarray.Dataset,
     variable: str,
+    xr_engine: str,
     time_slice: Optional[str] = None,
     drop_dim: Optional[str] = None,
 ) -> xarray.DataArray:
     """Get Xarray variable as DataArray."""
+
     da = ds[variable]
-    da = arrange_coordinates(da)
     # TODO: add test
     if drop_dim:
         dim_to_drop, dim_val = drop_dim.split("=")
         da = da.sel({dim_to_drop: dim_val}).drop(dim_to_drop)
+    da = arrange_coordinates(da)
 
     if (da.x > 180).any():
         # Adjust the longitude coordinates to the -180 to 180 range
@@ -162,7 +220,6 @@ def get_variable(
         )
     else:
         da = da.isel(time=0)
-
     return da
 
 
@@ -178,6 +235,7 @@ class ZarrReader(XarrayReader):
     decode_times: bool = attr.ib(default=False)
     group: Optional[Any] = attr.ib(default=None)
     consolidated: Optional[bool] = attr.ib(default=True)
+    anon: Optional[bool] = attr.ib(default=True)
 
     # xarray.DataArray options
     time_slice: Optional[str] = attr.ib(default=None)
@@ -206,11 +264,13 @@ def __attrs_post_init__(self):
                 group=self.group,
                 reference=self.reference,
                 consolidated=self.consolidated,
-            ),
+                anon=self.anon,
+            )
         )
         self.input = get_variable(
             self.ds,
             self.variable,
+            xr_engine=xarray_engine(self.src_path),
             time_slice=self.time_slice,
             drop_dim=self.drop_dim,
         )
diff --git a/titiler/xarray/settings.py b/titiler/xarray/settings.py
index 3f6056d..13f733e 100644
--- a/titiler/xarray/settings.py
+++ b/titiler/xarray/settings.py
@@ -11,8 +11,10 @@ class ApiSettings(BaseSettings):
     cors_origins: str = "*"
     cors_allow_methods: str = "GET"
     cachecontrol: str = "public, max-age=3600"
+    enable_fsspec_cache: bool = True
     root_path: str = ""
    debug: bool = False
+    fsspec_cache_directory: str = "/mnt/efs"
 
     model_config = SettingsConfigDict(env_prefix="TITILER_XARRAY_", env_file=".env")