Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Make redis and fakeredis optional dependencies #57

Open
wants to merge 22 commits into
base: dev
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['3.8', '3.9', '3.10']
python-version: ['3.9', '3.10']

steps:
- uses: actions/checkout@v3
Expand All @@ -56,7 +56,7 @@ jobs:
needs: [tests]
runs-on: ubuntu-latest
if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/dev' || startsWith(github.ref, 'refs/tags/v')

defaults:
run:
working-directory: infrastructure/aws
Expand Down Expand Up @@ -98,7 +98,7 @@ jobs:
TITILER_XARRAY_DEBUG: True
STACK_ALARM_EMAIL: ${{ secrets.ALARM_EMAIL }}
STACK_STAGE: development

# Build and deploy to the production deployment whenever a new tag is pushed
- name: Build & Deploy Production
if: startsWith(github.ref, 'refs/tags/v')
Expand Down
1 change: 1 addition & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -30,3 +30,4 @@ repos:
exclude: tests/.*
additional_dependencies:
- types-attrs
- types-redis
108 changes: 65 additions & 43 deletions infrastructure/aws/cdk/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,70 +67,92 @@ def __init__(
],
)

security_group = ec2.SecurityGroup(
self,
"ElastiCacheSecurityGroup",
vpc=vpc,
description="Allow local access to ElastiCache redis",
allow_all_outbound=True,
)
security_group.add_ingress_rule(
ec2.Peer.ipv4(vpc.vpc_cidr_block), ec2.Port.tcp(6379)
)
if settings.enable_cache:
security_group = ec2.SecurityGroup(
self,
"ElastiCacheSecurityGroup",
vpc=vpc,
description="Allow local access to ElastiCache redis",
allow_all_outbound=True,
)
security_group.add_ingress_rule(
ec2.Peer.ipv4(vpc.vpc_cidr_block), ec2.Port.tcp(6379)
)

# Create the redis cluster
redis_cluster = elasticache.CfnCacheCluster(
self,
f"{id}-redis-cluster",
engine="redis",
cache_node_type="cache.t3.small",
num_cache_nodes=1,
vpc_security_group_ids=[security_group.security_group_id],
cache_subnet_group_name=f"{id}-cache-subnet-group",
cluster_name=f"{id}-redis-cluster",
)
# Create the redis cluster
redis_cluster = elasticache.CfnCacheCluster(
self,
f"{id}-redis-cluster",
engine="redis",
cache_node_type="cache.t3.small",
num_cache_nodes=1,
vpc_security_group_ids=[security_group.security_group_id],
cache_subnet_group_name=f"{id}-cache-subnet-group",
cluster_name=f"{id}-redis-cluster",
)

# Define the subnet group for the ElastiCache cluster
subnet_group = elasticache.CfnSubnetGroup(
self,
f"{id}-cache-subnet-group",
description="Subnet group for ElastiCache",
subnet_ids=vpc.select_subnets(subnet_type=ec2.SubnetType.PUBLIC).subnet_ids,
cache_subnet_group_name=f"{id}-cache-subnet-group",
)
# Define the subnet group for the ElastiCache cluster
subnet_group = elasticache.CfnSubnetGroup(
self,
f"{id}-cache-subnet-group",
description="Subnet group for ElastiCache",
subnet_ids=vpc.select_subnets(
subnet_type=ec2.SubnetType.PUBLIC
).subnet_ids,
cache_subnet_group_name=f"{id}-cache-subnet-group",
)

# Add dependency - ensure subnet group is created before the cache cluster
redis_cluster.add_depends_on(subnet_group)
# Add dependency - ensure subnet group is created before the cache cluster
redis_cluster.add_depends_on(subnet_group)

veda_reader_role = iam.Role.from_role_arn(
self,
"veda-reader-dev-role",
role_arn=f"arn:aws:iam::{self.account}:role/veda-data-reader-dev",
)
if settings.data_access_role_name is not None:
data_access_role = iam.Role.from_role_arn(
self,
"data-access-role",
role_arn=f"arn:aws:iam::{self.account}:role/{settings.data_access_role_name}",
)
else:
data_access_role = iam.Role(
self,
"data-access-role",
assumed_by=iam.ServicePrincipal("lambda.amazonaws.com"),
)

data_access_role.add_managed_policy(
iam.ManagedPolicy.from_aws_managed_policy_name("AmazonS3ReadOnlyAccess")
)

titiler_env = {
**DEFAULT_ENV,
**environment,
}

if settings.enable_cache:
titiler_env.update(
{"TITILER_XARRAY_CACHE_HOST": redis_cluster.attr_redis_endpoint_address}
)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

can we move this block up, so we don't have 2 settings.enable_cache

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

not-mandatory if you want to merge


dockerfile_name = "Dockerfile.redis" if settings.enable_cache else "Dockerfile"

lambda_function = aws_lambda.Function(
self,
f"{id}-lambda",
runtime=runtime,
code=aws_lambda.Code.from_docker_build(
path=os.path.abspath(context_dir),
file="infrastructure/aws/lambda/Dockerfile",
file=f"infrastructure/aws/lambda/{dockerfile_name}",
platform="linux/amd64",
),
handler="handler.handler",
memory_size=memory,
reserved_concurrent_executions=concurrent,
timeout=Duration.seconds(timeout),
environment={
**DEFAULT_ENV,
**environment,
"TITILER_XARRAY_CACHE_HOST": redis_cluster.attr_redis_endpoint_address,
},
environment=titiler_env,
log_retention=logs.RetentionDays.ONE_WEEK,
vpc=vpc,
vpc_subnets=ec2.SubnetSelection(subnet_type=ec2.SubnetType.PUBLIC),
allow_public_subnet=True,
role=veda_reader_role,
role=data_access_role,
)

# Create an S3 VPC Endpoint
Expand Down
4 changes: 4 additions & 0 deletions infrastructure/aws/cdk/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,10 @@ class StackSettings(pydantic.BaseSettings):
timeout: int = 30
memory: int = 3009

enable_cache: bool = True

data_access_role_name: Optional[str] = "veda-data-reader-dev"

# The maximum of concurrent executions you want to reserve for the function.
# Default: - No specific limit - account limit.
max_concurrent: Optional[int]
Expand Down
5 changes: 5 additions & 0 deletions infrastructure/aws/lambda/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,11 @@ RUN find /asset -type d -a -name 'tests' -print0 | xargs -0 rm -rf
RUN rm -rdf /asset/numpy/doc/ /asset/bin /asset/geos_license /asset/Misc
RUN rm -rdf /asset/boto3*
RUN rm -rdf /asset/botocore*
RUN find /asset -type f -path '*LICENSE*' -delete
RUN find /asset -type f -path '*README*' -delete
RUN find /asset -type f -path '*AUTHORS*' -delete
RUN find /asset -type f -path '*pyproject*' -delete
RUN find /asset -type f -path '*setupcf*' -delete

COPY infrastructure/aws/lambda/handler.py /asset/handler.py

Expand Down
38 changes: 38 additions & 0 deletions infrastructure/aws/lambda/Dockerfile.redis
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
# Lambda packaging image for titiler-xarray with the optional `cache` extra
# (redis client) installed — used when the CDK stack deploys with caching enabled.
ARG PYTHON_VERSION=3.10

FROM --platform=linux/amd64 public.ecr.aws/lambda/python:${PYTHON_VERSION}

WORKDIR /tmp

# Copy only what `pip install .` needs to build the package.
COPY pyproject.toml pyproject.toml
COPY LICENSE LICENSE
COPY README.md README.md
COPY titiler/ titiler/

# Install dependencies
# HACK: aiobotocore has a tight botocore dependency
# https://github.com/aio-libs/aiobotocore/issues/862
# and because we NEED to remove both boto3 and botocore to save space for the package
# we have to force old package versions that seem `almost` compatible with the Lambda env botocore
# https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html
RUN pip install --upgrade pip

# `.[cache]` pulls in the redis client; everything is staged into /asset,
# which the CDK build later extracts as the Lambda deployment bundle.
RUN pip install ".[cache]" "mangum>=0.10.0" "botocore==1.29.76" "aiobotocore==2.5.0" -t /asset --no-binary pydantic;

# Reduce package size and remove useless files
# Keep only compiled bytecode: promote each .pyc out of __pycache__ (dropping the
# .cpython-XX tag from the name), then delete the caches and the .py sources.
RUN cd /asset && find . -type f -name '*.pyc' | while read f; do n=$(echo $f | sed 's/__pycache__\///' | sed 's/.cpython-[0-9]*//'); cp $f $n; done;
RUN cd /asset && find . -type d -a -name '__pycache__' -print0 | xargs -0 rm -rf
RUN cd /asset && find . -type f -a -name '*.py' -print0 | xargs -0 rm -f
RUN find /asset -type d -a -name 'tests' -print0 | xargs -0 rm -rf
RUN rm -rdf /asset/numpy/doc/ /asset/bin /asset/geos_license /asset/Misc
# boto3/botocore are provided by the Lambda runtime, so the bundled copies are dropped
# (this is why the pinned botocore/aiobotocore versions above must stay compatible).
RUN rm -rdf /asset/boto3*
RUN rm -rdf /asset/botocore*
# Strip packaging metadata/doc files to shave more bytes off the bundle.
# NOTE(review): '*setupcf*' looks like a typo for 'setup.cfg' ('*setup.cfg*') — confirm;
# the same pattern appears in the non-redis Dockerfile.
RUN find /asset -type f -path '*LICENSE*' -delete
RUN find /asset -type f -path '*README*' -delete
RUN find /asset -type f -path '*AUTHORS*' -delete
RUN find /asset -type f -path '*pyproject*' -delete
RUN find /asset -type f -path '*setupcf*' -delete

COPY infrastructure/aws/lambda/handler.py /asset/handler.py

# NOTE(review): placeholder CMD — presumably this image is only used to produce /asset
# (via Code.from_docker_build) and is never run as the Lambda entrypoint; verify.
CMD ["echo", "hello world"]
32 changes: 16 additions & 16 deletions infrastructure/aws/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion infrastructure/aws/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
"license": "MIT",
"private": true,
"dependencies": {
"cdk": "2.76.0-alpha.0"
"cdk": "2.138.0"
},
"scripts": {
"cdk": "cdk"
Expand Down
8 changes: 4 additions & 4 deletions infrastructure/aws/requirements-cdk.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
aws-cdk-lib==2.76.0
aws_cdk-aws_apigatewayv2_alpha==2.76.0a0
aws_cdk-aws_apigatewayv2_integrations_alpha==2.76.0a0
aws-cdk-lib==2.138.0
aws_cdk-aws_apigatewayv2_alpha==2.114.1a0
aws_cdk-aws_apigatewayv2_integrations_alpha==2.114.1a0
constructs>=10.0.0

pydantic~=1.0
python-dotenv
python-dotenv
36 changes: 20 additions & 16 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
name = "titiler.xarray"
description = "TiTiler extension for xarray."
readme = "README.md"
requires-python = ">=3.8"
requires-python = ">=3.9"
authors = [
{name = "Vincent Sarago", email = "[email protected]"},
]
Expand All @@ -17,31 +17,31 @@ classifiers = [
"Intended Audience :: Information Technology",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Topic :: Scientific/Engineering :: GIS",
]
dynamic = ["version"]
dependencies = [
"cftime",
"h5netcdf",
"xarray",
"rioxarray",
"zarr",
"fakeredis",
"cftime==1.6.3",
"h5netcdf==1.3.0",
"xarray==2024.3.0",
"rioxarray==0.15.0",
"zarr==2.17.2",
"h5py==3.10.0",
"rasterio==1.3.9",
"fsspec",
"s3fs",
"aiohttp",
"requests",
"requests==2.31.0",
"pydantic==2.0.2",
"titiler.core>=0.14.1,<0.15",
"pydantic-settings~=2.0",
"titiler.core==0.14.1",
"pydantic-settings==2.0.3",
"pandas==1.5.3",
"redis",
"fastapi>=0.100.0,<0.107.0",
"starlette<0.28",
"fastapi==0.106.0",
"starlette==0.27.0",
]

[project.optional-dependencies]
Expand All @@ -51,17 +51,21 @@ test = [
"pytest-asyncio",
"httpx",
"yappi",
"fakeredis"
]
dev = [
"pre-commit"
"pre-commit",
"fakeredis"
]
debug = [
"yappi"
]
server = [
"uvicorn"
]

cache = [
"redis==5.0.3"
]
[project.urls]
Homepage = "https://github.com/developmentseed/titiler-xarray"
Issues = "https://github.com/developmentseed/titiler-xarray/issues"
Expand Down
Loading
Loading