diff --git a/nad_ch/application/interfaces.py b/nad_ch/application/interfaces.py
index 6b06bae..e205d1e 100644
--- a/nad_ch/application/interfaces.py
+++ b/nad_ch/application/interfaces.py
@@ -1,5 +1,6 @@
 from typing import Optional, Protocol
 from nad_ch.application.dtos import DownloadResult
+from nad_ch.domain.repositories import DataProviderRepository, DataSubmissionRepository
 
 
 class Logger(Protocol):
@@ -30,3 +31,25 @@ def cleanup_temp_dir(self, temp_dir: str) -> bool:
 class TaskQueue(Protocol):
     def run_load_and_validate(self, path: str):
         ...
+
+
+class ApplicationContext:
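+    """Aggregates the repositories, logger, storage, and task queue that the
+    use cases depend on; concrete contexts assign the underlying attributes."""
+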
+    @property
+    def providers(self) -> DataProviderRepository:
+        return self._providers
+
+    @property
+    def submissions(self) -> DataSubmissionRepository:
+        return self._submissions
+
+    @property
+    def logger(self) -> Logger:
+        return self._logger
+
+    @property
+    def storage(self) -> Storage:
+        return self._storage
+
+    @property
+    def task_queue(self) -> TaskQueue:
+        return self._task_queue
diff --git a/nad_ch/application/use_cases.py b/nad_ch/application/use_cases.py
index 456ae7b..1a5c4e3 100644
--- a/nad_ch/application/use_cases.py
+++ b/nad_ch/application/use_cases.py
@@ -1,7 +1,7 @@
 import os
 from typing import List
 from nad_ch.application.dtos import DownloadResult
-from nad_ch.application_context import ApplicationContext
+from nad_ch.application.interfaces import ApplicationContext
 from nad_ch.domain.entities import DataProvider, DataSubmission
 
 
diff --git a/nad_ch/application_context.py b/nad_ch/application_context.py
deleted file mode 100644
index 686bbbb..0000000
--- a/nad_ch/application_context.py
+++ /dev/null
@@ -1,111 +0,0 @@
-import logging
-from nad_ch.application.interfaces import Logger, Storage, TaskQueue
-import nad_ch.config as config
-from nad_ch.domain.repositories import DataProviderRepository, DataSubmissionRepository
-from nad_ch.infrastructure.database import (
-    create_session_factory,
-    SqlAlchemyDataProviderRepository,
-    SqlAlchemyDataSubmissionRepository,
-)
-from nad_ch.infrastructure.logger import BasicLogger
-from nad_ch.infrastructure.storage import S3Storage, MinioStorage
-from nad_ch.infrastructure.task_queue import celery_app, CeleryTaskQueue
-from tests.fakes_and_mocks import (
-    FakeDataProviderRepository,
-    FakeDataSubmissionRepository,
-    FakeStorage,
-)
-
-
-class ApplicationContext:
-    def __init__(self):
-        self._session = create_session_factory(config.DATABASE_URL)
-        self._providers = self.create_provider_repository()
-        self._submissions = self.create_submission_repository()
-        self._logger = self.create_logger()
-        self._storage = self.create_storage()
-        self._task_queue = self.create_task_queue()
-
-    def create_provider_repository(self):
-        return SqlAlchemyDataProviderRepository(self._session)
-
-    def create_submission_repository(self):
-        return SqlAlchemyDataSubmissionRepository(self._session)
-
-    def create_logger(self):
-        return BasicLogger(__name__)
-
-    def create_storage(self):
-        return S3Storage(
-            config.S3_ACCESS_KEY,
-            config.S3_SECRET_ACCESS_KEY,
-            config.S3_REGION,
-            config.S3_BUCKET_NAME,
-        )
-
-    def create_task_queue(self):
-        return CeleryTaskQueue(celery_app)
-
-    @property
-    def providers(self) -> DataProviderRepository:
-        return self._providers
-
-    @property
-    def submissions(self) -> DataSubmissionRepository:
-        return self._submissions
-
-    @property
-    def logger(self) -> Logger:
-        return self._logger
-
-    @property
-    def storage(self) -> Storage:
-        return self._storage
-
-    @property
-    def task_queue(self) -> TaskQueue:
-        return self._task_queue
-
-
-class DevLocalApplicationContext(ApplicationContext):
-    def create_logger(self):
-        return BasicLogger(__name__, logging.DEBUG)
-
-    def create_storage(self):
-        return MinioStorage(
-            config.S3_ENDPOINT,
-            config.S3_ACCESS_KEY,
-            config.S3_SECRET_ACCESS_KEY,
-            config.S3_BUCKET_NAME,
-        )
-
-
-class TestApplicationContext(ApplicationContext):
-    def __init__(self):
-        self._session = None
-        self._providers = self.create_provider_repository()
-        self._submissions = self.create_submission_repository()
-        self._logger = self.create_logger()
-        self._storage = self.create_storage()
-        self._task_queue = self.create_task_queue()
-
-    def create_provider_repository(self):
-        return FakeDataProviderRepository()
-
-    def create_submission_repository(self):
-        return FakeDataSubmissionRepository()
-
-    def create_logger(self):
-        return BasicLogger(__name__, logging.DEBUG)
-
-    def create_storage(self):
-        return FakeStorage()
-
-
-def create_app_context():
-    if config.APP_ENV == "test":
-        return TestApplicationContext()
-    elif config.APP_ENV == "dev_local":
-        return DevLocalApplicationContext()
-    else:
-        return ApplicationContext()
diff --git a/nad_ch/cli.py b/nad_ch/cli.py
index 8fe70b3..683090e 100644
--- a/nad_ch/cli.py
+++ b/nad_ch/cli.py
@@ -1,5 +1,5 @@
 from nad_ch.controllers.cli import cli
-from nad_ch.application_context import create_app_context
+from nad_ch.config import create_app_context
 
 
 def main():
diff --git a/nad_ch/config/__init__.py b/nad_ch/config/__init__.py
index e884c56..94c724a 100644
--- a/nad_ch/config/__init__.py
+++ b/nad_ch/config/__init__.py
@@ -1,7 +1,9 @@
 from .base import APP_ENV
 
 
-if APP_ENV == "dev_local" or APP_ENV == "test":
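+# Importing the environment module also re-exports its create_app_context() factory.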
+if APP_ENV == "dev_local":
     from .development_local import *
 elif APP_ENV == "dev_remote":
     from .development_remote import *
+elif APP_ENV == "test":
+    from .test import *
diff --git a/nad_ch/config/development_local.py b/nad_ch/config/development_local.py
index bd6a1b8..43ac8e8 100644
--- a/nad_ch/config/development_local.py
+++ b/nad_ch/config/development_local.py
@@ -1,12 +1,17 @@
+import logging
 import os
 from .base import *
+from nad_ch.application.interfaces import ApplicationContext
+from nad_ch.infrastructure.database import (
+    create_session_factory,
+    SqlAlchemyDataProviderRepository,
+    SqlAlchemyDataSubmissionRepository,
+)
+from nad_ch.infrastructure.logger import BasicLogger
+from nad_ch.infrastructure.storage import MinioStorage
 
 
-# Local development config
-APP_ENV = os.getenv("APP_ENV")
 STORAGE_PATH = os.getenv("STORAGE_PATH")
-
-
 postgres_user = os.getenv("POSTGRES_USER")
 postgres_password = os.getenv("POSTGRES_PASSWORD")
 postgres_host = os.getenv("POSTGRES_HOST")
@@ -16,13 +21,46 @@
     f"postgresql+psycopg2://{postgres_user}:{postgres_password}"
     f"@{postgres_host}:{postgres_port}/{postgres_db}"
 )
-
-
 QUEUE_BROKER_URL = os.getenv("QUEUE_BROKER_URL")
 QUEUE_BACKEND_URL = os.getenv("QUEUE_BACKEND_URL")
-
 S3_BUCKET_NAME = os.getenv("S3_BUCKET_NAME")
 S3_ENDPOINT = os.getenv("S3_ENDPOINT")
 S3_ACCESS_KEY = os.getenv("S3_ACCESS_KEY")
 S3_SECRET_ACCESS_KEY = os.getenv("S3_SECRET_ACCESS_KEY")
 S3_REGION = os.getenv("S3_REGION")
+
+
+class DevLocalApplicationContext(ApplicationContext):
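+    """Context for local development: SQLAlchemy repositories backed by the
+    local Postgres database, MinIO object storage, and a DEBUG-level logger."""
+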
+    def __init__(self):
+        self._session = create_session_factory(DATABASE_URL)
+        self._providers = self.create_provider_repository()
+        self._submissions = self.create_submission_repository()
+        self._logger = self.create_logger()
+        self._storage = self.create_storage()
+        self._task_queue = self.create_task_queue()
+
+    def create_provider_repository(self):
+        return SqlAlchemyDataProviderRepository(self._session)
+
+    def create_submission_repository(self):
+        return SqlAlchemyDataSubmissionRepository(self._session)
+
+    def create_logger(self):
+        return BasicLogger(__name__, logging.DEBUG)
+
+    def create_storage(self):
+        return MinioStorage(
+            S3_ENDPOINT,
+            S3_ACCESS_KEY,
+            S3_SECRET_ACCESS_KEY,
+            S3_BUCKET_NAME,
+        )
+
+    def create_task_queue(self):
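+        # Deferred import: the Celery app is only loaded when a task queue is created.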
+        from nad_ch.infrastructure.task_queue import celery_app, CeleryTaskQueue
+
+        return CeleryTaskQueue(celery_app)
+
+
+def create_app_context():
+    return DevLocalApplicationContext()
diff --git a/nad_ch/config/development_remote.py b/nad_ch/config/development_remote.py
index 310c857..427e5b2 100644
--- a/nad_ch/config/development_remote.py
+++ b/nad_ch/config/development_remote.py
@@ -1,6 +1,14 @@
 import json
 import os
 from .base import *
+from nad_ch.application.interfaces import ApplicationContext
+from nad_ch.infrastructure.database import (
+    create_session_factory,
+    SqlAlchemyDataProviderRepository,
+    SqlAlchemyDataSubmissionRepository,
+)
+from nad_ch.infrastructure.logger import BasicLogger
+from nad_ch.infrastructure.storage import S3Storage
 
 
 def get_credentials(service_name, default={}):
@@ -8,7 +16,6 @@ def get_credentials(service_name, default={}):
     return service[0].get("credentials", default) if service else default
 
 
-# Remote development config
 vcap_services = json.loads(os.getenv("VCAP_SERVICES", "{}"))
 
 
@@ -28,3 +35,39 @@ def get_credentials(service_name, default={}):
     "secret_access_key", os.getenv("S3_SECRET_ACCESS_KEY")
 )
 S3_REGION = s3_credentials.get("region", os.getenv("S3_REGION"))
+
+
+class DevRemoteApplicationContext(ApplicationContext):
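+    """Context for remote development: SQLAlchemy repositories and S3 storage,
+    with credentials resolved from VCAP_SERVICES above (env vars as fallback)."""
+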
+    def __init__(self):
+        self._session = create_session_factory(DATABASE_URL)
+        self._providers = self.create_provider_repository()
+        self._submissions = self.create_submission_repository()
+        self._logger = self.create_logger()
+        self._storage = self.create_storage()
+        self._task_queue = self.create_task_queue()
+
+    def create_provider_repository(self):
+        return SqlAlchemyDataProviderRepository(self._session)
+
+    def create_submission_repository(self):
+        return SqlAlchemyDataSubmissionRepository(self._session)
+
+    def create_logger(self):
+        return BasicLogger(__name__)
+
+    def create_storage(self):
+        return S3Storage(
+            S3_ACCESS_KEY,
+            S3_SECRET_ACCESS_KEY,
+            S3_REGION,
+            S3_BUCKET_NAME,
+        )
+
+    def create_task_queue(self):
+        from nad_ch.infrastructure.task_queue import celery_app, CeleryTaskQueue
+
+        return CeleryTaskQueue(celery_app)
+
+
+def create_app_context():
+    return DevRemoteApplicationContext()
diff --git a/nad_ch/config/test.py b/nad_ch/config/test.py
new file mode 100644
index 0000000..d42138e
--- /dev/null
+++ b/nad_ch/config/test.py
@@ -0,0 +1,45 @@
+import logging
+import os
+from nad_ch.application.interfaces import ApplicationContext
+from nad_ch.infrastructure.logger import BasicLogger
+from tests.fakes_and_mocks import (
+    FakeDataProviderRepository,
+    FakeDataSubmissionRepository,
+    FakeStorage,
+)
+
+
+DATABASE_URL = os.getenv("DATABASE_URL")
+QUEUE_BROKER_URL = os.getenv("QUEUE_BROKER_URL")
+QUEUE_BACKEND_URL = os.getenv("QUEUE_BACKEND_URL")
+
+
+class TestApplicationContext(ApplicationContext):
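+    """Context for the test suite: in-memory fakes replace the database-backed
+    repositories and object storage; no session or real bucket is created."""
+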
+    def __init__(self):
+        self._session = None
+        self._providers = self.create_provider_repository()
+        self._submissions = self.create_submission_repository()
+        self._logger = self.create_logger()
+        self._storage = self.create_storage()
+        self._task_queue = self.create_task_queue()
+
+    def create_provider_repository(self):
+        return FakeDataProviderRepository()
+
+    def create_submission_repository(self):
+        return FakeDataSubmissionRepository()
+
+    def create_logger(self):
+        return BasicLogger(__name__, logging.DEBUG)
+
+    def create_storage(self):
+        return FakeStorage()
+
+    def create_task_queue(self):
+        from nad_ch.infrastructure.task_queue import celery_app, CeleryTaskQueue
+
+        return CeleryTaskQueue(celery_app)
+
+
+def create_app_context():
+    return TestApplicationContext()
diff --git a/nad_ch/infrastructure/task_queue.py b/nad_ch/infrastructure/task_queue.py
index fa37060..fb94fb6 100644
--- a/nad_ch/infrastructure/task_queue.py
+++ b/nad_ch/infrastructure/task_queue.py
@@ -1,3 +1,4 @@
+import os
 from celery import Celery
 import geopandas as gpd
 from nad_ch.application.interfaces import TaskQueue
diff --git a/tests/application/test_use_cases.py b/tests/application/test_use_cases.py
index 493b7b3..7842564 100644
--- a/tests/application/test_use_cases.py
+++ b/tests/application/test_use_cases.py
@@ -1,6 +1,6 @@
 import pytest
 import re
-from nad_ch.application_context import create_app_context
+from nad_ch.config import create_app_context
 from nad_ch.domain.entities import DataProvider, DataSubmission
 from nad_ch.application.use_cases import (
     add_data_provider,
@@ -27,13 +27,13 @@ def test_add_data_provider(app_context):
 
 
 def test_add_data_provider_logs_error_if_no_provider_name_given(mocker):
-    mock_context = mocker.patch("nad_ch.application_context.create_app_context")
+    mock_context = mocker.patch("nad_ch.config.create_app_context")
     add_data_provider(mock_context, "")
     mock_context.logger.error.assert_called_once_with("Provider name required")
 
 
 def test_add_data_provider_logs_error_if_provider_name_not_unique(mocker):
-    mock_context = mocker.patch("nad_ch.application_context.create_app_context")
+    mock_context = mocker.patch("nad_ch.config.create_app_context")
     mock_context.providers.get_by_name.return_value("State X")
     add_data_provider(mock_context, "State X")