diff --git a/.github/workflows/build-and-release.yaml b/.github/workflows/build-and-release.yaml
index 10d64707e..9501c2f32 100644
--- a/.github/workflows/build-and-release.yaml
+++ b/.github/workflows/build-and-release.yaml
@@ -37,6 +37,14 @@ jobs:
       - name: Ruff
         run: ruff check --no-cache --output-format github .
 
+      - name: Install production requirements (for Mypy)
+        run: |
+          # Mypy needs production packages for typechecking
+          pip install --no-deps -r requirements/requirements-prod.txt
+
+      - name: Mypy
+        run: mypy
+
   check-compiled-requirements:
     runs-on: ubuntu-24.04
     if: ${{ !contains(github.event.head_commit.message, '#notests') }}
diff --git a/bin/backfix-duplicate-categories.py b/bin/backfix-duplicate-categories.py
index 5057b4ed8..5d9769fb1 100755
--- a/bin/backfix-duplicate-categories.py
+++ b/bin/backfix-duplicate-categories.py
@@ -7,8 +7,8 @@
 from itertools import chain
 
 import click
-from sqlalchemy import func
-from sqlalchemy.sql import and_, exists
+from sqlalchemy import func  # type: ignore[import-untyped]
+from sqlalchemy.sql import and_, exists  # type: ignore[import-untyped]
 
 from inbox.error_handling import maybe_enable_rollbar
 from inbox.ignition import engine_manager
@@ -20,7 +20,7 @@
 log = get_logger(purpose="duplicate-category-backfill")
 
 
-def backfix_shard(shard_id, dry_run) -> None:
+def backfix_shard(shard_id, dry_run) -> None:  # type: ignore[no-untyped-def]
     categories_to_fix = []
     with session_scope_by_shard_id(shard_id) as db_session:
         # 'SELECT id FROM <table> GROUP BY <x>' does not select _all_ of the
@@ -188,7 +188,7 @@ def backfix_shard(shard_id, dry_run) -> None:
 @click.command()
 @click.option("--shard-id", type=int, default=None)
 @click.option("--dry-run", is_flag=True)
-def main(shard_id, dry_run) -> None:
+def main(shard_id, dry_run) -> None:  # type: ignore[no-untyped-def]
     maybe_enable_rollbar()
 
     if shard_id is not None:
diff --git a/bin/backfix-generic-imap-separators.py b/bin/backfix-generic-imap-separators.py
index fa92c8e9f..b243fdbce 100755
--- a/bin/backfix-generic-imap-separators.py
+++ b/bin/backfix-generic-imap-separators.py
@@ -23,7 +23,7 @@
 @click.option("--min-id", type=int, default=None)
 @click.option("--max-id", type=int, default=None)
 @click.option("--shard-id", type=int, default=None)
-def main(min_id, max_id, shard_id) -> None:
+def main(min_id, max_id, shard_id) -> None:  # type: ignore[no-untyped-def]
     maybe_enable_rollbar()
 
     generic_accounts = []
@@ -37,12 +37,12 @@ def main(min_id, max_id, shard_id) -> None:
             )
 
             if min_id is not None:
-                generic_accounts = generic_accounts.filter(
+                generic_accounts = generic_accounts.filter(  # type: ignore[attr-defined]
                     GenericAccount.id > min_id
                 )
 
             if max_id is not None:
-                generic_accounts = generic_accounts.filter(
+                generic_accounts = generic_accounts.filter(  # type: ignore[attr-defined]
                     GenericAccount.id <= max_id
                 )
diff --git a/bin/check-attachments.py b/bin/check-attachments.py
index 534d0eba2..da5a217de 100755
--- a/bin/check-attachments.py
+++ b/bin/check-attachments.py
@@ -5,8 +5,8 @@
 from collections import defaultdict
 
 import click
-from sqlalchemy import true
-from sqlalchemy.sql.expression import func
+from sqlalchemy import true  # type: ignore[import-untyped]
+from sqlalchemy.sql.expression import func  # type: ignore[import-untyped]
 
 from inbox.error_handling import maybe_enable_rollbar
 from inbox.logging import configure_logging, get_logger
@@ -20,8 +20,8 @@
 NUM_MESSAGES = 10
 
 
-def process_account(account_id):  # noqa: ANN201
-    ret = defaultdict(int)
+def process_account(account_id):  # type: ignore[no-untyped-def] # noqa: ANN201
+    ret = defaultdict(int)  # type: ignore[var-annotated]
 
     try:
         with session_scope(account_id) as db_session:
@@ -67,7 +67,7 @@ def process_account(account_id):  # noqa: ANN201
 
 @click.command()
 @click.option("--num-accounts", type=int, default=1500)
-def main(num_accounts) -> None:
+def main(num_accounts) -> None:  # type: ignore[no-untyped-def]
     maybe_enable_rollbar()
 
     with global_session_scope() as db_session:
diff --git a/bin/clear-all-heartbeats.py b/bin/clear-all-heartbeats.py
index 56aea03df..56fd80822 100755
--- a/bin/clear-all-heartbeats.py
+++ b/bin/clear-all-heartbeats.py
@@ -19,7 +19,7 @@
 @click.option("--host", "-h", type=str, default="localhost")
 @click.option("--port", "-p", type=int, default=6379)
 @click.option("--database", "-d", type=int, default=STATUS_DATABASE)
-def main(host, port, database) -> None:
+def main(host, port, database) -> None:  # type: ignore[no-untyped-def]
     maybe_enable_rollbar()
 
     connection_pool = BlockingConnectionPool(
@@ -36,7 +36,10 @@ def main(host, port, database) -> None:
 
     count = 0
     for name in client.scan_iter(count=100):
-        if name == "ElastiCacheMasterReplicationTimestamp":
+        if (
+            name  # type: ignore[comparison-overlap]
+            == "ElastiCacheMasterReplicationTimestamp"
+        ):
             continue
         batch_client.delete(name)
         count += 1
diff --git a/bin/clear-db.py b/bin/clear-db.py
index 8c2c69e77..775625b09 100755
--- a/bin/clear-db.py
+++ b/bin/clear-db.py
@@ -16,7 +16,10 @@ def main() -> None:
         default=False,
     )
     args = parser.parse_args()
-    from inbox.ignition import init_db, main_engine
+    from inbox.ignition import (  # type: ignore[attr-defined]
+        init_db,
+        main_engine,
+    )
 
     maybe_enable_rollbar()
diff --git a/bin/clear-heartbeat-status.py b/bin/clear-heartbeat-status.py
index 516a62f29..c6fe01020 100755
--- a/bin/clear-heartbeat-status.py
+++ b/bin/clear-heartbeat-status.py
@@ -20,11 +20,15 @@
 @click.option("--account-id", "-a", type=int, required=True)
 @click.option("--folder-id", "-f", type=int)
 @click.option("--device-id", "-d", type=int)
-def main(host, port, account_id, folder_id, device_id) -> None:
+def main(  # type: ignore[no-untyped-def]
+    host, port, account_id, folder_id, device_id
+) -> None:
     maybe_enable_rollbar()
 
     print("Clearing heartbeat status...")
-    n = clear_heartbeat_status(account_id, folder_id, device_id, host, port)
+    n = clear_heartbeat_status(  # type: ignore[call-arg]
+        account_id, folder_id, device_id, host, port
+    )
     print(f"{n} folders cleared.")
     exit(0)
diff --git a/bin/clear-kv.py b/bin/clear-kv.py
index c0fa49362..d2112c041 100755
--- a/bin/clear-kv.py
+++ b/bin/clear-kv.py
@@ -5,7 +5,7 @@
 import click
 
 from inbox.error_handling import maybe_enable_rollbar
-from inbox.heartbeat.config import (
+from inbox.heartbeat.config import (  # type: ignore[attr-defined]
     REPORT_DATABASE,
     STATUS_DATABASE,
     _get_redis_client,
@@ -16,7 +16,7 @@
 @click.command()
 @click.option("--host", "-h", type=str)
 @click.option("--port", "-p", type=int, default=6379)
-def main(host, port) -> None:
+def main(host, port) -> None:  # type: ignore[no-untyped-def]
     maybe_enable_rollbar()
 
     if host:
diff --git a/bin/correct-autoincrements.py b/bin/correct-autoincrements.py
index 4d4258ddd..6b695d65b 100755
--- a/bin/correct-autoincrements.py
+++ b/bin/correct-autoincrements.py
@@ -10,7 +10,7 @@
 
 @click.command()
 @click.option("--dry-run", is_flag=True)
-def reset_db(dry_run) -> None:
+def reset_db(dry_run) -> None:  # type: ignore[no-untyped-def]
     maybe_enable_rollbar()
 
     database_hosts = config.get_required("DATABASE_HOSTS")
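The bulk of this diff is `# type: ignore[no-untyped-def]`, the error mypy raises for unannotated signatures when `disallow_untyped_defs` is enabled (an assumption about this repo's mypy configuration). A minimal sketch of the pattern and the annotation that would eventually replace the ignore; the parameter names mirror the scripts above:

```python
# Under mypy with disallow_untyped_defs = True, this signature is rejected
# ("Function is missing a type annotation"), hence the ignores above:
def backfix_shard(shard_id, dry_run) -> None:  # type: ignore[no-untyped-def]
    print(shard_id, dry_run)


# The ignore-free fix is simply to annotate the parameters:
def backfix_shard_annotated(shard_id: int, dry_run: bool) -> None:
    print(shard_id, dry_run)
```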
diff --git a/bin/create-db.py b/bin/create-db.py
index d633650db..10e88f3e0 100755
--- a/bin/create-db.py
+++ b/bin/create-db.py
@@ -6,7 +6,7 @@
 import alembic.command
 import alembic.config
 import click
-import sqlalchemy
+import sqlalchemy  # type: ignore[import-untyped]
 
 from inbox.config import config
 from inbox.error_handling import maybe_enable_rollbar
@@ -21,7 +21,7 @@
     help="Limit database initialization to only one host / set of shards",
 )
 @click.option("--host-ip", default=None)
-def main(target_hostname, host_ip) -> None:
+def main(target_hostname, host_ip) -> None:  # type: ignore[no-untyped-def]
     maybe_enable_rollbar()
 
     database_hosts = config.get_required("DATABASE_HOSTS")
diff --git a/bin/create-encryption-keys.py b/bin/create-encryption-keys.py
index e18a6f0b7..a7d2565f0 100755
--- a/bin/create-encryption-keys.py
+++ b/bin/create-encryption-keys.py
@@ -11,7 +11,7 @@
 
 
 def main() -> None:
-    from inbox.config import config, secrets_path
+    from inbox.config import config, secrets_path  # type: ignore[attr-defined]
 
     maybe_enable_rollbar()
diff --git a/bin/create-event-contact-associations.py b/bin/create-event-contact-associations.py
index 7bfe5a1c4..28321b219 100755
--- a/bin/create-event-contact-associations.py
+++ b/bin/create-event-contact-associations.py
@@ -5,7 +5,7 @@
 
 import click
-from sqlalchemy import asc
+from sqlalchemy import asc  # type: ignore[import-untyped]
 
 from inbox.contacts.processing import update_contacts_from_event
 from inbox.error_handling import maybe_enable_rollbar
@@ -19,7 +19,9 @@
 log = get_logger(purpose="create-event-contact-associations")
 
 
-def process_shard(shard_id, dry_run, id_start: int = 0) -> None:
+def process_shard(  # type: ignore[no-untyped-def]
+    shard_id, dry_run, id_start: int = 0
+) -> None:
     # At 500K events, we need to process 6 events per second to finish within a day.
     batch_size = 100
     rps = 6 / batch_size
@@ -95,7 +97,7 @@ def process_shard(shard_id, dry_run, id_start: int = 0) -> None:
 @click.option("--shard-id", type=int, default=None)
 @click.option("--id-start", type=int, default=0)
 @click.option("--dry-run", is_flag=True)
-def main(shard_id, id_start, dry_run) -> None:
+def main(shard_id, id_start, dry_run) -> None:  # type: ignore[no-untyped-def]
     maybe_enable_rollbar()
 
     if shard_id is not None:
diff --git a/bin/delete-account-data.py b/bin/delete-account-data.py
index 00d882c21..729d2be1c 100755
--- a/bin/delete-account-data.py
+++ b/bin/delete-account-data.py
@@ -35,7 +35,9 @@
 @click.option("--dry-run", is_flag=True)
 @click.option("--yes", is_flag=True)
 @click.option("--throttle", is_flag=True)
-def delete_account_data(account_id, dry_run, yes, throttle) -> int | None:
+def delete_account_data(  # type: ignore[no-untyped-def]
+    account_id, dry_run, yes, throttle
+) -> int | None:
     maybe_enable_rollbar()
 
     with session_scope(account_id) as db_session:
@@ -63,7 +65,9 @@ def delete_account_data(account_id, dry_run, yes, throttle) -> int | None:
             )
         )
 
-        answer = raw_input(question).strip().lower()  # noqa: F821
+        answer = (
+            raw_input(question).strip().lower()  # type: ignore[name-defined] # noqa: F821
+        )
 
         if answer != "yes":
             print("Will NOT delete, goodbye.")
diff --git a/bin/delete-marked-accounts.py b/bin/delete-marked-accounts.py
index 85a7f0417..4065e4411 100755
--- a/bin/delete-marked-accounts.py
+++ b/bin/delete-marked-accounts.py
@@ -29,7 +29,7 @@
 @click.command()
 @click.option("--throttle", is_flag=True)
 @click.option("--dry-run", is_flag=True)
-def run(throttle, dry_run) -> None:
+def run(throttle, dry_run) -> None:  # type: ignore[no-untyped-def]
     maybe_enable_rollbar()
 
     print("Python", sys.version, file=sys.stderr)
@@ -42,7 +42,9 @@ def run(throttle, dry_run) -> None:
             executor.submit(delete_account_data, host, throttle, dry_run)
 
 
-def delete_account_data(host, throttle, dry_run) -> None:
+def delete_account_data(  # type: ignore[no-untyped-def]
+    host, throttle, dry_run
+) -> None:
     while True:
         for shard in host["SHARDS"]:
             # Ensure shard is explicitly not marked as disabled
diff --git a/bin/detect-missing-sync-host.py b/bin/detect-missing-sync-host.py
index 5224888ee..e75a6c11b 100755
--- a/bin/detect-missing-sync-host.py
+++ b/bin/detect-missing-sync-host.py
@@ -2,7 +2,7 @@
 
 import click
-from sqlalchemy.orm import load_only
+from sqlalchemy.orm import load_only  # type: ignore[import-untyped]
 
 from inbox.error_handling import maybe_enable_rollbar
 from inbox.models.account import Account
diff --git a/bin/get-accounts-for-host.py b/bin/get-accounts-for-host.py
index 8ee51c887..759da5f03 100755
--- a/bin/get-accounts-for-host.py
+++ b/bin/get-accounts-for-host.py
@@ -10,7 +10,7 @@
 
 @click.command()
 @click.argument("hostname")
-def main(hostname) -> None:
+def main(hostname) -> None:  # type: ignore[no-untyped-def]
     maybe_enable_rollbar()
 
     with global_session_scope() as db_session:
diff --git a/bin/get-id.py b/bin/get-id.py
index 20860aa3b..1660d8c02 100755
--- a/bin/get-id.py
+++ b/bin/get-id.py
@@ -37,7 +37,7 @@
 @click.option("--type", "-t", type=str, required=True)
 @click.option("--id", type=str, default=None)
 @click.option("--public-id", type=str, default=None)
-def main(type, id, public_id) -> None:
+def main(type, id, public_id) -> None:  # type: ignore[no-untyped-def]
     maybe_enable_rollbar()
 
     type = type.lower()  # noqa: A001
@@ -55,11 +55,19 @@ def main(type, id, public_id) -> None:
     with global_session_scope() as db_session:
         if public_id:
             obj = (
-                db_session.query(cls).filter(cls.public_id == public_id).one()
+                db_session.query(cls)
+                .filter(
+                    cls.public_id == public_id  # type: ignore[attr-defined]
+                )
+                .one()
             )
             print(obj.id)
         elif id:
-            obj = db_session.query(cls).filter(cls.id == id).one()
+            obj = (
+                db_session.query(cls)
+                .filter(cls.id == id)  # type: ignore[attr-defined]
+                .one()
+            )
             print(obj.public_id)
diff --git a/bin/get-object.py b/bin/get-object.py
index 9c1bdde37..297715a84 100755
--- a/bin/get-object.py
+++ b/bin/get-object.py
@@ -34,7 +34,9 @@
 )
 
 try:
-    from inbox.models.backends.eas import EASFolderSyncStatus
+    from inbox.models.backends.eas import (  # type: ignore[import-not-found]
+        EASFolderSyncStatus,
+    )
 
     cls_for_type["easfoldersyncstatus"] = EASFolderSyncStatus
 except ImportError:
@@ -48,7 +50,9 @@
 @click.option("--account-id", type=str, default=None)
 @click.option("--namespace-id", type=str, default=None)
 @click.option("--readwrite", is_flag=True, default=False)
-def main(type, id, public_id, account_id, namespace_id, readwrite) -> None:
+def main(  # type: ignore[no-untyped-def]
+    type, id, public_id, account_id, namespace_id, readwrite
+) -> None:
     maybe_enable_rollbar()
 
     type = type.lower()  # noqa: A001
@@ -67,14 +71,20 @@ def main(type, id, public_id, account_id, namespace_id, readwrite) -> None:
         qu = db_session.query(cls)
 
         if public_id:
-            qu = qu.filter(cls.public_id == public_id)
+            qu = qu.filter(
+                cls.public_id == public_id  # type: ignore[attr-defined]
+            )
         elif id:
-            qu = qu.filter(cls.id == id)
+            qu = qu.filter(cls.id == id)  # type: ignore[attr-defined]
 
         if account_id:
-            qu = qu.filter(cls.account_id == account_id)
+            qu = qu.filter(
+                cls.account_id == account_id  # type: ignore[attr-defined]
+            )
         elif namespace_id:
-            qu = qu.filter(cls.namespace_id == namespace_id)
+            qu = qu.filter(
+                cls.namespace_id == namespace_id  # type: ignore[attr-defined]
+            )
 
         obj = qu.one()  # noqa: F841
diff --git a/bin/inbox-api.py b/bin/inbox-api.py
index 26f276e0d..3dad7e8f2 100755
--- a/bin/inbox-api.py
+++ b/bin/inbox-api.py
@@ -2,10 +2,11 @@
 
 import os
 import sys
+from typing import Any
 
 import click
 import werkzeug.serving
-from setproctitle import setproctitle
+from setproctitle import setproctitle  # type: ignore[import-not-found]
 
 setproctitle("inbox-api")
 
@@ -27,7 +28,7 @@
 from inbox.logging import configure_logging, get_logger
 from inbox.util.startup import load_overrides
 
-syncback = None
+syncback: Any = None
 
 
 @click.command()
@@ -40,7 +41,7 @@
     "-c", "--config", default=None, help="Path to JSON configuration file."
) @click.option("-p", "--port", default=5555, help="Port to run flask app on.") -def main(prod, config, port) -> None: +def main(prod, config, port) -> None: # type: ignore[no-untyped-def] """Launch the Nylas API service.""" level = os.environ.get("LOGLEVEL", inbox_config.get("LOGLEVEL")) configure_logging(log_level=level) diff --git a/bin/inbox-auth.py b/bin/inbox-auth.py index e5301d8b8..8d1758a6b 100755 --- a/bin/inbox-auth.py +++ b/bin/inbox-auth.py @@ -4,7 +4,7 @@ import sys import click -from setproctitle import setproctitle +from setproctitle import setproctitle # type: ignore[import-not-found] setproctitle("inbox-auth") @@ -39,7 +39,9 @@ is_flag=False, help="Manually specify the provider instead of trying to detect it", ) -def main(email_address, reauth, target, provider) -> None: +def main( # type: ignore[no-untyped-def] + email_address, reauth, target, provider +) -> None: """Auth an email account.""" preflight() diff --git a/bin/inbox-console.py b/bin/inbox-console.py index 77d0577ee..662169aef 100755 --- a/bin/inbox-console.py +++ b/bin/inbox-console.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -from setproctitle import setproctitle +from setproctitle import setproctitle # type: ignore[import-not-found] setproctitle("inbox-console") @@ -25,7 +25,7 @@ @click.option( "-c", "--client", is_flag=True, help="Start a repl with an APIClient" ) -def console(email_address, client) -> None: +def console(email_address, client) -> None: # type: ignore[no-untyped-def] """REPL for Nylas.""" maybe_enable_rollbar() diff --git a/bin/inbox-start.py b/bin/inbox-start.py index f043a3bb2..c11442f81 100755 --- a/bin/inbox-start.py +++ b/bin/inbox-start.py @@ -8,7 +8,7 @@ import sys import click -import setproctitle +import setproctitle # type: ignore[import-not-found] # Check that the inbox package is installed. 
 # fail to provision the box appropriately; this check is a reasonable
@@ -72,7 +72,9 @@
     help="This process's number in the process group: a unique "
     "number satisfying 0 <= process_num < total_processes.",
 )
-def main(prod, enable_profiler, config, process_num) -> None:
+def main(  # type: ignore[no-untyped-def]
+    prod, enable_profiler, config, process_num
+) -> None:
     """Launch the Nylas sync service."""
     level = os.environ.get("LOGLEVEL", inbox_config.get("LOGLEVEL"))
     configure_logging(log_level=level)
diff --git a/bin/mysql-prompt.py b/bin/mysql-prompt.py
index 7093f0062..dfd6ed929 100755
--- a/bin/mysql-prompt.py
+++ b/bin/mysql-prompt.py
@@ -58,10 +58,10 @@ def main(shard_num: int | None, execute: str | None, batch: bool) -> None:
 
     process_arguments = [
         "mysql",
-        "-h" + creds["hostname"],
-        "-u" + creds["username"],
-        "-D" + creds["db_name"],
-        "-p" + creds["password"],
+        "-h" + creds["hostname"],  # type: ignore[operator]
+        "-u" + creds["username"],  # type: ignore[operator]
+        "-D" + creds["db_name"],  # type: ignore[operator]
+        "-p" + creds["password"],  # type: ignore[operator]
         "--safe-updates",
     ]
     if batch:
diff --git a/bin/purge-transaction-log.py b/bin/purge-transaction-log.py
index da9af4aaf..7f55921ec 100755
--- a/bin/purge-transaction-log.py
+++ b/bin/purge-transaction-log.py
@@ -27,7 +27,9 @@
 @click.option("--limit", type=int, default=1000)
 @click.option("--throttle", is_flag=True)
 @click.option("--dry-run", is_flag=True)
-def run(days_ago, limit, throttle, dry_run) -> None:
+def run(  # type: ignore[no-untyped-def]
+    days_ago, limit, throttle, dry_run
+) -> None:
     maybe_enable_rollbar()
 
     print("Python", sys.version, file=sys.stderr)
@@ -46,7 +48,9 @@ def run(days_ago, limit, throttle, dry_run) -> None:
     )
 
 
-def purge_old_transactions(host, days_ago, limit, throttle, dry_run) -> None:
+def purge_old_transactions(  # type: ignore[no-untyped-def]
+    host, days_ago, limit, throttle, dry_run
+) -> None:
     while True:
         for shard in host["SHARDS"]:
             # Ensure shard is explicitly not marked as disabled
diff --git a/bin/remove-message-attachments.py b/bin/remove-message-attachments.py
index 7e7453067..5e0e4665d 100755
--- a/bin/remove-message-attachments.py
+++ b/bin/remove-message-attachments.py
@@ -5,8 +5,8 @@
 from collections.abc import Iterable
 
 import click
-from sqlalchemy.orm import Query, joinedload
-from sqlalchemy.sql import func
+from sqlalchemy.orm import Query, joinedload  # type: ignore[import-untyped]
+from sqlalchemy.sql import func  # type: ignore[import-untyped]
 
 from inbox.logging import configure_logging, get_logger
 from inbox.models.block import Block
@@ -33,7 +33,7 @@ def find_blocks(
 ) -> "Iterable[tuple[Block, int]]":
     query = (
         Query([Block])
-        .options(joinedload(Block.parts))
+        .options(joinedload(Block.parts))  # type: ignore[attr-defined]
         .filter(Block.size > 0)  # empty blocks are not stored in S3
         .order_by(Block.id)
     )
@@ -102,7 +102,9 @@ def run(
             if check_existence:
                 data = blockstore.get_from_blockstore(block.data_sha256)
             else:
-                data = ...  # assume it exists, it's OK to delete non-existent data
+                data = (
+                    ...  # type: ignore[assignment]
+                )  # assume it exists, it's OK to delete non-existent data
 
             if data is None:
                 resolution = Resolution.NOT_PRESENT
@@ -116,8 +118,8 @@ def run(
                 block.created_at.date(),
                 resolution.value,
                 block.data_sha256,
-                block.size if data else None,
-                len(block.parts),
+                (block.size if data else None),
+                len(block.parts),  # type: ignore[attr-defined]
             )
 
             if resolution is Resolution.DELETE:
diff --git a/bin/restart-forgotten-accounts.py b/bin/restart-forgotten-accounts.py
index 7a4a41210..464229ae2 100755
--- a/bin/restart-forgotten-accounts.py
+++ b/bin/restart-forgotten-accounts.py
@@ -13,7 +13,7 @@
 configure_logging()
 log = get_logger()
 
-accounts_without_sync_host = set()
+accounts_without_sync_host = set()  # type: ignore[var-annotated]
 
 
 def check_accounts() -> None:
diff --git a/bin/set-desired-host.py b/bin/set-desired-host.py
index bdac3770f..283520172 100755
--- a/bin/set-desired-host.py
+++ b/bin/set-desired-host.py
@@ -13,7 +13,9 @@
 @click.option("--desired-host")
 @click.option("--dry-run", is_flag=True)
 @click.option("--toggle-sync", is_flag=True)
-def main(account_id, desired_host, dry_run, toggle_sync) -> None:
+def main(  # type: ignore[no-untyped-def]
+    account_id, desired_host, dry_run, toggle_sync
+) -> None:
     maybe_enable_rollbar()
 
     with global_session_scope() as db_session:
diff --git a/bin/set-throttled.py b/bin/set-throttled.py
index b16096512..4e1f9d04c 100755
--- a/bin/set-throttled.py
+++ b/bin/set-throttled.py
@@ -24,7 +24,7 @@ def print_usage() -> None:
     )
 
 
-def throttle(options) -> None:
+def throttle(options) -> None:  # type: ignore[no-untyped-def]
     account_id = int(options.account_id)
     with session_scope(account_id) as db_session:
         if options.account_id:
diff --git a/bin/stamp-db.py b/bin/stamp-db.py
index 83a9be98d..a9455afc3 100755
--- a/bin/stamp-db.py
+++ b/bin/stamp-db.py
@@ -12,7 +12,7 @@
 from inbox.error_handling import maybe_enable_rollbar
 
 
-def main(revision_id) -> None:
+def main(revision_id) -> None:  # type: ignore[no-untyped-def]
     maybe_enable_rollbar()
 
     alembic_ini_filename = os.environ.get("ALEMBIC_INI_PATH", "alembic.ini")
diff --git a/bin/supervisor.py b/bin/supervisor.py
index 5cbc62646..46567471c 100755
--- a/bin/supervisor.py
+++ b/bin/supervisor.py
@@ -68,8 +68,8 @@ def prepare_exit_after(
     if not exit_after:
         return
 
-    exit_after = exit_after.split(":")
-    exit_after_min, exit_after_max = int(exit_after[0]), int(exit_after[1])
+    exit_after = exit_after.split(":")  # type: ignore[assignment]
+    exit_after_min, exit_after_max = (int(exit_after[0]), int(exit_after[1]))
     exit_after_seconds = random.randint(
         exit_after_min * 60, exit_after_max * 60
     )
@@ -89,7 +89,9 @@ def perform_exit_after(process: subprocess.Popen[bytes], seconds: int) -> None:
     terminate(process)
 
 
-def terminate(process: subprocess.Popen[bytes], timeout: int = 30) -> int:
+def terminate(  # type: ignore[return]
+    process: subprocess.Popen[bytes], timeout: int = 30
+) -> int:
     """
     Terminate the given process.
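The `supervisor.py` hunk just above silences an `[assignment]` error caused by rebinding a `str` parameter to the `list[str]` result of `.split(":")`. A hedged sketch of the ignore-free alternative, assuming `exit_after` arrives as an optional `"min:max"` string of minutes:

```python
import random


def exit_after_seconds(exit_after: str | None) -> int | None:
    """Pick a random exit delay from a "min:max" minutes range."""
    if not exit_after:
        return None
    # Binding the split result to fresh names keeps the variable types
    # consistent, so no assignment ignore is needed.
    lo, hi = exit_after.split(":")
    return random.randint(int(lo) * 60, int(hi) * 60)
```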
diff --git a/bin/syncback-service.py b/bin/syncback-service.py
index 81968a3bb..a23fbf2be 100755
--- a/bin/syncback-service.py
+++ b/bin/syncback-service.py
@@ -11,7 +11,7 @@
 import sys
 
 import click
-from setproctitle import setproctitle
+from setproctitle import setproctitle  # type: ignore[import-not-found]
 
 from inbox.config import config as inbox_config
 from inbox.error_handling import maybe_enable_rollbar
@@ -49,7 +49,9 @@
     default=False,
     help="Enables the CPU profiler web API",
 )
-def main(prod, config, process_num, syncback_id, enable_profiler) -> None:
+def main(  # type: ignore[no-untyped-def]
+    prod, config, process_num, syncback_id, enable_profiler
+) -> None:
     """Launch the actions syncback service."""
     setproctitle(f"syncback-{process_num}")
 
@@ -87,7 +89,9 @@ def start() -> None:
         start()
     else:
         preflight()
-        from werkzeug.serving import run_with_reloader
+        from werkzeug.serving import (  # type: ignore[attr-defined]
+            run_with_reloader,
+        )
 
         run_with_reloader(start)
diff --git a/bin/syncback-stats.py b/bin/syncback-stats.py
index 403274099..5d3a2790c 100755
--- a/bin/syncback-stats.py
+++ b/bin/syncback-stats.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 import click
-from sqlalchemy import func
+from sqlalchemy import func  # type: ignore[import-untyped]
 
 from inbox.error_handling import maybe_enable_rollbar
 from inbox.ignition import engine_manager
diff --git a/bin/unschedule-account-syncs.py b/bin/unschedule-account-syncs.py
index 9a547be09..61ffb43dd 100755
--- a/bin/unschedule-account-syncs.py
+++ b/bin/unschedule-account-syncs.py
@@ -13,7 +13,9 @@
 @click.option("--number", type=int, help="how many accounts to unschedule")
 @click.argument("hostname")
 @click.argument("process", required=False, default=None)
-def main(dry_run, number, hostname, process) -> None:
+def main(  # type: ignore[no-untyped-def]
+    dry_run, number, hostname, process
+) -> None:
     """
     Unschedule all accounts assigned to a given sync host.
     Intended primarily for use when decomissioning sync instances or for
@@ -27,7 +29,10 @@ def main(dry_run, number, hostname, process) -> None:
             "You have not provided a --number option. This will "
             "unschedule ALL syncs on the host. Proceed? [Y/n] "
[Y/n] " ) - if raw_input(message).strip().lower() == "n": # noqa: F821 + if ( + raw_input(message).strip().lower() # type: ignore[name-defined] # noqa: F821 + == "n" + ): print("Will not proceed") return @@ -38,7 +43,10 @@ def main(dry_run, number, hostname, process) -> None: hostname ) ) - if raw_input(message).strip().lower() == "n": # noqa: F821 + if ( + raw_input(message).strip().lower() # type: ignore[name-defined] # noqa: F821 + == "n" + ): print("Bailing out") return diff --git a/bin/update-categories.py b/bin/update-categories.py index 7349edcca..0f1d4ca34 100755 --- a/bin/update-categories.py +++ b/bin/update-categories.py @@ -9,7 +9,7 @@ @click.command() @click.option("--shard_id", type=int) -def main(shard_id) -> None: +def main(shard_id) -> None: # type: ignore[no-untyped-def] maybe_enable_rollbar() if shard_id is not None: @@ -21,7 +21,9 @@ def main(shard_id) -> None: update_folders_and_labels_for_shard(key) -def update_categories_for_shard(shard_id) -> None: +def update_categories_for_shard( # type: ignore[no-untyped-def] + shard_id, +) -> None: print(f"Updating categories for shard {shard_id}") engine = engine_manager.engines[shard_id] @@ -35,7 +37,9 @@ def update_categories_for_shard(shard_id) -> None: engine.execute(query) -def update_folders_and_labels_for_shard(shard_id) -> None: +def update_folders_and_labels_for_shard( # type: ignore[no-untyped-def] + shard_id, +) -> None: print(f"Updating folders for shard {shard_id}") engine = engine_manager.engines[shard_id] diff --git a/inbox/actions/backends/generic.py b/inbox/actions/backends/generic.py index 81eda3852..caa962789 100644 --- a/inbox/actions/backends/generic.py +++ b/inbox/actions/backends/generic.py @@ -37,7 +37,7 @@ # * should add support for rolling back message.categories() on failure. 
diff --git a/inbox/actions/backends/generic.py b/inbox/actions/backends/generic.py
index 81eda3852..caa962789 100644
--- a/inbox/actions/backends/generic.py
+++ b/inbox/actions/backends/generic.py
@@ -37,7 +37,7 @@
 # * should add support for rolling back message.categories() on failure.
 
 
-def uids_by_folder(message_id, db_session):
+def uids_by_folder(message_id, db_session):  # type: ignore[no-untyped-def]
     results = (
         db_session.query(ImapUid.msg_uid, Folder.name)
         .join(Folder)
@@ -50,7 +50,7 @@ def uids_by_folder(message_id, db_session):
     return mapping
 
 
-def _create_email(account, message):
+def _create_email(account, message):  # type: ignore[no-untyped-def]
     blocks = [p.block for p in message.attachments]
     attachments = generate_attachments(message, blocks)
     from_name, from_email = message.from_addr[0]
@@ -71,7 +71,7 @@ def _create_email(account, message):
     return msg
 
 
-def _set_flag(
+def _set_flag(  # type: ignore[no-untyped-def]
     crispin_client, account_id, message_id, flag_name, is_add
 ) -> None:
     with session_scope(account_id) as db_session:
@@ -88,15 +88,21 @@ def _set_flag(
         crispin_client.conn.remove_flags(uids, [flag_name], silent=True)
 
 
-def set_remote_starred(crispin_client, account, message_id, starred) -> None:
+def set_remote_starred(  # type: ignore[no-untyped-def]
+    crispin_client, account, message_id, starred
+) -> None:
     _set_flag(crispin_client, account, message_id, "\\Flagged", starred)
 
 
-def set_remote_unread(crispin_client, account, message_id, unread) -> None:
+def set_remote_unread(  # type: ignore[no-untyped-def]
+    crispin_client, account, message_id, unread
+) -> None:
     _set_flag(crispin_client, account, message_id, "\\Seen", not unread)
 
 
-def remote_move(crispin_client, account_id, message_id, destination) -> None:
+def remote_move(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, message_id, destination
+) -> None:
     with session_scope(account_id) as db_session:
         uids_for_message = uids_by_folder(message_id, db_session)
         if not uids_for_message:
@@ -109,7 +115,9 @@ def remote_move(crispin_client, account_id, message_id, destination) -> None:
         crispin_client.delete_uids(uids)
 
 
-def remote_create_folder(crispin_client, account_id, category_id) -> None:
+def remote_create_folder(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, category_id
+) -> None:
     with session_scope(account_id) as db_session:
         category = db_session.query(Category).get(category_id)
         if category is None:
@@ -118,7 +126,7 @@ def remote_create_folder(crispin_client, account_id, category_id) -> None:
     crispin_client.conn.create_folder(display_name)
 
 
-def remote_update_folder(
+def remote_update_folder(  # type: ignore[no-untyped-def]
     crispin_client, account_id, category_id, old_name, new_name
 ) -> None:
     with session_scope(account_id) as db_session:
@@ -148,7 +156,9 @@ def remote_update_folder(
     folder.name = new_display_name
 
 
-def remote_delete_folder(crispin_client, account_id, category_id) -> None:
+def remote_delete_folder(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, category_id
+) -> None:
     with session_scope(account_id) as db_session:
         category = db_session.query(Category).get(category_id)
         if category is None:
@@ -168,7 +178,9 @@ def remote_delete_folder(crispin_client, account_id, category_id) -> None:
         db_session.commit()
 
 
-def remote_save_draft(crispin_client, account_id, message_id) -> None:
+def remote_save_draft(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, message_id
+) -> None:
     with session_scope(account_id) as db_session:
         account = db_session.query(Account).get(account_id)
         message = db_session.query(Message).get(message_id)
@@ -185,7 +197,7 @@ def remote_save_draft(crispin_client, account_id, message_id) -> None:
     crispin_client.save_draft(mimemsg)
 
 
-def remote_update_draft(
+def remote_update_draft(  # type: ignore[no-untyped-def]
     crispin_client, account_id, message_id, old_message_id_header
 ) -> None:
     with session_scope(account_id) as db_session:
@@ -233,7 +245,7 @@ def remote_update_draft(
     )
 
 
-def remote_delete_draft(
+def remote_delete_draft(  # type: ignore[no-untyped-def]
     crispin_client, account_id, nylas_uid, message_id_header
 ) -> None:
     if "drafts" not in crispin_client.folder_names():
@@ -245,7 +257,7 @@ def remote_delete_draft(
     crispin_client.delete_draft(message_id_header)
 
 
-def remote_delete_sent(
+def remote_delete_sent(  # type: ignore[no-untyped-def]
     crispin_client,
     account_id,
     message_id_header,
@@ -260,7 +272,9 @@ def remote_delete_sent(
     crispin_client.delete_sent_message(message_id_header, delete_multiple)
 
 
-def remote_save_sent(crispin_client, account_id, message_id) -> None:
+def remote_save_sent(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, message_id
+) -> None:
     with session_scope(account_id) as db_session:
         account = db_session.query(Account).get(account_id)
         message = db_session.query(Message).get(message_id)
diff --git a/inbox/actions/backends/gmail.py b/inbox/actions/backends/gmail.py
index 08bf36645..c67f0c902 100644
--- a/inbox/actions/backends/gmail.py
+++ b/inbox/actions/backends/gmail.py
@@ -3,7 +3,7 @@
 import contextlib
 from imaplib import IMAP4
 
-import imapclient
+import imapclient  # type: ignore[import-untyped]
 
 from inbox.actions.backends.generic import uids_by_folder
 from inbox.mailsync.backends.imap.generic import uidvalidity_cb
@@ -15,11 +15,11 @@
 __all__ = ["remote_create_label", "remote_update_label", "remote_delete_label"]
 
 
-def _encode_labels(labels):
+def _encode_labels(labels):  # type: ignore[no-untyped-def]
     return [imapclient.imap_utf7.encode(label) for label in labels]
 
 
-def remote_change_labels(
+def remote_change_labels(  # type: ignore[no-untyped-def]
     crispin_client, account_id, message_ids, removed_labels, added_labels
 ) -> None:
     uids_for_message: dict[str, list[str]] = {}
@@ -43,7 +43,9 @@ def remote_change_labels(
     )
 
 
-def remote_create_label(crispin_client, account_id, category_id) -> None:
+def remote_create_label(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, category_id
+) -> None:
     with session_scope(account_id) as db_session:
         category = db_session.query(Category).get(category_id)
         if category is None:
@@ -52,13 +54,15 @@ def remote_create_label(crispin_client, account_id, category_id) -> None:
     crispin_client.conn.create_folder(display_name)
 
 
-def remote_update_label(
+def remote_update_label(  # type: ignore[no-untyped-def]
     crispin_client, account_id, category_id, old_name, new_name
 ) -> None:
     crispin_client.conn.rename_folder(old_name, new_name)
 
 
-def remote_delete_label(crispin_client, account_id, category_id) -> None:
+def remote_delete_label(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, category_id
+) -> None:
     with session_scope(account_id) as db_session:
         category = db_session.query(Category).get(category_id)
         if category is None:
diff --git a/inbox/actions/base.py b/inbox/actions/base.py
index 6d55162c1..6a4654759 100644
--- a/inbox/actions/base.py
+++ b/inbox/actions/base.py
@@ -51,26 +51,34 @@
 log = get_logger()
 
 
-def can_handle_multiple_records(action_name):  # noqa: ANN201
+def can_handle_multiple_records(action_name):  # type: ignore[no-untyped-def] # noqa: ANN201
     return action_name == "change_labels"
 
 
-def mark_unread(crispin_client, account_id, message_id, args) -> None:
+def mark_unread(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, message_id, args
+) -> None:
     unread = args["unread"]
     set_remote_unread(crispin_client, account_id, message_id, unread)
 
 
-def mark_starred(crispin_client, account_id, message_id, args) -> None:
+def mark_starred(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, message_id, args
+) -> None:
     starred = args["starred"]
     set_remote_starred(crispin_client, account_id, message_id, starred)
 
 
-def move(crispin_client, account_id, message_id, args) -> None:
+def move(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, message_id, args
+) -> None:
     destination = args["destination"]
     remote_move(crispin_client, account_id, message_id, destination)
 
 
-def change_labels(crispin_client, account_id, message_ids, args) -> None:
+def change_labels(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, message_ids, args
+) -> None:
     added_labels = args["added_labels"]
     removed_labels = args["removed_labels"]
     remote_change_labels(
@@ -78,11 +86,15 @@ def change_labels(crispin_client, account_id, message_ids, args) -> None:
     )
 
 
-def create_folder(crispin_client, account_id, category_id) -> None:
+def create_folder(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, category_id
+) -> None:
     remote_create_folder(crispin_client, account_id, category_id)
 
 
-def update_folder(crispin_client, account_id, category_id, args) -> None:
+def update_folder(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, category_id, args
+) -> None:
     old_name = args["old_name"]
     new_name = args["new_name"]
     remote_update_folder(
@@ -90,15 +102,21 @@ def update_folder(crispin_client, account_id, category_id, args) -> None:
     )
 
 
-def delete_folder(crispin_client, account_id, category_id) -> None:
+def delete_folder(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, category_id
+) -> None:
     remote_delete_folder(crispin_client, account_id, category_id)
 
 
-def create_label(crispin_client, account_id, category_id) -> None:
+def create_label(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, category_id
+) -> None:
     remote_create_label(crispin_client, account_id, category_id)
 
 
-def update_label(crispin_client, account_id, category_id, args) -> None:
+def update_label(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, category_id, args
+) -> None:
     old_name = args["old_name"]
     new_name = args["new_name"]
     remote_update_label(
@@ -106,11 +124,15 @@ def update_label(crispin_client, account_id, category_id, args) -> None:
     )
 
 
-def delete_label(crispin_client, account_id, category_id) -> None:
+def delete_label(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, category_id
+) -> None:
     remote_delete_label(crispin_client, account_id, category_id)
 
 
-def save_draft(crispin_client, account_id, message_id, args) -> None:
+def save_draft(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, message_id, args
+) -> None:
     """Sync a new draft back to the remote backend."""
     with session_scope(account_id) as db_session:
         message = db_session.query(Message).get(message_id)
@@ -136,7 +158,9 @@ def save_draft(crispin_client, account_id, message_id, args) -> None:
     remote_save_draft(crispin_client, account_id, message_id)
 
 
-def update_draft(crispin_client, account_id, message_id, args) -> None:
+def update_draft(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, message_id, args
+) -> None:
     """Sync an updated draft back to the remote backend."""
     with session_scope(account_id) as db_session:
         message = db_session.query(Message).get(message_id)
@@ -166,7 +190,9 @@ def update_draft(crispin_client, account_id, message_id, args) -> None:
     )
 
 
-def delete_draft(crispin_client, account_id, draft_id, args) -> None:
+def delete_draft(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, draft_id, args
+) -> None:
     """
     Delete a draft from the remote backend. `args` should contain an
     `nylas_uid` or a `message_id_header` key. This is used to find the draft on
@@ -181,7 +207,9 @@ def delete_draft(crispin_client, account_id, draft_id, args) -> None:
     )
 
 
-def save_sent_email(crispin_client, account_id, message_id) -> None:
+def save_sent_email(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, message_id
+) -> None:
     """
     Create an email on the remote backend. Generic providers expect us to
     create a copy of the message in the sent folder.
     """
     remote_save_sent(crispin_client, account_id, message_id)
 
 
-def delete_sent_email(crispin_client, account_id, message_id, args) -> None:
+def delete_sent_email(  # type: ignore[no-untyped-def]
+    crispin_client, account_id, message_id, args
+) -> None:
     """
     Delete an email on the remote backend, in the sent folder.
     """
diff --git a/inbox/api/err.py b/inbox/api/err.py
index 11f29a039..635b30e8a 100644
--- a/inbox/api/err.py
+++ b/inbox/api/err.py
@@ -1,7 +1,7 @@
 import sys
 import traceback
 
-import rollbar
+import rollbar  # type: ignore[import-untyped]
 from flask import jsonify, make_response, request
 
 from inbox.logging import create_error_log_context, get_logger
@@ -11,11 +11,11 @@
 from inbox.config import is_live_env  # noqa: E402
 
 
-def get_request_uid(headers):  # noqa: ANN201
+def get_request_uid(headers):  # type: ignore[no-untyped-def] # noqa: ANN201
     return headers.get("X-Unique-ID")
 
 
-def log_exception(exc_info, **kwargs) -> None:
+def log_exception(exc_info, **kwargs) -> None:  # type: ignore[no-untyped-def]
     """
     Add exception info to the log context for the request.
@@ -58,7 +58,7 @@ class InputError(APIException):
 
     status_code = 400
 
-    def __init__(self, message) -> None:
+    def __init__(self, message) -> None:  # type: ignore[no-untyped-def]
        self.message = message
         super().__init__(message)
 
@@ -68,7 +68,7 @@ class NotFoundError(APIException):
 
     status_code = 404
 
-    def __init__(self, message) -> None:
+    def __init__(self, message) -> None:  # type: ignore[no-untyped-def]
         self.message = message
         super().__init__(message)
 
@@ -76,7 +76,7 @@ def __init__(self, message) -> None:
 class ConflictError(APIException):
     status_code = 409
 
-    def __init__(self, message) -> None:
+    def __init__(self, message) -> None:  # type: ignore[no-untyped-def]
         self.message = message
         super().__init__(message)
 
@@ -110,7 +110,7 @@ class AccountDoesNotExistError(APIException):
 
     message = "The account does not exist."
 
 
-def err(http_code, message, **kwargs):  # noqa: ANN201
+def err(http_code, message, **kwargs):  # type: ignore[no-untyped-def] # noqa: ANN201
     """Handle unexpected errors, including sending the traceback to Rollbar."""
     log_exception(sys.exc_info(), user_error_message=message, **kwargs)
     resp = {"type": "api_error", "message": message}
diff --git a/inbox/api/filtering.py b/inbox/api/filtering.py
index 2b8b9bfbd..9e5899566 100644
--- a/inbox/api/filtering.py
+++ b/inbox/api/filtering.py
@@ -1,5 +1,15 @@
-from sqlalchemy import and_, asc, bindparam, desc, func, or_
-from sqlalchemy.orm import contains_eager, subqueryload
+from sqlalchemy import (  # type: ignore[import-untyped]
+    and_,
+    asc,
+    bindparam,
+    desc,
+    func,
+    or_,
+)
+from sqlalchemy.orm import (  # type: ignore[import-untyped]
+    contains_eager,
+    subqueryload,
+)
 
 from inbox.api.err import InputError
 from inbox.api.validation import valid_public_id
@@ -19,7 +29,7 @@
 from inbox.models.event import RecurringEvent
 
 
-def contact_subquery(  # noqa: ANN201
+def contact_subquery(  # type: ignore[no-untyped-def] # noqa: ANN201
     db_session, namespace_id, email_address, field
 ):
     return (
@@ -35,7 +45,7 @@ def contact_subquery(  # noqa: ANN201
     )
 
 
-def threads(  # noqa: ANN201
+def threads(  # type: ignore[no-untyped-def] # noqa: ANN201
     namespace_id,
     subject,
     from_addr,
@@ -119,7 +129,9 @@ def threads(  # noqa: ANN201
             .join(MessageContactAssociation)
             .join(Contact, MessageContactAssociation.contact_id == Contact.id)
             .filter(
-                Contact.email_address.in_(any_email),
+                Contact.email_address.in_(  # type: ignore[attr-defined]
+                    any_email
+                ),
                 Contact.namespace_id == namespace_id,
             )
             .subquery()
@@ -154,7 +166,7 @@ def threads(  # noqa: ANN201
         category_query = (
             db_session.query(Message.thread_id)
             .prefix_with("STRAIGHT_JOIN")
-            .join(Message.messagecategories)
+            .join(Message.messagecategories)  # type: ignore[attr-defined]
             .join(MessageCategory.category)
             .filter(
                 Category.namespace_id == namespace_id, or_(*category_filters)
@@ -205,7 +217,7 @@ def threads(  # noqa: ANN201
     return query.all()
 
 
-def messages_or_drafts(  # noqa: ANN201
+def messages_or_drafts(  # type: ignore[no-untyped-def] # noqa: ANN201
     namespace_id,
     drafts,
     subject,
@@ -414,7 +426,9 @@ def messages_or_drafts(  # noqa: ANN201
         db_session.query(MessageContactAssociation.message_id)
         .join(Contact, MessageContactAssociation.contact_id == Contact.id)
         .filter(
-            Contact.email_address.in_(any_email),
+            Contact.email_address.in_(  # type: ignore[attr-defined]
+                any_email
+            ),
             Contact.namespace_id == bindparam("namespace_id"),
         )
         .subquery()
@@ -451,7 +465,7 @@ def messages_or_drafts(  # noqa: ANN201
             pass
         query = (
             query.prefix_with("STRAIGHT_JOIN")
-            .join(Message.messagecategories)
+            .join(Message.messagecategories)  # type: ignore[attr-defined]
             .join(MessageCategory.category)
             .filter(
                 Category.namespace_id == namespace_id, or_(*category_filters)
@@ -477,18 +491,20 @@ def messages_or_drafts(  # noqa: ANN201
     # thread table. We should eventually try to simplify this.
     query = query.options(
         contains_eager(Message.thread),
-        subqueryload(Message.messagecategories).joinedload(
-            "category", "created_at"
+        subqueryload(
+            Message.messagecategories  # type: ignore[attr-defined]
+        ).joinedload("category", "created_at"),
+        subqueryload(Message.parts).joinedload(  # type: ignore[attr-defined]
+            Part.block
         ),
-        subqueryload(Message.parts).joinedload(Part.block),
-        subqueryload(Message.events),
+        subqueryload(Message.events),  # type: ignore[attr-defined]
     )
 
     prepared = query.params(**param_dict)
     return prepared.all()
 
 
-def files(  # noqa: ANN201
+def files(  # type: ignore[no-untyped-def] # noqa: ANN201
     namespace_id,
     message_public_id,
     filename,
@@ -545,7 +561,7 @@ def files(  # noqa: ANN201
     return query.all()
 
 
-def filter_event_query(  # noqa: ANN201
+def filter_event_query(  # type: ignore[no-untyped-def] # noqa: ANN201
     query,
     event_cls,
     namespace_id,
@@ -586,7 +602,7 @@ def filter_event_query(  # noqa: ANN201
     return query
 
 
-def recurring_events(  # noqa: ANN201
+def recurring_events(  # type: ignore[no-untyped-def] # noqa: ANN201
     filters,
     starts_before,
     starts_after,
@@ -644,7 +660,7 @@ def recurring_events(  # noqa: ANN201
     return recur_instances
 
 
-def events(  # noqa: ANN201
+def events(  # type: ignore[no-untyped-def] # noqa: ANN201
     namespace_id,
     event_public_id,
     calendar_public_id,
@@ -824,7 +840,7 @@ def events(  # noqa: ANN201
     return all_events
 
 
-def messages_for_contact_scores(  # noqa: ANN201
+def messages_for_contact_scores(  # type: ignore[no-untyped-def] # noqa: ANN201
     db_session, namespace_id, starts_after=None
 ):
     query = (
diff --git a/inbox/api/kellogs.py b/inbox/api/kellogs.py
index b94de2de1..f3acd3965 100644
--- a/inbox/api/kellogs.py
+++ b/inbox/api/kellogs.py
@@ -2,7 +2,7 @@
 import datetime
 from json import JSONEncoder, dumps
 
-import arrow
+import arrow  # type: ignore[import-untyped]
 from flask import Response
 
 from inbox.events.timezones import timezones_table
@@ -30,13 +30,13 @@
 log = get_logger()
 
 
-def format_address_list(addresses):  # noqa: ANN201
+def format_address_list(addresses):  # type: ignore[no-untyped-def] # noqa: ANN201
     if addresses is None:
         return []
     return [{"name": name, "email": email} for name, email in addresses]
 
 
-def format_categories(categories):  # noqa: ANN201
+def format_categories(categories):  # type: ignore[no-untyped-def] # noqa: ANN201
     if categories is None:
         return []
     return [
@@ -50,7 +50,9 @@ def format_categories(categories):  # noqa: ANN201
     ]
 
 
-def format_messagecategories(messagecategories):  # noqa: ANN201
+def format_messagecategories(  # type: ignore[no-untyped-def] # noqa: ANN201
+    messagecategories,
+):
     if messagecategories is None:
         return []
     return [
@@ -65,7 +67,7 @@ def format_messagecategories(messagecategories):  # noqa: ANN201
     ]
 
 
-def format_phone_numbers(phone_numbers):  # noqa: ANN201
+def format_phone_numbers(phone_numbers):  # type: ignore[no-untyped-def] # noqa: ANN201
     formatted_phone_numbers = []
     for number in phone_numbers:
         formatted_phone_numbers.append(
@@ -74,7 +76,7 @@ def format_phone_numbers(phone_numbers):  # noqa: ANN201
     return formatted_phone_numbers
 
 
-def encode(  # noqa: ANN201
+def encode(  # type: ignore[no-untyped-def] # noqa: ANN201
     obj, namespace_public_id=None, expand: bool = False, is_n1: bool = False
 ):
     try:
@@ -91,7 +93,7 @@ def encode(  # noqa: ANN201
         raise
 
 
-def _convert_timezone_to_iana_tz(original_tz):
+def _convert_timezone_to_iana_tz(original_tz):  # type: ignore[no-untyped-def]
     if original_tz is None:
         return None
 
@@ -102,7 +104,7 @@ def _convert_timezone_to_iana_tz(original_tz):
     return original_tz
 
 
-def _encode(  # noqa: D417
+def _encode(  # type: ignore[no-untyped-def] # noqa: D417
     obj, namespace_public_id=None, expand: bool = False, is_n1: bool = False
 ):
     """
@@ -124,10 +126,10 @@ def _encode(  # noqa: D417
 
     """  # noqa: D401
 
-    def _get_namespace_public_id(obj):
+    def _get_namespace_public_id(obj):  # type: ignore[no-untyped-def]
         return namespace_public_id or obj.namespace.public_id
 
-    def _format_participant_data(participant):
+    def _format_participant_data(participant):  # type: ignore[no-untyped-def]
         """
         Event.participants is a JSON blob which may contain internal data.
         This function returns a dict with only the data we want to make
@@ -139,7 +141,7 @@ def _format_participant_data(participant):
 
         return dct
 
-    def _get_lowercase_class_name(obj):
+    def _get_lowercase_class_name(obj):  # type: ignore[no-untyped-def]
         return type(obj).__name__.lower()
 
     # Flask's jsonify() doesn't handle datetimes or json arrays as primary
@@ -204,10 +206,14 @@ def _get_lowercase_class_name(obj):
             "unread": not obj.is_read,
             "starred": obj.is_starred,
             "files": obj.api_attachment_metadata,
-            "events": [encode(e) for e in obj.events],
+            "events": [
+                encode(e) for e in obj.events  # type: ignore[attr-defined]
+            ],
         }
 
-        categories = format_messagecategories(obj.messagecategories)
+        categories = format_messagecategories(
+            obj.messagecategories  # type: ignore[attr-defined]
+        )
         if obj.namespace.account.category_type == "folder":
             resp["folder"] = categories[0] if categories else None
         else:
@@ -257,7 +263,9 @@ def _get_lowercase_class_name(obj):
 
         if not expand:
             base["message_ids"] = [
-                m.public_id for m in obj.messages if not m.is_draft
+                m.public_id
+                for m in obj.messages  # type: ignore[attr-defined]
+                if not m.is_draft
             ]
             base["draft_ids"] = [m.public_id for m in obj.drafts]
             return base
@@ -265,7 +273,7 @@ def _get_lowercase_class_name(obj):
         # Expand messages within threads
         all_expanded_messages = []
         all_expanded_drafts = []
-        for msg in obj.messages:
+        for msg in obj.messages:  # type: ignore[attr-defined]
             resp = {
                 "id": msg.public_id,
                 "object": "message",
@@ -318,7 +326,9 @@ def _get_lowercase_class_name(obj):
             "account_id": _get_namespace_public_id(obj),
             "name": obj.name,
             "email": obj.email_address,
-            "phone_numbers": format_phone_numbers(obj.phone_numbers),
+            "phone_numbers": format_phone_numbers(
+                obj.phone_numbers  # type: ignore[attr-defined]
+            ),
         }
 
     elif isinstance(obj, Event):
@@ -395,15 +405,24 @@ def _get_lowercase_class_name(obj):
             "size": obj.size,
             "filename": obj.filename,
         }
-        if len(obj.parts):
+        if len(obj.parts):  # type: ignore[attr-defined]
             # if obj is actually a message attachment (and not merely an
             # uploaded file), set additional properties
             resp.update(
-                {"message_ids": [p.message.public_id for p in obj.parts]}
+                {
+                    "message_ids": [
+                        p.message.public_id
+                        for p in obj.parts  # type: ignore[attr-defined]
+                    ]
+                }
             )
 
             content_ids = list(
-                {p.content_id for p in obj.parts if p.content_id is not None}
+                {
+                    p.content_id
+                    for p in obj.parts  # type: ignore[attr-defined]
+                    if p.content_id is not None
+                }
             )
             content_id = None
             if len(content_ids) > 0:
@@ -454,7 +473,7 @@ class APIEncoder:
 
     """
 
-    def __init__(
+    def __init__(  # type: ignore[no-untyped-def]
         self,
         namespace_public_id=None,
         expand: bool = False,
@@ -464,11 +483,11 @@ def __init__(
             namespace_public_id, expand, is_n1=is_n1
         )
 
-    def _encoder_factory(
+    def _encoder_factory(  # type: ignore[no-untyped-def]
         self, namespace_public_id, expand, is_n1: bool = False
     ):
         class InternalEncoder(JSONEncoder):
-            def default(self, obj):
+            def default(self, obj):  # type: ignore[no-untyped-def]
                 custom_representation = encode(
                     obj, namespace_public_id, expand=expand, is_n1=is_n1
                 )
@@ -479,7 +498,9 @@ def default(self, obj):
 
         return InternalEncoder
 
-    def cereal(self, obj, pretty: bool = False):  # noqa: ANN201, D417
+    def cereal(  # type: ignore[no-untyped-def] # noqa: ANN201, D417
+        self, obj, pretty: bool = False
+    ):
         """
         Returns the JSON string representation of obj.
@@ -505,7 +526,7 @@ def cereal(self, obj, pretty: bool = False):  # noqa: ANN201, D417
         )
         return dumps(obj, cls=self.encoder_class)
 
-    def jsonify(self, obj):  # noqa: ANN201, D417
+    def jsonify(self, obj):  # type: ignore[no-untyped-def] # noqa: ANN201, D417
         """
         Returns a Flask Response object encapsulating the JSON
         representation of obj.
diff --git a/inbox/api/metrics_api.py b/inbox/api/metrics_api.py
index 92200daa2..fa5bb5af3 100644
--- a/inbox/api/metrics_api.py
+++ b/inbox/api/metrics_api.py
@@ -3,8 +3,8 @@
 from typing import Any
 
 from flask import Blueprint, request
-from sqlalchemy.orm import joinedload
-from sqlalchemy.orm.exc import NoResultFound
+from sqlalchemy.orm import joinedload  # type: ignore[import-untyped]
+from sqlalchemy.orm.exc import NoResultFound  # type: ignore[import-untyped]
 
 from inbox.api.err import InputError
 from inbox.api.kellogs import APIEncoder
@@ -21,7 +21,7 @@
 app = Blueprint("metrics_api", __name__, url_prefix="/metrics")
 
 
-def _get_calendar_data(db_session, namespace):
+def _get_calendar_data(db_session, namespace):  # type: ignore[no-untyped-def]
     calendars = db_session.query(Calendar)
     if namespace:
         calendars = calendars.filter_by(namespace_id=namespace.id)
@@ -55,7 +55,7 @@ def _get_calendar_data(db_session, namespace):
     return calendar_data
 
 
-def _get_folder_data(db_session, accounts):
+def _get_folder_data(db_session, accounts):  # type: ignore[no-untyped-def]
     folder_sync_statuses = db_session.query(ImapFolderSyncStatus)
     # This assumes that the only cases for metrics we have is 1) fetching
     # metrics for a specific account, and 2) fetching metrics for all accounts.
@@ -93,7 +93,7 @@ def _get_folder_data(db_session, accounts):
 
 
 @app.route("/")
-def index():  # noqa: ANN201
+def index():  # type: ignore[no-untyped-def] # noqa: ANN201
     with global_session_scope() as db_session:
         if "namespace_id" in request.args:
             try:
@@ -114,7 +114,9 @@ def index():  # noqa: ANN201
             )
 
             if namespace:
-                accounts = accounts.filter(Account.namespace == namespace)
+                accounts = accounts.filter(
+                    Account.namespace == namespace  # type: ignore[attr-defined]
+                )
             else:
                 # Get all account IDs that aren't deleted
                 account_ids = [
@@ -223,8 +225,8 @@ def index():  # noqa: ANN201
                 "namespace_private_id": account.namespace.id,
                 "account_id": account.public_id,
                 "namespace_id": account.namespace.public_id,
-                "events_alive": events_alive,
-                "email_alive": email_alive,
+                "events_alive": events_alive,  # type: ignore[possibly-undefined]
+                "email_alive": email_alive,  # type: ignore[possibly-undefined]
                 "alive": alive,
                 "email_initial_sync": email_initial_sync,
                 "events_initial_sync": events_initial_sync,
@@ -264,7 +266,7 @@ def index():  # noqa: ANN201
 
 
 @app.route("/global-deltas")
-def global_deltas():  # noqa: ANN201
+def global_deltas():  # type: ignore[no-untyped-def] # noqa: ANN201
     """
     Return the namespaces with recent transactions.
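Two distinct import codes recur throughout this diff: `import-untyped` for packages that are installed but expose no type information (the SQLAlchemy 1.x, rollbar, and arrow imports above), and `import-not-found` for modules mypy cannot locate at all in the type-check environment (`setproctitle` under the `--no-deps` install, or the optional EAS backend). A sketch of the distinction, mirroring lines from the diff rather than runnable standalone code:

```python
# Installed, but ships neither a py.typed marker nor stubs, so mypy treats
# the module as untyped:
from sqlalchemy import func  # type: ignore[import-untyped]

# Not importable at all where mypy runs (optional, possibly absent
# dependency); the runtime code guards this import with try/ImportError:
from inbox.models.backends.eas import (  # type: ignore[import-not-found]
    EASFolderSyncStatus,
)
```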
diff --git a/inbox/api/ns_api.py b/inbox/api/ns_api.py
index 4c9ab192a..37c071094 100644
--- a/inbox/api/ns_api.py
+++ b/inbox/api/ns_api.py
@@ -19,11 +19,14 @@
     stream_with_context,
 )
 from flask import jsonify as flask_jsonify
-from flask_restful import reqparse
-from sqlalchemy import asc, func
-from sqlalchemy.exc import OperationalError
-from sqlalchemy.orm import joinedload, load_only
-from sqlalchemy.orm.exc import NoResultFound
+from flask_restful import reqparse  # type: ignore[import-untyped]
+from sqlalchemy import asc, func  # type: ignore[import-untyped]
+from sqlalchemy.exc import OperationalError  # type: ignore[import-untyped]
+from sqlalchemy.orm import (  # type: ignore[import-untyped]
+    joinedload,
+    load_only,
+)
+from sqlalchemy.orm.exc import NoResultFound  # type: ignore[import-untyped]
 
 import inbox.contacts.crud
 from inbox.actions.backends.generic import remote_delete_sent
@@ -124,7 +127,9 @@
 with contextlib.suppress(ImportError):
     # ImportError: Only important for EAS search failures, so shouldn't trigger
     # test failure.
-    from inbox.util.eas.codes import STORE_STATUS_CODES
+    from inbox.util.eas.codes import (  # type: ignore[import-not-found]
+        STORE_STATUS_CODES,
+    )
 
 from typing import Never
 
@@ -139,7 +144,7 @@
 app = Blueprint("namespace_api", __name__, url_prefix="")
-app.log_exception = log_exception
+app.log_exception = log_exception  # type: ignore[attr-defined]
 
 # Configure mimetype -> extension map
 # TODO perhaps expand to encompass non-standard mimetypes too
@@ -155,7 +160,7 @@
     if not x or x.startswith("#"):
         continue
     m = x.split()
-    mime_type, extensions = m[0], m[1:]
+    mime_type, extensions = (m[0], m[1:])
     assert extensions, "Must have at least one extension per mimetype"
     common_extensions[mime_type.lower()] = extensions[0]
@@ -245,7 +250,7 @@ def before_remote_request() -> None:
 
 
 @app.after_request
-def finish(response):  # noqa: ANN201
+def finish(response):  # type: ignore[no-untyped-def] # noqa: ANN201
     if response.status_code == 200 and hasattr(g, "db_session"):  # be cautious
         g.db_session.commit()
     if hasattr(g, "db_session"):
@@ -254,9 +259,12 @@ def finish(response):  # noqa: ANN201
 
 
 @app.errorhandler(OperationalError)
-def handle_operational_error(error):  # noqa: ANN201
+def handle_operational_error(error):  # type: ignore[no-untyped-def] # noqa: ANN201
     rule = request.url_rule
-    if "send" in rule.rule and "rsvp" not in rule.rule:
+    if (
+        "send" in rule.rule  # type: ignore[union-attr]
+        and "rsvp" not in rule.rule  # type: ignore[union-attr]
+    ):
         message = "A temporary database error prevented us from serving this request. Your message has NOT been sent. Please try again in a few minutes."
     else:
         message = "A temporary database error prevented us from serving this request. Please try again."
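The `[union-attr]` ignores here exist because Flask types `request.url_rule` as `Optional[Rule]`: it is `None` when no route matched. An alternative sketch that narrows the Optional once instead of ignoring each attribute access:

```python
from flask import request


def current_rule_string() -> str:
    # Explicitly narrowing Optional[Rule] satisfies mypy without any
    # [union-attr] ignore on the .rule accesses.
    if request.url_rule is None:
        raise RuntimeError("no URL rule matched this request")
    return request.url_rule.rule
```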
@@ -268,7 +276,7 @@ def handle_operational_error(error):  # noqa: ANN201
 
 
 @app.errorhandler(NotImplementedError)
-def handle_not_implemented_error(error):  # noqa: ANN201
+def handle_not_implemented_error(error):  # type: ignore[no-untyped-def] # noqa: ANN201
     request.environ["log_context"]["error"] = "NotImplementedError"
     response = flask_jsonify(
         message="API endpoint not yet implemented", type="api_error"
     )
@@ -278,7 +286,7 @@ def handle_not_implemented_error(error):  # noqa: ANN201
 
 
 @app.errorhandler(APIException)
-def handle_input_error(error):  # noqa: ANN201
+def handle_input_error(error):  # type: ignore[no-untyped-def] # noqa: ANN201
     # these "errors" are normal, so we don't need to save a traceback
     request.environ["log_context"]["error"] = error.__class__.__name__
     request.environ["log_context"]["error_message"] = error.message
@@ -290,7 +298,7 @@ def handle_input_error(error):  # noqa: ANN201
 
 
 @app.errorhandler(Exception)
-def handle_generic_error(error):  # noqa: ANN201
+def handle_generic_error(error):  # type: ignore[no-untyped-def] # noqa: ANN201
     log_exception(sys.exc_info())
     response = flask_jsonify(
         message="An internal error occured. If this issue persists, please contact support@nylas.com and include this request_uid: {}".format(
@@ -302,7 +310,7 @@ def handle_generic_error(error):  # noqa: ANN201
 
 
 @app.route("/account")
-def one_account():  # noqa: ANN201
+def one_account():  # type: ignore[no-untyped-def] # noqa: ANN201
     g.parser.add_argument("view", type=view, location="args")
     args = strict_parse_args(g.parser, request.args)
     # Use a new encoder object with the expand parameter set.
@@ -314,7 +322,7 @@ def one_account():  # noqa: ANN201
 
 # Sync status (enable/disable account / throttling) #
 @app.route("/status/", methods=["GET", "PUT"])
-def status():  # noqa: ANN201
+def status():  # type: ignore[no-untyped-def] # noqa: ANN201
     account = g.namespace.account
 
     # Don't allow resuming accounts marked for deletion.
@@ -344,7 +352,7 @@ def status():  # noqa: ANN201
 
 # Threads #
 @app.route("/threads/")
-def thread_query_api():  # noqa: ANN201
+def thread_query_api():  # type: ignore[no-untyped-def] # noqa: ANN201
     g.parser.add_argument("subject", type=bounded_str, location="args")
     g.parser.add_argument("to", type=bounded_str, location="args")
     g.parser.add_argument("from", type=bounded_str, location="args")
@@ -403,7 +411,7 @@ def thread_query_api():  # noqa: ANN201
 
 
 @app.route("/threads/search", methods=["GET"])
-def thread_search_api():  # noqa: ANN201
+def thread_search_api():  # type: ignore[no-untyped-def] # noqa: ANN201
     g.parser.add_argument("q", type=bounded_str, location="args")
     args = strict_parse_args(g.parser, request.args)
     if not args["q"]:
@@ -430,7 +438,7 @@ def thread_search_api():  # noqa: ANN201
 
 
 @app.route("/threads/search/streaming", methods=["GET"])
-def thread_streaming_search_api():  # noqa: ANN201
+def thread_streaming_search_api():  # type: ignore[no-untyped-def] # noqa: ANN201
     g.parser.add_argument("q", type=bounded_str, location="args")
     args = strict_parse_args(g.parser, request.args)
     if not args["q"]:
@@ -458,7 +466,7 @@ def thread_streaming_search_api():  # noqa: ANN201
 
 
 @app.route("/threads/<public_id>")
-def thread_api(public_id):  # noqa: ANN201
+def thread_api(public_id):  # type: ignore[no-untyped-def] # noqa: ANN201
     g.parser.add_argument("view", type=view, location="args")
     args = strict_parse_args(g.parser, request.args)
     # Use a new encoder object with the expand parameter set.
@@ -485,7 +493,7 @@ def thread_api(public_id): # noqa: ANN201 # Update thread # @app.route("/threads/", methods=["PUT", "PATCH"]) -def thread_api_update(public_id): # noqa: ANN201 +def thread_api_update(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 try: valid_public_id(public_id) thread = ( @@ -516,7 +524,7 @@ def thread_api_update(public_id): # noqa: ANN201 # Delete thread # @app.route("/threads/", methods=["DELETE"]) -def thread_api_delete(public_id) -> Never: +def thread_api_delete(public_id) -> Never: # type: ignore[no-untyped-def] """Moves the thread to the trash""" # noqa: D401 raise NotImplementedError @@ -525,7 +533,7 @@ def thread_api_delete(public_id) -> Never: # Messages ## @app.route("/messages/") -def message_query_api(): # noqa: ANN201 +def message_query_api(): # type: ignore[no-untyped-def] # noqa: ANN201 g.parser.add_argument("subject", type=bounded_str, location="args") g.parser.add_argument("to", type=bounded_str, location="args") g.parser.add_argument("from", type=bounded_str, location="args") @@ -585,7 +593,7 @@ def message_query_api(): # noqa: ANN201 @app.route("/messages/search", methods=["GET"]) -def message_search_api(): # noqa: ANN201 +def message_search_api(): # type: ignore[no-untyped-def] # noqa: ANN201 g.parser.add_argument("q", type=bounded_str, location="args") args = strict_parse_args(g.parser, request.args) if not args["q"]: @@ -612,7 +620,7 @@ def message_search_api(): # noqa: ANN201 @app.route("/messages/search/streaming", methods=["GET"]) -def message_streaming_search_api(): # noqa: ANN201 +def message_streaming_search_api(): # type: ignore[no-untyped-def] # noqa: ANN201 g.parser.add_argument("q", type=bounded_str, location="args") args = strict_parse_args(g.parser, request.args) if not args["q"]: @@ -640,7 +648,7 @@ def message_streaming_search_api(): # noqa: ANN201 @app.route("/messages/", methods=["GET"]) -def message_read_api(public_id): # noqa: ANN201 +def message_read_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 g.parser.add_argument("view", type=view, location="args") args = strict_parse_args(g.parser, request.args) encoder = APIEncoder(g.namespace.public_id, args["view"] == "expanded") @@ -722,7 +730,7 @@ def message_read_api(public_id): # noqa: ANN201 @app.route("/messages/", methods=["PUT", "PATCH"]) -def message_update_api(public_id): # noqa: ANN201 +def message_update_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 try: valid_public_id(public_id) message = ( @@ -751,9 +759,9 @@ def message_update_api(public_id): # noqa: ANN201 # Folders / Labels @app.route("/folders") @app.route("/labels") -def folders_labels_query_api(): # noqa: ANN201 +def folders_labels_query_api(): # type: ignore[no-untyped-def] # noqa: ANN201 category_type = g.namespace.account.category_type - rule = request.url_rule.rule + rule = request.url_rule.rule # type: ignore[union-attr] valid_category_type(category_type, rule) g.parser.add_argument("view", type=bounded_str, location="args") @@ -780,18 +788,18 @@ def folders_labels_query_api(): # noqa: ANN201 @app.route("/folders/") -def folder_api(public_id): # noqa: ANN201 +def folder_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 return folders_labels_api_impl(public_id) @app.route("/labels/") -def label_api(public_id): # noqa: ANN201 +def label_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 return folders_labels_api_impl(public_id) -def folders_labels_api_impl(public_id): # noqa: ANN201 +def folders_labels_api_impl(public_id): # type: 
ignore[no-untyped-def] # noqa: ANN201 category_type = g.namespace.account.category_type - rule = request.url_rule.rule + rule = request.url_rule.rule # type: ignore[union-attr] valid_category_type(category_type, rule) valid_public_id(public_id) try: @@ -811,9 +819,9 @@ def folders_labels_api_impl(public_id): # noqa: ANN201 @app.route("/folders", methods=["POST"]) @app.route("/labels", methods=["POST"]) -def folders_labels_create_api(): # noqa: ANN201 +def folders_labels_create_api(): # type: ignore[no-untyped-def] # noqa: ANN201 category_type = g.namespace.account.category_type - rule = request.url_rule.rule + rule = request.url_rule.rule # type: ignore[union-attr] valid_category_type(category_type, rule) data = request.get_json(force=True) display_name = data.get("display_name") @@ -871,9 +879,9 @@ def folders_labels_create_api(): # noqa: ANN201 @app.route("/folders/", methods=["PUT", "PATCH"]) @app.route("/labels/", methods=["PUT", "PATCH"]) -def folder_label_update_api(public_id): # noqa: ANN201 +def folder_label_update_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 category_type = g.namespace.account.category_type - rule = request.url_rule.rule + rule = request.url_rule.rule # type: ignore[union-attr] valid_category_type(category_type, rule) valid_public_id(public_id) try: @@ -939,9 +947,9 @@ def folder_label_update_api(public_id): # noqa: ANN201 @app.route("/folders/", methods=["DELETE"]) @app.route("/labels/", methods=["DELETE"]) -def folder_label_delete_api(public_id): # noqa: ANN201 +def folder_label_delete_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 category_type = g.namespace.account.category_type - rule = request.url_rule.rule + rule = request.url_rule.rule # type: ignore[union-attr] valid_category_type(category_type, rule) valid_public_id(public_id) try: @@ -1005,7 +1013,7 @@ def folder_label_delete_api(public_id): # noqa: ANN201 # Contacts ## @app.route("/contacts/", methods=["GET"]) -def contact_api(): # noqa: ANN201 +def contact_api(): # type: ignore[no-untyped-def] # noqa: ANN201 g.parser.add_argument( "filter", type=bounded_str, default="", location="args" ) @@ -1031,7 +1039,7 @@ def contact_api(): # noqa: ANN201 if args["view"] != "ids": results = results.options( load_only("public_id", "_raw_address", "name"), - joinedload(Contact.phone_numbers), + joinedload(Contact.phone_numbers), # type: ignore[attr-defined] ) results = results.order_by(asc(Contact.created_at)) @@ -1043,7 +1051,7 @@ def contact_api(): # noqa: ANN201 @app.route("/contacts/", methods=["GET"]) -def contact_read_api(public_id): # noqa: ANN201 +def contact_read_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 # Get all data for an existing contact. 
valid_public_id(public_id) result = inbox.contacts.crud.read(g.namespace, g.db_session, public_id) @@ -1056,7 +1064,7 @@ def contact_read_api(public_id): # noqa: ANN201 # Events ## @app.route("/events/", methods=["GET"]) -def event_api(): # noqa: ANN201 +def event_api(): # type: ignore[no-untyped-def] # noqa: ANN201 g.parser.add_argument("event_id", type=valid_public_id, location="args") g.parser.add_argument("calendar_id", type=valid_public_id, location="args") g.parser.add_argument("title", type=bounded_str, location="args") @@ -1113,7 +1121,7 @@ def event_api(): # noqa: ANN201 @app.route("/events/", methods=["POST"]) -def event_create_api(): # noqa: ANN201 +def event_create_api(): # type: ignore[no-untyped-def] # noqa: ANN201 g.parser.add_argument( "notify_participants", type=strict_bool, location="args" ) @@ -1178,7 +1186,7 @@ def event_create_api(): # noqa: ANN201 @app.route("/events/", methods=["GET"]) -def event_read_api(public_id): # noqa: ANN201 +def event_read_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 """Get all data for an existing event.""" valid_public_id(public_id) try: @@ -1199,7 +1207,7 @@ def event_read_api(public_id): # noqa: ANN201 @app.route("/events/", methods=["PUT", "PATCH"]) -def event_update_api(public_id): # noqa: ANN201 +def event_update_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 g.parser.add_argument( "notify_participants", type=strict_bool, location="args" ) @@ -1307,7 +1315,7 @@ def event_update_api(public_id): # noqa: ANN201 @app.route("/events/", methods=["DELETE"]) -def event_delete_api(public_id): # noqa: ANN201 +def event_delete_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 g.parser.add_argument( "notify_participants", type=strict_bool, location="args" ) @@ -1362,7 +1370,7 @@ def event_delete_api(public_id): # noqa: ANN201 @app.route("/send-rsvp", methods=["POST"]) -def event_rsvp_api(): # noqa: ANN201 +def event_rsvp_api(): # type: ignore[no-untyped-def] # noqa: ANN201 data = request.get_json(force=True) event_id = data.get("event_id") @@ -1458,7 +1466,7 @@ def event_rsvp_api(): # noqa: ANN201 # Files # @app.route("/files/", methods=["GET"]) -def files_api(): # noqa: ANN201 +def files_api(): # type: ignore[no-untyped-def] # noqa: ANN201 g.parser.add_argument("filename", type=bounded_str, location="args") g.parser.add_argument("message_id", type=valid_public_id, location="args") g.parser.add_argument("content_type", type=bounded_str, location="args") @@ -1481,7 +1489,7 @@ def files_api(): # noqa: ANN201 @app.route("/files/", methods=["GET"]) -def file_read_api(public_id): # noqa: ANN201 +def file_read_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 valid_public_id(public_id) try: f = ( @@ -1498,7 +1506,7 @@ def file_read_api(public_id): # noqa: ANN201 @app.route("/files/", methods=["DELETE"]) -def file_delete_api(public_id): # noqa: ANN201 +def file_delete_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 valid_public_id(public_id) try: f = ( @@ -1535,13 +1543,13 @@ def file_delete_api(public_id): # noqa: ANN201 # $ curl http://localhost:5555/n/4s4iz36h36w17kumggi36ha2b/files \ # --form upload=@dancingbaby.gif @app.route("/files/", methods=["POST"]) -def file_upload_api(): # noqa: ANN201 +def file_upload_api(): # type: ignore[no-untyped-def] # noqa: ANN201 all_files = [] for name, uploaded in request.files.items(): request.environ["log_context"].setdefault("filenames", []).append(name) f = Block() f.namespace = g.namespace - f.content_type = uploaded.content_type + 
f.content_type = uploaded.content_type # type: ignore[assignment] f.filename = uploaded.filename f.data = uploaded.read() all_files.append(f) @@ -1556,7 +1564,7 @@ def file_upload_api(): # noqa: ANN201 # File downloads # @app.route("/files//download") -def file_download_api(public_id): # noqa: ANN201 +def file_download_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 valid_public_id(public_id) try: f = ( @@ -1608,11 +1616,13 @@ def file_download_api(public_id): # noqa: ANN201 statsd_client.incr(f"{statsd_string}.successes") except TemporaryEmailFetchException: - statsd_client.incr(f"{statsd_string}.temporary_failure") + statsd_client.incr( + f"{statsd_string}.temporary_failure" # type: ignore[possibly-undefined] + ) log.warning( "Exception when fetching email", - account_id=account.id, - provider=account.provider, + account_id=account.id, # type: ignore[possibly-undefined] + provider=account.provider, # type: ignore[possibly-undefined] logstash_tag="direct_fetching", exc_info=True, ) @@ -1623,18 +1633,22 @@ def file_download_api(public_id): # noqa: ANN201 "Please try again in a few minutes.", ) except EmailDeletedException: - statsd_client.incr(f"{statsd_string}.deleted") + statsd_client.incr( + f"{statsd_string}.deleted" # type: ignore[possibly-undefined] + ) log.warning( "Exception when fetching email", - account_id=account.id, - provider=account.provider, + account_id=account.id, # type: ignore[possibly-undefined] + provider=account.provider, # type: ignore[possibly-undefined] logstash_tag="direct_fetching", exc_info=True, ) return err(404, "The data was deleted on the email server.") except EmailFetchException: - statsd_client.incr(f"{statsd_string}.failures") + statsd_client.incr( + f"{statsd_string}.failures" # type: ignore[possibly-undefined] + ) log.warning( "Exception when fetching email", logstash_tag="direct_fetching", @@ -1651,7 +1665,7 @@ def file_download_api(public_id): # noqa: ANN201 # Calendars ## @app.route("/calendars/", methods=["GET"]) -def calendar_api(): # noqa: ANN201 +def calendar_api(): # type: ignore[no-untyped-def] # noqa: ANN201 g.parser.add_argument("view", type=view, location="args") args = strict_parse_args(g.parser, request.args) @@ -1676,7 +1690,7 @@ def calendar_api(): # noqa: ANN201 @app.route("/calendars/", methods=["GET"]) -def calendar_read_api(public_id): # noqa: ANN201 +def calendar_read_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 """Get all data for an existing calendar.""" valid_public_id(public_id) @@ -1705,7 +1719,7 @@ def calendar_read_api(public_id): # noqa: ANN201 @app.route("/drafts/", methods=["GET"]) -def draft_query_api(): # noqa: ANN201 +def draft_query_api(): # type: ignore[no-untyped-def] # noqa: ANN201 g.parser.add_argument("subject", type=bounded_str, location="args") g.parser.add_argument("to", type=bounded_str, location="args") g.parser.add_argument("cc", type=bounded_str, location="args") @@ -1762,7 +1776,7 @@ def draft_query_api(): # noqa: ANN201 @app.route("/drafts/", methods=["GET"]) -def draft_get_api(public_id): # noqa: ANN201 +def draft_get_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 valid_public_id(public_id) draft = ( g.db_session.query(Message) @@ -1778,7 +1792,7 @@ def draft_get_api(public_id): # noqa: ANN201 @app.route("/drafts/", methods=["POST"]) -def draft_create_api(): # noqa: ANN201 +def draft_create_api(): # type: ignore[no-untyped-def] # noqa: ANN201 data = request.get_json(force=True) draft = create_message_from_json( data, g.namespace, g.db_session, is_draft=True 
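The possibly-undefined ignores in this download path cover names (statsd_string, account) that are bound inside the try block but read again in the except handlers; mypy's optional possibly-undefined error code (off by default, enabled with --enable-error-code possibly-undefined) conservatively flags any read that some control-flow path could reach before the binding executed. A stripped-down reproduction, illustrative only and not from the codebase:

    def lookup() -> str:
        raise TimeoutError("backend too slow")

    def download() -> None:
        try:
            name = lookup()  # may raise before `name` is ever bound
            print("fetched", name)
        except TimeoutError:
            # Reached with `name` unbound whenever lookup() itself raised;
            # mypy: Name "name" may be undefined  [possibly-undefined]
            print("failed for", name)

Binding the names before entering the try block would satisfy the check without ignores, at the cost of a larger diff.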
@@ -1787,7 +1801,7 @@ def draft_create_api(): # noqa: ANN201 @app.route("/drafts/", methods=["PUT", "PATCH"]) -def draft_update_api(public_id): # noqa: ANN201 +def draft_update_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 data = request.get_json(force=True) original_draft = get_draft( public_id, data.get("version"), g.namespace.id, g.db_session @@ -1830,20 +1844,22 @@ def draft_update_api(public_id): # noqa: ANN201 @app.route("/drafts/", methods=["DELETE"]) -def draft_delete_api(public_id): # noqa: ANN201 +def draft_delete_api(public_id): # type: ignore[no-untyped-def] # noqa: ANN201 data = request.get_json(force=True) # Validate draft id, version, etc. draft = get_draft( public_id, data.get("version"), g.namespace.id, g.db_session ) - result = delete_draft(g.db_session, g.namespace.account, draft) + result = delete_draft( # type: ignore[func-returns-value] + g.db_session, g.namespace.account, draft + ) return g.encoder.jsonify(result) @app.route("/send", methods=["POST"]) @app.route("/send-with-features", methods=["POST"]) # TODO deprecate this URL -def draft_send_api(): # noqa: ANN201 +def draft_send_api(): # type: ignore[no-untyped-def] # noqa: ANN201 request_started = time.time() account = g.namespace.account @@ -1875,7 +1891,9 @@ def draft_send_api(): # noqa: ANN201 if tracking_options: # Open/Link/Reply tracking set try: - from redwood.api.tracking import handle_tracking_options + from redwood.api.tracking import ( # type: ignore[import-not-found] + handle_tracking_options, + ) except ImportError: return err( 501, @@ -1919,7 +1937,7 @@ def draft_send_api(): # noqa: ANN201 @app.route("/send-multiple", methods=["POST"]) -def multi_send_create(): # noqa: ANN201 +def multi_send_create(): # type: ignore[no-untyped-def] # noqa: ANN201 """Initiates a multi-send session by creating a new multi-send draft.""" # noqa: D401 account = g.namespace.account @@ -1943,7 +1961,7 @@ def multi_send_create(): # noqa: ANN201 @app.route("/send-multiple/", methods=["POST"]) -def multi_send(draft_id): # noqa: ANN201 +def multi_send(draft_id): # type: ignore[no-untyped-def] # noqa: ANN201 """ Performs a single send operation in an individualized multi-send session. Sends a copy of the draft at draft_id to the specified address @@ -1993,7 +2011,7 @@ def multi_send(draft_id): # noqa: ANN201 @app.route("/send-multiple/", methods=["DELETE"]) -def multi_send_finish(draft_id): # noqa: ANN201 +def multi_send_finish(draft_id): # type: ignore[no-untyped-def] # noqa: ANN201 """ Closes out a multi-send session by marking the sending draft as sent and moving it to the user's Sent folder. @@ -2038,7 +2056,7 @@ def multi_send_finish(draft_id): # noqa: ANN201 ## @app.route("/delta") @app.route("/delta/longpoll") -def sync_deltas(): # noqa: ANN201 +def sync_deltas(): # type: ignore[no-untyped-def] # noqa: ANN201 g.parser.add_argument( "cursor", type=valid_public_id, location="args", required=True ) @@ -2129,7 +2147,10 @@ def sync_deltas(): # noqa: ANN201 return g.encoder.jsonify(response) # No changes. 
perhaps wait - elif "/delta/longpoll" in request.url_rule.rule: + elif ( + "/delta/longpoll" + in request.url_rule.rule # type: ignore[union-attr] + ): time.sleep(poll_interval) else: # Return immediately response["cursor_end"] = cursor @@ -2137,13 +2158,13 @@ def sync_deltas(): # noqa: ANN201 return g.encoder.jsonify(response) # If nothing happens until timeout, just return the end of the cursor - response["cursor_end"] = cursor + response["cursor_end"] = cursor # type: ignore[possibly-undefined] return g.encoder.jsonify(response) # TODO Deprecate this @app.route("/delta/generate_cursor", methods=["POST"]) -def generate_cursor(): # noqa: ANN201 +def generate_cursor(): # type: ignore[no-untyped-def] # noqa: ANN201 data = request.get_json(force=True) if list(data) != ["start"] or not isinstance(data["start"], int): @@ -2169,7 +2190,7 @@ def generate_cursor(): # noqa: ANN201 @app.route("/delta/latest_cursor", methods=["POST"]) -def latest_cursor(): # noqa: ANN201 +def latest_cursor(): # type: ignore[no-untyped-def] # noqa: ANN201 cursor = delta_sync.get_transaction_cursor_near_timestamp( g.namespace.id, int(time.time()), g.db_session ) @@ -2182,7 +2203,7 @@ def latest_cursor(): # noqa: ANN201 @app.route("/delta/streaming") -def stream_changes(): # noqa: ANN201 +def stream_changes(): # type: ignore[no-untyped-def] # noqa: ANN201 g.parser.add_argument("timeout", type=float, location="args") g.parser.add_argument( "cursor", type=valid_public_id, location="args", required=True @@ -2278,7 +2299,7 @@ def stream_changes(): # noqa: ANN201 @app.route("/groups/intrinsic") -def groups_intrinsic(): # noqa: ANN201 +def groups_intrinsic(): # type: ignore[no-untyped-def] # noqa: ANN201 g.parser.add_argument( "force_recalculate", type=strict_bool, location="args" ) @@ -2290,7 +2311,9 @@ def groups_intrinsic(): # noqa: ANN201 .one() ) except NoResultFound: - dpcache = DataProcessingCache(namespace_id=g.namespace.id) + dpcache = DataProcessingCache( # type: ignore[call-arg] + namespace_id=g.namespace.id + ) last_updated = dpcache.contact_groups_last_updated cached_data = dpcache.contact_groups @@ -2328,7 +2351,7 @@ def groups_intrinsic(): # noqa: ANN201 @app.route("/contacts/rankings") -def contact_rankings(): # noqa: ANN201 +def contact_rankings(): # type: ignore[no-untyped-def] # noqa: ANN201 g.parser.add_argument( "force_recalculate", type=strict_bool, location="args" ) @@ -2340,7 +2363,9 @@ def contact_rankings(): # noqa: ANN201 .one() ) except NoResultFound: - dpcache = DataProcessingCache(namespace_id=g.namespace.id) + dpcache = DataProcessingCache( # type: ignore[call-arg] + namespace_id=g.namespace.id + ) last_updated = dpcache.contact_rankings_last_updated cached_data = dpcache.contact_rankings diff --git a/inbox/api/sending.py b/inbox/api/sending.py index 8487a4ac0..ac15645ca 100644 --- a/inbox/api/sending.py +++ b/inbox/api/sending.py @@ -8,7 +8,7 @@ log = get_logger() -def send_draft(account, draft, db_session): # noqa: ANN201 +def send_draft(account, draft, db_session): # type: ignore[no-untyped-def] # noqa: ANN201 """Send the draft with id = `draft_id`.""" # Update message state and prepare a response so that we can immediately # return it on success, and not potentially have queries fail after @@ -30,7 +30,9 @@ def send_draft(account, draft, db_session): # noqa: ANN201 return response_on_success -def send_draft_copy(account, draft, custom_body, recipient): # noqa: ANN201 +def send_draft_copy( # type: ignore[no-untyped-def] # noqa: ANN201 + account, draft, custom_body, recipient +): """ Sends a 
copy of this draft to the recipient, using the specified body rather than the one on the draft object, and not marking the draft as @@ -63,7 +65,9 @@ def send_draft_copy(account, draft, custom_body, recipient): # noqa: ANN201 return response_on_success -def update_draft_on_send(account, draft, db_session) -> None: +def update_draft_on_send( # type: ignore[no-untyped-def] + account, draft, db_session +) -> None: # Update message draft.is_sent = True draft.is_draft = False @@ -74,7 +78,7 @@ def update_draft_on_send(account, draft, db_session) -> None: db_session.flush() -def send_raw_mime(account, db_session, msg): # noqa: ANN201 +def send_raw_mime(account, db_session, msg): # type: ignore[no-untyped-def] # noqa: ANN201 # Prepare a response so that we can immediately return it on success, and # not potentially have queries fail after sending. response_on_success = APIEncoder().jsonify(msg) diff --git a/inbox/api/srv.py b/inbox/api/srv.py index 0ecd6f531..d3c53ac71 100644 --- a/inbox/api/srv.py +++ b/inbox/api/srv.py @@ -1,8 +1,8 @@ from typing import Any from flask import Flask, g, jsonify, make_response, request -from flask_restful import reqparse -from sqlalchemy.orm.exc import NoResultFound +from flask_restful import reqparse # type: ignore[import-untyped] +from sqlalchemy.orm.exc import NoResultFound # type: ignore[import-untyped] from werkzeug.exceptions import HTTPException, default_exceptions from inbox.api.err import APIException, InputError, NotFoundError @@ -44,28 +44,34 @@ @app.errorhandler(APIException) -def handle_input_error(error): # noqa: ANN201 +def handle_input_error(error): # type: ignore[no-untyped-def] # noqa: ANN201 response = jsonify(message=error.message, type="invalid_request_error") response.status_code = error.status_code return response -def default_json_error(ex): # noqa: ANN201 +def default_json_error(ex): # type: ignore[no-untyped-def] # noqa: ANN201 """Exception -> flask JSON responder""" logger = get_logger() logger.error("Uncaught error thrown by Flask/Werkzeug", exc_info=ex) response = jsonify(message=str(ex), type="api_error") - response.status_code = ex.code if isinstance(ex, HTTPException) else 500 + response.status_code = ( + ex.code # type: ignore[assignment] + if isinstance(ex, HTTPException) + else 500 + ) return response # Patch all error handlers in werkzeug for code in default_exceptions: - app.error_handler_spec[None][code] = default_json_error + app.error_handler_spec[None][ + code + ] = default_json_error # type: ignore[assignment] @app.before_request -def auth(): # noqa: ANN201 +def auth(): # type: ignore[no-untyped-def] # noqa: ANN201 """Check for account ID on all non-root URLs""" if ( request.path == "/" @@ -119,7 +125,7 @@ def auth(): # noqa: ANN201 @app.after_request -def finish(response): # noqa: ANN201 +def finish(response): # type: ignore[no-untyped-def] # noqa: ANN201 origin = request.headers.get("origin") if origin: # means it's just a regular request response.headers["Access-Control-Allow-Origin"] = origin @@ -134,7 +140,7 @@ def finish(response): # noqa: ANN201 @app.route("/accounts/", methods=["GET"]) -def ns_all(): # noqa: ANN201 +def ns_all(): # type: ignore[no-untyped-def] # noqa: ANN201 """Return all namespaces""" # We do this outside the blueprint to support the case of an empty # public_id. 
However, this means the before_request isn't run, so we need @@ -162,7 +168,9 @@ def ns_all(): # noqa: ANN201 return encoder.jsonify(namespaces) -def _get_account_data_for_generic_account(data): +def _get_account_data_for_generic_account( # type: ignore[no-untyped-def] + data, +): email_address = data["email_address"] sync_email = data.get("sync_email", True) smtp_server_host = data.get("smtp_server_host", "localhost") @@ -253,7 +261,7 @@ def _get_account_data_for_microsoft_account( @app.route("/accounts/", methods=["POST"]) -def create_account(): # noqa: ANN201 +def create_account(): # type: ignore[no-untyped-def] # noqa: ANN201 """Create a new account""" data = request.get_json(force=True) @@ -276,11 +284,11 @@ def create_account(): # noqa: ANN201 db_session.commit() encoder = APIEncoder() - return encoder.jsonify(account.namespace) + return encoder.jsonify(account.namespace) # type: ignore[union-attr] @app.route("/accounts//", methods=["PUT"]) -def modify_account(namespace_public_id): # noqa: ANN201 +def modify_account(namespace_public_id): # type: ignore[no-untyped-def] # noqa: ANN201 """ Modify an existing account @@ -320,7 +328,7 @@ def modify_account(namespace_public_id): # noqa: ANN201 @app.route("/accounts//", methods=["DELETE"]) -def delete_account(namespace_public_id): # noqa: ANN201 +def delete_account(namespace_public_id): # type: ignore[no-untyped-def] # noqa: ANN201 """Mark an existing account for deletion.""" try: with global_session_scope() as db_session: @@ -347,7 +355,7 @@ def home() -> str: @app.route("/logout") -def logout(): # noqa: ANN201 +def logout(): # type: ignore[no-untyped-def] # noqa: ANN201 """ Utility function used to force browsers to reset cached HTTP Basic Auth credentials diff --git a/inbox/api/update.py b/inbox/api/update.py index 6dad69ef3..1744659a0 100644 --- a/inbox/api/update.py +++ b/inbox/api/update.py @@ -1,6 +1,6 @@ from datetime import datetime -from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.orm.exc import NoResultFound # type: ignore[import-untyped] from inbox.api.err import InputError from inbox.api.validation import valid_public_id @@ -13,7 +13,9 @@ # STOPSHIP(emfree): better naming/structure for this module -def update_message(message, request_data, db_session, optimistic) -> None: +def update_message( # type: ignore[no-untyped-def] + message, request_data, db_session, optimistic +) -> None: accept_labels = message.namespace.account.provider == "gmail" # Update flags (message.{is_read, is_starred}) unread, starred = parse_flags(request_data) @@ -33,7 +35,9 @@ def update_message(message, request_data, db_session, optimistic) -> None: update_message_folder(message, db_session, folder, optimistic) -def update_thread(thread, request_data, db_session, optimistic) -> None: +def update_thread( # type: ignore[no-untyped-def] + thread, request_data, db_session, optimistic +) -> None: accept_labels = thread.namespace.account.provider == "gmail" (unread, starred) = parse_flags(request_data) @@ -45,7 +49,7 @@ def update_thread(thread, request_data, db_session, optimistic) -> None: raise InputError(f"Unexpected attribute: {list(request_data)[0]}") if accept_labels: - if labels is not None: + if labels is not None: # type: ignore[possibly-undefined] new_labels = labels - set(thread.categories) removed_labels = set(thread.categories) - labels @@ -59,7 +63,7 @@ def update_thread(thread, request_data, db_session, optimistic) -> None: optimistic, ) - elif folder is not None: + elif folder is not None: # type: ignore[possibly-undefined] for 
message in thread.messages: # Exclude drafts and sent messages from thread-level moves. if ( @@ -79,7 +83,7 @@ def update_thread(thread, request_data, db_session, optimistic) -> None: ## FLAG UPDATES ## -def parse_flags(request_data): # noqa: ANN201 +def parse_flags(request_data): # type: ignore[no-untyped-def] # noqa: ANN201 unread = request_data.pop("unread", None) if unread is not None and not isinstance(unread, bool): raise InputError('"unread" must be true or false') @@ -90,7 +94,7 @@ def parse_flags(request_data): # noqa: ANN201 return unread, starred -def update_message_flags( +def update_message_flags( # type: ignore[no-untyped-def] message, db_session, optimistic, unread=None, starred=None ) -> None: if unread is not None: @@ -121,7 +125,9 @@ def update_message_flags( ## FOLDER UPDATES ## -def parse_folder(request_data, db_session, namespace_id): # noqa: ANN201 +def parse_folder( # type: ignore[no-untyped-def] # noqa: ANN201 + request_data, db_session, namespace_id +): # TODO deprecate being able to post "folder" and not "folder_id" if "folder_id" not in request_data and "folder" not in request_data: return None @@ -149,7 +155,9 @@ def parse_folder(request_data, db_session, namespace_id): # noqa: ANN201 ) -def update_message_folder(message, db_session, category, optimistic) -> None: +def update_message_folder( # type: ignore[no-untyped-def] + message, db_session, category, optimistic +) -> None: # STOPSHIP(emfree): what about sent/inbox duality? if optimistic: message.categories = [category] @@ -167,7 +175,9 @@ def update_message_folder(message, db_session, category, optimistic) -> None: ### LABEL UPDATES ### -def parse_labels(request_data, db_session, namespace_id): # noqa: ANN201 +def parse_labels( # type: ignore[no-untyped-def] # noqa: ANN201 + request_data, db_session, namespace_id +): # TODO deprecate being able to post "labels" and not "label_ids" if "label_ids" not in request_data and "labels" not in request_data: return None @@ -205,7 +215,7 @@ def parse_labels(request_data, db_session, namespace_id): # noqa: ANN201 return labels -def update_message_labels( +def update_message_labels( # type: ignore[no-untyped-def] message, db_session, added_categories, removed_categories, optimistic ) -> None: special_label_map = { @@ -248,7 +258,7 @@ def update_message_labels( # created_at value. 
Taken from # https://docs.sqlalchemy.org/en/13/orm/extensions/ # associationproxy.html#simplifying-association-objects - MessageCategory( + MessageCategory( # type: ignore[call-arg] category=category, message=message, created_at=update_time ) @@ -292,7 +302,9 @@ def update_message_labels( ) -def validate_labels(db_session, added_categories, removed_categories) -> None: +def validate_labels( # type: ignore[no-untyped-def] + db_session, added_categories, removed_categories +) -> None: """ Validate that the labels added and removed obey Gmail's semantics -- Gmail messages MUST belong to exactly ONE of the '[Gmail]All Mail', @@ -316,7 +328,7 @@ def validate_labels(db_session, added_categories, removed_categories) -> None: raise InputError('"all", "trash" and "spam" cannot all be removed') -def apply_gmail_label_rules( +def apply_gmail_label_rules( # type: ignore[no-untyped-def] db_session, message, added_categories, removed_categories ) -> None: r""" diff --git a/inbox/api/validation.py b/inbox/api/validation.py index 43e1e4d1a..f91fb5327 100644 --- a/inbox/api/validation.py +++ b/inbox/api/validation.py @@ -3,11 +3,11 @@ import contextlib from typing import Never -import arrow -from arrow.parser import ParserError -from flanker.addresslib import address -from flask_restful import reqparse -from sqlalchemy.orm.exc import NoResultFound +import arrow # type: ignore[import-untyped] +from arrow.parser import ParserError # type: ignore[import-untyped] +from flanker.addresslib import address # type: ignore[import-untyped] +from flask_restful import reqparse # type: ignore[import-untyped] +from sqlalchemy.orm.exc import NoResultFound # type: ignore[import-untyped] from inbox.api.err import ( AccountInvalidError, @@ -27,20 +27,22 @@ class ValidatableArgument(reqparse.Argument): - def handle_validation_error(self, error, bundle_errors) -> Never: + def handle_validation_error( # type: ignore[no-untyped-def] + self, error, bundle_errors + ) -> Never: raise InputError(str(error)) # Custom parameter types -def bounded_str(value, key): # noqa: ANN201 +def bounded_str(value, key): # type: ignore[no-untyped-def] # noqa: ANN201 if len(value) > 255: raise ValueError(f"Value {value} for {key} is too long") return value -def comma_separated_email_list(value, key): # noqa: ANN201 +def comma_separated_email_list(value, key): # type: ignore[no-untyped-def] # noqa: ANN201 addresses = value.split(",") # Note that something like "foo,bar"@example.com is technically a valid # email address, but in practice nobody does this (and they shouldn't!) 
@@ -60,7 +62,7 @@ def comma_separated_email_list(value, key): # noqa: ANN201 return good_emails -def strict_bool(value, key): # noqa: ANN201 +def strict_bool(value, key): # type: ignore[no-untyped-def] # noqa: ANN201 if value.lower() not in ["true", "false"]: raise ValueError( f'Value must be "true" or "false" (not "{value}") for {key}' @@ -68,14 +70,14 @@ def strict_bool(value, key): # noqa: ANN201 return value.lower() == "true" -def view(value, key): # noqa: ANN201 +def view(value, key): # type: ignore[no-untyped-def] # noqa: ANN201 allowed_views = ["count", "ids", "expanded"] if value not in allowed_views: raise ValueError(f"Unknown view type {value}.") return value -def limit(value): # noqa: ANN201 +def limit(value): # type: ignore[no-untyped-def] # noqa: ANN201 try: value = int(value) except ValueError: @@ -89,7 +91,7 @@ def limit(value): # noqa: ANN201 return value -def offset(value): # noqa: ANN201 +def offset(value): # type: ignore[no-untyped-def] # noqa: ANN201 try: value = int(value) except ValueError: @@ -99,7 +101,7 @@ def offset(value): # noqa: ANN201 return value -def valid_public_id(value): # noqa: ANN201 +def valid_public_id(value): # type: ignore[no-untyped-def] # noqa: ANN201 if "_" in value: raise InputError(f"Invalid id: {value}") @@ -112,14 +114,14 @@ def valid_public_id(value): # noqa: ANN201 return value -def valid_account(namespace) -> None: +def valid_account(namespace) -> None: # type: ignore[no-untyped-def] if namespace.account.sync_state == "invalid": raise AccountInvalidError() if namespace.account.sync_state == "stopped": raise AccountStoppedError() -def valid_category_type(category_type, rule): # noqa: ANN201 +def valid_category_type(category_type, rule): # type: ignore[no-untyped-def] # noqa: ANN201 if category_type not in rule: if category_type == "label": raise NotFoundError("GMail accounts don't support folders") @@ -128,7 +130,7 @@ def valid_category_type(category_type, rule): # noqa: ANN201 return category_type -def timestamp(value, key): # noqa: ANN201 +def timestamp(value, key): # type: ignore[no-untyped-def] # noqa: ANN201 try: with contextlib.suppress(ValueError): value = float(value) @@ -144,7 +146,7 @@ def timestamp(value, key): # noqa: ANN201 ) -def strict_parse_args(parser, raw_args): # noqa: ANN201 +def strict_parse_args(parser, raw_args): # type: ignore[no-untyped-def] # noqa: ANN201 """ Wrapper around parser.parse_args that raises a ValueError if unexpected arguments are present. 
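These custom parameter types are consumed through flask_restful's reqparse (the g.parser.add_argument calls throughout ns_api.py above), which probes a type callable with decreasing arity, roughly type(value, name) before falling back to type(value); that is why two-argument validators like bounded_str and one-argument ones like limit can coexist. A hedged wiring sketch; the parser construction and the default value are assumptions, only the imported names come from this module:

    from flask import request
    from flask_restful import reqparse

    from inbox.api.validation import (  # names defined in the diff above
        ValidatableArgument,
        limit,
        strict_parse_args,
        view,
    )

    # ValidatableArgument reroutes reqparse failures into the API's InputError.
    parser = reqparse.RequestParser(argument_class=ValidatableArgument)
    parser.add_argument("view", type=view, location="args")
    parser.add_argument("limit", type=limit, default=100, location="args")
    args = strict_parse_args(parser, request.args)  # rejects unknown args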
@@ -159,7 +161,7 @@ def strict_parse_args(parser, raw_args): # noqa: ANN201 return args -def get_sending_draft( # noqa: ANN201 +def get_sending_draft( # type: ignore[no-untyped-def] # noqa: ANN201 draft_public_id, namespace_id, db_session ): valid_public_id(draft_public_id) @@ -184,7 +186,7 @@ def get_sending_draft( # noqa: ANN201 return draft -def get_draft( # noqa: ANN201 +def get_draft( # type: ignore[no-untyped-def] # noqa: ANN201 draft_public_id, version, namespace_id, db_session ): valid_public_id(draft_public_id) @@ -219,7 +221,7 @@ def get_draft( # noqa: ANN201 return draft -def get_attachments( # noqa: ANN201 +def get_attachments( # type: ignore[no-untyped-def] # noqa: ANN201 block_public_ids, namespace_id, db_session ): attachments: set[Block] = set() @@ -249,7 +251,9 @@ def get_attachments( # noqa: ANN201 return attachments -def get_message(message_public_id, namespace_id, db_session): # noqa: ANN201 +def get_message( # type: ignore[no-untyped-def] # noqa: ANN201 + message_public_id, namespace_id, db_session +): if message_public_id is None: return None valid_public_id(message_public_id) @@ -268,7 +272,9 @@ def get_message(message_public_id, namespace_id, db_session): # noqa: ANN201 ) -def get_thread(thread_public_id, namespace_id, db_session): # noqa: ANN201 +def get_thread( # type: ignore[no-untyped-def] # noqa: ANN201 + thread_public_id, namespace_id, db_session +): if thread_public_id is None: return None valid_public_id(thread_public_id) @@ -288,7 +294,7 @@ def get_thread(thread_public_id, namespace_id, db_session): # noqa: ANN201 ) -def get_recipients(recipients, field): # noqa: ANN201 +def get_recipients(recipients, field): # type: ignore[no-untyped-def] # noqa: ANN201 if recipients is None: return None if not isinstance(recipients, list): @@ -307,7 +313,9 @@ def get_recipients(recipients, field): # noqa: ANN201 return [(r.get("name", ""), r.get("email", "")) for r in recipients] -def get_calendar(calendar_public_id, namespace, db_session): # noqa: ANN201 +def get_calendar( # type: ignore[no-untyped-def] # noqa: ANN201 + calendar_public_id, namespace, db_session +): valid_public_id(calendar_public_id) try: return ( @@ -324,14 +332,14 @@ def get_calendar(calendar_public_id, namespace, db_session): # noqa: ANN201 ) -def valid_when(when) -> None: +def valid_when(when) -> None: # type: ignore[no-untyped-def] try: parse_as_when(when) except (ValueError, ParserError) as e: raise InputError(str(e)) # noqa: B904 -def valid_event(event) -> None: +def valid_event(event) -> None: # type: ignore[no-untyped-def] if "when" not in event: raise InputError("Must specify 'when' when creating an event.") @@ -366,7 +374,9 @@ def valid_event(event) -> None: ) -def valid_event_update(event, namespace, db_session) -> None: +def valid_event_update( # type: ignore[no-untyped-def] + event, namespace, db_session +) -> None: if "when" in event: valid_when(event["when"]) @@ -388,7 +398,7 @@ def valid_event_update(event, namespace, db_session) -> None: ) -def noop_event_update(event, data) -> bool: +def noop_event_update(event, data) -> bool: # type: ignore[no-untyped-def] # Check whether the update is actually updating fields. # We do this by cloning the event, updating the fields and # comparing them. 
This is less cumbersome than having to think @@ -440,7 +450,7 @@ def noop_event_update(event, data) -> bool: return True -def valid_delta_object_types(types_arg): # noqa: ANN201 +def valid_delta_object_types(types_arg): # type: ignore[no-untyped-def] # noqa: ANN201 types = [item.strip() for item in types_arg.split(",")] allowed_types = ( "contact", @@ -459,7 +469,7 @@ def valid_delta_object_types(types_arg): # noqa: ANN201 return types -def validate_draft_recipients(draft) -> None: +def validate_draft_recipients(draft) -> None: # type: ignore[no-untyped-def] """ Check that a draft has at least one recipient, and that all recipient emails are at least plausible email addresses, before we try to send it. @@ -477,7 +487,7 @@ def validate_draft_recipients(draft) -> None: ) -def valid_display_name( # noqa: ANN201 +def valid_display_name( # type: ignore[no-untyped-def] # noqa: ANN201 namespace_id, category_type, display_name, db_session ): if display_name is None or not isinstance(display_name, str): diff --git a/inbox/api/wsgi.py b/inbox/api/wsgi.py index 7f8778abc..00f1d8c67 100644 --- a/inbox/api/wsgi.py +++ b/inbox/api/wsgi.py @@ -1,8 +1,10 @@ import logging import sys -import json_log_formatter -from gunicorn.workers.gthread import ThreadWorker +import json_log_formatter # type: ignore[import-untyped] +from gunicorn.workers.gthread import ( # type: ignore[import-untyped] + ThreadWorker, +) from inbox.error_handling import maybe_enable_rollbar from inbox.logging import configure_logging, get_logger @@ -44,21 +46,33 @@ def json_record( # Convert the log record to a JSON object. # See https://docs.gunicorn.org/en/stable/settings.html#access-log-format - url = record.args["U"] - if record.args["q"]: - url += f"?{record.args['q']}" + url = record.args["U"] # type: ignore[call-overload, index] + if record.args["q"]: # type: ignore[call-overload, index] + url += ( # type: ignore[operator] + f"?{record.args['q']}" # type: ignore[call-overload, index] + ) - method = record.args["m"] - log_context = record.args.get("{log_context}e", {}) + method = record.args["m"] # type: ignore[call-overload, index] + log_context = record.args.get( # type: ignore[union-attr] + "{log_context}e", {} + ) return dict( - response_bytes=record.args["B"], - request_time=float(record.args["L"]), - remote_address=record.args["h"], - http_status=record.args["s"], + response_bytes=record.args[ # type: ignore[call-overload, dict-item, index] + "B" + ], + request_time=float( + record.args["L"] # type: ignore[arg-type, call-overload, index] + ), + remote_address=record.args[ # type: ignore[call-overload, dict-item, index] + "h" + ], + http_status=record.args[ # type: ignore[call-overload, dict-item, index] + "s" + ], http_request=f"{method} {url}", - request_method=method, - **log_context, + request_method=method, # type: ignore[dict-item] + **log_context, # type: ignore[dict-item] ) diff --git a/inbox/auth/base.py b/inbox/auth/base.py index 22393d868..a84332738 100644 --- a/inbox/auth/base.py +++ b/inbox/auth/base.py @@ -1,7 +1,7 @@ import socket from typing import Never -from imapclient import IMAPClient +from imapclient import IMAPClient # type: ignore[import-untyped] from inbox.crispin import CrispinClient from inbox.exceptions import NotSupportedError, UserRecoverableConfigError @@ -13,7 +13,7 @@ log = get_logger() -def handler_from_provider(provider_name): # noqa: ANN201 +def handler_from_provider(provider_name): # type: ignore[no-untyped-def] # noqa: ANN201 """ Return an authentication handler for the given provider. 
@@ -44,7 +44,9 @@ def handler_from_provider(provider_name): # noqa: ANN201 class AuthHandler: - def create_account(self, account_data) -> Never: + def create_account( # type: ignore[no-untyped-def] + self, account_data + ) -> Never: """ Create a new account with the given subclass-specific account data. @@ -53,7 +55,9 @@ def create_account(self, account_data) -> Never: """ raise NotImplementedError() - def update_account(self, account, account_data) -> Never: + def update_account( # type: ignore[no-untyped-def] + self, account, account_data + ) -> Never: """ Update an existing account with the given subclass-specific account data. @@ -62,7 +66,7 @@ def update_account(self, account, account_data) -> Never: """ raise NotImplementedError() - def get_imap_connection( # noqa: ANN201 + def get_imap_connection( # type: ignore[no-untyped-def] # noqa: ANN201 self, account, use_timeout: bool = True ): host, port = account.imap_endpoint @@ -78,20 +82,24 @@ def get_imap_connection( # noqa: ANN201 ) raise - def authenticate_imap_connection(self, account, conn) -> Never: + def authenticate_imap_connection( # type: ignore[no-untyped-def] + self, account, conn + ) -> Never: raise NotImplementedError() - def get_authenticated_imap_connection( # noqa: ANN201 + def get_authenticated_imap_connection( # type: ignore[no-untyped-def] # noqa: ANN201 self, account, use_timeout: bool = True ): conn = self.get_imap_connection(account, use_timeout=use_timeout) self.authenticate_imap_connection(account, conn) - return conn + return conn # type: ignore[unreachable] - def interactive_auth(self, email_address) -> Never: + def interactive_auth( # type: ignore[no-untyped-def] + self, email_address + ) -> Never: raise NotImplementedError() - def verify_account(self, account) -> bool: + def verify_account(self, account) -> bool: # type: ignore[no-untyped-def] """ Verifies a generic IMAP account by logging in and logging out to both the IMAP/ SMTP servers. 
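The unreachable ignore on return conn above exists because authenticate_imap_connection is declared -> Never on the base class: under --warn-unreachable, mypy treats everything after a Never-returning call as dead code, even though subclasses override the method to return normally (which is also why the override in inbox/auth/generic.py below carries an override ignore). A minimal sketch of that interaction, with assumed names:

    from typing import Never

    class Base:
        def authenticate(self, conn: object) -> Never:
            raise NotImplementedError()

        def connect(self) -> str:
            self.authenticate(object())
            # mypy --warn-unreachable: Statement is unreachable  [unreachable]
            return "connected"

    class Concrete(Base):
        # Widening Never to None is an incompatible override for mypy:
        def authenticate(self, conn: object) -> None:  # type: ignore[override]
            pass

Declaring the base method -> None (and still raising) would avoid both ignores; the patch keeps Never, presumably to preserve the existing contract of these stubs.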
diff --git a/inbox/auth/generic.py b/inbox/auth/generic.py index d560f08b8..52995def1 100644 --- a/inbox/auth/generic.py +++ b/inbox/auth/generic.py @@ -2,7 +2,7 @@ import getpass import attr -from imapclient import IMAPClient +from imapclient import IMAPClient # type: ignore[import-untyped] from inbox.auth.utils import auth_is_invalid, auth_requires_app_password from inbox.exceptions import AppPasswordError, ValidationError @@ -17,31 +17,33 @@ @attr.s class GenericAccountData: - email = attr.ib() + email = attr.ib() # type: ignore[var-annotated] - imap_server_host = attr.ib() - imap_server_port = attr.ib() - imap_username = attr.ib() - imap_password = attr.ib() + imap_server_host = attr.ib() # type: ignore[var-annotated] + imap_server_port = attr.ib() # type: ignore[var-annotated] + imap_username = attr.ib() # type: ignore[var-annotated] + imap_password = attr.ib() # type: ignore[var-annotated] - smtp_server_host = attr.ib() - smtp_server_port = attr.ib() - smtp_username = attr.ib() - smtp_password = attr.ib() + smtp_server_host = attr.ib() # type: ignore[var-annotated] + smtp_server_port = attr.ib() # type: ignore[var-annotated] + smtp_username = attr.ib() # type: ignore[var-annotated] + smtp_password = attr.ib() # type: ignore[var-annotated] - sync_email = attr.ib() + sync_email = attr.ib() # type: ignore[var-annotated] class GenericAuthHandler(AuthHandler): - def create_account(self, account_data): # noqa: ANN201 + def create_account(self, account_data): # type: ignore[no-untyped-def] # noqa: ANN201 namespace = Namespace() - account = GenericAccount(namespace=namespace) + account = GenericAccount(namespace=namespace) # type: ignore[call-arg] account.provider = "custom" account.create_emailed_events_calendar() account.sync_should_run = False return self.update_account(account, account_data) - def update_account(self, account, account_data): # noqa: ANN201 + def update_account( # type: ignore[no-untyped-def] # noqa: ANN201 + self, account, account_data + ): account.email_address = account_data.email account.imap_endpoint = ( @@ -66,7 +68,9 @@ def update_account(self, account, account_data): # noqa: ANN201 return account - def authenticate_imap_connection(self, account, conn) -> None: + def authenticate_imap_connection( # type: ignore[no-untyped-def, override] + self, account, conn + ) -> None: try: conn.login(account.imap_username, account.imap_password) except IMAPClient.Error as exc: @@ -85,7 +89,7 @@ def authenticate_imap_connection(self, account, conn) -> None: ) raise - def interactive_auth(self, email_address): # noqa: ANN201 + def interactive_auth(self, email_address): # type: ignore[no-untyped-def] # noqa: ANN201 imap_server_host = input("IMAP server host: ").strip() imap_server_port = input("IMAP server port: ").strip() or 993 imap_um = "IMAP username (empty for same as email address): " diff --git a/inbox/auth/google.py b/inbox/auth/google.py index 7308dd7d8..b8f5ceee2 100644 --- a/inbox/auth/google.py +++ b/inbox/auth/google.py @@ -17,17 +17,17 @@ @attr.s class GoogleAccountData: - email = attr.ib() + email = attr.ib() # type: ignore[var-annotated] - secret_type = attr.ib() - secret_value = attr.ib() + secret_type = attr.ib() # type: ignore[var-annotated] + secret_value = attr.ib() # type: ignore[var-annotated] - client_id = attr.ib() - scope = attr.ib() + client_id = attr.ib() # type: ignore[var-annotated] + scope = attr.ib() # type: ignore[var-annotated] - sync_email = attr.ib() - sync_contacts = attr.ib() - sync_events = attr.ib() + sync_email = attr.ib() # type: 
ignore[var-annotated] + sync_contacts = attr.ib() # type: ignore[var-annotated] + sync_events = attr.ib() # type: ignore[var-annotated] class GoogleAuthHandler(OAuthAuthHandler): @@ -48,17 +48,21 @@ class GoogleAuthHandler(OAuthAuthHandler): ] ) - def create_account(self, account_data: GoogleAccountData) -> GmailAccount: + def create_account( # type: ignore[override] + self, account_data: GoogleAccountData + ) -> GmailAccount: namespace = Namespace() - account = GmailAccount(namespace=namespace) + account = GmailAccount(namespace=namespace) # type: ignore[call-arg] account.create_emailed_events_calendar() account.sync_should_run = False return self.update_account(account, account_data) - def update_account( + def update_account( # type: ignore[override] self, account: GmailAccount, account_data: GoogleAccountData ) -> GmailAccount: - account.email_address = account_data.email + account.email_address = ( # type: ignore[method-assign] + account_data.email + ) if account_data.secret_type: account.set_secret( @@ -77,7 +81,9 @@ def update_account( return account - def interactive_auth(self, email_address=None): # noqa: ANN201 + def interactive_auth( # type: ignore[no-untyped-def] # noqa: ANN201 + self, email_address=None + ): url_args = { "redirect_uri": self.OAUTH_REDIRECT_URI, "client_id": self.OAUTH_CLIENT_ID, @@ -114,7 +120,7 @@ def interactive_auth(self, email_address=None): # noqa: ANN201 "\nInvalid authorization code, try again...\n" ) - def verify_account(self, account) -> bool: + def verify_account(self, account) -> bool: # type: ignore[no-untyped-def] """ Verify the credentials provided by logging in. Verify the account configuration -- specifically checks for the presence diff --git a/inbox/auth/microsoft.py b/inbox/auth/microsoft.py index 62705c415..e97f783dc 100644 --- a/inbox/auth/microsoft.py +++ b/inbox/auth/microsoft.py @@ -12,16 +12,16 @@ @attr.s class MicrosoftAccountData: - email = attr.ib() + email = attr.ib() # type: ignore[var-annotated] - secret_type = attr.ib() - secret_value = attr.ib() + secret_type = attr.ib() # type: ignore[var-annotated] + secret_value = attr.ib() # type: ignore[var-annotated] - client_id = attr.ib() - scope = attr.ib() + client_id = attr.ib() # type: ignore[var-annotated] + scope = attr.ib() # type: ignore[var-annotated] - sync_email = attr.ib() - sync_events = attr.ib() + sync_email = attr.ib() # type: ignore[var-annotated] + sync_events = attr.ib() # type: ignore[var-annotated] class MicrosoftAuthHandler(OAuthAuthHandler): @@ -49,19 +49,21 @@ class MicrosoftAuthHandler(OAuthAuthHandler): ] ) - def create_account( + def create_account( # type: ignore[override] self, account_data: MicrosoftAccountData ) -> OutlookAccount: namespace = Namespace() - account = OutlookAccount(namespace=namespace) + account = OutlookAccount(namespace=namespace) # type: ignore[call-arg] account.create_emailed_events_calendar() account.sync_should_run = False return self.update_account(account, account_data) - def update_account( + def update_account( # type: ignore[override] self, account: OutlookAccount, account_data: MicrosoftAccountData ) -> OutlookAccount: - account.email_address = account_data.email + account.email_address = ( # type: ignore[method-assign] + account_data.email + ) if account_data.secret_type: account.set_secret( @@ -79,7 +81,9 @@ def update_account( return account - def interactive_auth(self, email_address=None): # noqa: ANN201 + def interactive_auth( # type: ignore[no-untyped-def] # noqa: ANN201 + self, email_address=None + ): url_args = { 
"redirect_uri": self.OAUTH_REDIRECT_URI, "client_id": self.OAUTH_CLIENT_ID, diff --git a/inbox/auth/oauth.py b/inbox/auth/oauth.py index f53049697..5e3552d89 100644 --- a/inbox/auth/oauth.py +++ b/inbox/auth/oauth.py @@ -7,10 +7,17 @@ import pytz import requests -from authalligator_client.client import Client as AuthAlligatorApiClient -from authalligator_client.enums import AccountErrorCode, ProviderType -from authalligator_client.exceptions import AccountError -from imapclient import IMAPClient +from authalligator_client.client import ( # type: ignore[import-untyped] + Client as AuthAlligatorApiClient, +) +from authalligator_client.enums import ( # type: ignore[import-untyped] + AccountErrorCode, + ProviderType, +) +from authalligator_client.exceptions import ( # type: ignore[import-untyped] + AccountError, +) +from imapclient import IMAPClient # type: ignore[import-untyped] from inbox.config import config from inbox.exceptions import ( @@ -67,7 +74,9 @@ def _new_access_token_from_refresh_token( "Accept": "text/plain", } - account_logger = log.bind(account_id=account.id) + account_logger = log.bind( + account_id=account.id # type: ignore[attr-defined] + ) try: response = requests.post( @@ -150,7 +159,7 @@ def _new_access_token_from_authalligator( except AccountError as exc: log.warning( "AccountError during AuthAlligator account query", - account_id=account.id, + account_id=account.id, # type: ignore[attr-defined] error_code=exc.code and exc.code.value, error_message=exc.message, retry_in=exc.retry_in, @@ -217,14 +226,16 @@ def acquire_access_token( # noqa: D417 else: raise OAuthError("No supported secret found.") - def authenticate_imap_connection( + def authenticate_imap_connection( # type: ignore[override] self, account: OAuthAccount, conn: IMAPClient ) -> None: token = token_manager.get_token( account, force_refresh=False, scopes=account.email_scopes ) try: - conn.oauth2_login(account.email_address, token) + conn.oauth2_login( + account.email_address, token # type: ignore[attr-defined] + ) except IMAPClient.Error as original_exc: exc = _process_imap_exception(original_exc) @@ -238,7 +249,7 @@ def authenticate_imap_connection( log.warning( "Error during IMAP XOAUTH2 login", - account_id=account.id, + account_id=account.id, # type: ignore[attr-defined] error=exc, ) if not isinstance(exc, ImapSupportDisabledError): @@ -251,7 +262,9 @@ def authenticate_imap_connection( account, force_refresh=True, scopes=account.email_scopes ) try: - conn.oauth2_login(account.email_address, token) + conn.oauth2_login( + account.email_address, token # type: ignore[attr-defined] + ) except IMAPClient.Error as original_exc: exc = _process_imap_exception(original_exc) if ( @@ -265,7 +278,7 @@ def authenticate_imap_connection( "imap_disabled_for_account" ) from original_exc - def _get_user_info(self, session_dict): + def _get_user_info(self, session_dict): # type: ignore[no-untyped-def] access_token = session_dict["access_token"] assert self.OAUTH_USER_INFO_URL, "OAUTH_USER_INFO_URL is not defined" request = urllib.request.Request( @@ -287,7 +300,9 @@ def _get_user_info(self, session_dict): return {"email": userinfo_dict["EmailAddress"]} - def _get_authenticated_user(self, authorization_code): + def _get_authenticated_user( # type: ignore[no-untyped-def] + self, authorization_code + ): args = { "client_id": self.OAUTH_CLIENT_ID, "client_secret": self.OAUTH_CLIENT_SECRET, @@ -324,15 +339,15 @@ def _get_authenticated_user(self, authorization_code): class OAuthRequestsWrapper(requests.auth.AuthBase): """Helper 
class for setting the Authorization header on HTTP requests.""" - def __init__(self, token) -> None: + def __init__(self, token) -> None: # type: ignore[no-untyped-def] self.token = token - def __call__(self, r): # noqa: ANN204 + def __call__(self, r): # type: ignore[no-untyped-def] # noqa: ANN204 r.headers["Authorization"] = f"Bearer {self.token}" return r -def _process_imap_exception(exc): +def _process_imap_exception(exc): # type: ignore[no-untyped-def] message = exc.args[0] if exc.args else "" if "Lookup failed" in message: # Gmail is disabled for this apps account diff --git a/inbox/auth/utils.py b/inbox/auth/utils.py index aa4cfc73c..037832b77 100644 --- a/inbox/auth/utils.py +++ b/inbox/auth/utils.py @@ -1,6 +1,6 @@ import ssl -from imapclient import IMAPClient +from imapclient import IMAPClient # type: ignore[import-untyped] from inbox.exceptions import SSLNotSupportedError from inbox.logging import get_logger @@ -15,7 +15,7 @@ def safe_decode(message: str | bytes) -> str: return message -def auth_requires_app_password(exc): # noqa: ANN201 +def auth_requires_app_password(exc): # type: ignore[no-untyped-def] # noqa: ANN201 # Some servers require an application specific password, token, or # authorization code to login PREFIXES = ( # noqa: N806 @@ -27,7 +27,7 @@ def auth_requires_app_password(exc): # noqa: ANN201 return any(message.lower().startswith(msg.lower()) for msg in PREFIXES) -def auth_is_invalid(exc): # noqa: ANN201 +def auth_is_invalid(exc): # type: ignore[no-untyped-def] # noqa: ANN201 # IMAP doesn't really have error semantics, so we have to match the error # message against a list of known response strings to determine whether we # couldn't log in because the credentials are invalid, or because of some @@ -60,7 +60,7 @@ def auth_is_invalid(exc): # noqa: ANN201 ) -def create_imap_connection( # noqa: ANN201 +def create_imap_connection( # type: ignore[no-untyped-def] # noqa: ANN201 host, port, use_timeout: bool = True ): """ @@ -102,7 +102,7 @@ def create_imap_connection( # noqa: ANN201 return conn -def create_default_context(): # noqa: ANN201 +def create_default_context(): # type: ignore[no-untyped-def] # noqa: ANN201 """ Return a backports.ssl.SSLContext object configured with sensible default settings. This was adapted from imapclient.create_default_context diff --git a/inbox/config.py b/inbox/config.py index defc00b70..10f27917f 100644 --- a/inbox/config.py +++ b/inbox/config.py @@ -29,7 +29,9 @@ def is_live_env() -> bool: class ConfigError(Exception): - def __init__(self, error=None, help=None) -> None: + def __init__( # type: ignore[no-untyped-def] + self, error=None, help=None + ) -> None: self.error = error or "" self.help = ( help @@ -40,18 +42,20 @@ def __str__(self) -> str: return f"{self.error} {self.help}" -class Configuration(dict): - def __init__(self, *args, **kwargs) -> None: +class Configuration(dict): # type: ignore[type-arg] + def __init__( # type: ignore[no-untyped-def] + self, *args, **kwargs + ) -> None: dict.__init__(self, *args, **kwargs) - def get_required(self, key): + def get_required(self, key): # type: ignore[no-untyped-def] if key not in self: raise ConfigError(f"Missing config value for {key}.") return self[key] -def _update_config_from_env(config, env): +def _update_config_from_env(config, env): # type: ignore[no-untyped-def] """ Update a config dictionary from configuration files specified in the environment. 
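class Configuration(dict) draws # type: ignore[type-arg] because a bare dict base class is reported as 'Missing type parameters for generic type "dict"' when --disallow-any-generics (or strict mode) is enabled. A sketch of the parameterized alternative, assuming the Python 3.9+ builtin generics the patch already relies on elsewhere (e.g. str | bytes in inbox/auth/utils.py):

    from typing import Any

    class ConfigError(Exception):  # stand-in for the class defined above
        pass

    class Configuration(dict[str, Any]):
        def get_required(self, key: str) -> Any:
            if key not in self:
                raise ConfigError(f"Missing config value for {key}.")
            return self[key]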
@@ -112,7 +116,9 @@ def _update_config_from_env(config, env): raise -def _update_config_from_env_variables(config) -> None: +def _update_config_from_env_variables( # type: ignore[no-untyped-def] + config, +) -> None: flags = ( os.environ.get("FEATURE_FLAGS", "") or config.get("FEATURE_FLAGS", "") ).split() @@ -124,7 +130,7 @@ def _update_config_from_env_variables(config) -> None: config["CALENDAR_POLL_FREQUENCY"] = calendar_poll_frequencey -def _get_process_name(config) -> None: +def _get_process_name(config) -> None: # type: ignore[no-untyped-def] if os.environ.get("PROCESS_NAME") is not None: config["PROCESS_NAME"] = os.environ.get("PROCESS_NAME") diff --git a/inbox/console.py b/inbox/console.py index 3531d6a8a..3907da2ae 100755 --- a/inbox/console.py +++ b/inbox/console.py @@ -8,7 +8,7 @@ from inbox.models.session import global_session_scope -def user_console(user_email_address) -> None: +def user_console(user_email_address) -> None: # type: ignore[no-untyped-def] with global_session_scope() as db_session: result = ( db_session.query(Account) @@ -66,7 +66,9 @@ def user_console(user_email_address) -> None: IPython.embed(banner1=banner) -def start_console(user_email_address=None) -> None: +def start_console( # type: ignore[no-untyped-def] + user_email_address=None, +) -> None: # You can also do this with # $ python -m imapclient.interact -H -u ... # but we want to use our session and crispin so we're not. @@ -76,9 +78,13 @@ def start_console(user_email_address=None) -> None: IPython.embed() -def start_client_console(user_email_address=None) -> None: +def start_client_console( # type: ignore[no-untyped-def] + user_email_address=None, +) -> None: try: - from tests.system.client import NylasTestClient + from tests.system.client import ( # type: ignore[import-untyped] + NylasTestClient, + ) except ImportError: sys.exit( "You need to have the Nylas Python SDK installed to use this option." diff --git a/inbox/contacts/algorithms.py b/inbox/contacts/algorithms.py index 79b48cc44..1266a1b43 100644 --- a/inbox/contacts/algorithms.py +++ b/inbox/contacts/algorithms.py @@ -25,17 +25,19 @@ ## -def _get_message_weight(now, message_date): +def _get_message_weight(now, message_date): # type: ignore[no-untyped-def] timediff = now - message_date weight = 1 - (timediff.total_seconds() / LOOKBACK_TIME) return max(weight, MIN_MESSAGE_WEIGHT) -def _jaccard_similarity(set1, set2): +def _jaccard_similarity(set1, set2): # type: ignore[no-untyped-def] return len(set1.intersection(set2)) / float(len(set1.union(set2))) -def _get_participants(msg, excluded_emails=None): +def _get_participants( # type: ignore[no-untyped-def] + msg, excluded_emails=None +): """ Returns an alphabetically sorted list of emails addresses that msg was sent to (including cc and bcc) @@ -54,7 +56,7 @@ def _get_participants(msg, excluded_emails=None): # Not really an algorithm, but it seemed reasonable to put this here? 
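For readers skimming the `_jaccard_similarity` hunk above: the metric is |A ∩ B| / |A ∪ B|, and it is what the JACCARD_THRESHOLD comparison in `_combine_similar_molecules` later uses to decide whether two contact groups should merge. A quick standalone check:

def jaccard_similarity(set1: set[str], set2: set[str]) -> float:
    # Same computation as _jaccard_similarity in the hunk above.
    return len(set1 & set2) / float(len(set1 | set2))


# Two of four distinct members are shared, so similarity is 0.5.
assert jaccard_similarity({"a", "b", "c"}, {"b", "c", "d"}) == 0.5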
-def is_stale(last_updated, lifespan: int = 14): # noqa: ANN201 +def is_stale(last_updated, lifespan: int = 14): # type: ignore[no-untyped-def] # noqa: ANN201 """ last_updated is a datetime.datetime object lifespan is measured in days @@ -70,7 +72,7 @@ def is_stale(last_updated, lifespan: int = 14): # noqa: ANN201 ## -def calculate_contact_scores( # noqa: ANN201 +def calculate_contact_scores( # type: ignore[no-untyped-def] # noqa: ANN201 messages, time_dependent: bool = True ): now = datetime.datetime.now() @@ -86,7 +88,9 @@ def calculate_contact_scores( # noqa: ANN201 return res -def calculate_group_counts(messages, user_email): # noqa: ANN201 +def calculate_group_counts( # type: ignore[no-untyped-def] # noqa: ANN201 + messages, user_email +): """ Strips out most of the logic from calculate_group_scores algorithm and just returns raw counts for each group. @@ -99,7 +103,9 @@ def calculate_group_counts(messages, user_email): # noqa: ANN201 return res -def calculate_group_scores(messages, user_email): # noqa: ANN201 +def calculate_group_scores( # type: ignore[no-untyped-def] # noqa: ANN201 + messages, user_email +): """ This is a (modified) implementation of the algorithm described in this paper: http://mobisocial.stanford.edu/papers/iui11g.pdf @@ -114,7 +120,7 @@ def calculate_group_scores(messages, user_email): # noqa: ANN201 message_ids_to_scores: dict[str, float] = {} molecules_dict = defaultdict(set) # (emails, ...) -> {message ids, ...} - def get_message_list_weight(message_ids): + def get_message_list_weight(message_ids): # type: ignore[no-untyped-def] return sum(message_ids_to_scores[m_id] for m_id in message_ids) # Gather initial candidate social molecules @@ -154,7 +160,9 @@ def get_message_list_weight(message_ids): # Helper functions for calculating group scores -def _expand_molecule_pool(molecules_dict) -> None: +def _expand_molecule_pool( # type: ignore[no-untyped-def] + molecules_dict, +) -> None: mditems = [(set(g), msgs) for (g, msgs) in molecules_dict.items()] for i in range(len(mditems)): g1, m1 = mditems[i] @@ -167,7 +175,9 @@ def _expand_molecule_pool(molecules_dict) -> None: ) -def _subsume_molecules(molecules_list, get_message_list_weight): +def _subsume_molecules( # type: ignore[no-untyped-def] + molecules_list, get_message_list_weight +): molecules_list.sort(key=lambda x: len(x[0]), reverse=True) is_subsumed = [False] * len(molecules_list) mol_weights = [get_message_list_weight(m) for (_, m) in molecules_list] @@ -191,7 +201,7 @@ def _subsume_molecules(molecules_list, get_message_list_weight): return [ml for (ml, dead) in zip(molecules_list, is_subsumed) if not dead] -def _combine_similar_molecules(molecules_list): +def _combine_similar_molecules(molecules_list): # type: ignore[no-untyped-def] """Using a greedy approach here for speed""" # noqa: D401 new_guys_start_idx = 0 while new_guys_start_idx < len(molecules_list): @@ -205,7 +215,7 @@ def _combine_similar_molecules(molecules_list): js = _jaccard_similarity(g1, g2) if js > JACCARD_THRESHOLD: new_guys.append((g1.union(g2), m1.union(m2))) - combined[i], combined[j] = True, True + (combined[i], combined[j]) = (True, True) break molecules_list = [ diff --git a/inbox/contacts/carddav.py b/inbox/contacts/carddav.py index a8596b858..cb4e5fe75 100644 --- a/inbox/contacts/carddav.py +++ b/inbox/contacts/carddav.py @@ -20,7 +20,7 @@ """ # noqa: D404 -import lxml.etree as ET # noqa: N812 +import lxml.etree as ET # type: ignore[import-untyped] # noqa: N812 import requests # Fake it till you make it @@ -30,7 +30,7 @@ ) -def 
supports_carddav(url) -> None: +def supports_carddav(url) -> None: # type: ignore[no-untyped-def] """Basic verification that the endpoint supports CardDav""" # noqa: D401 response = requests.request( "OPTIONS", url, headers={"User-Agent": USER_AGENT, "Depth": "1"} @@ -43,14 +43,16 @@ def supports_carddav(url) -> None: class CardDav: """NOTE: Only supports iCloud for now""" - def __init__(self, email_address, password, base_url) -> None: + def __init__( # type: ignore[no-untyped-def] + self, email_address, password, base_url + ) -> None: self.session = requests.Session() self.session.auth = (email_address, password) self.session.verify = True # verify SSL certs self.session.headers.update({"User-Agent": USER_AGENT, "Depth": "1"}) self.base_url = base_url - def get_principal_url(self): # noqa: ANN201 + def get_principal_url(self): # type: ignore[no-untyped-def] # noqa: ANN201 """Use PROPFIND method to find the `principal` carddav url""" payload = """ @@ -69,7 +71,7 @@ def get_principal_url(self): # noqa: ANN201 principal_href = element[0][1][0][0][0].text return principal_href - def get_address_book_home(self, url): # noqa: ANN201 + def get_address_book_home(self, url): # type: ignore[no-untyped-def] # noqa: ANN201 payload = """ @@ -107,7 +109,7 @@ def get_address_book_home(self, url): # noqa: ANN201 # response.raise_for_status() # return response.content - def get_cards(self, url): # noqa: ANN201 + def get_cards(self, url): # type: ignore[no-untyped-def] # noqa: ANN201 payload = """ diff --git a/inbox/contacts/crud.py b/inbox/contacts/crud.py index 5e46ea4dc..c22b5ed3f 100644 --- a/inbox/contacts/crud.py +++ b/inbox/contacts/crud.py @@ -11,8 +11,8 @@ INBOX_PROVIDER_NAME = "inbox" -def create(namespace, db_session, name, email): # noqa: ANN201 - contact = Contact( +def create(namespace, db_session, name, email): # type: ignore[no-untyped-def] # noqa: ANN201 + contact = Contact( # type: ignore[call-arg] namespace=namespace, provider_name=INBOX_PROVIDER_NAME, uid=uuid.uuid4().hex, @@ -24,7 +24,9 @@ def create(namespace, db_session, name, email): # noqa: ANN201 return contact -def read(namespace, db_session, contact_public_id): # noqa: ANN201 +def read( # type: ignore[no-untyped-def] # noqa: ANN201 + namespace, db_session, contact_public_id +): return ( db_session.query(Contact) .filter( @@ -35,9 +37,13 @@ def read(namespace, db_session, contact_public_id): # noqa: ANN201 ) -def update(namespace, db_session, contact_public_id, name, email) -> Never: +def update( # type: ignore[no-untyped-def] + namespace, db_session, contact_public_id, name, email +) -> Never: raise NotImplementedError -def delete(namespace, db_session, contact_public_id) -> Never: +def delete( # type: ignore[no-untyped-def] + namespace, db_session, contact_public_id +) -> Never: raise NotImplementedError diff --git a/inbox/contacts/google.py b/inbox/contacts/google.py index 168b2a8ce..d9ce073df 100644 --- a/inbox/contacts/google.py +++ b/inbox/contacts/google.py @@ -41,7 +41,9 @@ class GoogleContactsProvider(AbstractContactsProvider): PROVIDER_NAME = "google" - def __init__(self, account_id, namespace_id) -> None: + def __init__( # type: ignore[no-untyped-def] + self, account_id, namespace_id + ) -> None: self.account_id = account_id self.namespace_id = namespace_id self.log = logger.new( @@ -50,20 +52,26 @@ def __init__(self, account_id, namespace_id) -> None: provider=self.PROVIDER_NAME, ) - def _get_google_client(self, retry_conn_errors: bool = True): + def _get_google_client( # type: ignore[no-untyped-def] + self, 
retry_conn_errors: bool = True + ): """Return the Google API client.""" with session_scope(self.namespace_id) as db_session: account = db_session.query(GmailAccount).get(self.account_id) db_session.expunge(account) access_token = token_manager.get_token(account) - token = gdata.gauth.AuthSubToken(access_token) # noqa: F821 - google_client = gdata.contacts.client.ContactsClient( # noqa: F821 + token = gdata.gauth.AuthSubToken( # type: ignore[name-defined] # noqa: F821 + access_token + ) + google_client = gdata.contacts.client.ContactsClient( # type: ignore[name-defined] # noqa: F821 source=SOURCE_APP_NAME ) google_client.auth_token = token return google_client - def _parse_contact_result(self, google_contact): + def _parse_contact_result( # type: ignore[no-untyped-def] + self, google_contact + ): """ Constructs a Contact object from a Google contact entry. @@ -118,7 +126,7 @@ def _parse_contact_result(self, google_contact): deleted = google_contact.deleted is not None - return Contact( + return Contact( # type: ignore[call-arg] namespace_id=self.namespace_id, uid=g_id, name=name, @@ -128,7 +136,7 @@ def _parse_contact_result(self, google_contact): raw_data=raw_data, ) - def get_items( # noqa: ANN201 + def get_items( # type: ignore[no-untyped-def] # noqa: ANN201 self, sync_from_dt=None, max_results: int = 100000 ): """ @@ -154,7 +162,9 @@ def get_items( # noqa: ANN201 insufficient permissions, respectively. """ # noqa: D401 - query = gdata.contacts.client.ContactsQuery() # noqa: F821 + query = ( + gdata.contacts.client.ContactsQuery() # type: ignore[name-defined] # noqa: F821 + ) # TODO(emfree): Implement batch fetching # Note: The Google contacts API will only return 25 results if # query.max_results is not explicitly set, so have to set it to a large @@ -170,7 +180,9 @@ def get_items( # noqa: ANN201 return [ self._parse_contact_result(result) for result in results ] - except gdata.client.RequestError as e: # noqa: F821 + except ( + gdata.client.RequestError # type: ignore[name-defined] # noqa: F821 + ) as e: if e.status == 503: self.log.info( "Ran into Google bot detection. 
Sleeping.", message=e @@ -181,7 +193,7 @@ def get_items( # noqa: ANN201 "contact sync request failure; retrying", message=e ) time.sleep(30 + random.randrange(0, 60)) - except gdata.client.Unauthorized: # noqa: F821 + except gdata.client.Unauthorized: # type: ignore[name-defined] # noqa: F821 self.log.warning( "Invalid access token; refreshing and retrying" ) diff --git a/inbox/contacts/icloud.py b/inbox/contacts/icloud.py index 1ab7f91a6..a126d608c 100644 --- a/inbox/contacts/icloud.py +++ b/inbox/contacts/icloud.py @@ -2,7 +2,7 @@ import contextlib -import lxml.etree as ET # noqa: N812 +import lxml.etree as ET # type: ignore[import-untyped] # noqa: N812 from inbox.contacts.abc import AbstractContactsProvider from inbox.contacts.carddav import supports_carddav @@ -26,7 +26,9 @@ class ICloudContactsProvider(AbstractContactsProvider): PROVIDER_NAME = "icloud" - def __init__(self, account_id, namespace_id) -> None: + def __init__( # type: ignore[no-untyped-def] + self, account_id, namespace_id + ) -> None: supports_carddav(ICLOUD_CONTACTS_URL) self.account_id = account_id self.namespace_id = namespace_id @@ -36,10 +38,14 @@ def __init__(self, account_id, namespace_id) -> None: provider=self.PROVIDER_NAME, ) - def _vCard_raw_to_contact(self, cardstring): # noqa: N802 + def _vCard_raw_to_contact( # type: ignore[no-untyped-def] # noqa: N802 + self, cardstring + ): card = vcard_from_string(cardstring) - def _x(key): # Ugly parsing helper for ugly formats + def _x( # type: ignore[no-untyped-def] + key, + ): # Ugly parsing helper for ugly formats if key in card: with contextlib.suppress(IndexError): return card[key][0][0] @@ -57,7 +63,7 @@ def _x(key): # Ugly parsing helper for ugly formats # phone_number = _x('TEL') # organization = _x('ORG') - return Contact( + return Contact( # type: ignore[call-arg] namespace_id=self.namespace_id, provider_name=self.PROVIDER_NAME, uid=uid, @@ -66,7 +72,7 @@ def _x(key): # Ugly parsing helper for ugly formats raw_data=cardstring, ) - def get_items( # noqa: ANN201 + def get_items( # type: ignore[no-untyped-def] # noqa: ANN201 self, sync_from_dt=None, max_results: int = 100000 ): with session_scope(self.namespace_id) as db_session: diff --git a/inbox/contacts/processing.py b/inbox/contacts/processing.py index 06212166d..b43e6c965 100644 --- a/inbox/contacts/processing.py +++ b/inbox/contacts/processing.py @@ -1,7 +1,7 @@ import uuid from typing import TYPE_CHECKING -from sqlalchemy.orm import Session +from sqlalchemy.orm import Session # type: ignore[import-untyped] from inbox.contacts.crud import INBOX_PROVIDER_NAME from inbox.models import ( @@ -44,10 +44,10 @@ def _get_contact_map( contact_map = {c._canonicalized_address: c for c in existing_contacts} for name, email_address in all_addresses: canonicalized_address = canonicalize(email_address) - if isinstance(name, list): - name = name[0].strip() + if isinstance(name, list): # type: ignore[unreachable] + name = name[0].strip() # type: ignore[unreachable] if canonicalized_address not in contact_map: - new_contact = Contact( + new_contact = Contact( # type: ignore[call-arg] name=name, email_address=email_address, namespace_id=namespace_id, @@ -65,7 +65,7 @@ def _get_contact_from_map( return None canonicalized_address = canonicalize(email_address) - contact = contact_map.get(canonicalized_address) + contact = contact_map.get(canonicalized_address) # type: ignore[arg-type] assert contact # Hackily address the condition that you get mail from e.g. 
@@ -73,7 +73,10 @@ def _get_contact_from_map( # "Christine Spang (via Google Drive) None: +def update_contacts_from_event( # type: ignore[no-untyped-def] + db_session, event, namespace_id +) -> None: with db_session.no_autoflush: # First create Contact objects for any email addresses that we haven't # seen yet. We want to dedupe by canonicalized address, so this part is @@ -158,7 +163,7 @@ def update_contacts_from_event(db_session, event, namespace_id) -> None: # delete any previous EventContactAssociation for the event db_session.execute( - EventContactAssociation.__table__.delete().where( + EventContactAssociation.__table__.delete().where( # type: ignore[attr-defined] EventContactAssociation.event_id == event.id ) ) @@ -193,5 +198,7 @@ def update_contacts_from_event(db_session, event, namespace_id) -> None: if values: db_session.execute( - EventContactAssociation.__table__.insert().values(values) + EventContactAssociation.__table__.insert().values( # type: ignore[attr-defined] + values + ) ) diff --git a/inbox/contacts/remote_sync.py b/inbox/contacts/remote_sync.py index 8c9df79a1..f4b79916a 100644 --- a/inbox/contacts/remote_sync.py +++ b/inbox/contacts/remote_sync.py @@ -3,7 +3,7 @@ from datetime import datetime from typing import Literal -from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.orm.exc import NoResultFound # type: ignore[import-untyped] from inbox.contacts.abc import AbstractContactsProvider from inbox.contacts.google import GoogleContactsProvider @@ -45,7 +45,7 @@ class ContactSync(BaseSyncMonitor): """ - def __init__( + def __init__( # type: ignore[no-untyped-def] self, email_address, provider_name, @@ -72,7 +72,7 @@ def __init__( scope="contacts", ) - def sync(self) -> None: + def sync(self) -> None: # type: ignore[override] """ Query a remote provider for updates and persist them to the database. This function runs every `self.poll_frequency`. 
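The `# type: ignore[override]` on `ContactSync.sync` is worth a note: mypy raises `override` when a subclass changes an inherited signature in a way that breaks substitutability, which suggests `BaseSyncMonitor` declares `sync` differently. A minimal illustration with generic names (not the sync-engine class hierarchy):

class Base:
    def sync(self, force: bool = False) -> None:
        pass


class Child(Base):
    # mypy: Signature of "sync" incompatible with supertype "Base" [override]
    def sync(self) -> None:  # type: ignore[override]
        pass

The ignore papers over the mismatch; the durable fix is aligning the subclass signature with the base class.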
@@ -86,12 +86,14 @@ def sync(self) -> None: account = db_session.query(Account).get(self.account_id) last_sync_dt = account.last_synced_contacts - all_contacts = self.provider.get_items(sync_from_dt=last_sync_dt) + all_contacts = self.provider.get_items( # type: ignore[var-annotated] + sync_from_dt=last_sync_dt + ) # Do a batch insertion of every 100 contact objects - change_counter: typing.Counter[ - Literal["deleted", "updated", "added"] - ] = Counter() + change_counter: ( # type: ignore[unreachable] + typing.Counter[Literal["deleted", "updated", "added"]] + ) = Counter() for new_contact in all_contacts: new_contact.namespace = account.namespace assert ( @@ -145,7 +147,9 @@ def sync(self) -> None: db_session.commit() # Update last sync - with session_scope(self.namespace_id) as db_session: + with session_scope( # type: ignore[unreachable] + self.namespace_id + ) as db_session: account = db_session.query(Account).get(self.account_id) account.last_synced_contacts = sync_timestamp diff --git a/inbox/contacts/vcard.py b/inbox/contacts/vcard.py index 79ade36ab..8926d8b25 100644 --- a/inbox/contacts/vcard.py +++ b/inbox/contacts/vcard.py @@ -31,13 +31,13 @@ import sys from collections import defaultdict -import vobject +import vobject # type: ignore[import-untyped] NTEXT = "\x1b[0m" BTEXT = "\x1b[1m" -def fix_vobject(vcard): # noqa: ANN201 +def fix_vobject(vcard): # type: ignore[no-untyped-def] # noqa: ANN201 """ Trying to fix some more or less common errors in vcards @@ -54,7 +54,7 @@ def fix_vobject(vcard): # noqa: ANN201 return vcard -def vcard_from_vobject(vcard): # noqa: ANN201 +def vcard_from_vobject(vcard): # type: ignore[no-untyped-def] # noqa: ANN201 vcard = fix_vobject(vcard) vdict = VCard() if vcard.name != "VCARD": @@ -75,7 +75,7 @@ def vcard_from_vobject(vcard): # noqa: ANN201 return vdict -def vcard_from_string(vcard_string): # noqa: ANN201 +def vcard_from_string(vcard_string): # type: ignore[no-untyped-def] # noqa: ANN201 """ vcard_string: str returns VCard() @@ -87,7 +87,7 @@ def vcard_from_string(vcard_string): # noqa: ANN201 return vcard_from_vobject(vcard) -class VCard(defaultdict): +class VCard(defaultdict): # type: ignore[type-arg] """ internal representation of a VCard. 
This is dict with some associated methods, @@ -110,41 +110,41 @@ def __init__(self, ddict: str = "") -> None: if ddict == "": defaultdict.__init__(self, list) else: - defaultdict.__init__(self, list, ddict) + defaultdict.__init__(self, list, ddict) # type: ignore[arg-type] self.href = "" self.account = "" self.etag = "" self.edited = 0 - def serialize(self): # noqa: ANN201 + def serialize(self): # type: ignore[no-untyped-def] # noqa: ANN201 return repr(list(self.items())) @property - def name(self): # noqa: ANN201 + def name(self): # type: ignore[no-untyped-def] # noqa: ANN201 return str(self["N"][0][0]) if self["N"] else "" @name.setter - def name(self, value) -> None: + def name(self, value) -> None: # type: ignore[no-untyped-def] if not self["N"]: self["N"] = [("", {})] self["N"][0][0] = value @property - def fname(self): # noqa: ANN201 + def fname(self): # type: ignore[no-untyped-def] # noqa: ANN201 return str(self["FN"][0][0]) if self["FN"] else "" @fname.setter - def fname(self, value) -> None: + def fname(self, value) -> None: # type: ignore[no-untyped-def] self["FN"][0] = (value, {}) - def alt_keys(self): # noqa: ANN201 + def alt_keys(self): # type: ignore[no-untyped-def] # noqa: ANN201 keylist = list(self) for one in [x for x in ["FN", "N", "VERSION"] if x in keylist]: keylist.remove(one) keylist.sort() return keylist - def print_email(self): # noqa: ANN201 + def print_email(self): # type: ignore[no-untyped-def] # noqa: ANN201 """Prints only name, email and type for use with mutt""" # noqa: D401 collector = list() try: @@ -158,7 +158,7 @@ def print_email(self): # noqa: ANN201 except KeyError: return "" - def print_tel(self): # noqa: ANN201 + def print_tel(self): # type: ignore[no-untyped-def] # noqa: ANN201 """Prints only name, email and type for use with mutt""" # noqa: D401 collector = list() try: @@ -173,14 +173,14 @@ def print_tel(self): # noqa: ANN201 return "" @property - def pretty(self): # noqa: ANN201 + def pretty(self): # type: ignore[no-untyped-def] # noqa: ANN201 return self._pretty_base(self.alt_keys()) @property - def pretty_min(self): # noqa: ANN201 + def pretty_min(self): # type: ignore[no-untyped-def] # noqa: ANN201 return self._pretty_base(["TEL", "EMAIL"]) - def _pretty_base(self, keylist): + def _pretty_base(self, keylist): # type: ignore[no-untyped-def] collector = list() if sys.stdout.isatty(): collector.append("\n" + BTEXT + "Name: " + self.fname + NTEXT) @@ -196,7 +196,7 @@ def _pretty_base(self, keylist): collector.append(line) return "\n".join(collector) - def _line_helper(self, line): + def _line_helper(self, line): # type: ignore[no-untyped-def] collector = list() for key in line[1].keys(): collector.append(key + "=" + ",".join(line[1][key])) @@ -206,7 +206,7 @@ def _line_helper(self, line): return ";" + ";".join(collector) @property - def vcf(self): # noqa: ANN201 + def vcf(self): # type: ignore[no-untyped-def] # noqa: ANN201 """ Serialize to VCARD as specified in RFC2426, if no UID is specified yet, one will be added (as a UID is mandatory @@ -216,7 +216,7 @@ def vcf(self): # noqa: ANN201 import random import string - def generate_random_uid(): + def generate_random_uid(): # type: ignore[no-untyped-def] """ Generate a random uid, when random isn't broken, getting a random UID from a pool of roughly 10^56 should be good enough diff --git a/inbox/crispin.py b/inbox/crispin.py index 793af3381..89c41e29a 100644 --- a/inbox/crispin.py +++ b/inbox/crispin.py @@ -9,22 +9,22 @@ from collections.abc import Callable, Iterable from typing import Any, NamedTuple 
-import imapclient -import imapclient.exceptions -import imapclient.imap_utf7 -import imapclient.imapclient -import imapclient.response_parser +import imapclient # type: ignore[import-untyped] +import imapclient.exceptions # type: ignore[import-untyped] +import imapclient.imap_utf7 # type: ignore[import-untyped] +import imapclient.imapclient # type: ignore[import-untyped] +import imapclient.response_parser # type: ignore[import-untyped] from inbox import interruptible_threading from inbox.constants import MAX_MESSAGE_BODY_LENGTH # Prevent "got more than 1000000 bytes" errors for servers that send more data. -imaplib._MAXLINE = 10000000 # type: ignore +imaplib._MAXLINE = 10000000 # type: ignore[attr-defined] # Even though RFC 2060 says that the date component must have two characters # (either two digits or space+digit), it seems that some IMAP servers only # return one digit. Fun times. -imaplib.InternalDate = re.compile( # type: ignore +imaplib.InternalDate = re.compile( # type: ignore[attr-defined] r'.*INTERNALDATE "' r"(?P[ 0123]?[0-9])-" # insert that `?` to make first digit optional r"(?P[A-Z][a-z][a-z])-" @@ -44,7 +44,9 @@ from email.parser import HeaderParser # noqa: E402 from threading import BoundedSemaphore # noqa: E402 -from sqlalchemy.orm import joinedload # noqa: E402 +from sqlalchemy.orm import ( # type: ignore[import-untyped] # noqa: E402 + joinedload, +) from inbox.exceptions import GmailSettingError # noqa: E402 from inbox.folder_edge_cases import localized_folder_names # noqa: E402 @@ -129,7 +131,9 @@ class DraftDeletionException(Exception): pass -def _get_connection_pool(account_id, pool_size, pool_map, readonly): +def _get_connection_pool( # type: ignore[no-untyped-def] + account_id, pool_size, pool_map, readonly +): with _lock_map[account_id]: if account_id not in pool_map: pool_map[account_id] = CrispinConnectionPool( @@ -141,7 +145,9 @@ def _get_connection_pool(account_id, pool_size, pool_map, readonly): _pool_map: dict[int, "CrispinConnectionPool"] = {} -def connection_pool(account_id, pool_size=None): +def connection_pool( # type: ignore[no-untyped-def] + account_id, pool_size=None +): """ Per-account crispin connection pool. @@ -169,7 +175,9 @@ def connection_pool(account_id, pool_size=None): _writable_pool_map: dict[int, "CrispinConnectionPool"] = {} -def writable_connection_pool(account_id, pool_size: int = 1): +def writable_connection_pool( # type: ignore[no-untyped-def] + account_id, pool_size: int = 1 +): """ Per-account crispin connection pool, with *read-write* connections. @@ -220,7 +228,9 @@ class CrispinConnectionPool: """ - def __init__(self, account_id, num_connections, readonly) -> None: + def __init__( # type: ignore[no-untyped-def] + self, account_id, num_connections, readonly + ) -> None: log.info( "Creating Crispin connection pool", account_id=account_id, @@ -236,20 +246,22 @@ def __init__(self, account_id, num_connections, readonly) -> None: self._sem = BoundedSemaphore(num_connections) self._set_account_info() - def _should_timeout_connection(self): + def _should_timeout_connection(self): # type: ignore[no-untyped-def] # Writable pools don't need connection timeouts because # SyncbackBatchTasks properly scope the IMAP connection across its # constituent SyncbackTasks. 
return self.readonly - def _logout(self, client) -> None: + def _logout(self, client) -> None: # type: ignore[no-untyped-def] try: client.logout() except Exception: log.info("Error on IMAP logout", exc_info=True) @contextlib.contextmanager - def get(self, *, timeout: "float | None" = None): + def get( # type: ignore[no-untyped-def] + self, *, timeout: "float | None" = None + ): """ Get a connection from the pool, or instantiate a new one if needed. @@ -316,7 +328,7 @@ def _set_account_info(self) -> None: else: self.client_cls = CrispinClient - def _new_raw_connection(self): + def _new_raw_connection(self): # type: ignore[no-untyped-def] """Returns a new, authenticated IMAPClient instance for the account.""" # noqa: D401 from inbox.auth.google import GoogleAuthHandler from inbox.auth.microsoft import MicrosoftAuthHandler @@ -338,7 +350,7 @@ def _new_raw_connection(self): account, self._should_timeout_connection() ) - def _new_connection(self): + def _new_connection(self): # type: ignore[no-untyped-def] conn = self._new_raw_connection() return self.client_cls( self.account_id, @@ -349,7 +361,7 @@ def _new_connection(self): ) -def _exc_callback(exc) -> None: +def _exc_callback(exc) -> None: # type: ignore[no-untyped-def] log.info( "Connection broken with error; retrying with new connection", exc_info=True, @@ -588,19 +600,19 @@ def select_folder( return uidvalidity_callback(self.account_id, folder_name, select_info) @property - def selected_folder_name(self): # noqa: ANN201 + def selected_folder_name(self): # type: ignore[no-untyped-def] # noqa: ANN201 return or_none(self.selected_folder, lambda f: f[0]) @property - def selected_folder_info(self): # noqa: ANN201 + def selected_folder_info(self): # type: ignore[no-untyped-def] # noqa: ANN201 return or_none(self.selected_folder, lambda f: f[1]) @property - def selected_uidvalidity(self): # noqa: ANN201 + def selected_uidvalidity(self): # type: ignore[no-untyped-def] # noqa: ANN201 return or_none(self.selected_folder_info, lambda i: i[b"UIDVALIDITY"]) @property - def selected_uidnext(self): # noqa: ANN201 + def selected_uidnext(self): # type: ignore[no-untyped-def] # noqa: ANN201 return or_none(self.selected_folder_info, lambda i: i.get(b"UIDNEXT")) @property @@ -620,12 +632,12 @@ def folder_prefix(self) -> str: # Unfortunately, some servers don't support the NAMESPACE command. # In this case, assume that there's no folder prefix. if self.conn.has_capability("NAMESPACE"): - folder_prefix, __ = self.conn.namespace()[0][0] + (folder_prefix, __) = self.conn.namespace()[0][0] return folder_prefix else: return "" - def sync_folders(self): # noqa: ANN201 + def sync_folders(self): # type: ignore[no-untyped-def] # noqa: ANN201 # () -> List[str] """ List of folders to sync, in order of sync priority. 
Currently, that @@ -847,7 +859,7 @@ def _process_folder( return RawFolder(display_name=display_name, role=role) - def create_folder(self, name) -> None: + def create_folder(self, name) -> None: # type: ignore[no-untyped-def] interruptible_threading.check_interrupted() self.conn.create_folder(name) @@ -871,7 +883,7 @@ def search_uids(self, criteria: list[str]) -> Iterable[int]: """ interruptible_threading.check_interrupted() return ( - int(uid) if not isinstance(uid, int) else uid + (int(uid) if not isinstance(uid, int) else uid) for uid in self.conn.search(criteria) ) @@ -929,7 +941,11 @@ def all_uids(self) -> Iterable[int]: elapsed = time.time() - t log.debug("Requested all UIDs", search_time=elapsed) return ( - int(uid) if not isinstance(uid, int) else uid + ( + int(uid) + if not isinstance(uid, int) # type: ignore[redundant-expr] + else uid + ) for uid in fetch_result ) @@ -1039,7 +1055,7 @@ def flags(self, uids: list[int]) -> dict[int, GmailFlags | Flags]: if uid in uid_set } - def delete_uids(self, uids) -> None: + def delete_uids(self, uids) -> None: # type: ignore[no-untyped-def] uids = [str(u) for u in uids] interruptible_threading.check_interrupted() @@ -1048,14 +1064,16 @@ def delete_uids(self, uids) -> None: interruptible_threading.check_interrupted() self.conn.expunge() - def set_starred(self, uids, starred) -> None: + def set_starred( # type: ignore[no-untyped-def] + self, uids, starred + ) -> None: interruptible_threading.check_interrupted() if starred: self.conn.add_flags(uids, ["\\Flagged"], silent=True) else: self.conn.remove_flags(uids, ["\\Flagged"], silent=True) - def set_unread(self, uids, unread) -> None: + def set_unread(self, uids, unread) -> None: # type: ignore[no-untyped-def] uids = [str(u) for u in uids] interruptible_threading.check_interrupted() @@ -1064,7 +1082,9 @@ def set_unread(self, uids, unread) -> None: else: self.conn.add_flags(uids, ["\\Seen"], silent=True) - def save_draft(self, message, date=None) -> None: + def save_draft( # type: ignore[no-untyped-def] + self, message, date=None + ) -> None: assert ( self.selected_folder_name in self.folder_names()["drafts"] ), f"Must select a drafts folder first ({self.selected_folder_name})" @@ -1074,7 +1094,9 @@ def save_draft(self, message, date=None) -> None: self.selected_folder_name, message, ["\\Draft", "\\Seen"], date ) - def create_message(self, message, date=None): # noqa: ANN201 + def create_message( # type: ignore[no-untyped-def] # noqa: ANN201 + self, message, date=None + ): """ Create a message on the server. Only used to fix server-side bugs, like iCloud not saving Sent messages. 
@@ -1104,7 +1126,9 @@ def fetch_headers( headers.update(self.conn.fetch(uid_chunk, ["BODY.PEEK[HEADER]"])) return headers - def find_by_header(self, header_name, header_value): # noqa: ANN201 + def find_by_header( # type: ignore[no-untyped-def] # noqa: ANN201 + self, header_name, header_value + ): """Find all uids in the selected folder with the given header value.""" all_uids = self.all_uids() # It would be nice to just search by header too, but some backends @@ -1124,7 +1148,7 @@ def find_by_header(self, header_name, header_value): # noqa: ANN201 return results - def delete_sent_message( # noqa: ANN201 + def delete_sent_message( # type: ignore[no-untyped-def] # noqa: ANN201 self, message_id_header, delete_multiple: bool = False ): """ @@ -1152,7 +1176,7 @@ def delete_sent_message( # noqa: ANN201 self._delete_message(message_id_header, delete_multiple) return msg_deleted - def delete_draft(self, message_id_header): # noqa: ANN201 + def delete_draft(self, message_id_header): # type: ignore[no-untyped-def] # noqa: ANN201 """ Delete a draft, as identified by its Message-Id header. We first delete the message from the Drafts folder, @@ -1179,7 +1203,7 @@ def delete_draft(self, message_id_header): # noqa: ANN201 self._delete_message(message_id_header) return draft_deleted - def _delete_message( + def _delete_message( # type: ignore[no-untyped-def] self, message_id_header, delete_multiple: bool = False ) -> bool: """ @@ -1214,7 +1238,7 @@ def logout(self) -> None: interruptible_threading.check_interrupted() self.conn.logout() - def idle(self, timeout: int): # noqa: ANN201 + def idle(self, timeout: int): # type: ignore[no-untyped-def] # noqa: ANN201 """ Idle for up to `timeout` seconds. Make sure we take the connection back out of idle mode so that we can reuse this connection in another @@ -1235,7 +1259,7 @@ def idle(self, timeout: int): # noqa: ANN201 if responses: break - return responses + return responses # type: ignore[possibly-undefined] def condstore_changed_flags( self, modseq: int @@ -1513,7 +1537,7 @@ def uids(self, uids: list[int]) -> list[RawMessage]: ) return raw_messages - def g_metadata(self, uids): # noqa: ANN201 + def g_metadata(self, uids): # type: ignore[no-untyped-def] # noqa: ANN201 """ Download Gmail MSGIDs, THRIDs, and message sizes for the given uids. @@ -1545,7 +1569,7 @@ def g_metadata(self, uids): # noqa: ANN201 if uid in uid_set } - def expand_thread(self, g_thrid): # noqa: ANN201 + def expand_thread(self, g_thrid): # type: ignore[no-untyped-def] # noqa: ANN201 """ Find all message UIDs in the selected folder with X-GM-THRID equal to g_thrid. @@ -1560,14 +1584,18 @@ def expand_thread(self, g_thrid): # noqa: ANN201 # UIDs ascend over time; return in order most-recent first return sorted(uids, reverse=True) - def find_by_header(self, header_name, header_value): # noqa: ANN201 + def find_by_header( # type: ignore[no-untyped-def] # noqa: ANN201 + self, header_name, header_value + ): interruptible_threading.check_interrupted() return self.conn.search(["HEADER", header_name, header_value]) - def _decode_labels(self, labels): + def _decode_labels(self, labels): # type: ignore[no-untyped-def] return [imapclient.imap_utf7.decode(label) for label in labels] - def delete_draft(self, message_id_header) -> bool: + def delete_draft( # type: ignore[no-untyped-def] + self, message_id_header + ) -> bool: """ Delete a message in the drafts folder, as identified by the Message-Id header. 
This overrides the parent class's method because gmail has @@ -1645,7 +1673,7 @@ def delete_draft(self, message_id_header) -> bool: self.conn.expunge() return True - def delete_sent_message( + def delete_sent_message( # type: ignore[no-untyped-def] self, message_id_header, delete_multiple: bool = False ) -> bool: """ @@ -1744,7 +1772,9 @@ def search_uids(self, criteria: list[str]) -> Iterable[int]: original_msg=m.group(1), criteria=( f'"{criteria}"' - if not isinstance(criteria, list) + if not isinstance( # type: ignore[redundant-expr] + criteria, list + ) else criteria ), ) @@ -1755,5 +1785,5 @@ def search_uids(self, criteria: list[str]) -> Iterable[int]: response = imapclient.response_parser.parse_message_list(data) return ( - int(uid) if not isinstance(uid, int) else uid for uid in response + (int(uid) if not isinstance(uid, int) else uid) for uid in response ) diff --git a/inbox/error_handling.py b/inbox/error_handling.py index 618324303..181aa6b24 100644 --- a/inbox/error_handling.py +++ b/inbox/error_handling.py @@ -3,9 +3,9 @@ import os import sys -import rollbar +import rollbar # type: ignore[import-untyped] import structlog -from rollbar.logger import RollbarHandler +from rollbar.logger import RollbarHandler # type: ignore[import-untyped] from inbox.logging import create_error_log_context, get_logger @@ -15,7 +15,7 @@ class SyncEngineRollbarHandler(RollbarHandler): - def emit(self, record): # noqa: ANN201 + def emit(self, record): # type: ignore[no-untyped-def] # noqa: ANN201 try: data = json.loads(record.msg) except ValueError: @@ -38,7 +38,9 @@ def emit(self, record): # noqa: ANN201 return super().emit(record) -def log_uncaught_errors(logger=None, **kwargs) -> None: # noqa: D417 +def log_uncaught_errors( # type: ignore[no-untyped-def] # noqa: D417 + logger=None, **kwargs +) -> None: """ Helper to log uncaught exceptions. @@ -77,7 +79,7 @@ def log_uncaught_errors(logger=None, **kwargs) -> None: # noqa: D417 ] -def payload_handler(payload, **kw): # noqa: ANN201 +def payload_handler(payload, **kw): # type: ignore[no-untyped-def] # noqa: ANN201 title = payload["data"].get("title") exception = ( payload["data"].get("body", {}).get("trace", {}).get("exception", {}) diff --git a/inbox/events/actions/backends/gmail.py b/inbox/events/actions/backends/gmail.py index 9f88c22b4..49439d63e 100644 --- a/inbox/events/actions/backends/gmail.py +++ b/inbox/events/actions/backends/gmail.py @@ -7,7 +7,9 @@ __all__ = ["remote_create_event", "remote_update_event", "remote_delete_event"] -def remote_create_event(account, event, db_session, extra_args) -> None: +def remote_create_event( # type: ignore[no-untyped-def] + account, event, db_session, extra_args +) -> None: provider = GoogleEventsProvider(account.id, account.namespace.id) result = provider.create_remote_event(event, **extra_args) # The events crud API assigns a random uid to an event when creating it. 
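Many ignores in the events/google.py hunks that follow are `possibly-undefined`. That optional mypy error code flags reads of a variable that is only conditionally bound; the retry loops there read `r` inside `except` blocks, where mypy cannot prove the assignment succeeded even though `raise_for_status()` only runs after `r` is bound. A compact reproduction (hypothetical fetch helper, not the patch's code):

import requests


def fetch_with_retry(url: str) -> requests.Response:
    for _ in range(3):
        try:
            r = requests.get(url)
            r.raise_for_status()
            return r
        except requests.HTTPError:
            # A human can see `r` is bound here (HTTPError only comes from
            # raise_for_status), but mypy's possibly-undefined check cannot.
            if r.status_code in (500, 503):  # type: ignore[possibly-undefined]
                continue
            raise
    raise RuntimeError("retries exhausted")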
@@ -16,12 +18,14 @@ def remote_create_event(account, event, db_session, extra_args) -> None: db_session.commit() -def remote_update_event(account, event, db_session, extra_args) -> None: +def remote_update_event( # type: ignore[no-untyped-def] + account, event, db_session, extra_args +) -> None: provider = GoogleEventsProvider(account.id, account.namespace.id) provider.update_remote_event(event, **extra_args) -def remote_delete_event( +def remote_delete_event( # type: ignore[no-untyped-def] account, event_uid, calendar_name, calendar_uid, db_session, extra_args ) -> None: provider = GoogleEventsProvider(account.id, account.namespace.id) diff --git a/inbox/events/actions/base.py b/inbox/events/actions/base.py index 2c90dc29a..5a88f69c3 100644 --- a/inbox/events/actions/base.py +++ b/inbox/events/actions/base.py @@ -5,7 +5,9 @@ from inbox.models.session import session_scope -def create_event(account_id, event_id, extra_args) -> None: +def create_event( # type: ignore[no-untyped-def] + account_id, event_id, extra_args +) -> None: with session_scope(account_id) as db_session: account = db_session.query(Account).get(account_id) event = db_session.query(Event).get(event_id) @@ -36,7 +38,9 @@ def create_event(account_id, event_id, extra_args) -> None: send_invite(ical_file, event, account, invite_type="cancel") -def update_event(account_id, event_id, extra_args) -> None: +def update_event( # type: ignore[no-untyped-def] + account_id, event_id, extra_args +) -> None: with session_scope(account_id) as db_session: account = db_session.query(Account).get(account_id) event = db_session.query(Event).get(event_id) @@ -70,7 +74,9 @@ def update_event(account_id, event_id, extra_args) -> None: db_session.commit() -def delete_event(account_id, event_id, extra_args) -> None: +def delete_event( # type: ignore[no-untyped-def] + account_id, event_id, extra_args +) -> None: with session_scope(account_id) as db_session: account = db_session.query(Account).get(account_id) event = db_session.query(Event).get(event_id) diff --git a/inbox/events/google.py b/inbox/events/google.py index 59c797eca..da6513e73 100644 --- a/inbox/events/google.py +++ b/inbox/events/google.py @@ -9,7 +9,7 @@ import uuid from typing import Any -import arrow +import arrow # type: ignore[import-untyped] import attrs import attrs.validators import requests @@ -161,7 +161,9 @@ def _get_raw_events( else: raise - def _get_resource_list(self, url: str, **params) -> list[dict[str, Any]]: + def _get_resource_list( # type: ignore[no-untyped-def] + self, url: str, **params + ) -> list[dict[str, Any]]: """Handles response pagination.""" # noqa: D401 token = self._get_access_token() items = [] @@ -192,34 +194,42 @@ def _get_resource_list(self, url: str, **params) -> list[dict[str, Any]]: except requests.HTTPError as e: self.log.warning( "HTTP error making Google Calendar API request", - url=r.url, - response=r.content, - status=r.status_code, + url=r.url, # type: ignore[possibly-undefined] + response=r.content, # type: ignore[possibly-undefined] + status=r.status_code, # type: ignore[possibly-undefined] ) - if r.status_code == 401: + if r.status_code == 401: # type: ignore[possibly-undefined] self.log.warning( "Invalid access token; refreshing and retrying", - url=r.url, - response=r.content, - status=r.status_code, + url=r.url, # type: ignore[possibly-undefined] + response=r.content, # type: ignore[possibly-undefined] + status=r.status_code, # type: ignore[possibly-undefined] ) token = self._get_access_token(force_refresh=True) continue - elif 
r.status_code in (500, 503): + elif r.status_code in ( # type: ignore[possibly-undefined] + 500, + 503, + ): self.log.warning("Backend error in calendar API; retrying") time.sleep(30 + random.randrange(0, 60)) continue - elif r.status_code == 403: + elif r.status_code == 403: # type: ignore[possibly-undefined] try: - reason = r.json()["error"]["errors"][0]["reason"] + reason = r.json()[ # type: ignore[possibly-undefined] + "error" + ]["errors"][0]["reason"] except (KeyError, ValueError): self.log.error( "Couldn't parse API error response", - response=r.content, - status=r.status_code, + response=r.content, # type: ignore[possibly-undefined] + status=r.status_code, # type: ignore[possibly-undefined] ) - r.raise_for_status() - if reason == "userRateLimitExceeded": + r.raise_for_status() # type: ignore[possibly-undefined] + if ( + reason # type: ignore[possibly-undefined] + == "userRateLimitExceeded" + ): self.log.warning( "API request was rate-limited; retrying" ) @@ -233,7 +243,7 @@ def _get_resource_list(self, url: str, **params) -> list[dict[str, Any]]: # Unexpected error; raise. raise - def _make_event_request( + def _make_event_request( # type: ignore[no-untyped-def] self, method: str, calendar_uid: str, @@ -251,7 +261,9 @@ def _make_event_request( ) return response - def create_remote_event(self, event, **kwargs): # noqa: ANN201 + def create_remote_event( # type: ignore[no-untyped-def] # noqa: ANN201 + self, event, **kwargs + ): data = _dump_event(event) params = {} @@ -268,7 +280,9 @@ def create_remote_event(self, event, **kwargs): # noqa: ANN201 response.raise_for_status() return response.json() - def update_remote_event(self, event, **kwargs) -> None: + def update_remote_event( # type: ignore[no-untyped-def] + self, event, **kwargs + ) -> None: data = _dump_event(event) params = {} @@ -284,7 +298,9 @@ def update_remote_event(self, event, **kwargs) -> None: # All non-200 statuses are considered errors response.raise_for_status() - def delete_remote_event(self, calendar_uid, event_uid, **kwargs) -> None: + def delete_remote_event( # type: ignore[no-untyped-def] + self, calendar_uid, event_uid, **kwargs + ) -> None: params = {} if kwargs.get("notify_participants") is True: @@ -310,7 +326,7 @@ def delete_remote_event(self, calendar_uid, event_uid, **kwargs) -> None: # -------- logic for push notification subscriptions -------- # - def _get_access_token_for_push_notifications( + def _get_access_token_for_push_notifications( # type: ignore[no-untyped-def] self, account, force_refresh: bool = False ): if not self.webhook_notifications_enabled(account): @@ -318,7 +334,10 @@ def _get_access_token_for_push_notifications( return token_manager.get_token(account, force_refresh) def webhook_notifications_enabled(self, account: Account) -> bool: - return account.get_client_info()[0] in WEBHOOK_ENABLED_CLIENT_IDS + return ( + account.get_client_info()[0] # type: ignore[attr-defined] + in WEBHOOK_ENABLED_CLIENT_IDS + ) def watch_calendar_list( self, account: Account @@ -481,7 +500,10 @@ def _handle_watch_errors(self, r: requests.Response) -> None: response=r.content, status=r.status_code, ) - if reason == "userRateLimitExceeded": + if ( + reason # type: ignore[possibly-undefined] + == "userRateLimitExceeded" + ): # Sleep before proceeding (naive backoff) time.sleep(30 + random.randrange(0, 60)) self.log.warning("API request was rate-limited") @@ -528,7 +550,7 @@ def parse_calendar_response( # noqa: D417 read_only = False description = calendar.get("description") - return Calendar( + return Calendar( 
# type: ignore[call-arg] uid=uid, name=name, read_only=read_only, description=description ) @@ -543,12 +565,16 @@ def parse_calendar_response( # noqa: D417 @attrs.frozen(kw_only=True) class EntryPoint: - uri: str = attrs.field(validator=STRING_VALIDATORS) # type: ignore + uri: str = attrs.field( + validator=STRING_VALIDATORS # type: ignore[arg-type] + ) @attrs.frozen(kw_only=True) class ConferenceSolution: - name: str = attrs.field(validator=STRING_VALIDATORS) # type: ignore + name: str = attrs.field( + validator=STRING_VALIDATORS # type: ignore[arg-type] + ) @attrs.frozen(kw_only=True) @@ -710,7 +736,7 @@ def parse_event_response( # noqa: D417 ) -def _dump_event(event): +def _dump_event(event): # type: ignore[no-untyped-def] """Convert an event db object to the Google API JSON format.""" dump = { "summary": event.title, diff --git a/inbox/events/ical.py b/inbox/events/ical.py index 9ab70729c..58abfae07 100644 --- a/inbox/events/ical.py +++ b/inbox/events/ical.py @@ -5,14 +5,14 @@ from email.utils import formataddr from typing import TYPE_CHECKING, Literal -import arrow -import icalendar +import arrow # type: ignore[import-untyped] +import icalendar # type: ignore[import-untyped] import pytz import requests -from flanker import mime +from flanker import mime # type: ignore[import-untyped] from html2text import html2text from icalendar import Calendar as iCalendar -from sqlalchemy.orm import Session +from sqlalchemy.orm import Session # type: ignore[import-untyped] from inbox.config import config from inbox.contacts.processing import update_contacts_from_event @@ -52,13 +52,17 @@ def normalize_repeated_component( return None elif isinstance(component, str): return component - elif isinstance(component, list) and set(component) == {component[0]}: + elif isinstance(component, list) and set( # type: ignore[redundant-expr] + component + ) == {component[0]}: return component[0] else: raise MalformedEventError("Cannot normalize component", component) -def events_from_ics(namespace, calendar, ics_str): # noqa: ANN201 +def events_from_ics( # type: ignore[no-untyped-def] # noqa: ANN201 + namespace, calendar, ics_str +): try: cal = iCalendar.from_ical(ics_str) except (ValueError, IndexError, KeyError, TypeError) as e: @@ -341,7 +345,9 @@ def events_from_ics(namespace, calendar, ics_str): # noqa: ANN201 return events -def process_invites(db_session, message, account, invites) -> None: +def process_invites( # type: ignore[no-untyped-def] + db_session, message, account, invites +) -> None: new_uids = [event.uid for event in invites] # Get the list of events which share a uid with those we received. @@ -402,7 +408,7 @@ def process_invites(db_session, message, account, invites) -> None: ) -def _cleanup_nylas_uid(uid): +def _cleanup_nylas_uid(uid): # type: ignore[no-untyped-def] uid = uid.lower() if "@nylas.com" in uid: return uid[:-10] @@ -410,7 +416,9 @@ def _cleanup_nylas_uid(uid): return uid -def process_nylas_rsvps(db_session, message, account, rsvps) -> None: +def process_nylas_rsvps( # type: ignore[no-untyped-def] + db_session, message, account, rsvps +) -> None: # The invite sending code generates invites with uids of the form # `public_id@nylas.com`. 
We couldn't use Event.uid for this because # it wouldn't work with Exchange (Exchange uids are of the form @@ -493,7 +501,9 @@ def import_attached_events( continue new_events = events_from_ics( - account.namespace, account.emailed_events_calendar, part_data + account.namespace, # type: ignore[attr-defined] + account.emailed_events_calendar, + part_data, ) except MalformedEventError: log.error( @@ -541,7 +551,7 @@ def import_attached_events( ) -def generate_icalendar_invite( # noqa: ANN201 +def generate_icalendar_invite( # type: ignore[no-untyped-def] # noqa: ANN201 event, invite_type: str = "request" ): # Generates an iCalendar invite from an event. @@ -616,7 +626,7 @@ def generate_icalendar_invite( # noqa: ANN201 return cal -def generate_invite_message( # noqa: ANN201 +def generate_invite_message( # type: ignore[no-untyped-def] # noqa: ANN201 ical_txt, event, account, invite_type: str = "request" ): assert invite_type in ["request", "update", "cancel"] @@ -661,7 +671,7 @@ def generate_invite_message( # noqa: ANN201 return msg -def send_invite( +def send_invite( # type: ignore[no-untyped-def] ical_txt, event, account, invite_type: str = "request" ) -> None: # We send those transactional emails through a separate domain. @@ -704,7 +714,7 @@ def send_invite( ) -def _generate_rsvp(status, account, event): +def _generate_rsvp(status, account, event): # type: ignore[no-untyped-def] # It seems that Google Calendar requires us to copy a number of fields # in the RVSP reply. I suppose it's for reconciling the reply with the # invite. - karim @@ -752,7 +762,7 @@ def _generate_rsvp(status, account, event): return {"cal": cal} -def generate_rsvp(event, participant, account): # noqa: ANN201 +def generate_rsvp(event, participant, account): # type: ignore[no-untyped-def] # noqa: ANN201 # Generates an iCalendar file to RSVP to an invite. status = INVERTED_STATUS_MAP.get(participant["status"]) return _generate_rsvp(status, account, event) @@ -762,7 +772,7 @@ def generate_rsvp(event, participant, account): # noqa: ANN201 # We try to find the organizer address from the iCal file. # If it's not defined, we try to return the invite sender's # email address. 
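The comment above describes a two-step fallback for picking the RSVP recipient. A hedged sketch of that logic, using hypothetical attribute names (the real Event/Message fields may differ):

def pick_rsvp_recipient(event) -> "str | None":
    # 1. Prefer the organizer parsed from the iCal file, if present.
    # 2. Otherwise fall back to the address the invite was sent from.
    # `organizer_email` and `message.from_addr` are illustrative names only.
    if event is None:
        return None
    if getattr(event, "organizer_email", None):
        return event.organizer_email
    message = getattr(event, "message", None)
    if message and message.from_addr:
        _name, address = message.from_addr[0]
        return address
    return None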
-def rsvp_recipient(event): # noqa: ANN201 +def rsvp_recipient(event): # type: ignore[no-untyped-def] # noqa: ANN201 if event is None: return None @@ -783,7 +793,9 @@ def rsvp_recipient(event): # noqa: ANN201 return None -def send_rsvp(ical_data, event, body_text, status, account) -> None: +def send_rsvp( # type: ignore[no-untyped-def] + ical_data, event, body_text, status, account +) -> None: from inbox.sendmail.base import SendMailException, get_sendmail_client ical_file = ical_data["cal"] diff --git a/inbox/events/microsoft/events_provider.py b/inbox/events/microsoft/events_provider.py index ddea006e3..50fea148c 100644 --- a/inbox/events/microsoft/events_provider.py +++ b/inbox/events/microsoft/events_provider.py @@ -153,15 +153,15 @@ def sync_events( updates.append(event) if isinstance(event, RecurringEvent): - exceptions, cancellations = self._get_event_overrides( + (exceptions, cancellations) = self._get_event_overrides( raw_event, event, read_only=read_only ) - updates.extend(exceptions) - updates.extend(cancellations) + updates.extend(exceptions) # type: ignore[arg-type] + updates.extend(cancellations) # type: ignore[arg-type] return updates - def _get_event_overrides( + def _get_event_overrides( # type: ignore[no-untyped-def] self, raw_master_event: MsGraphEvent, master_event: RecurringEvent, @@ -216,7 +216,7 @@ def _get_event_overrides( for cancellation in raw_cancellations ] - return exceptions, cancellations + return exceptions, cancellations # type: ignore[return-value] def webhook_notifications_enabled(self, account: Account) -> bool: """ diff --git a/inbox/events/microsoft/graph_client.py b/inbox/events/microsoft/graph_client.py index bb12b8f87..5c84ffb7a 100644 --- a/inbox/events/microsoft/graph_client.py +++ b/inbox/events/microsoft/graph_client.py @@ -120,9 +120,11 @@ def request( break try: - response.raise_for_status() + response.raise_for_status() # type: ignore[possibly-undefined] except requests.HTTPError as e: - raise MicrosoftGraphClientException(response) from e + raise MicrosoftGraphClientException( + response # type: ignore[possibly-undefined] + ) from e if not response.text: # Some DELETE operations return empty body diff --git a/inbox/events/microsoft/parse.py b/inbox/events/microsoft/parse.py index a73810f3d..803bbf5b1 100644 --- a/inbox/events/microsoft/parse.py +++ b/inbox/events/microsoft/parse.py @@ -70,7 +70,7 @@ def get_microsoft_tzinfo(timezone_id: str) -> pytz.tzinfo.BaseTzInfo: MAX_DATETIME = datetime.datetime(9999, 12, 31, 23, 59, 59) -def parse_msgraph_datetime_tz_as_utc( # noqa: ANN201 +def parse_msgraph_datetime_tz_as_utc( # type: ignore[no-untyped-def] # noqa: ANN201 datetime_tz: MsGraphDateTimeTimeZone, ): """ @@ -238,7 +238,7 @@ def parse_msgraph_range_start_and_until( else: raise NotImplementedError() - return start_datetime, until_datetime + return (start_datetime, until_datetime) MS_GRAPH_PATTERN_TYPE_TO_ICAL_FREQ_INTERVAL_MULTIPLIER: dict[ @@ -508,7 +508,7 @@ def calculate_exception_and_canceled_occurrences( for dt in canceled_datetimes ] - return exception_occurrences, canceled_occurrences + return (exception_occurrences, canceled_occurrences) MS_GRAPH_TO_SYNC_ENGINE_STATUS_MAP: dict[MsGraphResponse, str] = { @@ -786,4 +786,6 @@ def parse_calendar(calendar: MsGraphCalendar) -> Calendar: read_only = not calendar["canEdit"] default = calendar["isDefaultCalendar"] - return Calendar(uid=uid, name=name, read_only=read_only, default=default) + return Calendar( # type: ignore[call-arg] + uid=uid, name=name, read_only=read_only, default=default + 
) diff --git a/inbox/events/recurring.py b/inbox/events/recurring.py index 312de0596..cc08adcb9 100644 --- a/inbox/events/recurring.py +++ b/inbox/events/recurring.py @@ -1,4 +1,4 @@ -import arrow +import arrow # type: ignore[import-untyped] from dateutil.rrule import ( FR, MO, @@ -24,7 +24,7 @@ EXPAND_RECURRING_YEARS = 1 -def link_events(db_session, event): # noqa: ANN201 +def link_events(db_session, event): # type: ignore[no-untyped-def] # noqa: ANN201 if isinstance(event, RecurringEvent): # Attempt to find my overrides return link_overrides(db_session, event) @@ -34,7 +34,7 @@ def link_events(db_session, event): # noqa: ANN201 return None -def link_overrides(db_session, event): # noqa: ANN201 +def link_overrides(db_session, event): # type: ignore[no-untyped-def] # noqa: ANN201 # Find event instances which override this specific # RecurringEvent instance. overrides = ( @@ -53,7 +53,7 @@ def link_overrides(db_session, event): # noqa: ANN201 return overrides -def link_master(db_session, event): # noqa: ANN201 +def link_master(db_session, event): # type: ignore[no-untyped-def] # noqa: ANN201 # Find the master RecurringEvent that spawned this # RecurringEventOverride (may not exist if it hasn't # been synced yet) @@ -73,7 +73,7 @@ def link_master(db_session, event): # noqa: ANN201 return event.master # This may be None. -def parse_rrule(event): # noqa: ANN201 +def parse_rrule(event): # type: ignore[no-untyped-def] # noqa: ANN201 # Parse the RRULE string and return a dateutil.rrule.rrule object if event.rrule is not None: if event.all_day: @@ -98,7 +98,7 @@ def parse_rrule(event): # noqa: ANN201 return None -def parse_exdate(event): # noqa: ANN201 +def parse_exdate(event): # type: ignore[no-untyped-def] # noqa: ANN201 # Parse the EXDATE string and return a list of arrow datetimes excl_dates = [] if event.exdate: @@ -115,7 +115,9 @@ def parse_exdate(event): # noqa: ANN201 return excl_dates -def get_start_times(event, start=None, end=None): # noqa: ANN201 +def get_start_times( # type: ignore[no-untyped-def] # noqa: ANN201 + event, start=None, end=None +): # Expands the rrule on event to return a list of arrow datetimes # representing start times for its recurring instances. # If start and/or end are supplied, will return times within that range, @@ -194,7 +196,7 @@ def get_start_times(event, start=None, end=None): # noqa: ANN201 weekday_map = (MO, TU, WE, TH, FR, SA, SU) -def rrule_to_json(r): # noqa: ANN201 +def rrule_to_json(r): # type: ignore[no-untyped-def] # noqa: ANN201 if not isinstance(r, rrule): r = parse_rrule(r) info = vars(r) diff --git a/inbox/events/remote_sync.py b/inbox/events/remote_sync.py index 1d951632d..6b3dc0964 100644 --- a/inbox/events/remote_sync.py +++ b/inbox/events/remote_sync.py @@ -1,7 +1,7 @@ from datetime import datetime, timedelta from typing import Any -import more_itertools +import more_itertools # type: ignore[import-not-found] from requests.exceptions import HTTPError from inbox.config import config @@ -67,7 +67,7 @@ def __init__( scope="calendar", ) - def sync(self) -> None: + def sync(self) -> None: # type: ignore[override] """ Query a remote provider for updates and persist them to the database. This function runs every `self.poll_frequency`. 
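The recurring.py helpers above (`parse_rrule`, `get_start_times`) lean on dateutil to expand an event's RRULE string into concrete start times. A self-contained sketch of that expansion, independent of the Event model (dates are arbitrary examples):

from datetime import datetime

from dateutil.rrule import rrulestr

# A weekly rule anchored on 2014-09-04 13:30.
rule = rrulestr("FREQ=WEEKLY;COUNT=5", dtstart=datetime(2014, 9, 4, 13, 30))

# Occurrences within a window, as get_start_times does with start/end.
starts = rule.between(datetime(2014, 9, 1), datetime(2014, 10, 1), inc=True)
print(starts)  # the four September occurrences of the five generated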
@@ -75,7 +75,7 @@ def sync(self) -> None:
         self.log.debug("syncing events")
 
         try:
-            deleted_uids, calendar_changes = self.provider.sync_calendars()
+            (deleted_uids, calendar_changes) = self.provider.sync_calendars()
         except AccessNotEnabledError:
             self.log.warning(
                 "Access to provider calendar API not enabled; bypassing sync"
@@ -139,7 +139,7 @@ def handle_calendar_deletes(
     log.info("deleted calendars", deleted=deleted_count)
 
 
-def handle_calendar_updates(
+def handle_calendar_updates(  # type: ignore[no-untyped-def]
     namespace_id: int, calendars, log: Any, db_session: Any
 ) -> list[tuple[str, int]]:
     """Persists new or updated Calendar objects to the database."""  # noqa: D401
@@ -162,7 +162,9 @@ def handle_calendar_updates(
             local_calendar.update(calendar)
             updated_count += 1
         else:
-            local_calendar = Calendar(namespace_id=namespace_id)
+            local_calendar = Calendar(  # type: ignore[call-arg]
+                namespace_id=namespace_id
+            )
             local_calendar.update(calendar)
             db_session.add(local_calendar)
             added_count += 1
@@ -223,7 +225,9 @@ def handle_event_updates(
                 and event.status == "cancelled"
                 and local_event.status != "cancelled"
             ):
-                for override in local_event.overrides:
+                for (
+                    override
+                ) in local_event.overrides:  # type: ignore[attr-defined]
                     override.status = "cancelled"
 
             local_event.update(event)
@@ -259,7 +263,9 @@ def handle_event_updates(
 
 
 class WebhookEventSync(EventSync):
-    def __init__(self, *args, **kwargs) -> None:
+    def __init__(  # type: ignore[no-untyped-def]
+        self, *args, **kwargs
+    ) -> None:
         super().__init__(*args, **kwargs)
         with session_scope(self.namespace_id) as db_session:
             account = db_session.query(Account).get(self.account_id)
@@ -273,7 +279,7 @@ def __init__(self, *args, **kwargs) -> None:
             # too long.
             self.poll_frequency = PUSH_NOTIFICATION_POLL_FREQUENCY
 
-    def sync(self) -> None:
+    def sync(self) -> None:  # type: ignore[override]
         """
         Query a remote provider for updates and persist them to the
         database. This function runs every `self.poll_frequency`.
@@ -400,7 +406,7 @@ def _sync_data(self) -> None:
     def _sync_calendar_list(self, account: Account, db_session: Any) -> None:
         sync_timestamp = datetime.utcnow()
 
-        deleted_uids, calendar_changes = self.provider.sync_calendars()
+        (deleted_uids, calendar_changes) = self.provider.sync_calendars()
 
         handle_calendar_deletes(
             self.namespace_id, deleted_uids, self.log, db_session
@@ -409,7 +415,9 @@ def _sync_calendar_list(self, account: Account, db_session: Any) -> None:
             self.namespace_id, calendar_changes, self.log, db_session
         )
 
-        account.last_calendar_list_sync = sync_timestamp
+        account.last_calendar_list_sync = (  # type: ignore[attr-defined]
+            sync_timestamp
+        )
         db_session.commit()
 
     def _sync_calendar(self, calendar: Calendar, db_session: Any) -> None:
diff --git a/inbox/events/util.py b/inbox/events/util.py
index e248ba91d..1c9f51652 100644
--- a/inbox/events/util.py
+++ b/inbox/events/util.py
@@ -2,7 +2,7 @@
 from collections import namedtuple
 from typing import NamedTuple
 
-import arrow
+import arrow  # type: ignore[import-untyped]
 from dateutil.parser import parse
 
 from inbox.models.calendar import Calendar
@@ -14,7 +14,7 @@ class MalformedEventError(Exception):
     pass
 
 
-def parse_datetime(datetime):  # noqa: ANN201
+def parse_datetime(datetime):  # type: ignore[no-untyped-def]  # noqa: ANN201
     # returns a UTC-aware datetime as an Arrow object.
     # to access the `datetime` object: `obj.datetime`
     # to convert to a naive datetime: `obj.naive`
@@ -26,7 +26,9 @@ def parse_datetime(datetime):  # noqa: ANN201
     return None
 
 
-def parse_rrule_datetime(datetime, tzinfo=None):  # noqa: ANN201
+def parse_rrule_datetime(  # type: ignore[no-untyped-def]  # noqa: ANN201
+    datetime, tzinfo=None
+):
     # format: 20140904T133000Z (datetimes) or 20140904 (dates)
     if datetime[-1] == "Z":
         tzinfo = "UTC"
@@ -40,19 +42,19 @@ def parse_rrule_datetime(datetime, tzinfo=None):  # noqa: ANN201
     return dt
 
 
-def serialize_datetime(d):  # noqa: ANN201
+def serialize_datetime(d):  # type: ignore[no-untyped-def]  # noqa: ANN201
     return d.strftime("%Y%m%dT%H%M%SZ")
 
 
 EventTime = namedtuple("EventTime", ["start", "end", "all_day"])
 
 
-def when_to_event_time(raw):  # noqa: ANN201
+def when_to_event_time(raw):  # type: ignore[no-untyped-def]  # noqa: ANN201
     when = parse_as_when(raw)
     return EventTime(when.start, when.end, when.all_day)
 
 
-def parse_google_time(d):  # noqa: ANN201
+def parse_google_time(d):  # type: ignore[no-untyped-def]  # noqa: ANN201
     # google dictionaries contain either 'date' or 'dateTime' & 'timeZone'
     # 'dateTime' is in ISO format so is UTC-aware, 'date' is just a date
     for key, dt in d.items():
@@ -80,12 +82,12 @@ def google_to_event_time(start_raw: str, end_raw: str) -> EventTime:
     return event_time
 
 
-def valid_base36(uid):  # noqa: ANN201
+def valid_base36(uid):  # type: ignore[no-untyped-def]  # noqa: ANN201
     # Check that an uid is a base36 element.
     return all(c in (string.ascii_lowercase + string.digits) for c in uid)
 
 
-def removed_participants(  # noqa: ANN201
+def removed_participants(  # type: ignore[no-untyped-def]  # noqa: ANN201
     original_participants, update_participants
 ):
     """
diff --git a/inbox/exceptions.py b/inbox/exceptions.py
index a9d8e101b..d0546bdb5 100644
--- a/inbox/exceptions.py
+++ b/inbox/exceptions.py
@@ -31,7 +31,7 @@ class GmailSettingError(ValidationError):
 
 
 class ImapSupportDisabledError(ValidationError):
-    def __init__(self, reason=None) -> None:
+    def __init__(self, reason=None) -> None:  # type: ignore[no-untyped-def]
         super().__init__(reason)
         self.reason = reason
diff --git a/inbox/heartbeat/config.py b/inbox/heartbeat/config.py
index 477441fb6..6a40d62b7 100644
--- a/inbox/heartbeat/config.py
+++ b/inbox/heartbeat/config.py
@@ -21,7 +21,7 @@
 }
 
 
-def _get_redis_connection_pool(host, port, db):
+def _get_redis_connection_pool(host, port, db):  # type: ignore[no-untyped-def]
     # This function is called once per sync process at the time of
     # instantiating the singleton HeartBeatStore, so doing this here
     # should be okay for now.
@@ -43,11 +43,11 @@ def _get_redis_connection_pool(host, port, db):
     return connection_pool
 
 
-def account_redis_shard_number(account_id):  # noqa: ANN201
+def account_redis_shard_number(account_id):  # type: ignore[no-untyped-def]  # noqa: ANN201
     return account_id % len(REDIS_SHARDS)
 
 
-def get_redis_client(account_id):  # noqa: ANN201
+def get_redis_client(account_id):  # type: ignore[no-untyped-def]  # noqa: ANN201
     account_shard_number = account_redis_shard_number(account_id)
 
     host = REDIS_SHARDS[account_shard_number]
diff --git a/inbox/heartbeat/status.py b/inbox/heartbeat/status.py
index b05902d1c..19f369a4f 100644
--- a/inbox/heartbeat/status.py
+++ b/inbox/heartbeat/status.py
@@ -13,7 +13,7 @@
 FolderPing = namedtuple("FolderPing", ["id", "alive", "timestamp"])
 
 
-def get_ping_status(  # noqa: ANN201
+def get_ping_status(  # type: ignore[no-untyped-def]  # noqa: ANN201
     account_ids, host=None, port: int = 6379, threshold=ALIVE_EXPIRY
 ):
     # Query the indexes and not the per-folder info for faster lookup.
@@ -47,7 +47,7 @@ def get_ping_status(  # noqa: ANN201
     return accounts
 
 
-def clear_heartbeat_status(  # noqa: ANN201
+def clear_heartbeat_status(  # type: ignore[no-untyped-def]  # noqa: ANN201
     account_id, folder_id=None, device_id=None
 ):
     # Clears the status for the account, folder and/or device.
diff --git a/inbox/heartbeat/store.py b/inbox/heartbeat/store.py
index 31b91e30c..f5f1e56f3 100644
--- a/inbox/heartbeat/store.py
+++ b/inbox/heartbeat/store.py
@@ -12,8 +12,8 @@
 log = get_logger()
 
 
-def safe_failure(f):  # noqa: ANN201
-    def wrapper(*args, **kwargs):
+def safe_failure(f):  # type: ignore[no-untyped-def]  # noqa: ANN201
+    def wrapper(*args, **kwargs):  # type: ignore[no-untyped-def]
         try:
             return f(*args, **kwargs)
         except Exception:
@@ -25,7 +25,9 @@ def wrapper(*args, **kwargs):
 
 
 class HeartbeatStatusKey:
-    def __init__(self, account_id, folder_id) -> None:
+    def __init__(  # type: ignore[no-untyped-def]
+        self, account_id, folder_id
+    ) -> None:
         self.account_id = account_id
         self.folder_id = folder_id
         self.key = f"{self.account_id}:{self.folder_id}"
@@ -33,37 +35,37 @@ def __init__(self, account_id, folder_id) -> None:
     def __repr__(self) -> str:
         return self.key
 
-    def __lt__(self, other):  # noqa: ANN204
+    def __lt__(self, other):  # type: ignore[no-untyped-def]  # noqa: ANN204
         if self.account_id != other.account_id:
             return self.account_id < other.account_id
         return self.folder_id < other.folder_id
 
-    def __eq__(self, other):  # noqa: ANN204
+    def __eq__(self, other):  # type: ignore[no-untyped-def]  # noqa: ANN204
         return (
             self.account_id == other.account_id
             and self.folder_id == other.folder_id
         )
 
     @classmethod
-    def all_folders(cls, account_id):  # noqa: ANN206
+    def all_folders(cls, account_id):  # type: ignore[no-untyped-def]  # noqa: ANN206
         return cls(account_id, "*")
 
     @classmethod
-    def contacts(cls, account_id):  # noqa: ANN206
+    def contacts(cls, account_id):  # type: ignore[no-untyped-def]  # noqa: ANN206
         return cls(account_id, CONTACTS_FOLDER_ID)
 
     @classmethod
-    def events(cls, account_id):  # noqa: ANN206
+    def events(cls, account_id):  # type: ignore[no-untyped-def]  # noqa: ANN206
         return cls(account_id, EVENTS_FOLDER_ID)
 
     @classmethod
-    def from_string(cls, string_key):  # noqa: ANN206
+    def from_string(cls, string_key):  # type: ignore[no-untyped-def]  # noqa: ANN206
         account_id, folder_id = (int(part) for part in string_key.split(":"))
         return cls(account_id, folder_id)
 
 
 class HeartbeatStatusProxy:
-    def __init__(
+    def __init__(  # type: ignore[no-untyped-def]
         self,
         account_id,
         folder_id,
@@ -79,7 +81,7 @@ def __init__(
         self.store = HeartbeatStore.store()
 
     @safe_failure
-    def publish(self, **kwargs) -> None:
+    def publish(self, **kwargs) -> None:  # type: ignore[no-untyped-def]
         try:
             self.heartbeat_at = time.time()
             self.store.publish(self.key, self.heartbeat_at)
@@ -108,23 +110,27 @@ class HeartbeatStore:
     _instances: dict[str | None, "HeartbeatStore"] = {}
 
-    def __init__(self, host=None, port: int = 6379) -> None:
+    def __init__(  # type: ignore[no-untyped-def]
+        self, host=None, port: int = 6379
+    ) -> None:
         self.host = host
         self.port = port
 
     @classmethod
-    def store(cls, host=None, port=None):  # noqa: ANN206
+    def store(cls, host=None, port=None):  # type: ignore[no-untyped-def]  # noqa: ANN206
         # Allow singleton access to the store, keyed by host.
         if cls._instances.get(host) is None:
             cls._instances[host] = cls(host, port)
         return cls._instances.get(host)
 
     @safe_failure
-    def publish(self, key, timestamp) -> None:
+    def publish(self, key, timestamp) -> None:  # type: ignore[no-untyped-def]
         # Update indexes
         self.update_folder_index(key, float(timestamp))
 
-    def remove(self, key, device_id=None, client=None) -> None:
+    def remove(  # type: ignore[no-untyped-def]
+        self, key, device_id=None, client=None
+    ) -> None:
         # Remove a key from the store, or device entry from a key.
         if not client:
             client = heartbeat_config.get_redis_client(key.account_id)
@@ -140,7 +146,7 @@ def remove(self, key, device_id=None, client=None) -> None:
         self.remove_from_folder_index(key, client)
 
     @safe_failure
-    def remove_folders(  # noqa: ANN201
+    def remove_folders(  # type: ignore[no-untyped-def]  # noqa: ANN201
         self, account_id, folder_id=None, device_id=None
     ):
         # Remove heartbeats for the given account, folder and/or device.
@@ -166,13 +172,17 @@ def remove_folders(  # noqa: ANN201
             pipeline.reset()
             return n
 
-    def update_folder_index(self, key, timestamp) -> None:
+    def update_folder_index(  # type: ignore[no-untyped-def]
+        self, key, timestamp
+    ) -> None:
         assert isinstance(timestamp, float)
         # Update the folder timestamp index for this specific account, too
         client = heartbeat_config.get_redis_client(key.account_id)
         client.zadd(key.account_id, {key.folder_id: timestamp})
 
-    def update_accounts_index(self, key) -> None:
+    def update_accounts_index(  # type: ignore[no-untyped-def]
+        self, key
+    ) -> None:
         # Find the oldest heartbeat from the account-folder index
         try:
             client = heartbeat_config.get_redis_client(key.account_id)
@@ -185,21 +195,27 @@ def update_accounts_index(self, key) -> None:
             # will fail -- ignore it.
             pass
 
-    def remove_from_folder_index(self, key, client) -> None:
+    def remove_from_folder_index(  # type: ignore[no-untyped-def]
+        self, key, client
+    ) -> None:
         client.zrem("folder_index", key)
         if isinstance(key, str):
             key = HeartbeatStatusKey.from_string(key)
         client.zrem(key.account_id, key.folder_id)
 
-    def remove_from_account_index(self, account_id, client) -> None:
+    def remove_from_account_index(  # type: ignore[no-untyped-def]
+        self, account_id, client
+    ) -> None:
         client.delete(account_id)
         client.zrem("account_index", account_id)
 
-    def get_account_folders(self, account_id):  # noqa: ANN201
+    def get_account_folders(self, account_id):  # type: ignore[no-untyped-def]  # noqa: ANN201
         client = heartbeat_config.get_redis_client(account_id)
         return client.zrange(account_id, 0, -1, withscores=True)
 
-    def get_accounts_folders(self, account_ids):  # noqa: ANN201
+    def get_accounts_folders(  # type: ignore[no-untyped-def]  # noqa: ANN201
+        self, account_ids
+    ):
         # This is where things get interesting --- we need to make queries
         # to multiple shards and return the results to a single caller.
         # Preferred method of querying for multiple accounts. Uses pipelining
diff --git a/inbox/ignition.py b/inbox/ignition.py
index c1947a72b..162056a18 100644
--- a/inbox/ignition.py
+++ b/inbox/ignition.py
@@ -6,9 +6,9 @@
 from urllib.parse import quote_plus as urlquote
 from warnings import filterwarnings
 
-import limitlion
+import limitlion  # type: ignore[import-untyped]
 import redis
-from sqlalchemy import create_engine, event
+from sqlalchemy import create_engine, event  # type: ignore[import-untyped]
 
 from inbox.config import config
 from inbox.logging import find_first_app_frame_and_name, get_logger
@@ -31,7 +31,7 @@
 pool_tracker: MutableMapping[Any, dict[str, Any]] = weakref.WeakKeyDictionary()
 
 
-def build_uri(  # noqa: ANN201
+def build_uri(  # type: ignore[no-untyped-def]  # noqa: ANN201
     username, password, hostname, port, database_name
 ):
     uri_template = (
@@ -47,7 +47,7 @@ def build_uri(  # noqa: ANN201
     )
 
 
-def engine(  # noqa: ANN201
+def engine(  # type: ignore[no-untyped-def]  # noqa: ANN201
     database_name,
     database_uri,
     pool_size=DB_POOL_SIZE,
@@ -74,7 +74,7 @@ def engine(  # noqa: ANN201
     )
 
     @event.listens_for(engine, "checkout")
-    def receive_checkout(
+    def receive_checkout(  # type: ignore[no-untyped-def]
         dbapi_connection, connection_record, connection_proxy
     ) -> None:
         """Log checkedout and overflow when a connection is checked out"""
@@ -123,7 +123,9 @@ def receive_checkout(
         }
 
     @event.listens_for(engine, "checkin")
-    def receive_checkin(dbapi_connection, connection_record) -> None:
+    def receive_checkin(  # type: ignore[no-untyped-def]
+        dbapi_connection, connection_record
+    ) -> None:
         if dbapi_connection in pool_tracker:
             del pool_tracker[dbapi_connection]
 
@@ -131,7 +133,7 @@ def receive_checkin(dbapi_connection, connection_record) -> None:
 
 
 class EngineManager:
-    def __init__(
+    def __init__(  # type: ignore[no-untyped-def]
         self, databases, users, include_disabled: bool = False
     ) -> None:
         self.engines = {}
@@ -179,16 +181,16 @@ def __init__(
             self.engines[key] = engine(schema_name, uri)
             self._engine_zones[key] = zone
 
-    def shard_key_for_id(self, id_) -> int:
+    def shard_key_for_id(self, id_) -> int:  # type: ignore[no-untyped-def]
         return 0
 
-    def get_for_id(self, id_):  # noqa: ANN201
+    def get_for_id(self, id_):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return self.engines[self.shard_key_for_id(id_)]
 
-    def zone_for_id(self, id_):  # noqa: ANN201
+    def zone_for_id(self, id_):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return self._engine_zones[self.shard_key_for_id(id_)]
 
-    def shards_for_zone(self, zone):  # noqa: ANN201
+    def shards_for_zone(self, zone):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return [k for k, z in self._engine_zones.items() if z == zone]
 
 
@@ -198,7 +200,7 @@ def shards_for_zone(self, zone):  # noqa: ANN201
 )
 
 
-def init_db(engine, key: int = 0) -> None:
+def init_db(engine, key: int = 0) -> None:  # type: ignore[no-untyped-def]
     """
     Make the tables.
 
@@ -217,24 +219,28 @@ def init_db(engine, key: int = 0) -> None:
     # to execute this function multiple times.
     # STOPSHIP(emfree): verify
     increment = (key << 48) + 1
-    for table in MailSyncBase.metadata.tables.values():
+    for (
+        table
+    ) in MailSyncBase.metadata.tables.values():  # type: ignore[attr-defined]
         event.listen(
             table,
             "after_create",
             DDL(f"ALTER TABLE {table} AUTO_INCREMENT={increment}"),
         )
     with disabled_dubiously_many_queries_warning():
-        MailSyncBase.metadata.create_all(engine)
+        MailSyncBase.metadata.create_all(engine)  # type: ignore[attr-defined]
 
 
-def verify_db(engine, schema, key) -> None:
+def verify_db(engine, schema, key) -> None:  # type: ignore[no-untyped-def]
     from inbox.models.base import MailSyncBase
 
     query = """SELECT AUTO_INCREMENT from information_schema.TABLES where
                table_schema='{}' AND table_name='{}';"""
 
     verified = set()
-    for table in MailSyncBase.metadata.sorted_tables:
+    for (
+        table
+    ) in MailSyncBase.metadata.sorted_tables:  # type: ignore[attr-defined]
         # ContactSearchIndexCursor does not need to be checked because there's
         # only one row in the table
         if str(table) == "contactsearchindexcursor":
@@ -262,7 +268,7 @@ def verify_db(engine, schema, key) -> None:
         verified.add(table)
 
 
-def reset_invalid_autoincrements(  # noqa: ANN201
+def reset_invalid_autoincrements(  # type: ignore[no-untyped-def]  # noqa: ANN201
     engine, schema, key, dry_run: bool = True
 ):
     from inbox.models.base import MailSyncBase
@@ -271,7 +277,9 @@
                table_schema='{}' AND table_name='{}';"""
 
     reset = set()
-    for table in MailSyncBase.metadata.sorted_tables:
+    for (
+        table
+    ) in MailSyncBase.metadata.sorted_tables:  # type: ignore[attr-defined]
         increment = engine.execute(query.format(schema, table)).scalar()
         if increment is not None and (increment >> 48) != key:
             if not dry_run:
diff --git a/inbox/instrumentation.py b/inbox/instrumentation.py
index e220bd093..289d29b9c 100644
--- a/inbox/instrumentation.py
+++ b/inbox/instrumentation.py
@@ -13,10 +13,12 @@ class ProfileCollector:
     def __init__(self, interval: float = 0.005) -> None:
         self.interval = interval
         self._started = None
-        self._stack_counts = collections.defaultdict(int)
+        self._stack_counts = (  # type: ignore[var-annotated]
+            collections.defaultdict(int)
+        )
 
     def start(self) -> None:
-        self._started = time.time()
+        self._started = time.time()  # type: ignore[assignment]
         try:
             signal.signal(signal.SIGVTALRM, self._sample)
         except ValueError:
@@ -26,7 +28,7 @@ def start(self) -> None:
 
         signal.setitimer(signal.ITIMER_VIRTUAL, self.interval, 0)
 
-    def _sample(self, signum, frame) -> None:
+    def _sample(self, signum, frame) -> None:  # type: ignore[no-untyped-def]
         stack: list[str] = []
         while frame is not None:
             stack.append(self._format_frame(frame))
@@ -36,15 +38,15 @@ def _sample(self, signum, frame) -> None:
         self._stack_counts[stack_str] += 1
         signal.setitimer(signal.ITIMER_VIRTUAL, self.interval, 0)
 
-    def _format_frame(self, frame) -> str:
+    def _format_frame(self, frame) -> str:  # type: ignore[no-untyped-def]
         return "{}({})".format(
             frame.f_code.co_name, frame.f_globals.get("__name__")
         )
 
-    def stats(self):  # noqa: ANN201
+    def stats(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if self._started is None:
             return ""
-        elapsed = time.time() - self._started
+        elapsed = time.time() - self._started  # type: ignore[unreachable]
         lines = [f"elapsed {elapsed}", f"granularity {self.interval}"]
         ordered_stacks = sorted(
             self._stack_counts.items(), key=lambda kv: kv[1], reverse=True
@@ -53,5 +55,5 @@ def stats(self):  # noqa: ANN201
         return "\n".join(lines) + "\n"
 
     def reset(self) -> None:
-        self._started = time.time()
+        self._started = time.time()  # type: ignore[assignment]
         self._stack_counts = collections.defaultdict(int)
diff --git a/inbox/interruptible_threading.py b/inbox/interruptible_threading.py
index b4ebc5729..e59b3bd78 100644
--- a/inbox/interruptible_threading.py
+++ b/inbox/interruptible_threading.py
@@ -181,7 +181,7 @@ def decorator(
     def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
         current_thread = threading.current_thread()
         if not isinstance(current_thread, InterruptibleThread):
-            return blocking_function(*args)
+            return blocking_function(*args)  # type: ignore[call-arg]
 
         return interruptible_function(current_thread, *args, **kwargs)
 
@@ -255,7 +255,7 @@ class InterruptibleThreadTimeout(BaseException):
 
 
 @contextlib.contextmanager
-def timeout(timeout: float):  # noqa: ANN201
+def timeout(timeout: float):  # type: ignore[no-untyped-def]  # noqa: ANN201
     """
     Context manager to set a timeout for the interruptible operations
     run by the current interruptible thread.
diff --git a/inbox/logging.py b/inbox/logging.py
index 0a0492fc6..b161210f0 100644
--- a/inbox/logging.py
+++ b/inbox/logging.py
@@ -22,7 +22,9 @@
 MAX_EXCEPTION_LENGTH = 10000
 
 
-def find_first_app_frame_and_name(ignores=None):  # noqa: ANN201
+def find_first_app_frame_and_name(  # type: ignore[no-untyped-def]  # noqa: ANN201
+    ignores=None,
+):
     """
     Remove ignorable calls and return the relevant app frame. Borrowed from
     structlog, but fixes an issue when the stack includes an 'exec' statement
@@ -42,7 +44,7 @@ def find_first_app_frame_and_name(ignores=None):  # noqa: ANN201
     f = sys._getframe()
     name = f.f_globals.get("__name__")
     while (
-        f is not None
+        f is not None  # type: ignore[redundant-expr]
         and f.f_back is not None
         and (name is None or any(name.startswith(i) for i in ignores))
     ):
@@ -51,7 +53,7 @@ def find_first_app_frame_and_name(ignores=None):  # noqa: ANN201
     return f, name
 
 
-def _record_level(logger, name, event_dict):
+def _record_level(logger, name, event_dict):  # type: ignore[no-untyped-def]
     """
     Processor that records the log level ('info', 'warning', etc.) in the
     structlog event dictionary.
@@ -60,7 +62,7 @@ def _record_level(logger, name, event_dict):
     return event_dict
 
 
-def _record_module(logger, name, event_dict):
+def _record_module(logger, name, event_dict):  # type: ignore[no-untyped-def]
     """
     Processor that records the module and line where the logging call
     was invoked.
@@ -79,7 +81,9 @@ def _record_module(logger, name, event_dict):
     return event_dict
 
 
-def safe_format_exception(etype, value, tb, limit=None):  # noqa: ANN201
+def safe_format_exception(  # type: ignore[no-untyped-def]  # noqa: ANN201
+    etype, value, tb, limit=None
+):
     """
     Similar to structlog._format_exception, but truncate the exception part.
     This is because SQLAlchemy exceptions can sometimes have ludicrously large
@@ -101,7 +105,7 @@ def safe_format_exception(etype, value, tb, limit=None):  # noqa: ANN201
     return "".join(list)
 
 
-def _is_log_in_same_fn_scope(exc_tb):
+def _is_log_in_same_fn_scope(exc_tb):  # type: ignore[no-untyped-def]
     """
     exc_info returns exception data according to the following spec:
 
@@ -130,14 +134,14 @@ def _is_log_in_same_fn_scope(exc_tb):
     return any(fn_name == calling_fn for _, _, fn_name, _ in exc_tb_stack)
 
 
-def _get_exc_info_if_in_scope():
+def _get_exc_info_if_in_scope():  # type: ignore[no-untyped-def]
     exc_info = sys.exc_info()
     if _is_log_in_same_fn_scope(exc_info[2]):
         return exc_info
 
     return (None, None, None)
 
 
-def _safe_exc_info_renderer(_, __, event_dict):
+def _safe_exc_info_renderer(_, __, event_dict):  # type: ignore[no-untyped-def]
     """Processor that formats exception info safely."""
     error = event_dict.pop("error", None)
     exc_info = event_dict.pop("exc_info", None)
@@ -194,7 +198,7 @@ def _safe_exc_info_renderer(_, __, event_dict):
     return event_dict
 
 
-def _safe_encoding_renderer(_, __, event_dict):
+def _safe_encoding_renderer(_, __, event_dict):  # type: ignore[no-untyped-def]
     """
     Processor that converts all strings to unicode. Note that we
     ignore conversion errors.
@@ -210,7 +214,9 @@ def _safe_encoding_renderer(_, __, event_dict):
 class BoundLogger(structlog.stdlib.BoundLogger):
     """BoundLogger which always adds thread_id and env to positional args"""
 
-    def _proxy_to_logger(self, method_name, event, *event_args, **event_kw):
+    def _proxy_to_logger(  # type: ignore[no-untyped-def, override]
+        self, method_name, event, *event_args, **event_kw
+    ):
         event_kw["thread_id"] = hex(threading.get_native_id())
 
         # 'prod', 'staging', 'dev' ...
@@ -251,13 +257,13 @@ def _proxy_to_logger(self, method_name, event, *event_args, **event_kw):
 }
 
 
-def json_excepthook(etype, value, tb) -> None:
+def json_excepthook(etype, value, tb) -> None:  # type: ignore[no-untyped-def]
     log = get_logger()
     log.error(**create_error_log_context((etype, value, tb)))
 
 
 class ConditionalFormatter(logging.Formatter):
-    def format(self, record):  # noqa: ANN201
+    def format(self, record):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if (
             record.name in ("__main__", "inbox")
             or record.name.startswith("inbox.")
@@ -274,7 +280,7 @@ def format(self, record):  # noqa: ANN201
         return super().format(record)
 
 
-def configure_logging(log_level=None) -> None:
+def configure_logging(log_level=None) -> None:  # type: ignore[no-untyped-def]
     """
     Idempotently configure logging.
 
@@ -310,9 +316,9 @@ def configure_logging(log_level=None) -> None:
             },
         )
     else:
-        formatter = ConditionalFormatter()
+        formatter = ConditionalFormatter()  # type: ignore[assignment]
     tty_handler.setFormatter(formatter)
-    tty_handler._nylas = True  # type: ignore
+    tty_handler._nylas = True  # type: ignore[attr-defined]
 
     # Configure the root logger.
     root_logger = logging.getLogger()
diff --git a/inbox/mailsync/backends/base.py b/inbox/mailsync/backends/base.py
index 2a19a54f1..09a6167b9 100644
--- a/inbox/mailsync/backends/base.py
+++ b/inbox/mailsync/backends/base.py
@@ -46,7 +46,9 @@ class BaseMailSyncMonitor(InterruptibleThread):
         How often to check for commands.
""" - def __init__(self, account, heartbeat: int = 1) -> None: + def __init__( # type: ignore[no-untyped-def] + self, account, heartbeat: int = 1 + ) -> None: bind_context(self, "mailsyncmonitor", account.id) self.shutdown = threading.Event() # how often to check inbox, in seconds @@ -61,7 +63,7 @@ def __init__(self, account, heartbeat: int = 1) -> None: self.name = f"{self.__class__.__name__}(account_id={account.id!r})" - def _run(self): + def _run(self): # type: ignore[no-untyped-def] try: return retry_with_logging( self._run_impl, @@ -73,7 +75,7 @@ def _run(self): self._cleanup() raise - def _run_impl(self): + def _run_impl(self): # type: ignore[no-untyped-def] self.sync_thread = InterruptibleThread( retry_with_logging, self.sync, @@ -96,7 +98,7 @@ def _run_impl(self): provider=self.provider_name, exc=self.sync_thread.exception, ) - raise self.sync_thread.exception + raise self.sync_thread.exception # type: ignore[misc] def sync(self) -> Never: raise NotImplementedError @@ -104,9 +106,9 @@ def sync(self) -> Never: def _cleanup(self) -> None: self.sync_thread.kill() with session_scope(self.namespace_id) as mailsync_db_session: - for x in self.folder_monitors: + for x in self.folder_monitors: # type: ignore[attr-defined] x.set_stopped(mailsync_db_session) - kill_all(self.folder_monitors) + kill_all(self.folder_monitors) # type: ignore[attr-defined] def __repr__(self) -> str: return f"<{self.name}>" diff --git a/inbox/mailsync/backends/gmail.py b/inbox/mailsync/backends/gmail.py index fff902022..b82098f72 100644 --- a/inbox/mailsync/backends/gmail.py +++ b/inbox/mailsync/backends/gmail.py @@ -25,7 +25,10 @@ from threading import Semaphore from typing import TYPE_CHECKING, ClassVar -from sqlalchemy.orm import joinedload, load_only +from sqlalchemy.orm import ( # type: ignore[import-untyped] + joinedload, + load_only, +) from inbox import interruptible_threading from inbox.logging import get_logger @@ -56,11 +59,13 @@ class GmailFolderSyncEngine(FolderSyncEngine): - def __init__(self, *args, **kwargs) -> None: + def __init__( # type: ignore[no-untyped-def] + self, *args, **kwargs + ) -> None: FolderSyncEngine.__init__(self, *args, **kwargs) - self.saved_uids = set() + self.saved_uids = set() # type: ignore[var-annotated] - def is_all_mail(self, crispin_client): # noqa: ANN201 + def is_all_mail(self, crispin_client): # type: ignore[no-untyped-def] # noqa: ANN201 if not hasattr(self, "_is_all_mail"): folder_names = crispin_client.folder_names() self._is_all_mail = ( @@ -69,7 +74,7 @@ def is_all_mail(self, crispin_client): # noqa: ANN201 ) return self._is_all_mail - def should_idle(self, crispin_client): # noqa: ANN201 + def should_idle(self, crispin_client): # type: ignore[no-untyped-def] # noqa: ANN201 return self.is_all_mail(crispin_client) def initial_sync_impl(self, crispin_client: "CrispinClient") -> None: @@ -130,7 +135,12 @@ def initial_sync_impl(self, crispin_client: "CrispinClient") -> None: since = datetime.utcnow() - timedelta(days=30) inbox_uids = set( crispin_client.search_uids( - ["X-GM-LABELS", "inbox", "SINCE", since] + [ + "X-GM-LABELS", + "inbox", + "SINCE", + since, # type: ignore[list-item] + ] ) ) @@ -234,7 +244,7 @@ def resync_uids_impl(self) -> None: imap_folder_info_entry.highestmodseq = None db_session.commit() - def __deduplicate_message_object_creation( + def __deduplicate_message_object_creation( # type: ignore[no-untyped-def] self, db_session, raw_messages, account ): """ @@ -293,7 +303,7 @@ def __deduplicate_message_object_creation( message_id=message_obj.id, ) 
                 continue
-            uid = ImapUid(
+            uid = ImapUid(  # type: ignore[call-arg]
                 account=account,
                 folder=folder,
                 msg_uid=raw_message.uid,
@@ -308,7 +318,7 @@ def __deduplicate_message_object_creation(
 
         return brand_new_messages
 
-    def add_message_to_thread(
+    def add_message_to_thread(  # type: ignore[no-untyped-def]
         self, db_session, message_obj, raw_message
     ) -> None:
         """
@@ -326,7 +336,9 @@ def add_message_to_thread(
                 db_session, self.namespace_id, message_obj
             )
 
-    def download_and_commit_uids(self, crispin_client, uids) -> int | None:
+    def download_and_commit_uids(  # type: ignore[no-untyped-def]
+        self, crispin_client, uids
+    ) -> int | None:
         start = datetime.utcnow()
         raw_messages = crispin_client.uids(uids)
         if not raw_messages:
@@ -367,7 +379,7 @@ def download_and_commit_uids(self, crispin_client, uids) -> int | None:
         self.saved_uids.update(new_uids)
         return None
 
-    def expand_uids_to_download(  # noqa: ANN201
+    def expand_uids_to_download(  # type: ignore[no-untyped-def]  # noqa: ANN201
         self, crispin_client, uids, metadata
     ):
         # During Gmail initial sync, we expand threads: given a UID to
@@ -394,7 +406,7 @@ def expand_uids_to_download(  # noqa: ANN201
             metadata.update(crispin_client.g_metadata(uids))
         yield from sorted(uids, reverse=True)
 
-    def batch_download_uids(
+    def batch_download_uids(  # type: ignore[no-untyped-def]
         self,
         crispin_client,
         uids,
@@ -434,7 +446,7 @@ def batch_download_uids(
             interruptible_threading.sleep(THROTTLE_WAIT)
 
     @property
-    def throttled(self):  # noqa: ANN201
+    def throttled(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         with session_scope(self.namespace_id) as db_session:
             account = db_session.query(Account).get(self.account_id)
             throttled = account.throttled
@@ -442,7 +454,7 @@ def throttled(self):  # noqa: ANN201
         return throttled
 
 
-def g_msgids(namespace_id, session, in_):  # noqa: ANN201
+def g_msgids(namespace_id, session, in_):  # type: ignore[no-untyped-def]  # noqa: ANN201
     if not in_:
         return []
     # Easiest way to account-filter Messages is to namespace-filter from
@@ -472,7 +484,9 @@ def g_msgids(namespace_id, session, in_):  # noqa: ANN201
 
 class GmailSyncMonitor(ImapSyncMonitor):
     sync_engine_class: ClassVar[type[FolderSyncEngine]] = GmailFolderSyncEngine
 
-    def __init__(self, *args, **kwargs) -> None:
+    def __init__(  # type: ignore[no-untyped-def]
+        self, *args, **kwargs
+    ) -> None:
         super().__init__(*args, **kwargs)
 
         # We start a label refresh whenever we find a new labels
@@ -491,7 +505,7 @@ def __init__(self, *args, **kwargs) -> None:
         self.label_rename_semaphore = Semaphore(value=1)
         self.label_rename_handlers: "dict[str, LabelRenameHandler]" = {}
 
-    def handle_raw_folder_change(
+    def handle_raw_folder_change(  # type: ignore[no-untyped-def]
         self, db_session, account, raw_folder
     ) -> None:
         folder = (
@@ -534,7 +548,9 @@ def handle_raw_folder_change(
             db_session, account, raw_folder.display_name, raw_folder.role
         )
 
-    def set_sync_should_run_bit(self, account) -> None:
+    def set_sync_should_run_bit(  # type: ignore[no-untyped-def]
+        self, account
+    ) -> None:
         # Ensure sync_should_run is True for the folders we want to sync (for
         # Gmail, that's just all folders, since we created them above if
         # they didn't exist.)
@@ -542,7 +558,9 @@ def set_sync_should_run_bit(self, account) -> None:
             if folder.imapsyncstatus:
                 folder.imapsyncstatus.sync_should_run = True
 
-    def mark_deleted_labels(self, db_session, deleted_labels) -> None:
+    def mark_deleted_labels(  # type: ignore[no-untyped-def]
+        self, db_session, deleted_labels
+    ) -> None:
         # Go through the labels which have been "deleted" (i.e: they don't
         # show up when running LIST) and mark them as such.
         # We can't delete labels directly because Gmail allows users to hide
@@ -555,7 +573,9 @@ def mark_deleted_labels(self, db_session, deleted_labels) -> None:
             category = deleted_label.category
             category.deleted_at = datetime.now()
 
-    def save_folder_names(self, db_session, raw_folders) -> None:
+    def save_folder_names(  # type: ignore[no-untyped-def]
+        self, db_session, raw_folders
+    ) -> None:
         """
         Save the folders, labels present on the remote backend for an account.
diff --git a/inbox/mailsync/backends/imap/common.py b/inbox/mailsync/backends/imap/common.py
index 620cccf94..062be528a 100644
--- a/inbox/mailsync/backends/imap/common.py
+++ b/inbox/mailsync/backends/imap/common.py
@@ -14,10 +14,10 @@
 
 from datetime import datetime
 
-from sqlalchemy import bindparam, desc
-from sqlalchemy.orm import Session
-from sqlalchemy.orm.exc import NoResultFound
-from sqlalchemy.sql.expression import func
+from sqlalchemy import bindparam, desc  # type: ignore[import-untyped]
+from sqlalchemy.orm import Session  # type: ignore[import-untyped]
+from sqlalchemy.orm.exc import NoResultFound  # type: ignore[import-untyped]
+from sqlalchemy.sql.expression import func  # type: ignore[import-untyped]
 
 from inbox.contacts.processing import update_contacts_from_message
 from inbox.crispin import RawMessage
@@ -32,7 +32,7 @@
 log = get_logger()
 
 
-def local_uids(
+def local_uids(  # type: ignore[no-untyped-def]
     account_id: int, session, folder_id: int, limit: "int | None" = None
 ) -> "set[int]":
     """
@@ -60,7 +60,9 @@ def local_uids(
     return {uid for uid, in db_api_cursor.fetchall()}
 
 
-def lastseenuid(account_id, session, folder_id):  # noqa: ANN201
+def lastseenuid(  # type: ignore[no-untyped-def]  # noqa: ANN201
+    account_id, session, folder_id
+):
     q = session.query(func.max(ImapUid.msg_uid)).with_hint(
         ImapUid, "FORCE INDEX (ix_imapuid_account_id_folder_id_msg_uid_desc)"
     )
@@ -79,7 +81,8 @@ def update_message_metadata(
     # Sort imapuids in a way that the ones that were added later come last
     now = datetime.utcnow()
     sorted_imapuids: list[ImapUid] = sorted(
-        message.imapuids, key=lambda imapuid: imapuid.updated_at or now
+        message.imapuids,  # type: ignore[attr-defined]
+        key=lambda imapuid: imapuid.updated_at or now,
     )
 
     message.is_read = any(imapuid.is_seen for imapuid in sorted_imapuids)
@@ -136,7 +139,7 @@ def update_message_metadata(
         # created_at value. Taken from
         # https://docs.sqlalchemy.org/en/13/orm/extensions/
         # associationproxy.html#simplifying-association-objects
-        MessageCategory(
+        MessageCategory(  # type: ignore[call-arg]
             category=category, message=message, created_at=update_time
         )
 
@@ -163,7 +166,7 @@ def update_message_metadata(
     """
 
 
-def update_metadata(
+def update_metadata(  # type: ignore[no-untyped-def]
     account_id, folder_id, folder_role, new_flags, session
 ) -> None:
     """
@@ -210,7 +213,9 @@ def update_metadata(
     )
 
 
-def remove_deleted_uids(account_id, folder_id, uids) -> None:
+def remove_deleted_uids(  # type: ignore[no-untyped-def]
+    account_id, folder_id, uids
+) -> None:
     """
     Make sure you're holding a db write lock on the account.
     (We don't try to grab the lock in here in case the caller needs to put higher-level
@@ -278,7 +283,9 @@ def remove_deleted_uids(account_id, folder_id, uids) -> None:
     log.info("Deleted expunged UIDs", count=deleted_uid_count)
 
 
-def get_folder_info(account_id, session, folder_name):  # noqa: ANN201
+def get_folder_info(  # type: ignore[no-untyped-def]  # noqa: ANN201
+    account_id, session, folder_name
+):
     try:
         # using .one() here may catch duplication bugs
         return (
@@ -330,13 +337,13 @@ def create_imap_message(
     if existing_copy is not None:
         new_message = existing_copy
 
-    imapuid = ImapUid(
+    imapuid = ImapUid(  # type: ignore[call-arg]
         account=account,
         folder=folder,
         msg_uid=raw_message.uid,
         message=new_message,
     )
-    imapuid.update_flags(raw_message.flags)
+    imapuid.update_flags(raw_message.flags)  # type: ignore[arg-type]
     if raw_message.g_labels is not None:
         imapuid.update_labels(raw_message.g_labels)
 
@@ -347,12 +354,18 @@ def create_imap_message(
     )
 
     update_message_metadata(db_session, account, new_message, is_draft)
-    update_contacts_from_message(db_session, new_message, account.namespace.id)
+    update_contacts_from_message(
+        db_session,
+        new_message,
+        account.namespace.id,  # type: ignore[attr-defined]
+    )
 
     return imapuid
 
 
-def _update_categories(db_session, message, synced_categories) -> None:
+def _update_categories(  # type: ignore[no-untyped-def]
+    db_session, message, synced_categories
+) -> None:
     now = datetime.utcnow()
 
     # We make the simplifying assumption that only the latest syncback action
diff --git a/inbox/mailsync/backends/imap/generic.py b/inbox/mailsync/backends/imap/generic.py
index 8881ad872..302d8497a 100644
--- a/inbox/mailsync/backends/imap/generic.py
+++ b/inbox/mailsync/backends/imap/generic.py
@@ -68,10 +68,10 @@
 from datetime import datetime, timedelta
 from typing import Any, NoReturn
 
-from sqlalchemy import func
-from sqlalchemy.exc import IntegrityError
-from sqlalchemy.orm import Session
-from sqlalchemy.orm.exc import NoResultFound
+from sqlalchemy import func  # type: ignore[import-untyped]
+from sqlalchemy.exc import IntegrityError  # type: ignore[import-untyped]
+from sqlalchemy.orm import Session  # type: ignore[import-untyped]
+from sqlalchemy.orm.exc import NoResultFound  # type: ignore[import-untyped]
 
 from inbox import interruptible_threading
 from inbox.exceptions import ValidationError
@@ -159,7 +159,7 @@ class FolderSyncEngine(InterruptibleThread):
 
     global_lock = threading.BoundedSemaphore(1)
 
-    def __init__(
+    def __init__(  # type: ignore[no-untyped-def]
         self,
         account_id,
         namespace_id,
@@ -204,7 +204,7 @@ def __init__(
         self.state: str | None = None
         self.provider_name = provider_name
         self.last_fast_refresh = None
-        self.flags_fetch_results = {}
+        self.flags_fetch_results = {}  # type: ignore[var-annotated]
         self.conn_pool = connection_pool(self.account_id)
 
         self.polling_logged_at: float = 0
@@ -240,7 +240,7 @@ def setup_heartbeats(self) -> None:
             self.provider_name,
         )
 
-    def _run(self):
+    def _run(self):  # type: ignore[no-untyped-def]
         # Bind thread-local logging context.
         self.log = log.new(
             account_id=self.account_id,
@@ -250,7 +250,9 @@ def _run(self):
         # eagerly signal the sync status
         self.heartbeat_status.publish()
 
-        def start_sync(saved_folder_status) -> None:
+        def start_sync(  # type: ignore[no-untyped-def]
+            saved_folder_status,
+        ) -> None:
             # Ensure we don't cause an error if the folder was deleted.
             sync_end_time = (
                 saved_folder_status.folder
@@ -295,7 +297,7 @@ def start_sync(saved_folder_status) -> None:
             logger=log,
         )
 
-    def _run_impl(self):
+    def _run_impl(self):  # type: ignore[no-untyped-def]
         old_state = self.state
         assert old_state
         try:
@@ -359,7 +361,7 @@ def _run_impl(self):
         # killed between the end of the handler and the commit.
         if self.state != old_state:
 
-            def update(status) -> None:
+            def update(status) -> None:  # type: ignore[no-untyped-def]
                 status.state = self.state
 
             self.update_folder_sync_status(update)
@@ -369,7 +371,9 @@ def update(status) -> None:
             # error. It's safe to reset the uidvalidity counter.
             self.uidinvalid_count = 0
 
-    def update_folder_sync_status(self, cb) -> None:
+    def update_folder_sync_status(  # type: ignore[no-untyped-def]
+        self, cb
+    ) -> None:
         # Loads the folder sync status and invokes the provided callback to
         # modify it. Commits any changes and updates `self.state` to ensure
         # they are never out of sync.
@@ -384,7 +388,7 @@ def update_folder_sync_status(self, cb) -> None:
                 )
 
             except NoResultFound:
-                saved_folder_status = ImapFolderSyncStatus(
+                saved_folder_status = ImapFolderSyncStatus(  # type: ignore[call-arg]
                     account_id=self.account_id, folder_id=self.folder_id
                 )
                 db_session.add(saved_folder_status)
@@ -394,7 +398,7 @@ def update_folder_sync_status(self, cb) -> None:
 
             self.state = saved_folder_status.state
 
-    def set_stopped(self, db_session) -> None:
+    def set_stopped(self, db_session) -> None:  # type: ignore[no-untyped-def]
         self.update_folder_sync_status(lambda s: s.stop_sync())
 
     def _report_initial_sync_start(self) -> None:
@@ -426,7 +430,7 @@ def initial_sync(self) -> str:
                     ImapFolderInfo.folder_id == self.folder_id,
                 ).one()
             except NoResultFound:
-                imapfolderinfo = ImapFolderInfo(
+                imapfolderinfo = ImapFolderInfo(  # type: ignore[call-arg]
                     account_id=self.account_id,
                     folder_id=self.folder_id,
                     uidvalidity=crispin_client.selected_uidvalidity,
@@ -523,7 +527,7 @@ def initial_sync_impl(self, crispin_client: CrispinClient) -> None:
         # schedule change_poller to die
         change_poller.kill()
 
-    def should_idle(self, crispin_client):  # noqa: ANN201
+    def should_idle(self, crispin_client):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if not hasattr(self, "_should_idle"):
             self._should_idle = (
                 crispin_client.idle_supported()
@@ -611,7 +615,7 @@ def create_message(
         raw_message: RawMessage,
     ) -> ImapUid | None:
         assert account is not None
-        assert account.namespace is not None
+        assert account.namespace is not None  # type: ignore[attr-defined]
 
         # Check if we somehow already saved the imapuid (shouldn't happen, but
         # possible due to race condition). If so, don't commit changes.
@@ -684,7 +688,9 @@ def create_message(
 
         return new_uid
 
-    def _count_thread_messages(self, thread_id, db_session):
+    def _count_thread_messages(  # type: ignore[no-untyped-def]
+        self, thread_id, db_session
+    ):
         (count,) = (
             db_session.query(func.count(Message.id))
             .filter(Message.thread_id == thread_id)
@@ -692,7 +698,7 @@ def _count_thread_messages(self, thread_id, db_session):
         )
         return count
 
-    def add_message_to_thread(
+    def add_message_to_thread(  # type: ignore[no-untyped-def]
         self, db_session, message_obj, raw_message
     ) -> None:
         """
@@ -723,7 +729,9 @@ def add_message_to_thread(
         else:
             parent_thread.messages.append(message_obj)
 
-    def download_and_commit_uids(self, crispin_client, uids):  # noqa: ANN201
+    def download_and_commit_uids(  # type: ignore[no-untyped-def]  # noqa: ANN201
+        self, crispin_client, uids
+    ):
         start = datetime.utcnow()
         raw_messages = crispin_client.uids(uids)
         if not raw_messages:
@@ -782,7 +790,9 @@ def _report_first_message(self) -> None:
         for metric in metrics:
             statsd_client.timing(metric, latency)
 
-    def _report_message_velocity(self, timedelta, num_uids) -> None:
+    def _report_message_velocity(  # type: ignore[no-untyped-def]
+        self, timedelta, num_uids
+    ) -> None:
         latency = (timedelta).total_seconds() * 1000
         latency_per_uid = float(latency) / num_uids
         metrics = [
@@ -799,7 +809,9 @@ def _report_message_velocity(self, timedelta, num_uids) -> None:
         for metric in metrics:
             statsd_client.timing(metric, latency_per_uid)
 
-    def update_uid_counts(self, db_session, **kwargs) -> None:
+    def update_uid_counts(  # type: ignore[no-untyped-def]
+        self, db_session, **kwargs
+    ) -> None:
         saved_status = (
             db_session.query(ImapFolderSyncStatus)
             .join(Folder)
@@ -818,7 +830,9 @@ def update_uid_counts(self, db_session, **kwargs) -> None:
         metrics.update(kwargs)
         saved_status.update_metrics(metrics)
 
-    def get_new_uids(self, crispin_client) -> None:
+    def get_new_uids(  # type: ignore[no-untyped-def]
+        self, crispin_client
+    ) -> None:
         try:
             remote_uidnext = crispin_client.conn.folder_status(
                 self.folder_name, ["UIDNEXT"]
@@ -959,7 +973,9 @@ def condstore_refresh_flags(self, crispin_client: CrispinClient) -> None:
             )
             self.highestmodseq = new_highestmodseq
 
-    def generic_refresh_flags(self, crispin_client) -> None:
+    def generic_refresh_flags(  # type: ignore[no-untyped-def]
+        self, crispin_client
+    ) -> None:
         now = datetime.utcnow()
         slow_refresh_due = (
             self.last_slow_refresh is None
@@ -967,14 +983,17 @@ def generic_refresh_flags(self, crispin_client) -> None:
         )
         fast_refresh_due = (
             self.last_fast_refresh is None
-            or now > self.last_fast_refresh + FAST_REFRESH_INTERVAL
+            or now  # type: ignore[unreachable]
+            > self.last_fast_refresh + FAST_REFRESH_INTERVAL
         )
         if slow_refresh_due:
             self.refresh_flags_impl(crispin_client, SLOW_FLAGS_REFRESH_LIMIT)
             self.last_slow_refresh = datetime.utcnow()
         elif fast_refresh_due:
             self.refresh_flags_impl(crispin_client, FAST_FLAGS_REFRESH_LIMIT)
-            self.last_fast_refresh = datetime.utcnow()
+            self.last_fast_refresh = (
+                datetime.utcnow()  # type: ignore[assignment]
+            )
 
     def refresh_flags_impl(
         self, crispin_client: CrispinClient, max_uids: int
@@ -1011,7 +1030,7 @@ def refresh_flags_impl(
             limit=max_uids,
         )
 
-        flags = crispin_client.flags(local_uids)
+        flags = crispin_client.flags(local_uids)  # type: ignore[arg-type]
         if max_uids in self.flags_fetch_results and self.flags_fetch_results[
             max_uids
         ] == (local_uids, flags):
@@ -1055,29 +1074,29 @@ def check_uid_changes(self, crispin_client: "CrispinClient") -> None:
             self.generic_refresh_flags(crispin_client)
 
     @property
-    def uidvalidity(self):  # noqa: ANN201
+    def uidvalidity(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if not hasattr(self, "_uidvalidity"):
             self._uidvalidity = self._load_imap_folder_info().uidvalidity
         return self._uidvalidity
 
     @uidvalidity.setter
-    def uidvalidity(self, value) -> None:
+    def uidvalidity(self, value) -> None:  # type: ignore[no-untyped-def]
         self._update_imap_folder_info("uidvalidity", value)
         self._uidvalidity = value
 
     @property
-    def uidnext(self):  # noqa: ANN201
+    def uidnext(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if not hasattr(self, "_uidnext"):
             self._uidnext = self._load_imap_folder_info().uidnext
         return self._uidnext
 
     @uidnext.setter
-    def uidnext(self, value) -> None:
+    def uidnext(self, value) -> None:  # type: ignore[no-untyped-def]
         self._update_imap_folder_info("uidnext", value)
         self._uidnext = value
 
     @property
-    def last_slow_refresh(self):  # noqa: ANN201
+    def last_slow_refresh(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         # We persist the last_slow_refresh timestamp so that we don't end up
         # doing a (potentially expensive) full flags refresh for every account
         # on every process restart.
@@ -1088,22 +1107,22 @@ def last_slow_refresh(self):  # noqa: ANN201
         return self._last_slow_refresh
 
     @last_slow_refresh.setter
-    def last_slow_refresh(self, value) -> None:
+    def last_slow_refresh(self, value) -> None:  # type: ignore[no-untyped-def]
         self._update_imap_folder_info("last_slow_refresh", value)
         self._last_slow_refresh = value
 
     @property
-    def highestmodseq(self):  # noqa: ANN201
+    def highestmodseq(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if not hasattr(self, "_highestmodseq"):
             self._highestmodseq = self._load_imap_folder_info().highestmodseq
         return self._highestmodseq
 
     @highestmodseq.setter
-    def highestmodseq(self, value) -> None:
+    def highestmodseq(self, value) -> None:  # type: ignore[no-untyped-def]
         self._highestmodseq = value
         self._update_imap_folder_info("highestmodseq", value)
 
-    def _load_imap_folder_info(self):
+    def _load_imap_folder_info(self):  # type: ignore[no-untyped-def]
         with session_scope(self.namespace_id) as db_session:
             imapfolderinfo = (
                 db_session.query(ImapFolderInfo)
@@ -1116,7 +1135,9 @@ def _load_imap_folder_info(self):
             db_session.expunge(imapfolderinfo)
             return imapfolderinfo
 
-    def _update_imap_folder_info(self, attrname, value) -> None:
+    def _update_imap_folder_info(  # type: ignore[no-untyped-def]
+        self, attrname, value
+    ) -> None:
         with session_scope(self.namespace_id) as db_session:
             imapfolderinfo = (
                 db_session.query(ImapFolderInfo)
@@ -1129,7 +1150,7 @@ def _update_imap_folder_info(self, attrname, value) -> None:
             setattr(imapfolderinfo, attrname, value)
             db_session.commit()
 
-    def uidvalidity_cb(  # noqa: ANN201
+    def uidvalidity_cb(  # type: ignore[no-untyped-def]  # noqa: ANN201
         self, account_id, folder_name, select_info
     ):
         assert folder_name == self.folder_name
@@ -1164,7 +1185,8 @@ def uidvalidity_cb(
     account_id: int, folder_name: str, select_info: dict[bytes, Any]
 ) -> dict[bytes, Any]:
     assert (  # noqa: PT018
-        folder_name is not None and select_info is not None
+        folder_name is not None  # type: ignore[redundant-expr]
+        and select_info is not None
     ), "must start IMAP session before verifying UIDVALIDITY"
     with session_scope(account_id) as db_session:
         saved_folder_info = common.get_folder_info(
diff --git a/inbox/mailsync/backends/imap/monitor.py b/inbox/mailsync/backends/imap/monitor.py
index 4f553deef..128305f49 100644
--- a/inbox/mailsync/backends/imap/monitor.py
+++ b/inbox/mailsync/backends/imap/monitor.py
@@ -32,7 +32,7 @@ class ImapSyncMonitor(BaseMailSyncMonitor):
 
     sync_engine_class: ClassVar[type[FolderSyncEngine]] = FolderSyncEngine
 
-    def __init__(
+    def __init__(  # type: ignore[no-untyped-def]
         self, account, heartbeat: int = 1, refresh_frequency: int = 30
     ) -> None:
         self.refresh_frequency = refresh_frequency
@@ -45,7 +45,7 @@ def __init__(
 
         BaseMailSyncMonitor.__init__(self, account, heartbeat)
 
     @retry_crispin
-    def prepare_sync(self):  # noqa: ANN201
+    def prepare_sync(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         """
         Gets and save Folder objects for folders on the IMAP backend. Returns a
         list of folder names for the folders we want to sync (in order).
@@ -62,7 +62,9 @@ def prepare_sync(self):  # noqa: ANN201
         self.saved_remote_folders = remote_folders
 
         return sync_folders
 
-    def save_folder_names(self, db_session, raw_folders) -> None:
+    def save_folder_names(  # type: ignore[no-untyped-def]
+        self, db_session, raw_folders
+    ) -> None:
         """
         Save the folders present on the remote backend for an account.
@@ -170,15 +172,15 @@ def start_new_folder_sync_engines(self) -> None:
 
     def start_delete_handler(self) -> None:
         if self.delete_handler is None:
-            self.delete_handler = DeleteHandler(
+            self.delete_handler = DeleteHandler(  # type: ignore[assignment]
                 account_id=self.account_id,
                 namespace_id=self.namespace_id,
                 provider_name=self.provider_name,
                 uid_accessor=lambda m: m.imapuids,
             )
-            self.delete_handler.start()
+            self.delete_handler.start()  # type: ignore[attr-defined]
 
-    def sync(self) -> None:
+    def sync(self) -> None:  # type: ignore[override]
         try:
             self.start_delete_handler()
             self.start_new_folder_sync_engines()
@@ -201,7 +203,7 @@ def stop(self) -> None:
         from inbox.mailsync.backends.gmail import GmailSyncMonitor
 
         if self.delete_handler:
-            self.delete_handler.kill()
+            self.delete_handler.kill()  # type: ignore[unreachable]
         kill_all(self.folder_monitors, block=False)
         if isinstance(self, GmailSyncMonitor):
             kill_all(self.label_rename_handlers.values(), block=False)
diff --git a/inbox/mailsync/frontend.py b/inbox/mailsync/frontend.py
index 50fcc95d1..d7308e6d4 100644
--- a/inbox/mailsync/frontend.py
+++ b/inbox/mailsync/frontend.py
@@ -1,7 +1,7 @@
 import threading
 
 from flask import Flask, jsonify, request
-from pympler import muppy, summary
+from pympler import muppy, summary  # type: ignore[import-untyped]
 from werkzeug.serving import WSGIRequestHandler, run_simple
 
 from inbox.instrumentation import ProfileCollector
@@ -15,11 +15,11 @@ class ProfilingHTTPFrontend:
     syncs.
""" # noqa: D404 - def __init__(self, port, profile) -> None: + def __init__(self, port, profile) -> None: # type: ignore[no-untyped-def] self.port = port self.profiler = ProfileCollector() if profile else None - def _create_app(self): + def _create_app(self): # type: ignore[no-untyped-def] app = Flask(__name__) app.config["JSON_SORT_KEYS"] = False self._create_app_impl(app) @@ -30,17 +30,17 @@ def start(self) -> None: self.profiler.start() app = self._create_app() - threading._start_new_thread( + threading._start_new_thread( # type: ignore[attr-defined] run_simple, ("0.0.0.0", self.port, app), {"request_handler": _QuietHandler}, ) - def _create_app_impl(self, app) -> None: + def _create_app_impl(self, app) -> None: # type: ignore[no-untyped-def] @app.route("/profile") - def profile(): + def profile(): # type: ignore[no-untyped-def] if self.profiler is None: - return "Profiling disabled\n", 404 + return ("Profiling disabled\n", 404) resp = self.profiler.stats() if request.args.get("reset ") in (1, "true"): self.profiler.reset() @@ -51,7 +51,7 @@ def load() -> str: return "Load tracing disabled\n" @app.route("/mem") - def mem(): + def mem(): # type: ignore[no-untyped-def] objs = muppy.get_objects() summ = summary.summarize(objs) return "\n".join(summary.format_(summ)) + "\n" @@ -62,16 +62,18 @@ class SyncbackHTTPFrontend(ProfilingHTTPFrontend): class SyncHTTPFrontend(ProfilingHTTPFrontend): - def __init__(self, sync_service, port, profile) -> None: + def __init__( # type: ignore[no-untyped-def] + self, sync_service, port, profile + ) -> None: self.sync_service = sync_service super().__init__(port, profile) - def _create_app_impl(self, app) -> None: + def _create_app_impl(self, app) -> None: # type: ignore[no-untyped-def] super()._create_app_impl(app) @app.route("/unassign", methods=["POST"]) - def unassign_account(): - account_id = request.json["account_id"] + def unassign_account(): # type: ignore[no-untyped-def] + account_id = request.json["account_id"] # type: ignore[index] ret = self.sync_service.stop_sync(account_id) if ret: return "OK" @@ -79,7 +81,7 @@ def unassign_account(): return ("Account not assigned to this process", 409) @app.route("/build-metadata", methods=["GET"]) - def build_metadata(): + def build_metadata(): # type: ignore[no-untyped-def] filename = "/usr/share/python/cloud-core/metadata.txt" with open(filename) as f: # noqa: PTH123 _, build_id = f.readline().rstrip("\n").split() @@ -93,5 +95,7 @@ def build_metadata(): class _QuietHandler(WSGIRequestHandler): - def log_request(self, *args, **kwargs) -> None: + def log_request( # type: ignore[no-untyped-def] + self, *args, **kwargs + ) -> None: """Suppress request logging so as not to pollute application logs.""" diff --git a/inbox/mailsync/gc.py b/inbox/mailsync/gc.py index d31c06bbd..6e1b485e7 100644 --- a/inbox/mailsync/gc.py +++ b/inbox/mailsync/gc.py @@ -1,8 +1,10 @@ import datetime -from sqlalchemy import func -from sqlalchemy.orm import load_only -from sqlalchemy.orm.exc import ObjectDeletedError +from sqlalchemy import func # type: ignore[import-untyped] +from sqlalchemy.orm import load_only # type: ignore[import-untyped] +from sqlalchemy.orm.exc import ( # type: ignore[import-untyped] + ObjectDeletedError, +) from inbox import interruptible_threading from inbox.crispin import connection_pool @@ -54,7 +56,7 @@ class DeleteHandler(InterruptibleThread): """ - def __init__( + def __init__( # type: ignore[no-untyped-def] self, account_id, namespace_id, @@ -92,7 +94,7 @@ def _run_impl(self) -> None: 
             self.gc_deleted_threads(current_time)
             interruptible_threading.sleep(self.message_ttl.total_seconds())
 
-    def check(self, current_time) -> None:
+    def check(self, current_time) -> None:  # type: ignore[no-untyped-def]
         dangling_sha256s = set()
 
         with session_scope(self.namespace_id) as db_session:
@@ -201,7 +203,9 @@ def gc_deleted_categories(self) -> None:
                 db_session.delete(category)
             db_session.commit()
 
-    def gc_deleted_threads(self, current_time) -> None:
+    def gc_deleted_threads(  # type: ignore[no-untyped-def]
+        self, current_time
+    ) -> None:
         with session_scope(self.namespace_id) as db_session:
             deleted_threads = (
                 db_session.query(Thread)
@@ -236,7 +240,7 @@ class LabelRenameHandler(InterruptibleThread):
 
     """
 
-    def __init__(
+    def __init__(  # type: ignore[no-untyped-def]
         self, account_id, namespace_id, label_name, semaphore
     ) -> None:
         bind_context(self, "renamehandler", account_id)
@@ -250,7 +254,7 @@ def __init__(
 
         self.name = f"{self.__class__.__name__}(account_id={account_id!r}, label_name={label_name!r})"
 
-    def _run(self):
+    def _run(self):  # type: ignore[no-untyped-def]
         interruptible_threading.check_interrupted()
         return retry_with_logging(self._run_impl, account_id=self.account_id)
diff --git a/inbox/mailsync/service.py b/inbox/mailsync/service.py
index 6726ceb13..27c5d2070 100644
--- a/inbox/mailsync/service.py
+++ b/inbox/mailsync/service.py
@@ -4,8 +4,8 @@
 from functools import cache
 from threading import BoundedSemaphore
 
-from sqlalchemy import and_, or_
-from sqlalchemy.exc import OperationalError
+from sqlalchemy import and_, or_  # type: ignore[import-untyped]
+from sqlalchemy.exc import OperationalError  # type: ignore[import-untyped]
 
 from inbox.config import config
 from inbox.contacts.remote_sync import ContactSync
@@ -43,10 +43,10 @@
 SYNC_EVENT_QUEUE_NAME = "sync:event_queue:{}"
 SHARED_SYNC_EVENT_QUEUE_NAME = "sync:shared_event_queue:{}"
 
-SHARED_SYNC_EVENT_QUEUE_ZONE_MAP = {}
+SHARED_SYNC_EVENT_QUEUE_ZONE_MAP: dict[str, EventQueue] = {}
 
 
-def shared_sync_event_queue_for_zone(zone):  # noqa: ANN201
+def shared_sync_event_queue_for_zone(zone):  # type: ignore[no-untyped-def]  # noqa: ANN201
     queue_name = SHARED_SYNC_EVENT_QUEUE_NAME.format(zone)
     if queue_name not in SHARED_SYNC_EVENT_QUEUE_ZONE_MAP:
         SHARED_SYNC_EVENT_QUEUE_ZONE_MAP[queue_name] = EventQueue(queue_name)
@@ -86,7 +86,7 @@ class SyncService:
 
     """
 
-    def __init__(
+    def __init__(  # type: ignore[no-untyped-def]
         self,
         process_identifier,
         process_number,
@@ -104,10 +104,10 @@ def __init__(
             supported_providers=list(module_registry),
         )
 
-        self.syncing_accounts = set()
-        self.email_sync_monitors = {}
-        self.contact_sync_monitors = {}
-        self.event_sync_monitors = {}
+        self.syncing_accounts = set()  # type: ignore[var-annotated]
+        self.email_sync_monitors = {}  # type: ignore[var-annotated]
+        self.contact_sync_monitors = {}  # type: ignore[var-annotated]
+        self.event_sync_monitors = {}  # type: ignore[var-annotated]
         # Randomize the poll_interval so we maintain at least a little fairness
         # when using a timeout while blocking on the redis queues.
         min_poll_interval = 5
@@ -189,7 +189,9 @@ def _flush_private_queue(self) -> None:
             if event is None:
                 break
 
-    def handle_shared_queue_event(self, event) -> None:
+    def handle_shared_queue_event(  # type: ignore[no-untyped-def]
+        self, event
+    ) -> None:
         # Conservatively, stop accepting accounts if the process pending averages
         # is over PENDING_AVGS_THRESHOLD or if the total of accounts being
         # synced by a single process exceeds the threshold. Excessive
@@ -198,7 +200,9 @@ def handle_shared_queue_event(self, event) -> None:
         # at the same time.
         pending_avgs_over_threshold = False
         if self._pending_avgs_provider is not None:
-            pending_avgs = self._pending_avgs_provider.get_pending_avgs()
+            pending_avgs = (  # type: ignore[unreachable]
+                self._pending_avgs_provider.get_pending_avgs()
+            )
             pending_avgs_over_threshold = (
                 pending_avgs[15] >= PENDING_AVGS_THRESHOLD
             )
@@ -258,7 +262,7 @@ def poll(self) -> None:
             )
             log_uncaught_errors()

-    def account_ids_to_sync(self):  # noqa: ANN201
+    def account_ids_to_sync(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         with global_session_scope() as db_session:
             return {
                 r[0]
@@ -285,7 +289,7 @@ def account_ids_to_sync(self):  # noqa: ANN201
                 .all()
             }

-    def account_ids_owned(self):  # noqa: ANN201
+    def account_ids_owned(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         with global_session_scope() as db_session:
             return {
                 r[0]
@@ -294,7 +298,9 @@ def account_ids_owned(self):  # noqa: ANN201
                 .all()
             }

-    def register_pending_avgs_provider(self, pending_avgs_provider) -> None:
+    def register_pending_avgs_provider(  # type: ignore[no-untyped-def]
+        self, pending_avgs_provider
+    ) -> None:
         self._pending_avgs_provider = pending_avgs_provider

     def start_event_sync(self, account: Account) -> None:
@@ -313,7 +319,7 @@ def start_event_sync(self, account: Account) -> None:
             account.email_address,
             account.verbose_provider,
             account.id,
-            account.namespace.id,
+            account.namespace.id,  # type: ignore[attr-defined]
             provider_class=provider_class,
         )
         self.log.info(
@@ -403,7 +409,7 @@ def stop(self) -> None:
         self.log.info("stopping sync process")
         self.keep_running = False

-    def stop_sync(self, account_id) -> bool:
+    def stop_sync(self, account_id) -> bool:  # type: ignore[no-untyped-def]
         """
         Stops the sync for the account with given account_id. If that account
         doesn't exist, does nothing.
diff --git a/inbox/models/account.py b/inbox/models/account.py
index fd158696a..896861427 100644
--- a/inbox/models/account.py
+++ b/inbox/models/account.py
@@ -3,7 +3,7 @@
 from datetime import datetime
 from typing import Literal, Never

-from sqlalchemy import (
+from sqlalchemy import (  # type: ignore[import-untyped]
     BigInteger,
     Boolean,
     Column,
@@ -16,9 +16,9 @@
     event,
     inspect,
 )
-from sqlalchemy.orm import relationship
-from sqlalchemy.orm.session import Session
-from sqlalchemy.sql.expression import false
+from sqlalchemy.orm import relationship  # type: ignore[import-untyped]
+from sqlalchemy.orm.session import Session  # type: ignore[import-untyped]
+from sqlalchemy.sql.expression import false  # type: ignore[import-untyped]

 from inbox.config import config
 from inbox.error_handling import log_uncaught_errors
@@ -57,7 +57,7 @@ class Account(
     UpdatedAtMixin,
     DeletedAtMixin,
 ):
-    API_OBJECT_NAME = "account"
+    API_OBJECT_NAME = "account"  # type: ignore[assignment]

     @property
     def provider(self) -> Never:
@@ -69,7 +69,7 @@ def provider(self) -> Never:
         raise NotImplementedError

     @property
-    def verbose_provider(self):  # noqa: ANN201
+    def verbose_provider(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         """
         A detailed identifier for the account provider
         (e.g., 'gmail', 'office365', 'outlook').
@@ -89,17 +89,17 @@ def category_type(self) -> CategoryType:
         raise NotImplementedError

     @property
-    def auth_handler(self):  # noqa: ANN201
+    def auth_handler(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         from inbox.auth.base import handler_from_provider

         return handler_from_provider(self.provider)

     @property
-    def provider_info(self):  # noqa: ANN201
+    def provider_info(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return provider_info(self.provider)

     @property
-    def thread_cls(self):  # noqa: ANN201
+    def thread_cls(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         from inbox.models.thread import Thread

         return Thread
@@ -139,8 +139,8 @@ def thread_cls(self):  # noqa: ANN201
     def create_emailed_events_calendar(self) -> None:
         if not self._emailed_events_calendar:
             calname = "Emailed events"
-            cal = Calendar(
-                namespace=self.namespace,
+            cal = Calendar(  # type: ignore[call-arg]
+                namespace=self.namespace,  # type: ignore[attr-defined]
                 description=calname,
                 uid="inbox",
                 name=calname,
@@ -149,12 +149,14 @@ def create_emailed_events_calendar(self) -> None:
             self._emailed_events_calendar = cal

     @property
-    def emailed_events_calendar(self):  # noqa: ANN201
+    def emailed_events_calendar(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         self.create_emailed_events_calendar()
         return self._emailed_events_calendar

     @emailed_events_calendar.setter
-    def emailed_events_calendar(self, cal) -> None:
+    def emailed_events_calendar(  # type: ignore[no-untyped-def]
+        self, cal
+    ) -> None:
         self._emailed_events_calendar = cal

     sync_host = Column(String(255), nullable=True)
@@ -172,7 +174,7 @@ def emailed_events_calendar(self, cal) -> None:
     # folders and heartbeats.

     @property
-    def sync_enabled(self):  # noqa: ANN201
+    def sync_enabled(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return self.sync_should_run

     sync_state = Column(
@@ -185,7 +187,7 @@ def sync_enabled(self):  # noqa: ANN201
     )

     @property
-    def sync_status(self):  # noqa: ANN201
+    def sync_status(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         d = dict(
             id=self.id,
             email=self.email_address,
@@ -200,32 +202,46 @@ def sync_status(self):  # noqa: ANN201
         return d

     @property
-    def sync_error(self):  # noqa: ANN201
+    def sync_error(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return self._sync_status.get("sync_error")

     @property
-    def initial_sync_start(self):  # noqa: ANN201
-        if len(self.folders) == 0 or any(
-            [f.initial_sync_start is None for f in self.folders]
+    def initial_sync_start(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
+        if len(self.folders) == 0 or any(  # type: ignore[attr-defined]
+            [
+                f.initial_sync_start is None
+                for f in self.folders  # type: ignore[attr-defined]
+            ]
         ):
             return None
-        return min(f.initial_sync_start for f in self.folders)
+        return min(
+            f.initial_sync_start
+            for f in self.folders  # type: ignore[attr-defined]
+        )

     @property
-    def initial_sync_end(self):  # noqa: ANN201
-        if len(self.folders) == 0 or any(
-            [f.initial_sync_end is None for f in self.folders]
+    def initial_sync_end(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
+        if len(self.folders) == 0 or any(  # type: ignore[attr-defined]
+            [
+                f.initial_sync_end is None
+                for f in self.folders  # type: ignore[attr-defined]
+            ]
         ):
             return None
-        return max(f.initial_sync_end for f in self.folders)
+        return max(
+            f.initial_sync_end
+            for f in self.folders  # type: ignore[attr-defined]
+        )

     @property
-    def initial_sync_duration(self):  # noqa: ANN201
+    def initial_sync_duration(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if not self.initial_sync_start or not self.initial_sync_end:
             return None
         return (self.initial_sync_end - self.initial_sync_start).total_seconds()

-    def update_sync_error(self, error=None) -> None:
+    def update_sync_error(  # type: ignore[no-untyped-def]
+        self, error=None
+    ) -> None:
         if error is None:
             self._sync_status["sync_error"] = None
         else:
@@ -264,13 +280,15 @@ def sync_started(self) -> None:

         self.sync_state = "running"

-    def enable_sync(self, sync_host=None) -> None:
+    def enable_sync(  # type: ignore[no-untyped-def]
+        self, sync_host=None
+    ) -> None:
         """Tell the monitor that this account should be syncing."""
         self.sync_should_run = True
         if sync_host is not None:
             self.desired_sync_host = sync_host

-    def disable_sync(self, reason) -> None:
+    def disable_sync(self, reason) -> None:  # type: ignore[no-untyped-def]
         """Tell the monitor that this account should stop syncing."""
         self.sync_should_run = False
         self._sync_status["sync_disabled_reason"] = reason
@@ -306,7 +324,9 @@ def unmark_for_deletion(self) -> None:
             self.sync_state = "running"
             inspect(self).session.commit()

-    def sync_stopped(self, requesting_host) -> bool:
+    def sync_stopped(  # type: ignore[no-untyped-def]
+        self, requesting_host
+    ) -> bool:
         """
         Record transition to stopped state. Should be called after the
         sync is actually stopped, not when the request to stop it is made.
@@ -325,21 +345,21 @@ def sync_stopped(self, requesting_host) -> bool:
         return False

     @classmethod
-    def get(cls, id_, session):  # noqa: ANN206
+    def get(cls, id_, session):  # type: ignore[no-untyped-def]  # noqa: ANN206
         q = session.query(cls)
         q = q.filter(cls.id == bindparam("id_"))
         return q.params(id_=id_).first()

     @property
-    def is_killed(self):  # noqa: ANN201
+    def is_killed(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return self.sync_state == "killed"

     @property
-    def is_running(self):  # noqa: ANN201
+    def is_running(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return self.sync_state == "running"

     @property
-    def is_marked_for_deletion(self):  # noqa: ANN201
+    def is_marked_for_deletion(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return (
             self.sync_state in ("stopped", "killed", "invalid")
             and self.sync_should_run is False
@@ -360,7 +380,9 @@ def should_suppress_transaction_creation(self) -> bool:
     def server_settings(self) -> None:
         return None

-    def get_raw_message_contents(self, message) -> Never:
+    def get_raw_message_contents(  # type: ignore[no-untyped-def]
+        self, message
+    ) -> Never:
        # Get the raw contents of a message. We do this differently
        # for every backend (Gmail, IMAP, EAS), and the best way
        # to do this across repos is to make it a method of the
@@ -374,7 +396,7 @@ def get_raw_message_contents(self, message) -> Never:
     }


-def should_send_event(obj):  # noqa: ANN201
+def should_send_event(obj):  # type: ignore[no-untyped-def]  # noqa: ANN201
     if not isinstance(obj, Account):
         return False
     inspected_obj = inspect(obj)
@@ -388,11 +410,11 @@ def should_send_event(obj):  # noqa: ANN201
     return hist.has_changes()


-def already_registered_listener(obj) -> bool:
+def already_registered_listener(obj) -> bool:  # type: ignore[no-untyped-def]
     return getattr(obj, "_listener_state", None) is not None


-def update_listener_state(obj) -> None:
+def update_listener_state(obj) -> None:  # type: ignore[no-untyped-def]
     obj._listener_state["sync_should_run"] = obj.sync_should_run
     obj._listener_state["sync_host"] = obj.sync_host
     obj._listener_state["desired_sync_host"] = obj.desired_sync_host
@@ -400,14 +422,16 @@ def update_listener_state(obj) -> None:


 @event.listens_for(Session, "after_flush")
-def after_flush(session, flush_context) -> None:
+def after_flush(  # type: ignore[no-untyped-def]
+    session, flush_context
+) -> None:
     from inbox.mailsync.service import (
         SYNC_EVENT_QUEUE_NAME,
         shared_sync_event_queue_for_zone,
     )

-    def send_migration_events(obj_state):
-        def f(session) -> None:
+    def send_migration_events(obj_state):  # type: ignore[no-untyped-def]
+        def f(session) -> None:  # type: ignore[no-untyped-def]
             if obj_state["sent_event"]:
                 return
@@ -484,12 +508,16 @@ def f(session) -> None:
         if already_registered_listener(obj):
             update_listener_state(obj)
         else:
-            obj._listener_state = {"id": obj.id}
+            obj._listener_state = {  # type: ignore[attr-defined]
+                "id": obj.id
+            }
             update_listener_state(obj)
             event.listen(
                 session,
                 "after_commit",
-                send_migration_events(obj._listener_state),
+                send_migration_events(
+                    obj._listener_state  # type: ignore[attr-defined]
+                ),
             )

     for obj in session.dirty:
diff --git a/inbox/models/action_log.py b/inbox/models/action_log.py
index a46bc93d8..534be671f 100644
--- a/inbox/models/action_log.py
+++ b/inbox/models/action_log.py
@@ -1,4 +1,4 @@
-from sqlalchemy import (
+from sqlalchemy import (  # type: ignore[import-untyped]
     BigInteger,
     Column,
     Enum,
@@ -9,7 +9,7 @@
     Text,
     desc,
 )
-from sqlalchemy.orm import relationship
+from sqlalchemy.orm import relationship  # type: ignore[import-untyped]

 from inbox.logging import get_logger
 from inbox.models.base import MailSyncBase
@@ -20,7 +20,7 @@
 log = get_logger()


-def schedule_action(
+def schedule_action(  # type: ignore[no-untyped-def]
     func_name, record, namespace_id, db_session, **kwargs
 ) -> None:
     # Ensure that the record's id is non-null
@@ -73,10 +73,10 @@ class ActionLog(MailSyncBase, UpdatedAtMixin, DeletedAtMixin):
     extra_args = Column(JSON, nullable=True)

     @classmethod
-    def create(  # noqa: ANN206
+    def create(  # type: ignore[no-untyped-def]  # noqa: ANN206
         cls, action, table_name, record_id, namespace_id, extra_args
     ):
-        return cls(
+        return cls(  # type: ignore[call-arg]
             action=action,
             table_name=table_name,
             record_id=record_id,
diff --git a/inbox/models/backends/calendar_sync_account.py b/inbox/models/backends/calendar_sync_account.py
index 818cfd43f..4d1def354 100644
--- a/inbox/models/backends/calendar_sync_account.py
+++ b/inbox/models/backends/calendar_sync_account.py
@@ -1,6 +1,6 @@
 from datetime import datetime, timedelta

-from sqlalchemy import Column, DateTime
+from sqlalchemy import Column, DateTime  # type: ignore[import-untyped]


 class CalendarSyncAccountMixin:
diff --git a/inbox/models/backends/generic.py b/inbox/models/backends/generic.py
index 119b489ed..9894b9630 100644
--- a/inbox/models/backends/generic.py
+++ b/inbox/models/backends/generic.py
@@ -1,5 +1,10 @@
-from sqlalchemy import Boolean, Column, ForeignKey, String
-from sqlalchemy.orm import relationship
+from sqlalchemy import (  # type: ignore[import-untyped]
+    Boolean,
+    Column,
+    ForeignKey,
+    String,
+)
+from sqlalchemy.orm import relationship  # type: ignore[import-untyped]

 from inbox.models.account import CategoryType
 from inbox.models.backends.imap import ImapAccount
@@ -61,7 +66,7 @@ class GenericAccount(ImapAccount):
     __mapper_args__ = {"polymorphic_identity": "genericaccount"}

     @property
-    def verbose_provider(self):  # noqa: ANN201
+    def verbose_provider(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if self.provider == "custom":
             return "imap"
         return self.provider
@@ -87,7 +92,7 @@ def imap_password(self) -> str:

     @imap_password.setter
     def imap_password(self, value: str | bytes) -> None:
-        value: bytes = self.valid_password(value)
+        value: bytes = self.valid_password(value)  # type: ignore[no-redef]
         if not self.imap_secret:
             self.imap_secret = Secret()
         self.imap_secret.secret = value
@@ -99,7 +104,7 @@ def smtp_password(self) -> str:

     @smtp_password.setter
     def smtp_password(self, value: str | bytes) -> None:
-        value: bytes = self.valid_password(value)
+        value: bytes = self.valid_password(value)  # type: ignore[no-redef]
         if not self.smtp_secret:
             self.smtp_secret = Secret()
         self.smtp_secret.secret = value
@@ -113,22 +118,22 @@ def category_type(self) -> CategoryType:
         return "folder"

     @property
-    def thread_cls(self):  # noqa: ANN201
+    def thread_cls(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         from inbox.models.backends.imap import ImapThread

         return ImapThread

     @property
-    def actionlog_cls(self):  # noqa: ANN201
+    def actionlog_cls(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         from inbox.models.action_log import ActionLog

         return ActionLog

     @property
-    def server_settings(self):  # noqa: ANN201
+    def server_settings(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         settings = {}
-        settings["imap_host"], settings["imap_port"] = self.imap_endpoint
-        settings["smtp_host"], settings["smtp_port"] = self.smtp_endpoint
+        (settings["imap_host"], settings["imap_port"]) = self.imap_endpoint
+        (settings["smtp_host"], settings["smtp_port"]) = self.smtp_endpoint
         return settings

     # Override provider_info and auth_handler to make sure we always get
@@ -138,13 +143,13 @@ def server_settings(self):  # noqa: ANN201
     # provider attribute to "gmail" to use the Gmail sync engine.
     @property
-    def provider_info(self):  # noqa: ANN201
+    def provider_info(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         provider_info = super().provider_info
         provider_info["auth"] = "password"
         return provider_info

     @property
-    def auth_handler(self):  # noqa: ANN201
+    def auth_handler(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         from inbox.auth.base import handler_from_provider

         return handler_from_provider("custom")
diff --git a/inbox/models/backends/gmail.py b/inbox/models/backends/gmail.py
index 687b5d510..da1fd01d8 100644
--- a/inbox/models/backends/gmail.py
+++ b/inbox/models/backends/gmail.py
@@ -1,4 +1,8 @@
-from sqlalchemy import Column, ForeignKey, String
+from sqlalchemy import (  # type: ignore[import-untyped]
+    Column,
+    ForeignKey,
+    String,
+)

 from inbox.config import config
 from inbox.logging import get_logger
@@ -32,19 +36,19 @@ class GmailAccount(CalendarSyncAccountMixin, OAuthAccount, ImapAccount):
     scope = Column(String(512))

     @property
-    def email_scopes(self):  # noqa: ANN201
+    def email_scopes(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return GOOGLE_EMAIL_SCOPES

     @property
-    def contacts_scopes(self):  # noqa: ANN201
+    def contacts_scopes(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return GOOGLE_CONTACTS_SCOPES

     @property
-    def calendar_scopes(self):  # noqa: ANN201
+    def calendar_scopes(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return GOOGLE_CALENDAR_SCOPES

     @property
-    def scopes(self):  # noqa: ANN201
+    def scopes(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return [
             *self.calendar_scopes,
             *self.contacts_scopes,
@@ -52,7 +56,7 @@ def scopes(self):  # noqa: ANN201
         ]

     @property
-    def provider(self):  # noqa: ANN201
+    def provider(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return PROVIDER

     @property
@@ -60,18 +64,20 @@ def category_type(self) -> CategoryType:
         return "label"

     @property
-    def thread_cls(self):  # noqa: ANN201
+    def thread_cls(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         from inbox.models.backends.imap import ImapThread

         return ImapThread

     @property
-    def actionlog_cls(self):  # noqa: ANN201
+    def actionlog_cls(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         from inbox.models.action_log import ActionLog

         return ActionLog

-    def get_raw_message_contents(self, message):  # noqa: ANN201
+    def get_raw_message_contents(  # type: ignore[no-untyped-def]  # noqa: ANN201
+        self, message
+    ):
         from inbox.s3.backends.gmail import get_gmail_raw_contents

         return get_gmail_raw_contents(message)
diff --git a/inbox/models/backends/imap.py b/inbox/models/backends/imap.py
index a773ecd77..0de4f2d1c 100644
--- a/inbox/models/backends/imap.py
+++ b/inbox/models/backends/imap.py
@@ -1,7 +1,7 @@
 import json
 from datetime import datetime

-from sqlalchemy import (
+from sqlalchemy import (  # type: ignore[import-untyped]
     BigInteger,
     Boolean,
     Column,
@@ -13,10 +13,16 @@
     String,
     desc,
 )
-from sqlalchemy.ext.associationproxy import association_proxy
-from sqlalchemy.orm import backref, object_session, relationship
-from sqlalchemy.schema import UniqueConstraint
-from sqlalchemy.sql.expression import false
+from sqlalchemy.ext.associationproxy import (  # type: ignore[import-untyped]
+    association_proxy,
+)
+from sqlalchemy.orm import (  # type: ignore[import-untyped]
+    backref,
+    object_session,
+    relationship,
+)
+from sqlalchemy.schema import UniqueConstraint  # type: ignore[import-untyped]
+from sqlalchemy.sql.expression import false  # type: ignore[import-untyped]

 from inbox.logging import get_logger
 from inbox.models.account import Account
@@ -48,7 +54,7 @@ class ImapAccount(Account):
     _smtp_server_port = Column(Integer, nullable=False, server_default="587")

     @property
-    def imap_endpoint(self):  # noqa: ANN201
+    def imap_endpoint(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if self._imap_server_host is not None:
             # We have to take care to coerce to int here and below, because
             # mysqlclient returns Integer columns as type long, and
@@ -59,25 +65,27 @@ def imap_endpoint(self):  # noqa: ANN201
             return self.provider_info["imap"]

     @imap_endpoint.setter
-    def imap_endpoint(self, endpoint) -> None:
+    def imap_endpoint(self, endpoint) -> None:  # type: ignore[no-untyped-def]
         host, port = endpoint
         self._imap_server_host = host
         self._imap_server_port = int(port)

     @property
-    def smtp_endpoint(self):  # noqa: ANN201
+    def smtp_endpoint(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if self._smtp_server_host is not None:
             return (self._smtp_server_host, int(self._smtp_server_port))
         else:
             return self.provider_info["smtp"]

     @smtp_endpoint.setter
-    def smtp_endpoint(self, endpoint) -> None:
+    def smtp_endpoint(self, endpoint) -> None:  # type: ignore[no-untyped-def]
         host, port = endpoint
         self._smtp_server_host = host
         self._smtp_server_port = int(port)

-    def get_raw_message_contents(self, message):  # noqa: ANN201
+    def get_raw_message_contents(  # type: ignore[no-untyped-def]  # noqa: ANN201
+        self, message
+    ):
         from inbox.s3.backends.imap import get_imap_raw_contents

         return get_imap_raw_contents(message)
@@ -116,7 +124,9 @@ class ImapUid(MailSyncBase, UpdatedAtMixin, DeletedAtMixin):
     )

     labels = association_proxy(
-        "labelitems", "label", creator=lambda label: LabelItem(label=label)
+        "labelitems",
+        "label",
+        creator=lambda label: LabelItem(label=label),  # type: ignore[call-arg]
     )

     # Flags #
@@ -144,7 +154,9 @@ def update_flags(self, new_flags: list[bytes]) -> None:
         """  # noqa: D401
         changed = False
-        new_flags = {flag.decode() for flag in new_flags}
+        new_flags = {  # type: ignore[assignment]
+            flag.decode() for flag in new_flags
+        }
         columns_for_flag = {
             "\\Draft": "is_draft",
             "\\Seen": "is_seen",
@@ -154,11 +166,13 @@ def update_flags(self, new_flags: list[bytes]) -> None:
         }
         for flag, column in columns_for_flag.items():
             prior_column_value = getattr(self, column)
-            new_column_value = flag in new_flags
+            new_column_value = (
+                flag in new_flags  # type: ignore[comparison-overlap]
+            )
             if prior_column_value != new_column_value:
                 changed = True
                 setattr(self, column, new_column_value)
-            new_flags.discard(flag)
+            new_flags.discard(flag)  # type: ignore[attr-defined]

         extra_flags = sorted(new_flags)
@@ -171,7 +185,7 @@ def update_flags(self, new_flags: list[bytes]) -> None:
             extra_flags.pop()

         self.extra_flags = extra_flags
-        return changed
+        return changed  # type: ignore[return-value]

     def update_labels(self, new_labels: list[str]) -> None:
         # TODO(emfree): This is all mad complicated. Simplify if possible?
@@ -197,7 +211,7 @@ def update_labels(self, new_labels: list[str]) -> None:
             elif label in category_map:
                 remote_labels.add((category_map[label], category_map[label]))
             else:
-                remote_labels.add((label, None))
+                remote_labels.add((label, None))  # type: ignore[arg-type]

         local_labels = {
             (lbl.name, lbl.canonical_name): lbl for lbl in self.labels
@@ -217,8 +231,8 @@ def update_labels(self, new_labels: list[str]) -> None:
                 self.labels.add(label)

     @property
-    def namespace(self):  # noqa: ANN201
-        return self.imapaccount.namespace
+    def namespace(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
+        return self.imapaccount.namespace  # type: ignore[attr-defined]

     @property
     def categories(self) -> set[Category]:
@@ -288,7 +302,9 @@ class ImapFolderInfo(MailSyncBase, UpdatedAtMixin, DeletedAtMixin):
     __table_args__ = (UniqueConstraint("account_id", "folder_id"),)


-def _choose_existing_thread_for_gmail(message, db_session):
+def _choose_existing_thread_for_gmail(  # type: ignore[no-untyped-def]
+    message, db_session
+):
     """
     For Gmail, determine if `message` should be added to an existing thread
     based on the value of `g_thrid`. If so, return the existing ImapThread
@@ -339,7 +355,7 @@ class ImapThread(Thread):
     g_thrid = Column(BigInteger, nullable=True, index=True, unique=False)

     @classmethod
-    def from_gmail_message(  # noqa: ANN206
+    def from_gmail_message(  # type: ignore[no-untyped-def]  # noqa: ANN206
         cls, session, namespace_id, message
     ):
         """
@@ -358,7 +374,7 @@ def from_gmail_message(  # noqa: ANN206
         if message.g_thrid is not None:
             thread = _choose_existing_thread_for_gmail(message, session)
             if thread is None:
-                thread = cls(
+                thread = cls(  # type: ignore[call-arg]
                     subject=message.subject,
                     g_thrid=message.g_thrid,
                     recentdate=message.received_date,
@@ -366,16 +382,18 @@ def from_gmail_message(  # noqa: ANN206
                     subjectdate=message.received_date,
                     snippet=message.snippet,
                 )
-        return thread
+        return thread  # type: ignore[possibly-undefined]

     @classmethod
-    def from_imap_message(cls, session, namespace_id, message):  # noqa: ANN206
+    def from_imap_message(  # type: ignore[no-untyped-def]  # noqa: ANN206
+        cls, session, namespace_id, message
+    ):
         if message.thread is not None:
             # If this message *already* has a thread associated with it, don't
             # create a new one.
             return message.thread

         clean_subject = cleanup_subject(message.subject)
-        thread = cls(
+        thread = cls(  # type: ignore[call-arg]
             subject=clean_subject,
             recentdate=message.received_date,
             namespace_id=namespace_id,
@@ -427,7 +445,7 @@ class ImapFolderSyncStatus(
     _metrics = Column(MutableDict.as_mutable(JSON), default={}, nullable=True)

     @property
-    def metrics(self):  # noqa: ANN201
+    def metrics(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         status = dict(name=self.folder.name, state=self.state)
         status.update(self._metrics or {})
@@ -443,10 +461,10 @@ def stop_sync(self) -> None:
         self._metrics["sync_end_time"] = datetime.utcnow()

     @property
-    def is_killed(self):  # noqa: ANN201
+    def is_killed(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return self._metrics.get("run_state") == "killed"

-    def update_metrics(self, metrics) -> None:
+    def update_metrics(self, metrics) -> None:  # type: ignore[no-untyped-def]
         sync_status_metrics = [
             "remote_uid_count",
             "delete_uid_count",
@@ -468,7 +486,7 @@ def update_metrics(self, metrics) -> None:
             self._metrics = metrics

     @property
-    def sync_enabled(self):  # noqa: ANN201
+    def sync_enabled(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         # sync is enabled if the folder's run bit is set, and the account's
         # run bit is set. (this saves us needing to reproduce account-state
         # transition logic on the folder level, and gives us a comparison bit
@@ -500,7 +518,7 @@ class LabelItem(MailSyncBase, UpdatedAtMixin, DeletedAtMixin):
     )

     @property
-    def namespace(self):  # noqa: ANN201
+    def namespace(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return self.label.namespace
diff --git a/inbox/models/backends/oauth.py b/inbox/models/backends/oauth.py
index 7f0dbc41c..23c735159 100644
--- a/inbox/models/backends/oauth.py
+++ b/inbox/models/backends/oauth.py
@@ -6,9 +6,11 @@
 from datetime import datetime, timedelta
 from hashlib import sha256

-from sqlalchemy import Column, ForeignKey
-from sqlalchemy.ext.declarative import declared_attr
-from sqlalchemy.orm import relationship
+from sqlalchemy import Column, ForeignKey  # type: ignore[import-untyped]
+from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+    declared_attr,
+)
+from sqlalchemy.orm import relationship  # type: ignore[import-untyped]

 from inbox.exceptions import OAuthError
 from inbox.logging import get_logger
@@ -17,14 +19,14 @@
 log = get_logger()


-def hash_token(token, prefix=None):  # noqa: ANN201
+def hash_token(token, prefix=None):  # type: ignore[no-untyped-def]  # noqa: ANN201
     if not token:
         return None
     string = f"{prefix}:{token}" if prefix else token
     return sha256(string.encode()).hexdigest()


-def log_token_usage(
+def log_token_usage(  # type: ignore[no-untyped-def]
     reason, refresh_token=None, access_token=None, account=None, scopes=None
 ) -> None:
     nylas_account_id = (
@@ -35,7 +37,7 @@ def log_token_usage(
         refresh_hash=hash_token(refresh_token, prefix="refresh_token"),
         access_hash=hash_token(access_token, prefix="access_token"),
         nylas_account_id=nylas_account_id,
-        email=account.email_address if account else None,
+        email=(account.email_address if account else None),
         scopes=scopes,
     )
@@ -85,7 +87,10 @@ def get_token(
     def get_cache_key(
         self, account: "OAuthAccount", scopes: list[str] | None
     ) -> tuple[str, tuple[str, ...] | None]:
-        return (account.id, tuple(scopes) if scopes else None)
+        return (
+            account.id,  # type: ignore[attr-defined]
+            (tuple(scopes) if scopes else None),
+        )

     def cache_token(
         self,
@@ -97,7 +102,7 @@ def cache_token(
         expires_in -= 10
         expiration = datetime.utcnow() + timedelta(seconds=expires_in)
         cache_key = self.get_cache_key(account, scopes)
-        self._token_cache[cache_key] = token, expiration
+        self._token_cache[cache_key] = (token, expiration)


 token_manager = TokenManager()
@@ -121,11 +126,11 @@ def scopes(self) -> list[str] | None:
         return None

     @declared_attr
-    def refresh_token_id(cls):  # noqa: ANN201, N805
+    def refresh_token_id(cls):  # type: ignore[no-untyped-def]  # noqa: ANN201, N805
         return Column(ForeignKey(Secret.id), nullable=False)

     @declared_attr
-    def secret(cls):  # noqa: ANN201, N805
+    def secret(cls):  # type: ignore[no-untyped-def]  # noqa: ANN201, N805
         return relationship(
             "Secret",
             cascade="all",
@@ -173,7 +178,7 @@ def set_secret(self, secret_type: SecretType, secret_value: bytes) -> None:
         self.secret.type = secret_type.value
         self.secret.secret = secret_value

-    def get_client_info(self):  # noqa: ANN201
+    def get_client_info(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         """
         Obtain the client ID and secret for this OAuth account.
@@ -181,8 +186,15 @@ def get_client_info(self):  # noqa: ANN201
             Tuple with (client_id, client_secret).

         """
-        if not self.client_id or self.client_id == self.OAUTH_CLIENT_ID:
-            return (self.OAUTH_CLIENT_ID, self.OAUTH_CLIENT_SECRET)
+        if (
+            not self.client_id  # type: ignore[attr-defined]
+            or self.client_id  # type: ignore[attr-defined]
+            == self.OAUTH_CLIENT_ID  # type: ignore[attr-defined]
+        ):
+            return (
+                self.OAUTH_CLIENT_ID,  # type: ignore[attr-defined]
+                self.OAUTH_CLIENT_SECRET,  # type: ignore[attr-defined]
+            )
         else:
             raise OAuthError("No valid tokens.")
@@ -204,14 +216,14 @@ def new_token(  # noqa: D417
         """  # noqa: D401
         try:
-            return self.auth_handler.acquire_access_token(
+            return self.auth_handler.acquire_access_token(  # type: ignore[attr-defined]
                 self, force_refresh=force_refresh, scopes=scopes
             )
         except Exception as e:
             log.warning(
                 f"Error while getting access token: {e}",
                 force_refresh=force_refresh,
-                account_id=self.id,
+                account_id=self.id,  # type: ignore[attr-defined]
                 exc_info=True,
             )
             raise
diff --git a/inbox/models/backends/outlook.py b/inbox/models/backends/outlook.py
index ec9d07956..d566c1f52 100644
--- a/inbox/models/backends/outlook.py
+++ b/inbox/models/backends/outlook.py
@@ -1,4 +1,8 @@
-from sqlalchemy import Column, ForeignKey, String
+from sqlalchemy import (  # type: ignore[import-untyped]
+    Column,
+    ForeignKey,
+    String,
+)

 from inbox.config import config
 from inbox.models.account import CategoryType
@@ -45,7 +49,7 @@ class OutlookAccount(CalendarSyncAccountMixin, ImapAccount, OAuthAccount):
     locale = Column(String(8))

     @property
-    def email_scopes(self):  # noqa: ANN201
+    def email_scopes(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return MICROSOFT_EMAIL_SCOPES

     @property
@@ -53,15 +57,15 @@ def contacts_scopes(self) -> None:
         return None

     @property
-    def calendar_scopes(self):  # noqa: ANN201
+    def calendar_scopes(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return MICROSOFT_CALENDAR_SCOPES

     @property
-    def scopes(self):  # noqa: ANN201
+    def scopes(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return self.email_scopes

     @property
-    def provider(self):  # noqa: ANN201
+    def provider(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return PROVIDER

     @property
@@ -69,13 +73,13 @@ def category_type(self) -> CategoryType:
         return "folder"

     @property
-    def thread_cls(self):  # noqa: ANN201
+    def thread_cls(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         from inbox.models.backends.imap import ImapThread

         return ImapThread

     @property
-    def actionlog_cls(self):  # noqa: ANN201
+    def actionlog_cls(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         from inbox.models.action_log import ActionLog

         return ActionLog
diff --git a/inbox/models/base.py b/inbox/models/base.py
index 591d0437d..30f36109a 100644
--- a/inbox/models/base.py
+++ b/inbox/models/base.py
@@ -1,6 +1,11 @@
-from sqlalchemy import BigInteger, Column
-from sqlalchemy.ext.declarative import as_declarative, declared_attr
-from sqlalchemy.orm.exc import DetachedInstanceError
+from sqlalchemy import BigInteger, Column  # type: ignore[import-untyped]
+from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+    as_declarative,
+    declared_attr,
+)
+from sqlalchemy.orm.exc import (  # type: ignore[import-untyped]
+    DetachedInstanceError,
+)

 from inbox.models.mixins import CreatedAtMixin
@@ -15,11 +20,11 @@ class MailSyncBase(CreatedAtMixin):
     id = Column(BigInteger, primary_key=True, autoincrement=True)

     @declared_attr
-    def __tablename__(cls):  # noqa: ANN204, N805
-        return cls.__name__.lower()
+    def __tablename__(cls):  # type: ignore[no-untyped-def]  # noqa: ANN204, N805
+        return cls.__name__.lower()  # type: ignore[attr-defined]

     @declared_attr
-    def __table_args__(cls):  # noqa: ANN204, N805
+    def __table_args__(cls):  # type: ignore[no-untyped-def]  # noqa: ANN204, N805
         return {"extend_existing": True}

     def __repr__(self) -> str:
diff --git a/inbox/models/block.py b/inbox/models/block.py
index 923ac39a1..a0ab32b21 100644
--- a/inbox/models/block.py
+++ b/inbox/models/block.py
@@ -1,7 +1,7 @@
 from hashlib import sha256

-from flanker import mime
-from sqlalchemy import (
+from flanker import mime  # type: ignore[import-untyped]
+from sqlalchemy import (  # type: ignore[import-untyped]
     Boolean,
     Column,
     Enum,
@@ -10,9 +10,13 @@
     String,
     event,
 )
-from sqlalchemy.orm import backref, reconstructor, relationship
-from sqlalchemy.schema import UniqueConstraint
-from sqlalchemy.sql.expression import false
+from sqlalchemy.orm import (  # type: ignore[import-untyped]
+    backref,
+    reconstructor,
+    relationship,
+)
+from sqlalchemy.schema import UniqueConstraint  # type: ignore[import-untyped]
+from sqlalchemy.sql.expression import false  # type: ignore[import-untyped]

 from inbox.config import config
 from inbox.logging import get_logger
@@ -60,12 +64,15 @@ class Block(
 ):
     """Metadata for any file that we store"""

-    API_OBJECT_NAME = "file"
+    API_OBJECT_NAME = "file"  # type: ignore[assignment]

     @property
     def should_suppress_transaction_creation(self) -> bool:
         # Only version attachments
-        return not any(part.is_attachment for part in self.parts)
+        return not any(
+            part.is_attachment
+            for part in self.parts  # type: ignore[attr-defined]
+        )

     from inbox.models.namespace import Namespace
@@ -78,7 +85,9 @@ def should_suppress_transaction_creation(self) -> bool:
     filename = Column(String(255))

     # TODO: create a constructor that allows the 'content_type' keyword
-    def __init__(self, *args, **kwargs) -> None:
+    def __init__(  # type: ignore[no-untyped-def]
+        self, *args, **kwargs
+    ) -> None:
         self.content_type = None
         self.size = 0
         MailSyncBase.__init__(self, *args, **kwargs)
@@ -102,7 +111,7 @@ def init_on_load(self) -> None:
             self.content_type = self._content_type_other

     @property
-    def data(self):  # noqa: ANN201
+    def data(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         value: bytes | None
         if self.size == 0:
             log.warning("Block size is 0")
@@ -120,12 +129,17 @@ def data(self):  # noqa: ANN201

             from inbox.models.block import Block

-            if isinstance(self, Block) and self.parts:
+            if (
+                isinstance(self, Block)  # type: ignore[redundant-expr]
+                and self.parts  # type: ignore[attr-defined]
+            ):
                 # This block is an attachment of a message that was
                 # deleted. We will attempt to fetch the raw
                 # message and parse out the needed attachment.

-                message = self.parts[0].message  # only grab one
+                message = self.parts[  # type: ignore[attr-defined]
+                    0
+                ].message  # only grab one
                 account = message.namespace.account

                 statsd_string = (
@@ -210,7 +224,7 @@ def data(self):  # noqa: ANN201
         return value

     @data.setter
-    def data(self, value) -> None:
+    def data(self, value) -> None:  # type: ignore[no-untyped-def]
         assert value is not None
         assert isinstance(value, bytes)
@@ -230,7 +244,9 @@ def data(self, value) -> None:


 @event.listens_for(Block, "before_insert", propagate=True)
-def serialize_before_insert(mapper, connection, target) -> None:
+def serialize_before_insert(  # type: ignore[no-untyped-def]
+    mapper, connection, target
+) -> None:
     if target.content_type in COMMON_CONTENT_TYPES:
         target._content_type_common = target.content_type
         target._content_type_other = None
@@ -273,7 +289,7 @@ class Part(MailSyncBase, UpdatedAtMixin, DeletedAtMixin):
     __table_args__ = (UniqueConstraint("message_id", "walk_index"),)

     @property
-    def thread_id(self):  # noqa: ANN201
+    def thread_id(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if not self.message:
             return None
         return self.message.thread_id
@@ -283,7 +299,7 @@ def is_attachment(self) -> bool:
         return self.content_disposition is not None

     @property
-    def is_embedded(self):  # noqa: ANN201
+    def is_embedded(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return (
             self.content_disposition is not None
             and self.content_disposition.lower() == "inline"
diff --git a/inbox/models/calendar.py b/inbox/models/calendar.py
index c62ba7ecb..331a6dfb8 100644
--- a/inbox/models/calendar.py
+++ b/inbox/models/calendar.py
@@ -1,6 +1,6 @@
 from datetime import datetime, timedelta

-from sqlalchemy import (
+from sqlalchemy import (  # type: ignore[import-untyped]
     Boolean,
     Column,
     DateTime,
@@ -10,7 +10,11 @@
     UniqueConstraint,
     inspect,
 )
-from sqlalchemy.orm import backref, object_session, relationship
+from sqlalchemy.orm import (  # type: ignore[import-untyped]
+    backref,
+    object_session,
+    relationship,
+)

 from inbox.models.base import MailSyncBase
 from inbox.models.constants import MAX_INDEXABLE_LENGTH
@@ -26,7 +30,7 @@ class Calendar(
     MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin, DeletedAtMixin
 ):
-    API_OBJECT_NAME = "calendar"
+    API_OBJECT_NAME = "calendar"  # type: ignore[assignment]
     namespace_id = Column(
         ForeignKey(Namespace.id, ondelete="CASCADE"), nullable=False
     )
diff --git a/inbox/models/category.py b/inbox/models/category.py
index 22e3d15da..27cc652f3 100644
--- a/inbox/models/category.py
+++ b/inbox/models/category.py
@@ -1,10 +1,23 @@
 from datetime import datetime

-from sqlalchemy import Column, DateTime, Enum, ForeignKey, String
-from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import relationship, validates
-from sqlalchemy.orm.exc import MultipleResultsFound
-from sqlalchemy.schema import UniqueConstraint
+from sqlalchemy import (  # type: ignore[import-untyped]
+    Column,
+    DateTime,
+    Enum,
+    ForeignKey,
+    String,
+)
+from sqlalchemy.ext.hybrid import (  # type: ignore[import-untyped]
+    hybrid_property,
+)
+from sqlalchemy.orm import (  # type: ignore[import-untyped]
+    relationship,
+    validates,
+)
+from sqlalchemy.orm.exc import (  # type: ignore[import-untyped]
+    MultipleResultsFound,
+)
+from sqlalchemy.schema import UniqueConstraint  # type: ignore[import-untyped]

 from inbox.logging import get_logger
 from inbox.models.base import MailSyncBase
@@ -25,7 +38,7 @@
 EPOCH = datetime.utcfromtimestamp(0)


-def sanitize_name(name):  # noqa: ANN201
+def sanitize_name(name):  # type: ignore[no-untyped-def]  # noqa: ANN201
     return unicode_safe_truncate(name, MAX_INDEXABLE_LENGTH)
@@ -42,7 +55,9 @@ class CategoryNameString(StringWithTransform):
     type match the values that we are actually storing in the database.
     """

-    def __init__(self, *args, **kwargs) -> None:
+    def __init__(  # type: ignore[no-untyped-def]
+        self, *args, **kwargs
+    ) -> None:
         super().__init__(
             sanitize_name, MAX_INDEXABLE_LENGTH, collation="utf8mb4_bin"
         )
@@ -52,7 +67,7 @@ class Category(
     MailSyncBase, HasRevisions, HasPublicID, UpdatedAtMixin, DeletedAtMixin
 ):
     @property
-    def API_OBJECT_NAME(self):  # noqa: ANN201, N802
+    def API_OBJECT_NAME(self):  # type: ignore[no-untyped-def]  # noqa: ANN201, N802
         return self.type_

     # Override the default `deleted_at` column with one that is NOT NULL --
@@ -81,7 +96,9 @@ def API_OBJECT_NAME(self):  # noqa: ANN201, N802
     type_ = Column(Enum("folder", "label"), nullable=False, default="folder")

     @validates("display_name")
-    def validate_display_name(self, key, display_name):  # noqa: ANN201
+    def validate_display_name(  # type: ignore[no-untyped-def]  # noqa: ANN201
+        self, key, display_name
+    ):
         sanitized_name = sanitize_name(display_name)
         if sanitized_name != display_name:
             log.warning(
@@ -92,7 +109,7 @@ def validate_display_name(self, key, display_name):  # noqa: ANN201
         return sanitized_name

     @classmethod
-    def find_or_create(  # noqa: ANN206
+    def find_or_create(  # type: ignore[no-untyped-def]  # noqa: ANN206
         cls, session, namespace_id, name, display_name, type_
     ):
         name = name or ""
@@ -107,7 +124,7 @@ def find_or_create(  # noqa: ANN206
         )

         if not objects:
-            obj = cls(
+            obj = cls(  # type: ignore[call-arg]
                 namespace_id=namespace_id,
                 name=name,
                 display_name=display_name,
@@ -138,11 +155,11 @@ def find_or_create(  # noqa: ANN206
         return obj

     @classmethod
-    def create(  # noqa: ANN206
+    def create(  # type: ignore[no-untyped-def]  # noqa: ANN206
         cls, session, namespace_id, name, display_name, type_
     ):
         name = name or ""
-        obj = cls(
+        obj = cls(  # type: ignore[call-arg]
             namespace_id=namespace_id,
             name=name,
             display_name=display_name,
@@ -153,23 +170,23 @@ def create(  # noqa: ANN206
         return obj

     @property
-    def account(self):  # noqa: ANN201
+    def account(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return self.namespace.account

     @property
-    def type(self):  # noqa: ANN201
+    def type(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return self.account.category_type

     @hybrid_property
     def lowercase_name(self):  # noqa: ANN201
         return self.display_name.lower()

-    @lowercase_name.comparator
+    @lowercase_name.comparator  # type: ignore[no-redef]
     def lowercase_name(cls):  # noqa: ANN201, N805
         return CaseInsensitiveComparator(cls.display_name)

     @property
-    def api_display_name(self):  # noqa: ANN201
+    def api_display_name(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if self.namespace.account.provider == "gmail":
             if self.display_name.startswith("[Gmail]/"):
                 return self.display_name[8:]
@@ -186,7 +203,7 @@ def api_display_name(self):  # noqa: ANN201
         return self.display_name

     @property
-    def is_deleted(self):  # noqa: ANN201
+    def is_deleted(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return self.deleted_at > EPOCH

     __table_args__ = (
diff --git a/inbox/models/contact.py b/inbox/models/contact.py
index 07d27a24d..35e1a2699 100644
--- a/inbox/models/contact.py
+++ b/inbox/models/contact.py
@@ -1,4 +1,4 @@
-from sqlalchemy import (
+from sqlalchemy import (  # type: ignore[import-untyped]
     BigInteger,
     Column,
     Enum,
@@ -8,8 +8,12 @@
     String,
     Text,
 )
-from sqlalchemy.orm import backref, relationship, validates
-from sqlalchemy.schema import UniqueConstraint
+from sqlalchemy.orm import (  # type: ignore[import-untyped]
+    backref,
+    relationship,
+    validates,
+)
+from sqlalchemy.schema import UniqueConstraint  # type: ignore[import-untyped]

 from inbox.models.base import MailSyncBase
 from inbox.models.event import Event
@@ -36,7 +40,7 @@ class Contact(
 ):
     """Data for a user's contact."""

-    API_OBJECT_NAME = "contact"
+    API_OBJECT_NAME = "contact"  # type: ignore[assignment]

     namespace_id = Column(BigInteger, nullable=False, index=True)
     namespace = relationship(
@@ -77,16 +81,18 @@ class Contact(
     )

     @validates("raw_data")
-    def validate_text_column_length(self, key, value):  # noqa: ANN201
+    def validate_text_column_length(  # type: ignore[no-untyped-def]  # noqa: ANN201
+        self, key, value
+    ):
         if value is None:
             return None
         return unicode_safe_truncate(value, MAX_TEXT_CHARS)

     @property
-    def versioned_relationships(self):  # noqa: ANN201
+    def versioned_relationships(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return ["phone_numbers"]

-    def merge_from(self, new_contact) -> None:
+    def merge_from(self, new_contact) -> None:  # type: ignore[no-untyped-def]
         # This must be updated when new fields are added to the class.
         merge_attrs = ["name", "email_address", "raw_data"]
         for attr in merge_attrs:
diff --git a/inbox/models/data_processing.py b/inbox/models/data_processing.py
index e3b4b2359..7ba9182c6 100644
--- a/inbox/models/data_processing.py
+++ b/inbox/models/data_processing.py
@@ -2,9 +2,15 @@
 import json
 import zlib

-from sqlalchemy import Column, DateTime, ForeignKey
-from sqlalchemy.dialects.mysql import MEDIUMBLOB
-from sqlalchemy.schema import UniqueConstraint
+from sqlalchemy import (  # type: ignore[import-untyped]
+    Column,
+    DateTime,
+    ForeignKey,
+)
+from sqlalchemy.dialects.mysql import (  # type: ignore[import-untyped]
+    MEDIUMBLOB,
+)
+from sqlalchemy.schema import UniqueConstraint  # type: ignore[import-untyped]

 from inbox.models.base import MailSyncBase
 from inbox.models.mixins import DeletedAtMixin, UpdatedAtMixin
@@ -23,28 +29,28 @@ class DataProcessingCache(MailSyncBase, UpdatedAtMixin, DeletedAtMixin):
     contact_groups_last_updated = Column(DateTime)

     @property
-    def contact_rankings(self):  # noqa: ANN201
+    def contact_rankings(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if self._contact_rankings is None:
             return None
         else:
             return json.loads(zlib.decompress(self._contact_rankings))

     @contact_rankings.setter
-    def contact_rankings(self, value) -> None:
+    def contact_rankings(self, value) -> None:  # type: ignore[no-untyped-def]
         self._contact_rankings = zlib.compress(
             json.dumps(value).encode("utf-8")
         )
         self.contact_rankings_last_updated = datetime.datetime.now()

     @property
-    def contact_groups(self):  # noqa: ANN201
+    def contact_groups(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if self._contact_groups is None:
             return None
         else:
             return json.loads(zlib.decompress(self._contact_groups))

     @contact_groups.setter
-    def contact_groups(self, value) -> None:
+    def contact_groups(self, value) -> None:  # type: ignore[no-untyped-def]
         self._contact_groups = zlib.compress(json.dumps(value).encode("utf-8"))
         self.contact_groups_last_updated = datetime.datetime.now()
diff --git a/inbox/models/event.py b/inbox/models/event.py
index 52e4fe2bc..d50d37f6b 100644
--- a/inbox/models/event.py
+++ b/inbox/models/event.py
@@ -5,9 +5,9 @@
 from email.utils import parseaddr
 from typing import Never

-import arrow
+import arrow  # type: ignore[import-untyped]
 from dateutil.parser import parse as date_parse
-from sqlalchemy import (
+from sqlalchemy import (  # type: ignore[import-untyped]
     Boolean,
     Column,
     DateTime,
@@ -19,9 +19,14 @@
     Text,
     event,
 )
-from sqlalchemy.dialects.mysql import LONGTEXT
-from sqlalchemy.orm import backref, reconstructor, relationship, validates
-from sqlalchemy.types import TypeDecorator
+from sqlalchemy.dialects.mysql import LONGTEXT  # type: ignore[import-untyped]
+from sqlalchemy.orm import (  # type: ignore[import-untyped]
+    backref,
+    reconstructor,
+    relationship,
+    validates,
+)
+from sqlalchemy.types import TypeDecorator  # type: ignore[import-untyped]

 from inbox.logging import get_logger
 from inbox.models.base import MailSyncBase
@@ -83,20 +88,24 @@ class FlexibleDateTime(TypeDecorator):

     impl = DateTime

-    def process_bind_param(self, value, dialect):  # noqa: ANN201
+    def process_bind_param(  # type: ignore[no-untyped-def]  # noqa: ANN201
+        self, value, dialect
+    ):
         if isinstance(value, arrow.arrow.Arrow):
             value = value.to("utc").naive
         if isinstance(value, datetime):
             value = arrow.get(value).to("utc").naive
         return value

-    def process_result_value(self, value, dialect):  # noqa: ANN201
+    def process_result_value(  # type: ignore[no-untyped-def]  # noqa: ANN201
+        self, value, dialect
+    ):
         if value is None:
             return value
         else:
             return arrow.get(value).to("utc")

-    def compare_values(self, x, y):  # noqa: ANN201
+    def compare_values(self, x, y):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if isinstance(x, datetime | int):
             x = arrow.get(x)
         if isinstance(y, datetime | int):
             y = arrow.get(y)
@@ -110,7 +119,7 @@ class Event(
 ):
     """Data for events."""

-    API_OBJECT_NAME = "event"
+    API_OBJECT_NAME = "event"  # type: ignore[assignment]
     API_MODIFIABLE_FIELDS = [
         "title",
         "description",
@@ -216,13 +225,13 @@ class Event(
         "uid",
         "raw_data",
     )
-    def validate_length(self, key, value):  # noqa: ANN201
+    def validate_length(self, key, value):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if value is None:
             return None
         return unicode_safe_truncate(value, MAX_LENS[key])

     @property
-    def when(self):  # noqa: ANN201
+    def when(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if self.all_day:
             # Dates are stored as DateTimes so transform to dates here.
             start = arrow.get(self.start).to("utc").date()
@@ -234,7 +243,7 @@ def when(self):  # noqa: ANN201
         return Time(start) if start == end else TimeSpan(start, end)

     @when.setter
-    def when(self, when) -> None:
+    def when(self, when) -> None:  # type: ignore[no-untyped-def]
         if "time" in when:
             self.start = self.end = time_parse(when["time"])
             self.all_day = False
@@ -250,7 +259,9 @@ def when(self, when) -> None:
             self.end = date_parse(when["end_date"])
             self.all_day = True

-    def _merge_participant_attributes(self, left, right):
+    def _merge_participant_attributes(  # type: ignore[no-untyped-def]
+        self, left, right
+    ):
         """Merge right into left. Right takes precedence unless it's null."""
         for attribute in right.keys():
             # Special cases:
@@ -265,7 +276,9 @@ def _merge_participant_attributes(self, left, right):

         return left

-    def _partial_participants_merge(self, event):
+    def _partial_participants_merge(  # type: ignore[no-untyped-def]
+        self, event
+    ):
         """
         Merge the participants from event into self.participants.
         event always takes precedence over self, except if
@@ -355,7 +368,7 @@ def update(self, event: "Event") -> None:
         self.sequence_number = event.sequence_number

     @property
-    def recurring(self):  # noqa: ANN201
+    def recurring(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if self.recurrence and self.recurrence != "":
             try:
                 r = ast.literal_eval(self.recurrence)
@@ -372,13 +385,13 @@ def recurring(self):  # noqa: ANN201
         return []

     @property
-    def organizer_email(self):  # noqa: ANN201
+    def organizer_email(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         # For historical reasons, the event organizer field is stored as
         # "Owner Name <owner@email.com>".
         parsed_owner = parseaddr(self.owner)
         if len(parsed_owner) == 0:
-            return None
+            return None  # type: ignore[unreachable]

         if parsed_owner[1] == "":
             return None
@@ -386,11 +399,11 @@ def organizer_email(self):  # noqa: ANN201
         return parsed_owner[1]

     @property
-    def organizer_name(self):  # noqa: ANN201
+    def organizer_name(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         parsed_owner = parseaddr(self.owner)

         if len(parsed_owner) == 0:
-            return None
+            return None  # type: ignore[unreachable]

         if parsed_owner[0] == "":
             return None
@@ -402,43 +415,43 @@ def is_recurring(self) -> bool:
         return self.recurrence is not None

     @property
-    def length(self):  # noqa: ANN201
+    def length(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return self.when.delta

     @property
-    def cancelled(self):  # noqa: ANN201
+    def cancelled(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return self.status == "cancelled"

     @cancelled.setter
-    def cancelled(self, is_cancelled) -> None:
+    def cancelled(self, is_cancelled) -> None:  # type: ignore[no-untyped-def]
         if is_cancelled:
             self.status = "cancelled"
         else:
             self.status = "confirmed"

     @property
-    def calendar_event_link(self):  # noqa: ANN201
+    def calendar_event_link(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         try:
             return json.loads(self.raw_data)["htmlLink"]
         except (ValueError, KeyError):
             return None

     @property
-    def emails_from_description(self):  # noqa: ANN201
+    def emails_from_description(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if self.description:
             return extract_emails_from_text(self.description)
         else:
             return []

     @property
-    def emails_from_title(self):  # noqa: ANN201
+    def emails_from_title(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         if self.title:
             return extract_emails_from_text(self.title)
         else:
             return []

     @classmethod
-    def create(cls, **kwargs):  # noqa: ANN206
+    def create(cls, **kwargs):  # type: ignore[no-untyped-def]  # noqa: ANN206
         # Decide whether or not to instantiate a RecurringEvent/Override
         # based on the kwargs we get.
         cls_ = cls
@@ -453,7 +466,7 @@ def create(cls, **kwargs):  # noqa: ANN206
             cls_ = RecurringEventOverride
         return cls_(**kwargs)

-    def __init__(self, **kwargs) -> None:
+    def __init__(self, **kwargs) -> None:  # type: ignore[no-untyped-def]
         if (
             kwargs.pop("__event_created_sanely", None)
             is not _EVENT_CREATED_SANELY_SENTINEL
@@ -482,7 +495,7 @@ class RecurringEvent(Event):
     """

     __mapper_args__ = {"polymorphic_identity": "recurringevent"}
-    __table_args__ = None
+    __table_args__ = None  # type: ignore[assignment]

     id = Column(ForeignKey("event.id", ondelete="CASCADE"), primary_key=True)
     rrule = Column(String(RECURRENCE_MAX_LEN))
@@ -490,7 +503,7 @@ class RecurringEvent(Event):
     until = Column(FlexibleDateTime, nullable=True)
     start_timezone = Column(String(35))

-    def __init__(self, **kwargs) -> None:
+    def __init__(self, **kwargs) -> None:  # type: ignore[no-untyped-def]
         self.start_timezone = kwargs.pop("original_start_tz", None)
         kwargs["recurrence"] = repr(kwargs["recurrence"])
         super().__init__(**kwargs)
@@ -517,7 +530,7 @@ def reconstruct(self) -> None:
                 exc_info=True,
             )

-    def inflate(self, start=None, end=None):  # noqa: ANN201
+    def inflate(self, start=None, end=None):  # type: ignore[no-untyped-def]  # noqa: ANN201
         # Convert a RecurringEvent into a series of InflatedEvents
         # by expanding its RRULE into a series of start times.
         from inbox.events.recurring import get_start_times
@@ -539,10 +552,10 @@ def unwrap_rrule(self) -> None:
             elif item.startswith("EXDATE"):
                 self.exdate = item

-    def all_events(self, start=None, end=None):  # noqa: ANN201
+    def all_events(self, start=None, end=None):  # type: ignore[no-untyped-def]  # noqa: ANN201
         # Returns all inflated events along with overrides that match the
         # provided time range.
-        overrides = self.overrides
+        overrides = self.overrides  # type: ignore[attr-defined]
         if start:
             overrides = overrides.filter(RecurringEventOverride.start > start)
         if end:
@@ -570,7 +583,7 @@ def all_events(self, start=None, end=None):  # noqa: ANN201
             events.append(e)
         return sorted(events, key=lambda e: e.start)

-    def update(self, event) -> None:
+    def update(self, event) -> None:  # type: ignore[no-untyped-def]
         super().update(event)
         if isinstance(event, type(self)):
             self.rrule = event.rrule
@@ -590,7 +603,7 @@ class RecurringEventOverride(Event):
         "polymorphic_identity": "recurringeventoverride",
         "inherit_condition": (id == Event.id),
     }
-    __table_args__ = None
+    __table_args__ = None  # type: ignore[assignment]

     master_event_id = Column(ForeignKey("event.id", ondelete="CASCADE"))
     master_event_uid = Column(
@@ -606,12 +619,14 @@ class RecurringEventOverride(Event):
     )

     @validates("master_event_uid")
-    def validate_master_event_uid_length(self, key, value):  # noqa: ANN201
+    def validate_master_event_uid_length(  # type: ignore[no-untyped-def]  # noqa: ANN201
+        self, key, value
+    ):
         if value is None:
             return None
         return unicode_safe_truncate(value, MAX_LENS[key])

-    def update(self, event) -> None:
+    def update(self, event) -> None:  # type: ignore[no-untyped-def]
         super().update(event)
         if isinstance(event, type(self)):
             self.master_event_uid = event.master_event_uid
@@ -629,9 +644,11 @@ class InflatedEvent(Event):

     __mapper_args__ = {"polymorphic_identity": "inflatedevent"}
     __tablename__ = "event"
-    __table_args__ = {"extend_existing": True}
+    __table_args__ = {"extend_existing": True}  # type: ignore[assignment]

-    def __init__(self, event, instance_start) -> None:
+    def __init__(  # type: ignore[no-untyped-def]
+        self, event, instance_start
+    ) -> None:
         self.master = event
         self.update(self.master)
         self.read_only = True  # Until we support modifying inflated events
@@ -642,13 +659,13 @@ def __init__(self, event, instance_start) -> None:
         self.public_id = f"{self.master.public_id}_{ts_id}"
         self.set_start_end(instance_start)

-    def set_start_end(self, start) -> None:
+    def set_start_end(self, start) -> None:  # type: ignore[no-untyped-def]
         # get the length from the master event
         length = self.master.length
         self.start = start.to("utc")
         self.end = self.start + length

-    def update(self, master) -> None:
+    def update(self, master) -> None:  # type: ignore[no-untyped-def]
         super().update(master)
         self.namespace_id = master.namespace_id
         self.calendar_id = master.calendar_id
@@ -665,7 +682,9 @@ def update(self, master) -> None:
         self.message = None


-def insert_warning(mapper, connection, target) -> Never:
+def insert_warning(  # type: ignore[no-untyped-def]
+    mapper, connection, target
+) -> Never:
     log.warning(f"InflatedEvent {target} shouldn't be committed")
     raise Exception("InflatedEvent should not be committed")
diff --git a/inbox/models/folder.py b/inbox/models/folder.py
index 9963f55d7..7e4dde2fd 100644
--- a/inbox/models/folder.py
+++ b/inbox/models/folder.py
@@ -1,7 +1,21 @@
-from sqlalchemy import Column, DateTime, ForeignKey, String, bindparam
-from sqlalchemy.orm import backref, relationship, synonym, validates
-from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound
-from sqlalchemy.schema import UniqueConstraint
+from sqlalchemy import (  # type: ignore[import-untyped]
+    Column,
+    DateTime,
+    ForeignKey,
+    String,
+    bindparam,
+)
+from sqlalchemy.orm import (  # type: ignore[import-untyped]
+    backref,
+    relationship,
+    synonym,
+    validates,
+)
+from sqlalchemy.orm.exc import (  # type: ignore[import-untyped]
+    MultipleResultsFound,
+    NoResultFound,
+)
+from sqlalchemy.schema import UniqueConstraint  # type: ignore[import-untyped]

 from inbox.logging import get_logger
 from inbox.models.base import MailSyncBase
@@ -51,11 +65,11 @@ class Folder(MailSyncBase, UpdatedAtMixin, DeletedAtMixin):
     )

     @property
-    def canonical_name(self):  # noqa: ANN201
+    def canonical_name(self):  # type: ignore[no-untyped-def]  # noqa: ANN201
         return self._canonical_name

     @canonical_name.setter
-    def canonical_name(self, value) -> None:
+    def canonical_name(self, value) -> None:  # type: ignore[no-untyped-def]
         value = value or ""
         self._canonical_name = value
         if self.category:
@@ -72,7 +86,7 @@ def canonical_name(self, value) -> None:
     initial_sync_end = Column(DateTime, nullable=True)

     @validates("name")
-    def validate_name(self, key, name):  # noqa: ANN201
+    def validate_name(self, key, name):  # type: ignore[no-untyped-def]  # noqa: ANN201
         sanitized_name = sanitize_name(name)
         if sanitized_name != name:
             log.warning(
@@ -83,7 +97,9 @@ def validate_name(self, key, name):  # noqa: ANN201
         return sanitized_name

     @classmethod
-    def find_or_create(cls, session, account, name, role=None):  # noqa: ANN206
+    def find_or_create(  # type: ignore[no-untyped-def]  # noqa: ANN206
+        cls, session, account, name, role=None
+    ):
         q = (
             session.query(cls)
             .filter(cls.account_id == account.id)
@@ -94,7 +110,9 @@ def find_or_create(cls, session, account, name, role=None):  # noqa: ANN206
         try:
             obj = q.one()
         except NoResultFound:
-            obj = cls(account=account, name=name, canonical_name=role)
+            obj = cls(  # type: ignore[call-arg]
+                account=account, name=name, canonical_name=role
+            )
             obj.category = Category.find_or_create(
                 session,
                 namespace_id=account.namespace.id,
@@ -112,7 +130,7 @@ def find_or_create(cls, session, account, name, role=None):  # noqa: ANN206
ANN206 return obj @classmethod - def get(cls, id_, session): # noqa: ANN206 + def get(cls, id_, session): # type: ignore[no-untyped-def] # noqa: ANN206 q = session.query(cls) q = q.filter(cls.id == bindparam("id_")) return q.params(id_=id_).first() diff --git a/inbox/models/label.py b/inbox/models/label.py index a68b46e02..ca4274da2 100644 --- a/inbox/models/label.py +++ b/inbox/models/label.py @@ -1,6 +1,14 @@ -from sqlalchemy import Column, ForeignKey, String -from sqlalchemy.orm import backref, relationship, validates -from sqlalchemy.schema import UniqueConstraint +from sqlalchemy import ( # type: ignore[import-untyped] + Column, + ForeignKey, + String, +) +from sqlalchemy.orm import ( # type: ignore[import-untyped] + backref, + relationship, + validates, +) +from sqlalchemy.schema import UniqueConstraint # type: ignore[import-untyped] from inbox.logging import get_logger from inbox.models.base import MailSyncBase @@ -45,7 +53,7 @@ class Label(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): ) @validates("name") - def validate_name(self, key, name): # noqa: ANN201 + def validate_name(self, key, name): # type: ignore[no-untyped-def] # noqa: ANN201 sanitized_name = sanitize_name(name) if sanitized_name != name: log.warning( @@ -56,7 +64,9 @@ def validate_name(self, key, name): # noqa: ANN201 return sanitized_name @classmethod - def find_or_create(cls, session, account, name, role=None): # noqa: ANN206 + def find_or_create( # type: ignore[no-untyped-def] # noqa: ANN206 + cls, session, account, name, role=None + ): q = session.query(cls).filter(cls.account_id == account.id) role = role or "" @@ -67,7 +77,9 @@ def find_or_create(cls, session, account, name, role=None): # noqa: ANN206 obj = q.first() if obj is None: - obj = cls(account=account, name=name, canonical_name=role) + obj = cls( # type: ignore[call-arg] + account=account, name=name, canonical_name=role + ) obj.category = Category.find_or_create( session, namespace_id=account.namespace.id, diff --git a/inbox/models/message.py b/inbox/models/message.py index 4d8cad578..ef8dfaf89 100644 --- a/inbox/models/message.py +++ b/inbox/models/message.py @@ -7,9 +7,9 @@ from hashlib import sha256 from typing import Any -from flanker import mime -from flanker.mime.message.part import MimePart -from sqlalchemy import ( +from flanker import mime # type: ignore[import-untyped] +from flanker.mime.message.part import MimePart # type: ignore[import-untyped] +from sqlalchemy import ( # type: ignore[import-untyped] BigInteger, Boolean, Column, @@ -20,9 +20,14 @@ String, bindparam, ) -from sqlalchemy.dialects.mysql import LONGBLOB, VARCHAR -from sqlalchemy.ext.associationproxy import association_proxy -from sqlalchemy.orm import ( +from sqlalchemy.dialects.mysql import ( # type: ignore[import-untyped] + LONGBLOB, + VARCHAR, +) +from sqlalchemy.ext.associationproxy import ( # type: ignore[import-untyped] + association_proxy, +) +from sqlalchemy.orm import ( # type: ignore[import-untyped] backref, joinedload, load_only, @@ -32,7 +37,7 @@ validates, with_polymorphic, ) -from sqlalchemy.sql.expression import false +from sqlalchemy.sql.expression import false # type: ignore[import-untyped] from inbox.config import config from inbox.constants import MAX_MESSAGE_BODY_LENGTH @@ -103,7 +108,7 @@ def normalize_data(data: str) -> str: class MessageTooBigException(Exception): - def __init__(self, body_length) -> None: + def __init__(self, body_length) -> None: # type: ignore[no-untyped-def] super().__init__( f"message length ({body_length}) is over the parsing limit" 
) @@ -113,7 +118,7 @@ class Message( MailSyncBase, HasRevisions, HasPublicID, UpdatedAtMixin, DeletedAtMixin ): @property - def API_OBJECT_NAME(self) -> str: # noqa: N802 + def API_OBJECT_NAME(self) -> str: # type: ignore[override] # noqa: N802 return "message" if not self.is_draft else "draft" namespace_id = Column(BigInteger, index=True, nullable=False) @@ -136,11 +141,11 @@ def API_OBJECT_NAME(self) -> str: # noqa: N802 ) @property - def thread(self): # noqa: ANN201 + def thread(self): # type: ignore[no-untyped-def] # noqa: ANN201 return self._thread @thread.setter - def thread(self, value) -> None: + def thread(self, value) -> None: # type: ignore[no-untyped-def] if value is not None and self._thread is not None: self._thread.deleted_at = None self._thread = value @@ -185,7 +190,7 @@ def thread(self, value) -> None: ) @property - def is_sending(self): # noqa: ANN201 + def is_sending(self): # type: ignore[no-untyped-def] # noqa: ANN201 return self.version == MAX_MYSQL_INTEGER and not self.is_draft def mark_as_sending(self) -> None: @@ -196,11 +201,13 @@ def mark_as_sending(self) -> None: self.regenerate_nylas_uid() @property - def categories_changes(self): # noqa: ANN201 + def categories_changes(self): # type: ignore[no-untyped-def] # noqa: ANN201 return self.state == "actions_pending" @categories_changes.setter - def categories_changes(self, has_changes) -> None: + def categories_changes( # type: ignore[no-untyped-def] + self, has_changes + ) -> None: if has_changes is True: self.state = "actions_pending" else: @@ -252,7 +259,9 @@ def regenerate_nylas_uid(self) -> None: categories = association_proxy( "messagecategories", "category", - creator=lambda category: MessageCategory(category=category), + creator=lambda category: MessageCategory( # type: ignore[call-arg] + category=category + ), ) # FOR INBOX-CREATED MESSAGES: @@ -318,7 +327,7 @@ def create_from_synced( # noqa: D417 """ # noqa: D401 # stop trickle-down bugs - assert account.namespace is not None + assert account.namespace is not None # type: ignore[attr-defined] assert isinstance(body, bytes) message = Message() @@ -329,7 +338,9 @@ def create_from_synced( # noqa: D417 save_raw_mime(message.data_sha256, body) # Persist the processed message to the database - message.namespace_id = account.namespace.id + message.namespace_id = ( + account.namespace.id # type: ignore[attr-defined] + ) with email_parsing_lock: try: @@ -382,7 +393,7 @@ def create_from_synced( # noqa: D417 message._parse_mimepart( imap_uid, mimepart, - account.namespace.id, + account.namespace.id, # type: ignore[attr-defined] html_parts, plain_parts, ) @@ -465,7 +476,7 @@ def _parse_metadata( mime_version=mime_version, ) - self.subject: str | None = parsed.subject + self.subject: str | None = parsed.subject # type: ignore[no-redef] self.from_addr = parse_mimepart_address_header(parsed, "From") self.sender_addr = parse_mimepart_address_header(parsed, "Sender") self.reply_to = parse_mimepart_address_header(parsed, "Reply-To") @@ -473,11 +484,15 @@ def _parse_metadata( self.cc_addr = parse_mimepart_address_header(parsed, "Cc") self.bcc_addr = parse_mimepart_address_header(parsed, "Bcc") - self.in_reply_to: str | None = parsed.headers.get("In-Reply-To") + self.in_reply_to: str | None = ( # type: ignore[no-redef] + parsed.headers.get("In-Reply-To") + ) # The RFC mandates that the Message-Id header must be at most 998 # characters. Sadly, not everybody follows specs. 
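The `no-redef` ignores just above come from annotating instance attributes (`self.subject: str | None = ...`) that already exist as class-level `Column`s: mypy permits only one declared type per name. A small sketch of the conflict; the `Mapped[...]` route mentioned in the comment assumes a SQLAlchemy 2.0 upgrade, which this diff does not perform:

```python
class Sketch:
    subject = None  # stand-in for a class-level Column(String(255))

    def parse(self, value: "str | None") -> None:
        # A second declaration of the same attribute triggers [no-redef]:
        self.subject: "str | None" = value  # type: ignore[no-redef]
        # Ignore-free alternatives: assign without re-annotating, or
        # declare the type once at class level, e.g. SQLAlchemy 2.0's
        #   subject: Mapped[str | None] = mapped_column(String(255))
```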
- self.message_id_header: str | None = parsed.headers.get("Message-Id") + self.message_id_header: str | None = ( # type: ignore[no-redef] + parsed.headers.get("Message-Id") + ) if self.message_id_header and len(self.message_id_header) > 998: self.message_id_header = self.message_id_header[:998] log.warning( # noqa: PLE1205 @@ -503,7 +518,9 @@ def _parse_metadata( self.received_date = self.received_date.replace(microsecond=0) # Custom Nylas header - self.nylas_uid: str | None = parsed.headers.get("X-INBOX-ID") + self.nylas_uid: str | None = ( # type: ignore[no-redef] + parsed.headers.get("X-INBOX-ID") + ) # In accordance with JWZ (http://www.jwz.org/doc/threading.html) self.references = parse_references( @@ -626,15 +643,15 @@ def _save_attachment( block = Block() block.namespace_id = namespace_id block.filename = _trim_filename(filename, namespace_id=namespace_id) - block.content_type = content_type - part = Part(block=block, message=self) + block.content_type = content_type # type: ignore[assignment] + part = Part(block=block, message=self) # type: ignore[call-arg] if content_id: content_id = content_id[:255] part.content_id = content_id part.content_disposition = content_disposition data = data or "" - if not isinstance(data, bytes): - data = data.encode("utf-8", "strict") + if not isinstance(data, bytes): # type: ignore[unreachable] + data = data.encode("utf-8", "strict") # type: ignore[assignment] block.data = data def _mark_error(self) -> None: @@ -751,12 +768,16 @@ def participants(self) -> list[tuple[str, str]]: @property def attachments(self) -> list["Part"]: - return [part for part in self.parts if part.is_attachment] + return [ + part + for part in self.parts # type: ignore[attr-defined] + if part.is_attachment + ] @property def api_attachment_metadata(self) -> list[dict[str, Any]]: resp = [] - for part in self.parts: + for part in self.parts: # type: ignore[attr-defined] if not part.is_attachment: continue k = { @@ -774,22 +795,25 @@ def api_attachment_metadata(self) -> list[dict[str, Any]]: return resp @property - def versioned_relationships(self): # noqa: ANN201 + def versioned_relationships(self): # type: ignore[no-untyped-def] # noqa: ANN201 return ["parts", "messagecategories"] @property - def propagated_attributes(self): # noqa: ANN201 + def propagated_attributes(self): # type: ignore[no-untyped-def] # noqa: ANN201 return ["is_read", "is_starred", "messagecategories"] @property def has_attached_events(self) -> bool: - return "text/calendar" in [p.block.content_type for p in self.parts] + return "text/calendar" in [ + p.block.content_type + for p in self.parts # type: ignore[attr-defined] + ] @property def attached_event_files(self) -> list["Part"]: return [ part - for part in self.parts + for part in self.parts # type: ignore[attr-defined] if part.block.content_type == "text/calendar" ] @@ -808,16 +832,20 @@ def from_public_id( ) q = q.options( joinedload(Message.thread).load_only("discriminator", "public_id"), - joinedload(Message.messagecategories).joinedload( - MessageCategory.category + joinedload( + Message.messagecategories # type: ignore[attr-defined] + ).joinedload(MessageCategory.category), + joinedload(Message.parts).joinedload( # type: ignore[attr-defined] + "block" ), - joinedload(Message.parts).joinedload("block"), - joinedload(Message.events), + joinedload(Message.events), # type: ignore[attr-defined] ) return q.params(public_id=public_id, namespace_id=namespace_id).one() @classmethod - def api_loading_options(cls, expand: bool = False): # noqa: ANN206 + def 
api_loading_options(cls, expand: bool = False): # noqa: ANN206 + def
api_loading_options( # type: ignore[no-untyped-def] # noqa: ANN206 + cls, expand: bool = False + ): columns = [ "public_id", "is_draft", @@ -858,7 +886,11 @@ def api_loading_options(cls, expand: bool = False): # noqa: ANN206 load_only(*columns), subqueryload("parts").joinedload("block"), subqueryload("thread").load_only("public_id", "discriminator"), - subqueryload(Message.events.of_type(all_event_subclasses)), + subqueryload( + Message.events.of_type( # type: ignore[attr-defined] + all_event_subclasses + ) + ), subqueryload("messagecategories").joinedload("category"), ) @@ -930,7 +962,7 @@ class MessageCategory(MailSyncBase): ) @property - def namespace(self): # noqa: ANN201 + def namespace(self): # type: ignore[no-untyped-def] # noqa: ANN201 return self.message.namespace diff --git a/inbox/models/metadata.py b/inbox/models/metadata.py index 7c5e50987..d2e33ec25 100644 --- a/inbox/models/metadata.py +++ b/inbox/models/metadata.py @@ -1,5 +1,12 @@ -from sqlalchemy import BigInteger, Column, ForeignKey, Index, Integer, String -from sqlalchemy.orm import relationship +from sqlalchemy import ( # type: ignore[import-untyped] + BigInteger, + Column, + ForeignKey, + Index, + Integer, + String, +) +from sqlalchemy.orm import relationship # type: ignore[import-untyped] from inbox.models.base import MailSyncBase from inbox.models.mixins import ( @@ -29,7 +36,7 @@ class Metadata( value should be set to null. """ - API_OBJECT_NAME = "metadata" + API_OBJECT_NAME = "metadata" # type: ignore[assignment] # Application data fields # - app_id: The referenced app's primary key diff --git a/inbox/models/mixins.py b/inbox/models/mixins.py index 4ad8b02c4..ef336549f 100644 --- a/inbox/models/mixins.py +++ b/inbox/models/mixins.py @@ -1,8 +1,19 @@ import abc from datetime import datetime -from sqlalchemy import Boolean, Column, DateTime, String, func, inspect, sql -from sqlalchemy.ext.hybrid import Comparator, hybrid_property +from sqlalchemy import ( # type: ignore[import-untyped] + Boolean, + Column, + DateTime, + String, + func, + inspect, + sql, +) +from sqlalchemy.ext.hybrid import ( # type: ignore[import-untyped] + Comparator, + hybrid_property, +) from inbox.models.constants import MAX_INDEXABLE_LENGTH from inbox.sqlalchemy_ext.util import ABCMixin, Base36UID, generate_public_id @@ -14,7 +25,7 @@ class HasRevisions(ABCMixin): """Mixin for tables that should be versioned in the transaction log.""" @property - def versioned_relationships(self): # noqa: ANN201 + def versioned_relationships(self): # type: ignore[no-untyped-def] # noqa: ANN201 """ May be overriden by subclasses. This should be the list of relationship attribute names that should trigger an update revision @@ -25,7 +36,7 @@ def versioned_relationships(self): # noqa: ANN201 return [] @property - def propagated_attributes(self): # noqa: ANN201 + def propagated_attributes(self): # type: ignore[no-untyped-def] # noqa: ANN201 """ May be overridden by subclasses. 
This is the list of attribute names that should trigger an update revision for a /related/ object - @@ -84,20 +95,20 @@ class HasPublicID: class AddressComparator(Comparator): - def __eq__(self, other): # noqa: ANN204 + def __eq__(self, other): # type: ignore[no-untyped-def] # noqa: ANN204 return self.__clause_element__() == canonicalize_address(other) - def like(self, term, escape=None): # noqa: ANN201 + def like(self, term, escape=None): # type: ignore[no-untyped-def] # noqa: ANN201 return self.__clause_element__().like(term, escape=escape) - def in_(self, addresses): # noqa: ANN201 + def in_(self, addresses): # type: ignore[no-untyped-def] # noqa: ANN201 return self.__clause_element__().in_( [canonicalize_address(address) for address in addresses] ) class CaseInsensitiveComparator(Comparator): - def __eq__(self, other): # noqa: ANN204 + def __eq__(self, other): # type: ignore[no-untyped-def] # noqa: ANN204 return func.lower(self.__clause_element__()) == func.lower(other) @@ -126,11 +137,11 @@ class HasEmailAddress: def email_address(self): # noqa: ANN201 return self._raw_address - @email_address.comparator + @email_address.comparator # type: ignore[no-redef] def email_address(cls): # noqa: ANN201, N805 return AddressComparator(cls._canonicalized_address) - @email_address.setter + @email_address.setter # type: ignore[no-redef] def email_address(self, value) -> None: # Silently truncate if necessary. In practice, this may be too # long if somebody put a super-long email into their contacts by diff --git a/inbox/models/namespace.py b/inbox/models/namespace.py index 14db63da5..eca9fe792 100644 --- a/inbox/models/namespace.py +++ b/inbox/models/namespace.py @@ -1,5 +1,13 @@ -from sqlalchemy import BigInteger, Column, ForeignKey, bindparam -from sqlalchemy.orm import backref, relationship +from sqlalchemy import ( # type: ignore[import-untyped] + BigInteger, + Column, + ForeignKey, + bindparam, +) +from sqlalchemy.orm import ( # type: ignore[import-untyped] + backref, + relationship, +) from inbox.models.base import MailSyncBase from inbox.models.mixins import DeletedAtMixin, HasPublicID, UpdatedAtMixin @@ -30,19 +38,21 @@ def __str__(self) -> str: ) @property - def email_address(self): # noqa: ANN201 + def email_address(self): # type: ignore[no-untyped-def] # noqa: ANN201 if self.account is not None: return self.account.email_address return None @classmethod - def get(cls, id_, session): # noqa: ANN206 + def get(cls, id_, session): # type: ignore[no-untyped-def] # noqa: ANN206 q = session.query(cls) q = q.filter(cls.id == bindparam("id_")) return q.params(id_=id_).first() @classmethod - def from_public_id(cls, public_id, db_session): # noqa: ANN206 + def from_public_id( # type: ignore[no-untyped-def] # noqa: ANN206 + cls, public_id, db_session + ): q = db_session.query(Namespace) q = q.filter(Namespace.public_id == bindparam("public_id")) return q.params(public_id=public_id).one() diff --git a/inbox/models/search.py b/inbox/models/search.py index 208931ed6..267f7cba9 100644 --- a/inbox/models/search.py +++ b/inbox/models/search.py @@ -1,4 +1,4 @@ -from sqlalchemy import Column, ForeignKey +from sqlalchemy import Column, ForeignKey # type: ignore[import-untyped] from inbox.models.base import MailSyncBase from inbox.models.mixins import DeletedAtMixin, UpdatedAtMixin diff --git a/inbox/models/secret.py b/inbox/models/secret.py index 7a5459b0e..b82dacc17 100644 --- a/inbox/models/secret.py +++ b/inbox/models/secret.py @@ -1,8 +1,8 @@ import enum -from sqlalchemy import Column, Enum, Integer 
-from sqlalchemy.orm import validates -from sqlalchemy.types import BLOB +from sqlalchemy import Column, Enum, Integer # type: ignore[import-untyped] +from sqlalchemy.orm import validates # type: ignore[import-untyped] +from sqlalchemy.types import BLOB # type: ignore[import-untyped] from inbox.models.base import MailSyncBase from inbox.models.mixins import DeletedAtMixin, UpdatedAtMixin @@ -43,10 +43,12 @@ def secret(self, plaintext: str | bytes) -> None: raise TypeError("Invalid secret") with get_encryption_oracle("SECRET_ENCRYPTION_KEY") as e_oracle: - self._secret, self.encryption_scheme = e_oracle.encrypt(plaintext) + (self._secret, self.encryption_scheme) = e_oracle.encrypt( + plaintext + ) @validates("type") - def validate_type(self, k, type): # noqa: ANN201 + def validate_type(self, k, type): # type: ignore[no-untyped-def] # noqa: ANN201 if type not in [x.value for x in SecretType]: raise TypeError("Invalid secret type.") diff --git a/inbox/models/session.py b/inbox/models/session.py index 0181ee275..39145a05f 100644 --- a/inbox/models/session.py +++ b/inbox/models/session.py @@ -2,10 +2,12 @@ import time from contextlib import contextmanager -from sqlalchemy import event -from sqlalchemy.exc import OperationalError -from sqlalchemy.ext.horizontal_shard import ShardedSession -from sqlalchemy.orm.session import Session +from sqlalchemy import event # type: ignore[import-untyped] +from sqlalchemy.exc import OperationalError # type: ignore[import-untyped] +from sqlalchemy.ext.horizontal_shard import ( # type: ignore[import-untyped] + ShardedSession, +) +from sqlalchemy.orm.session import Session # type: ignore[import-untyped] from inbox.config import config from inbox.ignition import engine_manager @@ -18,7 +20,9 @@ MAX_SANE_TRX_TIME_MS = 30000 -def new_session(engine, versioned: bool = True): # noqa: ANN201 +def new_session( # type: ignore[no-untyped-def] # noqa: ANN201 + engine, versioned: bool = True +): """Returns a session bound to the given engine.""" # noqa: D401 session = Session(bind=engine, autoflush=True, autocommit=False) @@ -40,7 +44,9 @@ def new_session(engine, versioned: bool = True): # noqa: ANN201 metric_name = f"db.{engine.url.database}.{modname}.{funcname}" @event.listens_for(session, "after_begin") - def after_begin(session, transaction, connection) -> None: + def after_begin( # type: ignore[no-untyped-def] + session, transaction, connection + ) -> None: # It's okay to key on the session object here, because each session # binds to only one engine/connection. 
If this changes in the # future such that a session may encompass multiple engines, then @@ -49,7 +55,7 @@ def after_begin(session, transaction, connection) -> None: @event.listens_for(session, "after_commit") @event.listens_for(session, "after_rollback") - def end(session) -> None: + def end(session) -> None: # type: ignore[no-untyped-def] start_time = transaction_start_map.get(session) if not start_time: return @@ -72,7 +78,7 @@ def end(session) -> None: return session -def configure_versioning(session): # noqa: ANN201 +def configure_versioning(session): # type: ignore[no-untyped-def] # noqa: ANN201 from inbox.models.transaction import ( bump_redis_txn_id, create_revisions, @@ -81,12 +87,16 @@ def configure_versioning(session): # noqa: ANN201 ) @event.listens_for(session, "before_flush") - def before_flush(session, flush_context, instances) -> None: + def before_flush( # type: ignore[no-untyped-def] + session, flush_context, instances + ) -> None: propagate_changes(session) increment_versions(session) @event.listens_for(session, "after_flush") - def after_flush(session, flush_context) -> None: + def after_flush( # type: ignore[no-untyped-def] + session, flush_context + ) -> None: """ Hook to log revision snapshots. Must be post-flush in order to grab object IDs on new objects. @@ -107,7 +117,7 @@ def after_flush(session, flush_context) -> None: @contextmanager -def session_scope(id_, versioned: bool = True): # noqa: ANN201 +def session_scope(id_, versioned: bool = True): # type: ignore[no-untyped-def] # noqa: ANN201 """ Provide a transactional scope around a series of operations. @@ -140,7 +150,7 @@ def session_scope(id_, versioned: bool = True): # noqa: ANN201 if config.get("LOG_DB_SESSIONS"): start_time = time.time() frame = sys._getframe() - assert frame + assert frame # type: ignore[truthy-bool] assert frame.f_back assert frame.f_back.f_back calling_frame = frame.f_back.f_back @@ -167,8 +177,10 @@ def session_scope(id_, versioned: bool = True): # noqa: ANN201 raise exc # noqa: B904 finally: if config.get("LOG_DB_SESSIONS"): - lifetime = time.time() - start_time - logger.info( + lifetime = ( + time.time() - start_time # type: ignore[possibly-undefined] + ) + logger.info( # type: ignore[possibly-undefined] "closing db_session", lifetime=lifetime, sessions_used=engine.pool.checkedout(), @@ -177,7 +189,7 @@ def session_scope(id_, versioned: bool = True): # noqa: ANN201 @contextmanager -def session_scope_by_shard_id( # noqa: ANN201 +def session_scope_by_shard_id( # type: ignore[no-untyped-def] # noqa: ANN201 shard_id, versioned: bool = True ): key = shard_id << 48 @@ -189,11 +201,13 @@ def session_scope_by_shard_id( # noqa: ANN201 # GLOBAL (cross-shard) queries. USE WITH CAUTION. -def shard_chooser(mapper, instance, clause=None): # noqa: ANN201 +def shard_chooser( # type: ignore[no-untyped-def] # noqa: ANN201 + mapper, instance, clause=None +): return str(engine_manager.shard_key_for_id(instance.id)) -def id_chooser(query, ident): # noqa: ANN201 +def id_chooser(query, ident): # type: ignore[no-untyped-def] # noqa: ANN201 # STOPSHIP(emfree): is ident a tuple here??? # TODO[k]: What if len(list) > 1? 
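Two of the rarer codes show up in the session.py hunks above. `truthy-bool` fires on `assert frame` because `sys._getframe()` is typed as always returning a frame, so the assertion is vacuous. `possibly-undefined` fires because `start_time` is only bound when `LOG_DB_SESSIONS` is set but is read again in the `finally` block. A sketch of ignore-free alternatives, assuming the config flag cannot change between the two reads:

```python
import sys
import time

def session_scope_sketch(log_db_sessions: bool) -> None:
    # Bind unconditionally so the use in `finally` is never
    # [possibly-undefined]:
    start_time: float | None = None
    if log_db_sessions:
        start_time = time.time()
        frame = sys._getframe()
        # f_back is Optional, so this assert narrows a real Optional
        # instead of tripping [truthy-bool] on an always-truthy frame:
        assert frame.f_back is not None
    try:
        pass  # session body goes here
    finally:
        if start_time is not None:
            print("closing db_session", time.time() - start_time)
```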
if isinstance(ident, list) and len(ident) == 1: @@ -201,12 +215,12 @@ def id_chooser(query, ident): # noqa: ANN201 return [str(engine_manager.shard_key_for_id(ident))] -def query_chooser(query): # noqa: ANN201 +def query_chooser(query): # type: ignore[no-untyped-def] # noqa: ANN201 return [str(k) for k in engine_manager.engines] @contextmanager -def global_session_scope(): # noqa: ANN201 +def global_session_scope(): # type: ignore[no-untyped-def] # noqa: ANN201 shards = {str(k): v for k, v in engine_manager.engines.items()} session = ShardedSession( shard_chooser=shard_chooser, diff --git a/inbox/models/thread.py b/inbox/models/thread.py index 47dce6b10..f263800dc 100644 --- a/inbox/models/thread.py +++ b/inbox/models/thread.py @@ -2,8 +2,15 @@ import itertools from collections import defaultdict -from sqlalchemy import Column, DateTime, ForeignKey, Index, Integer, String -from sqlalchemy.orm import ( +from sqlalchemy import ( # type: ignore[import-untyped] + Column, + DateTime, + ForeignKey, + Index, + Integer, + String, +) +from sqlalchemy.orm import ( # type: ignore[import-untyped] backref, object_session, relationship, @@ -40,7 +47,7 @@ class Thread( """ - API_OBJECT_NAME = "thread" + API_OBJECT_NAME = "thread" # type: ignore[assignment] namespace_id = Column( ForeignKey(Namespace.id, ondelete="CASCADE"), nullable=False @@ -61,12 +68,14 @@ class Thread( version = Column(Integer, nullable=True, server_default="0") @validates("subject") - def compute_cleaned_up_subject(self, key, value): # noqa: ANN201 + def compute_cleaned_up_subject( # type: ignore[no-untyped-def] # noqa: ANN201 + self, key, value + ): self._cleaned_subject = cleanup_subject(value) return value @validates("messages") - def update_from_message(self, k, message): # noqa: ANN201 + def update_from_message(self, k, message): # type: ignore[no-untyped-def] # noqa: ANN201 with object_session(self).no_autoflush: if message.is_draft: # Don't change subjectdate, recentdate, or unread/unseen based @@ -84,9 +93,9 @@ def update_from_message(self, k, message): # noqa: ANN201 return message @property - def most_recent_received_date(self): # noqa: ANN201 + def most_recent_received_date(self): # type: ignore[no-untyped-def] # noqa: ANN201 received_recent_date: datetime.datetime | None = None - for m in self.messages: + for m in self.messages: # type: ignore[attr-defined] if ( all( category.name != "sent" @@ -104,7 +113,8 @@ def most_recent_received_date(self): # noqa: ANN201 if not received_recent_date: sorted_messages = sorted( - self.messages, key=lambda m: m.received_date + self.messages, # type: ignore[attr-defined] + key=lambda m: m.received_date, ) if not sorted_messages: log.warning( @@ -117,7 +127,7 @@ def most_recent_received_date(self): # noqa: ANN201 return received_recent_date @property - def most_recent_sent_date(self): # noqa: ANN201 + def most_recent_sent_date(self): # type: ignore[no-untyped-def] # noqa: ANN201 """ This is the timestamp of the most recently *sent* message on this thread, as decided by whether the message is in the sent folder or @@ -125,7 +135,9 @@ def most_recent_sent_date(self): # noqa: ANN201 """ # noqa: D404 sent_recent_date = None sorted_messages = sorted( - self.messages, key=lambda m: m.received_date, reverse=True + self.messages, # type: ignore[attr-defined] + key=lambda m: m.received_date, + reverse=True, ) for message in sorted_messages: if "sent" in [ @@ -137,22 +149,34 @@ def most_recent_sent_date(self): # noqa: ANN201 @property def unread(self) -> bool: - return not all(m.is_read for m in 
self.messages if not m.is_draft) + return not all( + m.is_read + for m in self.messages # type: ignore[attr-defined] + if not m.is_draft + ) @property - def starred(self): # noqa: ANN201 - return any(m.is_starred for m in self.messages if not m.is_draft) + def starred(self): # type: ignore[no-untyped-def] # noqa: ANN201 + return any( + m.is_starred + for m in self.messages # type: ignore[attr-defined] + if not m.is_draft + ) @property - def has_attachments(self): # noqa: ANN201 - return any(m.attachments for m in self.messages if not m.is_draft) + def has_attachments(self): # type: ignore[no-untyped-def] # noqa: ANN201 + return any( + m.attachments + for m in self.messages # type: ignore[attr-defined] + if not m.is_draft + ) @property - def versioned_relationships(self): # noqa: ANN201 + def versioned_relationships(self): # type: ignore[no-untyped-def] # noqa: ANN201 return ["messages"] @property - def participants(self): # noqa: ANN201 + def participants(self): # type: ignore[no-untyped-def] # noqa: ANN201 """ Different messages in the thread may reference the same email address with different phrases. We partially deduplicate: if the same @@ -161,7 +185,7 @@ def participants(self): # noqa: ANN201 """ deduped_participants = defaultdict(set) - for m in self.messages: + for m in self.messages: # type: ignore[attr-defined] if m.is_draft: # Don't use drafts to compute participants. continue @@ -177,30 +201,38 @@ def participants(self): # noqa: ANN201 return p @property - def drafts(self): # noqa: ANN201 + def drafts(self): # type: ignore[no-untyped-def] # noqa: ANN201 """ Return all drafts on this thread that don't have later revisions. """ - return [m for m in self.messages if m.is_draft] + return [ + m + for m in self.messages # type: ignore[attr-defined] + if m.is_draft + ] @property - def attachments(self): # noqa: ANN201 - return any(m.attachments for m in self.messages) + def attachments(self): # type: ignore[no-untyped-def] # noqa: ANN201 + return any( + m.attachments for m in self.messages # type: ignore[attr-defined] + ) @property - def account(self): # noqa: ANN201 + def account(self): # type: ignore[no-untyped-def] # noqa: ANN201 return self.namespace.account @property - def categories(self): # noqa: ANN201 + def categories(self): # type: ignore[no-untyped-def] # noqa: ANN201 categories = set() - for m in self.messages: + for m in self.messages: # type: ignore[attr-defined] categories.update(m.categories) return categories @classmethod - def api_loading_options(cls, expand: bool = False): # noqa: ANN206 + def api_loading_options( # type: ignore[no-untyped-def] # noqa: ANN206 + cls, expand: bool = False + ): message_columns = [ "public_id", "is_draft", @@ -225,11 +257,11 @@ def api_loading_options(cls, expand: bool = False): # noqa: ANN206 "reply_to", ] return ( - subqueryload(Thread.messages) + subqueryload(Thread.messages) # type: ignore[attr-defined] .load_only(*message_columns) .joinedload("messagecategories") .joinedload("category"), - subqueryload(Thread.messages) + subqueryload(Thread.messages) # type: ignore[attr-defined] .joinedload("parts") .joinedload("block"), ) diff --git a/inbox/models/transaction.py b/inbox/models/transaction.py index 2b23539e7..7204edd06 100644 --- a/inbox/models/transaction.py +++ b/inbox/models/transaction.py @@ -1,5 +1,13 @@ -from sqlalchemy import BigInteger, Column, Enum, Index, String, func, inspect -from sqlalchemy.orm import relationship +from sqlalchemy import ( # type: ignore[import-untyped] + BigInteger, + Column, + Enum, + Index, + 
String, + func, + inspect, +) +from sqlalchemy.orm import relationship # type: ignore[import-untyped] from inbox.ignition import redis_txn from inbox.models.base import MailSyncBase @@ -65,7 +73,7 @@ class AccountTransaction(MailSyncBase, HasPublicID): ) -def is_dirty(session, obj) -> bool: +def is_dirty(session, obj) -> bool: # type: ignore[no-untyped-def] if obj in session.dirty and obj.has_versioned_changes(): return True if hasattr(obj, "dirty") and obj.dirty: @@ -73,7 +81,7 @@ def is_dirty(session, obj) -> bool: return False -def create_revisions(session) -> None: +def create_revisions(session) -> None: # type: ignore[no-untyped-def] for obj in session: if ( not isinstance(obj, HasRevisions) @@ -88,13 +96,15 @@ def create_revisions(session) -> None: # occurs). This emulates what happens to objects in session.dirty, # in that they are no longer present in the set during the next # invocation of the pre-flush hook. - obj.dirty = False + obj.dirty = False # type: ignore[attr-defined] create_revision(obj, session, "update") elif obj in session.deleted: create_revision(obj, session, "delete") -def create_revision(obj, session, revision_type) -> None: +def create_revision( # type: ignore[no-untyped-def] + obj, session, revision_type +) -> None: assert revision_type in ("insert", "update", "delete") # If available use object dates for the transaction timestamp @@ -116,7 +126,7 @@ def create_revision(obj, session, revision_type) -> None: # Always create a Transaction record -- this maintains a total ordering over # all events for an account. - revision = Transaction( + revision = Transaction( # type: ignore[call-arg] command=revision_type, record_id=obj.id, object_type=obj.API_OBJECT_NAME, @@ -130,7 +140,7 @@ def create_revision(obj, session, revision_type) -> None: # this is an optimization needed so these sparse events can be still be # retrieved efficiently for webhooks etc. if obj.API_OBJECT_NAME == "account": - revision = AccountTransaction( + revision = AccountTransaction( # type: ignore[assignment, call-arg] command=revision_type, record_id=obj.id, object_type=obj.API_OBJECT_NAME, @@ -140,7 +150,7 @@ def create_revision(obj, session, revision_type) -> None: session.add(revision) -def propagate_changes(session) -> None: +def propagate_changes(session) -> None: # type: ignore[no-untyped-def] """ Mark an object's related object as dirty when certain attributes of the object (its `propagated_attributes`) change. @@ -161,7 +171,7 @@ def propagate_changes(session) -> None: obj.thread.dirty = True -def increment_versions(session) -> None: +def increment_versions(session) -> None: # type: ignore[no-untyped-def] from inbox.models.metadata import Metadata from inbox.models.thread import Thread @@ -174,12 +184,12 @@ def increment_versions(session) -> None: obj.version = Metadata.version + 1 # TODO what's going on here? -def bump_redis_txn_id(session) -> None: +def bump_redis_txn_id(session) -> None: # type: ignore[no-untyped-def] """ Called from post-flush hook to bump the latest id stored in redis """ # noqa: D401 - def get_namespace_public_id(namespace_id): + def get_namespace_public_id(namespace_id): # type: ignore[no-untyped-def] # the namespace was just used to create the transaction, so it should # still be in the session. If not, a sql statement will be emitted. 
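The `call-arg` ignores on `Transaction(...)` and `AccountTransaction(...)` above exist because mypy cannot see the keyword-argument `__init__` that SQLAlchemy's declarative machinery generates at runtime, so the calls appear to pass unexpected keywords; the `obj.dirty` ignores are the same story on the attribute side. One ignore-free remedy (a sketch, not what this PR does; the SQLAlchemy mypy plugin or a 2.0 `MappedAsDataclass` base are other options) is an explicit constructor:

```python
from typing import Any

from sqlalchemy import Column, Integer, String  # type: ignore[import-untyped]
from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
    declarative_base,
)

Base = declarative_base()

class Transaction(Base):
    __tablename__ = "transaction"
    id = Column(Integer, primary_key=True)
    command = Column(String(20))

    # A real signature for mypy to check, forwarding to the declarative
    # constructor; per-call-site [call-arg] ignores become unnecessary.
    def __init__(self, command: "str | None" = None, **kwargs: Any) -> None:
        super().__init__(command=command, **kwargs)
```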
namespace = session.query(Namespace).get(namespace_id) diff --git a/inbox/models/util.py b/inbox/models/util.py index c9822246d..eb9b9f08d 100644 --- a/inbox/models/util.py +++ b/inbox/models/util.py @@ -3,10 +3,10 @@ from collections import OrderedDict from collections.abc import Iterable -import limitlion -from sqlalchemy import desc, func -from sqlalchemy.orm import Session -from sqlalchemy.orm.exc import NoResultFound +import limitlion # type: ignore[import-untyped] +from sqlalchemy import desc, func # type: ignore[import-untyped] +from sqlalchemy.orm import Session # type: ignore[import-untyped] +from sqlalchemy.orm.exc import NoResultFound # type: ignore[import-untyped] from inbox.error_handling import log_uncaught_errors from inbox.heartbeat.status import clear_heartbeat_status @@ -88,7 +88,7 @@ def reconcile_message( return existing_message -def transaction_objects(): # noqa: ANN201 +def transaction_objects(): # type: ignore[no-untyped-def] # noqa: ANN201 """ Return the mapping from API object name - which becomes the Transaction.object_type - for models that generate Transactions (i.e. @@ -121,7 +121,7 @@ def transaction_objects(): # noqa: ANN201 } -def get_accounts_to_delete(shard_id): # noqa: ANN201 +def get_accounts_to_delete(shard_id): # type: ignore[no-untyped-def] # noqa: ANN201 ids_to_delete = [] with session_scope_by_shard_id(shard_id) as db_session: ids_to_delete = [ @@ -136,7 +136,7 @@ class AccountDeletionErrror(Exception): pass -def batch_delete_namespaces( +def batch_delete_namespaces( # type: ignore[no-untyped-def] ids_to_delete, throttle: bool = False, dry_run: bool = False ) -> None: start = time.time() @@ -160,7 +160,7 @@ def batch_delete_namespaces( ) -def delete_namespace( +def delete_namespace( # type: ignore[no-untyped-def] namespace_id, throttle: bool = False, dry_run: bool = False ) -> None: """ @@ -288,7 +288,7 @@ def delete_namespace( ) -def _batch_delete( +def _batch_delete( # type: ignore[no-untyped-def] engine, table, column_id_filters, @@ -403,7 +403,7 @@ def check_throttle() -> bool: return True -def purge_transactions( +def purge_transactions( # type: ignore[no-untyped-def] shard_id, days_ago: int = 60, limit: int = 1000, @@ -440,7 +440,7 @@ def purge_transactions( ) as db_session: if dry_run: rowcount = db_session.execute( - f"{query} OFFSET {offset}" + f"{query} OFFSET {offset}" # type: ignore[possibly-undefined] ).rowcount offset += rowcount else: @@ -497,7 +497,7 @@ def delete_message_hashes( dry_run: If True, don't actually delete the data. 
""" - if not message_hashes: + if not message_hashes: # type: ignore[truthy-iterable] return # First check if the messagea still exists in another namespace diff --git a/inbox/models/when.py b/inbox/models/when.py index ebb1b34da..323ed1bd5 100644 --- a/inbox/models/when.py +++ b/inbox/models/when.py @@ -2,7 +2,7 @@ import datetime from typing import Any, Union -import arrow +import arrow # type: ignore[import-untyped] def parse_as_when( @@ -48,7 +48,7 @@ class When: spanning = False @classmethod - def parse(cls, raw: dict[str, Any]): # noqa: ANN206 + def parse(cls, raw: dict[str, Any]): # type: ignore[no-untyped-def] # noqa: ANN206 parsed_times = cls.parse_keys(raw) return cls(*parsed_times) @@ -94,7 +94,7 @@ class SpanningWhen(When): singular_cls: type @classmethod - def parse(cls, raw: dict[str, Any]): # noqa: ANN206 + def parse(cls, raw: dict[str, Any]): # type: ignore[no-untyped-def] # noqa: ANN206 # If initializing a span, we sanity check the timestamps and initialize # the singular form if they are equal. start, end = cls.parse_keys(raw) diff --git a/inbox/providers.py b/inbox/providers.py index 9c872f465..9891046bb 100644 --- a/inbox/providers.py +++ b/inbox/providers.py @@ -5,7 +5,7 @@ __all__ = ["provider_info", "providers"] -def provider_info(provider_name): # noqa: ANN201 +def provider_info(provider_name): # type: ignore[no-untyped-def] # noqa: ANN201 """ Like providers[provider_name] except raises inbox.basicauth.NotSupportedError instead of KeyError when the provider is diff --git a/inbox/s3/backends/gmail.py b/inbox/s3/backends/gmail.py index 0a28d751d..cdb559516 100644 --- a/inbox/s3/backends/gmail.py +++ b/inbox/s3/backends/gmail.py @@ -12,7 +12,7 @@ # We use the Google API so we don't have to worry about # the Gmail max IMAP connection limit. 
-def get_gmail_raw_contents(message): # noqa: ANN201 +def get_gmail_raw_contents(message): # type: ignore[no-untyped-def] # noqa: ANN201 account = message.namespace.account auth_token = token_manager.get_token(account) diff --git a/inbox/s3/backends/imap.py b/inbox/s3/backends/imap.py index aa0fe95b7..4b3188c0f 100644 --- a/inbox/s3/backends/imap.py +++ b/inbox/s3/backends/imap.py @@ -1,4 +1,4 @@ -import imapclient +import imapclient # type: ignore[import-untyped] from inbox.crispin import connection_pool from inbox.logging import get_logger @@ -8,7 +8,7 @@ log = get_logger() -def get_imap_raw_contents(message): # noqa: ANN201 +def get_imap_raw_contents(message): # type: ignore[no-untyped-def] # noqa: ANN201 account = message.namespace.account if len(message.imapuids) == 0: diff --git a/inbox/s3/base.py b/inbox/s3/base.py index d027c01bd..4e23d5d25 100644 --- a/inbox/s3/base.py +++ b/inbox/s3/base.py @@ -1,4 +1,4 @@ -def get_raw_from_provider(message): # noqa: ANN201 +def get_raw_from_provider(message): # type: ignore[no-untyped-def] # noqa: ANN201 """Get the raw contents of a message from the provider.""" account = message.account return account.get_raw_message_contents(message) diff --git a/inbox/scheduling/event_queue.py b/inbox/scheduling/event_queue.py index 1ec910641..fa8056ee0 100644 --- a/inbox/scheduling/event_queue.py +++ b/inbox/scheduling/event_queue.py @@ -12,7 +12,9 @@ SOCKET_TIMEOUT = 30 -def _get_redis_client(host=None, port: int = 6379, db: int = 1): +def _get_redis_client( # type: ignore[no-untyped-def] + host=None, port: int = 6379, db: int = 1 +): return StrictRedis( host=host, port=port, @@ -29,7 +31,9 @@ class EventQueue: """ def __init__( - self, queue_name: str, redis: StrictRedis | None = None + self, + queue_name: str, + redis: StrictRedis | None = None, # type: ignore[type-arg] ) -> None: self.redis = redis if self.redis is None: @@ -67,7 +71,7 @@ def receive_event(self, timeout: int | None = 0) -> dict[str, Any] | None: if blpop_result is None: return None - blpop_queue_name, event_data = blpop_result + (blpop_queue_name, event_data) = blpop_result queue_name = blpop_queue_name.decode("utf-8") try: diff --git a/inbox/search/backends/gmail.py b/inbox/search/backends/gmail.py index e5ed138f3..438274df4 100644 --- a/inbox/search/backends/gmail.py +++ b/inbox/search/backends/gmail.py @@ -1,5 +1,5 @@ import requests -from sqlalchemy import desc +from sqlalchemy import desc # type: ignore[import-untyped] from inbox.api.kellogs import APIEncoder from inbox.auth.oauth import OAuthRequestsWrapper @@ -17,7 +17,7 @@ class GmailSearchClient: - def __init__(self, account) -> None: + def __init__(self, account) -> None: # type: ignore[no-untyped-def] self.account_id = int(account.id) try: with session_scope(self.account_id) as db_session: @@ -32,7 +32,7 @@ def __init__(self, account) -> None: 403, ) - def search_messages( # noqa: ANN201 + def search_messages( # type: ignore[no-untyped-def] # noqa: ANN201 self, db_session, search_query, offset: int = 0, limit: int = 40 ): # We need to get the next limit + offset terms if we want to @@ -59,8 +59,8 @@ def search_messages( # noqa: ANN201 # We're only issuing a single request to the Gmail API so there's # no need to stream it. 
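A note on the many `import-untyped` reflows in this PR (sqlalchemy, flanker, imapclient, and friends): mypy reports the error on the first physical line of the import statement, so that is where the ignore must sit, which is why single-line imports keep getting expanded into parenthesized blocks before being annotated. A sketch, assuming the packages are installed without stubs or a `py.typed` marker:

```python
import imapclient  # type: ignore[import-untyped]

# For a multi-line import the comment goes on the opening line:
from sqlalchemy import (  # type: ignore[import-untyped]
    Column,
    String,
)
```

A per-module `ignore_missing_imports` override in mypy's configuration would silence all of these in one place, at the cost of losing the per-site markers.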
- def stream_messages(self, search_query): # noqa: ANN201 - def g(): + def stream_messages(self, search_query): # type: ignore[no-untyped-def] # noqa: ANN201 + def g(): # type: ignore[no-untyped-def] encoder = APIEncoder() with session_scope(self.account_id) as db_session: @@ -70,7 +70,7 @@ def g(): return g - def search_threads( # noqa: ANN201 + def search_threads( # type: ignore[no-untyped-def] # noqa: ANN201 self, db_session, search_query, offset: int = 0, limit: int = 40 ): # We need to get the next limit + offset terms if we want to @@ -98,8 +98,8 @@ def search_threads( # noqa: ANN201 return query.all() - def stream_threads(self, search_query): # noqa: ANN201 - def g(): + def stream_threads(self, search_query): # type: ignore[no-untyped-def] # noqa: ANN201 + def g(): # type: ignore[no-untyped-def] encoder = APIEncoder() with session_scope(self.account_id) as db_session: @@ -109,7 +109,7 @@ def g(): return g - def _search(self, search_query, limit): + def _search(self, search_query, limit): # type: ignore[no-untyped-def] results: list[int] = [] params = dict(q=search_query, maxResults=limit) diff --git a/inbox/search/backends/imap.py b/inbox/search/backends/imap.py index 310d53e2e..49ded25e5 100644 --- a/inbox/search/backends/imap.py +++ b/inbox/search/backends/imap.py @@ -1,7 +1,7 @@ from imaplib import IMAP4 -from imapclient import IMAPClient -from sqlalchemy import desc +from imapclient import IMAPClient # type: ignore[import-untyped] +from sqlalchemy import desc # type: ignore[import-untyped] from inbox.api.kellogs import APIEncoder from inbox.crispin import CrispinClient, FolderMissingError @@ -18,12 +18,14 @@ class IMAPSearchClient: - def __init__(self, account) -> None: + def __init__(self, account) -> None: # type: ignore[no-untyped-def] self.account = account self.account_id = account.id self.log = get_logger().new(account_id=account.id, component="search") - def _open_crispin_connection(self, db_session): + def _open_crispin_connection( # type: ignore[no-untyped-def] + self, db_session + ): account = db_session.query(Account).get(self.account_id) try: conn = account.auth_handler.get_authenticated_imap_connection( @@ -68,7 +70,7 @@ def _open_crispin_connection(self, db_session): def _close_crispin_connection(self) -> None: self.crispin_client.logout() - def search_messages( # noqa: ANN201 + def search_messages( # type: ignore[no-untyped-def] # noqa: ANN201 self, db_session, search_query, offset: int = 0, limit: int = 40 ): imap_uids = [] @@ -92,8 +94,8 @@ def search_messages( # noqa: ANN201 return query.all() - def stream_messages(self, search_query): # noqa: ANN201 - def g(): + def stream_messages(self, search_query): # type: ignore[no-untyped-def] # noqa: ANN201 + def g(): # type: ignore[no-untyped-def] encoder = APIEncoder() with session_scope(self.account_id) as db_session: @@ -114,7 +116,7 @@ def g(): return g - def search_threads( # noqa: ANN201 + def search_threads( # type: ignore[no-untyped-def] # noqa: ANN201 self, db_session, search_query, offset: int = 0, limit: int = 40 ): imap_uids = [] @@ -141,8 +143,8 @@ def search_threads( # noqa: ANN201 query = query.limit(limit) return query.all() - def stream_threads(self, search_query): # noqa: ANN201 - def g(): + def stream_threads(self, search_query): # type: ignore[no-untyped-def] # noqa: ANN201 + def g(): # type: ignore[no-untyped-def] encoder = APIEncoder() with session_scope(self.account_id) as db_session: @@ -163,7 +165,9 @@ def g(): return g - def _search(self, db_session, search_query): + def _search( # type: 
ignore[no-untyped-def] + self, db_session, search_query + ): self._open_crispin_connection(db_session) try: @@ -186,7 +190,8 @@ def _search(self, db_session, search_query): db_session.query(Folder) .filter( Folder.account_id == self.account_id, - Folder.canonical_name == cname, + Folder.canonical_name # type: ignore[comparison-overlap] + == cname, ) .one_or_none() ) @@ -206,7 +211,9 @@ def _search(self, db_session, search_query): self._close_crispin_connection() - def _search_folder(self, folder, criteria, charset): + def _search_folder( # type: ignore[no-untyped-def] + self, folder, criteria, charset + ): try: self.crispin_client.select_folder(folder.name, uidvalidity_cb) except FolderMissingError: diff --git a/inbox/search/base.py b/inbox/search/base.py index f144302f3..91a0956ac 100644 --- a/inbox/search/base.py +++ b/inbox/search/base.py @@ -1,4 +1,4 @@ -def get_search_client(account): # noqa: ANN201 +def get_search_client(account): # type: ignore[no-untyped-def] # noqa: ANN201 from inbox.search.backends import module_registry search_mod = module_registry.get(account.provider) @@ -13,7 +13,9 @@ class SearchBackendException(Exception): provider. """ - def __init__(self, message, http_code, server_error=None) -> None: + def __init__( # type: ignore[no-untyped-def] + self, message, http_code, server_error=None + ) -> None: self.message = message self.http_code = http_code self.server_error = server_error @@ -30,6 +32,6 @@ class SearchStoreException(Exception): an error """ - def __init__(self, err_code) -> None: + def __init__(self, err_code) -> None: # type: ignore[no-untyped-def] self.err_code = err_code super().__init__(err_code) diff --git a/inbox/security/oracles.py b/inbox/security/oracles.py index 921d3d043..e1aa92ccc 100644 --- a/inbox/security/oracles.py +++ b/inbox/security/oracles.py @@ -15,7 +15,7 @@ class EncryptionScheme(enum.Enum): SECRETBOX_WITH_STATIC_KEY = 1 -def get_encryption_oracle(secret_name): # noqa: ANN201 +def get_encryption_oracle(secret_name): # type: ignore[no-untyped-def] # noqa: ANN201 """ Return an encryption oracle for the given secret. """ @@ -41,7 +41,7 @@ class _EncryptionOracle: module. """ # noqa: D404 - def __init__(self, secret_name) -> None: + def __init__(self, secret_name) -> None: # type: ignore[no-untyped-def] self._closed = False if not config.get_required("ENCRYPT_SECRETS"): @@ -55,7 +55,7 @@ def __init__(self, secret_name) -> None: encoder=nacl.encoding.HexEncoder, ) - def __enter__(self): # noqa: ANN204 + def __enter__(self): # type: ignore[no-untyped-def] # noqa: ANN204 return self def __exit__( @@ -80,7 +80,9 @@ def close(self) -> None: del self._secret_box self._closed = True - def encrypt(self, plaintext, encryption_scheme=None): + def encrypt( # type: ignore[no-untyped-def] + self, plaintext, encryption_scheme=None + ): """ Encrypt the specified secret. If no encryption_scheme is specified (recommended), a reasonable default will be used. @@ -134,7 +136,7 @@ class _DecryptionOracle(_EncryptionOracle): module. """ # noqa: D404 - def reencrypt( + def reencrypt( # type: ignore[no-untyped-def] self, ciphertext, encryption_scheme, new_encryption_scheme=None ): """ @@ -156,7 +158,9 @@ def reencrypt( encryption_scheme=new_encryption_scheme, ) - def decrypt(self, ciphertext, encryption_scheme): + def decrypt( # type: ignore[no-untyped-def] + self, ciphertext, encryption_scheme + ): # type (bytes, int) -> bytes """ Decrypt the specified secret. 
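Worth flagging in the oracles hunk above: the pre-existing `# type (bytes, int) -> bytes` comment on `decrypt` is missing the colon that would make it a type comment mypy understands, so the signature was never actually checked, and the new `no-untyped-def` ignore lands on a function that was meant to be typed all along. The inline equivalent, assuming the old comment had the right types:

```python
class _DecryptionOracleSketch:
    def decrypt(self, ciphertext: bytes, encryption_scheme: int) -> bytes:
        """Decrypt the specified secret (typed sketch of the signature)."""
        raise NotImplementedError
```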
diff --git a/inbox/sendmail/base.py b/inbox/sendmail/base.py index b37cac838..b9efaff40 100644 --- a/inbox/sendmail/base.py +++ b/inbox/sendmail/base.py @@ -35,7 +35,7 @@ class SendMailException(Exception): """ - def __init__( + def __init__( # type: ignore[no-untyped-def] self, message, http_code, server_error=None, failures=None ) -> None: self.message = message @@ -45,7 +45,7 @@ def __init__( super().__init__(message, http_code, server_error, failures) -def get_sendmail_client(account): # noqa: ANN201 +def get_sendmail_client(account): # type: ignore[no-untyped-def] # noqa: ANN201 from inbox.sendmail import module_registry sendmail_mod = module_registry.get(account.provider) @@ -67,7 +67,11 @@ def create_draft_from_mime( with db_session.no_autoflush: msg = Message.create_from_synced( - account, "", "", datetime.utcnow(), new_body + account, + "", # type: ignore[arg-type] + "", + datetime.utcnow(), + new_body, ) if msg.from_addr and len(msg.from_addr) > 1: @@ -88,7 +92,7 @@ def create_draft_from_mime( msg.thread = thread_cls( subject=msg.subject, recentdate=msg.received_date, - namespace=account.namespace, + namespace=account.namespace, # type: ignore[attr-defined] subjectdate=msg.received_date, ) @@ -101,22 +105,22 @@ def create_draft_from_mime( return msg -def block_to_part(block, message, namespace): # noqa: ANN201 +def block_to_part(block, message, namespace): # type: ignore[no-untyped-def] # noqa: ANN201 inline_image_uri = rf"cid:{block.public_id}" is_inline = re.search(inline_image_uri, message.body) is not None # Create a new Part object to associate to the message object. # (You can't just set block.message, because if block is an # attachment on an existing message, that would dissociate it from # the existing message.) - part = Part(block=block) + part = Part(block=block) # type: ignore[call-arg] part.content_id = block.public_id if is_inline else None - part.namespace_id = namespace.id + part.namespace_id = namespace.id # type: ignore[attr-defined] part.content_disposition = "inline" if is_inline else "attachment" part.is_inboxapp_attachment = True return part -def create_message_from_json( # noqa: ANN201 +def create_message_from_json( # type: ignore[no-untyped-def] # noqa: ANN201 data, namespace, db_session, is_draft ): """ @@ -211,7 +215,9 @@ def create_message_from_json( # noqa: ANN201 # Associate attachments to the draft message for block in blocks: - message.parts.append(block_to_part(block, message, namespace)) + message.parts.append( # type: ignore[attr-defined] + block_to_part(block, message, namespace) + ) update_contacts_from_message(db_session, message, namespace.id) @@ -259,7 +265,7 @@ def create_message_from_json( # noqa: ANN201 return message -def update_draft( # noqa: ANN201 +def update_draft( # type: ignore[no-untyped-def] # noqa: ANN201 db_session, account, draft, @@ -276,7 +282,7 @@ def update_draft( # noqa: ANN201 Update draft with new attributes. 
""" - def update(attr, value=None) -> None: + def update(attr, value=None) -> None: # type: ignore[no-untyped-def] if value is not None: setattr(draft, attr, value) @@ -346,7 +352,9 @@ def update(attr, value=None) -> None: return draft -def delete_draft(db_session, account, draft) -> None: +def delete_draft( # type: ignore[no-untyped-def] + db_session, account, draft +) -> None: """Delete the given draft.""" thread = draft.thread assert draft.is_draft @@ -370,7 +378,7 @@ def delete_draft(db_session, account, draft) -> None: db_session.commit() -def generate_attachments(message, blocks): # noqa: ANN201 +def generate_attachments(message, blocks): # type: ignore[no-untyped-def] # noqa: ANN201 attachment_dicts = [] for block in blocks: content_disposition = "attachment" @@ -394,7 +402,9 @@ def generate_attachments(message, blocks): # noqa: ANN201 return attachment_dicts -def _set_reply_headers(new_message, previous_message) -> None: +def _set_reply_headers( # type: ignore[no-untyped-def] + new_message, previous_message +) -> None: """ When creating a draft in reply to a thread, set the In-Reply-To and References headers appropriately, if possible. diff --git a/inbox/sendmail/message.py b/inbox/sendmail/message.py index e272514ac..a3c42e829 100644 --- a/inbox/sendmail/message.py +++ b/inbox/sendmail/message.py @@ -17,11 +17,17 @@ from datetime import datetime from email.header import Header -from flanker import mime -from flanker.addresslib import address -from flanker.addresslib.address import MAX_ADDRESS_LENGTH -from flanker.addresslib.quote import smart_quote -from flanker.mime.message.headers import WithParams +from flanker import mime # type: ignore[import-untyped] +from flanker.addresslib import address # type: ignore[import-untyped] +from flanker.addresslib.address import ( # type: ignore[import-untyped] + MAX_ADDRESS_LENGTH, +) +from flanker.addresslib.quote import ( # type: ignore[import-untyped] + smart_quote, +) +from flanker.mime.message.headers import ( # type: ignore[import-untyped] + WithParams, +) from html2text import html2text from inbox import VERSION @@ -34,7 +40,9 @@ # and garble the encoded messages when sending, unless you break the lines with # '=\r\n'. Their expectation seems to be technically correct, per RFC1521 # section 5.1. However, we opt to simply avoid this mess entirely. 
-def fallback_to_base64(charset, preferred_encoding, body): # noqa: ANN201 +def fallback_to_base64( # type: ignore[no-untyped-def] # noqa: ANN201 + charset, preferred_encoding, body +): if charset in ("ascii", "iso8859=1", "us-ascii"): if mime.message.part.has_long_lines(body): # In the original implementation, this was @@ -53,7 +61,7 @@ def fallback_to_base64(charset, preferred_encoding, body): # noqa: ANN201 mime.message.part.choose_text_encoding = fallback_to_base64 -def create_email( # noqa: ANN201, D417 +def create_email( # type: ignore[no-untyped-def] # noqa: ANN201, D417 from_name, from_email, reply_to, @@ -184,7 +192,7 @@ def create_email( # noqa: ANN201, D417 return rfcmsg -def encode_string(value, maxlinelen): # noqa: ANN201 +def encode_string(value, maxlinelen): # type: ignore[no-untyped-def] # noqa: ANN201 try: header = Header(value.encode("ascii"), "ascii", maxlinelen) except UnicodeEncodeError: @@ -193,7 +201,9 @@ def encode_string(value, maxlinelen): # noqa: ANN201 return header.encode(splitchars=" ;,") -def _get_full_spec_without_validation(name, email): +def _get_full_spec_without_validation( # type: ignore[no-untyped-def] + name, email +): """ This function is the same as calling full_spec() on a Flanker address.EmailAddress object. This function exists @@ -208,7 +218,7 @@ def _get_full_spec_without_validation(name, email): return str(email) -def add_nylas_headers(msg, nylas_uid) -> None: +def add_nylas_headers(msg, nylas_uid) -> None: # type: ignore[no-untyped-def] """ Set a custom `X-INBOX-ID` header so as to identify messages generated by Nylas. @@ -230,11 +240,11 @@ def add_nylas_headers(msg, nylas_uid) -> None: msg.headers["User-Agent"] = f"NylasMailer/{VERSION}" -def generate_message_id_header(uid) -> str: +def generate_message_id_header(uid) -> str: # type: ignore[no-untyped-def] return f"<{uid}@mailer.nylas.com>" -def _rfc_transform(msg): +def _rfc_transform(msg): # type: ignore[no-untyped-def] """ Create an RFC-2821 compliant SMTP message. (Specifically, this means splitting the References header to conform to diff --git a/inbox/sendmail/smtp/postel.py b/inbox/sendmail/smtp/postel.py index 929c17903..02cc0a48d 100644 --- a/inbox/sendmail/smtp/postel.py +++ b/inbox/sendmail/smtp/postel.py @@ -43,7 +43,7 @@ class SMTP_SSL(smtplib.SMTP_SSL): # noqa: N801 Derived class which correctly surfaces SMTP errors. """ - def rset(self) -> None: + def rset(self) -> None: # type: ignore[override] """ Wrap rset() in order to correctly surface SMTP exceptions. SMTP.sendmail() does e.g.: @@ -70,7 +70,7 @@ class SMTP(smtplib.SMTP): Derived class which correctly surfaces SMTP errors. """ - def rset(self) -> None: + def rset(self) -> None: # type: ignore[override] """ Wrap rset() in order to correctly surface SMTP exceptions. 
SMTP.sendmail() does e.g.: @@ -92,7 +92,7 @@ def rset(self) -> None: log.warning("Server disconnect during SMTP rset", exc_info=True) -def _transform_ssl_error(strerror): +def _transform_ssl_error(strerror): # type: ignore[no-untyped-def] """ Clean up errors like: _ssl.c:510: error:14090086:SSL routines:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed @@ -117,7 +117,7 @@ def _substitute_bcc(raw_message: bytes) -> bytes: class SMTPConnection: - def __init__( + def __init__( # type: ignore[no-untyped-def] self, account_id, email_address, @@ -141,7 +141,7 @@ def __init__( } self.setup() - def __enter__(self): # noqa: ANN204 + def __enter__(self): # type: ignore[no-untyped-def] # noqa: ANN204 return self def __exit__( @@ -155,7 +155,7 @@ def __exit__( except smtplib.SMTPServerDisconnected: return - def _connect(self, host, port): + def _connect(self, host, port): # type: ignore[no-untyped-def] """Connect, with error-handling""" try: self.connection.connect(host, port) @@ -176,7 +176,7 @@ def setup(self) -> None: else: self.connection = SMTP(timeout=SMTP_TIMEOUT) self._connect(host, port) - self.connection._host = host # type: ignore + self.connection._host = host # type: ignore[attr-defined] self._upgrade_connection() # Auth the connection @@ -184,7 +184,7 @@ def setup(self) -> None: auth_handler = self.auth_handlers[self.auth_type] auth_handler() - def _upgrade_connection(self): + def _upgrade_connection(self): # type: ignore[no-untyped-def] """ Upgrade the connection if STARTTLS is supported. If it's not/ it fails and SSL is not required, do nothing. Otherwise, @@ -213,7 +213,7 @@ def _smtp_oauth2_try_refresh(self) -> None: account, force_refresh=True, scopes=account.email_scopes ) - def _try_xoauth2(self): + def _try_xoauth2(self): # type: ignore[no-untyped-def] auth_string = f"user={self.email_address}\1auth=Bearer {self.auth_token}\1\1".encode() code, resp = self.connection.docmd( "AUTH", f"XOAUTH2 {base64.b64encode(auth_string).decode()}" @@ -275,7 +275,7 @@ def smtp_password(self) -> None: self.log.info("SMTP Auth(Password) success") - def sendmail(self, recipients, msg): # noqa: ANN201 + def sendmail(self, recipients, msg): # type: ignore[no-untyped-def] # noqa: ANN201 try: return self.connection.sendmail( self.email_address, recipients, msg @@ -295,7 +295,7 @@ def sendmail(self, recipients, msg): # noqa: ANN201 class SMTPClient: """SMTPClient for Gmail and other IMAP providers.""" - def __init__(self, account) -> None: + def __init__(self, account) -> None: # type: ignore[no-untyped-def] self.account_id = account.id self.log = get_logger() self.log.bind(account_id=account.id) @@ -327,7 +327,7 @@ def __init__(self, account) -> None: # non-generic accounts have no smtp password self.auth_token = account.password - def _send(self, recipients, msg): + def _send(self, recipients, msg): # type: ignore[no-untyped-def] """ Send the email message. Retries up to SMTP_MAX_RETRIES times if the message couldn't be submitted to any recipient. 
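On the `override` ignores for `rset`: typeshed declares `smtplib.SMTP.rset` as returning `tuple[int, bytes]`, so redeclaring it `-> None` is an incompatible override. Keeping the base signature avoids the ignore; in this sketch the fallback reply is invented, not part of the original behavior (which swallows the error and returns None):

```python
import smtplib

class SafeSMTP(smtplib.SMTP):
    # Same return type as the base class, so no [override] error; the
    # placeholder reply below is this sketch's assumption.
    def rset(self) -> "tuple[int, bytes]":
        try:
            return super().rset()
        except smtplib.SMTPServerDisconnected:
            return (250, b"server disconnected during RSET")
```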
@@ -374,7 +374,7 @@ def _send(self, recipients, msg): ) self._handle_sending_exception(last_error) - def _handle_sending_exception(self, err): + def _handle_sending_exception(self, err): # type: ignore[no-untyped-def] if isinstance(err, smtplib.SMTPServerDisconnected): raise SendMailException( "The server unexpectedly closed the connection", 503 @@ -416,12 +416,16 @@ def _handle_sending_exception(self, err): "Sending failed", http_code=503, server_error=str(err) ) - def send_generated_email(self, recipients, raw_message): # noqa: ANN201 + def send_generated_email( # type: ignore[no-untyped-def] # noqa: ANN201 + self, recipients, raw_message + ): # A tiny wrapper over _send because the API differs # between SMTP and EAS. return self._send(recipients, raw_message) - def send_custom(self, draft, body, recipients) -> None: # noqa: D417 + def send_custom( # type: ignore[no-untyped-def] # noqa: D417 + self, draft, body, recipients + ) -> None: """ Turn a draft object into a MIME message, replacing the body with the provided body, and send it only to the provided recipients. @@ -461,7 +465,7 @@ def send_custom(self, draft, body, recipients) -> None: # noqa: D417 # Sent successfully self.log.info("Sending successful", draft_id=draft.id) - def send(self, draft) -> None: + def send(self, draft) -> None: # type: ignore[no-untyped-def] """ Turn a draft object into a MIME message and send it. @@ -518,7 +522,7 @@ def send(self, draft) -> None: # Sent to all successfully self.log.info("Sending successful", draft_id=draft.id) - def send_raw(self, msg) -> None: + def send_raw(self, msg) -> None: # type: ignore[no-untyped-def] recipient_emails = [ email for name, email in itertools.chain( @@ -539,7 +543,7 @@ def send_raw(self, msg) -> None: recipients=recipient_emails, ) - def _get_connection(self): + def _get_connection(self): # type: ignore[no-untyped-def] smtp_connection = SMTPConnection( account_id=self.account_id, email_address=self.email_address, diff --git a/inbox/sqlalchemy_ext/json_util.py b/inbox/sqlalchemy_ext/json_util.py index b9d4866b4..57826a434 100644 --- a/inbox/sqlalchemy_ext/json_util.py +++ b/inbox/sqlalchemy_ext/json_util.py @@ -33,7 +33,7 @@ EPOCH_NAIVE = datetime.datetime.utcfromtimestamp(0) -def dumps(obj, *args, **kwargs): # noqa: ANN201 +def dumps(obj, *args, **kwargs): # type: ignore[no-untyped-def] # noqa: ANN201 """ Helper function that wraps :class:`json.dumps`. @@ -42,13 +42,13 @@ def dumps(obj, *args, **kwargs): # noqa: ANN201 return json.dumps(_json_convert(obj), *args, **kwargs) -def loads(s, *args, **kwargs): # noqa: ANN201 +def loads(s, *args, **kwargs): # type: ignore[no-untyped-def] # noqa: ANN201 """Helper function that wraps :class:`json.loads`.""" # noqa: D401 kwargs["object_hook"] = lambda dct: object_hook(dct) return json.loads(s, *args, **kwargs) -def _json_convert(obj): +def _json_convert(obj): # type: ignore[no-untyped-def] """ Recursive helper method that converts datetime.datetime type so it can be converted into json. 
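For context on the dumps()/loads() pair above: datetimes round-trip through the BSON-style {"$date": <epoch millis>} wrapper that object_hook consumes. A usage sketch — the exact encoded shape is my reading of the code, not verified output:

import datetime

from inbox.sqlalchemy_ext import json_util

now = datetime.datetime(2024, 1, 1, 12, 0, 0)
encoded = json_util.dumps({"ts": now})
# expected: '{"ts": {"$date": 1704110400000}}'
decoded = json_util.loads(encoded)
# expected: {"ts": datetime.datetime(2024, 1, 1, 12, 0)}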
@@ -63,7 +63,7 @@ def _json_convert(obj): return obj -def object_hook(dct): # noqa: ANN201 +def object_hook(dct): # type: ignore[no-untyped-def] # noqa: ANN201 if "$date" in dct: dtm = dct["$date"] secs = float(dtm) / 1000.0 @@ -71,10 +71,10 @@ def object_hook(dct): # noqa: ANN201 return dct -def default(obj): # noqa: ANN201 +def default(obj): # type: ignore[no-untyped-def] # noqa: ANN201 if isinstance(obj, datetime.datetime): if obj.utcoffset() is not None: - obj = obj - obj.utcoffset() + obj = obj - obj.utcoffset() # type: ignore[operator] millis = int( calendar.timegm(obj.timetuple()) * 1000 + obj.microsecond / 1000 ) diff --git a/inbox/sqlalchemy_ext/util.py b/inbox/sqlalchemy_ext/util.py index 16fbefcb5..57ee3ca8c 100644 --- a/inbox/sqlalchemy_ext/util.py +++ b/inbox/sqlalchemy_ext/util.py @@ -7,12 +7,15 @@ from collections.abc import MutableMapping from typing import Any -from sqlalchemy import String, Text, event -from sqlalchemy.engine import Engine -from sqlalchemy.ext.mutable import Mutable -from sqlalchemy.pool import QueuePool -from sqlalchemy.sql import operators -from sqlalchemy.types import BINARY, TypeDecorator +from sqlalchemy import String, Text, event # type: ignore[import-untyped] +from sqlalchemy.engine import Engine # type: ignore[import-untyped] +from sqlalchemy.ext.mutable import Mutable # type: ignore[import-untyped] +from sqlalchemy.pool import QueuePool # type: ignore[import-untyped] +from sqlalchemy.sql import operators # type: ignore[import-untyped] +from sqlalchemy.types import ( # type: ignore[import-untyped] + BINARY, + TypeDecorator, +) from inbox.logging import get_logger from inbox.sqlalchemy_ext import json_util @@ -37,7 +40,7 @@ # that. Don't use this to silence any warnings in application code because # these warnings are an indicator of excessive lazy loading from the DB. 
@contextlib.contextmanager -def disabled_dubiously_many_queries_warning(): # noqa: ANN201 +def disabled_dubiously_many_queries_warning(): # type: ignore[no-untyped-def] # noqa: ANN201 global should_log_dubiously_many_queries should_log_dubiously_many_queries = False yield @@ -45,7 +48,7 @@ def disabled_dubiously_many_queries_warning(): # noqa: ANN201 @event.listens_for(Engine, "before_cursor_execute") -def before_cursor_execute( +def before_cursor_execute( # type: ignore[no-untyped-def] conn, cursor, statement, parameters, context, executemany ) -> None: if conn not in query_counts: @@ -55,7 +58,7 @@ def before_cursor_execute( @event.listens_for(Engine, "commit") -def before_commit(conn) -> None: +def before_commit(conn) -> None: # type: ignore[no-untyped-def] if not should_log_dubiously_many_queries: return if query_counts.get(conn, 0) > MAX_SANE_QUERIES_PER_SESSION: @@ -96,7 +99,9 @@ class StringWithTransform(TypeDecorator): impl = String - def __init__(self, string_transform, *args, **kwargs) -> None: + def __init__( # type: ignore[no-untyped-def] + self, string_transform, *args, **kwargs + ) -> None: super().__init__(*args, **kwargs) if string_transform is None: raise ValueError("Must provide a string_transform") @@ -104,11 +109,13 @@ def __init__(self, string_transform, *args, **kwargs) -> None: raise TypeError("`string_transform` must be callable") self._string_transform = string_transform - def process_bind_param(self, value, dialect): # noqa: ANN201 + def process_bind_param( # type: ignore[no-untyped-def] # noqa: ANN201 + self, value, dialect + ): return self._string_transform(value) class comparator_factory(String.Comparator): # noqa: N801 - def __eq__(self, other): # noqa: ANN204 + def __eq__(self, other): # type: ignore[no-untyped-def] # noqa: ANN204 other = self.type._string_transform(other) return self.operate(operators.eq, other) @@ -119,13 +126,17 @@ class JSON(TypeDecorator): impl = Text - def process_bind_param(self, value, dialect): # noqa: ANN201 + def process_bind_param( # type: ignore[no-untyped-def] # noqa: ANN201 + self, value, dialect + ): if value is None: return None return json_util.dumps(value) - def process_result_value(self, value, dialect): # noqa: ANN201 + def process_result_value( # type: ignore[no-untyped-def] # noqa: ANN201 + self, value, dialect + ): if not value: return None @@ -139,7 +150,7 @@ def process_result_value(self, value, dialect): # noqa: ANN201 log.error("ValueError on decoding JSON", value=value) -def json_field_too_long(value): # noqa: ANN201 +def json_field_too_long(value): # type: ignore[no-untyped-def] # noqa: ANN201 return len(json_util.dumps(value)) > MAX_TEXT_CHARS @@ -174,9 +185,9 @@ def process_result_value( # Can simply use this as is because though we use inbox.sqlalchemy_ext.json_util, # loads() dumps() return standard Python dicts like the json.* equivalents # (because these are simply called under the hood) -class MutableDict(Mutable, dict): +class MutableDict(Mutable, dict): # type: ignore[type-arg] @classmethod - def coerce(cls, key, value): # noqa: ANN206 + def coerce(cls, key, value): # type: ignore[no-untyped-def] # noqa: ANN206 """Convert plain dictionaries to MutableDict.""" if not isinstance(value, MutableDict): if isinstance(value, dict): @@ -187,31 +198,31 @@ def coerce(cls, key, value): # noqa: ANN206 else: return value - def __setitem__(self, key, value) -> None: + def __setitem__(self, key, value) -> None: # type: ignore[no-untyped-def] """Detect dictionary set events and emit change events.""" 
dict.__setitem__(self, key, value) self.changed() - def __delitem__(self, key) -> None: + def __delitem__(self, key) -> None: # type: ignore[no-untyped-def] """Detect dictionary del events and emit change events.""" dict.__delitem__(self, key) self.changed() - def update(self, *args, **kwargs) -> None: + def update(self, *args, **kwargs) -> None: # type: ignore[no-untyped-def] for k, v in dict(*args, **kwargs).items(): self[k] = v # To support pickling: - def __getstate__(self): # noqa: ANN204 + def __getstate__(self): # type: ignore[no-untyped-def] # noqa: ANN204 return dict(self) - def __setstate__(self, state) -> None: + def __setstate__(self, state) -> None: # type: ignore[no-untyped-def] self.update(state) -class MutableList(Mutable, list): +class MutableList(Mutable, list): # type: ignore[type-arg] @classmethod - def coerce(cls, key, value): # noqa: ANN206 + def coerce(cls, key, value): # type: ignore[no-untyped-def] # noqa: ANN206 """Convert plain list to MutableList""" if not isinstance(value, MutableList): if isinstance(value, list): @@ -222,32 +233,32 @@ def coerce(cls, key, value): # noqa: ANN206 else: return value - def __setitem__(self, idx, value) -> None: + def __setitem__(self, idx, value) -> None: # type: ignore[no-untyped-def] list.__setitem__(self, idx, value) self.changed() - def __delitem__(self, idx) -> None: + def __delitem__(self, idx) -> None: # type: ignore[no-untyped-def] list.__delitem__(self, idx) self.changed() - def append(self, value) -> None: + def append(self, value) -> None: # type: ignore[no-untyped-def] list.append(self, value) self.changed() - def insert(self, idx, value) -> None: + def insert(self, idx, value) -> None: # type: ignore[no-untyped-def] list.insert(self, idx, value) self.changed() - def extend(self, values) -> None: + def extend(self, values) -> None: # type: ignore[no-untyped-def] list.extend(self, values) self.changed() - def pop(self, *args, **kw): # noqa: ANN201 + def pop(self, *args, **kw): # type: ignore[no-untyped-def] # noqa: ANN201 value = list.pop(self, *args, **kw) self.changed() return value - def remove(self, value) -> None: + def remove(self, value) -> None: # type: ignore[no-untyped-def] list.remove(self, value) self.changed() @@ -318,7 +329,9 @@ def utf8_surrogate_fix_decode( def utf8_surrogate_fix_search_function(encoding_name: str) -> codecs.CodecInfo: return codecs.CodecInfo( - utf8_encode, utf8_surrogate_fix_decode, name="utf8-surrogate-fix" # type: ignore + utf8_encode, + utf8_surrogate_fix_decode, # type: ignore[arg-type] + name="utf8-surrogate-fix", ) @@ -337,7 +350,9 @@ class ForceStrictModePool(QueuePool): # # Without this, MySQL will silently insert invalid values in the database if @event.listens_for(ForceStrictModePool, "connect") -def receive_connect(dbapi_connection, connection_record) -> None: +def receive_connect( # type: ignore[no-untyped-def] + dbapi_connection, connection_record +) -> None: cur = dbapi_connection.cursor() cur.execute( "SET SESSION sql_mode='STRICT_TRANS_TABLES,STRICT_ALL_TABLES," @@ -350,7 +365,9 @@ def receive_connect(dbapi_connection, connection_record) -> None: dbapi_connection.encoding = "utf8-surrogate-fix" -def get_db_api_cursor_with_query(session, query): # noqa: ANN201 +def get_db_api_cursor_with_query( # type: ignore[no-untyped-def] # noqa: ANN201 + session, query +): """ Return a DB-API cursor with the given SQLAlchemy query executed. 
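For context on the Mutable subclasses above: plain dicts and lists stored in a JSON column don't notify SQLAlchemy of in-place mutation; wrapping the column type with MutableDict.as_mutable() is what makes `obj.data["k"] = v` mark the row dirty. A sketch of the usual wiring (the Preferences model is hypothetical, not from this codebase):

from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base

from inbox.sqlalchemy_ext.util import JSON, MutableDict

Base = declarative_base()

class Preferences(Base):
    __tablename__ = "preferences"
    id = Column(Integer, primary_key=True)
    data = Column(MutableDict.as_mutable(JSON), default=dict)

# prefs.data["theme"] = "dark" now routes through MutableDict.__setitem__,
# which calls self.changed() and schedules an UPDATE on commit; with a
# plain dict the mutation would be invisible to the session.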
diff --git a/inbox/sync/base_sync.py b/inbox/sync/base_sync.py index 220fa51e9..cad3dc539 100644 --- a/inbox/sync/base_sync.py +++ b/inbox/sync/base_sync.py @@ -27,7 +27,7 @@ class BaseSyncMonitor(InterruptibleThread): """ - def __init__( + def __init__( # type: ignore[no-untyped-def] self, account_id, namespace_id, @@ -86,7 +86,9 @@ def _run(self) -> None: def _run_impl(self) -> None: try: self.sync() - self.heartbeat_status.publish(state="poll") + self.heartbeat_status.publish( # type: ignore[unreachable] + state="poll" + ) # If we get a connection or API permissions error, then sleep # 2x poll frequency. diff --git a/inbox/transactions/actions.py b/inbox/transactions/actions.py index 5062cd55e..318f3e964 100644 --- a/inbox/transactions/actions.py +++ b/inbox/transactions/actions.py @@ -15,7 +15,7 @@ from collections import defaultdict from datetime import datetime, timedelta -from sqlalchemy import desc +from sqlalchemy import desc # type: ignore[import-untyped] from inbox import interruptible_threading from inbox.actions.base import ( @@ -76,11 +76,11 @@ } -def action_uses_crispin_client(action) -> bool: +def action_uses_crispin_client(action) -> bool: # type: ignore[no-untyped-def] return action in MAIL_ACTION_FUNCTION_MAP -def function_for_action(action): # noqa: ANN201 +def function_for_action(action): # type: ignore[no-untyped-def] # noqa: ANN201 if action in MAIL_ACTION_FUNCTION_MAP: return MAIL_ACTION_FUNCTION_MAP[action] return EVENT_ACTION_FUNCTION_MAP[action] @@ -98,7 +98,7 @@ def function_for_action(action): # noqa: ANN201 class SyncbackService(InterruptibleThread): """Asynchronously consumes the action log and executes syncback actions.""" - def __init__( + def __init__( # type: ignore[no-untyped-def] self, syncback_id, process_number, @@ -122,7 +122,7 @@ def __init__( self.batch_size = batch_size self.keep_running = True - self.workers = [] + self.workers = [] # type: ignore[var-annotated] # Dictionary account_id -> semaphore to serialize action syncback for # any particular account. # TODO(emfree): We really only need to serialize actions that operate @@ -159,11 +159,13 @@ def __init__( self.num_idle_workers = 0 self.worker_did_finish = threading.Event() self.worker_did_finish.clear() - self.task_queue = queue.Queue() - self.running_action_ids = set() + self.task_queue = queue.Queue() # type: ignore[var-annotated] + self.running_action_ids = set() # type: ignore[var-annotated] super().__init__() - def _has_recent_move_action(self, db_session, log_entries) -> bool: + def _has_recent_move_action( # type: ignore[no-untyped-def] + self, db_session, log_entries + ) -> bool: """ Determines if we recently completed a move action. Since Nylas doesn't update local UID state after completing an action, we space @@ -206,7 +208,9 @@ def _has_recent_move_action(self, db_session, log_entries) -> bool: else: return False - def _tasks_for_log_entries(self, db_session, log_entries, has_more): + def _tasks_for_log_entries( # type: ignore[no-untyped-def] + self, db_session, log_entries, has_more + ): """ Return SyncbackTask for similar actions (same action & record). """ @@ -305,7 +309,9 @@ def _tasks_for_log_entries(self, db_session, log_entries, has_more): ) return [task] - def _get_batch_task(self, db_session, log_entries, has_more): + def _get_batch_task( # type: ignore[no-untyped-def] + self, db_session, log_entries, has_more + ): """ Helper for _batch_log_entries that returns the batch task for the given valid log entries. 
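The ignore[var-annotated] comments above (e.g. on `self.workers = []`) silence mypy's inability to infer element types for empty containers. The non-ignore alternative is an explicit attribute annotation; a sketch, with element types guessed from how the attributes are used later in the class (treat them as assumptions):

import queue
import threading

class AnnotatedService(threading.Thread):  # illustrative stand-in for SyncbackService
    def __init__(self) -> None:
        super().__init__()
        self.workers: list[threading.Thread] = []
        self.task_queue: "queue.Queue[object]" = queue.Queue()
        self.running_action_ids: set[int] = set()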
@@ -344,7 +350,9 @@ def _get_batch_task(self, db_session, log_entries, has_more): ) return None - def _batch_log_entries(self, db_session, log_entries): + def _batch_log_entries( # type: ignore[no-untyped-def] + self, db_session, log_entries + ): """ Batch action log entries together and return a batch task after verifying we can process them. All actions must belong to the same @@ -505,7 +513,7 @@ def _run_impl(self) -> None: # whichever happens first. timeout = self.poll_interval if self.num_idle_workers == 0: - timeout = None + timeout = None # type: ignore[assignment] self.worker_did_finish.clear() self.worker_did_finish.wait(timeout=timeout) @@ -527,7 +535,9 @@ def _run(self) -> None: def notify_worker_active(self) -> None: self.num_idle_workers -= 1 - def notify_worker_finished(self, action_ids) -> None: + def notify_worker_finished( # type: ignore[no-untyped-def] + self, action_ids + ) -> None: self.num_idle_workers += 1 self.worker_did_finish.set() for action_id in action_ids: @@ -539,12 +549,14 @@ def __del__(self) -> None: class SyncbackBatchTask: - def __init__(self, semaphore, tasks, account_id) -> None: + def __init__( # type: ignore[no-untyped-def] + self, semaphore, tasks, account_id + ) -> None: self.semaphore = semaphore self.tasks = tasks self.account_id = account_id - def _crispin_client_or_none(self): + def _crispin_client_or_none(self): # type: ignore[no-untyped-def] if self.uses_crispin_client(): return writable_connection_pool(self.account_id).get() else: @@ -570,14 +582,14 @@ def execute(self) -> None: # failed. break - def uses_crispin_client(self): # noqa: ANN201 + def uses_crispin_client(self): # type: ignore[no-untyped-def] # noqa: ANN201 return any([task.uses_crispin_client() for task in self.tasks]) - def timeout(self, per_task_timeout): # noqa: ANN201 + def timeout(self, per_task_timeout): # type: ignore[no-untyped-def] # noqa: ANN201 return len(self.tasks) * per_task_timeout @property - def action_log_ids(self): # noqa: ANN201 + def action_log_ids(self): # type: ignore[no-untyped-def] # noqa: ANN201 return [entry for task in self.tasks for entry in task.action_log_ids] @@ -597,7 +609,7 @@ class SyncbackTask: """ - def __init__( + def __init__( # type: ignore[no-untyped-def] self, action_name, semaphore, @@ -621,7 +633,7 @@ def __init__( self.retry_interval = retry_interval self.crispin_client = None - def try_merge_with(self, other): # noqa: ANN201 + def try_merge_with(self, other): # type: ignore[no-untyped-def] # noqa: ANN201 if self.func != other.func: return None @@ -658,7 +670,9 @@ def try_merge_with(self, other): # noqa: ANN201 ) return None - def _log_to_statsd(self, action_log_status, latency=None) -> None: + def _log_to_statsd( # type: ignore[no-untyped-def] + self, action_log_status, latency=None + ) -> None: metric_names = [ f"syncback.overall.{action_log_status}", f"syncback.providers.{self.provider}.{action_log_status}", @@ -707,7 +721,7 @@ def execute_with_lock(self) -> bool | None: max_latency = max_func_latency = 0 for action_log_entry in action_log_entries: - latency, func_latency = self._mark_action_as_successful( + (latency, func_latency) = self._mark_action_as_successful( action_log_entry, before, after, db_session ) max_latency = max(latency, max_latency) @@ -750,7 +764,9 @@ def execute_with_lock(self) -> bool | None: return False - def _get_records_and_actions_to_process(self): + def _get_records_and_actions_to_process( # type: ignore[no-untyped-def] + self, + ): records_to_process = [] action_ids_to_process = [] action_log_record_map = 
dict(zip(self.action_log_ids, self.record_ids)) @@ -768,9 +784,11 @@ def _get_records_and_actions_to_process(self): records_to_process.append( action_log_record_map[action_log_entry.id] ) - return records_to_process, action_ids_to_process + return (records_to_process, action_ids_to_process) - def _execute_timed_action(self, records_to_process): + def _execute_timed_action( # type: ignore[no-untyped-def] + self, records_to_process + ): before_func = datetime.utcnow() func_args = [self.account_id] if can_handle_multiple_records(self.action_name): @@ -783,12 +801,14 @@ def _execute_timed_action(self, records_to_process): func_args.append(self.extra_args) if self.uses_crispin_client(): assert self.crispin_client is not None - func_args.insert(0, self.crispin_client) + func_args.insert( # type: ignore[unreachable] + 0, self.crispin_client + ) self.func(*func_args) after_func = datetime.utcnow() return before_func, after_func - def _mark_action_as_successful( + def _mark_action_as_successful( # type: ignore[no-untyped-def] self, action_log_entry, before, after, db_session ): action_log_entry.status = "successful" @@ -801,7 +821,9 @@ def _mark_action_as_successful( self._log_to_statsd(action_log_entry.status, latency) return (latency, func_latency) - def _mark_action_as_failed(self, action_log_entry, db_session) -> None: + def _mark_action_as_failed( # type: ignore[no-untyped-def] + self, action_log_entry, db_session + ) -> None: self.log.critical("Max retries reached, giving up.", exc_info=True) action_log_entry.status = "failed" self._log_to_statsd(action_log_entry.status) @@ -828,10 +850,10 @@ def _mark_action_as_failed(self, action_log_entry, db_session) -> None: event.deleted_at = datetime.now() db_session.commit() - def uses_crispin_client(self): # noqa: ANN201 + def uses_crispin_client(self): # type: ignore[no-untyped-def] # noqa: ANN201 return action_uses_crispin_client(self.action_name) - def timeout(self, per_task_timeout): # noqa: ANN201 + def timeout(self, per_task_timeout): # type: ignore[no-untyped-def] # noqa: ANN201 return per_task_timeout def execute(self) -> None: @@ -840,20 +862,22 @@ def execute(self) -> None: class SyncbackWorker(InterruptibleThread): - def __init__(self, parent_service, task_timeout: int = 60) -> None: + def __init__( # type: ignore[no-untyped-def] + self, parent_service, task_timeout: int = 60 + ) -> None: self.parent_service = weakref.ref(parent_service) self.task_timeout = task_timeout self.log = logger.new(component="syncback-worker") super().__init__() def _run(self) -> None: - while self.parent_service().keep_running: + while self.parent_service().keep_running: # type: ignore[union-attr] task = interruptible_threading.queue_get( - self.parent_service().task_queue + self.parent_service().task_queue # type: ignore[union-attr] ) try: - self.parent_service().notify_worker_active() + self.parent_service().notify_worker_active() # type: ignore[union-attr] with interruptible_threading.timeout( task.timeout(self.task_timeout) ): @@ -865,6 +889,6 @@ def _run(self) -> None: account_id=task.account_id, ) finally: - self.parent_service().notify_worker_finished( + self.parent_service().notify_worker_finished( # type: ignore[union-attr] task.action_log_ids ) diff --git a/inbox/transactions/delta_sync.py b/inbox/transactions/delta_sync.py index d913e1158..aed639216 100644 --- a/inbox/transactions/delta_sync.py +++ b/inbox/transactions/delta_sync.py @@ -2,8 +2,8 @@ import time from datetime import datetime -from sqlalchemy import asc, bindparam, desc -from 
sqlalchemy.orm.exc import NoResultFound +from sqlalchemy import asc, bindparam, desc # type: ignore[import-untyped] +from sqlalchemy.orm.exc import NoResultFound # type: ignore[import-untyped] from inbox.api.kellogs import APIEncoder, encode from inbox.models import Account, Message, Namespace, Thread, Transaction @@ -17,7 +17,7 @@ } -def get_transaction_cursor_near_timestamp( # noqa: ANN201 +def get_transaction_cursor_near_timestamp( # type: ignore[no-untyped-def] # noqa: ANN201 namespace_id, timestamp, db_session ): """ @@ -85,7 +85,9 @@ def get_transaction_cursor_near_timestamp( # noqa: ANN201 return latest_transaction.public_id -def _get_last_trx_id_for_namespace(namespace_id, db_session): +def _get_last_trx_id_for_namespace( # type: ignore[no-untyped-def] + namespace_id, db_session +): q = db_session.query(Transaction.id) q = q.filter(Transaction.namespace_id == bindparam("namespace_id")) q = ( @@ -96,7 +98,7 @@ def _get_last_trx_id_for_namespace(namespace_id, db_session): return q.params(namespace_id=namespace_id).one()[0] -def format_transactions_after_pointer( # noqa: ANN201, D417 +def format_transactions_after_pointer( # type: ignore[no-untyped-def] # noqa: ANN201, D417 namespace, pointer, db_session, @@ -285,7 +287,7 @@ def format_transactions_after_pointer( # noqa: ANN201, D417 pointer = transactions[-1].id -def streaming_change_generator( # noqa: ANN201, D417 +def streaming_change_generator( # type: ignore[no-untyped-def] # noqa: ANN201, D417 namespace, poll_interval, timeout, diff --git a/inbox/util/addr.py b/inbox/util/addr.py index f402f7db0..6ad551257 100644 --- a/inbox/util/addr.py +++ b/inbox/util/addr.py @@ -1,16 +1,20 @@ import email.utils import re -from flanker.addresslib import address -from flanker.mime.message.headers.encodedword import decode -from flanker.mime.message.headers.parsing import normalize -from flanker.mime.message.part import MimePart +from flanker.addresslib import address # type: ignore[import-untyped] +from flanker.mime.message.headers.encodedword import ( # type: ignore[import-untyped] + decode, +) +from flanker.mime.message.headers.parsing import ( # type: ignore[import-untyped] + normalize, +) +from flanker.mime.message.part import MimePart # type: ignore[import-untyped] from inbox.sqlalchemy_ext.util import MAX_TEXT_BYTES class HeaderTooBigException(Exception): - def __init__(self, header) -> None: + def __init__(self, header) -> None: # type: ignore[no-untyped-def] super().__init__(f"header {header!r} length is over the parsing limit") @@ -22,7 +26,7 @@ def __init__(self, header) -> None: ) -def valid_email(email_address) -> bool: +def valid_email(email_address) -> bool: # type: ignore[no-untyped-def] parsed = address.parse(email_address, addr_spec_only=True) if isinstance(parsed, address.EmailAddress): return True @@ -74,6 +78,6 @@ def parse_mimepart_address_header( return sorted(list(elem) for elem in addresses) -def extract_emails_from_text(text): # noqa: ANN201 +def extract_emails_from_text(text): # type: ignore[no-untyped-def] # noqa: ANN201 emails = EMAIL_FIND_RE.findall(text) return [email for email in emails if valid_email(email)] diff --git a/inbox/util/blockstore.py b/inbox/util/blockstore.py index a5990282b..f9e6bd009 100644 --- a/inbox/util/blockstore.py +++ b/inbox/util/blockstore.py @@ -16,8 +16,8 @@ # TODO: store AWS credentials in a better way. 
STORE_MSG_ON_S3 = config.get("STORE_MESSAGES_ON_S3", None) -import boto3 # noqa: E402 -import botocore.exceptions # noqa: E402 +import boto3 # type: ignore[import-untyped] # noqa: E402 +import botocore.exceptions # type: ignore[import-untyped] # noqa: E402 # https://github.com/facebook/zstd/blob/dev/doc/zstd_compression_format.md#zstandard-frames # > This value was selected to be less probable to find at the beginning of some random file. @@ -27,7 +27,7 @@ ZSTD_MAGIC_NUMBER_PREFIX = 0xFD2FB528.to_bytes(4, "little") -def _data_file_directory(h): +def _data_file_directory(h): # type: ignore[no-untyped-def] return os.path.join( # noqa: PTH118 config.get_required("MSG_PARTS_DIRECTORY"), h[0], @@ -39,7 +39,7 @@ def _data_file_directory(h): ) -def _data_file_path(h): +def _data_file_path(h): # type: ignore[no-untyped-def] return os.path.join(_data_file_directory(h), h) # noqa: PTH118 @@ -162,7 +162,7 @@ def _save_to_s3( ) -def get_s3_bucket(bucket_name): # noqa: ANN201 +def get_s3_bucket(bucket_name): # type: ignore[no-untyped-def] # noqa: ANN201 resource = boto3.resource( "s3", aws_access_key_id=config.get("AWS_ACCESS_KEY_ID"), @@ -173,7 +173,7 @@ def get_s3_bucket(bucket_name): # noqa: ANN201 return resource.Bucket(bucket_name) -def _s3_key_exists(bucket, key) -> bool: +def _s3_key_exists(bucket, key) -> bool: # type: ignore[no-untyped-def] """ Check if a key exists in an S3 bucket by doing a HEAD request. """ @@ -210,7 +210,7 @@ def _save_to_s3_bucket( statsd_client.timing("s3_blockstore.save_latency", latency_millis) -def get_from_blockstore( +def get_from_blockstore( # type: ignore[no-untyped-def] data_sha256, *, check_sha: bool = True ) -> bytes | None: if STORE_MSG_ON_S3: @@ -278,7 +278,7 @@ def get_raw_mime(data_sha256: str) -> "bytes | None": return decompressed_raw_mime -def _get_from_s3(data_sha256): +def _get_from_s3(data_sha256): # type: ignore[no-untyped-def] assert "AWS_ACCESS_KEY_ID" in config, "Need AWS key!" assert "AWS_SECRET_ACCESS_KEY" in config, "Need AWS secret!" @@ -289,7 +289,8 @@ def _get_from_s3(data_sha256): # Try getting data from our temporary blockstore before # trying getting it from the provider. 
data = _get_from_s3_bucket( - data_sha256, config.get("TEMP_MESSAGE_STORE_BUCKET_NAME") + data_sha256, + config.get("TEMP_MESSAGE_STORE_BUCKET_NAME"), # type: ignore[arg-type] ) if data is not None: @@ -328,7 +329,7 @@ def _get_from_s3_bucket(data_sha256: str, bucket_name: str) -> "bytes | None": return file_object.getvalue() -def _get_from_disk(data_sha256): +def _get_from_disk(data_sha256): # type: ignore[no-untyped-def] if not data_sha256: return None @@ -369,7 +370,7 @@ def _delete_from_s3_bucket( statsd_client.timing("s3_blockstore.delete_latency", latency_millis) -def _delete_from_disk(data_sha256) -> None: +def _delete_from_disk(data_sha256) -> None: # type: ignore[no-untyped-def] if not data_sha256: return @@ -379,12 +380,17 @@ def _delete_from_disk(data_sha256) -> None: log.warning(f"No file with name: {data_sha256}!") -def delete_from_blockstore(*data_sha256_hashes) -> None: +def delete_from_blockstore( # type: ignore[no-untyped-def] + *data_sha256_hashes, +) -> None: log.info("deleting from blockstore", sha256=data_sha256_hashes) if STORE_MSG_ON_S3: _delete_from_s3_bucket( - data_sha256_hashes, config.get("TEMP_MESSAGE_STORE_BUCKET_NAME") + data_sha256_hashes, + config.get( # type: ignore[arg-type] + "TEMP_MESSAGE_STORE_BUCKET_NAME" + ), ) else: for data_sha256 in data_sha256_hashes: diff --git a/inbox/util/concurrency.py b/inbox/util/concurrency.py index df85eb963..461eee914 100644 --- a/inbox/util/concurrency.py +++ b/inbox/util/concurrency.py @@ -9,9 +9,11 @@ from collections.abc import Callable, Iterable from typing import Any, TypeVar -from MySQLdb import _exceptions as _mysql_exceptions +from MySQLdb import ( # type: ignore[import-untyped] + _exceptions as _mysql_exceptions, +) from redis import TimeoutError -from sqlalchemy.exc import StatementError +from sqlalchemy.exc import StatementError # type: ignore[import-untyped] from inbox import interruptible_threading from inbox.error_handling import log_uncaught_errors @@ -40,7 +42,7 @@ ) -def retry( # noqa: ANN201, D417 +def retry( # type: ignore[no-untyped-def] # noqa: ANN201, D417 func, retry_classes=None, fail_classes=None, @@ -74,7 +76,7 @@ class filters. 
"Can't include exception classes in both fail_on and retry_on" ) - def should_retry_on(exc) -> bool: + def should_retry_on(exc) -> bool: # type: ignore[no-untyped-def] if fail_classes and isinstance(exc, tuple(fail_classes)): return False if retry_classes and not isinstance(exc, tuple(retry_classes)): @@ -82,7 +84,7 @@ def should_retry_on(exc) -> bool: return True @functools.wraps(func) - def wrapped(*args, **kwargs): + def wrapped(*args, **kwargs): # type: ignore[no-untyped-def] while True: try: return func(*args, **kwargs) @@ -105,7 +107,7 @@ def wrapped(*args, **kwargs): return wrapped -def retry_with_logging( # noqa: ANN201 +def retry_with_logging( # type: ignore[no-untyped-def] # noqa: ANN201 func, logger=None, retry_classes=None, @@ -119,7 +121,7 @@ def retry_with_logging( # noqa: ANN201 # http://stackoverflow.com/questions/7935966/python-overwriting-variables-in-nested-functions occurrences = [0] - def callback(e) -> None: + def callback(e) -> None: # type: ignore[no-untyped-def] is_transient = isinstance(e, TRANSIENT_NETWORK_ERRS) mysql_error = None @@ -211,7 +213,7 @@ def kill_all( *, block: bool = True, ) -> None: - if not interruptible_threads: + if not interruptible_threads: # type: ignore[truthy-iterable] return for thread in interruptible_threads: diff --git a/inbox/util/db.py b/inbox/util/db.py index ab18851d5..b6152dc6d 100644 --- a/inbox/util/db.py +++ b/inbox/util/db.py @@ -1,5 +1,5 @@ -from sqlalchemy.engine import reflection -from sqlalchemy.schema import ( +from sqlalchemy.engine import reflection # type: ignore[import-untyped] +from sqlalchemy.schema import ( # type: ignore[import-untyped] DropConstraint, DropTable, ForeignKeyConstraint, @@ -9,7 +9,9 @@ # http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything -def drop_everything(engine, keep_tables=None, reset_columns=None) -> None: +def drop_everything( # type: ignore[no-untyped-def] + engine, keep_tables=None, reset_columns=None +) -> None: """ Drops all tables in the db unless their name is in `keep_tables`. `reset_columns` is used to specify the columns that should be reset to diff --git a/inbox/util/debug.py b/inbox/util/debug.py index 3adc7f33b..c9575d073 100644 --- a/inbox/util/debug.py +++ b/inbox/util/debug.py @@ -1,7 +1,9 @@ """Utilities for debugging failures in development/staging.""" -def bind_context(thread, role, account_id, *args) -> None: +def bind_context( # type: ignore[no-untyped-def] + thread, role, account_id, *args +) -> None: """ Bind a human-interpretable "context" to the thread `gr`, for execution-tracing purposes. The context consists of the thread's role diff --git a/inbox/util/html.py b/inbox/util/html.py index 96a1cc77f..394f0b494 100644 --- a/inbox/util/html.py +++ b/inbox/util/html.py @@ -14,12 +14,14 @@ class HTMLTagStripper(HTMLParser): def __init__(self) -> None: self.reset() - self.fed = [] + self.fed = [] # type: ignore[var-annotated] self.strip_tag_contents_mode = False HTMLParser.__init__(self) - def handle_starttag(self, tag, attrs) -> None: + def handle_starttag( # type: ignore[no-untyped-def] + self, tag, attrs + ) -> None: # Replace
<br>, <div>
tags by spaces if tag.lower() in ("br", "div"): self.fed.append(" ") @@ -30,14 +32,14 @@ def handle_starttag(self, tag, attrs) -> None: if tag.lower() in HTMLTagStripper.strippedTags: self.strip_tag_contents_mode = True - def handle_endtag(self, tag) -> None: + def handle_endtag(self, tag) -> None: # type: ignore[no-untyped-def] self.strip_tag_contents_mode = False - def handle_data(self, d) -> None: + def handle_data(self, d) -> None: # type: ignore[no-untyped-def] if not self.strip_tag_contents_mode: self.fed.append(d) - def handle_entityref(self, d) -> None: + def handle_entityref(self, d) -> None: # type: ignore[no-untyped-def] try: val = chr(name2codepoint[d]) except KeyError: @@ -84,7 +86,7 @@ def strip_tags(html: str) -> str: def plaintext2html(text: str, tabstop: int = 4) -> str: assert "\r" not in text, "newlines not normalized" - def do_sub(m): + def do_sub(m): # type: ignore[no-untyped-def] c = m.groupdict() if c["htmlchars"]: return html_escape(c["htmlchars"], quote=False) diff --git a/inbox/util/itert.py b/inbox/util/itert.py index 09514d6fa..b4d683289 100644 --- a/inbox/util/itert.py +++ b/inbox/util/itert.py @@ -1,7 +1,7 @@ import itertools -def chunk(iterable, size): # noqa: ANN201 +def chunk(iterable, size): # type: ignore[no-untyped-def] # noqa: ANN201 """ Yield chunks of an iterable. diff --git a/inbox/util/misc.py b/inbox/util/misc.py index d0220150b..d0a519105 100644 --- a/inbox/util/misc.py +++ b/inbox/util/misc.py @@ -10,10 +10,10 @@ class DummyContextManager: - def __enter__(self): # noqa: ANN204 + def __enter__(self): # type: ignore[no-untyped-def] # noqa: ANN204 return None - def __exit__( + def __exit__( # type: ignore[exit-return] self, exc_type: type[BaseException] | None, exc_value: BaseException | None, @@ -22,7 +22,7 @@ def __exit__( return False -def or_none(value, selector): # noqa: ANN201 +def or_none(value, selector): # type: ignore[no-untyped-def] # noqa: ANN201 if value is None: return None else: @@ -63,7 +63,7 @@ def parse_references(references: str, in_reply_to: str) -> list[str]: return reference_list -def dt_to_timestamp(dt): # noqa: ANN201 +def dt_to_timestamp(dt): # type: ignore[no-untyped-def] # noqa: ANN201 return int((dt - datetime(1970, 1, 1)).total_seconds()) @@ -83,7 +83,7 @@ def get_internaldate(date: str | None, received: str | None) -> datetime: # Based on: http://stackoverflow.com/a/8556471 -def load_modules(base_name, base_path): # noqa: ANN201 +def load_modules(base_name, base_path): # type: ignore[no-untyped-def] # noqa: ANN201 """ Imports all modules underneath `base_module` in the module tree. @@ -106,7 +106,7 @@ def load_modules(base_name, base_path): # noqa: ANN201 return modules -def register_backends(base_name, base_path): # noqa: ANN201 +def register_backends(base_name, base_path): # type: ignore[no-untyped-def] # noqa: ANN201 """ Dynamically loads all packages contained within thread backends module, including those by other module install paths @@ -129,7 +129,7 @@ def register_backends(base_name, base_path): # noqa: ANN201 return mod_for -def cleanup_subject(subject_str): # noqa: ANN201 +def cleanup_subject(subject_str): # type: ignore[no-untyped-def] # noqa: ANN201 """ Clean-up a message subject-line, including whitespace. For instance, 'Re: Re: Re: Birthday party' becomes 'Birthday party' @@ -148,7 +148,7 @@ def cleanup_subject(subject_str): # noqa: ANN201 # IMAP doesn't support nested folders and instead encodes paths inside folder # names. # imap_folder_path converts a "/" delimited path to an IMAP compatible path. 
-def imap_folder_path( # noqa: ANN201 +def imap_folder_path( # type: ignore[no-untyped-def] # noqa: ANN201 path, separator: str = ".", prefix: str = "" ): folders = [folder for folder in path.split("/") if folder != ""] @@ -169,7 +169,7 @@ def imap_folder_path( # noqa: ANN201 return res -def strip_prefix(path, prefix): # noqa: ANN201 +def strip_prefix(path, prefix): # type: ignore[no-untyped-def] # noqa: ANN201 if path.startswith(prefix): return path[len(prefix) :] @@ -177,7 +177,7 @@ def strip_prefix(path, prefix): # noqa: ANN201 # fs_folder_path converts an IMAP compatible path to a "/" delimited path. -def fs_folder_path( # noqa: ANN201 +def fs_folder_path( # type: ignore[no-untyped-def] # noqa: ANN201 path, separator: str = ".", prefix: str = "" ): if prefix: diff --git a/inbox/util/rdb.py b/inbox/util/rdb.py index eaeefff16..d489a8374 100644 --- a/inbox/util/rdb.py +++ b/inbox/util/rdb.py @@ -15,16 +15,18 @@ class RemoteConsole(InteractiveConsole): - def __init__(self, socket, locals=None) -> None: + def __init__( # type: ignore[no-untyped-def] + self, socket, locals=None + ) -> None: self.socket = socket self.handle = socket.makefile("rw") InteractiveConsole.__init__(self, locals=locals) self.handle.write(doc) - def write(self, data) -> None: + def write(self, data) -> None: # type: ignore[no-untyped-def] self.handle.write(data) - def runcode(self, code) -> None: + def runcode(self, code) -> None: # type: ignore[no-untyped-def] # preserve stdout/stderr oldstdout = sys.stdout oldstderr = sys.stderr @@ -36,7 +38,9 @@ def runcode(self, code) -> None: sys.stdout = oldstdout sys.stderr = oldstderr - def interact(self, banner=None) -> None: + def interact( # type: ignore[no-untyped-def, override] + self, banner=None + ) -> None: """ Closely emulate the interactive Python console. @@ -73,7 +77,7 @@ def interact(self, banner=None) -> None: else: prompt = sys.ps1 try: - line = self.raw_input(prompt) + line = self.raw_input(prompt) # type: ignore[arg-type] self.handle.flush() # Can be None if sys.stdin was redefined encoding = getattr(sys.stdin, "encoding", None) @@ -99,13 +103,15 @@ def terminate(self) -> None: except OSError: return - def raw_input(self, prompt: str = ""): # noqa: ANN201 + def raw_input(self, prompt: str = ""): # type: ignore[no-untyped-def] # noqa: ANN201 self.handle.write(prompt) self.handle.flush() return self.handle.readline() -def break_to_interpreter(host: str = "localhost", port=None) -> None: +def break_to_interpreter( # type: ignore[no-untyped-def] + host: str = "localhost", port=None +) -> None: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) diff --git a/inbox/util/sharding.py b/inbox/util/sharding.py index 1b6045f73..dae6e2d47 100644 --- a/inbox/util/sharding.py +++ b/inbox/util/sharding.py @@ -1,7 +1,7 @@ from inbox.config import config -def get_shard_schemas(): # noqa: ANN201 +def get_shard_schemas(): # type: ignore[no-untyped-def] # noqa: ANN201 # Can't use engine_manager.engines here because it does not track # shard schemas. shard_schemas = {} diff --git a/inbox/util/startup.py b/inbox/util/startup.py index 50b86a733..3e32f7d16 100644 --- a/inbox/util/startup.py +++ b/inbox/util/startup.py @@ -47,7 +47,9 @@ def check_tz() -> None: sys.exit(_TZ_ERROR_TEXT) -def load_overrides(file_path, loaded_config=config) -> None: +def load_overrides( # type: ignore[no-untyped-def] + file_path, loaded_config=config +) -> None: """ Convenience function for overriding default configuration. 
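Back in inbox/util/misc.py above, imap_folder_path and fs_folder_path convert between "/"-delimited paths and provider paths built from a separator and optional prefix. A round-trip sketch with the default "." separator — the expected values follow from my reading of the implementation rather than verified output:

from inbox.util.misc import fs_folder_path, imap_folder_path

assert imap_folder_path("archive/2014") == "archive.2014"
assert fs_folder_path("archive.2014") == "archive/2014"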
diff --git a/inbox/util/stats.py b/inbox/util/stats.py index 02a221515..9cd4572b5 100644 --- a/inbox/util/stats.py +++ b/inbox/util/stats.py @@ -1,9 +1,9 @@ -import statsd +import statsd # type: ignore[import-untyped] from inbox.config import config -def get_statsd_client(): # noqa: ANN201 +def get_statsd_client(): # type: ignore[no-untyped-def] # noqa: ANN201 return statsd.StatsClient( str(config.get("STATSD_HOST", "localhost")), config.get("STATSD_PORT", 8125), diff --git a/inbox/util/testutils.py b/inbox/util/testutils.py index ef9d9f472..47de44335 100644 --- a/inbox/util/testutils.py +++ b/inbox/util/testutils.py @@ -7,7 +7,7 @@ import attr import dns -import pytest +import pytest # type: ignore[import-not-found] from inbox.exceptions import ValidationError from inbox.util.file import get_data @@ -74,7 +74,7 @@ def setup_test_db() -> None: @attr.s class MockAnswer: - exchange = attr.ib() + exchange = attr.ib() # type: ignore[var-annotated] class MockDNSResolver: @@ -83,10 +83,10 @@ def __init__(self) -> None: Literal["mx", "ns"], dict[str, dict[str, str] | list[str]] ] = {"mx": {}, "ns": {}} - def _load_records(self, filename) -> None: + def _load_records(self, filename) -> None: # type: ignore[no-untyped-def] self._registry = json.loads(get_data(filename)) - def query(self, domain, record_type): # noqa: ANN201 + def query(self, domain, record_type): # type: ignore[no-untyped-def] # noqa: ANN201 record_type = record_type.lower() entry = self._registry[record_type][domain] if isinstance(entry, dict): @@ -100,7 +100,7 @@ def query(self, domain, record_type): # noqa: ANN201 @pytest.fixture -def mock_dns_resolver(monkeypatch): # noqa: ANN201 +def mock_dns_resolver(monkeypatch): # type: ignore[no-untyped-def] # noqa: ANN201 dns_resolver = MockDNSResolver() monkeypatch.setattr("inbox.util.url.dns_resolver", dns_resolver) yield dns_resolver @@ -114,46 +114,54 @@ class MockIMAPClient: """ def __init__(self) -> None: - self._data = {} + self._data = {} # type: ignore[var-annotated] self.selected_folder = None self.uidvalidity = 1 - self.logins = {} + self.logins = {} # type: ignore[var-annotated] self.error_message = "" - def _add_login(self, email, password) -> None: + def _add_login( # type: ignore[no-untyped-def] + self, email, password + ) -> None: self.logins[email] = password - def _set_error_message(self, message) -> None: + def _set_error_message( # type: ignore[no-untyped-def] + self, message + ) -> None: self.error_message = message - def login(self, email, password) -> None: + def login(self, email, password) -> None: # type: ignore[no-untyped-def] if email not in self.logins or self.logins[email] != password: raise ValidationError(self.error_message) def logout(self) -> None: pass - def list_folders( # noqa: ANN201 + def list_folders( # type: ignore[no-untyped-def] # noqa: ANN201 self, directory: str = "", pattern: str = "*" ): return [(b"\\All", b"/", "[Gmail]/All Mail")] - def has_capability(self, capability) -> bool: + def has_capability( # type: ignore[no-untyped-def] + self, capability + ) -> bool: return False - def idle_check(self, timeout=None): # noqa: ANN201 + def idle_check(self, timeout=None): # type: ignore[no-untyped-def] # noqa: ANN201 return [] - def idle_done(self): # noqa: ANN201 + def idle_done(self): # type: ignore[no-untyped-def] # noqa: ANN201 return ("Idle terminated", []) - def add_folder_data(self, folder_name, uids) -> None: + def add_folder_data( # type: ignore[no-untyped-def] + self, folder_name, uids + ) -> None: """Adds fake UID data for the given 
folder.""" # noqa: D401 self._data[folder_name] = uids - def search(self, criteria): # noqa: ANN201 + def search(self, criteria): # type: ignore[no-untyped-def] # noqa: ANN201 assert self.selected_folder is not None - assert isinstance(criteria, list) + assert isinstance(criteria, list) # type: ignore[unreachable] uid_dict = self._data[self.selected_folder] if criteria == ["ALL"]: return list(uid_dict) @@ -179,15 +187,19 @@ def search(self, criteria): # noqa: ANN201 return [u for u, v in uid_dict.items() if v[criteria[0]] == thrid] raise ValueError(f"unsupported test criteria: {criteria!r}") - def select_folder( # noqa: ANN201 + def select_folder( # type: ignore[no-untyped-def] # noqa: ANN201 self, folder_name, readonly: bool = False ): self.selected_folder = folder_name return self.folder_status(folder_name) - def fetch(self, items, data, modifiers=None): # noqa: ANN201 + def fetch( # type: ignore[no-untyped-def] # noqa: ANN201 + self, items, data, modifiers=None + ): assert self.selected_folder is not None - uid_dict = self._data[self.selected_folder] + uid_dict = self._data[ # type: ignore[unreachable] + self.selected_folder + ] resp = {} if "BODY.PEEK[]" in data: data.remove("BODY.PEEK[]") @@ -214,7 +226,7 @@ def fetch(self, items, data, modifiers=None): # noqa: ANN201 } return resp - def append( + def append( # type: ignore[no-untyped-def] self, folder_name, mimemsg, @@ -235,7 +247,9 @@ def append( b"X-GM-THRID": x_gm_thrid, } - def copy(self, matching_uids, folder_name) -> None: + def copy( # type: ignore[no-untyped-def] + self, matching_uids, folder_name + ) -> None: """ Note: _moves_ one or more messages from the currently selected folder to folder_name @@ -244,10 +258,12 @@ def copy(self, matching_uids, folder_name) -> None: self._data[folder_name][u] = self._data[self.selected_folder][u] self.delete_messages(matching_uids) - def capabilities(self): # noqa: ANN201 + def capabilities(self): # type: ignore[no-untyped-def] # noqa: ANN201 return [] - def folder_status(self, folder_name, data=None): # noqa: ANN201 + def folder_status( # type: ignore[no-untyped-def] # noqa: ANN201 + self, folder_name, data=None + ): folder_data = self._data[folder_name] lastuid = max(folder_data) if folder_data else 0 resp = {b"UIDNEXT": lastuid + 1, b"UIDVALIDITY": self.uidvalidity} @@ -257,25 +273,33 @@ def folder_status(self, folder_name, data=None): # noqa: ANN201 ) return resp - def delete_messages(self, uids, silent: bool = False) -> None: + def delete_messages( # type: ignore[no-untyped-def] + self, uids, silent: bool = False + ) -> None: for u in uids: del self._data[self.selected_folder][u] - def remove_flags(self, uids, flags) -> None: + def remove_flags( # type: ignore[no-untyped-def] + self, uids, flags + ) -> None: pass - def remove_gmail_labels(self, uids, labels) -> None: + def remove_gmail_labels( # type: ignore[no-untyped-def] + self, uids, labels + ) -> None: pass def expunge(self) -> None: pass - def oauth2_login(self, email, token) -> None: + def oauth2_login( # type: ignore[no-untyped-def] + self, email, token + ) -> None: pass @pytest.fixture -def mock_imapclient(monkeypatch): # noqa: ANN201 +def mock_imapclient(monkeypatch): # type: ignore[no-untyped-def] # noqa: ANN201 conn = MockIMAPClient() monkeypatch.setattr( "inbox.crispin.CrispinConnectionPool._new_raw_connection", @@ -293,11 +317,11 @@ class MockSMTPClient: @pytest.fixture -def mock_smtp_get_connection(monkeypatch): # noqa: ANN201 +def mock_smtp_get_connection(monkeypatch): # type: ignore[no-untyped-def] # noqa: ANN201 
client = MockSMTPClient() @contextlib.contextmanager - def get_connection(account): + def get_connection(account): # type: ignore[no-untyped-def] yield client monkeypatch.setattr( @@ -308,7 +332,7 @@ def get_connection(account): @pytest.fixture -def files(db): # noqa: ANN201 +def files(db): # type: ignore[no-untyped-def] # noqa: ANN201 filenames = FILENAMES data = [] for filename in filenames: @@ -325,7 +349,7 @@ def files(db): # noqa: ANN201 @pytest.fixture -def uploaded_file_ids(api_client, files): # noqa: ANN201 +def uploaded_file_ids(api_client, files): # type: ignore[no-untyped-def] # noqa: ANN201 file_ids = [] upload_path = "/files" for filename, path in files: diff --git a/inbox/util/threading.py b/inbox/util/threading.py index e97821927..9b3bc0719 100644 --- a/inbox/util/threading.py +++ b/inbox/util/threading.py @@ -1,7 +1,10 @@ from operator import attrgetter -from sqlalchemy import desc -from sqlalchemy.orm import contains_eager, load_only +from sqlalchemy import desc # type: ignore[import-untyped] +from sqlalchemy.orm import ( # type: ignore[import-untyped] + contains_eager, + load_only, +) from inbox.models.message import Message from inbox.models.thread import Thread @@ -11,7 +14,7 @@ MAX_MESSAGES_SCANNED = 20000 -def fetch_corresponding_thread( # noqa: ANN201 +def fetch_corresponding_thread( # type: ignore[no-untyped-def] # noqa: ANN201 db_session, namespace_id, message ): """ @@ -41,11 +44,13 @@ def fetch_corresponding_thread( # noqa: ANN201 Thread.namespace_id == namespace_id, Thread._cleaned_subject == clean_subject, ) - .outerjoin(Message, Thread.messages) + .outerjoin(Message, Thread.messages) # type: ignore[attr-defined] .order_by(desc(Thread.id)) .options( load_only("id", "discriminator"), - contains_eager(Thread.messages).load_only( + contains_eager( + Thread.messages # type: ignore[attr-defined] + ).load_only( "from_addr", "to_addr", "bcc_addr", "cc_addr", "received_date" ), ) diff --git a/inbox/util/url.py b/inbox/util/url.py index a71cff78b..9dfceec13 100644 --- a/inbox/util/url.py +++ b/inbox/util/url.py @@ -4,7 +4,7 @@ import dns from dns.resolver import NXDOMAIN, NoAnswer, NoNameservers, Resolver, Timeout -from tldextract import extract as tld_extract +from tldextract import extract as tld_extract # type: ignore[import-untyped] from inbox.logging import get_logger @@ -27,11 +27,11 @@ class InvalidEmailAddressError(Exception): pass -def _dns_resolver(): +def _dns_resolver(): # type: ignore[no-untyped-def] return dns_resolver -def _fallback_get_mx_domains(domain): +def _fallback_get_mx_domains(domain): # type: ignore[no-untyped-def] """ Sometimes dns.resolver.Resolver fails to return what we want. See http://stackoverflow.com/questions/18898847. In such cases, try using @@ -46,7 +46,9 @@ def _fallback_get_mx_domains(domain): return [] -def get_mx_domains(domain, dns_resolver=_dns_resolver): # noqa: ANN201 +def get_mx_domains( # type: ignore[no-untyped-def] # noqa: ANN201 + domain, dns_resolver=_dns_resolver +): """Retrieve and return the MX records for a domain.""" mx_records = [] try: @@ -65,7 +67,9 @@ def get_mx_domains(domain, dns_resolver=_dns_resolver): # noqa: ANN201 return [str(rdata.exchange).lower() for rdata in mx_records] -def mx_match(mx_domains, match_domains) -> bool: +def mx_match( # type: ignore[no-untyped-def] + mx_domains, match_domains +) -> bool: """ Return True if any of the `mx_domains` matches an mx_domain in `match_domains`. 
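A sketch of how MockIMAPClient's login bookkeeping above is meant to be exercised in a test (the test itself is hypothetical):

import pytest

from inbox.exceptions import ValidationError
from inbox.util.testutils import MockIMAPClient

def test_rejects_bad_password() -> None:
    client = MockIMAPClient()
    client._add_login("user@example.com", "hunter2")
    client._set_error_message("Invalid credentials")
    with pytest.raises(ValidationError):
        client.login("user@example.com", "wrong")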
@@ -91,7 +95,7 @@ def mx_match(mx_domains, match_domains) -> bool: return False -def provider_from_address( # noqa: ANN201 +def provider_from_address( # type: ignore[no-untyped-def] # noqa: ANN201 email_address, dns_resolver=_dns_resolver ): if not EMAIL_REGEX.match(email_address): @@ -141,7 +145,7 @@ def provider_from_address( # noqa: ANN201 # From tornado.httputil -def url_concat(url, args, fragments=None): # noqa: ANN201 +def url_concat(url, args, fragments=None): # type: ignore[no-untyped-def] # noqa: ANN201 """ Concatenate url and argument dictionary regardless of whether url has existing query parameters. @@ -170,18 +174,18 @@ def url_concat(url, args, fragments=None): # noqa: ANN201 return url + args_tail + fragment_tail -def resolve_hostname(addr): # noqa: ANN201 +def resolve_hostname(addr): # type: ignore[no-untyped-def] # noqa: ANN201 try: return socket.gethostbyname(addr) except OSError: return None -def parent_domain(domain): # noqa: ANN201 +def parent_domain(domain): # type: ignore[no-untyped-def] # noqa: ANN201 return tld_extract(domain).registered_domain -def naked_domain(url): # noqa: ANN201 +def naked_domain(url): # type: ignore[no-untyped-def] # noqa: ANN201 # This function extracts the domain name part of an URL. # It works indiscriminately on URLs or plain domains. res = tld_extract(url) @@ -192,7 +196,9 @@ def naked_domain(url): # noqa: ANN201 return ".".join([res.subdomain, res.registered_domain]) -def matching_subdomains(new_value, old_value) -> bool: +def matching_subdomains( # type: ignore[no-untyped-def] + new_value, old_value +) -> bool: """ We allow our customers to update their server addresses, provided that the new server has: diff --git a/inbox/webhooks/google_notifications.py b/inbox/webhooks/google_notifications.py index 1cf0ea4e2..860d27ef4 100644 --- a/inbox/webhooks/google_notifications.py +++ b/inbox/webhooks/google_notifications.py @@ -1,12 +1,12 @@ from flask import Blueprint, g, jsonify, make_response, request -from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.orm.exc import NoResultFound # type: ignore[import-untyped] from inbox.api.err import APIException, InputError, NotFoundError from inbox.api.validation import valid_public_id from inbox.logging import get_logger log = get_logger() -import limitlion # noqa: E402 +import limitlion # type: ignore[import-untyped] # noqa: E402 from inbox.models import Calendar # noqa: E402 from inbox.models.backends.gmail import GmailAccount # noqa: E402 @@ -19,19 +19,19 @@ GOOGLE_RESOURCE_ID_STRING = "X-Goog-Resource-ID" -def resp(http_code, message=None, **kwargs): # noqa: ANN201 +def resp(http_code, message=None, **kwargs): # type: ignore[no-untyped-def] # noqa: ANN201 resp = kwargs if message: resp["message"] = message if http_code == 204: body = "" else: - body = jsonify(resp) + body = jsonify(resp) # type: ignore[assignment] return make_response(body, http_code) @app.before_request -def start(): # noqa: ANN201 +def start(): # type: ignore[no-untyped-def] # noqa: ANN201 try: watch_state = request.headers[GOOGLE_RESOURCE_STATE_STRING] g.watch_channel_id = request.headers[GOOGLE_CHANNEL_ID_STRING] @@ -53,14 +53,14 @@ def start(): # noqa: ANN201 @app.errorhandler(APIException) -def handle_input_error(error): # noqa: ANN201 +def handle_input_error(error): # type: ignore[no-untyped-def] # noqa: ANN201 response = jsonify(message=error.message, type="invalid_request_error") response.status_code = error.status_code return response @app.route("/calendar_list_update/", methods=["POST"]) -def 
calendar_update(account_public_id): # noqa: ANN201 +def calendar_update(account_public_id): # type: ignore[no-untyped-def] # noqa: ANN201 request.environ["log_context"]["account_public_id"] = account_public_id try: valid_public_id(account_public_id) @@ -86,7 +86,7 @@ def calendar_update(account_public_id): # noqa: ANN201 @app.route("/calendar_update/", methods=["POST"]) -def event_update(calendar_public_id): # noqa: ANN201 +def event_update(calendar_public_id): # type: ignore[no-untyped-def] # noqa: ANN201 request.environ["log_context"]["calendar_public_id"] = calendar_public_id try: valid_public_id(calendar_public_id) diff --git a/inbox/webhooks/microsoft_notifications.py b/inbox/webhooks/microsoft_notifications.py index 196e3bbff..50a42056f 100644 --- a/inbox/webhooks/microsoft_notifications.py +++ b/inbox/webhooks/microsoft_notifications.py @@ -2,7 +2,7 @@ from typing import cast from flask import Blueprint, make_response, request -from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.orm.exc import NoResultFound # type: ignore[import-untyped] from werkzeug.exceptions import UnsupportedMediaType from inbox.config import config @@ -21,9 +21,13 @@ ) -def handle_initial_validation_response(view_function): # noqa: ANN201 +def handle_initial_validation_response( # type: ignore[no-untyped-def] # noqa: ANN201 + view_function, +): @wraps(view_function) - def _handle_initial_validation_response(*args, **kwargs): + def _handle_initial_validation_response( # type: ignore[no-untyped-def] + *args, **kwargs + ): """ Handle initial validation of webhook endpoint. @@ -48,10 +52,16 @@ def _handle_initial_validation_response(*args, **kwargs): return _handle_initial_validation_response -def validate_webhook_payload_factory(type: MsGraphType): # noqa: ANN201 - def validate_webhook_payload(view_function): +def validate_webhook_payload_factory( # type: ignore[no-untyped-def] # noqa: ANN201 + type: MsGraphType, +): + def validate_webhook_payload( # type: ignore[no-untyped-def] + view_function, + ): @wraps(view_function) - def _validate_webhook_payload(*args, **kwargs): + def _validate_webhook_payload( # type: ignore[no-untyped-def] + *args, **kwargs + ): """ Validate webhook payload. 
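The validate_webhook_payload_factory refactor above keeps the standard three-layer decorator-factory shape (factory -> decorator -> wrapper). A standalone sketch of that shape — the payload key and handler are illustrative, not the real Flask-based validator:

from functools import wraps

def require_payload_type(expected_type):
    def decorator(view_function):
        @wraps(view_function)  # preserves the view's name for routing/debugging
        def wrapper(payload, *args, **kwargs):
            if payload.get("@odata.type") != expected_type:
                raise ValueError(f"expected payload of type {expected_type!r}")
            return view_function(payload, *args, **kwargs)
        return wrapper
    return decorator

@require_payload_type("#Microsoft.Graph.Event")
def handle_event(payload):
    return payload["id"]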
@@ -96,7 +106,7 @@ def _validate_webhook_payload(*args, **kwargs): @app.route("/calendar_list_update/", methods=["POST"]) @handle_initial_validation_response @validate_webhook_payload_factory("#Microsoft.Graph.Calendar") -def calendar_update(account_public_id): # noqa: ANN201 +def calendar_update(account_public_id): # type: ignore[no-untyped-def] # noqa: ANN201 """Handle calendar list update for given account.""" with global_session_scope() as db_session: try: @@ -117,7 +127,7 @@ def calendar_update(account_public_id): # noqa: ANN201 @app.route("/calendar_update/", methods=["POST"]) @handle_initial_validation_response @validate_webhook_payload_factory("#Microsoft.Graph.Event") -def event_update(calendar_public_id): # noqa: ANN201 +def event_update(calendar_public_id): # type: ignore[no-untyped-def] # noqa: ANN201 """Handle events update for given calendar.""" with global_session_scope() as db_session: try: @@ -168,7 +178,9 @@ def handle_event_deletions( for deleted_event in deleted_events: deleted_event.status = "cancelled" if isinstance(deleted_event, RecurringEvent): - for override in deleted_event.overrides: + for ( + override + ) in deleted_event.overrides: # type: ignore[attr-defined] override.status = "cancelled" db_session.commit() diff --git a/migrations/env.py b/migrations/env.py index b87e8bfdb..e072d31bb 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -4,14 +4,14 @@ # Interpret the config file for Python logging. # This line sets up loggers basically. -fileConfig(context.config.config_file_name) +fileConfig(context.config.config_file_name) # type: ignore[arg-type] # add your model's MetaData object here # for 'autogenerate' support # from myapp import mymodel from inbox.models.base import MailSyncBase -target_metadata = MailSyncBase.metadata +target_metadata = MailSyncBase.metadata # type: ignore[attr-defined] from inbox.config import config from inbox.ignition import EngineManager diff --git a/migrations/versions/000_g_msgid_g_thrid_as_integers.py b/migrations/versions/000_g_msgid_g_thrid_as_integers.py index 6be7ecbfc..184efc81c 100644 --- a/migrations/versions/000_g_msgid_g_thrid_as_integers.py +++ b/migrations/versions/000_g_msgid_g_thrid_as_integers.py @@ -9,10 +9,10 @@ # revision identifiers, used by Alembic. 
revision = "2605b23e1fe6" -down_revision = None +down_revision: str | None = None from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/002_store_g_thrid_as_biginteger_instead_of_.py b/migrations/versions/002_store_g_thrid_as_biginteger_instead_of_.py index 5d153db4e..1e5bf28d4 100644 --- a/migrations/versions/002_store_g_thrid_as_biginteger_instead_of_.py +++ b/migrations/versions/002_store_g_thrid_as_biginteger_instead_of_.py @@ -11,9 +11,9 @@ revision = "297aa1e1acc7" down_revision = "217431caacc7" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/003_expand_littlejson.py b/migrations/versions/003_expand_littlejson.py index a1081435c..a10bbb53e 100644 --- a/migrations/versions/003_expand_littlejson.py +++ b/migrations/versions/003_expand_littlejson.py @@ -11,7 +11,7 @@ revision = "269247bc37d3" down_revision = "297aa1e1acc7" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/004_drafts_as_required_folder.py b/migrations/versions/004_drafts_as_required_folder.py index 91847d331..5c29a0e72 100644 --- a/migrations/versions/004_drafts_as_required_folder.py +++ b/migrations/versions/004_drafts_as_required_folder.py @@ -11,7 +11,7 @@ revision = "41a7e825d108" down_revision = "269247bc37d3" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/005_import_old_accounts.py b/migrations/versions/005_import_old_accounts.py index db399845d..73ba8c05c 100644 --- a/migrations/versions/005_import_old_accounts.py +++ b/migrations/versions/005_import_old_accounts.py @@ -14,17 +14,19 @@ import os.path from alembic import op -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) SQL_DUMP_FILENAME = "alphasync_rds_inbox_imapaccount.sql" def upgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models.session import session_scope engine = main_engine(pool_size=1, max_overflow=0) - from inbox.auth import gmail + from inbox.auth import gmail # type: ignore[attr-defined] from inbox.models.backends.imap import ImapAccount # Assert we have the dump file @@ -45,10 +47,10 @@ def upgrade() -> None: Base = declarative_base() # noqa: N806 Base.metadata.reflect(engine) - class ImapAccount_Old(Base): # noqa: N801 + class ImapAccount_Old(Base): # type: ignore[misc, valid-type] # noqa: N801 __table__ = Base.metadata.tables["imapaccount_old"] - with session_scope() as db_session: + with session_scope() as db_session: # type: ignore[call-arg] migrated_accounts = [] for acct in db_session.query(ImapAccount_Old): diff --git a/migrations/versions/006_add_search_tokens.py b/migrations/versions/006_add_search_tokens.py index 58e06d67d..e0485d872 100644 --- a/migrations/versions/006_add_search_tokens.py +++ b/migrations/versions/006_add_search_tokens.py @@ -11,7 +11,7 @@ revision = "482338e7a7d6" down_revision = "adc646e1f11" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff 
--git a/migrations/versions/007_per_provider_table_split.py b/migrations/versions/007_per_provider_table_split.py index 77c9f54cd..68818cb49 100644 --- a/migrations/versions/007_per_provider_table_split.py +++ b/migrations/versions/007_per_provider_table_split.py @@ -11,10 +11,12 @@ revision = "1c3f1812f2d9" down_revision = "482338e7a7d6" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.sql import column, table +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) +from sqlalchemy.sql import column, table # type: ignore[import-untyped] def upgrade() -> None: @@ -31,18 +33,18 @@ def downgrade() -> None: # Upgrade funtions: def genericize_imapaccount() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models.session import session_scope engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 Base.metadata.reflect(engine) - class ImapAccount_(Base): # noqa: N801 + class ImapAccount_(Base): # type: ignore[misc, valid-type] # noqa: N801 __table__ = Base.metadata.tables["imapaccount"] # Get data from columns-to-be-dropped - with session_scope() as db_session: + with session_scope() as db_session: # type: ignore[call-arg] results = db_session.query( ImapAccount_.id, ImapAccount_.imap_host ).all() @@ -77,18 +79,18 @@ class ImapAccount_(Base): # noqa: N801 def genericize_thread() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models.session import session_scope engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 Base.metadata.reflect(engine) - class Thread_(Base): # noqa: N801 + class Thread_(Base): # type: ignore[misc, valid-type] # noqa: N801 __table__ = Base.metadata.tables["thread"] # Get data from columns-to-be-dropped - with session_scope() as db_session: + with session_scope() as db_session: # type: ignore[call-arg] results = db_session.query(Thread_.id, Thread_.g_thrid).all() to_insert = [dict(id=r[0], g_thrid=r[1]) for r in results] @@ -179,18 +181,18 @@ def genericize_namespace_contact_foldersync() -> None: # Downgrade functions: def downgrade_imapaccount() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models.session import session_scope engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 Base.metadata.reflect(engine) - class ImapAccount_(Base): # noqa: N801 + class ImapAccount_(Base): # type: ignore[misc, valid-type] # noqa: N801 __table__ = Base.metadata.tables["imapaccount"] # Get data from table-to-be-dropped - with session_scope() as db_session: + with session_scope() as db_session: # type: ignore[call-arg] results = db_session.query( ImapAccount_.id, ImapAccount_.imap_host ).all() @@ -248,18 +250,18 @@ class ImapAccount_(Base): # noqa: N801 def downgrade_imapthread() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models.session import session_scope engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 Base.metadata.reflect(engine) - class ImapThread_(Base): # noqa: N801 + class ImapThread_(Base): # type: ignore[misc, valid-type] # noqa: N801 
__table__ = Base.metadata.tables["imapthread"] # Get data from table-to-be-dropped - with session_scope() as db_session: + with session_scope() as db_session: # type: ignore[call-arg] results = db_session.query(ImapThread_.id, ImapThread_.g_thrid).all() to_insert = [dict(id=r[0], g_thrid=r[1]) for r in results] diff --git a/migrations/versions/008_store_userinfo_from_oauth.py b/migrations/versions/008_store_userinfo_from_oauth.py index ff28b2861..716c6308b 100644 --- a/migrations/versions/008_store_userinfo_from_oauth.py +++ b/migrations/versions/008_store_userinfo_from_oauth.py @@ -11,7 +11,7 @@ revision = "3c11391b5eb0" down_revision = "1c3f1812f2d9" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/009_multiple_contact_providers.py b/migrations/versions/009_multiple_contact_providers.py index f661b3d87..626dbdb03 100644 --- a/migrations/versions/009_multiple_contact_providers.py +++ b/migrations/versions/009_multiple_contact_providers.py @@ -11,7 +11,7 @@ revision = "169cac0cd87e" down_revision = "3c11391b5eb0" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/010_store_raw_contact_data.py b/migrations/versions/010_store_raw_contact_data.py index aa0d5c7dd..e3a27eb1a 100644 --- a/migrations/versions/010_store_raw_contact_data.py +++ b/migrations/versions/010_store_raw_contact_data.py @@ -11,7 +11,7 @@ revision = "3b511977a01f" down_revision = "169cac0cd87e" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/011_use_server_default.py b/migrations/versions/011_use_server_default.py index 800d51050..24de475f0 100644 --- a/migrations/versions/011_use_server_default.py +++ b/migrations/versions/011_use_server_default.py @@ -11,7 +11,7 @@ revision = "3237b6b1ee03" down_revision = "3b511977a01f" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/012_move_google_userinfo_fields_to_.py b/migrations/versions/012_move_google_userinfo_fields_to_.py index 153a32b88..e10cf151f 100644 --- a/migrations/versions/012_move_google_userinfo_fields_to_.py +++ b/migrations/versions/012_move_google_userinfo_fields_to_.py @@ -11,14 +11,16 @@ revision = "193802835c33" down_revision = "3237b6b1ee03" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.sql import column, table +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) +from sqlalchemy.sql import column, table # type: ignore[import-untyped] def upgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models.session import session_scope engine = main_engine(pool_size=1, max_overflow=0) @@ -55,10 +57,10 @@ def upgrade() -> None: ) # MOVE: - class Account_(Base): # noqa: N801 + class Account_(Base): # type: ignore[misc, valid-type] # noqa: N801 __table__ = Base.metadata.tables["account"] - with session_scope() as db_session: + with session_scope() as db_session: # type: ignore[call-arg] results = db_session.query( Account_.id, Account_.family_name, @@ -110,7 +112,7 @@ class Account_(Base): # noqa: N801 def downgrade() -> None: - from inbox.ignition import main_engine + 
from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models.session import session_scope engine = main_engine(pool_size=1, max_overflow=0) @@ -144,10 +146,10 @@ def downgrade() -> None: ) # MOVE: - class ImapAccount_(Base): # noqa: N801 + class ImapAccount_(Base): # type: ignore[misc, valid-type] # noqa: N801 __table__ = Base.metadata.tables["imapaccount"] - with session_scope() as db_session: + with session_scope() as db_session: # type: ignore[call-arg] results = db_session.query( ImapAccount_.id, ImapAccount_.family_name, diff --git a/migrations/versions/013_add_spool_msg.py b/migrations/versions/013_add_spool_msg.py index d46dd2e95..09a5060c8 100644 --- a/migrations/versions/013_add_spool_msg.py +++ b/migrations/versions/013_add_spool_msg.py @@ -11,7 +11,7 @@ revision = "f7dbd9bf4a6" down_revision = "193802835c33" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/014_contact_ranking_signals.py b/migrations/versions/014_contact_ranking_signals.py index b1dfb94f8..e5a4aa2ee 100644 --- a/migrations/versions/014_contact_ranking_signals.py +++ b/migrations/versions/014_contact_ranking_signals.py @@ -11,7 +11,7 @@ revision = "563d405d1f99" down_revision = "f7dbd9bf4a6" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/015_generalize_from_sender_header_field.py b/migrations/versions/015_generalize_from_sender_header_field.py index d16a4f628..18c3bd3cd 100644 --- a/migrations/versions/015_generalize_from_sender_header_field.py +++ b/migrations/versions/015_generalize_from_sender_header_field.py @@ -16,7 +16,7 @@ def upgrade() -> None: from inbox.models import Message from inbox.models.session import session_scope - with session_scope() as db_session: + with session_scope() as db_session: # type: ignore[call-arg] results = db_session.query(Message).all() for message in results: message.from_addr = [message.from_addr] @@ -28,7 +28,7 @@ def downgrade() -> None: from inbox.models import Message from inbox.models.session import session_scope - with session_scope() as db_session: + with session_scope() as db_session: # type: ignore[call-arg] results = db_session.query(Message).all() for message in results: if message.from_addr: diff --git a/migrations/versions/016_extra_transaction_data.py b/migrations/versions/016_extra_transaction_data.py index 65a6c5308..1c2201bc8 100644 --- a/migrations/versions/016_extra_transaction_data.py +++ b/migrations/versions/016_extra_transaction_data.py @@ -11,7 +11,7 @@ revision = "5093433b073" down_revision = "3fee2f161614" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/017_haspublicid.py b/migrations/versions/017_haspublicid.py index 5c3d1d203..0fb5f1b81 100644 --- a/migrations/versions/017_haspublicid.py +++ b/migrations/versions/017_haspublicid.py @@ -14,16 +14,16 @@ import sys from gc import collect as garbage_collect -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] chunk_size = 500 def upgrade() -> None: # These all inherit HasPublicID - from inbox.models import ( + from inbox.models import ( # type: ignore[attr-defined] Account, Block, Contact, @@ -72,7 +72,7 @@ def upgrade() -> None: print("Finished adding columns. 
\nNow generating public_ids") - with session_scope() as db_session: + with session_scope() as db_session: # type: ignore[call-arg] count = 0 for c in classes: garbage_collect() @@ -99,7 +99,7 @@ def upgrade() -> None: def downgrade() -> None: # These all inherit HasPublicID - from inbox.models import ( + from inbox.models import ( # type: ignore[attr-defined] Account, Block, Contact, diff --git a/migrations/versions/018_message_contact_association.py b/migrations/versions/018_message_contact_association.py index 2ef45dc0e..a54614098 100644 --- a/migrations/versions/018_message_contact_association.py +++ b/migrations/versions/018_message_contact_association.py @@ -12,7 +12,7 @@ down_revision = "2c9f3a06de09" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op @@ -38,7 +38,9 @@ def upgrade() -> None: import sys sys.path.append("./tools") - from rerank_contacts import rerank_contacts + from rerank_contacts import ( # type: ignore[import-not-found] + rerank_contacts, + ) rerank_contacts() diff --git a/migrations/versions/019_blocks_to_parts.py b/migrations/versions/019_blocks_to_parts.py index f43c9f8ae..271600328 100644 --- a/migrations/versions/019_blocks_to_parts.py +++ b/migrations/versions/019_blocks_to_parts.py @@ -14,15 +14,17 @@ from typing import Never -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) chunk_size = 250 def upgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models.session import Session, session_scope engine = main_engine(pool_size=1, max_overflow=0) @@ -62,7 +64,7 @@ def upgrade() -> None: Base.metadata.reflect(engine) class Block_( # noqa: N801 - Base + Base # type: ignore[misc, valid-type] ): # old schema, reflected from database table __table__ = Base.metadata.tables["block"] @@ -73,17 +75,17 @@ class Block_( # noqa: N801 print("Migrating from blocks to parts") new_parts = [] - with session_scope() as db_session: + with session_scope() as db_session: # type: ignore[call-arg] for block in db_session.query(Block_).yield_per(chunk_size): # Move relevant fields p = Part() - p.size = block.size - p.data_sha256 = block.data_sha256 + p.size = block.size # type: ignore[attr-defined] + p.data_sha256 = block.data_sha256 # type: ignore[attr-defined] p.message_id = block.message_id p.walk_index = block.walk_index p.content_disposition = block.content_disposition p.content_id = block.content_id - p.misc_keyval = block.misc_keyval + p.misc_keyval = block.misc_keyval # type: ignore[attr-defined] p.is_inboxapp_attachment # noqa: B018 old_namespace = ( @@ -92,7 +94,7 @@ class Block_( # noqa: N801 .filter(Message.id == block.message_id) .one() ) - p.namespace_id = old_namespace.id + p.namespace_id = old_namespace.id # type: ignore[attr-defined] # Commit after column modifications new_parts.append(p) diff --git a/migrations/versions/020_store_webhook_parameters.py b/migrations/versions/020_store_webhook_parameters.py index c71c42b95..9d45bb7d5 100644 --- a/migrations/versions/020_store_webhook_parameters.py +++ b/migrations/versions/020_store_webhook_parameters.py @@ -11,9 +11,9 @@ revision = "10ef1d46f016" down_revision = "5a787816e2bc" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op 
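
Aside: most of these migrations repeat one reflection idiom, visible again in 019 above, and it is the source of the recurring `# type: ignore[misc, valid-type]`. A minimal sketch (illustrative engine URL and table name, not from this patch; assumes the database already contains a `block` table with a primary key) of reflecting a live table into a throwaway mapped class and streaming it in chunks:

    import sqlalchemy as sa
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    engine = sa.create_engine("sqlite:///example.db")  # stand-in for main_engine()

    Base = declarative_base()
    Base.metadata.reflect(engine)  # load table definitions from the live schema

    class Block_(Base):  # type: ignore[misc, valid-type]
        # Bind to the reflected table instead of declaring columns; mypy
        # cannot tell the dynamically created Base is a class, hence the
        # blanket ignore used throughout this patch.
        __table__ = Base.metadata.tables["block"]

    session = sessionmaker(bind=engine)()
    for block in session.query(Block_).yield_per(250):
        ...  # migrate rows chunk by chunk without loading the whole table
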
-from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/021_add_references_column_to_message_table.py b/migrations/versions/021_add_references_column_to_message_table.py index 2c33e9ccb..a53050680 100644 --- a/migrations/versions/021_add_references_column_to_message_table.py +++ b/migrations/versions/021_add_references_column_to_message_table.py @@ -11,7 +11,7 @@ revision = "4fd291c6940c" down_revision = "10ef1d46f016" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/022_store_imapuid_msg_uid_as_biginteger_.py b/migrations/versions/022_store_imapuid_msg_uid_as_biginteger_.py index 586b5f535..fe09bdfc2 100644 --- a/migrations/versions/022_store_imapuid_msg_uid_as_biginteger_.py +++ b/migrations/versions/022_store_imapuid_msg_uid_as_biginteger_.py @@ -11,9 +11,9 @@ revision = "519e462df171" down_revision = "4fd291c6940c" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/022_webhooks_and_filters.py b/migrations/versions/022_webhooks_and_filters.py index 808b59744..f90548a06 100644 --- a/migrations/versions/022_webhooks_and_filters.py +++ b/migrations/versions/022_webhooks_and_filters.py @@ -16,7 +16,7 @@ from typing import Never -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/023_tighten_nullable_constraints_on_.py b/migrations/versions/023_tighten_nullable_constraints_on_.py index d3bc55e58..136e0958e 100644 --- a/migrations/versions/023_tighten_nullable_constraints_on_.py +++ b/migrations/versions/023_tighten_nullable_constraints_on_.py @@ -14,13 +14,15 @@ revision = "4e04f752b7ad" down_revision = "2c313b6ddd9b" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) def upgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models.session import session_scope engine = main_engine(pool_size=1, max_overflow=0) @@ -28,12 +30,12 @@ def upgrade() -> None: Base = declarative_base() # noqa: N806 Base.metadata.reflect(engine) - class ImapUid(Base): + class ImapUid(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["imapuid"] print("Deleting imapuid objects with NULL message_id...") - with session_scope(versioned=False) as session: + with session_scope(versioned=False) as session: # type: ignore[call-arg] session.query(ImapUid).filter_by(message_id=None).delete() session.commit() diff --git a/migrations/versions/024_remote_folders_and_inbox_tags_split.py b/migrations/versions/024_remote_folders_and_inbox_tags_split.py index f3e7da760..73a2d8eff 100644 --- a/migrations/versions/024_remote_folders_and_inbox_tags_split.py +++ b/migrations/versions/024_remote_folders_and_inbox_tags_split.py @@ -13,11 +13,16 @@ from typing import Never -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql -from 
sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import backref, relationship +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) +from sqlalchemy.orm import ( # type: ignore[import-untyped] + backref, + relationship, +) CHUNK_SIZE = 250 @@ -137,7 +142,7 @@ def upgrade() -> None: "imapuid_ibfk_3", "imapuid", "folder", ["folder_id"], ["id"] ) - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models.session import session_scope engine = main_engine(pool_size=1, max_overflow=0) @@ -177,17 +182,17 @@ def upgrade() -> None: Base = declarative_base() # noqa: N806 Base.metadata.reflect(engine) - class Folder(Base): + class Folder(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["folder"] account = relationship( "Account", foreign_keys="Folder.account_id", backref="folders" ) - class FolderItem(Base): + class FolderItem(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["folderitem"] folder = relationship("Folder", backref="threads", lazy="joined") - class Thread(Base): + class Thread(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["thread"] folderitems = relationship( "FolderItem", @@ -197,13 +202,13 @@ class Thread(Base): ) namespace = relationship("Namespace", backref="threads") - class Namespace(Base): + class Namespace(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["namespace"] account = relationship( "Account", backref=backref("namespace", uselist=False) ) - class Account(Base): + class Account(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["account"] inbox_folder = relationship( "Folder", foreign_keys="Account.inbox_folder_id" @@ -230,13 +235,13 @@ class Account(Base): "Folder", foreign_keys="Account.all_folder_id" ) - class ImapUid(Base): + class ImapUid(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["imapuid"] folder = relationship("Folder", backref="imapuids", lazy="joined") if easupdate: - class EASUid(Base): + class EASUid(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["easuid"] folder = relationship( "Folder", @@ -247,7 +252,9 @@ class EASUid(Base): print("Creating Folder rows and migrating FolderItems...") # not many folders per account, so shouldn't grow that big - with session_scope(versioned=False) as db_session: + with session_scope( # type: ignore[call-arg] + versioned=False + ) as db_session: folders = dict( [ ((i.account_id, i.name), i) @@ -272,7 +279,10 @@ class EASUid(Base): elif folderitem.thread.namespace.account.provider == "eas": new_folder_name = folderitem.folder_name.title() - if (account_id, new_folder_name) in folders: + if ( + account_id, + new_folder_name, # type: ignore[possibly-undefined] + ) in folders: f = folders[(account_id, new_folder_name)] else: f = Folder(account_id=account_id, name=new_folder_name) @@ -306,7 +316,9 @@ class EASUid(Base): if easupdate: print("Migrating EASUids to reference Folder rows...") - for easuid in db_session.query(EASUid).yield_per(CHUNK_SIZE): + for easuid in db_session.query( + EASUid # type: ignore[possibly-undefined] + ).yield_per(CHUNK_SIZE): account_id = easuid.easaccount_id new_folder_name = easuid.folder_name diff --git a/migrations/versions/026_add_audit_timestamps_to_all_objects.py 
b/migrations/versions/026_add_audit_timestamps_to_all_objects.py index 469345491..18c0a1c55 100644 --- a/migrations/versions/026_add_audit_timestamps_to_all_objects.py +++ b/migrations/versions/026_add_audit_timestamps_to_all_objects.py @@ -13,10 +13,12 @@ from datetime import datetime -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.sql import column, table +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) +from sqlalchemy.sql import column, table # type: ignore[import-untyped] table_names = { "account", @@ -41,7 +43,7 @@ def add_eas_tables() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 diff --git a/migrations/versions/027_imapuid_soft_deletes.py b/migrations/versions/027_imapuid_soft_deletes.py index 4d6877249..8af5a386c 100644 --- a/migrations/versions/027_imapuid_soft_deletes.py +++ b/migrations/versions/027_imapuid_soft_deletes.py @@ -14,9 +14,9 @@ revision = "924ffd092832" down_revision = "146b1817e4a8" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.sql import column, table +from sqlalchemy.sql import column, table # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/028_tag_api_migration.py b/migrations/versions/028_tag_api_migration.py index 09b11e0c3..07af51fc9 100644 --- a/migrations/versions/028_tag_api_migration.py +++ b/migrations/versions/028_tag_api_migration.py @@ -15,22 +15,27 @@ from datetime import datetime from typing import Never -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import relationship, sessionmaker -from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) +from sqlalchemy.orm import ( # type: ignore[import-untyped] + relationship, + sessionmaker, +) +from sqlalchemy.orm.exc import NoResultFound # type: ignore[import-untyped] def upgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) Session = sessionmaker(bind=engine) # noqa: N806 @contextmanager - def basic_session(): + def basic_session(): # type: ignore[no-untyped-def] # Using the new_session is kind of a pain in this migration, so let's # just roll with a normal sqlalchemy session. 
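
Aside: `basic_session` above is the standard `@contextmanager` session factory; code before the `yield` runs on entry to the `with` block and code after it on exit. A sketch of the usual shape, assuming the `Session = sessionmaker(bind=engine)` defined just above (the commit/cleanup details are illustrative, not quoted from the patch):

    from contextlib import contextmanager

    @contextmanager
    def basic_session():
        session = Session(autoflush=True, autocommit=False)
        try:
            yield session
        finally:
            session.close()  # illustrative cleanup on exit
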
session = Session(autoflush=True, autocommit=False) @@ -79,17 +84,17 @@ def basic_session(): Base = declarative_base() # noqa: N806 Base.metadata.reflect(engine) - class Folder(Base): + class Folder(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["folder"] account = relationship( "Account", foreign_keys="Folder.account_id", backref="folders" ) - class FolderItem(Base): + class FolderItem(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["folderitem"] folder = relationship("Folder", backref="threads", lazy="joined") - class Account(Base): + class Account(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["account"] print("setting provider_prefix for current accounts") diff --git a/migrations/versions/029_set_inbox_folder_exposed_name.py b/migrations/versions/029_set_inbox_folder_exposed_name.py index bba8014d8..ac543aa41 100644 --- a/migrations/versions/029_set_inbox_folder_exposed_name.py +++ b/migrations/versions/029_set_inbox_folder_exposed_name.py @@ -11,21 +11,25 @@ revision = "52a9a976a2e0" down_revision = "40629415951c" -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) def upgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models.session import session_scope engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 Base.metadata.reflect(engine) - class Folder(Base): + class Folder(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["folder"] - with session_scope(versioned=False) as db_session: + with session_scope( # type: ignore[call-arg] + versioned=False + ) as db_session: for folder in db_session.query(Folder).filter(Folder.name == "Inbox"): folder.public_id = "inbox" folder.exposed_name = "inbox" diff --git a/migrations/versions/030_add_is_read_attribute_to_messages.py b/migrations/versions/030_add_is_read_attribute_to_messages.py index d5f40b640..352111119 100644 --- a/migrations/versions/030_add_is_read_attribute_to_messages.py +++ b/migrations/versions/030_add_is_read_attribute_to_messages.py @@ -11,11 +11,16 @@ revision = "1b6ceae51b43" down_revision = "52a9a976a2e0" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import backref, relationship +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) +from sqlalchemy.orm import ( # type: ignore[import-untyped] + backref, + relationship, +) def upgrade() -> None: @@ -42,17 +47,17 @@ def upgrade() -> None: nullable=False, ) - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models.session import session_scope engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 Base.metadata.reflect(engine) - class Message(Base): + class Message(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["message"] - class ImapUid(Base): + class ImapUid(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["imapuid"] message = relationship( "Message", @@ -67,7 +72,9 @@ class ImapUid(Base): "Message.deleted_at == None)", ) - 
with session_scope(versioned=False) as db_session: + with session_scope( # type: ignore[call-arg] + versioned=False + ) as db_session: for uid in db_session.query(ImapUid).yield_per(500): if uid.is_seen: uid.message.is_read = True diff --git a/migrations/versions/031_add_indexes_to_timestamps.py b/migrations/versions/031_add_indexes_to_timestamps.py index 9ba04ee1f..18a39c433 100644 --- a/migrations/versions/031_add_indexes_to_timestamps.py +++ b/migrations/versions/031_add_indexes_to_timestamps.py @@ -12,11 +12,13 @@ down_revision = "1b6ceae51b43" from alembic import op -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) def upgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 @@ -352,7 +354,7 @@ def downgrade() -> None: op.drop_index("ix_folder_deleted_at", table_name="folder") op.drop_index("ix_folder_created_at", table_name="folder") - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 diff --git a/migrations/versions/032_tighten_easuid.py b/migrations/versions/032_tighten_easuid.py index 4c3a38a48..7f7375940 100644 --- a/migrations/versions/032_tighten_easuid.py +++ b/migrations/versions/032_tighten_easuid.py @@ -11,13 +11,15 @@ revision = "3f96e92953e1" down_revision = "55f0ff54c776" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) def upgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 @@ -51,7 +53,7 @@ def upgrade() -> None: def downgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 diff --git a/migrations/versions/033_add_more_indexes.py b/migrations/versions/033_add_more_indexes.py index 57e1b45a4..bf68fe55c 100644 --- a/migrations/versions/033_add_more_indexes.py +++ b/migrations/versions/033_add_more_indexes.py @@ -12,11 +12,13 @@ down_revision = "3f96e92953e1" from alembic import op -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) def upgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 @@ -37,7 +39,7 @@ def upgrade() -> None: def downgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 diff --git a/migrations/versions/035_add_columns_for_drafts_support_to_.py b/migrations/versions/035_add_columns_for_drafts_support_to_.py index e9571a745..e47ea9039 100644 --- 
a/migrations/versions/035_add_columns_for_drafts_support_to_.py +++ b/migrations/versions/035_add_columns_for_drafts_support_to_.py @@ -11,9 +11,9 @@ revision = "24e085e152c0" down_revision = "350a08df27ee" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/036_replace_usertag_by_generic_tag.py b/migrations/versions/036_replace_usertag_by_generic_tag.py index ac7c6a76a..e2ccfa391 100644 --- a/migrations/versions/036_replace_usertag_by_generic_tag.py +++ b/migrations/versions/036_replace_usertag_by_generic_tag.py @@ -13,7 +13,7 @@ from typing import Never -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op @@ -87,10 +87,16 @@ def upgrade() -> None: # of this commit. However, the alternative is to have a crazy long, # involved and error-prone recreation of the models and their behavior # here. (I tried it, and decided this way was better.) - from inbox.models import FolderItem, Namespace, Tag + from inbox.models import ( # type: ignore[attr-defined] + FolderItem, + Namespace, + Tag, + ) from inbox.models.session import session_scope - with session_scope(versioned=False) as db_session: + with session_scope( # type: ignore[call-arg] + versioned=False + ) as db_session: # create canonical tags that don't already exist. CANONICAL_TAG_NAMES = [ # noqa: N806 "inbox", diff --git a/migrations/versions/037_shorten_addresses.py b/migrations/versions/037_shorten_addresses.py index 98854faa2..734106c37 100644 --- a/migrations/versions/037_shorten_addresses.py +++ b/migrations/versions/037_shorten_addresses.py @@ -12,7 +12,7 @@ down_revision = "21878b1b3d4b" from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/038_add_public_ids_to_transactions.py b/migrations/versions/038_add_public_ids_to_transactions.py index cc19a11f7..5bf7e3798 100644 --- a/migrations/versions/038_add_public_ids_to_transactions.py +++ b/migrations/versions/038_add_public_ids_to_transactions.py @@ -14,10 +14,12 @@ import sys from gc import collect as garbage_collect -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) def upgrade() -> None: @@ -33,7 +35,7 @@ def upgrade() -> None: ) # TODO(emfree) reflect - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models.session import session_scope from inbox.sqlalchemy_ext.util import b36_to_bin, generate_public_id @@ -41,10 +43,12 @@ def upgrade() -> None: Base = declarative_base() # noqa: N806 Base.metadata.reflect(engine) - class Transaction(Base): + class Transaction(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["transaction"] - with session_scope(versioned=False) as db_session: + with session_scope( # type: ignore[call-arg] + versioned=False + ) as db_session: count = 0 (num_transactions,) = db_session.query( sa.func.max(Transaction.id) diff --git 
a/migrations/versions/039_change_easfoldersync_unique_constraint.py b/migrations/versions/039_change_easfoldersync_unique_constraint.py index d405d8594..ba824b502 100644 --- a/migrations/versions/039_change_easfoldersync_unique_constraint.py +++ b/migrations/versions/039_change_easfoldersync_unique_constraint.py @@ -12,11 +12,13 @@ down_revision = "1edbd63582c2" from alembic import op -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) def upgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 @@ -32,7 +34,7 @@ def upgrade() -> None: def downgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 diff --git a/migrations/versions/040_gmailaccount.py b/migrations/versions/040_gmailaccount.py index 26ea023f7..0fe72938a 100644 --- a/migrations/versions/040_gmailaccount.py +++ b/migrations/versions/040_gmailaccount.py @@ -13,7 +13,7 @@ from typing import Never -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op @@ -46,9 +46,11 @@ def upgrade() -> None: sa.PrimaryKeyConstraint("id"), ) - from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, + ) - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) from inbox.models.session import session_scope @@ -56,16 +58,18 @@ def upgrade() -> None: Base = declarative_base() # noqa: N806 Base.metadata.reflect(engine) - class Account(Base): + class Account(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["account"] - class ImapAccount(Base): + class ImapAccount(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["imapaccount"] - class GmailAccount(Base): + class GmailAccount(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["gmailaccount"] - with session_scope(versioned=False) as db_session: + with session_scope( # type: ignore[call-arg] + versioned=False + ) as db_session: for acct in db_session.query(Account): if acct.provider == "Gmail": imap_acct = ( diff --git a/migrations/versions/041_add_sync_status_columns_to_foldersync.py b/migrations/versions/041_add_sync_status_columns_to_foldersync.py index 3995f848a..8570559f9 100644 --- a/migrations/versions/041_add_sync_status_columns_to_foldersync.py +++ b/migrations/versions/041_add_sync_status_columns_to_foldersync.py @@ -11,13 +11,15 @@ revision = "159609404baf" down_revision = "4085dd542739" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) def upgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) from inbox.sqlalchemy_ext.util import JSON, MutableDict @@ -42,7 +44,7 @@ def upgrade() -> None: def downgrade() -> None: - from inbox.ignition import 
main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 diff --git a/migrations/versions/042_simplify_tags_schema.py b/migrations/versions/042_simplify_tags_schema.py index f52f1bae1..b0d01bb7b 100644 --- a/migrations/versions/042_simplify_tags_schema.py +++ b/migrations/versions/042_simplify_tags_schema.py @@ -11,9 +11,9 @@ revision = "459dbc29648" down_revision = "159609404baf" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/043_columns_for_sync_running_stopped_killed.py b/migrations/versions/043_columns_for_sync_running_stopped_killed.py index f93364564..c0aff25da 100644 --- a/migrations/versions/043_columns_for_sync_running_stopped_killed.py +++ b/migrations/versions/043_columns_for_sync_running_stopped_killed.py @@ -11,7 +11,7 @@ revision = "5a136610b50b" down_revision = "459dbc29648" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/044_update_drafts_schema.py b/migrations/versions/044_update_drafts_schema.py index ffb728b92..26ae4f629 100644 --- a/migrations/versions/044_update_drafts_schema.py +++ b/migrations/versions/044_update_drafts_schema.py @@ -11,9 +11,9 @@ revision = "247cd689758c" down_revision = "5a136610b50b" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/045_new_password_storage.py b/migrations/versions/045_new_password_storage.py index 79004fa34..29c50e762 100644 --- a/migrations/versions/045_new_password_storage.py +++ b/migrations/versions/045_new_password_storage.py @@ -14,7 +14,7 @@ import os from typing import Never -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op # We're deleting this value from the config, so need to explicitly give it for @@ -26,15 +26,15 @@ # Copied from deprecated inbox.util.cryptography module. # Needed to port passwords to new storage method. -def decrypt_aes(ciphertext, key): # noqa: ANN201 +def decrypt_aes(ciphertext, key): # type: ignore[no-untyped-def] # noqa: ANN201 """ Decrypts a ciphertext that was AES-encrypted with the given key. The function expects the ciphertext as a byte string and it returns the decrypted message as a byte string. 
""" - from Crypto.Cipher import AES + from Crypto.Cipher import AES # type: ignore[import-not-found] - def unpad(s): + def unpad(s): # type: ignore[no-untyped-def] return s[: -ord(s[-1])] iv = ciphertext[: AES.block_size] @@ -44,13 +44,13 @@ def unpad(s): def upgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models.session import session_scope engine = main_engine(pool_size=1, max_overflow=0) from hashlib import sha256 - from inbox.util.file import mkdirp + from inbox.util.file import mkdirp # type: ignore[attr-defined] OriginalBase = sa.ext.declarative.declarative_base() # noqa: N806 OriginalBase.metadata.reflect(engine) @@ -62,14 +62,16 @@ def upgrade() -> None: Base = sa.ext.declarative.declarative_base() # noqa: N806 Base.metadata.reflect(engine) - class Account(Base): + class Account(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["account"] class EASAccount(Account): __table__ = Base.metadata.tables["easaccount"] @property - def _keyfile(self, create_dir: bool = True): # noqa: PLR0206 + def _keyfile( # type: ignore[no-untyped-def] # noqa: PLR0206 + self, create_dir: bool = True + ): assert self.key assert KEY_DIR @@ -78,7 +80,7 @@ def _keyfile(self, create_dir: bool = True): # noqa: PLR0206 key_filename = f"{sha256(self.key).hexdigest()}" return os.path.join(KEY_DIR, key_filename) # noqa: PTH118 - def get_old_password(self): + def get_old_password(self): # type: ignore[no-untyped-def] if self.password_aes is not None: with open(self._keyfile) as f: # noqa: PTH123 key = f.read() @@ -87,7 +89,7 @@ def get_old_password(self): return decrypt_aes(self.password_aes, key) return None - with session_scope() as db_session: + with session_scope() as db_session: # type: ignore[call-arg] for account in db_session.query(EASAccount): account.password = account.get_old_password() db_session.add(account) diff --git a/migrations/versions/046_yahoo.py b/migrations/versions/046_yahoo.py index 60b42a334..ba8026d36 100644 --- a/migrations/versions/046_yahoo.py +++ b/migrations/versions/046_yahoo.py @@ -11,7 +11,7 @@ revision = "38d78543f8be" down_revision = "7a117720554" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/047_store_more_on_threads.py b/migrations/versions/047_store_more_on_threads.py index e27cd1a13..ebd05b71f 100644 --- a/migrations/versions/047_store_more_on_threads.py +++ b/migrations/versions/047_store_more_on_threads.py @@ -13,7 +13,7 @@ import itertools -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op @@ -31,7 +31,9 @@ def upgrade() -> None: from inbox.models import Thread from inbox.models.session import session_scope - with session_scope(versioned=False) as db_session: + with session_scope( # type: ignore[call-arg] + versioned=False + ) as db_session: (num_threads,) = db_session.query(sa.func.max(Thread.id)).one() if num_threads is None: # There aren't actually any threads to update. 
diff --git a/migrations/versions/048_remove_storage_of_access_token.py b/migrations/versions/048_remove_storage_of_access_token.py index c40aff849..3ea2e6fdc 100644 --- a/migrations/versions/048_remove_storage_of_access_token.py +++ b/migrations/versions/048_remove_storage_of_access_token.py @@ -11,7 +11,7 @@ revision = "4e44216e9830" down_revision = "161b88c17615" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/050_imap_table_cleanups.py b/migrations/versions/050_imap_table_cleanups.py index 86b1ed688..ed072558c 100644 --- a/migrations/versions/050_imap_table_cleanups.py +++ b/migrations/versions/050_imap_table_cleanups.py @@ -13,12 +13,12 @@ from typing import Never -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op def upgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models.folder import Folder from inbox.models.session import session_scope from inbox.sqlalchemy_ext.util import JSON @@ -112,16 +112,18 @@ def upgrade() -> None: ) Base.metadata.reflect(engine) - class EASFolderSyncStatus(Base): + class EASFolderSyncStatus(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["easfoldersyncstatus"] - class ImapFolderSyncStatus(Base): + class ImapFolderSyncStatus(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["imapfoldersyncstatus"] - class ImapFolderInfo(Base): + class ImapFolderInfo(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["imapfolderinfo"] - with session_scope(versioned=False) as db_session: + with session_scope( # type: ignore[call-arg] + versioned=False + ) as db_session: folder_id_for = dict( [ ((account_id, name.lower()), id_) @@ -137,7 +139,9 @@ class ImapFolderInfo(Base): ] db_session.commit() if "easfoldersyncstatus" in Base.metadata.tables: - for status in db_session.query(EASFolderSyncStatus): + for status in db_session.query( + EASFolderSyncStatus # type: ignore[possibly-undefined] + ): print("migrating", status.folder_name) folder_id = folder_id_for.get( (status.account_id, status.folder_name.lower()) @@ -146,7 +150,7 @@ class ImapFolderInfo(Base): status.folder_id = folder_id else: # EAS folder rows *may* not exist if have no messages - folder = Folder( + folder = Folder( # type: ignore[call-arg] account_id=status.account_id, name=status.folder_name ) db_session.add(folder) @@ -206,7 +210,9 @@ class ImapFolderInfo(Base): "account_id", "imapfoldersyncstatus", ["account_id", "folder_id"] ) - with session_scope(versioned=False) as db_session: + with session_scope( # type: ignore[call-arg] + versioned=False + ) as db_session: for info in db_session.query(ImapFolderInfo): print("migrating", info.folder_name) info.folder_id = folder_id_for[ diff --git a/migrations/versions/051_store_secrets_in_local_vault.py b/migrations/versions/051_store_secrets_in_local_vault.py index 3d362b18f..17c4bea77 100644 --- a/migrations/versions/051_store_secrets_in_local_vault.py +++ b/migrations/versions/051_store_secrets_in_local_vault.py @@ -13,14 +13,16 @@ from datetime import datetime -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op def upgrade() -> None: - from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, + ) - from inbox.ignition import 
main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
@@ -43,19 +45,21 @@ def upgrade() -> None:
     Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class Account(Base):
+    class Account(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["account"]

-    class ImapAccount(Base):
+    class ImapAccount(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["imapaccount"]

-    class GmailAccount(Base):
+    class GmailAccount(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["gmailaccount"]

-    class Secret(Base):
+    class Secret(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["secret"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         for acct in db_session.query(GmailAccount):
             secret = Secret(
                 acl_id=0,
@@ -92,23 +96,23 @@ class Secret(Base):
 def downgrade() -> None:
     from sqlalchemy.ext.declarative import declarative_base

-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
     Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class Account(Base):
+    class Account(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["account"]

-    class ImapAccount(Base):
+    class ImapAccount(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["imapaccount"]

-    class GmailAccount(Base):
+    class GmailAccount(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["gmailaccount"]

-    class Secret(Base):
+    class Secret(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["secret"]

     op.add_column(
@@ -116,7 +120,9 @@ class Secret(Base):
         sa.Column("refresh_token", sa.String(length=512), nullable=True),
     )

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         for acct in db_session.query(GmailAccount):
             secret = (
                 db_session.query(Secret)
diff --git a/migrations/versions/052_store_google_client_id_and_secret_on_.py b/migrations/versions/052_store_google_client_id_and_secret_on_.py
index 394e66a19..de8c3b40b 100644
--- a/migrations/versions/052_store_google_client_id_and_secret_on_.py
+++ b/migrations/versions/052_store_google_client_id_and_secret_on_.py
@@ -11,7 +11,7 @@
 revision = "358d0320397f"
 down_revision = "1925c535a52d"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/053_canonicalize_addresses.py b/migrations/versions/053_canonicalize_addresses.py
index f6c02a00c..251650f5a 100644
--- a/migrations/versions/053_canonicalize_addresses.py
+++ b/migrations/versions/053_canonicalize_addresses.py
@@ -11,9 +11,9 @@
 revision = "3795b2a97af1"
 down_revision = "358d0320397f"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.dialects import mysql
+from sqlalchemy.dialects import mysql  # type: ignore[import-untyped]


 def upgrade() -> None:
@@ -57,19 +57,21 @@ def upgrade() -> None:
         "ix_contact__raw_address", "contact", ["_raw_address"], unique=False
     )

-    from flanker.addresslib import address
+    from flanker.addresslib import address  # type: ignore[import-untyped]

-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine(pool_size=1, max_overflow=0)
-    from sqlalchemy.ext.declarative import declarative_base
+    from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+        declarative_base,
+    )

     from inbox.models.session import session_scope

     Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    def canonicalize_address(addr):
+    def canonicalize_address(addr):  # type: ignore[no-untyped-def]
         """Gmail addresses with and without periods are the same."""
         parsed_address = address.parse(addr, addr_spec_only=True)
         if not isinstance(parsed_address, address.EmailAddress):
@@ -79,13 +81,15 @@ def canonicalize_address(addr):
             local_part = local_part.replace(".", "")
         return "@".join((local_part, parsed_address.hostname))

-    class Account(Base):
+    class Account(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["account"]

-    class Contact(Base):
+    class Contact(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["contact"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         for acct in db_session.query(Account):
             acct._raw_address = acct.email_address
             acct._canonicalized_address = canonicalize_address(
@@ -122,7 +126,7 @@ def downgrade() -> None:
     op.create_index(
         "ix_contact_email_address", "contact", ["email_address"], unique=False
     )
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine(pool_size=1, max_overflow=0)
     from sqlalchemy.ext.declarative import declarative_base
@@ -132,13 +136,15 @@ def downgrade() -> None:
     Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class Account(Base):
+    class Account(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["account"]

-    class Contact(Base):
+    class Contact(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["contact"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         for acct in db_session.query(Account):
             acct.email_address = acct._raw_address
         db_session.commit()
diff --git a/migrations/versions/054_dont_specially_store_mailing_list_.py b/migrations/versions/054_dont_specially_store_mailing_list_.py
index 503ba424c..93c4c843f 100644
--- a/migrations/versions/054_dont_specially_store_mailing_list_.py
+++ b/migrations/versions/054_dont_specially_store_mailing_list_.py
@@ -11,9 +11,9 @@
 revision = "5143154fb1a2"
 down_revision = "3795b2a97af1"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.dialects import mysql
+from sqlalchemy.dialects import mysql  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/055_add_account_liveness.py b/migrations/versions/055_add_account_liveness.py
index fdf111fc7..da93dbc7d 100644
--- a/migrations/versions/055_add_account_liveness.py
+++ b/migrations/versions/055_add_account_liveness.py
@@ -11,7 +11,7 @@
 revision = "4b4674f1a726"
 down_revision = "5143154fb1a2"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/056_message_unique_constraint.py b/migrations/versions/056_message_unique_constraint.py
index f44d261c3..ec2b8a14f 100644
--- a/migrations/versions/056_message_unique_constraint.py
+++ b/migrations/versions/056_message_unique_constraint.py
@@ -12,7 +12,7 @@
 down_revision = "4b4674f1a726"

 from alembic import op
-from sqlalchemy import func
+from sqlalchemy import func  # type: ignore[import-untyped]


 def upgrade() -> None:
@@ -54,7 +54,9 @@ def upgrade() -> None:
     from inbox.models import Message
     from inbox.models.session import session_scope

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         groups = (
             db_session.query(Message.id, Message.thread_id, Message.g_msgid)
             .filter(~Message.g_msgid.is_(None))
diff --git a/migrations/versions/057_consolidate_account_sync_status_columns.py b/migrations/versions/057_consolidate_account_sync_status_columns.py
index 5170c682a..026658add 100644
--- a/migrations/versions/057_consolidate_account_sync_status_columns.py
+++ b/migrations/versions/057_consolidate_account_sync_status_columns.py
@@ -13,18 +13,20 @@
 from typing import Never

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op

 from inbox.sqlalchemy_ext import json_util


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]
     from inbox.sqlalchemy_ext.util import JSON, MutableDict

     engine = main_engine(pool_size=1, max_overflow=0)
-    from sqlalchemy.ext.declarative import declarative_base
+    from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+        declarative_base,
+    )

     from inbox.models.session import session_scope

@@ -41,10 +43,12 @@ def upgrade() -> None:
     Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class Account(Base):
+    class Account(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["account"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         for acct in db_session.query(Account):
             d = dict(
                 sync_start_time=str(acct.sync_start_time),
diff --git a/migrations/versions/058_enforce_length_limit_of_255_on_message_.py b/migrations/versions/058_enforce_length_limit_of_255_on_message_.py
index e906f3959..ae4f83abd 100644
--- a/migrations/versions/058_enforce_length_limit_of_255_on_message_.py
+++ b/migrations/versions/058_enforce_length_limit_of_255_on_message_.py
@@ -13,11 +13,11 @@
 from typing import Never

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op


-def truncate_subject(obj) -> None:
+def truncate_subject(obj) -> None:  # type: ignore[no-untyped-def]
     if obj.subject is None:
         return
     if len(obj.subject) > 255:
@@ -26,23 +26,27 @@ def truncate_subject(obj) -> None:


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
-    from sqlalchemy.ext.declarative import declarative_base
+    from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+        declarative_base,
+    )

     Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class Message(Base):
+    class Message(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["message"]

-    class Thread(Base):
+    class Thread(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["thread"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         count = 0
         for msg in (
             db_session.query(Message)
diff --git a/migrations/versions/059_add_action_log.py b/migrations/versions/059_add_action_log.py
index d126eba37..513f11646 100644
--- a/migrations/versions/059_add_action_log.py
+++ b/migrations/versions/059_add_action_log.py
@@ -11,7 +11,7 @@
 revision = "15dfc756a1b0"
 down_revision = "4af5952e8a5b"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/060_cascade_folder_deletes_to_easuid.py b/migrations/versions/060_cascade_folder_deletes_to_easuid.py
index 36f98804b..54f6fe1d5 100644
--- a/migrations/versions/060_cascade_folder_deletes_to_easuid.py
+++ b/migrations/versions/060_cascade_folder_deletes_to_easuid.py
@@ -15,10 +15,12 @@


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine(pool_size=1, max_overflow=0)
-    from sqlalchemy.ext.declarative import declarative_base
+    from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+        declarative_base,
+    )

     Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)
diff --git a/migrations/versions/061_remove_easfoldersyncstatus_folder_rows_.py b/migrations/versions/061_remove_easfoldersyncstatus_folder_rows_.py
index 628677bb8..968119815 100644
--- a/migrations/versions/061_remove_easfoldersyncstatus_folder_rows_.py
+++ b/migrations/versions/061_remove_easfoldersyncstatus_folder_rows_.py
@@ -15,22 +15,35 @@


 def upgrade() -> None:
-    if "easfoldersyncstatus" in Base.metadata.tables:  # noqa: F821
-        from inbox.ignition import main_engine
+    if (
+        "easfoldersyncstatus"
+        in Base.metadata.tables  # type: ignore[has-type, used-before-def] # noqa: F821
+    ):
+        from inbox.ignition import main_engine  # type: ignore[attr-defined]

         engine = main_engine(pool_size=1, max_overflow=0)
-        from sqlalchemy.ext.declarative import declarative_base
-        from sqlalchemy.orm.exc import NoResultFound
+        from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+            declarative_base,
+        )
+        from sqlalchemy.orm.exc import (  # type: ignore[import-untyped]
+            NoResultFound,
+        )

         from inbox.models.session import session_scope

         Base = declarative_base()  # noqa: N806
         Base.metadata.reflect(engine)

         from inbox.models import Folder
-        from inbox.models.backends.eas import EASFolderSyncStatus
-        from inbox.util.eas.constants import SKIP_FOLDERS
+        from inbox.models.backends.eas import (  # type: ignore[import-not-found]
+            EASFolderSyncStatus,
+        )
+        from inbox.util.eas.constants import (  # type: ignore[import-not-found]
+            SKIP_FOLDERS,
+        )

-        with session_scope(versioned=False) as db_session:
+        with session_scope(  # type: ignore[call-arg]
+            versioned=False
+        ) as db_session:
             statuses = (
                 db_session.query(EASFolderSyncStatus)
                 .filter(EASFolderSyncStatus.eas_folder_type.in_(SKIP_FOLDERS))
diff --git a/migrations/versions/062_up_max_length_of_message_message_id_header.py b/migrations/versions/062_up_max_length_of_message_message_id_header.py
index da9c98aee..efcc31c49 100644
--- a/migrations/versions/062_up_max_length_of_message_message_id_header.py
+++ b/migrations/versions/062_up_max_length_of_message_message_id_header.py
@@ -15,7 +15,7 @@
 revision = "4c03aaa1fa47"
 down_revision = "bb4f204f192"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/064_make_address_fields_non_null.py b/migrations/versions/064_make_address_fields_non_null.py
index 5f8633c3e..128531642 100644
--- a/migrations/versions/064_make_address_fields_non_null.py
+++ b/migrations/versions/064_make_address_fields_non_null.py
@@ -12,25 +12,29 @@
 down_revision = "4fd3fcd46a3b"

 from alembic import op
-from sqlalchemy import func, or_
-from sqlalchemy.dialects import mysql
+from sqlalchemy import func, or_  # type: ignore[import-untyped]
+from sqlalchemy.dialects import mysql  # type: ignore[import-untyped]


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine(pool_size=1, max_overflow=0)
-    from sqlalchemy.ext.declarative import declarative_base
+    from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+        declarative_base,
+    )

     from inbox.models.session import session_scope

     Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class Message(Base):
+    class Message(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["message"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         null_field_count = (
             db_session.query(func.count(Message.id))
             .filter(
diff --git a/migrations/versions/066_kill_spoolmessage.py b/migrations/versions/066_kill_spoolmessage.py
index 94c342786..d18e23229 100644
--- a/migrations/versions/066_kill_spoolmessage.py
+++ b/migrations/versions/066_kill_spoolmessage.py
@@ -13,14 +13,16 @@
 from typing import Never

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op


 def upgrade() -> None:
-    from sqlalchemy.ext.declarative import declarative_base
+    from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+        declarative_base,
+    )

-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
@@ -67,13 +69,15 @@ def upgrade() -> None:
     Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class Message(Base):
+    class Message(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["message"]

-    class SpoolMessage(Base):
+    class SpoolMessage(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["spoolmessage"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         for sm in db_session.query(SpoolMessage).yield_per(250):
             m = db_session.query(Message).get(sm.id)
diff --git a/migrations/versions/067_add_executed_status_to_action_log.py b/migrations/versions/067_add_executed_status_to_action_log.py
index 9b68fa787..740db5348 100644
--- a/migrations/versions/067_add_executed_status_to_action_log.py
+++ b/migrations/versions/067_add_executed_status_to_action_log.py
@@ -11,7 +11,7 @@
 revision = "322c2800c401"
 down_revision = "4f3a1f6eaee3"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/068_outlook.py b/migrations/versions/068_outlook.py
index 80b23ae07..ba50d1a1e 100644
--- a/migrations/versions/068_outlook.py
+++ b/migrations/versions/068_outlook.py
@@ -11,7 +11,7 @@
 revision = "1ceff61ec112"
 down_revision = "322c2800c401"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/069_aol.py b/migrations/versions/069_aol.py
index dd7fee88c..55e80d4e7 100644
--- a/migrations/versions/069_aol.py
+++ b/migrations/versions/069_aol.py
@@ -11,7 +11,7 @@
 revision = "479b3b84a73e"
 down_revision = "1ceff61ec112"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/070_fix_folder_easfoldersyncstatus_unique_constraints.py b/migrations/versions/070_fix_folder_easfoldersyncstatus_unique_constraints.py
index 1cdf795da..800501398 100644
--- a/migrations/versions/070_fix_folder_easfoldersyncstatus_unique_constraints.py
+++ b/migrations/versions/070_fix_folder_easfoldersyncstatus_unique_constraints.py
@@ -13,12 +13,12 @@
 from typing import Never

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine(pool_size=1, max_overflow=0)
diff --git a/migrations/versions/071_more_sync_states.py b/migrations/versions/071_more_sync_states.py
index bc2ceee4a..fc841383e 100644
--- a/migrations/versions/071_more_sync_states.py
+++ b/migrations/versions/071_more_sync_states.py
@@ -11,7 +11,7 @@
 revision = "3bb5d61c895c"
 down_revision = "2525c5245cc2"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/072_recompute_snippets.py b/migrations/versions/072_recompute_snippets.py
index f07beabad..4b3203560 100644
--- a/migrations/versions/072_recompute_snippets.py
+++ b/migrations/versions/072_recompute_snippets.py
@@ -11,11 +11,13 @@
 revision = "4e93522b5b62"
 down_revision = "3bb5d61c895c"

-from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+    declarative_base,
+)


 # solution from http://stackoverflow.com/a/1217947
-def page_query(q):  # noqa: ANN201
+def page_query(q):  # type: ignore[no-untyped-def] # noqa: ANN201
     CHUNK_SIZE = 1000  # noqa: N806
     offset = 0
     while True:
@@ -29,7 +31,7 @@ def page_query(q):  # noqa: ANN201


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]
     from inbox.models.session import session_scope
     from inbox.util.html import strip_tags

@@ -39,10 +41,12 @@ def upgrade() -> None:

     SNIPPET_LENGTH = 191  # noqa: N806

-    class Message(Base):
+    class Message(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["message"]

-    def calculate_html_snippet(msg, text) -> None:
+    def calculate_html_snippet(  # type: ignore[no-untyped-def]
+        msg, text
+    ) -> None:
         text = (
             text.replace("\n", " ")
             .replace("\r", " ")
@@ -51,10 +55,14 @@ def calculate_html_snippet(msg, text) -> None:
         text = strip_tags(text)
         calculate_plaintext_snippet(msg, text)

-    def calculate_plaintext_snippet(msg, text) -> None:
+    def calculate_plaintext_snippet(  # type: ignore[no-untyped-def]
+        msg, text
+    ) -> None:
         msg.snippet = " ".join(text.split())[:SNIPPET_LENGTH]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         for message in page_query(db_session.query(Message)):
             if not message.decode_error:
                 calculate_html_snippet(message, message.sanitized_body)
diff --git a/migrations/versions/073_generic_providers.py b/migrations/versions/073_generic_providers.py
index d8f040324..92f60a456 100644
--- a/migrations/versions/073_generic_providers.py
+++ b/migrations/versions/073_generic_providers.py
@@ -15,14 +15,16 @@

 from datetime import datetime

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op


 def upgrade() -> None:
-    from sqlalchemy.ext.declarative import declarative_base
+    from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+        declarative_base,
+    )

-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
@@ -40,25 +42,27 @@ def upgrade() -> None:
     Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class Account(Base):
+    class Account(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["account"]

-    class ImapAccount(Base):
+    class ImapAccount(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["imapaccount"]

-    class YahooAccount(Base):
+    class YahooAccount(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["yahooaccount"]

-    class AOLAccount(Base):
+    class AOLAccount(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["aolaccount"]

-    class GenericAccount(Base):
+    class GenericAccount(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["genericaccount"]

-    class Secret(Base):
+    class Secret(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["secret"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         for acct in db_session.query(YahooAccount):
             secret = Secret(
                 acl_id=0,
@@ -102,7 +106,7 @@ class Secret(Base):
 def downgrade() -> None:
     from sqlalchemy.ext.declarative import declarative_base

-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
@@ -129,25 +133,27 @@ def downgrade() -> None:
     Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class Account(Base):
+    class Account(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["account"]

-    class ImapAccount(Base):
+    class ImapAccount(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["imapaccount"]

-    class YahooAccount(Base):
+    class YahooAccount(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["yahooaccount"]

-    class AOLAccount(Base):
+    class AOLAccount(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["aolaccount"]

-    class GenericAccount(Base):
+    class GenericAccount(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["genericaccount"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         for acct in db_session.query(GenericAccount):
             secret = (
-                db_session.query(Secret)  # noqa: F821
+                db_session.query(Secret)  # type: ignore[name-defined] # noqa: F821
                 .filter_by(id=acct.password_id)
                 .one()
             )
diff --git a/migrations/versions/074_add_eas_thrid_index.py b/migrations/versions/074_add_eas_thrid_index.py
index 5d4a570b4..caa98cb4c 100644
--- a/migrations/versions/074_add_eas_thrid_index.py
+++ b/migrations/versions/074_add_eas_thrid_index.py
@@ -11,12 +11,12 @@
 revision = "3c02d8204335"
 down_revision = "43cd2de5ad85"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine()
     Base = sa.ext.declarative.declarative_base()  # noqa: N806
@@ -33,7 +33,7 @@ def upgrade() -> None:


 def downgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine()
     Base = sa.ext.declarative.declarative_base()  # noqa: N806
diff --git a/migrations/versions/076_add_thread_order_column.py b/migrations/versions/076_add_thread_order_column.py
index 9f0a59ed6..35daa0cdf 100644
--- a/migrations/versions/076_add_thread_order_column.py
+++ b/migrations/versions/076_add_thread_order_column.py
@@ -11,7 +11,7 @@
 revision = "3de3979f94bd"
 down_revision = "1763103db266"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/077_add_supports_condstore_column_to_.py b/migrations/versions/077_add_supports_condstore_column_to_.py
index eae14efd3..1a88a6f23 100644
--- a/migrations/versions/077_add_supports_condstore_column_to_.py
+++ b/migrations/versions/077_add_supports_condstore_column_to_.py
@@ -11,7 +11,7 @@
 revision = "3c74cbe7882e"
 down_revision = "3de3979f94bd"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/078_events.py b/migrations/versions/078_events.py
index 8de835c92..b9253ea91 100644
--- a/migrations/versions/078_events.py
+++ b/migrations/versions/078_events.py
@@ -11,7 +11,7 @@
 revision = "1c2253a0e997"
 down_revision = "3c74cbe7882e"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/079_events_longer_uids.py b/migrations/versions/079_events_longer_uids.py
index 70f50f1de..ce74c2bee 100644
--- a/migrations/versions/079_events_longer_uids.py
+++ b/migrations/versions/079_events_longer_uids.py
@@ -11,13 +11,15 @@
 revision = "5901bf556d83"
 down_revision = "1c2253a0e997"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+    declarative_base,
+)


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine(pool_size=1, max_overflow=0)
     Base = declarative_base()  # noqa: N806
diff --git a/migrations/versions/080_longer_event_summaries.py b/migrations/versions/080_longer_event_summaries.py
index 4608f0832..1d6c0c77c 100644
--- a/migrations/versions/080_longer_event_summaries.py
+++ b/migrations/versions/080_longer_event_summaries.py
@@ -11,7 +11,7 @@
 revision = "4e3e8abea884"
 down_revision = "5901bf556d83"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/081_move_imapfolder_highestmodseq_to_bigint.py b/migrations/versions/081_move_imapfolder_highestmodseq_to_bigint.py
index 9a3fe623d..555405b0d 100644
--- a/migrations/versions/081_move_imapfolder_highestmodseq_to_bigint.py
+++ b/migrations/versions/081_move_imapfolder_highestmodseq_to_bigint.py
@@ -11,7 +11,7 @@
 revision = "1bc2536b8bc6"
 down_revision = "4e3e8abea884"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/082_event_participants.py b/migrations/versions/082_event_participants.py
index fdf5fbf05..88bbc22f9 100644
--- a/migrations/versions/082_event_participants.py
+++ b/migrations/versions/082_event_participants.py
@@ -11,7 +11,7 @@
 revision = "1322d3787305"
 down_revision = "1bc2536b8bc6"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/083_calendars_event_owners.py b/migrations/versions/083_calendars_event_owners.py
index 144247225..1ef24e9ef 100644
--- a/migrations/versions/083_calendars_event_owners.py
+++ b/migrations/versions/083_calendars_event_owners.py
@@ -11,9 +11,9 @@
 revision = "10a1129fe685"
 down_revision = "1322d3787305"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.sql import table
+from sqlalchemy.sql import table  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/084_mutable_drafts.py b/migrations/versions/084_mutable_drafts.py
index 431877363..25b50a0b7 100644
--- a/migrations/versions/084_mutable_drafts.py
+++ b/migrations/versions/084_mutable_drafts.py
@@ -13,9 +13,9 @@
 from typing import Never

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/085_add_attachment_tag.py b/migrations/versions/085_add_attachment_tag.py
index e749431a2..0381f6d11 100644
--- a/migrations/versions/085_add_attachment_tag.py
+++ b/migrations/versions/085_add_attachment_tag.py
@@ -12,14 +12,14 @@
 down_revision = "10db12da2005"

 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
     from inbox.models import Namespace
     from inbox.models.session import session_scope

-    with session_scope() as db_session:
+    with session_scope() as db_session:  # type: ignore[call-arg]
         # Create the attachment tag
         print("creating canonical tags...")
         for ns in db_session.query(Namespace):
diff --git a/migrations/versions/086_event_date_times.py b/migrations/versions/086_event_date_times.py
index 5025081b9..da4fc2bf6 100644
--- a/migrations/versions/086_event_date_times.py
+++ b/migrations/versions/086_event_date_times.py
@@ -11,13 +11,15 @@
 revision = "1ac03cab7a24"
 down_revision = "294200d809c8"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+    declarative_base,
+)


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine(pool_size=1, max_overflow=0)
     Base = declarative_base()  # noqa: N806
diff --git a/migrations/versions/087_fix_account_foreign_keys.py b/migrations/versions/087_fix_account_foreign_keys.py
index 6e17649fb..b2d45f524 100644
--- a/migrations/versions/087_fix_account_foreign_keys.py
+++ b/migrations/versions/087_fix_account_foreign_keys.py
@@ -13,12 +13,12 @@
 from typing import Never

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine(pool_size=1, max_overflow=0)
     inspector = sa.inspect(engine)
diff --git a/migrations/versions/088_calendar_descriptions.py b/migrations/versions/088_calendar_descriptions.py
index f4bf4370d..f7dd0337c 100644
--- a/migrations/versions/088_calendar_descriptions.py
+++ b/migrations/versions/088_calendar_descriptions.py
@@ -11,14 +11,16 @@
 revision = "24e9afe91349"
 down_revision = "565c7325c51d"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op


 def upgrade() -> None:
-    from sqlalchemy.ext.declarative import declarative_base
+    from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+        declarative_base,
+    )

-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
@@ -63,13 +65,15 @@ def upgrade() -> None:
     Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class Calendar(Base):
+    class Calendar(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["calendar"]

-    class Event(Base):
+    class Event(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["event"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         for calendar in db_session.query(Calendar):
             if calendar.name and "-" in calendar.name:
                 provider_name, name = calendar.name.split("-")
diff --git a/migrations/versions/089_revert_encryption.py b/migrations/versions/089_revert_encryption.py
index 30663a982..3630cd587 100644
--- a/migrations/versions/089_revert_encryption.py
+++ b/migrations/versions/089_revert_encryption.py
@@ -12,7 +12,7 @@
 down_revision = "24e9afe91349"


-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op

@@ -39,7 +39,7 @@ def upgrade() -> None:
     import nacl.utils

     from inbox.config import config
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
@@ -48,10 +48,12 @@ def upgrade() -> None:

     key = config.get_required("SECRET_ENCRYPTION_KEY")

-    class Secret(Base):
+    class Secret(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["secret"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         secrets = (
             db_session.query(Secret)
             .filter(Secret.encryption_scheme == 1, Secret._secret.isnot(None))
diff --git a/migrations/versions/090_parts_block_ids.py b/migrations/versions/090_parts_block_ids.py
index 17376bf01..f12f1fab5 100644
--- a/migrations/versions/090_parts_block_ids.py
+++ b/migrations/versions/090_parts_block_ids.py
@@ -13,9 +13,9 @@
 from datetime import datetime

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/091_remove_webhooks.py b/migrations/versions/091_remove_webhooks.py
index b4d86698d..589b9a569 100644
--- a/migrations/versions/091_remove_webhooks.py
+++ b/migrations/versions/091_remove_webhooks.py
@@ -11,9 +11,9 @@
 revision = "4b07b67498e1"
 down_revision = "2b89164aa9cd"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.dialects import mysql
+from sqlalchemy.dialects import mysql  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/092_fix_outlookaccount_typo.py b/migrations/versions/092_fix_outlookaccount_typo.py
index 5ce2869da..51f1cda00 100644
--- a/migrations/versions/092_fix_outlookaccount_typo.py
+++ b/migrations/versions/092_fix_outlookaccount_typo.py
@@ -12,7 +12,7 @@
 down_revision = "4b07b67498e1"

 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/093_add_folder_identifier.py b/migrations/versions/093_add_folder_identifier.py
index c51f3eb63..902517372 100644
--- a/migrations/versions/093_add_folder_identifier.py
+++ b/migrations/versions/093_add_folder_identifier.py
@@ -14,7 +14,7 @@
 from typing import Never

 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/094_eas_passwords.py b/migrations/versions/094_eas_passwords.py
index 0153aab3c..3b6d1e064 100644
--- a/migrations/versions/094_eas_passwords.py
+++ b/migrations/versions/094_eas_passwords.py
@@ -13,12 +13,12 @@
 from datetime import datetime

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine(pool_size=1, max_overflow=0)
     # Do nothing if the affected table isn't present.
@@ -35,16 +35,18 @@ def upgrade() -> None:
     Base.metadata.reflect(engine)
     from inbox.models.session import session_scope

-    class EASAccount(Base):
+    class EASAccount(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["easaccount"]
         secret = sa.orm.relationship(
             "Secret", primaryjoin="EASAccount.password_id == Secret.id"
         )

-    class Secret(Base):
+    class Secret(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["secret"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         accounts = db_session.query(EASAccount).all()
         print("# EAS accounts: ", len(accounts))

@@ -67,7 +69,7 @@ class Secret(Base):


 def downgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine(pool_size=1, max_overflow=0)
     if not engine.has_table("easaccount"):
diff --git a/migrations/versions/095_secret_storage.py b/migrations/versions/095_secret_storage.py
index 20b5c4062..3808beee9 100644
--- a/migrations/versions/095_secret_storage.py
+++ b/migrations/versions/095_secret_storage.py
@@ -13,7 +13,7 @@
 from typing import Never

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/096_migrate_secret_data.py b/migrations/versions/096_migrate_secret_data.py
index 50905f255..90f1e4083 100644
--- a/migrations/versions/096_migrate_secret_data.py
+++ b/migrations/versions/096_migrate_secret_data.py
@@ -11,7 +11,7 @@
 revision = "38c29430efeb"
 down_revision = "1683790906cf"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]


 def upgrade() -> None:
@@ -19,20 +19,22 @@ def upgrade() -> None:
     import nacl.utils

     from inbox.config import config
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
     Base = sa.ext.declarative.declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class Secret(Base):
+    class Secret(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["secret"]

-    class GenericAccount(Base):
+    class GenericAccount(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["genericaccount"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         secrets = (
             db_session.query(Secret).filter(Secret.secret.isnot(None)).all()
         )
@@ -47,7 +49,7 @@ class GenericAccount(Base):
         password_secrets = [id_ for id_, in generic_query]
         if engine.has_table("easaccount"):

-            class EASAccount(Base):
+            class EASAccount(Base):  # type: ignore[misc, valid-type]
                 __table__ = Base.metadata.tables["easaccount"]

             eas_query = (
@@ -60,7 +62,7 @@ class EASAccount(Base):
         for s in secrets:
             plain = (
                 s.secret.encode("utf-8")
-                if isinstance(s.secret, unicode)  # noqa: F821
+                if isinstance(s.secret, unicode)  # type: ignore[name-defined] # noqa: F821
                 else s.secret
             )
             if config.get_required("ENCRYPT_SECRETS"):
diff --git a/migrations/versions/097_secrets_endgame.py b/migrations/versions/097_secrets_endgame.py
index 749a2d799..5f628a9ec 100644
--- a/migrations/versions/097_secrets_endgame.py
+++ b/migrations/versions/097_secrets_endgame.py
@@ -17,7 +17,7 @@


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine()
     if engine.has_table("easaccount"):
diff --git a/migrations/versions/098_add_throttling_support.py b/migrations/versions/098_add_throttling_support.py
index ad19eb76c..49f788523 100644
--- a/migrations/versions/098_add_throttling_support.py
+++ b/migrations/versions/098_add_throttling_support.py
@@ -11,7 +11,7 @@
 revision = "40b533a6f3e1"
 down_revision = "248ec24a39f"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/099_add_namespace_id_to_message.py b/migrations/versions/099_add_namespace_id_to_message.py
index 60e35860b..f114abc31 100644
--- a/migrations/versions/099_add_namespace_id_to_message.py
+++ b/migrations/versions/099_add_namespace_id_to_message.py
@@ -12,7 +12,7 @@
 down_revision = "40b533a6f3e1"

 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/100_make_message_namespace_id_nonnull.py b/migrations/versions/100_make_message_namespace_id_nonnull.py
index ef8a29237..40cab257d 100644
--- a/migrations/versions/100_make_message_namespace_id_nonnull.py
+++ b/migrations/versions/100_make_message_namespace_id_nonnull.py
@@ -11,7 +11,7 @@
 revision = "5a68ac0e3e9"
 down_revision = "e27104acb25"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/101_add_namespace_to_contacts.py b/migrations/versions/101_add_namespace_to_contacts.py
index 28c95c1f1..b741d387a 100644
--- a/migrations/versions/101_add_namespace_to_contacts.py
+++ b/migrations/versions/101_add_namespace_to_contacts.py
@@ -14,7 +14,7 @@
 from typing import Never

 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/102_add_namespace_to_events.py b/migrations/versions/102_add_namespace_to_events.py
index 7d274cd48..0b0486b5c 100644
--- a/migrations/versions/102_add_namespace_to_events.py
+++ b/migrations/versions/102_add_namespace_to_events.py
@@ -14,7 +14,7 @@
 from typing import Never

 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/103_add_namespace_to_calendars.py b/migrations/versions/103_add_namespace_to_calendars.py
index a21412edf..30b79d6e6 100644
--- a/migrations/versions/103_add_namespace_to_calendars.py
+++ b/migrations/versions/103_add_namespace_to_calendars.py
@@ -13,9 +13,9 @@
 from typing import Never

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/107_drop_eas_state.py b/migrations/versions/107_drop_eas_state.py
index c74f59c6a..9a4531d80 100644
--- a/migrations/versions/107_drop_eas_state.py
+++ b/migrations/versions/107_drop_eas_state.py
@@ -13,12 +13,12 @@
 from typing import Never

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine()
     Base = sa.ext.declarative.declarative_base()  # noqa: N806
diff --git a/migrations/versions/108_easaccount_username.py b/migrations/versions/108_easaccount_username.py
index 8b6b43c6a..7105a1b80 100644
--- a/migrations/versions/108_easaccount_username.py
+++ b/migrations/versions/108_easaccount_username.py
@@ -11,12 +11,12 @@
 revision = "2f97277cd86d"
 down_revision = "3cea90bfcdea"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine()

@@ -40,10 +40,12 @@ def upgrade() -> None:
     Base.metadata.reflect(engine)
     from inbox.models.session import session_scope

-    class EASAccount(Base):
+    class EASAccount(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["easaccount"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         accts = db_session.query(EASAccount).all()

         for a in accts:
@@ -61,7 +63,7 @@ class EASAccount(Base):


 def downgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine()
diff --git a/migrations/versions/109_add_retries_column_to_the_actionlog.py b/migrations/versions/109_add_retries_column_to_the_actionlog.py
index 6b8071d4f..63ca0bb3d 100644
--- a/migrations/versions/109_add_retries_column_to_the_actionlog.py
+++ b/migrations/versions/109_add_retries_column_to_the_actionlog.py
@@ -13,9 +13,9 @@
 from typing import Never

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/111_add_account_name_column.py b/migrations/versions/111_add_account_name_column.py
index f426f882c..de6798b86 100644
--- a/migrations/versions/111_add_account_name_column.py
+++ b/migrations/versions/111_add_account_name_column.py
@@ -13,7 +13,7 @@
 from typing import Never

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/113_add_custom_imap_overrides.py b/migrations/versions/113_add_custom_imap_overrides.py
index 4c159ec6c..ec8ecc62f 100644
--- a/migrations/versions/113_add_custom_imap_overrides.py
+++ b/migrations/versions/113_add_custom_imap_overrides.py
@@ -11,7 +11,7 @@
 revision = "26bfb2e45c47"
 down_revision = "26911668870a"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/114_eas_twodevices_pledge.py b/migrations/versions/114_eas_twodevices_pledge.py
index f2728af74..81161ae6e 100644
--- a/migrations/versions/114_eas_twodevices_pledge.py
+++ b/migrations/versions/114_eas_twodevices_pledge.py
@@ -13,12 +13,12 @@
 from typing import Never

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine()
diff --git a/migrations/versions/115_eas_twodevices_turn.py b/migrations/versions/115_eas_twodevices_turn.py
index 9d6571ef7..c1f710511 100644
--- a/migrations/versions/115_eas_twodevices_turn.py
+++ b/migrations/versions/115_eas_twodevices_turn.py
@@ -14,13 +14,13 @@
 from datetime import datetime
 from typing import Never

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine()

@@ -32,7 +32,7 @@ def upgrade() -> None:
     Base = sa.ext.declarative.declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class EASAccount(Base):
+    class EASAccount(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["easaccount"]
         primary_device = sa.orm.relationship(
             "EASDevice",
@@ -47,10 +47,12 @@ class EASAccount(Base):
             uselist=False,
         )

-    class EASDevice(Base):
+    class EASDevice(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["easdevice"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         accts = db_session.query(EASAccount).all()

         for a in accts:
diff --git a/migrations/versions/116_eas_twodevices_prestige.py b/migrations/versions/116_eas_twodevices_prestige.py
index b834263f7..e16a08954 100644
--- a/migrations/versions/116_eas_twodevices_prestige.py
+++ b/migrations/versions/116_eas_twodevices_prestige.py
@@ -13,7 +13,7 @@
 from typing import Never

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/118_store_label_information_per_uid.py b/migrations/versions/118_store_label_information_per_uid.py
index 022fe7793..827eaf643 100644
--- a/migrations/versions/118_store_label_information_per_uid.py
+++ b/migrations/versions/118_store_label_information_per_uid.py
@@ -11,9 +11,9 @@
 revision = "4634999269"
 down_revision = "420bf3422c4f"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]

 from inbox.sqlalchemy_ext.util import JSON
diff --git a/migrations/versions/119_store_full_message_body.py b/migrations/versions/119_store_full_message_body.py
index b08809e35..99097b3d1 100644
--- a/migrations/versions/119_store_full_message_body.py
+++ b/migrations/versions/119_store_full_message_body.py
@@ -11,7 +11,7 @@
 revision = "58732bb5d14b"
 down_revision = "4634999269"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/120_simplify_transaction_log.py b/migrations/versions/120_simplify_transaction_log.py
index cadb4416b..cac6d6d09 100644
--- a/migrations/versions/120_simplify_transaction_log.py
+++ b/migrations/versions/120_simplify_transaction_log.py
@@ -14,7 +14,7 @@
 from typing import Never

 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/121_add_searchindexcursor.py b/migrations/versions/121_add_searchindexcursor.py
index 68b77c8c8..7366b62a2 100644
--- a/migrations/versions/121_add_searchindexcursor.py
+++ b/migrations/versions/121_add_searchindexcursor.py
@@ -11,7 +11,7 @@
 revision = "526eefc1d600"
 down_revision = "8c2406df6f8"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/122_add_easeventuid.py b/migrations/versions/122_add_easeventuid.py
index f68733f6c..d7af0daca 100644
--- a/migrations/versions/122_add_easeventuid.py
+++ b/migrations/versions/122_add_easeventuid.py
@@ -11,12 +11,12 @@
 revision = "476c5185121b"
 down_revision = "526eefc1d600"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine()
diff --git a/migrations/versions/123_remove_gmail_inbox_syncs.py b/migrations/versions/123_remove_gmail_inbox_syncs.py
index 964b6df30..93bb797d9 100644
--- a/migrations/versions/123_remove_gmail_inbox_syncs.py
+++ b/migrations/versions/123_remove_gmail_inbox_syncs.py
@@ -16,13 +16,17 @@ def upgrade() -> None:
     # Remove UIDs and sync status for inbox IMAP syncs -- otherwise
     # archives/deletes may not be synced correctly.
     from inbox.heartbeat.config import STATUS_DATABASE, get_redis_client
-    from inbox.heartbeat.status import HeartbeatStatusKey
+    from inbox.heartbeat.status import (  # type: ignore[attr-defined]
+        HeartbeatStatusKey,
+    )
     from inbox.models.backends.gmail import GmailAccount
     from inbox.models.backends.imap import ImapFolderSyncStatus, ImapUid
     from inbox.models.session import session_scope

     redis_client = get_redis_client(STATUS_DATABASE)
-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         for account in db_session.query(GmailAccount):
             if account.inbox_folder is None:
                 # May be the case for accounts that we can't sync, e.g. due to
diff --git a/migrations/versions/125_refactor_participants_table.py b/migrations/versions/125_refactor_participants_table.py
index a7c7d79e5..10cb21cf3 100644
--- a/migrations/versions/125_refactor_participants_table.py
+++ b/migrations/versions/125_refactor_participants_table.py
@@ -13,9 +13,9 @@
 from typing import Never

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/126_add_account_sync_contacts_events.py b/migrations/versions/126_add_account_sync_contacts_events.py
index d9410a069..9b05d7b7c 100644
--- a/migrations/versions/126_add_account_sync_contacts_events.py
+++ b/migrations/versions/126_add_account_sync_contacts_events.py
@@ -12,7 +12,7 @@
 down_revision = "955792afd00"


-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/127_remove_easeventuid.py b/migrations/versions/127_remove_easeventuid.py
index b3a45242f..bc80b4fdd 100644
--- a/migrations/versions/127_remove_easeventuid.py
+++ b/migrations/versions/127_remove_easeventuid.py
@@ -17,7 +17,7 @@


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine()
diff --git a/migrations/versions/137_add_versions.py b/migrations/versions/137_add_versions.py
index 4440e3ed7..9ec2931e2 100644
--- a/migrations/versions/137_add_versions.py
+++ b/migrations/versions/137_add_versions.py
@@ -11,7 +11,7 @@
 revision = "1f746c93e8fd"
 down_revision = "39fa82d3168e"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/138_add_participants_column.py b/migrations/versions/138_add_participants_column.py
index 49b302dff..5011cd745 100644
--- a/migrations/versions/138_add_participants_column.py
+++ b/migrations/versions/138_add_participants_column.py
@@ -8,7 +8,7 @@

 """
 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]

 # revision identifiers, used by Alembic.
 revision = "5305d4ae30b4"
diff --git a/migrations/versions/140_relax_participants_by_email_constraint.py b/migrations/versions/140_relax_participants_by_email_constraint.py
index 6004d4806..e03600cdb 100644
--- a/migrations/versions/140_relax_participants_by_email_constraint.py
+++ b/migrations/versions/140_relax_participants_by_email_constraint.py
@@ -12,7 +12,7 @@
 down_revision = "1fd7b3e0b662"

 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/141_remote_remote_contacts.py b/migrations/versions/141_remote_remote_contacts.py
index 05cb23b21..a1d2d4267 100644
--- a/migrations/versions/141_remote_remote_contacts.py
+++ b/migrations/versions/141_remote_remote_contacts.py
@@ -14,23 +14,25 @@
 from typing import Never

 from alembic import op
-from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+    declarative_base,
+)


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
     Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class Contact_Old(Base):  # noqa: N801
+    class Contact_Old(Base):  # type: ignore[misc, valid-type] # noqa: N801
         __table__ = Base.metadata.tables["contact"]

     # Delete the "remote" contacts. This is just a server cache for comparing
     # any changes, now handled by the previous "local" contacts
-    with session_scope() as db_session:
+    with session_scope() as db_session:  # type: ignore[call-arg]
         db_session.query(Contact_Old).filter_by(source="remote").delete()

     op.drop_column("contact", "source")
diff --git a/migrations/versions/142_add_sync_run_bit.py b/migrations/versions/142_add_sync_run_bit.py
index 4b3562576..3ed147b62 100644
--- a/migrations/versions/142_add_sync_run_bit.py
+++ b/migrations/versions/142_add_sync_run_bit.py
@@ -11,7 +11,7 @@
 revision = "2d8a350b4885"
 down_revision = "3ab34bc85c8d"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/147_add_cleaned_subject.py b/migrations/versions/147_add_cleaned_subject.py
index 0d3420127..dd9137a63 100644
--- a/migrations/versions/147_add_cleaned_subject.py
+++ b/migrations/versions/147_add_cleaned_subject.py
@@ -11,9 +11,9 @@
 revision = "486c7fa5b533"
 down_revision = "c77a90d524"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/148_add_last_modified_column_for_events.py b/migrations/versions/148_add_last_modified_column_for_events.py
index 3f7e6914b..f740157c3 100644
--- a/migrations/versions/148_add_last_modified_column_for_events.py
+++ b/migrations/versions/148_add_last_modified_column_for_events.py
@@ -12,7 +12,7 @@
 down_revision = "486c7fa5b533"

 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/149_add_emailed_events_calendar.py b/migrations/versions/149_add_emailed_events_calendar.py
index fd0a8ca27..ccbe13353 100644
--- a/migrations/versions/149_add_emailed_events_calendar.py
+++ b/migrations/versions/149_add_emailed_events_calendar.py
@@ -12,7 +12,7 @@
 down_revision = "54dcea22a268"

 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/150_add_polymorphic_events.py b/migrations/versions/150_add_polymorphic_events.py
index 34025846a..f1c82ef3b 100644
--- a/migrations/versions/150_add_polymorphic_events.py
+++ b/migrations/versions/150_add_polymorphic_events.py
@@ -15,7 +15,7 @@
 import json
 import sys

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op

@@ -75,7 +75,7 @@ def populate() -> None:
     )
     from inbox.models.session import session_scope

-    with session_scope() as db:
+    with session_scope() as db:  # type: ignore[call-arg]
         # Redo recurrence rule population, since we extended the column length
         print("Repopulating max-length recurrences...", end=" ")
         for e in db.query(Event).filter(
@@ -113,29 +113,35 @@ def populate() -> None:
         c = 0
         print("Expanding Overrides .", end=" ")
         query = db.query(RecurringEventOverride)
-        for e in query:
+        for e in query:  # type: ignore[misc]
             try:
                 # Some raw data is str(dict), other is json.dumps
-                raw_data = json.loads(e.raw_data)
+                raw_data = json.loads(e.raw_data)  # type: ignore[misc]
             except:  # noqa: E722
                 try:
-                    raw_data = ast.literal_eval(e.raw_data)
+                    raw_data = ast.literal_eval(
+                        e.raw_data  # type: ignore[misc]
+                    )
                 except:  # noqa: E722
-                    print(f"Could not load raw data for event {e.id}")
+                    print(
+                        f"Could not load raw data for event {e.id}"  # type: ignore[misc]
+                    )
                     continue
             rec_uid = raw_data.get("recurringEventId")
             if rec_uid:
-                e.master_event_uid = rec_uid
+                e.master_event_uid = rec_uid  # type: ignore[misc]
                 ost = raw_data.get("originalStartTime")
                 if ost:
                     # this is a dictionary with one value
                     start_time = ost.values().pop()
-                    e.original_start_time = parse_datetime(start_time)
+                    e.original_start_time = parse_datetime(  # type: ignore[misc]
+                        start_time
+                    )
                 # attempt to get the ID for the event, if we can, and
                 # set the relationship appropriately
                 if raw_data.get("status") == "cancelled":
-                    e.cancelled = True
-                link_events(db, e)
+                    e.cancelled = True  # type: ignore[misc]
+                link_events(db, e)  # type: ignore[misc]
                 c += 1
                 if c % 100 == 0:
                     print(".", end=" ")
@@ -156,7 +162,9 @@ def populate() -> None:
         try:
             db.execute(create)
         except Exception as e:
-            print(f"Couldn't insert RecurringEvents: {e}")
+            print(
+                f"Couldn't insert RecurringEvents: {e}"  # type: ignore[misc]
+            )
             sys.exit(2)
         print("done.")
diff --git a/migrations/versions/151_remove_message_thread_order.py b/migrations/versions/151_remove_message_thread_order.py
index c571b4e4c..2fc0e9dd3 100644
--- a/migrations/versions/151_remove_message_thread_order.py
+++ b/migrations/versions/151_remove_message_thread_order.py
@@ -11,9 +11,9 @@
 revision = "2f3c8fa3fc3a"
 down_revision = "1de526a15c5d"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.dialects import mysql
+from sqlalchemy.dialects import mysql  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/152_add_message_id_to_event.py b/migrations/versions/152_add_message_id_to_event.py
index f6089aefe..83d20165f 100644
--- a/migrations/versions/152_add_message_id_to_event.py
+++ b/migrations/versions/152_add_message_id_to_event.py
@@ -12,7 +12,7 @@
 down_revision = "2f3c8fa3fc3a"

 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/155_add_status_column.py b/migrations/versions/155_add_status_column.py
index ad7b0e51d..ff8802bca 100644
--- a/migrations/versions/155_add_status_column.py
+++ b/migrations/versions/155_add_status_column.py
@@ -12,7 +12,7 @@
 down_revision = "1f06c15ae796"

 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/156_drop_cancelled_column.py b/migrations/versions/156_drop_cancelled_column.py
index 1e0a3c76d..2ddd773e9 100644
--- a/migrations/versions/156_drop_cancelled_column.py
+++ b/migrations/versions/156_drop_cancelled_column.py
@@ -12,7 +12,7 @@
 down_revision = "7de8a6ce8cd"

 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/157_update_eas_schema.py b/migrations/versions/157_update_eas_schema.py
index 9c82cadcd..0926733af 100644
--- a/migrations/versions/157_update_eas_schema.py
+++ b/migrations/versions/157_update_eas_schema.py
@@ -17,7 +17,7 @@


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine(pool_size=1, max_overflow=0)
     # Do nothing if the affected table isn't present.
diff --git a/migrations/versions/158_update_eas_schema_part_2.py b/migrations/versions/158_update_eas_schema_part_2.py
index 590d13994..27ae9328a 100644
--- a/migrations/versions/158_update_eas_schema_part_2.py
+++ b/migrations/versions/158_update_eas_schema_part_2.py
@@ -17,7 +17,7 @@


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine(pool_size=1, max_overflow=0)
     # Do nothing if the affected table isn't present.
diff --git a/migrations/versions/159_update_eas_schema_part_3.py b/migrations/versions/159_update_eas_schema_part_3.py
index 31cc9003b..2bb6f591b 100644
--- a/migrations/versions/159_update_eas_schema_part_3.py
+++ b/migrations/versions/159_update_eas_schema_part_3.py
@@ -17,7 +17,7 @@


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine(pool_size=1, max_overflow=0)
     # Do nothing if the affected table isn't present.
diff --git a/migrations/versions/160_split_actionlog.py b/migrations/versions/160_split_actionlog.py
index 242cce90d..91864837e 100644
--- a/migrations/versions/160_split_actionlog.py
+++ b/migrations/versions/160_split_actionlog.py
@@ -11,13 +11,13 @@
 revision = "182f2b40fa36"
 down_revision = "4e6eedda36af"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.orm import contains_eager
+from sqlalchemy.orm import contains_eager  # type: ignore[import-untyped]


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     op.add_column("actionlog", sa.Column("type", sa.String(16)))

@@ -25,7 +25,7 @@ def upgrade() -> None:
     from inbox.models import Account, ActionLog, Namespace
     from inbox.models.session import session_scope

-    with session_scope() as db_session:
+    with session_scope() as db_session:  # type: ignore[call-arg]
         q = (
             db_session.query(ActionLog)
             .join(Namespace)
@@ -68,7 +68,7 @@ def upgrade() -> None:


 def downgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     op.drop_column("actionlog", "type")
diff --git a/migrations/versions/161_update_eas_schema_part_3_for_prod.py b/migrations/versions/161_update_eas_schema_part_3_for_prod.py
index 7b76f09fb..59f731f1d 100644
--- a/migrations/versions/161_update_eas_schema_part_3_for_prod.py
+++ b/migrations/versions/161_update_eas_schema_part_3_for_prod.py
@@ -12,11 +12,11 @@
 down_revision = "182f2b40fa36"

 from alembic import op
-from sqlalchemy.schema import MetaData
+from sqlalchemy.schema import MetaData  # type: ignore[import-untyped]


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine(pool_size=1, max_overflow=0)
diff --git a/migrations/versions/165_add_compacted_body.py b/migrations/versions/165_add_compacted_body.py
index 866258c3a..dd35f4d78 100644
--- a/migrations/versions/165_add_compacted_body.py
+++ b/migrations/versions/165_add_compacted_body.py
@@ -11,9 +11,9 @@
 revision = "29698176aa8d"
 down_revision = "17dcbd7754e0"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
-from sqlalchemy.dialects import mysql
+from sqlalchemy.dialects import mysql  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/166_migrate_body_format.py b/migrations/versions/166_migrate_body_format.py
index 2e19d050b..a368d5ee5 100644
--- a/migrations/versions/166_migrate_body_format.py
+++ b/migrations/versions/166_migrate_body_format.py
@@ -11,15 +11,17 @@
 revision = "3d4f5741e1d7"
 down_revision = "29698176aa8d"

-import sqlalchemy as sa
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import load_only
+import sqlalchemy as sa  # type: ignore[import-untyped]
+from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+    declarative_base,
+)
+from sqlalchemy.orm import load_only  # type: ignore[import-untyped]

 CHUNK_SIZE = 1000


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]
     from inbox.models.session import session_scope
     from inbox.security.blobstorage import encode_blob

@@ -27,10 +29,12 @@ def upgrade() -> None:
     Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class Message(Base):
+    class Message(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["message"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         (max_id,) = db_session.query(sa.func.max(Message.id)).one()
         if max_id is None:
             max_id = 0
diff --git a/migrations/versions/169_update_easuid_schema.py b/migrations/versions/169_update_easuid_schema.py
index 7d9a50de9..6f0cd4da9 100644
--- a/migrations/versions/169_update_easuid_schema.py
+++ b/migrations/versions/169_update_easuid_schema.py
@@ -15,7 +15,7 @@


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine(pool_size=1, max_overflow=0)
     if not engine.has_table("easuid"):
diff --git a/migrations/versions/170_update_easuid_schema_2.py b/migrations/versions/170_update_easuid_schema_2.py
index 53f3589fa..f14b17ec4 100644
--- a/migrations/versions/170_update_easuid_schema_2.py
+++ b/migrations/versions/170_update_easuid_schema_2.py
@@ -11,13 +11,15 @@
 revision = "3ee78a8b1ac6"
 down_revision = "281b07fa75bb"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]


 def upgrade() -> None:
-    from sqlalchemy.ext.declarative import declarative_base
+    from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+        declarative_base,
+    )

-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
@@ -26,13 +28,15 @@ def upgrade() -> None:
     Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class EASUid(Base):
+    class EASUid(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["easuid"]

-    class EASFolderSyncStatus(Base):
+    class EASFolderSyncStatus(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["easfoldersyncstatus"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         max_easuid = db_session.query(sa.func.max(EASUid.id)).scalar()
         if max_easuid is None:
             return
diff --git a/migrations/versions/171_update_easuid_schema_3.py b/migrations/versions/171_update_easuid_schema_3.py
index abbbd0b23..43aadd4f4 100644
--- a/migrations/versions/171_update_easuid_schema_3.py
+++ b/migrations/versions/171_update_easuid_schema_3.py
@@ -11,13 +11,15 @@
 revision = "584356bf23a3"
 down_revision = "3ee78a8b1ac6"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]


 def upgrade() -> None:
-    from sqlalchemy.ext.declarative import declarative_base
+    from sqlalchemy.ext.declarative import (  # type: ignore[import-untyped]
+        declarative_base,
+    )

-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
@@ -27,10 +29,12 @@ def upgrade() -> None:
     Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class EASUid(Base):
+    class EASUid(Base):  # type: ignore[misc, valid-type]
         __table__ = Base.metadata.tables["easuid"]

-    with session_scope(versioned=False) as db_session:
+    with session_scope(  # type: ignore[call-arg]
+        versioned=False
+    ) as db_session:
         # STOPSHIP(emfree): determine if we need to batch this update on large
         # databases.
         db_session.query(EASUid).update(
diff --git a/migrations/versions/172_update_easuid_schema_4.py b/migrations/versions/172_update_easuid_schema_4.py
index d2e2e5550..061f67407 100644
--- a/migrations/versions/172_update_easuid_schema_4.py
+++ b/migrations/versions/172_update_easuid_schema_4.py
@@ -17,7 +17,7 @@


 def upgrade() -> None:
-    from inbox.ignition import main_engine
+    from inbox.ignition import main_engine  # type: ignore[attr-defined]

     engine = main_engine(pool_size=1, max_overflow=0)
     if not engine.has_table("easuid"):
diff --git a/migrations/versions/173_add_owner2.py b/migrations/versions/173_add_owner2.py
index fc95d712d..5ed1a784e 100644
--- a/migrations/versions/173_add_owner2.py
+++ b/migrations/versions/173_add_owner2.py
@@ -13,7 +13,7 @@
 down_revision = "d0427f9f3d1"

 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/174_backfill_owner2.py b/migrations/versions/174_backfill_owner2.py
index a12c0ed3d..a2787434b 100644
--- a/migrations/versions/174_backfill_owner2.py
+++ b/migrations/versions/174_backfill_owner2.py
@@ -14,7 +14,7 @@
 from typing import Never

 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/175_fix_recurring_override_cascade.py b/migrations/versions/175_fix_recurring_override_cascade.py
index cc5fbadf7..b8f3852d0 100644
--- a/migrations/versions/175_fix_recurring_override_cascade.py
+++ b/migrations/versions/175_fix_recurring_override_cascade.py
@@ -12,7 +12,7 @@
 down_revision = "4ef055945390"

 from alembic import op
-from sqlalchemy.sql import text
+from sqlalchemy.sql import text  # type: ignore[import-untyped]


 def upgrade() -> None:
diff --git a/migrations/versions/176_add_run_state_folderstatus.py b/migrations/versions/176_add_run_state_folderstatus.py
index baaf05b9f..d5bc6f3a8 100644
--- a/migrations/versions/176_add_run_state_folderstatus.py
+++ b/migrations/versions/176_add_run_state_folderstatus.py
@@ -11,7 +11,7 @@
 revision = "48a1991e5dbd"
 down_revision = "6e5b154d917"

-import sqlalchemy as sa
+import sqlalchemy as sa  # type: ignore[import-untyped]
 from alembic import op
diff --git a/migrations/versions/177_add_run_state_eas_folderstatus.py b/migrations/versions/177_add_run_state_eas_folderstatus.py
index 5e324b84c..75b93feaf 100644
--- a/migrations/versions/177_add_run_state_eas_folderstatus.py
+++ 
b/migrations/versions/177_add_run_state_eas_folderstatus.py @@ -11,12 +11,12 @@ revision = "2b9dd6f7593a" down_revision = "48a1991e5dbd" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op def upgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) if not engine.has_table("easfoldersyncstatus"): @@ -33,7 +33,7 @@ def upgrade() -> None: def downgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) if not engine.has_table("easfoldersyncstatus"): diff --git a/migrations/versions/178_add_reply_to_messagecontactassociation.py b/migrations/versions/178_add_reply_to_messagecontactassociation.py index be3599e87..8ac0836fa 100644 --- a/migrations/versions/178_add_reply_to_messagecontactassociation.py +++ b/migrations/versions/178_add_reply_to_messagecontactassociation.py @@ -11,7 +11,7 @@ revision = "41f957b595fc" down_revision = "2b9dd6f7593a" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/179_longer_event_descriptions.py b/migrations/versions/179_longer_event_descriptions.py index 542d414af..6f355a3a0 100644 --- a/migrations/versions/179_longer_event_descriptions.py +++ b/migrations/versions/179_longer_event_descriptions.py @@ -11,9 +11,9 @@ revision = "56500282e024" down_revision = "41f957b595fc" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/182_add_data_processing_cache_table.py b/migrations/versions/182_add_data_processing_cache_table.py index e8050bd42..4014e7f39 100644 --- a/migrations/versions/182_add_data_processing_cache_table.py +++ b/migrations/versions/182_add_data_processing_cache_table.py @@ -11,9 +11,9 @@ revision = "3857f395fb1d" down_revision = "10da2e0bc3bb" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/183_change_event_sync_timestamp.py b/migrations/versions/183_change_event_sync_timestamp.py index 0d8a33c7c..4a1467e31 100644 --- a/migrations/versions/183_change_event_sync_timestamp.py +++ b/migrations/versions/183_change_event_sync_timestamp.py @@ -11,7 +11,7 @@ revision = "3a58d466f61d" down_revision = "3857f395fb1d" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/184_create_gmail_auth_credentials_table.py b/migrations/versions/184_create_gmail_auth_credentials_table.py index 51e40d1e7..bed25ef02 100644 --- a/migrations/versions/184_create_gmail_auth_credentials_table.py +++ b/migrations/versions/184_create_gmail_auth_credentials_table.py @@ -11,9 +11,9 @@ revision = "2ac4e3c4e049" down_revision = "3a58d466f61d" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git 
a/migrations/versions/185_backfill_gmail_auth_credentials_table.py b/migrations/versions/185_backfill_gmail_auth_credentials_table.py index 61d195545..97b6a1dba 100644 --- a/migrations/versions/185_backfill_gmail_auth_credentials_table.py +++ b/migrations/versions/185_backfill_gmail_auth_credentials_table.py @@ -15,11 +15,13 @@ def upgrade() -> None: import datetime - from sqlalchemy.ext.declarative import declarative_base - from sqlalchemy.orm import relationship + from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, + ) + from sqlalchemy.orm import relationship # type: ignore[import-untyped] from inbox.config import config - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models.session import session_scope engine = main_engine() @@ -28,17 +30,19 @@ def upgrade() -> None: Base = declarative_base() # noqa: N806 Base.metadata.reflect(engine) - class GmailAccount(Base): + class GmailAccount(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["gmailaccount"] - class Secret(Base): + class Secret(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["secret"] - class GmailAuthCredentials(Base): + class GmailAuthCredentials(Base): # type: ignore[misc, valid-type] __table__ = Base.metadata.tables["gmailauthcredentials"] secret = relationship(Secret) - with session_scope(versioned=False) as db_session: + with session_scope( # type: ignore[call-arg] + versioned=False + ) as db_session: for acc, sec in ( db_session.query(GmailAccount, Secret) .filter( diff --git a/migrations/versions/186_new_tables_for_folders_overhaul.py b/migrations/versions/186_new_tables_for_folders_overhaul.py index ca5fc49d9..09ab015c1 100644 --- a/migrations/versions/186_new_tables_for_folders_overhaul.py +++ b/migrations/versions/186_new_tables_for_folders_overhaul.py @@ -13,7 +13,7 @@ from typing import Never -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op @@ -166,7 +166,7 @@ def upgrade() -> None: "folder_ibfk_2", "folder", "category", ["category_id"], ["id"] ) - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) if engine.has_table("easfoldersyncstatus"): diff --git a/migrations/versions/187_migrate_data_for_folders_overhaul.py b/migrations/versions/187_migrate_data_for_folders_overhaul.py index 3bd33c586..60285a8eb 100644 --- a/migrations/versions/187_migrate_data_for_folders_overhaul.py +++ b/migrations/versions/187_migrate_data_for_folders_overhaul.py @@ -11,8 +11,12 @@ revision = "334b33f18b4f" down_revision = "23e204cd1d91" -from sqlalchemy import asc -from sqlalchemy.orm import joinedload, load_only, subqueryload +from sqlalchemy import asc # type: ignore[import-untyped] +from sqlalchemy.orm import ( # type: ignore[import-untyped] + joinedload, + load_only, + subqueryload, +) from inbox.config import config from inbox.logging import configure_logging, get_logger @@ -21,7 +25,9 @@ log = get_logger() -def populate_labels(uid, account, db_session) -> None: +def populate_labels( # type: ignore[no-untyped-def] + uid, account, db_session +) -> None: from inbox.models import Label existing_labels = {(l.name, l.canonical_name): l for l in account.labels} @@ -43,7 +49,7 @@ def populate_labels(uid, account, db_session) -> None: (category_map[label_string], category_map[label_string]) ) else: - 
remote_labels.add((label_string, None)) + remote_labels.add((label_string, None)) # type: ignore[arg-type] for key in remote_labels: if key not in existing_labels: @@ -54,20 +60,28 @@ def populate_labels(uid, account, db_session) -> None: uid.labels.add(existing_labels[key]) -def set_labels_for_imapuids(account, db_session) -> None: +def set_labels_for_imapuids( # type: ignore[no-untyped-def] + account, db_session +) -> None: from inbox.models.backends.imap import ImapUid uids = ( db_session.query(ImapUid) .filter(ImapUid.account_id == account.id) - .options(subqueryload(ImapUid.labelitems).joinedload("label")) + .options( + subqueryload( + ImapUid.labelitems # type: ignore[attr-defined] + ).joinedload("label") + ) ) for uid in uids: populate_labels(uid, account, db_session) log.info("Updated UID labels", account_id=account.id, uid=uid.id) -def create_categories_for_folders(account, db_session) -> None: +def create_categories_for_folders( # type: ignore[no-untyped-def] + account, db_session +) -> None: from inbox.models import Category, Folder for folder in db_session.query(Folder).filter( @@ -84,19 +98,27 @@ def create_categories_for_folders(account, db_session) -> None: db_session.commit() -def create_categories_for_easfoldersyncstatuses(account, db_session) -> None: - from inbox.mailsync.backends.eas.base.foldersync import save_categories +def create_categories_for_easfoldersyncstatuses( # type: ignore[no-untyped-def] + account, db_session +) -> None: + from inbox.mailsync.backends.eas.base.foldersync import ( # type: ignore[import-not-found] + save_categories, + ) save_categories(db_session, account, account.primary_device_id) db_session.commit() save_categories(db_session, account, account.secondary_device_id) -def migrate_account_metadata(account_id) -> None: +def migrate_account_metadata( # type: ignore[no-untyped-def] + account_id, +) -> None: from inbox.models import Account from inbox.models.session import session_scope - with session_scope(versioned=False) as db_session: + with session_scope( # type: ignore[call-arg] + versioned=False + ) as db_session: account = db_session.query(Account).get(account_id) if account.discriminator == "easaccount": create_categories_for_easfoldersyncstatuses(account, db_session) @@ -107,21 +129,25 @@ def migrate_account_metadata(account_id) -> None: db_session.commit() -def migrate_messages(account_id) -> None: - from inbox.ignition import main_engine +def migrate_messages(account_id) -> None: # type: ignore[no-untyped-def] + from inbox.ignition import main_engine # type: ignore[attr-defined] from inbox.models import Message, Namespace from inbox.models.session import session_scope engine = main_engine(pool_size=1, max_overflow=0) - with session_scope(versioned=False) as db_session: + with session_scope( # type: ignore[call-arg] + versioned=False + ) as db_session: namespace = ( db_session.query(Namespace).filter_by(account_id=account_id).one() ) offset = 0 while True: if engine.has_table("easuid"): - additional_options = [subqueryload(Message.easuids)] + additional_options = [ + subqueryload(Message.easuids) # type: ignore[attr-defined] + ] else: additional_options = [] @@ -136,8 +162,12 @@ def migrate_messages(account_id) -> None: Message.is_draft, ), joinedload(Message.namespace).load_only("id"), - subqueryload(Message.imapuids), - subqueryload(Message.messagecategories), + subqueryload( + Message.imapuids # type: ignore[attr-defined] + ), + subqueryload( + Message.messagecategories # type: ignore[attr-defined] + ), *additional_options, ) 
.with_hint(Message, "USE INDEX (ix_message_namespace_id)") @@ -163,7 +193,7 @@ def migrate_messages(account_id) -> None: offset += 1000 -def migrate_account(account_id) -> None: +def migrate_account(account_id) -> None: # type: ignore[no-untyped-def] migrate_account_metadata(account_id) migrate_messages(account_id) @@ -172,7 +202,7 @@ def upgrade() -> None: from inbox.models import Account from inbox.models.session import session_scope - with session_scope() as db_session: + with session_scope() as db_session: # type: ignore[call-arg] account_ids = [id_ for id_, in db_session.query(Account.id)] for id_ in account_ids: diff --git a/migrations/versions/188_create_sequence_number_column.py b/migrations/versions/188_create_sequence_number_column.py index 9d58a4347..2e38fdff4 100644 --- a/migrations/versions/188_create_sequence_number_column.py +++ b/migrations/versions/188_create_sequence_number_column.py @@ -12,7 +12,7 @@ down_revision = "334b33f18b4f" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/189_add_initial_sync_start_end_column.py b/migrations/versions/189_add_initial_sync_start_end_column.py index cf6f387d2..d52e90c7c 100644 --- a/migrations/versions/189_add_initial_sync_start_end_column.py +++ b/migrations/versions/189_add_initial_sync_start_end_column.py @@ -11,7 +11,7 @@ revision = "3b093f2d7419" down_revision = "606447e78e7" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/190_eas_add_device_retirement.py b/migrations/versions/190_eas_add_device_retirement.py index a65a5c2de..03fe52ac6 100644 --- a/migrations/versions/190_eas_add_device_retirement.py +++ b/migrations/versions/190_eas_add_device_retirement.py @@ -11,12 +11,12 @@ revision = "246a6bf050bc" down_revision = "3b093f2d7419" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op def upgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) if not engine.has_table("easdevice"): @@ -33,7 +33,7 @@ def upgrade() -> None: def downgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) if not engine.has_table("easdevice"): diff --git a/migrations/versions/191_add_new_events_and_calendars_flags.py b/migrations/versions/191_add_new_events_and_calendars_flags.py index 6b07b3d9b..3c021ddec 100644 --- a/migrations/versions/191_add_new_events_and_calendars_flags.py +++ b/migrations/versions/191_add_new_events_and_calendars_flags.py @@ -11,7 +11,7 @@ revision = "47aec237051e" down_revision = "246a6bf050bc" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/192_add_receivedrecentdate_column_to_threads.py b/migrations/versions/192_add_receivedrecentdate_column_to_threads.py index 4eec70f41..281b89a58 100644 --- a/migrations/versions/192_add_receivedrecentdate_column_to_threads.py +++ b/migrations/versions/192_add_receivedrecentdate_column_to_threads.py @@ -11,12 +11,12 @@ revision = "2758cefad87d" down_revision = "47aec237051e" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op def upgrade() -> 
None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) if not engine.has_table("thread"): @@ -39,7 +39,7 @@ def upgrade() -> None: def downgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) if not engine.has_table("thread"): diff --git a/migrations/versions/193_calculate_receivedrecentdate_for_threads.py b/migrations/versions/193_calculate_receivedrecentdate_for_threads.py index 140ff98c9..f86eac5da 100644 --- a/migrations/versions/193_calculate_receivedrecentdate_for_threads.py +++ b/migrations/versions/193_calculate_receivedrecentdate_for_threads.py @@ -13,7 +13,7 @@ # solution from http://stackoverflow.com/a/1217947 -def page_query(q): # noqa: ANN201 +def page_query(q): # type: ignore[no-untyped-def] # noqa: ANN201 CHUNK_SIZE = 500 # noqa: N806 offset = 0 while True: @@ -27,13 +27,15 @@ def page_query(q): # noqa: ANN201 def upgrade() -> None: - from sqlalchemy import desc - from sqlalchemy.sql import not_ + from sqlalchemy import desc # type: ignore[import-untyped] + from sqlalchemy.sql import not_ # type: ignore[import-untyped] from inbox.models import Message, Thread from inbox.models.session import session_scope - with session_scope(versioned=False) as db_session: + with session_scope( # type: ignore[call-arg] + versioned=False + ) as db_session: for thread in page_query(db_session.query(Thread)): last_message = ( db_session.query(Message) diff --git a/migrations/versions/194_extend_eas_folder_id.py b/migrations/versions/194_extend_eas_folder_id.py index 0d3ec72df..9845e9b5d 100644 --- a/migrations/versions/194_extend_eas_folder_id.py +++ b/migrations/versions/194_extend_eas_folder_id.py @@ -12,15 +12,17 @@ down_revision = "691fa97024d" from alembic import op -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.sql import text +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: conn = op.get_bind() conn.execute(text("set @@lock_wait_timeout = 20;")) - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 @@ -39,7 +41,7 @@ def downgrade() -> None: conn = op.get_bind() conn.execute(text("set @@lock_wait_timeout = 20;")) - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 diff --git a/migrations/versions/195_remove_receivedrecentdate_column.py b/migrations/versions/195_remove_receivedrecentdate_column.py index 3940d2ea1..9e43ce6c0 100644 --- a/migrations/versions/195_remove_receivedrecentdate_column.py +++ b/migrations/versions/195_remove_receivedrecentdate_column.py @@ -11,7 +11,7 @@ revision = "51ad0922ad8e" down_revision = "69e93aef3e9" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/196_create_outlook_account_column.py b/migrations/versions/196_create_outlook_account_column.py index 4f73a0493..ded263aa4 100644 --- a/migrations/versions/196_create_outlook_account_column.py +++ 
b/migrations/versions/196_create_outlook_account_column.py @@ -12,15 +12,17 @@ down_revision = "51ad0922ad8e" from alembic import op -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.sql import text +from sqlalchemy.ext.declarative import ( # type: ignore[import-untyped] + declarative_base, +) +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: conn = op.get_bind() conn.execute(text("set @@lock_wait_timeout = 20;")) - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 @@ -36,7 +38,7 @@ def downgrade() -> None: conn = op.get_bind() conn.execute(text("set @@lock_wait_timeout = 20;")) - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() # noqa: N806 diff --git a/migrations/versions/197_add_message_categories_change_counter.py b/migrations/versions/197_add_message_categories_change_counter.py index 053d2f33e..02037ee06 100644 --- a/migrations/versions/197_add_message_categories_change_counter.py +++ b/migrations/versions/197_add_message_categories_change_counter.py @@ -11,7 +11,7 @@ revision = "3cf51fb0e76a" down_revision = "4fa0540482f8" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op @@ -30,7 +30,7 @@ def upgrade() -> None: existing_type=sa.Enum("draft", "sending", "sending failed", "sent"), ) - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) if not engine.has_table("easdevice"): diff --git a/migrations/versions/198_eas_foldersyncstatus_startstop_columns.py b/migrations/versions/198_eas_foldersyncstatus_startstop_columns.py index 507d1b443..9a438670f 100644 --- a/migrations/versions/198_eas_foldersyncstatus_startstop_columns.py +++ b/migrations/versions/198_eas_foldersyncstatus_startstop_columns.py @@ -11,12 +11,12 @@ revision = "301d22aa96b8" down_revision = "3cf51fb0e76a" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op def upgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) if not engine.has_table("easfoldersyncstatus"): @@ -32,7 +32,7 @@ def upgrade() -> None: def downgrade() -> None: - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) if not engine.has_table("easfoldersyncstatus"): diff --git a/migrations/versions/199_save_imap_uidnext.py b/migrations/versions/199_save_imap_uidnext.py index d689b4cc8..3320e3b2e 100644 --- a/migrations/versions/199_save_imap_uidnext.py +++ b/migrations/versions/199_save_imap_uidnext.py @@ -11,7 +11,7 @@ revision = "3583211a4838" down_revision = "301d22aa96b8" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/200_update_imapfolderinfo.py b/migrations/versions/200_update_imapfolderinfo.py index 4acac9901..1610fa616 100644 --- a/migrations/versions/200_update_imapfolderinfo.py +++ b/migrations/versions/200_update_imapfolderinfo.py @@ -11,7 +11,7 @@ revision = "dbf45fac873" down_revision = 
"3583211a4838" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/201_add_sync_email_bit_to_account.py b/migrations/versions/201_add_sync_email_bit_to_account.py index 653429d33..960fba7be 100644 --- a/migrations/versions/201_add_sync_email_bit_to_account.py +++ b/migrations/versions/201_add_sync_email_bit_to_account.py @@ -11,7 +11,7 @@ revision = "527bbdc2b0fa" down_revision = "dbf45fac873" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/202_drop_sync_raw_data_column.py b/migrations/versions/202_drop_sync_raw_data_column.py index 403a0b30a..2e9638fad 100644 --- a/migrations/versions/202_drop_sync_raw_data_column.py +++ b/migrations/versions/202_drop_sync_raw_data_column.py @@ -11,9 +11,9 @@ revision = "2e515548043b" down_revision = "527bbdc2b0fa" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/203_deleted_at_constraint.py b/migrations/versions/203_deleted_at_constraint.py index d94077ad4..a50cdb410 100644 --- a/migrations/versions/203_deleted_at_constraint.py +++ b/migrations/versions/203_deleted_at_constraint.py @@ -12,7 +12,7 @@ down_revision = "2e515548043b" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: @@ -25,7 +25,7 @@ def upgrade() -> None: ["namespace_id", "name", "display_name", "deleted_at"], ) - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) if not engine.has_table("easfoldersyncstatus"): @@ -47,7 +47,7 @@ def downgrade() -> None: "namespace_id", "category", ["namespace_id", "name", "display_name"] ) - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) if not engine.has_table("easfoldersyncstatus"): diff --git a/migrations/versions/204_remove_deleted_at_constraint.py b/migrations/versions/204_remove_deleted_at_constraint.py index 71f68e3f0..07d357865 100644 --- a/migrations/versions/204_remove_deleted_at_constraint.py +++ b/migrations/versions/204_remove_deleted_at_constraint.py @@ -12,7 +12,7 @@ down_revision = "420ccbea2c5e" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: @@ -23,7 +23,7 @@ def upgrade() -> None: "namespace_id", "category", ["namespace_id", "name", "display_name"] ) - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) if not engine.has_table("easfoldersyncstatus"): @@ -47,7 +47,7 @@ def downgrade() -> None: ["namespace_id", "name", "display_name", "deleted_at"], ) - from inbox.ignition import main_engine + from inbox.ignition import main_engine # type: ignore[attr-defined] engine = main_engine(pool_size=1, max_overflow=0) if not engine.has_table("easfoldersyncstatus"): diff --git a/migrations/versions/205_fix_categories_cascade.py b/migrations/versions/205_fix_categories_cascade.py index 51b58ef3a..c34fe9b77 100644 --- 
a/migrations/versions/205_fix_categories_cascade.py +++ b/migrations/versions/205_fix_categories_cascade.py @@ -12,7 +12,7 @@ down_revision = "583e083d4512" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/206_add_phone_numbers_table.py b/migrations/versions/206_add_phone_numbers_table.py index e23b0f542..1d792728c 100644 --- a/migrations/versions/206_add_phone_numbers_table.py +++ b/migrations/versions/206_add_phone_numbers_table.py @@ -11,9 +11,9 @@ revision = "gu8eqpm6f2x1n0fg" down_revision = "302d9f6b22f3" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/207_add_contact_search_index_service_cursor_.py b/migrations/versions/207_add_contact_search_index_service_cursor_.py index 8cc410fc6..6238b8b3c 100644 --- a/migrations/versions/207_add_contact_search_index_service_cursor_.py +++ b/migrations/versions/207_add_contact_search_index_service_cursor_.py @@ -11,9 +11,9 @@ revision = "4b225df49747" down_revision = "gu8eqpm6f2x1n0fg" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/208_drop_easuid_uniqueconstraint.py b/migrations/versions/208_drop_easuid_uniqueconstraint.py index cef632d7a..69793cde8 100644 --- a/migrations/versions/208_drop_easuid_uniqueconstraint.py +++ b/migrations/versions/208_drop_easuid_uniqueconstraint.py @@ -12,7 +12,7 @@ down_revision = "4b225df49747" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/209_recreate_easuid_index.py b/migrations/versions/209_recreate_easuid_index.py index 8a4d93695..62be32bfe 100644 --- a/migrations/versions/209_recreate_easuid_index.py +++ b/migrations/versions/209_recreate_easuid_index.py @@ -12,7 +12,7 @@ down_revision = "1962d17d1c0a" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/212_add_columns_for_smtp_imap_specific_auth.py b/migrations/versions/212_add_columns_for_smtp_imap_specific_auth.py index 4a03c8487..e0e3cbb8a 100644 --- a/migrations/versions/212_add_columns_for_smtp_imap_specific_auth.py +++ b/migrations/versions/212_add_columns_for_smtp_imap_specific_auth.py @@ -12,7 +12,7 @@ down_revision = "31aae1ecb374" from alembic import context, op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/213_add_metadata_table.py b/migrations/versions/213_add_metadata_table.py index aef6d6c18..e4da48ca2 100644 --- a/migrations/versions/213_add_metadata_table.py +++ b/migrations/versions/213_add_metadata_table.py @@ -11,7 +11,7 @@ revision = "bc1119471fe" down_revision = "501f6b2fef28" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import context, op diff --git a/migrations/versions/214_introduce_accounttransaction.py b/migrations/versions/214_introduce_accounttransaction.py index 
372c928bd..8150a88e4 100644 --- a/migrations/versions/214_introduce_accounttransaction.py +++ b/migrations/versions/214_introduce_accounttransaction.py @@ -11,12 +11,14 @@ revision = "4b83e064dd49" down_revision = "bc1119471fe" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import context, op def upgrade() -> None: - shard_id = int(context.config.get_main_option("shard_id")) + shard_id = int( + context.config.get_main_option("shard_id") # type: ignore[arg-type] + ) namespace_id_type = sa.Integer() if shard_id == 0 else sa.BigInteger() op.create_table( diff --git a/migrations/versions/216_add_folder_separator_column_for_generic_.py b/migrations/versions/216_add_folder_separator_column_for_generic_.py index 19f7a186b..d886f507d 100644 --- a/migrations/versions/216_add_folder_separator_column_for_generic_.py +++ b/migrations/versions/216_add_folder_separator_column_for_generic_.py @@ -12,7 +12,7 @@ down_revision = "4bfecbcc7dbd" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/217_add_genericaccount_ssl_required.py b/migrations/versions/217_add_genericaccount_ssl_required.py index 7599cc414..f83ce2c2c 100644 --- a/migrations/versions/217_add_genericaccount_ssl_required.py +++ b/migrations/versions/217_add_genericaccount_ssl_required.py @@ -12,7 +12,7 @@ down_revision = "4f8e995d1dba" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/219_accounttransaction_namespace_id_cascade.py b/migrations/versions/219_accounttransaction_namespace_id_cascade.py index 43a870903..1f26abf6f 100644 --- a/migrations/versions/219_accounttransaction_namespace_id_cascade.py +++ b/migrations/versions/219_accounttransaction_namespace_id_cascade.py @@ -12,7 +12,7 @@ down_revision = "3b1cc8580fc2" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/220_folder_separators_again.py b/migrations/versions/220_folder_separators_again.py index fc95b6436..b6c0611cb 100644 --- a/migrations/versions/220_folder_separators_again.py +++ b/migrations/versions/220_folder_separators_again.py @@ -12,7 +12,7 @@ down_revision = "2b2205db4964" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/221_fix_category_column_defaults.py b/migrations/versions/221_fix_category_column_defaults.py index 4cd3b6eb8..0e657f8dc 100644 --- a/migrations/versions/221_fix_category_column_defaults.py +++ b/migrations/versions/221_fix_category_column_defaults.py @@ -12,7 +12,7 @@ down_revision = "59e1cc690da9" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/223_time_mixins_fix.py b/migrations/versions/223_time_mixins_fix.py index 3852ee12d..9d7614ee8 100644 --- a/migrations/versions/223_time_mixins_fix.py +++ b/migrations/versions/223_time_mixins_fix.py @@ -11,10 +11,10 @@ revision = "539ce0291298" down_revision = "361972a1de3e" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql -from sqlalchemy.sql import 
text +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/225_drop_messagecategory_foreign_keys.py b/migrations/versions/225_drop_messagecategory_foreign_keys.py index 88069266f..6dbd0cf5a 100644 --- a/migrations/versions/225_drop_messagecategory_foreign_keys.py +++ b/migrations/versions/225_drop_messagecategory_foreign_keys.py @@ -12,7 +12,7 @@ down_revision = "29a1f2ef5653" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/226_add_queryable_value_column_to_metadata.py b/migrations/versions/226_add_queryable_value_column_to_metadata.py index fcfeaab0b..4baf73f5e 100644 --- a/migrations/versions/226_add_queryable_value_column_to_metadata.py +++ b/migrations/versions/226_add_queryable_value_column_to_metadata.py @@ -11,7 +11,7 @@ revision = "2dbf6da0775b" down_revision = "25129e0316d4" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/227_remove_message_foreignkeys.py b/migrations/versions/227_remove_message_foreignkeys.py index c73e25c9b..6f39bfa56 100644 --- a/migrations/versions/227_remove_message_foreignkeys.py +++ b/migrations/versions/227_remove_message_foreignkeys.py @@ -12,7 +12,7 @@ down_revision = "2dbf6da0775b" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/228_increase_gmailaccount_token_length.py b/migrations/versions/228_increase_gmailaccount_token_length.py index f2a5607a4..32eb9c911 100644 --- a/migrations/versions/228_increase_gmailaccount_token_length.py +++ b/migrations/versions/228_increase_gmailaccount_token_length.py @@ -11,7 +11,7 @@ revision = "3df39f4fbdec" down_revision = "17b147c1d53c" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/229_drop_transaction_foreign_keys.py b/migrations/versions/229_drop_transaction_foreign_keys.py index c9a3bcfc4..38140e905 100644 --- a/migrations/versions/229_drop_transaction_foreign_keys.py +++ b/migrations/versions/229_drop_transaction_foreign_keys.py @@ -12,7 +12,7 @@ down_revision = "3df39f4fbdec" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/230_drop_block_foreign_keys.py b/migrations/versions/230_drop_block_foreign_keys.py index e79c2b350..81148055d 100644 --- a/migrations/versions/230_drop_block_foreign_keys.py +++ b/migrations/versions/230_drop_block_foreign_keys.py @@ -12,7 +12,7 @@ down_revision = "23ff7f0b506d" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/231_drop_contact_foreign_keys.py b/migrations/versions/231_drop_contact_foreign_keys.py index 411e7a525..9ee85594f 100644 --- a/migrations/versions/231_drop_contact_foreign_keys.py +++ b/migrations/versions/231_drop_contact_foreign_keys.py @@ -12,7 +12,7 @@ down_revision = "4265dc58eec6" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git 
a/migrations/versions/232_add_thread_deleted_at.py b/migrations/versions/232_add_thread_deleted_at.py index d56e52e84..89507e9b7 100644 --- a/migrations/versions/232_add_thread_deleted_at.py +++ b/migrations/versions/232_add_thread_deleted_at.py @@ -11,7 +11,7 @@ revision = "4a44b06cd53b" down_revision = "c48fc8dea1b" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/233_revert_drop_block_foreign_keys.py b/migrations/versions/233_revert_drop_block_foreign_keys.py index 0b29e6479..009b7a36a 100644 --- a/migrations/versions/233_revert_drop_block_foreign_keys.py +++ b/migrations/versions/233_revert_drop_block_foreign_keys.py @@ -12,7 +12,7 @@ down_revision = "4a44b06cd53b" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/234_change_contact_uid_collation.py b/migrations/versions/234_change_contact_uid_collation.py index 9f5e6e32c..45af0f79a 100644 --- a/migrations/versions/234_change_contact_uid_collation.py +++ b/migrations/versions/234_change_contact_uid_collation.py @@ -12,7 +12,7 @@ down_revision = "569ebe8e383d" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/235_change_imapfolderinfo_column.py b/migrations/versions/235_change_imapfolderinfo_column.py index 9fb2d2965..14176b5a4 100644 --- a/migrations/versions/235_change_imapfolderinfo_column.py +++ b/migrations/versions/235_change_imapfolderinfo_column.py @@ -11,7 +11,7 @@ revision = "34815f9e639c" down_revision = "53e6a7446c45" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/236_add_desired_sync_host.py b/migrations/versions/236_add_desired_sync_host.py index 71a3ba508..15e2f464a 100644 --- a/migrations/versions/236_add_desired_sync_host.py +++ b/migrations/versions/236_add_desired_sync_host.py @@ -12,7 +12,7 @@ down_revision = "34815f9e639c" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/237_add_new_contacts_index.py b/migrations/versions/237_add_new_contacts_index.py index 6a90e49ef..b4cf553b9 100644 --- a/migrations/versions/237_add_new_contacts_index.py +++ b/migrations/versions/237_add_new_contacts_index.py @@ -12,7 +12,7 @@ down_revision = "3eb4f30c8ed3" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/239_server_default_created_at.py b/migrations/versions/239_server_default_created_at.py index 319bc2529..8ebb953a7 100644 --- a/migrations/versions/239_server_default_created_at.py +++ b/migrations/versions/239_server_default_created_at.py @@ -12,7 +12,7 @@ down_revision = "1b0b4e6fdf96" from alembic import op -from sqlalchemy.sql import text +from sqlalchemy.sql import text # type: ignore[import-untyped] # SELECT table_name FROM information_schema.columns WHERE table_schema='inbox' AND column_name='created_at' TABLES = [ diff --git a/migrations/versions/247_add_event_visibility.py b/migrations/versions/247_add_event_visibility.py index 3c2cd0bcc..43710ae8d 100644 --- a/migrations/versions/247_add_event_visibility.py +++ 
b/migrations/versions/247_add_event_visibility.py @@ -11,7 +11,7 @@ revision = "53b532fda984" down_revision = "69c4b13c806" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/248_event_contact_association.py b/migrations/versions/248_event_contact_association.py index 6ecaae5f8..3f764b04d 100644 --- a/migrations/versions/248_event_contact_association.py +++ b/migrations/versions/248_event_contact_association.py @@ -11,7 +11,7 @@ revision = "203ae9bf0ddd" down_revision = "53b532fda984" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/250_add_secret_type.py b/migrations/versions/250_add_secret_type.py index ab61b6e76..4d45c9e31 100644 --- a/migrations/versions/250_add_secret_type.py +++ b/migrations/versions/250_add_secret_type.py @@ -11,7 +11,7 @@ revision = "1d84676d7731" down_revision = "36ce9c8635ef" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/251_drop_gmailauthcredentials.py b/migrations/versions/251_drop_gmailauthcredentials.py index 428466c6e..a1bb916b9 100644 --- a/migrations/versions/251_drop_gmailauthcredentials.py +++ b/migrations/versions/251_drop_gmailauthcredentials.py @@ -11,9 +11,9 @@ revision = "50407c7fe030" down_revision = "1d84676d7731" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/252_drop_gmailaccount_fields.py b/migrations/versions/252_drop_gmailaccount_fields.py index cc9ed85ff..9b7bd73f6 100644 --- a/migrations/versions/252_drop_gmailaccount_fields.py +++ b/migrations/versions/252_drop_gmailaccount_fields.py @@ -11,9 +11,9 @@ revision = "32df3d8ff73e" down_revision = "50407c7fe030" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/253_drop_require_ssl.py b/migrations/versions/253_drop_require_ssl.py index 8cf0df802..0caacc3c2 100644 --- a/migrations/versions/253_drop_require_ssl.py +++ b/migrations/versions/253_drop_require_ssl.py @@ -11,9 +11,9 @@ revision = "52783469ee6c" down_revision = "32df3d8ff73e" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/254_add_calendar_default.py b/migrations/versions/254_add_calendar_default.py index 3632f9ba3..a321e8376 100644 --- a/migrations/versions/254_add_calendar_default.py +++ b/migrations/versions/254_add_calendar_default.py @@ -11,9 +11,9 @@ revision = "7bb5c0ca93de" down_revision = "52783469ee6c" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/255_add_calendar_webhook_columns.py b/migrations/versions/255_add_calendar_webhook_columns.py index b69d25457..9393e5df4 100644 --- 
a/migrations/versions/255_add_calendar_webhook_columns.py +++ b/migrations/versions/255_add_calendar_webhook_columns.py @@ -11,7 +11,7 @@ revision = "9ea81ca0f64b" down_revision = "7bb5c0ca93de" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/256_add_account_webhook_columns.py b/migrations/versions/256_add_account_webhook_columns.py index ec81e7fe2..435fcd490 100644 --- a/migrations/versions/256_add_account_webhook_columns.py +++ b/migrations/versions/256_add_account_webhook_columns.py @@ -11,7 +11,7 @@ revision = "93cc6f4ce113" down_revision = "9ea81ca0f64b" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/257_remove_calendar_old_webhook_columns.py b/migrations/versions/257_remove_calendar_old_webhook_columns.py index cf9d8b190..dbf68d689 100644 --- a/migrations/versions/257_remove_calendar_old_webhook_columns.py +++ b/migrations/versions/257_remove_calendar_old_webhook_columns.py @@ -11,9 +11,9 @@ revision = "4af0d2f17967" down_revision = "93cc6f4ce113" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/258_remove_account_old_webhook_columns.py b/migrations/versions/258_remove_account_old_webhook_columns.py index 4d8919a91..d867fdafa 100644 --- a/migrations/versions/258_remove_account_old_webhook_columns.py +++ b/migrations/versions/258_remove_account_old_webhook_columns.py @@ -11,9 +11,9 @@ revision = "e9e932c6c55e" down_revision = "4af0d2f17967" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/259_add_calendar_sync_account_mixin.py b/migrations/versions/259_add_calendar_sync_account_mixin.py index 61ea4fb7d..10c3e5247 100644 --- a/migrations/versions/259_add_calendar_sync_account_mixin.py +++ b/migrations/versions/259_add_calendar_sync_account_mixin.py @@ -11,9 +11,9 @@ revision = "f9dab5e44c0f" down_revision = "e9e932c6c55e" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op -from sqlalchemy.dialects import mysql +from sqlalchemy.dialects import mysql # type: ignore[import-untyped] def upgrade() -> None: diff --git a/migrations/versions/260_add_event_conference_data.py b/migrations/versions/260_add_event_conference_data.py index 021ef5300..e27d89603 100644 --- a/migrations/versions/260_add_event_conference_data.py +++ b/migrations/versions/260_add_event_conference_data.py @@ -11,7 +11,7 @@ revision = "fe0488decbd1" down_revision = "f9dab5e44c0f" -import sqlalchemy as sa +import sqlalchemy as sa # type: ignore[import-untyped] from alembic import op diff --git a/migrations/versions/261_add_ix_imapuid_account_id_folder_id_msg_uid_desc.py b/migrations/versions/261_add_ix_imapuid_account_id_folder_id_msg_uid_desc.py index 4d981a8f3..ac1b599d6 100644 --- a/migrations/versions/261_add_ix_imapuid_account_id_folder_id_msg_uid_desc.py +++ b/migrations/versions/261_add_ix_imapuid_account_id_folder_id_msg_uid_desc.py @@ -11,7 +11,7 @@ revision = "e3cf974d07a5" down_revision = "fe0488decbd1" -import sqlalchemy as sa +import sqlalchemy as sa # 
type: ignore[import-untyped] from alembic import op diff --git a/pyproject.toml b/pyproject.toml index c9bc46484..5a865eae4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -153,3 +153,31 @@ unfixable = [ "bin/**.py" = ["T201", "E402", "N999"] "migrations/**.py" = ["T201", "E402"] "tests/**.py" = ["ANN"] + +[tool.mypy] +python_version = "3.12" +strict_equality = true +warn_unreachable = true +warn_unused_ignores = true +warn_redundant_casts = true +show_error_context = true +enable_error_code = [ + "unused-awaitable", + "redundant-self", + "redundant-expr", + "possibly-undefined", + "ignore-without-code", + "truthy-iterable", + "truthy-bool", +] +pretty = true +files = [ + "bin", + "inbox", + "migrations", +] +namespace_packages = true +check_untyped_defs = true +disallow_any_generics = true +disallow_untyped_defs = true +disallow_incomplete_defs = true diff --git a/requirements/requirements-lint.in b/requirements/requirements-lint.in index 951c3ba69..8875600e1 100644 --- a/requirements/requirements-lint.in +++ b/requirements/requirements-lint.in @@ -2,14 +2,15 @@ # This file should only contain dependencies we use directly. # Indirect dependencies should be pinned in the autogenerated files. -black==24.8.0 -mypy==1.13.0 -ruff==0.8.2 -typing_extensions==4.12.2 -mypy_extensions==1.0.0 -types-boto==2.49.18.20241019 -types-python-dateutil==2.9.0.20241003 -types-pytz==2024.2.0.20241003 -types-PyYAML==6.0.12.20240917 -types-redis==4.6.0.20241004 -types-requests==2.32.0.20241016 +-c requirements-prod.txt + +black +mypy +ruff +typing_extensions +types-boto +types-python-dateutil +types-pytz +types-PyYAML +types-redis +types-requests diff --git a/requirements/requirements-lint.txt b/requirements/requirements-lint.txt index bad023e25..5ebfc4e67 100644 --- a/requirements/requirements-lint.txt +++ b/requirements/requirements-lint.txt @@ -96,42 +96,45 @@ cffi==1.17.1 \ --hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \ --hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b - # via cryptography + # via + # -c requirements-prod.txt + # cryptography click==8.1.7 \ --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de - # via black -cryptography==44.0.0 \ - --hash=sha256:1923cb251c04be85eec9fda837661c67c1049063305d6be5721643c22dd4e2b7 \ - --hash=sha256:37d76e6863da3774cd9db5b409a9ecfd2c71c981c38788d3fcfaf177f447b731 \ - --hash=sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b \ - --hash=sha256:404fdc66ee5f83a1388be54300ae978b2efd538018de18556dde92575e05defc \ - --hash=sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543 \ - --hash=sha256:60eb32934076fa07e4316b7b2742fa52cbb190b42c2df2863dbc4230a0a9b385 \ - --hash=sha256:62901fb618f74d7d81bf408c8719e9ec14d863086efe4185afd07c352aee1d2c \ - --hash=sha256:660cb7312a08bc38be15b696462fa7cc7cd85c3ed9c576e81f4dc4d8b2b31591 \ - --hash=sha256:708ee5f1bafe76d041b53a4f95eb28cdeb8d18da17e597d46d7833ee59b97ede \ - --hash=sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb \ - --hash=sha256:831c3c4d0774e488fdc83a1923b49b9957d33287de923d58ebd3cec47a0ae43f \ - --hash=sha256:84111ad4ff3f6253820e6d3e58be2cc2a00adb29335d4cacb5ab4d4d34f2a123 \ - --hash=sha256:8b3e6eae66cf54701ee7d9c83c30ac0a1e3fa17be486033000f2a73a12ab507c \ - 
--hash=sha256:9abcc2e083cbe8dde89124a47e5e53ec38751f0d7dfd36801008f316a127d7ba \ - --hash=sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c \ - --hash=sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285 \ - --hash=sha256:abc998e0c0eee3c8a1904221d3f67dcfa76422b23620173e28c11d3e626c21bd \ - --hash=sha256:b15492a11f9e1b62ba9d73c210e2416724633167de94607ec6069ef724fad092 \ - --hash=sha256:be4ce505894d15d5c5037167ffb7f0ae90b7be6f2a98f9a5c3442395501c32fa \ - --hash=sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289 \ - --hash=sha256:cd4e834f340b4293430701e772ec543b0fbe6c2dea510a5286fe0acabe153a02 \ - --hash=sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64 \ - --hash=sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053 \ - --hash=sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417 \ - --hash=sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e \ - --hash=sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e \ - --hash=sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7 \ - --hash=sha256:f5e7cb1e5e56ca0933b4873c0220a78b773b24d40d186b6738080b73d3d0a756 \ - --hash=sha256:f677e1268c4e23420c3acade68fac427fffcb8d19d7df95ed7ad17cdef8404f4 # via + # -c requirements-prod.txt + # black +cryptography==43.0.1 \ + --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ + --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ + --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \ + --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \ + --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \ + --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \ + --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \ + --hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \ + --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \ + --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \ + --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \ + --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \ + --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \ + --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \ + --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \ + --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \ + --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \ + --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \ + --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \ + --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \ + --hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \ + --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \ + --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \ + --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \ + --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \ + --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ + 
+    --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289
+    # via
+    #   -c requirements-prod.txt
     #   types-pyopenssl
     #   types-redis
 mypy==1.13.0 \
@@ -172,13 +175,14 @@ mypy-extensions==1.0.0 \
     --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \
     --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782
     # via
-    #   -r requirements-lint.in
     #   black
     #   mypy
 packaging==24.2 \
     --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \
     --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f
-    # via black
+    # via
+    #   -c requirements-prod.txt
+    #   black
 pathspec==0.12.1 \
     --hash=sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 \
     --hash=sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712
@@ -187,10 +191,12 @@ platformdirs==4.3.6 \
     --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \
     --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb
     # via black
-pycparser==2.22 \
-    --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \
-    --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc
-    # via cffi
+pycparser==2.21 \
+    --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
+    --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
+    # via
+    #   -c requirements-prod.txt
+    #   cffi
 ruff==0.8.2 \
     --hash=sha256:1ca4e3a87496dc07d2427b7dd7ffa88a1e597c28dad65ae6433ecb9f2e4f022f \
     --hash=sha256:2aae99ec70abf43372612a838d97bfe77d45146254568d94926e8ed5bbb409ea \
@@ -239,21 +245,22 @@ types-redis==4.6.0.20241004 \
     --hash=sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e \
     --hash=sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed
     # via -r requirements-lint.in
-types-requests==2.32.0.20241016 \
-    --hash=sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95 \
-    --hash=sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747
+types-requests==2.31.0.6 \
+    --hash=sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9 \
+    --hash=sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0
     # via -r requirements-lint.in
 types-setuptools==75.6.0.20241126 \
     --hash=sha256:7bf25ad4be39740e469f9268b6beddda6e088891fa5a27e985c6ce68bf62ace0 \
     --hash=sha256:aaae310a0e27033c1da8457d4d26ac673b0c8a0de7272d6d4708e263f2ea3b9b
     # via types-cffi
+types-urllib3==1.26.25.14 \
+    --hash=sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f \
+    --hash=sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e
+    # via types-requests
 typing-extensions==4.12.2 \
     --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
     --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8
     # via
+    #   -c requirements-prod.txt
     #   -r requirements-lint.in
     #   mypy
-urllib3==2.2.3 \
-    --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \
-    --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9
-    # via types-requests
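With `-c requirements-prod.txt` in effect, every lint dependency shared with production now resolves to the production pin: cryptography 44.0.0 falls back to 43.0.1, pycparser 2.22 to 2.21, and types-requests drops to 2.31.0.6, the last stubs release compatible with production's urllib3 1.26.20 pin (which is why types-urllib3 appears and the urllib3 2.2.3 entry disappears). A consistency check along these lines could confirm the two lockfiles agree (a hypothetical helper, not part of this changeset; paths assume the requirements/ layout used above):

    #!/usr/bin/env python3
    """Report packages pinned to different versions in two pip-compile lockfiles."""
    import re
    from pathlib import Path

    # Matches pip-compile pin lines such as "cryptography==43.0.1 \"
    PIN = re.compile(r"^([A-Za-z0-9_.-]+)==([^ \\\n]+)", re.MULTILINE)

    def pins(path: Path) -> dict[str, str]:
        return {name.lower(): ver for name, ver in PIN.findall(path.read_text())}

    prod = pins(Path("requirements/requirements-prod.txt"))
    lint = pins(Path("requirements/requirements-lint.txt"))

    for name in sorted(lint.keys() & prod.keys()):
        if lint[name] != prod[name]:
            print(f"{name}: lint pins {lint[name]}, prod pins {prod[name]}")

After this change the script should print nothing: the constraint file makes pip-compile pick lint versions that already satisfy production's pins.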
diff --git a/requirements/requirements-prod.in b/requirements/requirements-prod.in
index 5a8e1d47f..201c11482 100644
--- a/requirements/requirements-prod.in
+++ b/requirements/requirements-prod.in
@@ -82,7 +82,7 @@ stack-data==0.6.2
 tldextract==3.1.2
 structlog==21.4.0
 traitlets==5.9.0
-typing_extensions==4.0.1
+typing_extensions
 urllib3==1.26.20
 vobject==0.9.6.1
 wcwidth==0.2.6
diff --git a/requirements/requirements-prod.txt b/requirements/requirements-prod.txt
index 7a12fb87d..3b6a03ff1 100644
--- a/requirements/requirements-prod.txt
+++ b/requirements/requirements-prod.txt
@@ -1109,9 +1109,9 @@ traitlets==5.9.0 \
     #   -r requirements-prod.in
     #   ipython
     #   matplotlib-inline
-typing-extensions==4.0.1 \
-    --hash=sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e \
-    --hash=sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b
+typing-extensions==4.12.2 \
+    --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
+    --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8
     # via -r requirements-prod.in
 urllib3==1.26.20 \
     --hash=sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e \
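The `typing_extensions` change ties the two files together: mypy itself requires a newer typing_extensions than the old 4.0.1 pin, and because the lint lockfile is now constrained by production, the production pin had to move (to 4.12.2 above) rather than diverge in lint. One concrete effect, sketched with hypothetical classes (`override` is typing_extensions API added well after 4.0.1):

    from typing_extensions import override


    class Base:
        def sync(self) -> None: ...


    class Child(Base):
        @override  # mypy errors here if Base.sync is renamed or removed
        def sync(self) -> None: ...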