From cbbe6eb4f22f0bc781aab5d6ba3bd62d730da888 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Andrzej=20Bartosi=C5=84ski?=
Date: Wed, 11 Dec 2024 16:37:39 +0100
Subject: [PATCH] Autoadd `# noqa` with `ruff check . --add-noqa`

---
 bin/__init__.py | 1 +
 bin/backfix-duplicate-categories.py | 2 +-
 bin/backfix-generic-imap-separators.py | 2 +-
 bin/check-attachments.py | 4 +-
 bin/clear-all-heartbeats.py | 2 +-
 bin/clear-db.py | 2 +-
 bin/clear-heartbeat-status.py | 2 +-
 bin/clear-kv.py | 2 +-
 bin/correct-autoincrements.py | 2 +-
 bin/create-db.py | 4 +-
 bin/create-encryption-keys.py | 6 +-
 bin/create-event-contact-associations.py | 2 +-
 bin/delete-account-data.py | 4 +-
 bin/delete-marked-accounts.py | 2 +-
 bin/detect-missing-sync-host.py | 4 +-
 bin/get-accounts-for-host.py | 2 +-
 bin/get-id.py | 4 +-
 bin/get-object.py | 4 +-
 bin/inbox-api.py | 4 +-
 bin/inbox-auth.py | 2 +-
 bin/inbox-console.py | 2 +-
 bin/inbox-start.py | 8 +-
 bin/migrate-db.py | 4 +-
 bin/mysql-prompt.py | 4 +-
 bin/purge-transaction-log.py | 2 +-
 bin/remove-message-attachments.py | 2 +-
 bin/restart-forgotten-accounts.py | 2 +-
 bin/set-desired-host.py | 2 +-
 bin/set-throttled.py | 6 +-
 bin/stamp-db.py | 4 +-
 bin/sync-single-account.py | 2 +-
 bin/syncback-service.py | 8 +-
 bin/syncback-stats.py | 2 +-
 bin/unschedule-account-syncs.py | 6 +-
 bin/update-categories.py | 2 +-
 bin/verify-db.py | 2 +-
 inbox/actions/base.py | 2 +-
 inbox/api/err.py | 10 +-
 inbox/api/filtering.py | 20 +-
 inbox/api/kellogs.py | 26 +--
 inbox/api/metrics_api.py | 8 +-
 inbox/api/ns_api.py | 171 ++++++++++--------
 inbox/api/sending.py | 8 +-
 inbox/api/srv.py | 26 +--
 inbox/api/update.py | 14 +-
 inbox/api/validation.py | 90 +++++----
 inbox/api/wsgi.py | 4 +-
 inbox/auth/base.py | 18 +-
 inbox/auth/generic.py | 10 +-
 inbox/auth/google.py | 12 +-
 inbox/auth/microsoft.py | 12 +-
 inbox/auth/oauth.py | 16 +-
 inbox/auth/utils.py | 12 +-
 inbox/config.py | 8 +-
 inbox/console.py | 12 +-
 inbox/contacts/algorithms.py | 20 +-
 inbox/contacts/carddav.py | 14 +-
 inbox/contacts/crud.py | 4 +-
 inbox/contacts/google.py | 18 +-
 inbox/contacts/icloud.py | 6 +-
 inbox/contacts/processing.py | 2 +-
 inbox/contacts/vcard.py | 42 ++---
 inbox/crispin.py | 90 ++++-----
 inbox/error_handling.py | 8 +-
 inbox/events/google.py | 24 +--
 inbox/events/ical.py | 22 ++-
 inbox/events/microsoft/graph_client.py | 6 +-
 inbox/events/microsoft/parse.py | 8 +-
 inbox/events/recurring.py | 16 +-
 inbox/events/remote_sync.py | 6 +-
 inbox/events/util.py | 20 +-
 inbox/exceptions.py | 2 +-
 inbox/folder_edge_cases.py | 2 +-
 inbox/heartbeat/config.py | 6 +-
 inbox/heartbeat/status.py | 8 +-
 inbox/heartbeat/store.py | 34 ++--
 inbox/ignition.py | 16 +-
 inbox/instrumentation.py | 6 +-
 inbox/interruptible_threading.py | 2 +-
 inbox/logging.py | 14 +-
 inbox/mailsync/backends/gmail.py | 16 +-
 inbox/mailsync/backends/imap/common.py | 4 +-
 inbox/mailsync/backends/imap/generic.py | 50 ++---
 inbox/mailsync/backends/imap/monitor.py | 6 +-
 inbox/mailsync/frontend.py | 4 +-
 inbox/mailsync/service.py | 18 +-
 inbox/models/account.py | 36 ++--
 inbox/models/action_log.py | 4 +-
 inbox/models/backends/generic.py | 14 +-
 inbox/models/backends/gmail.py | 16 +-
 inbox/models/backends/imap.py | 24 +--
 inbox/models/backends/oauth.py | 14 +-
 inbox/models/backends/outlook.py | 12 +-
 inbox/models/base.py | 4 +-
 inbox/models/block.py | 8 +-
 inbox/models/category.py | 26 +--
 inbox/models/contact.py | 4 +-
 inbox/models/data_processing.py | 4 +-
 inbox/models/event.py | 42 ++---
 inbox/models/folder.py | 8 +-
 inbox/models/label.py | 4 +-
 inbox/models/message.py | 30 +--
 inbox/models/mixins.py | 16 +-
 inbox/models/namespace.py | 6 +-
 inbox/models/secret.py | 2 +-
 inbox/models/session.py | 26 +--
 inbox/models/thread.py | 28 +--
 inbox/models/transaction.py | 2 +-
 inbox/models/util.py | 20 +-
 inbox/models/when.py | 8 +-
 inbox/providers.py | 2 +-
 inbox/s3/backends/gmail.py | 4 +-
 inbox/s3/backends/imap.py | 6 +-
 inbox/s3/base.py | 2 +-
 inbox/search/backends/gmail.py | 14 +-
 inbox/search/backends/imap.py | 20 +-
 inbox/search/base.py | 2 +-
 inbox/security/blobstorage.py | 2 +-
 inbox/security/oracles.py | 10 +-
 inbox/sendmail/base.py | 12 +-
 inbox/sendmail/message.py | 10 +-
 inbox/sendmail/smtp/postel.py | 59 +++---
 inbox/sqlalchemy_ext/json_util.py | 12 +-
 inbox/sqlalchemy_ext/util.py | 32 ++--
 inbox/sync/base_sync.py | 6 +-
 inbox/transactions/actions.py | 24 +--
 inbox/transactions/delta_sync.py | 8 +-
 inbox/util/addr.py | 2 +-
 inbox/util/blockstore.py | 18 +-
 inbox/util/concurrency.py | 6 +-
 inbox/util/db.py | 4 +-
 inbox/util/encoding.py | 2 +-
 inbox/util/file.py | 14 +-
 inbox/util/html.py | 2 +-
 inbox/util/itert.py | 2 +-
 inbox/util/misc.py | 24 +--
 inbox/util/rdb.py | 8 +-
 inbox/util/sharding.py | 8 +-
 inbox/util/startup.py | 9 +-
 inbox/util/stats.py | 2 +-
 inbox/util/testutils.py | 44 ++---
 inbox/util/threading.py | 4 +-
 inbox/util/url.py | 16 +-
 inbox/webhooks/google_notifications.py | 36 ++--
 inbox/webhooks/microsoft_notifications.py | 10 +-
 .../versions/005_import_old_accounts.py | 8 +-
 .../versions/007_per_provider_table_split.py | 16 +-
 .../012_move_google_userinfo_fields_to_.py | 8 +-
 migrations/versions/019_blocks_to_parts.py | 8 +-
 .../023_tighten_nullable_constraints_on_.py | 2 +-
 ...024_remote_folders_and_inbox_tags_split.py | 4 +-
 ...026_add_audit_timestamps_to_all_objects.py | 2 +-
 migrations/versions/028_tag_api_migration.py | 4 +-
 .../029_set_inbox_folder_exposed_name.py | 2 +-
 .../030_add_is_read_attribute_to_messages.py | 2 +-
 .../versions/031_add_indexes_to_timestamps.py | 4 +-
 migrations/versions/032_tighten_easuid.py | 4 +-
 migrations/versions/033_add_more_indexes.py | 4 +-
 .../036_replace_usertag_by_generic_tag.py | 4 +-
 .../038_add_public_ids_to_transactions.py | 2 +-
 ..._change_easfoldersync_unique_constraint.py | 4 +-
 migrations/versions/040_gmailaccount.py | 2 +-
 ...1_add_sync_status_columns_to_foldersync.py | 4 +-
 .../versions/045_new_password_storage.py | 12 +-
 .../versions/050_imap_table_cleanups.py | 2 +-
 .../051_store_secrets_in_local_vault.py | 4 +-
 .../versions/053_canonicalize_addresses.py | 4 +-
 ...consolidate_account_sync_status_columns.py | 2 +-
 ...enforce_length_limit_of_255_on_message_.py | 2 +-
 .../060_cascade_folder_deletes_to_easuid.py | 2 +-
 ...remove_easfoldersyncstatus_folder_rows_.py | 4 +-
 .../064_make_address_fields_non_null.py | 2 +-
 migrations/versions/066_kill_spoolmessage.py | 2 +-
 ..._easfoldersyncstatus_unique_constraints.py | 2 +-
 migrations/versions/072_recompute_snippets.py | 8 +-
 migrations/versions/073_generic_providers.py | 8 +-
 .../versions/074_add_eas_thrid_index.py | 4 +-
 migrations/versions/079_events_longer_uids.py | 2 +-
 migrations/versions/086_event_date_times.py | 2 +-
 .../versions/088_calendar_descriptions.py | 2 +-
 migrations/versions/089_revert_encryption.py | 2 +-
 migrations/versions/094_eas_passwords.py | 2 +-
 .../versions/096_migrate_secret_data.py | 4 +-
 migrations/versions/107_drop_eas_state.py | 2 +-
 .../versions/108_easaccount_username.py | 2 +-
 .../versions/115_eas_twodevices_turn.py | 2 +-
 .../versions/141_remote_remote_contacts.py | 4 +-
 .../versions/150_add_polymorphic_events.py | 12 +-
 .../versions/166_migrate_body_format.py | 2 +-
 .../versions/170_update_easuid_schema_2.py | 2 +-
 .../versions/171_update_easuid_schema_3.py | 2 +-
 ...5_backfill_gmail_auth_credentials_table.py | 2 +-
 .../187_migrate_data_for_folders_overhaul.py | 2 +-
 ...alculate_receivedrecentdate_for_threads.py | 4 +-
 .../versions/194_extend_eas_folder_id.py | 4 +-
 .../196_create_outlook_account_column.py | 4 +-
 tests/api/conftest.py | 2 +-
 tests/api/test_drafts.py | 9 +-
 tests/api/test_event_when.py | 2 +-
 tests/api/test_events_recurring.py | 2 +-
 tests/api/test_files.py | 12 +-
 tests/api/test_searching.py | 2 +-
 tests/api/test_streaming.py | 2 +-
 tests/auth/providers/mock_gmail.py | 4 +-
 tests/auth/test_generic_auth.py | 2 +-
 tests/conftest.py | 2 +-
 tests/events/microsoft/conftest.py | 2 +-
 .../events/microsoft/test_events_provider.py | 2 +-
 tests/events/microsoft/test_graph_client.py | 2 +-
 tests/events/microsoft/test_parse.py | 2 +-
 tests/events/test_google_events.py | 6 +-
 tests/events/test_ics_parsing.py | 120 ++++++++----
 tests/events/test_recurrence.py | 8 +-
 tests/general/test_message_parsing.py | 2 +-
 tests/general/test_mutable_json_type.py | 2 +-
 tests/general/test_namespace.py | 6 +-
 tests/general/test_thread_creation.py | 2 +-
 tests/heartbeat/test_heartbeat.py | 2 +-
 tests/imap/data.py | 2 +-
 tests/imap/network/test_actions_syncback.py | 2 +-
 tests/imap/network/test_drafts_syncback.py | 4 +-
 tests/imap/test_actions.py | 2 +-
 tests/imap/test_crispin_client.py | 2 +-
 tests/imap/test_delete_handling.py | 24 +--
 tests/imap/test_folder_sync.py | 2 +-
 tests/imap/test_full_imap_enabled.py | 2 +-
 tests/imap/test_labels.py | 6 +-
 tests/imap/test_update_metadata.py | 2 +-
 tests/security/test_secret.py | 2 +-
 .../transactions/test_transaction_deletion.py | 24 +--
 tests/util/base.py | 14 +-
 231 files changed, 1285 insertions(+), 1080 deletions(-)

diff --git a/bin/__init__.py b/bin/__init__.py
index e69de29bb..951e9a492 100755
--- a/bin/__init__.py
+++ b/bin/__init__.py
@@ -0,0 +1 @@
+# noqa: EXE002
diff --git a/bin/backfix-duplicate-categories.py b/bin/backfix-duplicate-categories.py
index 5057b4ed8..3a85ff73e 100755
--- a/bin/backfix-duplicate-categories.py
+++ b/bin/backfix-duplicate-categories.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python # noqa: N999
 # Duplicate categories were created because of an inadequate unique constraint
 # in MySQL. This script deletes duplicate categories with no messages
 # associated. If two or more duplicate categories exist with associated
diff --git a/bin/backfix-generic-imap-separators.py b/bin/backfix-generic-imap-separators.py
index fa92c8e9f..104225334 100755
--- a/bin/backfix-generic-imap-separators.py
+++ b/bin/backfix-generic-imap-separators.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python # noqa: N999
 # We previously didn't store IMAP path separators for generic imap accounts.
 # This script backfixes the accounts.
diff --git a/bin/check-attachments.py b/bin/check-attachments.py
index fc7d2cba6..414b3ba13 100755
--- a/bin/check-attachments.py
+++ b/bin/check-attachments.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python # noqa: N999
 # Check that we can fetch attachments for 99.9% of our syncing accounts.
import concurrent.futures import datetime @@ -20,7 +20,7 @@ NUM_MESSAGES = 10 -def process_account(account_id): +def process_account(account_id): # noqa: ANN201 ret = defaultdict(int) try: diff --git a/bin/clear-all-heartbeats.py b/bin/clear-all-heartbeats.py index 56aea03df..724a98449 100755 --- a/bin/clear-all-heartbeats.py +++ b/bin/clear-all-heartbeats.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 from sys import exit diff --git a/bin/clear-db.py b/bin/clear-db.py index 8c2c69e77..77f123f27 100755 --- a/bin/clear-db.py +++ b/bin/clear-db.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 import argparse import sys diff --git a/bin/clear-heartbeat-status.py b/bin/clear-heartbeat-status.py index 516a62f29..4070d7525 100755 --- a/bin/clear-heartbeat-status.py +++ b/bin/clear-heartbeat-status.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 from sys import exit diff --git a/bin/clear-kv.py b/bin/clear-kv.py index c0fa49362..bfa9e8335 100755 --- a/bin/clear-kv.py +++ b/bin/clear-kv.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 from sys import exit diff --git a/bin/correct-autoincrements.py b/bin/correct-autoincrements.py index 4d4258ddd..f41f0e80a 100755 --- a/bin/correct-autoincrements.py +++ b/bin/correct-autoincrements.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 import click diff --git a/bin/create-db.py b/bin/create-db.py index 1c50926f2..536f1f449 100755 --- a/bin/create-db.py +++ b/bin/create-db.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 import os @@ -81,7 +81,7 @@ def main(target_hostname, host_ip) -> None: alembic_ini_filename = os.environ.get( "ALEMBIC_INI_PATH", "alembic.ini" ) - assert os.path.isfile( + assert os.path.isfile( # noqa: PTH113 alembic_ini_filename ), f"Must have alembic.ini file at {alembic_ini_filename}" alembic_cfg = alembic.config.Config(alembic_ini_filename) diff --git a/bin/create-encryption-keys.py b/bin/create-encryption-keys.py index 804810124..7dc1a1340 100755 --- a/bin/create-encryption-keys.py +++ b/bin/create-encryption-keys.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 import binascii @@ -36,11 +36,11 @@ def main() -> None: # so it better exist. # Update it try: - with open(secrets_path, "a") as f: + with open(secrets_path, "a") as f: # noqa: PTH123 print(f"Writing keys to secrets config file {secrets_path}") yaml.dump(data, f, default_flow_style=False) except OSError: - raise Exception( + raise Exception( # noqa: B904 f"Check file write permissions on config file {secrets_path}" ) diff --git a/bin/create-event-contact-associations.py b/bin/create-event-contact-associations.py index 0b32b5681..eddc45dc2 100755 --- a/bin/create-event-contact-associations.py +++ b/bin/create-event-contact-associations.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 """ Create event contact associations for events that don't have any. """ diff --git a/bin/delete-account-data.py b/bin/delete-account-data.py index 4f8c98466..ea4086e90 100755 --- a/bin/delete-account-data.py +++ b/bin/delete-account-data.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 """ Deletes an account's data permanently. 
@@ -63,7 +63,7 @@ def delete_account_data(account_id, dry_run, yes, throttle) -> int | None: ) ) - answer = raw_input(question).strip().lower() + answer = raw_input(question).strip().lower() # noqa: F821 if answer != "yes": print("Will NOT delete, goodbye.") diff --git a/bin/delete-marked-accounts.py b/bin/delete-marked-accounts.py index 85a7f0417..cb7530b7b 100755 --- a/bin/delete-marked-accounts.py +++ b/bin/delete-marked-accounts.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 """ Searches for accounts that are marked for deletion and deletes all of their data diff --git a/bin/detect-missing-sync-host.py b/bin/detect-missing-sync-host.py index 9e6f303b9..1ddd71b9c 100755 --- a/bin/detect-missing-sync-host.py +++ b/bin/detect-missing-sync-host.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 import click @@ -17,7 +17,7 @@ def main() -> None: was _started_ on a new host without being first stopped on its previous host.) - """ + """ # noqa: D401 maybe_enable_rollbar() with global_session_scope() as db_session: diff --git a/bin/get-accounts-for-host.py b/bin/get-accounts-for-host.py index 8ee51c887..12bfe57db 100644 --- a/bin/get-accounts-for-host.py +++ b/bin/get-accounts-for-host.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: EXE001, N999 import click diff --git a/bin/get-id.py b/bin/get-id.py index 696bfb024..8396a3e8d 100755 --- a/bin/get-id.py +++ b/bin/get-id.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 # Query the id corresponding to a public id and vice-versa. @@ -40,7 +40,7 @@ def main(type, id, public_id) -> None: maybe_enable_rollbar() - type = type.lower() + type = type.lower() # noqa: A001 if type not in cls_for_type: print(f"Error: unknown type '{type}'") diff --git a/bin/get-object.py b/bin/get-object.py index 5e36bf96f..2cd22c5f6 100755 --- a/bin/get-object.py +++ b/bin/get-object.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 # Query the id corresponding to a public id and vice-versa. 
@@ -51,7 +51,7 @@ def main(type, id, public_id, account_id, namespace_id, readwrite) -> None: maybe_enable_rollbar() - type = type.lower() + type = type.lower() # noqa: A001 if type not in cls_for_type: print(f"Error: unknown type '{type}'") diff --git a/bin/inbox-api.py b/bin/inbox-api.py index a5af24abd..5af5277be 100755 --- a/bin/inbox-api.py +++ b/bin/inbox-api.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 import os import sys @@ -49,7 +49,7 @@ def main(prod, config, port) -> None: maybe_enable_rollbar() if config is not None: - config_path = os.path.abspath(config) + config_path = os.path.abspath(config) # noqa: PTH100 load_overrides(config_path) start(port=int(port), use_reloader=not prod) diff --git a/bin/inbox-auth.py b/bin/inbox-auth.py index e5301d8b8..5bcc8956a 100755 --- a/bin/inbox-auth.py +++ b/bin/inbox-auth.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 import sys diff --git a/bin/inbox-console.py b/bin/inbox-console.py index 77d0577ee..093055450 100755 --- a/bin/inbox-console.py +++ b/bin/inbox-console.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 from setproctitle import setproctitle diff --git a/bin/inbox-start.py b/bin/inbox-start.py index 0de89db24..060201961 100755 --- a/bin/inbox-start.py +++ b/bin/inbox-start.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 import os @@ -83,13 +83,15 @@ def main(prod, enable_profiler, config, process_num) -> None: if config is not None: from inbox.util.startup import load_overrides - config_path = os.path.abspath(config) + config_path = os.path.abspath(config) # noqa: PTH100 load_overrides(config_path) if not prod: preflight() - total_processes = int(os.environ.get("MAILSYNC_PROCESSES", 1)) + total_processes = int( + os.environ.get("MAILSYNC_PROCESSES", 1) # noqa: PLW1508 + ) setproctitle.setproctitle(f"sync-engine-{process_num}") diff --git a/bin/migrate-db.py b/bin/migrate-db.py index 87a45f955..a8d2ad0d3 100755 --- a/bin/migrate-db.py +++ b/bin/migrate-db.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 import os @@ -15,7 +15,7 @@ def main() -> None: maybe_enable_rollbar() alembic_ini_filename = os.environ.get("ALEMBIC_INI_PATH", "alembic.ini") - assert os.path.isfile( + assert os.path.isfile( # noqa: PTH113 alembic_ini_filename ), f"Missing alembic.ini file at {alembic_ini_filename}" diff --git a/bin/mysql-prompt.py b/bin/mysql-prompt.py index 64da47c61..9bf74e950 100755 --- a/bin/mysql-prompt.py +++ b/bin/mysql-prompt.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 import subprocess @@ -51,7 +51,7 @@ def main(shard_num: int | None, execute: str | None, batch: bool) -> None: creds["db_name"] = shard["SCHEMA_NAME"] break - for key in creds.keys(): + for key in creds.keys(): # noqa: PLC0206 if creds[key] is None: print(f"Error: {key} is None") sys.exit(-1) diff --git a/bin/purge-transaction-log.py b/bin/purge-transaction-log.py index da9af4aaf..55ed23f77 100755 --- a/bin/purge-transaction-log.py +++ b/bin/purge-transaction-log.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 """ Deletes entries in the transaction older than `days_ago` days( as measured by the created_at column) diff --git a/bin/remove-message-attachments.py b/bin/remove-message-attachments.py index 5188ad5e9..3a0549def 100755 --- a/bin/remove-message-attachments.py +++ b/bin/remove-message-attachments.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python 
+#!/usr/bin/env python # noqa: N999 import datetime import enum import logging diff --git a/bin/restart-forgotten-accounts.py b/bin/restart-forgotten-accounts.py index 7a4a41210..3a4754e01 100644 --- a/bin/restart-forgotten-accounts.py +++ b/bin/restart-forgotten-accounts.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: EXE001, N999 import time diff --git a/bin/set-desired-host.py b/bin/set-desired-host.py index bdac3770f..98a8c11ca 100644 --- a/bin/set-desired-host.py +++ b/bin/set-desired-host.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: EXE001, N999 import click diff --git a/bin/set-throttled.py b/bin/set-throttled.py index 9125fea4d..5b4862099 100644 --- a/bin/set-throttled.py +++ b/bin/set-throttled.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: EXE001, N999 # throttle or unthrottle an account @@ -49,7 +49,7 @@ def main() -> None: parser.add_option("--unthrottled", action="store_true", default=False) parser.add_option("--id", action="store", dest="account_id", default=None) parser.add_option("--stdin", action="store_true", default=False) - options, remainder = parser.parse_args(sys.argv[1:]) + options, remainder = parser.parse_args(sys.argv[1:]) # noqa: F841 if all(opt is False for opt in [options.throttled, options.unthrottled]): print_usage() sys.exit(-1) @@ -67,7 +67,7 @@ def main() -> None: if len(splat) < 2: continue - email, id = splat[:2] + email, id = splat[:2] # noqa: A001, F841 options.account_id = id throttle(options) diff --git a/bin/stamp-db.py b/bin/stamp-db.py index 763dd582a..e9bed0ec7 100755 --- a/bin/stamp-db.py +++ b/bin/stamp-db.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 import os @@ -16,7 +16,7 @@ def main(revision_id) -> None: maybe_enable_rollbar() alembic_ini_filename = os.environ.get("ALEMBIC_INI_PATH", "alembic.ini") - assert os.path.isfile( + assert os.path.isfile( # noqa: PTH113 alembic_ini_filename ), f"Missing alembic.ini file at {alembic_ini_filename}" diff --git a/bin/sync-single-account.py b/bin/sync-single-account.py index 4423e8a7a..a1b3828d5 100755 --- a/bin/sync-single-account.py +++ b/bin/sync-single-account.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 import logging from threading import BoundedSemaphore diff --git a/bin/syncback-service.py b/bin/syncback-service.py index 97968843c..7382138d6 100755 --- a/bin/syncback-service.py +++ b/bin/syncback-service.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 """ Run the syncback service separately. You should run this if you run the API under something like gunicorn. 
(For convenience, the bin/inbox-api script @@ -58,13 +58,15 @@ def main(prod, config, process_num, syncback_id, enable_profiler) -> None: print("Python", sys.version, file=sys.stderr) if config is not None: - config_path = os.path.abspath(config) + config_path = os.path.abspath(config) # noqa: PTH100 load_overrides(config_path) level = os.environ.get("LOGLEVEL", inbox_config.get("LOGLEVEL")) configure_logging(log_level=level) reconfigure_logging() - total_processes = int(os.environ.get("SYNCBACK_PROCESSES", 1)) + total_processes = int( + os.environ.get("SYNCBACK_PROCESSES", 1) # noqa: PLW1508 + ) def start(): # Start the syncback service, and just hang out forever diff --git a/bin/syncback-stats.py b/bin/syncback-stats.py index 403274099..05bf06afd 100755 --- a/bin/syncback-stats.py +++ b/bin/syncback-stats.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 import click from sqlalchemy import func diff --git a/bin/unschedule-account-syncs.py b/bin/unschedule-account-syncs.py index 327731faa..14fea671c 100755 --- a/bin/unschedule-account-syncs.py +++ b/bin/unschedule-account-syncs.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 import click @@ -27,7 +27,7 @@ def main(dry_run, number, hostname, process) -> None: "You have not provided a --number option. This will " "unschedule ALL syncs on the host. Proceed? [Y/n] " ) - if raw_input(message).strip().lower() == "n": + if raw_input(message).strip().lower() == "n": # noqa: F821 print("Will not proceed") return @@ -38,7 +38,7 @@ def main(dry_run, number, hostname, process) -> None: hostname ) ) - if raw_input(message).strip().lower() == "n": + if raw_input(message).strip().lower() == "n": # noqa: F821 print("Bailing out") return diff --git a/bin/update-categories.py b/bin/update-categories.py index 7349edcca..b4287437b 100755 --- a/bin/update-categories.py +++ b/bin/update-categories.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 import click diff --git a/bin/verify-db.py b/bin/verify-db.py index 01448eb3d..195550663 100755 --- a/bin/verify-db.py +++ b/bin/verify-db.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # noqa: N999 from inbox.config import config diff --git a/inbox/actions/base.py b/inbox/actions/base.py index fd134cb36..6d55162c1 100644 --- a/inbox/actions/base.py +++ b/inbox/actions/base.py @@ -51,7 +51,7 @@ log = get_logger() -def can_handle_multiple_records(action_name): +def can_handle_multiple_records(action_name): # noqa: ANN201 return action_name == "change_labels" diff --git a/inbox/api/err.py b/inbox/api/err.py index 3464f8f80..11f29a039 100644 --- a/inbox/api/err.py +++ b/inbox/api/err.py @@ -8,10 +8,10 @@ log = get_logger() -from inbox.config import is_live_env +from inbox.config import is_live_env # noqa: E402 -def get_request_uid(headers): +def get_request_uid(headers): # noqa: ANN201 return headers.get("X-Unique-ID") @@ -26,9 +26,9 @@ def log_exception(exc_info, **kwargs) -> None: rollbar.report_exc_info(exc_info) if not is_live_env(): - print() + print() # noqa: T201 traceback.print_exc() - print() + print() # noqa: T201 new_log_context = create_error_log_context(exc_info) new_log_context.update(kwargs) @@ -110,7 +110,7 @@ class AccountDoesNotExistError(APIException): message = "The account does not exist." 
-def err(http_code, message, **kwargs): +def err(http_code, message, **kwargs): # noqa: ANN201 """Handle unexpected errors, including sending the traceback to Rollbar.""" log_exception(sys.exc_info(), user_error_message=message, **kwargs) resp = {"type": "api_error", "message": message} diff --git a/inbox/api/filtering.py b/inbox/api/filtering.py index 32e6fed21..4b3821fd6 100644 --- a/inbox/api/filtering.py +++ b/inbox/api/filtering.py @@ -19,7 +19,9 @@ from inbox.models.event import RecurringEvent -def contact_subquery(db_session, namespace_id, email_address, field): +def contact_subquery( # noqa: ANN201 + db_session, namespace_id, email_address, field +): return ( db_session.query(Message.thread_id) .join(MessageContactAssociation) @@ -33,7 +35,7 @@ def contact_subquery(db_session, namespace_id, email_address, field): ) -def threads( +def threads( # noqa: ANN201 namespace_id, subject, from_addr, @@ -203,7 +205,7 @@ def threads( return query.all() -def messages_or_drafts( +def messages_or_drafts( # noqa: ANN201 namespace_id, drafts, subject, @@ -486,7 +488,7 @@ def messages_or_drafts( return prepared.all() -def files( +def files( # noqa: ANN201 namespace_id, message_public_id, filename, @@ -543,7 +545,7 @@ def files( return query.all() -def filter_event_query( +def filter_event_query( # noqa: ANN201 query, event_cls, namespace_id, @@ -584,7 +586,7 @@ def filter_event_query( return query -def recurring_events( +def recurring_events( # noqa: ANN201 filters, starts_before, starts_after, @@ -642,7 +644,7 @@ def recurring_events( return recur_instances -def events( +def events( # noqa: ANN201 namespace_id, event_public_id, calendar_public_id, @@ -822,7 +824,9 @@ def events( return all_events -def messages_for_contact_scores(db_session, namespace_id, starts_after=None): +def messages_for_contact_scores( # noqa: ANN201 + db_session, namespace_id, starts_after=None +): query = ( db_session.query( Message.to_addr, diff --git a/inbox/api/kellogs.py b/inbox/api/kellogs.py index 956b1a36f..a1ae75e5d 100644 --- a/inbox/api/kellogs.py +++ b/inbox/api/kellogs.py @@ -30,13 +30,13 @@ log = get_logger() -def format_address_list(addresses): +def format_address_list(addresses): # noqa: ANN201 if addresses is None: return [] return [{"name": name, "email": email} for name, email in addresses] -def format_categories(categories): +def format_categories(categories): # noqa: ANN201 if categories is None: return [] return [ @@ -50,7 +50,7 @@ def format_categories(categories): ] -def format_messagecategories(messagecategories): +def format_messagecategories(messagecategories): # noqa: ANN201 if messagecategories is None: return [] return [ @@ -65,7 +65,7 @@ def format_messagecategories(messagecategories): ] -def format_phone_numbers(phone_numbers): +def format_phone_numbers(phone_numbers): # noqa: ANN201 formatted_phone_numbers = [] for number in phone_numbers: formatted_phone_numbers.append( @@ -74,7 +74,9 @@ def format_phone_numbers(phone_numbers): return formatted_phone_numbers -def encode(obj, namespace_public_id=None, expand=False, is_n1=False): +def encode( # noqa: ANN201 + obj, namespace_public_id=None, expand=False, is_n1=False +): try: return _encode(obj, namespace_public_id, expand, is_n1=is_n1) except Exception as e: @@ -100,7 +102,9 @@ def _convert_timezone_to_iana_tz(original_tz): return original_tz -def _encode(obj, namespace_public_id=None, expand=False, is_n1=False): +def _encode( # noqa: D417 + obj, namespace_public_id=None, expand=False, is_n1=False +): """ Returns a dictionary representation of 
a Nylas model object obj, or None if there is no such representation defined. If the optional @@ -118,7 +122,7 @@ def _encode(obj, namespace_public_id=None, expand=False, is_n1=False): ------- dictionary or None - """ + """ # noqa: D401 def _get_namespace_public_id(obj): return namespace_public_id or obj.namespace.public_id @@ -468,7 +472,7 @@ def default(self, obj): return InternalEncoder - def cereal(self, obj, pretty=False): + def cereal(self, obj, pretty=False): # noqa: ANN201, D417 """ Returns the JSON string representation of obj. @@ -483,7 +487,7 @@ def cereal(self, obj, pretty=False): TypeError If obj is not serializable. - """ + """ # noqa: D401 if pretty: return dumps( obj, @@ -494,7 +498,7 @@ def cereal(self, obj, pretty=False): ) return dumps(obj, cls=self.encoder_class) - def jsonify(self, obj): + def jsonify(self, obj): # noqa: ANN201, D417 """ Returns a Flask Response object encapsulating the JSON representation of obj. @@ -508,7 +512,7 @@ def jsonify(self, obj): TypeError If obj is not serializable. - """ + """ # noqa: D401 return Response( self.cereal(obj, pretty=True), mimetype="application/json" ) diff --git a/inbox/api/metrics_api.py b/inbox/api/metrics_api.py index 0be206a60..e7d7cb365 100644 --- a/inbox/api/metrics_api.py +++ b/inbox/api/metrics_api.py @@ -91,7 +91,7 @@ def _get_folder_data(db_session, accounts): @app.route("/") -def index(): +def index(): # noqa: ANN201 with global_session_scope() as db_session: if "namespace_id" in request.args: try: @@ -252,7 +252,7 @@ def index(): } ) except Exception: - log.error( + log.error( # noqa: G201 "Error while serializing account metrics", account_id=account.id, exc_info=True, @@ -262,7 +262,7 @@ def index(): @app.route("/global-deltas") -def global_deltas(): +def global_deltas(): # noqa: ANN201 """ Return the namespaces with recent transactions. @@ -278,7 +278,7 @@ def global_deltas(): try: start_pointer = int(txnid) except ValueError: - raise InputError("Invalid cursor parameter") + raise InputError("Invalid cursor parameter") # noqa: B904 txns = redis_txn.zrangebyscore( TXN_REDIS_KEY, diff --git a/inbox/api/ns_api.py b/inbox/api/ns_api.py index 372097c1e..4c9ab192a 100644 --- a/inbox/api/ns_api.py +++ b/inbox/api/ns_api.py @@ -145,10 +145,11 @@ # TODO perhaps expand to encompass non-standard mimetypes too # see python mimetypes library common_extensions = {} -mt_path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "mime_types.txt" +mt_path = os.path.join( # noqa: PTH118 + os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120 + "mime_types.txt", ) -with open(mt_path) as f: +with open(mt_path) as f: # noqa: PTH123 for x in f: x = x.strip() if not x or x.startswith("#"): @@ -244,7 +245,7 @@ def before_remote_request() -> None: @app.after_request -def finish(response): +def finish(response): # noqa: ANN201 if response.status_code == 200 and hasattr(g, "db_session"): # be cautious g.db_session.commit() if hasattr(g, "db_session"): @@ -253,7 +254,7 @@ def finish(response): @app.errorhandler(OperationalError) -def handle_operational_error(error): +def handle_operational_error(error): # noqa: ANN201 rule = request.url_rule if "send" in rule.rule and "rsvp" not in rule.rule: message = "A temporary database error prevented us from serving this request. Your message has NOT been sent. Please try again in a few minutes." 
@@ -267,7 +268,7 @@ def handle_operational_error(error): @app.errorhandler(NotImplementedError) -def handle_not_implemented_error(error): +def handle_not_implemented_error(error): # noqa: ANN201 request.environ["log_context"]["error"] = "NotImplementedError" response = flask_jsonify( message="API endpoint not yet implemented", type="api_error" @@ -277,7 +278,7 @@ def handle_not_implemented_error(error): @app.errorhandler(APIException) -def handle_input_error(error): +def handle_input_error(error): # noqa: ANN201 # these "errors" are normal, so we don't need to save a traceback request.environ["log_context"]["error"] = error.__class__.__name__ request.environ["log_context"]["error_message"] = error.message @@ -289,7 +290,7 @@ def handle_input_error(error): @app.errorhandler(Exception) -def handle_generic_error(error): +def handle_generic_error(error): # noqa: ANN201 log_exception(sys.exc_info()) response = flask_jsonify( message="An internal error occured. If this issue persists, please contact support@nylas.com and include this request_uid: {}".format( @@ -301,7 +302,7 @@ def handle_generic_error(error): @app.route("/account") -def one_account(): +def one_account(): # noqa: ANN201 g.parser.add_argument("view", type=view, location="args") args = strict_parse_args(g.parser, request.args) # Use a new encoder object with the expand parameter set. @@ -313,7 +314,7 @@ def one_account(): # Sync status (enable/disable account / throttling) # @app.route("/status/", methods=["GET", "PUT"]) -def status(): +def status(): # noqa: ANN201 account = g.namespace.account # Don't allow resuming accounts marked for deletion. @@ -343,7 +344,7 @@ def status(): # Threads # @app.route("/threads/") -def thread_query_api(): +def thread_query_api(): # noqa: ANN201 g.parser.add_argument("subject", type=bounded_str, location="args") g.parser.add_argument("to", type=bounded_str, location="args") g.parser.add_argument("from", type=bounded_str, location="args") @@ -402,7 +403,7 @@ def thread_query_api(): @app.route("/threads/search", methods=["GET"]) -def thread_search_api(): +def thread_search_api(): # noqa: ANN201 g.parser.add_argument("q", type=bounded_str, location="args") args = strict_parse_args(g.parser, request.args) if not args["q"]: @@ -429,7 +430,7 @@ def thread_search_api(): @app.route("/threads/search/streaming", methods=["GET"]) -def thread_streaming_search_api(): +def thread_streaming_search_api(): # noqa: ANN201 g.parser.add_argument("q", type=bounded_str, location="args") args = strict_parse_args(g.parser, request.args) if not args["q"]: @@ -457,7 +458,7 @@ def thread_streaming_search_api(): @app.route("/threads/") -def thread_api(public_id): +def thread_api(public_id): # noqa: ANN201 g.parser.add_argument("view", type=view, location="args") args = strict_parse_args(g.parser, request.args) # Use a new encoder object with the expand parameter set. 
@@ -475,14 +476,16 @@ def thread_api(public_id): ) return encoder.jsonify(thread) except NoResultFound: - raise NotFoundError(f"Couldn't find thread `{public_id}`") + raise NotFoundError( # noqa: B904 + f"Couldn't find thread `{public_id}`" + ) # # Update thread # @app.route("/threads/", methods=["PUT", "PATCH"]) -def thread_api_update(public_id): +def thread_api_update(public_id): # noqa: ANN201 try: valid_public_id(public_id) thread = ( @@ -495,7 +498,9 @@ def thread_api_update(public_id): .one() ) except NoResultFound: - raise NotFoundError(f"Couldn't find thread `{public_id}` ") + raise NotFoundError( # noqa: B904 + f"Couldn't find thread `{public_id}` " + ) data = request.get_json(force=True) if not isinstance(data, dict): raise InputError("Invalid request body") @@ -512,7 +517,7 @@ def thread_api_update(public_id): # @app.route("/threads/", methods=["DELETE"]) def thread_api_delete(public_id) -> Never: - """Moves the thread to the trash""" + """Moves the thread to the trash""" # noqa: D401 raise NotImplementedError @@ -520,7 +525,7 @@ def thread_api_delete(public_id) -> Never: # Messages ## @app.route("/messages/") -def message_query_api(): +def message_query_api(): # noqa: ANN201 g.parser.add_argument("subject", type=bounded_str, location="args") g.parser.add_argument("to", type=bounded_str, location="args") g.parser.add_argument("from", type=bounded_str, location="args") @@ -580,7 +585,7 @@ def message_query_api(): @app.route("/messages/search", methods=["GET"]) -def message_search_api(): +def message_search_api(): # noqa: ANN201 g.parser.add_argument("q", type=bounded_str, location="args") args = strict_parse_args(g.parser, request.args) if not args["q"]: @@ -607,7 +612,7 @@ def message_search_api(): @app.route("/messages/search/streaming", methods=["GET"]) -def message_streaming_search_api(): +def message_streaming_search_api(): # noqa: ANN201 g.parser.add_argument("q", type=bounded_str, location="args") args = strict_parse_args(g.parser, request.args) if not args["q"]: @@ -635,7 +640,7 @@ def message_streaming_search_api(): @app.route("/messages/", methods=["GET"]) -def message_read_api(public_id): +def message_read_api(public_id): # noqa: ANN201 g.parser.add_argument("view", type=view, location="args") args = strict_parse_args(g.parser, request.args) encoder = APIEncoder(g.namespace.public_id, args["view"] == "expanded") @@ -646,7 +651,7 @@ def message_read_api(public_id): public_id, g.namespace.id, g.db_session ) except NoResultFound: - raise NotFoundError(f"Couldn't find message {public_id}") + raise NotFoundError(f"Couldn't find message {public_id}") # noqa: B904 if request.headers.get("Accept", None) == "message/rfc822": raw_message = blockstore.get_raw_mime(message.data_sha256) @@ -717,7 +722,7 @@ def message_read_api(public_id): @app.route("/messages/", methods=["PUT", "PATCH"]) -def message_update_api(public_id): +def message_update_api(public_id): # noqa: ANN201 try: valid_public_id(public_id) message = ( @@ -729,7 +734,9 @@ def message_update_api(public_id): .one() ) except NoResultFound: - raise NotFoundError(f"Couldn't find message {public_id} ") + raise NotFoundError( # noqa: B904 + f"Couldn't find message {public_id} " + ) data = request.get_json(force=True) if not isinstance(data, dict): raise InputError("Invalid request body") @@ -744,7 +751,7 @@ def message_update_api(public_id): # Folders / Labels @app.route("/folders") @app.route("/labels") -def folders_labels_query_api(): +def folders_labels_query_api(): # noqa: ANN201 category_type = 
g.namespace.account.category_type rule = request.url_rule.rule valid_category_type(category_type, rule) @@ -773,16 +780,16 @@ def folders_labels_query_api(): @app.route("/folders/") -def folder_api(public_id): +def folder_api(public_id): # noqa: ANN201 return folders_labels_api_impl(public_id) @app.route("/labels/") -def label_api(public_id): +def label_api(public_id): # noqa: ANN201 return folders_labels_api_impl(public_id) -def folders_labels_api_impl(public_id): +def folders_labels_api_impl(public_id): # noqa: ANN201 category_type = g.namespace.account.category_type rule = request.url_rule.rule valid_category_type(category_type, rule) @@ -798,13 +805,13 @@ def folders_labels_api_impl(public_id): .one() ) except NoResultFound: - raise NotFoundError("Object not found") + raise NotFoundError("Object not found") # noqa: B904 return g.encoder.jsonify(category) @app.route("/folders", methods=["POST"]) @app.route("/labels", methods=["POST"]) -def folders_labels_create_api(): +def folders_labels_create_api(): # noqa: ANN201 category_type = g.namespace.account.category_type rule = request.url_rule.rule valid_category_type(category_type, rule) @@ -864,7 +871,7 @@ def folders_labels_create_api(): @app.route("/folders/", methods=["PUT", "PATCH"]) @app.route("/labels/", methods=["PUT", "PATCH"]) -def folder_label_update_api(public_id): +def folder_label_update_api(public_id): # noqa: ANN201 category_type = g.namespace.account.category_type rule = request.url_rule.rule valid_category_type(category_type, rule) @@ -880,7 +887,9 @@ def folder_label_update_api(public_id): .one() ) except NoResultFound: - raise InputError(f"Couldn't find {category_type} {public_id}") + raise InputError( # noqa: B904 + f"Couldn't find {category_type} {public_id}" + ) if category.name: raise InputError(f"Cannot modify a standard {category_type}") @@ -930,7 +939,7 @@ def folder_label_update_api(public_id): @app.route("/folders/", methods=["DELETE"]) @app.route("/labels/", methods=["DELETE"]) -def folder_label_delete_api(public_id): +def folder_label_delete_api(public_id): # noqa: ANN201 category_type = g.namespace.account.category_type rule = request.url_rule.rule valid_category_type(category_type, rule) @@ -946,7 +955,9 @@ def folder_label_delete_api(public_id): .one() ) except NoResultFound: - raise InputError(f"Couldn't find {category_type} {public_id}") + raise InputError( # noqa: B904 + f"Couldn't find {category_type} {public_id}" + ) if category.name: raise InputError(f"Cannot modify a standard {category_type}") @@ -994,7 +1005,7 @@ def folder_label_delete_api(public_id): # Contacts ## @app.route("/contacts/", methods=["GET"]) -def contact_api(): +def contact_api(): # noqa: ANN201 g.parser.add_argument( "filter", type=bounded_str, default="", location="args" ) @@ -1032,7 +1043,7 @@ def contact_api(): @app.route("/contacts/", methods=["GET"]) -def contact_read_api(public_id): +def contact_read_api(public_id): # noqa: ANN201 # Get all data for an existing contact. 
valid_public_id(public_id) result = inbox.contacts.crud.read(g.namespace, g.db_session, public_id) @@ -1045,7 +1056,7 @@ def contact_read_api(public_id): # Events ## @app.route("/events/", methods=["GET"]) -def event_api(): +def event_api(): # noqa: ANN201 g.parser.add_argument("event_id", type=valid_public_id, location="args") g.parser.add_argument("calendar_id", type=valid_public_id, location="args") g.parser.add_argument("title", type=bounded_str, location="args") @@ -1102,7 +1113,7 @@ def event_api(): @app.route("/events/", methods=["POST"]) -def event_create_api(): +def event_create_api(): # noqa: ANN201 g.parser.add_argument( "notify_participants", type=strict_bool, location="args" ) @@ -1167,7 +1178,7 @@ def event_create_api(): @app.route("/events/", methods=["GET"]) -def event_read_api(public_id): +def event_read_api(public_id): # noqa: ANN201 """Get all data for an existing event.""" valid_public_id(public_id) try: @@ -1181,12 +1192,14 @@ def event_read_api(public_id): .one() ) except NoResultFound: - raise NotFoundError(f"Couldn't find event id {public_id}") + raise NotFoundError( # noqa: B904 + f"Couldn't find event id {public_id}" + ) return g.encoder.jsonify(event) @app.route("/events/", methods=["PUT", "PATCH"]) -def event_update_api(public_id): +def event_update_api(public_id): # noqa: ANN201 g.parser.add_argument( "notify_participants", type=strict_bool, location="args" ) @@ -1205,7 +1218,7 @@ def event_update_api(public_id): .one() ) except NoResultFound: - raise NotFoundError(f"Couldn't find event {public_id}") + raise NotFoundError(f"Couldn't find event {public_id}") # noqa: B904 # iCalendar-imported files are read-only by default but let's give a # slightly more helpful error message. @@ -1294,7 +1307,7 @@ def event_update_api(public_id): @app.route("/events/", methods=["DELETE"]) -def event_delete_api(public_id): +def event_delete_api(public_id): # noqa: ANN201 g.parser.add_argument( "notify_participants", type=strict_bool, location="args" ) @@ -1313,7 +1326,7 @@ def event_delete_api(public_id): .one() ) except NoResultFound: - raise NotFoundError(f"Couldn't find event {public_id}") + raise NotFoundError(f"Couldn't find event {public_id}") # noqa: B904 if event.calendar == g.namespace.account.emailed_events_calendar: raise InputError( @@ -1349,7 +1362,7 @@ def event_delete_api(public_id): @app.route("/send-rsvp", methods=["POST"]) -def event_rsvp_api(): +def event_rsvp_api(): # noqa: ANN201 data = request.get_json(force=True) event_id = data.get("event_id") @@ -1364,7 +1377,7 @@ def event_rsvp_api(): .one() ) except NoResultFound: - raise NotFoundError(f"Couldn't find event {event_id}") + raise NotFoundError(f"Couldn't find event {event_id}") # noqa: B904 if event.message is None: raise InputError( @@ -1445,7 +1458,7 @@ def event_rsvp_api(): # Files # @app.route("/files/", methods=["GET"]) -def files_api(): +def files_api(): # noqa: ANN201 g.parser.add_argument("filename", type=bounded_str, location="args") g.parser.add_argument("message_id", type=valid_public_id, location="args") g.parser.add_argument("content_type", type=bounded_str, location="args") @@ -1468,7 +1481,7 @@ def files_api(): @app.route("/files/", methods=["GET"]) -def file_read_api(public_id): +def file_read_api(public_id): # noqa: ANN201 valid_public_id(public_id) try: f = ( @@ -1481,11 +1494,11 @@ def file_read_api(public_id): ) return g.encoder.jsonify(f) except NoResultFound: - raise NotFoundError(f"Couldn't find file {public_id} ") + raise NotFoundError(f"Couldn't find file {public_id} ") # noqa: 
B904 @app.route("/files/", methods=["DELETE"]) -def file_delete_api(public_id): +def file_delete_api(public_id): # noqa: ANN201 valid_public_id(public_id) try: f = ( @@ -1513,7 +1526,7 @@ def file_delete_api(public_id): # Effectively no error == success return g.encoder.jsonify(None) except NoResultFound: - raise NotFoundError(f"Couldn't find file {public_id} ") + raise NotFoundError(f"Couldn't find file {public_id} ") # noqa: B904 # @@ -1522,7 +1535,7 @@ def file_delete_api(public_id): # $ curl http://localhost:5555/n/4s4iz36h36w17kumggi36ha2b/files \ # --form upload=@dancingbaby.gif @app.route("/files/", methods=["POST"]) -def file_upload_api(): +def file_upload_api(): # noqa: ANN201 all_files = [] for name, uploaded in request.files.items(): request.environ["log_context"].setdefault("filenames", []).append(name) @@ -1543,7 +1556,7 @@ def file_upload_api(): # File downloads # @app.route("/files//download") -def file_download_api(public_id): +def file_download_api(public_id): # noqa: ANN201 valid_public_id(public_id) try: f = ( @@ -1555,7 +1568,7 @@ def file_download_api(public_id): .one() ) except NoResultFound: - raise NotFoundError(f"Couldn't find file {public_id} ") + raise NotFoundError(f"Couldn't find file {public_id} ") # noqa: B904 # Here we figure out the filename.extension given the # properties which were set on the original attachment @@ -1638,7 +1651,7 @@ def file_download_api(public_id): # Calendars ## @app.route("/calendars/", methods=["GET"]) -def calendar_api(): +def calendar_api(): # noqa: ANN201 g.parser.add_argument("view", type=view, location="args") args = strict_parse_args(g.parser, request.args) @@ -1663,7 +1676,7 @@ def calendar_api(): @app.route("/calendars/", methods=["GET"]) -def calendar_read_api(public_id): +def calendar_read_api(public_id): # noqa: ANN201 """Get all data for an existing calendar.""" valid_public_id(public_id) @@ -1677,7 +1690,9 @@ def calendar_read_api(public_id): .one() ) except NoResultFound: - raise NotFoundError(f"Couldn't find calendar {public_id}") + raise NotFoundError( # noqa: B904 + f"Couldn't find calendar {public_id}" + ) return g.encoder.jsonify(calendar) @@ -1690,7 +1705,7 @@ def calendar_read_api(public_id): @app.route("/drafts/", methods=["GET"]) -def draft_query_api(): +def draft_query_api(): # noqa: ANN201 g.parser.add_argument("subject", type=bounded_str, location="args") g.parser.add_argument("to", type=bounded_str, location="args") g.parser.add_argument("cc", type=bounded_str, location="args") @@ -1747,7 +1762,7 @@ def draft_query_api(): @app.route("/drafts/", methods=["GET"]) -def draft_get_api(public_id): +def draft_get_api(public_id): # noqa: ANN201 valid_public_id(public_id) draft = ( g.db_session.query(Message) @@ -1763,7 +1778,7 @@ def draft_get_api(public_id): @app.route("/drafts/", methods=["POST"]) -def draft_create_api(): +def draft_create_api(): # noqa: ANN201 data = request.get_json(force=True) draft = create_message_from_json( data, g.namespace, g.db_session, is_draft=True @@ -1772,7 +1787,7 @@ def draft_create_api(): @app.route("/drafts/", methods=["PUT", "PATCH"]) -def draft_update_api(public_id): +def draft_update_api(public_id): # noqa: ANN201 data = request.get_json(force=True) original_draft = get_draft( public_id, data.get("version"), g.namespace.id, g.db_session @@ -1815,7 +1830,7 @@ def draft_update_api(public_id): @app.route("/drafts/", methods=["DELETE"]) -def draft_delete_api(public_id): +def draft_delete_api(public_id): # noqa: ANN201 data = request.get_json(force=True) # Validate draft id, 
version, etc. draft = get_draft( @@ -1828,7 +1843,7 @@ def draft_delete_api(public_id): @app.route("/send", methods=["POST"]) @app.route("/send-with-features", methods=["POST"]) # TODO deprecate this URL -def draft_send_api(): +def draft_send_api(): # noqa: ANN201 request_started = time.time() account = g.namespace.account @@ -1904,8 +1919,8 @@ def draft_send_api(): @app.route("/send-multiple", methods=["POST"]) -def multi_send_create(): - """Initiates a multi-send session by creating a new multi-send draft.""" +def multi_send_create(): # noqa: ANN201 + """Initiates a multi-send session by creating a new multi-send draft.""" # noqa: D401 account = g.namespace.account if account.discriminator == "easaccount": @@ -1928,14 +1943,14 @@ def multi_send_create(): @app.route("/send-multiple/", methods=["POST"]) -def multi_send(draft_id): +def multi_send(draft_id): # noqa: ANN201 """ Performs a single send operation in an individualized multi-send session. Sends a copy of the draft at draft_id to the specified address with the specified body, and ensures that a corresponding sent message is either not created in the user's Sent folder or is immediately deleted from it. - """ + """ # noqa: D401 request_started = time.time() account = g.namespace.account @@ -1978,11 +1993,11 @@ def multi_send(draft_id): @app.route("/send-multiple/", methods=["DELETE"]) -def multi_send_finish(draft_id): +def multi_send_finish(draft_id): # noqa: ANN201 """ Closes out a multi-send session by marking the sending draft as sent and moving it to the user's Sent folder. - """ + """ # noqa: D401 account = g.namespace.account if account.discriminator == "easaccount": @@ -2023,7 +2038,7 @@ def multi_send_finish(draft_id): ## @app.route("/delta") @app.route("/delta/longpoll") -def sync_deltas(): +def sync_deltas(): # noqa: ANN201 g.parser.add_argument( "cursor", type=valid_public_id, location="args", required=True ) @@ -2083,7 +2098,7 @@ def sync_deltas(): .one() ) except NoResultFound: - raise InputError("Invalid cursor parameter") + raise InputError("Invalid cursor parameter") # noqa: B904 # The client wants us to wait until there are changes g.db_session.expunge(g.namespace) @@ -2128,7 +2143,7 @@ def sync_deltas(): # TODO Deprecate this @app.route("/delta/generate_cursor", methods=["POST"]) -def generate_cursor(): +def generate_cursor(): # noqa: ANN201 data = request.get_json(force=True) if list(data) != ["start"] or not isinstance(data["start"], int): @@ -2142,7 +2157,7 @@ def generate_cursor(): try: datetime.utcfromtimestamp(timestamp) except ValueError: - raise InputError( + raise InputError( # noqa: B904 "generate_cursor request body must have the format " '{"start": (seconds)}' ) @@ -2154,7 +2169,7 @@ def generate_cursor(): @app.route("/delta/latest_cursor", methods=["POST"]) -def latest_cursor(): +def latest_cursor(): # noqa: ANN201 cursor = delta_sync.get_transaction_cursor_near_timestamp( g.namespace.id, int(time.time()), g.db_session ) @@ -2167,7 +2182,7 @@ def latest_cursor(): @app.route("/delta/streaming") -def stream_changes(): +def stream_changes(): # noqa: ANN201 g.parser.add_argument("timeout", type=float, location="args") g.parser.add_argument( "cursor", type=valid_public_id, location="args", required=True @@ -2263,7 +2278,7 @@ def stream_changes(): @app.route("/groups/intrinsic") -def groups_intrinsic(): +def groups_intrinsic(): # noqa: ANN201 g.parser.add_argument( "force_recalculate", type=strict_bool, location="args" ) @@ -2313,7 +2328,7 @@ def groups_intrinsic(): @app.route("/contacts/rankings") -def 
contact_rankings(): +def contact_rankings(): # noqa: ANN201 g.parser.add_argument( "force_recalculate", type=strict_bool, location="args" ) diff --git a/inbox/api/sending.py b/inbox/api/sending.py index 681966c56..8487a4ac0 100644 --- a/inbox/api/sending.py +++ b/inbox/api/sending.py @@ -8,7 +8,7 @@ log = get_logger() -def send_draft(account, draft, db_session): +def send_draft(account, draft, db_session): # noqa: ANN201 """Send the draft with id = `draft_id`.""" # Update message state and prepare a response so that we can immediately # return it on success, and not potentially have queries fail after @@ -30,13 +30,13 @@ def send_draft(account, draft, db_session): return response_on_success -def send_draft_copy(account, draft, custom_body, recipient): +def send_draft_copy(account, draft, custom_body, recipient): # noqa: ANN201 """ Sends a copy of this draft to the recipient, using the specified body rather that the one on the draft object, and not marking the draft as sent. Used within multi-send to send messages to individual recipients with customized bodies. - """ + """ # noqa: D401 # Create the response to send on success by serlializing the draft. After # serializing, we replace the new custom body (which the recipient will get # and which should be returned in this response) in place of the existing @@ -74,7 +74,7 @@ def update_draft_on_send(account, draft, db_session) -> None: db_session.flush() -def send_raw_mime(account, db_session, msg): +def send_raw_mime(account, db_session, msg): # noqa: ANN201 # Prepare a response so that we can immediately return it on success, and # not potentially have queries fail after sending. response_on_success = APIEncoder().jsonify(msg) diff --git a/inbox/api/srv.py b/inbox/api/srv.py index 53dbeb318..0ecd6f531 100644 --- a/inbox/api/srv.py +++ b/inbox/api/srv.py @@ -44,13 +44,13 @@ @app.errorhandler(APIException) -def handle_input_error(error): +def handle_input_error(error): # noqa: ANN201 response = jsonify(message=error.message, type="invalid_request_error") response.status_code = error.status_code return response -def default_json_error(ex): +def default_json_error(ex): # noqa: ANN201 """Exception -> flask JSON responder""" logger = get_logger() logger.error("Uncaught error thrown by Flask/Werkzeug", exc_info=ex) @@ -65,7 +65,7 @@ def default_json_error(ex): @app.before_request -def auth(): +def auth(): # noqa: ANN201 """Check for account ID on all non-root URLS""" if ( request.path == "/" @@ -76,7 +76,7 @@ def auth(): return None if not request.authorization or not request.authorization.username: - AUTH_ERROR_MSG = ( + AUTH_ERROR_MSG = ( # noqa: N806 "Could not verify access credential.", 401, {"WWW-Authenticate": 'Basic realm="API Access Token Required"'}, @@ -119,7 +119,7 @@ def auth(): @app.after_request -def finish(response): +def finish(response): # noqa: ANN201 origin = request.headers.get("origin") if origin: # means it's just a regular request response.headers["Access-Control-Allow-Origin"] = origin @@ -134,7 +134,7 @@ def finish(response): @app.route("/accounts/", methods=["GET"]) -def ns_all(): +def ns_all(): # noqa: ANN201 """Return all namespaces""" # We do this outside the blueprint to support the case of an empty # public_id. 
However, this means the before_request isn't run, so we need @@ -253,7 +253,7 @@ def _get_account_data_for_microsoft_account( @app.route("/accounts/", methods=["POST"]) -def create_account(): +def create_account(): # noqa: ANN201 """Create a new account""" data = request.get_json(force=True) @@ -280,7 +280,7 @@ def create_account(): @app.route("/accounts//", methods=["PUT"]) -def modify_account(namespace_public_id): +def modify_account(namespace_public_id): # noqa: ANN201 """ Modify an existing account @@ -320,7 +320,7 @@ def modify_account(namespace_public_id): @app.route("/accounts//", methods=["DELETE"]) -def delete_account(namespace_public_id): +def delete_account(namespace_public_id): # noqa: ANN201 """Mark an existing account for deletion.""" try: with global_session_scope() as db_session: @@ -333,7 +333,9 @@ def delete_account(namespace_public_id): account.mark_for_deletion() db_session.commit() except NoResultFound: - raise NotFoundError(f"Couldn't find account `{namespace_public_id}` ") + raise NotFoundError( # noqa: B904 + f"Couldn't find account `{namespace_public_id}` " + ) encoder = APIEncoder() return encoder.jsonify({}) @@ -345,11 +347,11 @@ def home() -> str: @app.route("/logout") -def logout(): +def logout(): # noqa: ANN201 """ Utility function used to force browsers to reset cached HTTP Basic Auth credentials - """ + """ # noqa: D401 return make_response( ( ".", diff --git a/inbox/api/update.py b/inbox/api/update.py index 07d3a396a..6dad69ef3 100644 --- a/inbox/api/update.py +++ b/inbox/api/update.py @@ -79,7 +79,7 @@ def update_thread(thread, request_data, db_session, optimistic) -> None: ## FLAG UPDATES ## -def parse_flags(request_data): +def parse_flags(request_data): # noqa: ANN201 unread = request_data.pop("unread", None) if unread is not None and not isinstance(unread, bool): raise InputError('"unread" must be true or false') @@ -121,7 +121,7 @@ def update_message_flags( ## FOLDER UPDATES ## -def parse_folder(request_data, db_session, namespace_id): +def parse_folder(request_data, db_session, namespace_id): # noqa: ANN201 # TODO deprecate being able to post "folder" and not "folder_id" if "folder_id" not in request_data and "folder" not in request_data: return None @@ -144,7 +144,9 @@ def parse_folder(request_data, db_session, namespace_id): .one() ) except NoResultFound: - raise InputError(f"The folder {folder_public_id} does not exist") + raise InputError( # noqa: B904 + f"The folder {folder_public_id} does not exist" + ) def update_message_folder(message, db_session, category, optimistic) -> None: @@ -165,7 +167,7 @@ def update_message_folder(message, db_session, category, optimistic) -> None: ### LABEL UPDATES ### -def parse_labels(request_data, db_session, namespace_id): +def parse_labels(request_data, db_session, namespace_id): # noqa: ANN201 # TODO deprecate being able to post "labels" and not "label_ids" if "label_ids" not in request_data and "labels" not in request_data: return None @@ -199,7 +201,7 @@ def parse_labels(request_data, db_session, namespace_id): ) labels.add(category) except NoResultFound: - raise InputError(f"The label {id_} does not exist") + raise InputError(f"The label {id_} does not exist") # noqa: B904 return labels @@ -329,7 +331,7 @@ def apply_gmail_label_rules( adding it removes a message out of the '[Gmail]Trash'/ '[Gmail]Spam' folders and into the '[Gmail]All Mail' folder. 
- """ + """ # noqa: D401 add: set[str] = set() discard: set[str] = set() diff --git a/inbox/api/validation.py b/inbox/api/validation.py index 36a5adb14..b6197b6b2 100644 --- a/inbox/api/validation.py +++ b/inbox/api/validation.py @@ -34,13 +34,13 @@ def handle_validation_error(self, error, bundle_errors) -> Never: # Custom parameter types -def bounded_str(value, key): +def bounded_str(value, key): # noqa: ANN201 if len(value) > 255: raise ValueError(f"Value {value} for {key} is too long") return value -def comma_separated_email_list(value, key): +def comma_separated_email_list(value, key): # noqa: ANN201 addresses = value.split(",") # Note that something like "foo,bar"@example.com is technically a valid # email address, but in practice nobody does this (and they shouldn't!) @@ -60,7 +60,7 @@ def comma_separated_email_list(value, key): return good_emails -def strict_bool(value, key): +def strict_bool(value, key): # noqa: ANN201 if value.lower() not in ["true", "false"]: raise ValueError( f'Value must be "true" or "false" (not "{value}") for {key}' ) @@ -68,18 +68,18 @@ def strict_bool(value, key): return value.lower() == "true" -def view(value, key): +def view(value, key): # noqa: ANN201 allowed_views = ["count", "ids", "expanded"] if value not in allowed_views: raise ValueError(f"Unknown view type {value}.") return value -def limit(value): +def limit(value): # noqa: ANN201 try: value = int(value) except ValueError: - raise ValueError("Limit parameter must be an integer.") + raise ValueError("Limit parameter must be an integer.") # noqa: B904 if value < 0: raise ValueError("Limit parameter must be nonnegative.") if value > MAX_LIMIT: @@ -89,17 +89,17 @@ def limit(value): return value -def offset(value): +def offset(value): # noqa: ANN201 try: value = int(value) except ValueError: - raise ValueError("Offset parameter must be an integer.") + raise ValueError("Offset parameter must be an integer.") # noqa: B904 if value < 0: raise ValueError("Offset parameter must be nonnegative.") return value -def valid_public_id(value): +def valid_public_id(value): # noqa: ANN201 if "_" in value: raise InputError(f"Invalid id: {value}") @@ -108,7 +108,7 @@ def valid_public_id(value): try: # raise TypeError if an integer is passed in int(value, 36) except (TypeError, ValueError): - raise InputError(f"Invalid id: {value}") + raise InputError(f"Invalid id: {value}") # noqa: B904 return value @@ -119,7 +119,7 @@ def valid_account(namespace) -> None: raise AccountStoppedError() -def valid_category_type(category_type, rule): +def valid_category_type(category_type, rule): # noqa: ANN201 if category_type not in rule: if category_type == "label": raise NotFoundError("GMail accounts don't support folders") @@ -128,24 +128,28 @@ return category_type -def timestamp(value, key): +def timestamp(value, key): # noqa: ANN201 try: with contextlib.suppress(ValueError): value = float(value) return arrow.get(value).datetime except ValueError: - raise ValueError(f"Invalid timestamp value {value} for {key}") + raise ValueError( # noqa: B904 f"Invalid timestamp value {value} for {key}" ) except ParserError: - raise ValueError(f"Invalid datetime value {value} for {key}") + raise ValueError( # noqa: B904 f"Invalid datetime value {value} for {key}" ) -def strict_parse_args(parser, raw_args): +def strict_parse_args(parser, raw_args): # noqa: ANN201 """ Wrapper around parser.parse_args that raises a ValueError if unexpected arguments are present.
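D401 asks that the first line of a docstring be phrased in the imperative mood. Most of the D401 suppressions in this patch could instead be one-word rewrites, for example:

    def send_draft_copy(account, draft, custom_body, recipient):
        """Sends a copy of this draft to the recipient."""  # D401: indicative mood

    def send_draft_copy(account, draft, custom_body, recipient):  # noqa: F811
        """Send a copy of this draft to the recipient."""  # imperative mood, no noqa needed

(The second definition redefines the first purely for the before/after comparison, hence its own F811 suppression.)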
- """ + """ # noqa: D401 args = parser.parse_args() unexpected_params = set(raw_args) - { allowed_arg.name for allowed_arg in parser.args @@ -155,7 +159,9 @@ def strict_parse_args(parser, raw_args): return args -def get_sending_draft(draft_public_id, namespace_id, db_session): +def get_sending_draft( # noqa: ANN201 + draft_public_id, namespace_id, db_session +): valid_public_id(draft_public_id) try: draft = ( @@ -167,7 +173,7 @@ def get_sending_draft(draft_public_id, namespace_id, db_session): .one() ) except NoResultFound: - raise NotFoundError( + raise NotFoundError( # noqa: B904 f"Couldn't find multi-send draft {draft_public_id}" ) @@ -178,14 +184,16 @@ def get_sending_draft(draft_public_id, namespace_id, db_session): return draft -def get_draft(draft_public_id, version, namespace_id, db_session): +def get_draft( # noqa: ANN201 + draft_public_id, version, namespace_id, db_session +): valid_public_id(draft_public_id) if version is None: raise InputError("Must specify draft version") try: version = int(version) except ValueError: - raise InputError("Invalid draft version") + raise InputError("Invalid draft version") # noqa: B904 try: draft = ( db_session.query(Message) @@ -196,7 +204,9 @@ def get_draft(draft_public_id, version, namespace_id, db_session): .one() ) except NoResultFound: - raise NotFoundError(f"Couldn't find draft {draft_public_id}") + raise NotFoundError( # noqa: B904 + f"Couldn't find draft {draft_public_id}" + ) if draft.is_sent or not draft.is_draft: raise InputError(f"Message {draft_public_id} is not a draft") @@ -209,7 +219,9 @@ def get_draft(draft_public_id, version, namespace_id, db_session): return draft -def get_attachments(block_public_ids, namespace_id, db_session): +def get_attachments( # noqa: ANN201 + block_public_ids, namespace_id, db_session +): attachments: set[Block] = set() if block_public_ids is None: return attachments @@ -231,11 +243,13 @@ def get_attachments(block_public_ids, namespace_id, db_session): # data by using #magic.from_buffer(data, mime=True)) attachments.add(block) except NoResultFound: - raise InputError(f"Invalid block public id {block_public_id}") + raise InputError( # noqa: B904 + f"Invalid block public id {block_public_id}" + ) return attachments -def get_message(message_public_id, namespace_id, db_session): +def get_message(message_public_id, namespace_id, db_session): # noqa: ANN201 if message_public_id is None: return None valid_public_id(message_public_id) @@ -249,10 +263,12 @@ def get_message(message_public_id, namespace_id, db_session): .one() ) except NoResultFound: - raise InputError(f"Invalid message public id {message_public_id}") + raise InputError( # noqa: B904 + f"Invalid message public id {message_public_id}" + ) -def get_thread(thread_public_id, namespace_id, db_session): +def get_thread(thread_public_id, namespace_id, db_session): # noqa: ANN201 if thread_public_id is None: return None valid_public_id(thread_public_id) @@ -267,10 +283,12 @@ def get_thread(thread_public_id, namespace_id, db_session): .one() ) except NoResultFound: - raise InputError(f"Invalid thread public id {thread_public_id}") + raise InputError( # noqa: B904 + f"Invalid thread public id {thread_public_id}" + ) -def get_recipients(recipients, field): +def get_recipients(recipients, field): # noqa: ANN201 if recipients is None: return None if not isinstance(recipients, list): @@ -289,7 +307,7 @@ def get_recipients(recipients, field): return [(r.get("name", ""), r.get("email", "")) for r in recipients] -def get_calendar(calendar_public_id, namespace, 
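One detail worth noting in the reflowed signatures above: a `# noqa` comment only suppresses diagnostics reported on its own physical line, and ANN201 appears to be reported on the line that opens the `def`, which is why the comment stays on the first line when the signature wraps:

    def get_draft(  # noqa: ANN201
        draft_public_id, version, namespace_id, db_session
    ):
        # Moving the comment to any other line of the signature would
        # leave the diagnostic unsuppressed.
        ...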
db_session): +def get_calendar(calendar_public_id, namespace, db_session): # noqa: ANN201 valid_public_id(calendar_public_id) try: return ( @@ -301,14 +319,16 @@ def get_calendar(calendar_public_id, namespace, db_session): .one() ) except NoResultFound: - raise NotFoundError(f"Calendar {calendar_public_id} not found") + raise NotFoundError( # noqa: B904 + f"Calendar {calendar_public_id} not found" + ) def valid_when(when) -> None: try: parse_as_when(when) except (ValueError, ParserError) as e: - raise InputError(str(e)) + raise InputError(str(e)) # noqa: B904 def valid_event(event) -> None: @@ -400,7 +420,7 @@ def noop_event_update(event, data) -> bool: if len(e_participants.keys()) != len(event_participants.keys()): return False - for email in e_participants: + for email in e_participants: # noqa: PLC0206 if email not in event_participants: return False @@ -420,7 +440,7 @@ def noop_event_update(event, data) -> bool: return True -def valid_delta_object_types(types_arg): +def valid_delta_object_types(types_arg): # noqa: ANN201 types = [item.strip() for item in types_arg.split(",")] allowed_types = ( "contact", @@ -457,7 +477,9 @@ def validate_draft_recipients(draft) -> None: ) -def valid_display_name(namespace_id, category_type, display_name, db_session): +def valid_display_name( # noqa: ANN201 + namespace_id, category_type, display_name, db_session +): if display_name is None or not isinstance(display_name, str): raise InputError('"display_name" must be a valid string') diff --git a/inbox/api/wsgi.py b/inbox/api/wsgi.py index f7497948e..7f8778abc 100644 --- a/inbox/api/wsgi.py +++ b/inbox/api/wsgi.py @@ -14,7 +14,7 @@ class NylasWSGIWorker(ThreadWorker): """Custom worker class for gunicorn.""" def init_process(self) -> None: - print("Python", sys.version, file=sys.stderr) + print("Python", sys.version, file=sys.stderr) # noqa: T201 maybe_enable_rollbar() @@ -23,7 +23,7 @@ def init_process(self) -> None: super().init_process() -from inbox.config import config +from inbox.config import config # noqa: E402 LOGLEVEL = config.get("LOGLEVEL", 10) diff --git a/inbox/auth/base.py b/inbox/auth/base.py index 33f740ee9..42adde2db 100644 --- a/inbox/auth/base.py +++ b/inbox/auth/base.py @@ -13,7 +13,7 @@ log = get_logger() -def handler_from_provider(provider_name): +def handler_from_provider(provider_name): # noqa: ANN201 """ Return an authentication handler for the given provider. @@ -62,7 +62,7 @@ def update_account(self, account, account_data) -> Never: """ raise NotImplementedError() - def get_imap_connection(self, account, use_timeout=True): + def get_imap_connection(self, account, use_timeout=True): # noqa: ANN201 host, port = account.imap_endpoint try: return create_imap_connection(host, port, use_timeout) @@ -79,7 +79,9 @@ def get_imap_connection(self, account, use_timeout=True): def authenticate_imap_connection(self, account, conn) -> Never: raise NotImplementedError() - def get_authenticated_imap_connection(self, account, use_timeout=True): + def get_authenticated_imap_connection( # noqa: ANN201 + self, account, use_timeout=True + ): conn = self.get_imap_connection(account, use_timeout=use_timeout) self.authenticate_imap_connection(account, conn) return conn @@ -100,7 +102,7 @@ def verify_account(self, account) -> bool: ------- True: If the client can successfully connect to both. 
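T201 flags bare `print` calls. For a gunicorn worker startup message the suppression is pragmatic, but long-lived code paths could route through the structlog-based logger the project already wraps. A sketch with a generic structlog logger (not the project's `inbox.logging` helper):

    import sys

    import structlog

    log = structlog.get_logger()

    def init_process() -> None:
        # Same information as print("Python", sys.version, file=sys.stderr),
        # emitted as a structured log event instead.
        log.info("starting worker", python_version=sys.version)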
- """ + """ # noqa: D401 # Verify IMAP login conn = self.get_authenticated_imap_connection(account) crispin = CrispinClient( @@ -123,7 +125,7 @@ def verify_account(self, account) -> bool: "Please contact your domain " "administrator and try again." ) - raise UserRecoverableConfigError(error_message) + raise UserRecoverableConfigError(error_message) # noqa: B904 finally: conn.logout() @@ -144,7 +146,7 @@ def verify_account(self, account) -> bool: "Couldn't resolve the SMTP server domain name. " "Please check that your SMTP settings are correct." ) - raise UserRecoverableConfigError(error_message) + raise UserRecoverableConfigError(error_message) # noqa: B904 except TimeoutError as exc: log.error( @@ -157,7 +159,7 @@ def verify_account(self, account) -> bool: "Connection timeout when connecting to SMTP server. " "Please check that your SMTP settings are correct." ) - raise UserRecoverableConfigError(error_message) + raise UserRecoverableConfigError(error_message) # noqa: B904 except Exception as exc: log.error( @@ -166,7 +168,7 @@ def verify_account(self, account) -> bool: account_id=account.id, error=exc, ) - raise UserRecoverableConfigError( + raise UserRecoverableConfigError( # noqa: B904 "Please check that your SMTP settings are correct." ) diff --git a/inbox/auth/generic.py b/inbox/auth/generic.py index 7794bab9e..d560f08b8 100644 --- a/inbox/auth/generic.py +++ b/inbox/auth/generic.py @@ -33,7 +33,7 @@ class GenericAccountData: class GenericAuthHandler(AuthHandler): - def create_account(self, account_data): + def create_account(self, account_data): # noqa: ANN201 namespace = Namespace() account = GenericAccount(namespace=namespace) account.provider = "custom" @@ -41,7 +41,7 @@ def create_account(self, account_data): account.sync_should_run = False return self.update_account(account, account_data) - def update_account(self, account, account_data): + def update_account(self, account, account_data): # noqa: ANN201 account.email_address = account_data.email account.imap_endpoint = ( @@ -74,9 +74,9 @@ def authenticate_imap_connection(self, account, conn) -> None: log.error( "IMAP login failed", account_id=account.id, error=exc ) - raise ValidationError(exc) + raise ValidationError(exc) # noqa: B904 elif auth_requires_app_password(exc): - raise AppPasswordError(exc) + raise AppPasswordError(exc) # noqa: B904 else: log.warning( "IMAP login failed for an unknown reason. 
Check auth_is_invalid", @@ -85,7 +85,7 @@ def authenticate_imap_connection(self, account, conn) -> None: ) raise - def interactive_auth(self, email_address): + def interactive_auth(self, email_address): # noqa: ANN201 imap_server_host = input("IMAP server host: ").strip() imap_server_port = input("IMAP server port: ").strip() or 993 imap_um = "IMAP username (empty for same as email address): " diff --git a/inbox/auth/google.py b/inbox/auth/google.py index 82ec24d86..7308dd7d8 100644 --- a/inbox/auth/google.py +++ b/inbox/auth/google.py @@ -77,7 +77,7 @@ def update_account( return account - def interactive_auth(self, email_address=None): + def interactive_auth(self, email_address=None): # noqa: ANN201 url_args = { "redirect_uri": self.OAUTH_REDIRECT_URI, "client_id": self.OAUTH_CLIENT_ID, @@ -90,8 +90,10 @@ def interactive_auth(self, email_address=None): url_args["login_hint"] = email_address url = url_concat(self.OAUTH_AUTHENTICATE_URL, url_args) - print("To authorize Nylas, visit this URL and follow the directions:") - print(f"\n{url}") + print( # noqa: T201 + "To authorize Nylas, visit this URL and follow the directions:" + ) + print(f"\n{url}") # noqa: T201 while True: auth_code = input("Enter authorization code: ").strip() @@ -108,7 +110,9 @@ def interactive_auth(self, email_address=None): sync_events=True, ) except OAuthError: - print("\nInvalid authorization code, try again...\n") + print( # noqa: T201 + "\nInvalid authorization code, try again...\n" + ) def verify_account(self, account) -> bool: """ diff --git a/inbox/auth/microsoft.py b/inbox/auth/microsoft.py index 348b78250..62705c415 100644 --- a/inbox/auth/microsoft.py +++ b/inbox/auth/microsoft.py @@ -79,7 +79,7 @@ def update_account( return account - def interactive_auth(self, email_address=None): + def interactive_auth(self, email_address=None): # noqa: ANN201 url_args = { "redirect_uri": self.OAUTH_REDIRECT_URI, "client_id": self.OAUTH_CLIENT_ID, @@ -91,8 +91,10 @@ def interactive_auth(self, email_address=None): url_args["login_hint"] = email_address url = url_concat(self.OAUTH_AUTHENTICATE_URL, url_args) - print("To authorize Nylas, visit this URL and follow the directions:") - print(f"\n{url}") + print( # noqa: T201 + "To authorize Nylas, visit this URL and follow the directions:" + ) + print(f"\n{url}") # noqa: T201 while True: auth_code = input("Enter authorization code: ").strip() @@ -108,4 +110,6 @@ def interactive_auth(self, email_address=None): sync_events=False, ) except OAuthError: - print("\nInvalid authorization code, try again...\n") + print( # noqa: T201 + "\nInvalid authorization code, try again...\n" + ) diff --git a/inbox/auth/oauth.py b/inbox/auth/oauth.py index 90a3cfc2b..5173d13cb 100644 --- a/inbox/auth/oauth.py +++ b/inbox/auth/oauth.py @@ -77,7 +77,7 @@ def _new_access_token_from_refresh_token( account_logger.error( "Network error renewing access token", error=e ) - raise ConnectionError() + raise ConnectionError() # noqa: B904 try: session_dict = response.json() @@ -86,7 +86,9 @@ def _new_access_token_from_refresh_token( "Invalid JSON renewing on renewing token", response=response.text, ) - raise ConnectionError("Invalid JSON response on renewing token") + raise ConnectionError( # noqa: B904 + "Invalid JSON response on renewing token" + ) if "error" in session_dict: if session_dict["error"] == "invalid_grant": @@ -184,7 +186,7 @@ def _new_access_token_from_authalligator( "Max retries reached" ) - def acquire_access_token( + def acquire_access_token( # noqa: D417 self, account: OAuthAccount, 
force_refresh: bool = False, @@ -274,12 +276,12 @@ def _get_user_info(self, session_dict): response = urllib.request.urlopen(request) except urllib.error.HTTPError as e: if e.code == 401: - raise OAuthError("Could not retrieve user info.") + raise OAuthError("Could not retrieve user info.") # noqa: B904 log.error("user_info_fetch_failed", error_code=e.code, error=e) - raise ConnectionError() + raise ConnectionError() # noqa: B904 except urllib.error.URLError as e: log.error("user_info_fetch_failed", error=e) - raise ConnectionError() + raise ConnectionError() # noqa: B904 userinfo_dict = json.loads(response.read()) @@ -325,7 +327,7 @@ class OAuthRequestsWrapper(requests.auth.AuthBase): def __init__(self, token) -> None: self.token = token - def __call__(self, r): + def __call__(self, r): # noqa: ANN204 r.headers["Authorization"] = f"Bearer {self.token}" return r diff --git a/inbox/auth/utils.py b/inbox/auth/utils.py index bfbe05a22..3b93b248d 100644 --- a/inbox/auth/utils.py +++ b/inbox/auth/utils.py @@ -15,10 +15,10 @@ def safe_decode(message: str | bytes) -> str: return message -def auth_requires_app_password(exc): +def auth_requires_app_password(exc): # noqa: ANN201 # Some servers require an application specific password, token, or # authorization code to login - PREFIXES = ( + PREFIXES = ( # noqa: N806 "Please using authorized code to login.", # http://service.mail.qq.com/cgi-bin/help?subtype=1&&id=28&&no=1001256 "Authorized code is incorrect", # http://service.mail.qq.com/cgi-bin/help?subtype=1&&id=28&&no=1001256 "Login fail. Please using weixin token", # http://service.exmail.qq.com/cgi-bin/help?subtype=1&no=1001023&id=23. @@ -27,12 +27,12 @@ def auth_requires_app_password(exc): return any(message.lower().startswith(msg.lower()) for msg in PREFIXES) -def auth_is_invalid(exc): +def auth_is_invalid(exc): # noqa: ANN201 # IMAP doesn't really have error semantics, so we have to match the error # message against a list of known response strings to determine whether we # couldn't log in because the credentials are invalid, or because of some # temporary server error. - AUTH_INVALID_PREFIXES = ( + AUTH_INVALID_PREFIXES = ( # noqa: N806 "[authenticationfailed]", "incorrect username or password", "invalid login or password", @@ -60,7 +60,7 @@ def auth_is_invalid(exc): ) -def create_imap_connection(host, port, use_timeout=True): +def create_imap_connection(host, port, use_timeout=True): # noqa: ANN201 """ Return a connection to the IMAP server. @@ -100,7 +100,7 @@ def create_imap_connection(host, port, use_timeout=True): return conn -def create_default_context(): +def create_default_context(): # noqa: ANN201 """ Return a backports.ssl.SSLContext object configured with sensible default settings. This was adapted from imapclient.create_default_context diff --git a/inbox/config.py b/inbox/config.py index 583c20393..f2a8c4165 100644 --- a/inbox/config.py +++ b/inbox/config.py @@ -5,7 +5,7 @@ import yaml urllib3.disable_warnings() -from urllib3.exceptions import InsecureRequestWarning +from urllib3.exceptions import InsecureRequestWarning # noqa: E402 urllib3.disable_warnings(InsecureRequestWarning) @@ -76,7 +76,9 @@ def _update_config_from_env(config, env): Missing files in the path will be ignored. """ - srcdir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..") + srcdir = os.path.join( # noqa: PTH118 + os.path.dirname(os.path.realpath(__file__)), ".." 
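N806 fires on upper-case names bound inside a function body, since the ALL_CAPS convention is reserved for module-level constants. Hoisting the tuples out of `auth_requires_app_password` and `auth_is_invalid` would satisfy the rule and avoid rebuilding them on every call; a sketch:

    # At module level the upper-case name is conventional, so N806 is silent.
    APP_PASSWORD_PREFIXES = (
        "please using authorized code to login.",
        "authorized code is incorrect",
    )

    def auth_requires_app_password(message: str) -> bool:
        # str.startswith accepts a tuple of prefixes.
        return message.lower().startswith(APP_PASSWORD_PREFIXES)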
# noqa: PTH120 ) if env in ["prod", "staging"]: base_cfg_path = [ @@ -102,7 +104,7 @@ def _update_config_from_env(config, env): for filename in reversed(path): try: - with open(filename) as f: + with open(filename) as f: # noqa: PTH123 # this also parses json, which is a subset of yaml config.update(yaml.safe_load(f)) except OSError as e: diff --git a/inbox/console.py b/inbox/console.py index c386aa4f8..49ab7b527 100755 --- a/inbox/console.py +++ b/inbox/console.py @@ -1,4 +1,4 @@ -import sys +import sys # noqa: EXE002 import IPython @@ -21,16 +21,20 @@ def user_console(user_email_address) -> None: if len(result) == 1: account = result[0] elif len(result) > 1: - print(f"\n{len(result)} accounts found for that email.\n") + print( # noqa: T201 f"\n{len(result)} accounts found for that email.\n" ) for idx, acc in enumerate(result): - print( + print( # noqa: T201 f"[{idx}] - {acc.provider} {acc.namespace.email_address} {acc.namespace.public_id}" ) choice = int(input("\nWhich # do you want to select? ")) account = result[choice] if account is None: - print(f"No account found with email '{user_email_address}'") + print( # noqa: T201 f"No account found with email '{user_email_address}'" ) return if account.provider == "eas": diff --git a/inbox/contacts/algorithms.py b/inbox/contacts/algorithms.py index 32601bd90..3b7416a1f 100644 --- a/inbox/contacts/algorithms.py +++ b/inbox/contacts/algorithms.py @@ -39,7 +39,7 @@ def _get_participants(msg, excluded_emails=None): """ Returns an alphabetically sorted list of email addresses that msg was sent to (including cc and bcc) - """ + """ # noqa: D401 excluded_emails = excluded_emails or [] participants = msg.to_addr + msg.cc_addr + msg.bcc_addr return sorted( @@ -54,7 +54,7 @@ def _get_participants(msg, excluded_emails=None): # Not really an algorithm, but it seemed reasonable to put this here? -def is_stale(last_updated, lifespan=14): +def is_stale(last_updated, lifespan=14): # noqa: ANN201 """ last_updated is a datetime.datetime object lifespan is measured in days @@ -70,7 +70,7 @@ ## -def calculate_contact_scores(messages, time_dependent=True): +def calculate_contact_scores(messages, time_dependent=True): # noqa: ANN201 now = datetime.datetime.now() res: defaultdict[str, int] = defaultdict(int) for message in messages: @@ -84,11 +84,11 @@ return res -def calculate_group_counts(messages, user_email): +def calculate_group_counts(messages, user_email): # noqa: ANN201 """ Strips out most of the logic from calculate_group_scores algorithm and just returns raw counts for each group. - """ + """ # noqa: D401 res: defaultdict[str, int] = defaultdict(int) for msg in messages: participants = _get_participants(msg, [user_email]) @@ -97,7 +97,7 @@ return res -def calculate_group_scores(messages, user_email): +def calculate_group_scores(messages, user_email): # noqa: ANN201 """ This is a (modified) implementation of the algorithm described in this paper: http://mobisocial.stanford.edu/papers/iui11g.pdf @@ -107,7 +107,7 @@ cc_addr - [('name1', 'email1@e.com'), ... ] bcc_addr - [('name1', 'email1@e.com'), ... ] date - datetime.datetime object - """ + """ # noqa: D401, D404 now = datetime.datetime.now() message_ids_to_scores: dict[str, float] = {} molecules_dict = defaultdict(set) # (emails, ...)
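The PTH rules prefer pathlib over the os.path functions and the builtin `open`. The three suppressed call sites in config.py map directly onto pathlib equivalents:

    from pathlib import Path

    import yaml

    # PTH118/PTH120: os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")
    srcdir = Path(__file__).resolve().parent / ".."

    # PTH123: open(filename)
    def load_config_file(filename: str) -> dict:
        with Path(filename).open() as f:
            # As in the original: YAML is a superset of JSON, so this
            # parses JSON config files too.
            return yaml.safe_load(f)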
-> {message ids, ...} @@ -171,12 +171,12 @@ def _subsume_molecules(molecules_list, get_message_list_weight): mol_weights = [get_message_list_weight(m) for (_, m) in molecules_list] for i in range(1, len(molecules_list)): - g1, m1 = molecules_list[i] # Smaller group + g1, m1 = molecules_list[i] # Smaller group # noqa: F841 m1_size = mol_weights[i] for j in range(i): if is_subsumed[j]: continue - g2, m2 = molecules_list[j] # Bigger group + g2, m2 = molecules_list[j] # Bigger group # noqa: F841 m2_size = mol_weights[j] if g1.issubset(g2): sharing_error = ((len(g2) - len(g1)) * (m1_size - m2_size)) / ( @@ -190,7 +190,7 @@ def _subsume_molecules(molecules_list, get_message_list_weight): def _combine_similar_molecules(molecules_list): - """Using a greedy approach here for speed""" + """Using a greedy approach here for speed""" # noqa: D401 new_guys_start_idx = 0 while new_guys_start_idx < len(molecules_list): combined = [False] * len(molecules_list) diff --git a/inbox/contacts/carddav.py b/inbox/contacts/carddav.py index 68710df10..a8596b858 100644 --- a/inbox/contacts/carddav.py +++ b/inbox/contacts/carddav.py @@ -18,20 +18,20 @@ - Implement WebDavSync - Support manipulating groups: http://stackoverflow.com/q/24202551 -""" +""" # noqa: D404 -import lxml.etree as ET +import lxml.etree as ET # noqa: N812 import requests # Fake it till you make it USER_AGENT = ( - "User-Agent: DAVKit/4.0.1 (730); CalendarStore/4.0.1 " + "User-Agent: DAVKit/4.0.1 (730); CalendarStore/4.0.1 " # noqa: ISC003 + "(973); iCal/4.0.1 (1374); Mac OS X/10.6.2 (10C540)" ) def supports_carddav(url) -> None: - """Basic verification that the endpoint supports CardDav""" + """Basic verification that the endpoint supports CardDav""" # noqa: D401 response = requests.request( "OPTIONS", url, headers={"User-Agent": USER_AGENT, "Depth": "1"} ) @@ -50,7 +50,7 @@ def __init__(self, email_address, password, base_url) -> None: self.session.headers.update({"User-Agent": USER_AGENT, "Depth": "1"}) self.base_url = base_url - def get_principal_url(self): + def get_principal_url(self): # noqa: ANN201 """Use PROPFIND method to find the `principal` carddav url""" payload = """ @@ -69,7 +69,7 @@ def get_principal_url(self): principal_href = element[0][1][0][0][0].text return principal_href - def get_address_book_home(self, url): + def get_address_book_home(self, url): # noqa: ANN201 payload = """ @@ -107,7 +107,7 @@ def get_address_book_home(self, url): # response.raise_for_status() # return response.content - def get_cards(self, url): + def get_cards(self, url): # noqa: ANN201 payload = """ diff --git a/inbox/contacts/crud.py b/inbox/contacts/crud.py index 07c5dbaae..5e46ea4dc 100644 --- a/inbox/contacts/crud.py +++ b/inbox/contacts/crud.py @@ -11,7 +11,7 @@ INBOX_PROVIDER_NAME = "inbox" -def create(namespace, db_session, name, email): +def create(namespace, db_session, name, email): # noqa: ANN201 contact = Contact( namespace=namespace, provider_name=INBOX_PROVIDER_NAME, @@ -24,7 +24,7 @@ def create(namespace, db_session, name, email): return contact -def read(namespace, db_session, contact_public_id): +def read(namespace, db_session, contact_public_id): # noqa: ANN201 return ( db_session.query(Contact) .filter( diff --git a/inbox/contacts/google.py b/inbox/contacts/google.py index 34beec212..8cbac48a1 100644 --- a/inbox/contacts/google.py +++ b/inbox/contacts/google.py @@ -56,8 +56,8 @@ def _get_google_client(self, retry_conn_errors=True): account = db_session.query(GmailAccount).get(self.account_id) db_session.expunge(account) access_token = 
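F841 marks `g1` and `g2` as assigned but never used; they exist only because the tuples are unpacked for their second element. ruff's default dummy-variable pattern exempts names that start with an underscore, so a rename would silence the rule without a suppression:

    molecules_list = [({"a@x.com"}, {1, 2}), ({"a@x.com", "b@x.com"}, {3})]

    sizes = []
    for _group, message_ids in molecules_list:
        # The leading underscore marks the group as intentionally unused.
        sizes.append(len(message_ids))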
token_manager.get_token(account) - token = gdata.gauth.AuthSubToken(access_token) - google_client = gdata.contacts.client.ContactsClient( + token = gdata.gauth.AuthSubToken(access_token) # noqa: F821 + google_client = gdata.contacts.client.ContactsClient( # noqa: F821 source=SOURCE_APP_NAME ) google_client.auth_token = token @@ -82,7 +82,7 @@ def _parse_contact_result(self, google_contact): AttributeError If the contact data could not be parsed correctly. - """ + """ # noqa: D401 email_addresses = [ email for email in google_contact.email if email.primary ] @@ -110,7 +110,7 @@ def _parse_contact_result(self, google_contact): # The entirety of the raw contact data in XML string # representation. raw_data = google_contact.to_string() - except AttributeError as e: + except AttributeError as e: # noqa: F841 self.log.error( "Something is wrong with contact", contact=google_contact ) @@ -128,7 +128,7 @@ def _parse_contact_result(self, google_contact): raw_data=raw_data, ) - def get_items(self, sync_from_dt=None, max_results=100000): + def get_items(self, sync_from_dt=None, max_results=100000): # noqa: ANN201 """ Fetches and parses fresh contact data. @@ -151,8 +151,8 @@ def get_items(self, sync_from_dt=None, max_results=100000): If no data could be fetched because of invalid credentials or insufficient permissions, respectively. - """ - query = gdata.contacts.client.ContactsQuery() + """ # noqa: D401 + query = gdata.contacts.client.ContactsQuery() # noqa: F821 # TODO(emfree): Implement batch fetching # Note: The Google contacts API will only return 25 results if # query.max_results is not explicitly set, so have to set it to a large @@ -168,7 +168,7 @@ def get_items(self, sync_from_dt=None, max_results=100000): return [ self._parse_contact_result(result) for result in results ] - except gdata.client.RequestError as e: + except gdata.client.RequestError as e: # noqa: F821 if e.status == 503: self.log.info( "Ran into Google bot detection. 
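The F821 ("undefined name") suppressions in inbox/contacts/google.py are different in kind: `gdata` is referenced but never imported, so each of these code paths would raise NameError at runtime, and the noqa comments merely record the dead dependency. If the legacy library were reinstated, the imports would presumably be the following (an assumption; the module paths are inferred from the references in the code, not from a verified install):

    # Hypothetical, requires the legacy `gdata` package:
    import gdata.client            # gdata.client.RequestError, .Unauthorized
    import gdata.contacts.client   # gdata.contacts.client.ContactsClient
    import gdata.gauth             # gdata.gauth.AuthSubToken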
Sleeping.", message=e @@ -179,7 +179,7 @@ def get_items(self, sync_from_dt=None, max_results=100000): "contact sync request failure; retrying", message=e ) time.sleep(30 + random.randrange(0, 60)) - except gdata.client.Unauthorized: + except gdata.client.Unauthorized: # noqa: F821 self.log.warning( "Invalid access token; refreshing and retrying" ) diff --git a/inbox/contacts/icloud.py b/inbox/contacts/icloud.py index 0dcb6fe2c..f230a297c 100644 --- a/inbox/contacts/icloud.py +++ b/inbox/contacts/icloud.py @@ -2,7 +2,7 @@ import contextlib -import lxml.etree as ET +import lxml.etree as ET # noqa: N812 from inbox.contacts.abc import AbstractContactsProvider from inbox.contacts.carddav import supports_carddav @@ -36,7 +36,7 @@ def __init__(self, account_id, namespace_id) -> None: provider=self.PROVIDER_NAME, ) - def _vCard_raw_to_contact(self, cardstring): + def _vCard_raw_to_contact(self, cardstring): # noqa: N802 card = vcard_from_string(cardstring) def _x(key): # Ugly parsing helper for ugly formats @@ -66,7 +66,7 @@ def _x(key): # Ugly parsing helper for ugly formats raw_data=cardstring, ) - def get_items(self, sync_from_dt=None, max_results=100000): + def get_items(self, sync_from_dt=None, max_results=100000): # noqa: ANN201 with session_scope(self.namespace_id) as db_session: account = db_session.query(GenericAccount).get(self.account_id) email_address = account.email_address diff --git a/inbox/contacts/processing.py b/inbox/contacts/processing.py index 5e196ec2b..06212166d 100644 --- a/inbox/contacts/processing.py +++ b/inbox/contacts/processing.py @@ -24,7 +24,7 @@ def _get_contact_map( """ Retrieves or creates contacts for the given address pairs, returning a dict with the canonicalized emails mapped to Contact objects. - """ + """ # noqa: D401 canonicalized_addresses = [ canonicalize(address) for _, address in all_addresses ] diff --git a/inbox/contacts/vcard.py b/inbox/contacts/vcard.py index fcf260506..a9ec1f668 100644 --- a/inbox/contacts/vcard.py +++ b/inbox/contacts/vcard.py @@ -34,14 +34,14 @@ import vobject -def list_clean(string): +def list_clean(string): # noqa: ANN201 """ Transforms a comma separated string to a list, stripping whitespace "HOME, WORK,pref" -> ['HOME', 'WORK', 'pref'] string: string of comma separated elements returns: list() - """ + """ # noqa: D401 string = string.split(",") rstring = list() for element in string: @@ -126,7 +126,7 @@ def list_clean(string): BTEXT = "\x1b[1m" -def get_names(display_name): +def get_names(display_name): # noqa: ANN201 first_name, last_name = "", display_name if display_name.find(",") > 0: @@ -143,14 +143,14 @@ def get_names(display_name): return first_name.strip().capitalize(), last_name.strip().capitalize() -def fix_vobject(vcard): +def fix_vobject(vcard): # noqa: ANN201 """ Trying to fix some more or less common errors in vcards for now only missing FN properties are handled (and reconstructed from N) :type vcard: vobject.base.Component (vobject based vcard) - """ + """ # noqa: D401 if "fn" not in vcard.contents: logging.debug("vcard has no formatted name, reconstructing...") fname = vcard.contents["n"][0].valueRepr() @@ -160,7 +160,7 @@ def fix_vobject(vcard): return vcard -def vcard_from_vobject(vcard): +def vcard_from_vobject(vcard): # noqa: ANN201 vcard = fix_vobject(vcard) vdict = VCard() if vcard.name != "VCARD": @@ -181,7 +181,7 @@ def vcard_from_vobject(vcard): return vdict -def vcard_from_string(vcard_string): +def vcard_from_string(vcard_string): # noqa: ANN201 """ vcard_string: str returns VCard() @@ -189,11
+189,11 @@ def vcard_from_string(vcard_string): try: vcard = vobject.readOne(vcard_string) except vobject.base.ParseError as error: - raise Exception(error) # TODO proper exception + raise Exception(error) # TODO proper exception # noqa: B904 return vcard_from_vobject(vcard) -def vcard_from_email(display_name, email): +def vcard_from_email(display_name, email): # noqa: ANN201 fname, lname = get_names(display_name) vcard = vobject.vCard() vcard.add("n") @@ -206,7 +206,7 @@ def vcard_from_email(display_name, email): return vcard_from_vobject(vcard) -def cards_from_file(cards_f): +def cards_from_file(cards_f): # noqa: ANN201 collector = list() for vcard in vobject.readComponents(cards_f): collector.append(vcard_from_vobject(vcard)) @@ -242,11 +242,11 @@ def __init__(self, ddict="") -> None: self.etag = "" self.edited = 0 - def serialize(self): + def serialize(self): # noqa: ANN201 return repr(list(self.items())) @property - def name(self): + def name(self): # noqa: ANN201 return str(self["N"][0][0]) if self["N"] else "" @name.setter @@ -256,22 +256,22 @@ def name(self, value): self["N"][0][0] = value @property - def fname(self): + def fname(self): # noqa: ANN201 return str(self["FN"][0][0]) if self["FN"] else "" @fname.setter def fname(self, value): self["FN"][0] = (value, {}) - def alt_keys(self): + def alt_keys(self): # noqa: ANN201 keylist = list(self) for one in [x for x in ["FN", "N", "VERSION"] if x in keylist]: keylist.remove(one) keylist.sort() return keylist - def print_email(self): - """Prints only name, email and type for use with mutt""" + def print_email(self): # noqa: ANN201 + """Prints only name, email and type for use with mutt""" # noqa: D401 collector = list() try: for one in self["EMAIL"]: @@ -284,8 +284,8 @@ def print_email(self): except KeyError: return "" - def print_tel(self): - """Prints only name, email and type for use with mutt""" + def print_tel(self): # noqa: ANN201 + """Prints only name, email and type for use with mutt""" # noqa: D401 collector = list() try: for one in self["TEL"]: @@ -299,11 +299,11 @@ def print_tel(self): return "" @property - def pretty(self): + def pretty(self): # noqa: ANN201 return self._pretty_base(self.alt_keys()) @property - def pretty_min(self): + def pretty_min(self): # noqa: ANN201 return self._pretty_base(["TEL", "EMAIL"]) def _pretty_base(self, keylist): @@ -332,7 +332,7 @@ def _line_helper(self, line): return ";" + ";".join(collector) @property - def vcf(self): + def vcf(self): # noqa: ANN201 """ Serialize to VCARD as specified in RFC2426, if no UID is specified yet, one will be added (as a UID is mandatory diff --git a/inbox/crispin.py b/inbox/crispin.py index c92840dc0..2b75020ef 100644 --- a/inbox/crispin.py +++ b/inbox/crispin.py @@ -36,28 +36,28 @@ r'"' ) -import functools -import queue -import socket -import threading -from collections import defaultdict, namedtuple -from email.parser import HeaderParser -from threading import BoundedSemaphore - -from sqlalchemy.orm import joinedload - -from inbox.exceptions import GmailSettingError -from inbox.folder_edge_cases import localized_folder_names -from inbox.logging import get_logger -from inbox.models import Account -from inbox.models.backends.generic import GenericAccount -from inbox.models.backends.gmail import GmailAccount -from inbox.models.backends.imap import ImapAccount -from inbox.models.backends.outlook import OutlookAccount -from inbox.models.session import session_scope -from inbox.util.concurrency import retry -from inbox.util.itert import chunk -from inbox.util.misc 
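The `vcard_from_string` hunk stacks a "TODO proper exception" comment on top of a B904 suppression; a small dedicated error type with chaining would resolve both at once. A sketch (`VCardParseError` is a hypothetical name, not part of the codebase):

    import vobject

    class VCardParseError(Exception):
        """Hypothetical replacement for the bare `Exception`."""

    def vcard_from_string(vcard_string: str):
        try:
            vcard = vobject.readOne(vcard_string)
        except vobject.base.ParseError as error:
            # Chained, typed, and no longer a bare Exception.
            raise VCardParseError(str(error)) from error
        return vcard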
import or_none +import functools # noqa: E402 +import queue # noqa: E402 +import socket # noqa: E402 +import threading # noqa: E402 +from collections import defaultdict, namedtuple # noqa: E402 +from email.parser import HeaderParser # noqa: E402 +from threading import BoundedSemaphore # noqa: E402 + +from sqlalchemy.orm import joinedload # noqa: E402 + +from inbox.exceptions import GmailSettingError # noqa: E402 +from inbox.folder_edge_cases import localized_folder_names # noqa: E402 +from inbox.logging import get_logger # noqa: E402 +from inbox.models import Account # noqa: E402 +from inbox.models.backends.generic import GenericAccount # noqa: E402 +from inbox.models.backends.gmail import GmailAccount # noqa: E402 +from inbox.models.backends.imap import ImapAccount # noqa: E402 +from inbox.models.backends.outlook import OutlookAccount # noqa: E402 +from inbox.models.session import session_scope # noqa: E402 +from inbox.util.concurrency import retry # noqa: E402 +from inbox.util.itert import chunk # noqa: E402 +from inbox.util.misc import or_none # noqa: E402 log = get_logger() @@ -317,7 +317,7 @@ def _set_account_info(self): self.client_cls = CrispinClient def _new_raw_connection(self): - """Returns a new, authenticated IMAPClient instance for the account.""" + """Returns a new, authenticated IMAPClient instance for the account.""" # noqa: D401 from inbox.auth.google import GoogleAuthHandler from inbox.auth.microsoft import MicrosoftAuthHandler @@ -389,7 +389,7 @@ def fixed_parse_message_list(data: list[bytes]) -> Iterable[int]: https://github.com/mjs/imapclient/blob/master/imapclient/response_parser.py#L39-L79 Implemented in: https://github.com/closeio/sync-engine/pull/483 - """ + """ # noqa: D401 # Handle case where we receive many elements instead of a single element if len(data) > 1: unique_uids: set[int] = set() @@ -520,7 +520,7 @@ def select_folder_if_necessary( reselect the folder which in turn won't initiate a new session, so if you care about having a non-stale value for HIGHESTMODSEQ then don't use this function. - """ + """ # noqa: D401 if ( self.selected_folder is None or folder_name != self.selected_folder[0] @@ -550,7 +550,7 @@ def select_folder( Starts a new session even if `folder` is already selected, since this does things like e.g. makes sure we're not getting cached/out-of-date values for HIGHESTMODSEQ from the IMAP server. - """ + """ # noqa: D401 try: interruptible_threading.check_interrupted() select_info: dict[bytes, Any] = self.conn.select_folder( @@ -566,13 +566,13 @@ def select_folder( or "does not exist" in message or "doesn't exist" in message ): - raise FolderMissingError(folder_name) + raise FolderMissingError(folder_name) # noqa: B904 if "Access denied" in message: # TODO: This is not the best exception name, but it does the # expected thing here: We stop syncing the folder (but would # attempt selecting the folder again later). 
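The wall of E402 suppressions in crispin.py follows the same pattern as config.py earlier in this patch: once any statement executes at module level, every import after it is "not at the top of the file". The config.py case in miniature:

    import urllib3

    # An executable statement before the remaining imports...
    urllib3.disable_warnings()

    # ...makes every later import trip E402.
    from urllib3.exceptions import InsecureRequestWarning  # noqa: E402

    urllib3.disable_warnings(InsecureRequestWarning)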
- raise FolderMissingError(folder_name) + raise FolderMissingError(folder_name) # noqa: B904 # We can't assume that all errors here are caused by the folder # being deleted, as other connection errors could occur - but we @@ -588,19 +588,19 @@ def select_folder( return uidvalidity_callback(self.account_id, folder_name, select_info) @property - def selected_folder_name(self): + def selected_folder_name(self): # noqa: ANN201 return or_none(self.selected_folder, lambda f: f[0]) @property - def selected_folder_info(self): + def selected_folder_info(self): # noqa: ANN201 return or_none(self.selected_folder, lambda f: f[1]) @property - def selected_uidvalidity(self): + def selected_uidvalidity(self): # noqa: ANN201 return or_none(self.selected_folder_info, lambda i: i[b"UIDVALIDITY"]) @property - def selected_uidnext(self): + def selected_uidnext(self): # noqa: ANN201 return or_none(self.selected_folder_info, lambda i: i.get(b"UIDNEXT")) @property @@ -625,7 +625,7 @@ def folder_prefix(self) -> str: else: return "" - def sync_folders(self): + def sync_folders(self): # noqa: ANN201 # () -> List[str] """ List of folders to sync, in order of sync priority. Currently, that @@ -737,7 +737,7 @@ def folders(self) -> list[RawFolder]: return raw_folders - def _get_missing_roles( + def _get_missing_roles( # noqa: D417 self, folders: list[RawFolder], roles: list[str] ) -> list[str]: """ @@ -765,7 +765,7 @@ def _get_missing_roles( return list(missing_roles) - def _guess_role(self, folder: str) -> str | None: + def _guess_role(self, folder: str) -> str | None: # noqa: D417 """ Given a folder, guess the system role that corresponds to that folder @@ -1074,7 +1074,7 @@ def save_draft(self, message, date=None) -> None: self.selected_folder_name, message, ["\\Draft", "\\Seen"], date ) - def create_message(self, message, date=None): + def create_message(self, message, date=None): # noqa: ANN201 """ Create a message on the server. Only used to fix server-side bugs, like iCloud not saving Sent messages. @@ -1104,7 +1104,7 @@ def fetch_headers( headers.update(self.conn.fetch(uid_chunk, ["BODY.PEEK[HEADER]"])) return headers - def find_by_header(self, header_name, header_value): + def find_by_header(self, header_name, header_value): # noqa: ANN201 """Find all uids in the selected folder with the given header value.""" all_uids = self.all_uids() # It would be nice to just search by header too, but some backends @@ -1124,7 +1124,9 @@ def find_by_header(self, header_name, header_value): return results - def delete_sent_message(self, message_id_header, delete_multiple=False): + def delete_sent_message( # noqa: ANN201 + self, message_id_header, delete_multiple=False + ): """ Delete a message in the sent folder, as identified by the Message-Id header. We first delete the message from the Sent folder, and then @@ -1150,7 +1152,7 @@ def delete_sent_message(self, message_id_header, delete_multiple=False): self._delete_message(message_id_header, delete_multiple) return msg_deleted - def delete_draft(self, message_id_header): + def delete_draft(self, message_id_header): # noqa: ANN201 """ Delete a draft, as identified by its Message-Id header. We first delete the message from the Drafts folder, @@ -1210,7 +1212,7 @@ def logout(self) -> None: interruptible_threading.check_interrupted() self.conn.logout() - def idle(self, timeout: int): + def idle(self, timeout: int): # noqa: ANN201 """ Idle for up to `timeout` seconds. 
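D417 means a docstring documents some arguments but not all of them. The fix is prose rather than code; sketched here in the numpydoc style these docstrings already use (the body is a placeholder, not the real heuristic):

    from __future__ import annotations

    def _guess_role(folder: str) -> str | None:
        """
        Guess the system role that corresponds to a folder.

        Parameters
        ----------
        folder: str
            The folder's display name, e.g. "[Gmail]/Trash".

        Returns
        -------
        The guessed role, or None if nothing matches.
        """
        return None  # placeholder body for the sketch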
Make sure we take the connection back out of idle mode so that we can reuse this connection in another @@ -1508,7 +1510,7 @@ def uids(self, uids: list[int]) -> list[RawMessage]: ) return raw_messages - def g_metadata(self, uids): + def g_metadata(self, uids): # noqa: ANN201 """ Download Gmail MSGIDs, THRIDs, and message sizes for the given uids. @@ -1540,7 +1542,7 @@ def g_metadata(self, uids): if uid in uid_set } - def expand_thread(self, g_thrid): + def expand_thread(self, g_thrid): # noqa: ANN201 """ Find all message UIDs in the selected folder with X-GM-THRID equal to g_thrid. @@ -1555,7 +1557,7 @@ def expand_thread(self, g_thrid): # UIDs ascend over time; return in order most-recent first return sorted(uids, reverse=True) - def find_by_header(self, header_name, header_value): + def find_by_header(self, header_name, header_value): # noqa: ANN201 interruptible_threading.check_interrupted() return self.conn.search(["HEADER", header_name, header_value]) @@ -1730,7 +1732,7 @@ def search_uids(self, criteria: list[str]) -> Iterable[int]: # Make BAD IMAP responses easier to understand to the user, with a link to the docs m = re.match(r"SEARCH command error: BAD \[(.+)\]", str(e)) if m: - raise imapclient.exceptions.InvalidCriteriaError( + raise imapclient.exceptions.InvalidCriteriaError( # noqa: B904 "{original_msg}\n\n" "This error may have been caused by a syntax error in the criteria: " "{criteria}\nPlease refer to the documentation for more information " diff --git a/inbox/error_handling.py b/inbox/error_handling.py index e7964894c..618324303 100644 --- a/inbox/error_handling.py +++ b/inbox/error_handling.py @@ -15,7 +15,7 @@ class SyncEngineRollbarHandler(RollbarHandler): - def emit(self, record): + def emit(self, record): # noqa: ANN201 try: data = json.loads(record.msg) except ValueError: @@ -38,7 +38,7 @@ def emit(self, record): return super().emit(record) -def log_uncaught_errors(logger=None, **kwargs) -> None: +def log_uncaught_errors(logger=None, **kwargs) -> None: # noqa: D417 """ Helper to log uncaught exceptions. @@ -47,7 +47,7 @@ def log_uncaught_errors(logger=None, **kwargs) -> None: logger: structlog.BoundLogger, optional The logging object to write to. - """ + """ # noqa: D401 logger = logger or get_logger() kwargs.update(create_error_log_context(sys.exc_info())) logger.error("Uncaught error", **kwargs) @@ -77,7 +77,7 @@ def log_uncaught_errors(logger=None, **kwargs) -> None: ] -def payload_handler(payload, **kw): +def payload_handler(payload, **kw): # noqa: ANN201 title = payload["data"].get("title") exception = ( payload["data"].get("body", {}).get("trace", {}).get("exception", {}) diff --git a/inbox/events/google.py b/inbox/events/google.py index 06fdd9959..55f311933 100644 --- a/inbox/events/google.py +++ b/inbox/events/google.py @@ -112,7 +112,7 @@ def sync_events( return updates def _get_raw_calendars(self) -> list[dict[str, Any]]: - """Gets raw data for the user's calendars.""" + """Gets raw data for the user's calendars.""" # noqa: D401 return self._get_resource_list(CALENDARS_URL) def _get_raw_events( @@ -135,7 +135,7 @@ def _get_raw_events( ------- list of dictionaries representing JSON. - """ + """ # noqa: D401 if sync_from_time is not None: # Note explicit offset is required by Google calendar API. 
sync_from_time_str = ( @@ -152,7 +152,7 @@ def _get_raw_events( url, updatedMin=sync_from_time_str, eventTypes="default" ) except requests.exceptions.HTTPError as exc: - assert exc.response is not None + assert exc.response is not None # noqa: PT017 if exc.response.status_code == 410: # The calendar API may return 410 if you pass a value for # updatedMin that's too far in the past. In that case, refetch @@ -162,7 +162,7 @@ def _get_raw_events( raise def _get_resource_list(self, url: str, **params) -> list[dict[str, Any]]: - """Handles response pagination.""" + """Handles response pagination.""" # noqa: D401 token = self._get_access_token() items = [] next_page_token: str | None = None @@ -240,7 +240,7 @@ def _make_event_request( event_uid: str | None = None, **kwargs, ) -> requests.Response: - """Makes a POST/PUT/DELETE request for a particular event.""" + """Makes a POST/PUT/DELETE request for a particular event.""" # noqa: D401 event_uid = event_uid or "" url = "https://www.googleapis.com/calendar/v3/calendars/{}/events/{}".format( urllib.parse.quote(calendar_uid), urllib.parse.quote(event_uid) @@ -251,7 +251,7 @@ def _make_event_request( ) return response - def create_remote_event(self, event, **kwargs): + def create_remote_event(self, event, **kwargs): # noqa: ANN201 data = _dump_event(event) params = {} @@ -439,7 +439,7 @@ def watch_calendar( # Handle error and return None self._handle_watch_errors(r) except requests.exceptions.HTTPError as e: - assert e.response is not None + assert e.response is not None # noqa: PT017 if e.response.status_code == 404: raise CalendarGoneException(calendar.uid) from e @@ -502,7 +502,9 @@ def _handle_watch_errors(self, r: requests.Response) -> None: ) -def parse_calendar_response(calendar: dict[str, Any]) -> Calendar: +def parse_calendar_response( # noqa: D417 + calendar: dict[str, Any] +) -> Calendar: """ Constructs a Calendar object from a Google calendarList resource (a dictionary). See @@ -516,7 +518,7 @@ def parse_calendar_response(calendar: dict[str, Any]) -> Calendar: ------- A corresponding Calendar instance. - """ + """ # noqa: D401 uid = calendar["id"] name = calendar["summary"] @@ -585,7 +587,7 @@ def sanitize_conference_data( ) -def parse_event_response( +def parse_event_response( # noqa: D417 event: dict[str, Any], read_only_calendar: bool ) -> Event: """ @@ -601,7 +603,7 @@ def parse_event_response( A corresponding Event instance. This instance is not committed or added to a session. - """ + """ # noqa: D401 uid = str(event["id"]) # The entirety of the raw event data in json representation. raw_data = json.dumps(event) diff --git a/inbox/events/ical.py b/inbox/events/ical.py index dc40536de..7e8d1adaf 100644 --- a/inbox/events/ical.py +++ b/inbox/events/ical.py @@ -47,7 +47,7 @@ def normalize_repeated_component( """ Some software can repeat components several times. We can safely recover from it if all of them have the same value. 
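PT017 ("assertion on exception in except block") is aimed at test code, where `pytest.raises` is the better tool. Here the asserts only narrow `exc.response` from Optional for the type checker, so the suppressions are defensible; an equivalent that needs neither the assert nor the noqa:

    import requests

    def status_or_reraise(exc: requests.exceptions.HTTPError) -> int:
        if exc.response is None:
            # No response attached; nothing to inspect, so propagate.
            raise exc
        return exc.response.status_code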
- """ + """ # noqa: D401 if component is None: return None elif isinstance(component, str): @@ -58,7 +58,7 @@ def normalize_repeated_component( raise MalformedEventError("Cannot normalize component", component) -def events_from_ics(namespace, calendar, ics_str): +def events_from_ics(namespace, calendar, ics_str): # noqa: ANN201 try: cal = iCalendar.from_ical(ics_str) except (ValueError, IndexError, KeyError, TypeError) as e: @@ -92,7 +92,9 @@ def events_from_ics(namespace, calendar, ics_str): try: original_start = component.get("dtstart").dt except AttributeError: - raise MalformedEventError("Event lacks one of DTSTART") + raise MalformedEventError( # noqa: B904 + "Event lacks one of DTSTART" + ) if component.get("dtend"): original_end = component["dtend"].dt @@ -539,7 +541,7 @@ def import_attached_events( ) -def generate_icalendar_invite(event, invite_type="request"): +def generate_icalendar_invite(event, invite_type="request"): # noqa: ANN201 # Generates an iCalendar invite from an event. assert invite_type in ["request", "cancel"] @@ -612,7 +614,9 @@ def generate_icalendar_invite(event, invite_type="request"): return cal -def generate_invite_message(ical_txt, event, account, invite_type="request"): +def generate_invite_message( # noqa: ANN201 + ical_txt, event, account, invite_type="request" +): assert invite_type in ["request", "update", "cancel"] html_body = event.description or "" @@ -657,8 +661,8 @@ def generate_invite_message(ical_txt, event, account, invite_type="request"): def send_invite(ical_txt, event, account, invite_type="request") -> None: # We send those transactional emails through a separate domain. - MAILGUN_API_KEY = config.get("NOTIFICATIONS_MAILGUN_API_KEY") - MAILGUN_DOMAIN = config.get("NOTIFICATIONS_MAILGUN_DOMAIN") + MAILGUN_API_KEY = config.get("NOTIFICATIONS_MAILGUN_API_KEY") # noqa: N806 + MAILGUN_DOMAIN = config.get("NOTIFICATIONS_MAILGUN_DOMAIN") # noqa: N806 assert MAILGUN_DOMAIN is not None assert MAILGUN_API_KEY is not None @@ -744,7 +748,7 @@ def _generate_rsvp(status, account, event): return {"cal": cal} -def generate_rsvp(event, participant, account): +def generate_rsvp(event, participant, account): # noqa: ANN201 # Generates an iCalendar file to RSVP to an invite. status = INVERTED_STATUS_MAP.get(participant["status"]) return _generate_rsvp(status, account, event) @@ -754,7 +758,7 @@ def generate_rsvp(event, participant, account): # We try to find the organizer address from the iCal file. # If it's not defined, we try to return the invite sender's # email address. 
-def rsvp_recipient(event): +def rsvp_recipient(event): # noqa: ANN201 if event is None: return None diff --git a/inbox/events/microsoft/graph_client.py b/inbox/events/microsoft/graph_client.py index fef5e02c7..bb12b8f87 100644 --- a/inbox/events/microsoft/graph_client.py +++ b/inbox/events/microsoft/graph_client.py @@ -179,7 +179,7 @@ def get_calendar(self, calendar_id: str) -> dict[str, Any]: """ return self.request("GET", f"/me/calendars/{calendar_id}") - def iter_events( + def iter_events( # noqa: D417 self, calendar_id: str, *, @@ -220,7 +220,7 @@ def iter_events( f"/me/calendars/{calendar_id}/events", params=params ) - def get_event( + def get_event( # noqa: D417 self, event_id: str, *, fields: Iterable[str] | None = None ) -> dict[str, Any]: """ @@ -242,7 +242,7 @@ def get_event( return self.request("GET", f"/me/events/{event_id}", params=params) - def iter_event_instances( + def iter_event_instances( # noqa: D417 self, event_id: str, *, diff --git a/inbox/events/microsoft/parse.py b/inbox/events/microsoft/parse.py index ec602c57e..db63fbf6f 100644 --- a/inbox/events/microsoft/parse.py +++ b/inbox/events/microsoft/parse.py @@ -70,7 +70,9 @@ def get_microsoft_tzinfo(timezone_id: str) -> pytz.tzinfo.BaseTzInfo: MAX_DATETIME = datetime.datetime(9999, 12, 31, 23, 59, 59) -def parse_msgraph_datetime_tz_as_utc(datetime_tz: MsGraphDateTimeTimeZone): +def parse_msgraph_datetime_tz_as_utc( # noqa: ANN201 + datetime_tz: MsGraphDateTimeTimeZone, +): """ Parse Microsoft Graph DateTimeTimeZone and return UTC datetime. @@ -221,7 +223,7 @@ def parse_msgraph_range_start_and_until( recurrence_timezone = get_recurrence_timezone(event) assert recurrence_timezone tzinfo = get_microsoft_tzinfo(recurrence_timezone) - range = event["recurrence"]["range"] + range = event["recurrence"]["range"] # noqa: A001 start_datetime = combine_msgraph_recurrence_date_with_time( range["startDate"], tzinfo, CombineMode.START @@ -306,7 +308,7 @@ def convert_msgraph_patterned_recurrence_to_ical_rrule( """ assert event["recurrence"] patterned_recurrence = event["recurrence"] - pattern, range = ( + pattern, range = ( # noqa: A001 patterned_recurrence["pattern"], patterned_recurrence["range"], ) diff --git a/inbox/events/recurring.py b/inbox/events/recurring.py index 0765ad209..312de0596 100644 --- a/inbox/events/recurring.py +++ b/inbox/events/recurring.py @@ -24,7 +24,7 @@ EXPAND_RECURRING_YEARS = 1 -def link_events(db_session, event): +def link_events(db_session, event): # noqa: ANN201 if isinstance(event, RecurringEvent): # Attempt to find my overrides return link_overrides(db_session, event) @@ -34,7 +34,7 @@ def link_events(db_session, event): return None -def link_overrides(db_session, event): +def link_overrides(db_session, event): # noqa: ANN201 # Find event instances which override this specific # RecurringEvent instance. overrides = ( @@ -53,7 +53,7 @@ def link_overrides(db_session, event): return overrides -def link_master(db_session, event): +def link_master(db_session, event): # noqa: ANN201 # Find the master RecurringEvent that spawned this # RecurringEventOverride (may not exist if it hasn't # been synced yet) @@ -73,7 +73,7 @@ def link_master(db_session, event): return event.master # This may be None. 
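A001 flags bindings that shadow a Python builtin; in microsoft/parse.py the local is literally called `range`. A rename is cheap and keeps the builtin usable later in the same scope:

    event = {"recurrence": {"range": {"startDate": "2024-01-01"}}}

    # Instead of `range = event["recurrence"]["range"]  # noqa: A001`:
    recurrence_range = event["recurrence"]["range"]

    # The builtin still works afterwards.
    first_three = list(range(3))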
-def parse_rrule(event): +def parse_rrule(event): # noqa: ANN201 # Parse the RRULE string and return a dateutil.rrule.rrule object if event.rrule is not None: if event.all_day: @@ -89,7 +89,7 @@ def parse_rrule(event): return rule except Exception as e: - log.error( + log.error( # noqa: G201 "Error parsing RRULE entry", event_id=event.id, error=e, @@ -98,7 +98,7 @@ def parse_rrule(event): return None -def parse_exdate(event): +def parse_exdate(event): # noqa: ANN201 # Parse the EXDATE string and return a list of arrow datetimes excl_dates = [] if event.exdate: @@ -115,7 +115,7 @@ def parse_exdate(event): return excl_dates -def get_start_times(event, start=None, end=None): +def get_start_times(event, start=None, end=None): # noqa: ANN201 # Expands the rrule on event to return a list of arrow datetimes # representing start times for its recurring instances. # If start and/or end are supplied, will return times within that range, @@ -194,7 +194,7 @@ def get_start_times(event, start=None, end=None): weekday_map = (MO, TU, WE, TH, FR, SA, SU) -def rrule_to_json(r): +def rrule_to_json(r): # noqa: ANN201 if not isinstance(r, rrule): r = parse_rrule(r) info = vars(r) diff --git a/inbox/events/remote_sync.py b/inbox/events/remote_sync.py index 49cdf9009..1d951632d 100644 --- a/inbox/events/remote_sync.py +++ b/inbox/events/remote_sync.py @@ -142,7 +142,7 @@ def handle_calendar_deletes( def handle_calendar_updates( namespace_id: int, calendars, log: Any, db_session: Any ) -> list[tuple[str, int]]: - """Persists new or updated Calendar objects to the database.""" + """Persists new or updated Calendar objects to the database.""" # noqa: D401 ids_ = [] added_count = 0 updated_count = 0 @@ -185,7 +185,7 @@ def handle_event_updates( log: Any, db_session: Any, ) -> None: - """Persists new or updated Event objects to the database.""" + """Persists new or updated Event objects to the database.""" # noqa: D401 added_count = 0 updated_count = 0 existing_event_query = ( @@ -380,7 +380,7 @@ def _sync_data(self) -> None: try: self._sync_calendar(calendar, db_session) except HTTPError as exc: - assert exc.response is not None + assert exc.response is not None # noqa: PT017 if exc.response.status_code == 404: self.log.warning( "Tried to sync a deleted calendar." diff --git a/inbox/events/util.py b/inbox/events/util.py index b019be9da..e248ba91d 100644 --- a/inbox/events/util.py +++ b/inbox/events/util.py @@ -14,7 +14,7 @@ class MalformedEventError(Exception): pass -def parse_datetime(datetime): +def parse_datetime(datetime): # noqa: ANN201 # returns a UTC-aware datetime as an Arrow object. 
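G201 prefers `log.exception(...)` over `log.error(..., exc_info=True)` inside an except block; structlog loggers expose the same method, so the suppressed calls have a drop-in alternative. A sketch with a generic structlog logger:

    import structlog
    from dateutil.rrule import rrulestr

    log = structlog.get_logger()

    def parse_rrule_or_none(rrule_string: str):
        try:
            return rrulestr(rrule_string)
        except Exception:
            # Captures the active traceback, like error(..., exc_info=True),
            # in the form G201 prefers.
            log.exception("Error parsing RRULE entry")
            return None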
# to access the `datetime` object: `obj.datetime` # to convert to a naive datetime: `obj.naive` @@ -26,7 +26,7 @@ def parse_datetime(datetime): return None -def parse_rrule_datetime(datetime, tzinfo=None): +def parse_rrule_datetime(datetime, tzinfo=None): # noqa: ANN201 # format: 20140904T133000Z (datetimes) or 20140904 (dates) if datetime[-1] == "Z": tzinfo = "UTC" @@ -40,19 +40,19 @@ def parse_rrule_datetime(datetime, tzinfo=None): return dt -def serialize_datetime(d): +def serialize_datetime(d): # noqa: ANN201 return d.strftime("%Y%m%dT%H%M%SZ") EventTime = namedtuple("EventTime", ["start", "end", "all_day"]) -def when_to_event_time(raw): +def when_to_event_time(raw): # noqa: ANN201 when = parse_as_when(raw) return EventTime(when.start, when.end, when.all_day) -def parse_google_time(d): +def parse_google_time(d): # noqa: ANN201 # google dictionaries contain either 'date' or 'dateTime' & 'timeZone' # 'dateTime' is in ISO format so is UTC-aware, 'date' is just a date for key, dt in d.items(): @@ -80,16 +80,18 @@ def google_to_event_time(start_raw: str, end_raw: str) -> EventTime: return event_time -def valid_base36(uid): +def valid_base36(uid): # noqa: ANN201 # Check that an uid is a base36 element. return all(c in (string.ascii_lowercase + string.digits) for c in uid) -def removed_participants(original_participants, update_participants): +def removed_participants( # noqa: ANN201 + original_participants, update_participants +): """ Returns the name and addresses of the participants which have been removed. - """ + """ # noqa: D401 original_table = { part["email"].lower(): part.get("name") for part in original_participants @@ -102,7 +104,7 @@ def removed_participants(original_participants, update_participants): } ret = [] - for email in original_table: + for email in original_table: # noqa: PLC0206 if email not in update_table: ret.append(dict(email=email, name=original_table[email])) diff --git a/inbox/exceptions.py b/inbox/exceptions.py index b755e8d11..c654f4c74 100644 --- a/inbox/exceptions.py +++ b/inbox/exceptions.py @@ -6,7 +6,7 @@ class SSLNotSupportedError(AuthError): pass -class ConnectionError(AuthError): +class ConnectionError(AuthError): # noqa: A001 pass diff --git a/inbox/folder_edge_cases.py b/inbox/folder_edge_cases.py index 58023f135..e76c96998 100644 --- a/inbox/folder_edge_cases.py +++ b/inbox/folder_edge_cases.py @@ -4,7 +4,7 @@ This list was constructed semi automatically, and manuallly verified. Should we encounter problems with account folders in the future, add them below to test for them. -""" +""" # noqa: D404 localized_folder_names = { "trash": { diff --git a/inbox/heartbeat/config.py b/inbox/heartbeat/config.py index 266520204..477441fb6 100644 --- a/inbox/heartbeat/config.py +++ b/inbox/heartbeat/config.py @@ -26,7 +26,7 @@ def _get_redis_connection_pool(host, port, db): # instantiating the singleton HeartBeatStore, so doing this here # should be okay for now. # TODO[k]: Refactor. 
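Note: PLC0206 (flagged in removed_participants above) fires on loops that iterate a dict's keys and then index back into the same dict. Iterating `.items()` avoids both the warning and the second lookup; a behavior-preserving sketch, not applied by this patch:

    # PLC0206: iterate key/value pairs directly instead of re-indexing.
    ret = [
        {"email": email, "name": name}
        for email, name in original_table.items()
        if email not in update_table
    ]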
- global connection_pool_map + global connection_pool_map # noqa: PLW0602 connection_pool = connection_pool_map.get(host) if connection_pool is None: @@ -43,11 +43,11 @@ def _get_redis_connection_pool(host, port, db): return connection_pool -def account_redis_shard_number(account_id): +def account_redis_shard_number(account_id): # noqa: ANN201 return account_id % len(REDIS_SHARDS) -def get_redis_client(account_id): +def get_redis_client(account_id): # noqa: ANN201 account_shard_number = account_redis_shard_number(account_id) host = REDIS_SHARDS[account_shard_number] diff --git a/inbox/heartbeat/status.py b/inbox/heartbeat/status.py index 28f802449..0f85f2e38 100644 --- a/inbox/heartbeat/status.py +++ b/inbox/heartbeat/status.py @@ -16,7 +16,9 @@ FolderPing = namedtuple("FolderPing", ["id", "alive", "timestamp"]) -def get_ping_status(account_ids, host=None, port=6379, threshold=ALIVE_EXPIRY): +def get_ping_status( # noqa: ANN201 + account_ids, host=None, port=6379, threshold=ALIVE_EXPIRY +): # Query the indexes and not the per-folder info for faster lookup. store = HeartbeatStore.store(host, port) now = time.time() @@ -48,7 +50,9 @@ def get_ping_status(account_ids, host=None, port=6379, threshold=ALIVE_EXPIRY): return accounts -def clear_heartbeat_status(account_id, folder_id=None, device_id=None): +def clear_heartbeat_status( # noqa: ANN201 + account_id, folder_id=None, device_id=None +): # Clears the status for the account, folder and/or device. # Returns the number of folders cleared. store = HeartbeatStore.store() diff --git a/inbox/heartbeat/store.py b/inbox/heartbeat/store.py index bd8fcc7bf..ec0b21d95 100644 --- a/inbox/heartbeat/store.py +++ b/inbox/heartbeat/store.py @@ -12,12 +12,14 @@ log = get_logger() -def safe_failure(f): +def safe_failure(f): # noqa: ANN201 def wrapper(*args, **kwargs): try: return f(*args, **kwargs) except Exception: - log.error("Error interacting with heartbeats", exc_info=True) + log.error( # noqa: G201 + "Error interacting with heartbeats", exc_info=True + ) return wrapper @@ -31,31 +33,31 @@ def __init__(self, account_id, folder_id) -> None: def __repr__(self) -> str: return self.key - def __lt__(self, other): + def __lt__(self, other): # noqa: ANN204 if self.account_id != other.account_id: return self.account_id < other.account_id return self.folder_id < other.folder_id - def __eq__(self, other): + def __eq__(self, other): # noqa: ANN204 return ( self.account_id == other.account_id and self.folder_id == other.folder_id ) @classmethod - def all_folders(cls, account_id): + def all_folders(cls, account_id): # noqa: ANN206 return cls(account_id, "*") @classmethod - def contacts(cls, account_id): + def contacts(cls, account_id): # noqa: ANN206 return cls(account_id, CONTACTS_FOLDER_ID) @classmethod - def events(cls, account_id): + def events(cls, account_id): # noqa: ANN206 return cls(account_id, EVENTS_FOLDER_ID) @classmethod - def from_string(cls, string_key): + def from_string(cls, string_key): # noqa: ANN206 account_id, folder_id = (int(part) for part in string_key.split(":")) return cls(account_id, folder_id) @@ -83,7 +85,7 @@ def publish(self, **kwargs) -> None: self.store.publish(self.key, self.heartbeat_at) except Exception: log = get_logger() - log.error( + log.error( # noqa: G201 "Error while writing the heartbeat status", account_id=self.key.account_id, folder_id=self.key.folder_id, @@ -111,7 +113,7 @@ def __init__(self, host=None, port=6379) -> None: self.port = port @classmethod - def store(cls, host=None, port=None): + def store(cls, host=None, 
port=None): # noqa: ANN206 # Allow singleton access to the store, keyed by host. if cls._instances.get(host) is None: cls._instances[host] = cls(host, port) @@ -138,7 +140,9 @@ def remove(self, key, device_id=None, client=None) -> None: self.remove_from_folder_index(key, client) @safe_failure - def remove_folders(self, account_id, folder_id=None, device_id=None): + def remove_folders( # noqa: ANN201 + self, account_id, folder_id=None, device_id=None + ): # Remove heartbeats for the given account, folder and/or device. if folder_id: key = HeartbeatStatusKey(account_id, folder_id) @@ -172,11 +176,11 @@ def update_accounts_index(self, key) -> None: # Find the oldest heartbeat from the account-folder index try: client = heartbeat_config.get_redis_client(key.account_id) - f, oldest_heartbeat = client.zrange( + f, oldest_heartbeat = client.zrange( # noqa: F841 key.account_id, 0, 0, withscores=True ).pop() client.zadd("account_index", {key.account_id: oldest_heartbeat}) - except Exception: + except Exception: # noqa: S110 # If all heartbeats were deleted at the same time as this, the pop # will fail -- ignore it. pass @@ -191,11 +195,11 @@ def remove_from_account_index(self, account_id, client) -> None: client.delete(account_id) client.zrem("account_index", account_id) - def get_account_folders(self, account_id): + def get_account_folders(self, account_id): # noqa: ANN201 client = heartbeat_config.get_redis_client(account_id) return client.zrange(account_id, 0, -1, withscores=True) - def get_accounts_folders(self, account_ids): + def get_accounts_folders(self, account_ids): # noqa: ANN201 # This is where things get interesting --- we need to make queries # to multiple shards and return the results to a single caller. # Preferred method of querying for multiple accounts. 
Uses pipelining diff --git a/inbox/ignition.py b/inbox/ignition.py index 945bb3dfc..74d7add77 100644 --- a/inbox/ignition.py +++ b/inbox/ignition.py @@ -31,7 +31,9 @@ pool_tracker: MutableMapping[Any, dict[str, Any]] = weakref.WeakKeyDictionary() -def build_uri(username, password, hostname, port, database_name): +def build_uri( # noqa: ANN201 + username, password, hostname, port, database_name +): uri_template = ( "mysql+mysqldb://{username}:{password}@{hostname}" ":{port}/{database_name}?charset=utf8mb4" @@ -45,7 +47,7 @@ def build_uri(username, password, hostname, port, database_name): ) -def engine( +def engine( # noqa: ANN201 database_name, database_uri, pool_size=DB_POOL_SIZE, @@ -178,13 +180,13 @@ def __init__(self, databases, users, include_disabled=False) -> None: def shard_key_for_id(self, id_) -> int: return 0 - def get_for_id(self, id_): + def get_for_id(self, id_): # noqa: ANN201 return self.engines[self.shard_key_for_id(id_)] - def zone_for_id(self, id_): + def zone_for_id(self, id_): # noqa: ANN201 return self._engine_zones[self.shard_key_for_id(id_)] - def shards_for_zone(self, zone): + def shards_for_zone(self, zone): # noqa: ANN201 return [k for k, z in self._engine_zones.items() if z == zone] @@ -258,7 +260,9 @@ def verify_db(engine, schema, key) -> None: verified.add(table) -def reset_invalid_autoincrements(engine, schema, key, dry_run=True): +def reset_invalid_autoincrements( # noqa: ANN201 + engine, schema, key, dry_run=True +): from inbox.models.base import MailSyncBase query = """SELECT AUTO_INCREMENT from information_schema.TABLES where diff --git a/inbox/instrumentation.py b/inbox/instrumentation.py index 823d88d30..143957e1a 100644 --- a/inbox/instrumentation.py +++ b/inbox/instrumentation.py @@ -20,7 +20,9 @@ def start(self) -> None: try: signal.signal(signal.SIGVTALRM, self._sample) except ValueError: - raise ValueError("Can only sample on the main thread") + raise ValueError( # noqa: B904 + "Can only sample on the main thread" + ) signal.setitimer(signal.ITIMER_VIRTUAL, self.interval, 0) @@ -39,7 +41,7 @@ def _format_frame(self, frame): frame.f_code.co_name, frame.f_globals.get("__name__") ) - def stats(self): + def stats(self): # noqa: ANN201 if self._started is None: return "" elapsed = time.time() - self._started diff --git a/inbox/interruptible_threading.py b/inbox/interruptible_threading.py index 0a03f9bfc..b4ebc5729 100644 --- a/inbox/interruptible_threading.py +++ b/inbox/interruptible_threading.py @@ -255,7 +255,7 @@ class InterruptibleThreadTimeout(BaseException): @contextlib.contextmanager -def timeout(timeout: float): +def timeout(timeout: float): # noqa: ANN201 """ Context manager to set a timeout for the interruptible operations run by the current interruptible thread. diff --git a/inbox/logging.py b/inbox/logging.py index b7de19388..e51dc14e3 100644 --- a/inbox/logging.py +++ b/inbox/logging.py @@ -22,7 +22,7 @@ MAX_EXCEPTION_LENGTH = 10000 -def find_first_app_frame_and_name(ignores=None): +def find_first_app_frame_and_name(ignores=None): # noqa: ANN201 """ Remove ignorable calls and return the relevant app frame. Borrowed from structlog, but fixes an issue when the stack includes an 'exec' statement @@ -79,17 +79,17 @@ def _record_module(logger, name, event_dict): return event_dict -def safe_format_exception(etype, value, tb, limit=None): +def safe_format_exception(etype, value, tb, limit=None): # noqa: ANN201 """ Similar to structlog._format_exception, but truncate the exception part. 
This is because SQLAlchemy exceptions can sometimes have ludicrously large exception strings. """ if tb: - list = ["Traceback (most recent call last):\n"] - list = list + traceback.format_tb(tb, limit) + list = ["Traceback (most recent call last):\n"] # noqa: A001 + list = list + traceback.format_tb(tb, limit) # noqa: A001 elif etype and value: - list = [] + list = [] # noqa: A001 else: return None exc_only = traceback.format_exception_only(etype, value) @@ -97,7 +97,7 @@ def safe_format_exception(etype, value, tb, limit=None): # errors it may contain multiple elements, but we don't really need to # worry about that here. exc_only[0] = exc_only[0][:MAX_EXCEPTION_LENGTH] - list = list + exc_only + list = list + exc_only # noqa: A001 return "".join(list) @@ -257,7 +257,7 @@ def json_excepthook(etype, value, tb) -> None: class ConditionalFormatter(logging.Formatter): - def format(self, record): + def format(self, record): # noqa: ANN201 if ( record.name in {"__main__", "inbox"} or record.name.startswith("inbox.") diff --git a/inbox/mailsync/backends/gmail.py b/inbox/mailsync/backends/gmail.py index 9f7365a82..fff902022 100644 --- a/inbox/mailsync/backends/gmail.py +++ b/inbox/mailsync/backends/gmail.py @@ -60,7 +60,7 @@ def __init__(self, *args, **kwargs) -> None: FolderSyncEngine.__init__(self, *args, **kwargs) self.saved_uids = set() - def is_all_mail(self, crispin_client): + def is_all_mail(self, crispin_client): # noqa: ANN201 if not hasattr(self, "_is_all_mail"): folder_names = crispin_client.folder_names() self._is_all_mail = ( @@ -69,7 +69,7 @@ def is_all_mail(self, crispin_client): ) return self._is_all_mail - def should_idle(self, crispin_client): + def should_idle(self, crispin_client): # noqa: ANN201 return self.is_all_mail(crispin_client) def initial_sync_impl(self, crispin_client: "CrispinClient") -> None: @@ -216,7 +216,7 @@ def resync_uids_impl(self) -> None: for entry in imap_uid_entries.yield_per(chunk_size): if entry.message.g_msgid in mapping: log.debug( - "X-GM-MSGID {} from UID {} to UID {}".format( + "X-GM-MSGID {} from UID {} to UID {}".format( # noqa: G001 entry.message.g_msgid, entry.msg_uid, mapping[entry.message.g_msgid], @@ -226,7 +226,7 @@ def resync_uids_impl(self) -> None: else: db_session.delete(entry) log.debug( - "UIDVALIDITY from {} to {}".format( + "UIDVALIDITY from {} to {}".format( # noqa: G001 imap_folder_info_entry.uidvalidity, uidvalidity ) ) @@ -367,7 +367,9 @@ def download_and_commit_uids(self, crispin_client, uids) -> int | None: self.saved_uids.update(new_uids) return None - def expand_uids_to_download(self, crispin_client, uids, metadata): + def expand_uids_to_download( # noqa: ANN201 + self, crispin_client, uids, metadata + ): # During Gmail initial sync, we expand threads: given a UID to # download, we want to also download other UIDs on the same thread, so # that you don't see incomplete thread views for the duration of the @@ -432,7 +434,7 @@ def batch_download_uids( interruptible_threading.sleep(THROTTLE_WAIT) @property - def throttled(self): + def throttled(self): # noqa: ANN201 with session_scope(self.namespace_id) as db_session: account = db_session.query(Account).get(self.account_id) throttled = account.throttled @@ -440,7 +442,7 @@ def throttled(self): return throttled -def g_msgids(namespace_id, session, in_): +def g_msgids(namespace_id, session, in_): # noqa: ANN201 if not in_: return [] # Easiest way to account-filter Messages is to namespace-filter from diff --git a/inbox/mailsync/backends/imap/common.py 
b/inbox/mailsync/backends/imap/common.py index 9df573d6b..705b8af1d 100644 --- a/inbox/mailsync/backends/imap/common.py +++ b/inbox/mailsync/backends/imap/common.py @@ -60,7 +60,7 @@ def local_uids( return {uid for uid, in db_api_cursor.fetchall()} -def lastseenuid(account_id, session, folder_id): +def lastseenuid(account_id, session, folder_id): # noqa: ANN201 q = session.query(func.max(ImapUid.msg_uid)).with_hint( ImapUid, "FORCE INDEX (ix_imapuid_account_id_folder_id_msg_uid_desc)" ) @@ -278,7 +278,7 @@ def remove_deleted_uids(account_id, folder_id, uids) -> None: log.info("Deleted expunged UIDs", count=deleted_uid_count) -def get_folder_info(account_id, session, folder_name): +def get_folder_info(account_id, session, folder_name): # noqa: ANN201 try: # using .one() here may catch duplication bugs return ( diff --git a/inbox/mailsync/backends/imap/generic.py b/inbox/mailsync/backends/imap/generic.py index e035ec13e..9d982c3bd 100644 --- a/inbox/mailsync/backends/imap/generic.py +++ b/inbox/mailsync/backends/imap/generic.py @@ -85,31 +85,31 @@ from inbox.util.threading import MAX_THREAD_LENGTH, fetch_corresponding_thread log = get_logger() -from inbox.config import config -from inbox.crispin import ( +from inbox.config import config # noqa: E402 +from inbox.crispin import ( # noqa: E402 CrispinClient, FolderMissingError, RawMessage, connection_pool, retry_crispin, ) -from inbox.events.ical import import_attached_events -from inbox.heartbeat.store import HeartbeatStatusProxy -from inbox.mailsync.backends.base import ( +from inbox.events.ical import import_attached_events # noqa: E402 +from inbox.heartbeat.store import HeartbeatStatusProxy # noqa: E402 +from inbox.mailsync.backends.base import ( # noqa: E402 THROTTLE_COUNT, THROTTLE_WAIT, MailsyncDone, MailsyncError, ) -from inbox.mailsync.backends.imap import common -from inbox.models import Account, Folder, Message -from inbox.models.backends.imap import ( +from inbox.mailsync.backends.imap import common # noqa: E402 +from inbox.models import Account, Folder, Message # noqa: E402 +from inbox.models.backends.imap import ( # noqa: E402 ImapFolderInfo, ImapFolderSyncStatus, ImapThread, ImapUid, ) -from inbox.models.session import session_scope +from inbox.models.session import session_scope # noqa: E402 # Idle doesn't necessarily pick up flag changes, so we don't want to # idle for very long, or we won't detect things like messages being @@ -179,7 +179,7 @@ def __init__( .one() ) except NoResultFound: - raise MailsyncError( + raise MailsyncError( # noqa: B904 f"Missing Folder '{folder_name}' on account {account_id}" ) @@ -281,7 +281,7 @@ def start_sync(saved_folder_status): folder_id=self.folder_id, account_id=self.account_id, ) - raise MailsyncDone() + raise MailsyncDone() # noqa: B904 # NOTE: The parent ImapSyncMonitor handler could kill us at any # time if it receives a shutdown command. 
The shutdown command is @@ -327,7 +327,7 @@ def _run_impl(self): ) account.sync_state = "stopped" db_session.commit() - raise MailsyncDone() + raise MailsyncDone() # noqa: B904 else: self.state = "finish" self.heartbeat_status.publish(state=self.state) @@ -340,9 +340,9 @@ def _run_impl(self): account_id=self.account_id, folder_id=self.folder_id, ) - raise MailsyncDone() + raise MailsyncDone() # noqa: B904 except ValidationError as exc: - log.error( + log.error( # noqa: G201 "Error authenticating; stopping sync", exc_info=True, account_id=self.account_id, @@ -353,7 +353,7 @@ def _run_impl(self): account = db_session.query(Account).get(self.account_id) account.mark_invalid() account.update_sync_error(exc) - raise MailsyncDone() + raise MailsyncDone() # noqa: B904 # State handlers are idempotent, so it's okay if we're # killed between the end of the handler and the commit. @@ -523,7 +523,7 @@ def initial_sync_impl(self, crispin_client: CrispinClient) -> None: # schedule change_poller to die change_poller.kill() - def should_idle(self, crispin_client): + def should_idle(self, crispin_client): # noqa: ANN201 if not hasattr(self, "_should_idle"): self._should_idle = ( crispin_client.idle_supported() @@ -723,7 +723,7 @@ def add_message_to_thread( else: parent_thread.messages.append(message_obj) - def download_and_commit_uids(self, crispin_client, uids): + def download_and_commit_uids(self, crispin_client, uids): # noqa: ANN201 start = datetime.utcnow() raw_messages = crispin_client.uids(uids) if not raw_messages: @@ -836,7 +836,7 @@ def get_new_uids(self, crispin_client) -> None: or "does not exist" in message or "doesn't exist" in message ): - raise FolderMissingError() + raise FolderMissingError() # noqa: B904 else: raise if remote_uidnext is not None and remote_uidnext == self.uidnext: @@ -1055,7 +1055,7 @@ def check_uid_changes(self, crispin_client: "CrispinClient") -> None: self.generic_refresh_flags(crispin_client) @property - def uidvalidity(self): + def uidvalidity(self): # noqa: ANN201 if not hasattr(self, "_uidvalidity"): self._uidvalidity = self._load_imap_folder_info().uidvalidity return self._uidvalidity @@ -1066,7 +1066,7 @@ def uidvalidity(self, value): self._uidvalidity = value @property - def uidnext(self): + def uidnext(self): # noqa: ANN201 if not hasattr(self, "_uidnext"): self._uidnext = self._load_imap_folder_info().uidnext return self._uidnext @@ -1077,7 +1077,7 @@ def uidnext(self, value): self._uidnext = value @property - def last_slow_refresh(self): + def last_slow_refresh(self): # noqa: ANN201 # We persist the last_slow_refresh timestamp so that we don't end up # doing a (potentially expensive) full flags refresh for every account # on every process restart. 
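Note: B904 marks a `raise` inside an `except` block that neither chains (`from exc`) nor explicitly discards (`from None`) the active exception. This patch suppresses; the direct fix is explicit chaining, e.g. for the NoResultFound handler earlier in this file (sketch, not applied):

    except NoResultFound as exc:
        raise MailsyncError(
            f"Missing Folder '{folder_name}' on account {account_id}"
        ) from exc

For the MailsyncDone raises, where the causing exception is deliberately dropped, `raise MailsyncDone() from None` would state that intent and satisfy the rule as well.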
@@ -1093,7 +1093,7 @@ def last_slow_refresh(self, value): self._last_slow_refresh = value @property - def highestmodseq(self): + def highestmodseq(self): # noqa: ANN201 if not hasattr(self, "_highestmodseq"): self._highestmodseq = self._load_imap_folder_info().highestmodseq return self._highestmodseq @@ -1129,7 +1129,9 @@ def _update_imap_folder_info(self, attrname, value): setattr(imapfolderinfo, attrname, value) db_session.commit() - def uidvalidity_cb(self, account_id, folder_name, select_info): + def uidvalidity_cb( # noqa: ANN201 + self, account_id, folder_name, select_info + ): assert folder_name == self.folder_name assert account_id == self.account_id selected_uidvalidity = select_info[b"UIDVALIDITY"] @@ -1161,7 +1163,7 @@ class UidInvalid(Exception): def uidvalidity_cb( account_id: int, folder_name: str, select_info: dict[bytes, Any] ) -> dict[bytes, Any]: - assert ( + assert ( # noqa: PT018 folder_name is not None and select_info is not None ), "must start IMAP session before verifying UIDVALIDITY" with session_scope(account_id) as db_session: diff --git a/inbox/mailsync/backends/imap/monitor.py b/inbox/mailsync/backends/imap/monitor.py index a710671d7..d2e02344d 100644 --- a/inbox/mailsync/backends/imap/monitor.py +++ b/inbox/mailsync/backends/imap/monitor.py @@ -43,11 +43,11 @@ def __init__(self, account, heartbeat=1, refresh_frequency=30) -> None: BaseMailSyncMonitor.__init__(self, account, heartbeat) @retry_crispin - def prepare_sync(self): + def prepare_sync(self): # noqa: ANN201 """ Gets and save Folder objects for folders on the IMAP backend. Returns a list of folder names for the folders we want to sync (in order). - """ + """ # noqa: D401 with connection_pool(self.account_id).get() as crispin_client: # Get a fresh list of the folder names from the remote remote_folders = crispin_client.folders() @@ -184,7 +184,7 @@ def sync(self) -> None: interruptible_threading.sleep(self.refresh_frequency) self.start_new_folder_sync_engines() except ValidationError as exc: - log.error( + log.error( # noqa: G201 "Error authenticating; stopping sync", exc_info=True, account_id=self.account_id, diff --git a/inbox/mailsync/frontend.py b/inbox/mailsync/frontend.py index f264ff7ad..761c72be1 100644 --- a/inbox/mailsync/frontend.py +++ b/inbox/mailsync/frontend.py @@ -13,7 +13,7 @@ class ProfilingHTTPFrontend: or syncback process. It allows you to programmatically interact with the process: to get profile/memory/load metrics, or to schedule new account syncs. - """ + """ # noqa: D404 def __init__(self, port, profile) -> None: self.port = port @@ -81,7 +81,7 @@ def unassign_account(): @app.route("/build-metadata", methods=["GET"]) def build_metadata(): filename = "/usr/share/python/cloud-core/metadata.txt" - with open(filename) as f: + with open(filename) as f: # noqa: PTH123 _, build_id = f.readline().rstrip("\n").split() build_id = build_id[ 1:-1 diff --git a/inbox/mailsync/service.py b/inbox/mailsync/service.py index 2ec61c886..501aa4a7e 100644 --- a/inbox/mailsync/service.py +++ b/inbox/mailsync/service.py @@ -46,7 +46,7 @@ SHARED_SYNC_EVENT_QUEUE_ZONE_MAP = {} -def shared_sync_event_queue_for_zone(zone): +def shared_sync_event_queue_for_zone(zone): # noqa: ANN201 queue_name = SHARED_SYNC_EVENT_QUEUE_NAME.format(zone) if queue_name not in SHARED_SYNC_EVENT_QUEUE_ZONE_MAP: SHARED_SYNC_EVENT_QUEUE_ZONE_MAP[queue_name] = EventQueue(queue_name) @@ -161,7 +161,7 @@ def _run_impl(self): """ Waits for notifications about Account migrations and checks for start/stop commands. 
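Note: PT018 (suppressed on the module-level uidvalidity_cb above) objects to composite assertions, since a failure of `assert a and b` does not say which condition was false. Splitting is the usual fix; a sketch reusing the same message, not part of this patch:

    # PT018: one assert per condition pinpoints the failing half.
    msg = "must start IMAP session before verifying UIDVALIDITY"
    assert folder_name is not None, msg
    assert select_info is not None, msg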
- """ + """ # noqa: D401 # When the service first starts we should check the state of the world. self.poll() event = None @@ -242,7 +242,7 @@ def poll(self) -> None: try: self.start_sync(account_id) except OperationalError: - self.log.error( + self.log.error( # noqa: G201 "Database error starting account sync", exc_info=True ) log_uncaught_errors() @@ -253,12 +253,12 @@ def poll(self) -> None: try: self.stop_sync(account_id) except OperationalError: - self.log.error( + self.log.error( # noqa: G201 "Database error stopping account sync", exc_info=True ) log_uncaught_errors() - def account_ids_to_sync(self): + def account_ids_to_sync(self): # noqa: ANN201 with global_session_scope() as db_session: return { r[0] @@ -285,7 +285,7 @@ def account_ids_to_sync(self): .all() } - def account_ids_owned(self): + def account_ids_owned(self): # noqa: ANN201 with global_session_scope() as db_session: return { r[0] @@ -331,7 +331,7 @@ def start_sync(self, account_id: int) -> bool: Starts a sync for the account with the given account_id. If that account doesn't exist, does nothing. - """ + """ # noqa: D401 with self.semaphore, session_scope(account_id) as db_session: account = ( db_session.query(Account).with_for_update().get(account_id) @@ -393,7 +393,7 @@ def start_sync(self, account_id: int) -> bool: sync_host=account.sync_host, ) except Exception: - self.log.error( + self.log.error( # noqa: G201 "Error starting sync", exc_info=True, account_id=account_id ) return False @@ -408,7 +408,7 @@ def stop_sync(self, account_id) -> bool: Stops the sync for the account with given account_id. If that account doesn't exist, does nothing. - """ + """ # noqa: D401 with self.semaphore: self.log.info("Stopping monitors", account_id=account_id) if account_id in self.email_sync_monitors: diff --git a/inbox/models/account.py b/inbox/models/account.py index 9e4e23a49..d36d3da38 100644 --- a/inbox/models/account.py +++ b/inbox/models/account.py @@ -69,7 +69,7 @@ def provider(self) -> Never: raise NotImplementedError @property - def verbose_provider(self): + def verbose_provider(self): # noqa: ANN201 """ A detailed identifier for the account provider (e.g., 'gmail', 'office365', 'outlook'). @@ -89,17 +89,17 @@ def category_type(self) -> CategoryType: raise NotImplementedError @property - def auth_handler(self): + def auth_handler(self): # noqa: ANN201 from inbox.auth.base import handler_from_provider return handler_from_provider(self.provider) @property - def provider_info(self): + def provider_info(self): # noqa: ANN201 return provider_info(self.provider) @property - def thread_cls(self): + def thread_cls(self): # noqa: ANN201 from inbox.models.thread import Thread return Thread @@ -149,7 +149,7 @@ def create_emailed_events_calendar(self) -> None: self._emailed_events_calendar = cal @property - def emailed_events_calendar(self): + def emailed_events_calendar(self): # noqa: ANN201 self.create_emailed_events_calendar() return self._emailed_events_calendar @@ -172,7 +172,7 @@ def emailed_events_calendar(self, cal): # folders and heartbeats. 
@property - def sync_enabled(self): + def sync_enabled(self): # noqa: ANN201 return self.sync_should_run sync_state = Column( @@ -185,7 +185,7 @@ def sync_enabled(self): ) @property - def sync_status(self): + def sync_status(self): # noqa: ANN201 d = dict( id=self.id, email=self.email_address, @@ -200,11 +200,11 @@ def sync_status(self): return d @property - def sync_error(self): + def sync_error(self): # noqa: ANN201 return self._sync_status.get("sync_error") @property - def initial_sync_start(self): + def initial_sync_start(self): # noqa: ANN201 if len(self.folders) == 0 or any( [f.initial_sync_start is None for f in self.folders] ): @@ -212,7 +212,7 @@ def initial_sync_start(self): return min(f.initial_sync_start for f in self.folders) @property - def initial_sync_end(self): + def initial_sync_end(self): # noqa: ANN201 if len(self.folders) == 0 or any( [f.initial_sync_end is None for f in self.folders] ): @@ -220,7 +220,7 @@ def initial_sync_end(self): return max(f.initial_sync_end for f in self.folders) @property - def initial_sync_duration(self): + def initial_sync_duration(self): # noqa: ANN201 if not self.initial_sync_start or not self.initial_sync_end: return None return (self.initial_sync_end - self.initial_sync_end).total_seconds() @@ -323,21 +323,21 @@ def sync_stopped(self, requesting_host) -> bool: return False @classmethod - def get(cls, id_, session): + def get(cls, id_, session): # noqa: ANN206 q = session.query(cls) q = q.filter(cls.id == bindparam("id_")) return q.params(id_=id_).first() @property - def is_killed(self): + def is_killed(self): # noqa: ANN201 return self.sync_state == "killed" @property - def is_running(self): + def is_running(self): # noqa: ANN201 return self.sync_state == "running" @property - def is_marked_for_deletion(self): + def is_marked_for_deletion(self): # noqa: ANN201 return ( self.sync_state in ("stopped", "killed", "invalid") and self.sync_should_run is False @@ -372,7 +372,7 @@ def get_raw_message_contents(self, message) -> Never: } -def should_send_event(obj): +def should_send_event(obj): # noqa: ANN201 if not isinstance(obj, Account): return False inspected_obj = inspect(obj) @@ -386,7 +386,7 @@ def should_send_event(obj): return hist.has_changes() -def already_registered_listener(obj): +def already_registered_listener(obj): # noqa: ANN201 return getattr(obj, "_listener_state", None) is not None @@ -409,7 +409,7 @@ def f(session): if obj_state["sent_event"]: return - id = obj_state["id"] + id = obj_state["id"] # noqa: A001 sync_should_run = obj_state["sync_should_run"] sync_host = obj_state["sync_host"] desired_sync_host = obj_state["desired_sync_host"] diff --git a/inbox/models/action_log.py b/inbox/models/action_log.py index 13291a611..a46bc93d8 100644 --- a/inbox/models/action_log.py +++ b/inbox/models/action_log.py @@ -73,7 +73,9 @@ class ActionLog(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): extra_args = Column(JSON, nullable=True) @classmethod - def create(cls, action, table_name, record_id, namespace_id, extra_args): + def create( # noqa: ANN206 + cls, action, table_name, record_id, namespace_id, extra_args + ): return cls( action=action, table_name=table_name, diff --git a/inbox/models/backends/generic.py b/inbox/models/backends/generic.py index 6062f8a7a..119b489ed 100644 --- a/inbox/models/backends/generic.py +++ b/inbox/models/backends/generic.py @@ -61,7 +61,7 @@ class GenericAccount(ImapAccount): __mapper_args__ = {"polymorphic_identity": "genericaccount"} @property - def verbose_provider(self): + def verbose_provider(self): # 
noqa: ANN201 if self.provider == "custom": return "imap" return self.provider @@ -74,7 +74,7 @@ def valid_password(self, value: str | bytes) -> bytes: try: value.decode("utf-8") except UnicodeDecodeError: - raise ValueError("Invalid password") + raise ValueError("Invalid password") # noqa: B904 if b"\x00" in value: raise ValueError("Invalid password") @@ -113,19 +113,19 @@ def category_type(self) -> CategoryType: return "folder" @property - def thread_cls(self): + def thread_cls(self): # noqa: ANN201 from inbox.models.backends.imap import ImapThread return ImapThread @property - def actionlog_cls(self): + def actionlog_cls(self): # noqa: ANN201 from inbox.models.action_log import ActionLog return ActionLog @property - def server_settings(self): + def server_settings(self): # noqa: ANN201 settings = {} settings["imap_host"], settings["imap_port"] = self.imap_endpoint settings["smtp_host"], settings["smtp_port"] = self.smtp_endpoint @@ -138,13 +138,13 @@ def server_settings(self): # provider attribute to "gmail" to use the Gmail sync engine. @property - def provider_info(self): + def provider_info(self): # noqa: ANN201 provider_info = super().provider_info provider_info["auth"] = "password" return provider_info @property - def auth_handler(self): + def auth_handler(self): # noqa: ANN201 from inbox.auth.base import handler_from_provider return handler_from_provider("custom") diff --git a/inbox/models/backends/gmail.py b/inbox/models/backends/gmail.py index b7aa8878f..687b5d510 100644 --- a/inbox/models/backends/gmail.py +++ b/inbox/models/backends/gmail.py @@ -32,19 +32,19 @@ class GmailAccount(CalendarSyncAccountMixin, OAuthAccount, ImapAccount): scope = Column(String(512)) @property - def email_scopes(self): + def email_scopes(self): # noqa: ANN201 return GOOGLE_EMAIL_SCOPES @property - def contacts_scopes(self): + def contacts_scopes(self): # noqa: ANN201 return GOOGLE_CONTACTS_SCOPES @property - def calendar_scopes(self): + def calendar_scopes(self): # noqa: ANN201 return GOOGLE_CALENDAR_SCOPES @property - def scopes(self): + def scopes(self): # noqa: ANN201 return [ *self.calendar_scopes, *self.contacts_scopes, @@ -52,7 +52,7 @@ def scopes(self): ] @property - def provider(self): + def provider(self): # noqa: ANN201 return PROVIDER @property @@ -60,18 +60,18 @@ def category_type(self) -> CategoryType: return "label" @property - def thread_cls(self): + def thread_cls(self): # noqa: ANN201 from inbox.models.backends.imap import ImapThread return ImapThread @property - def actionlog_cls(self): + def actionlog_cls(self): # noqa: ANN201 from inbox.models.action_log import ActionLog return ActionLog - def get_raw_message_contents(self, message): + def get_raw_message_contents(self, message): # noqa: ANN201 from inbox.s3.backends.gmail import get_gmail_raw_contents return get_gmail_raw_contents(message) diff --git a/inbox/models/backends/imap.py b/inbox/models/backends/imap.py index 002173057..c80ea2edb 100644 --- a/inbox/models/backends/imap.py +++ b/inbox/models/backends/imap.py @@ -48,7 +48,7 @@ class ImapAccount(Account): _smtp_server_port = Column(Integer, nullable=False, server_default="587") @property - def imap_endpoint(self): + def imap_endpoint(self): # noqa: ANN201 if self._imap_server_host is not None: # We have to take care to coerce to int here and below, because # mysqlclient returns Integer columns as type long, and @@ -65,7 +65,7 @@ def imap_endpoint(self, endpoint): self._imap_server_port = int(port) @property - def smtp_endpoint(self): + def smtp_endpoint(self): # noqa: 
ANN201 if self._smtp_server_host is not None: return (self._smtp_server_host, int(self._smtp_server_port)) else: @@ -77,7 +77,7 @@ def smtp_endpoint(self, endpoint): self._smtp_server_host = host self._smtp_server_port = int(port) - def get_raw_message_contents(self, message): + def get_raw_message_contents(self, message): # noqa: ANN201 from inbox.s3.backends.imap import get_imap_raw_contents return get_imap_raw_contents(message) @@ -142,7 +142,7 @@ def update_flags(self, new_flags: list[bytes]) -> None: parameters. Returns True if any values have changed compared to what we previously stored. - """ + """ # noqa: D401 changed = False new_flags = {flag.decode() for flag in new_flags} columns_for_flag = { @@ -217,7 +217,7 @@ def update_labels(self, new_labels: list[str]) -> None: self.labels.add(label) @property - def namespace(self): + def namespace(self): # noqa: ANN201 return self.imapaccount.namespace @property @@ -339,7 +339,9 @@ class ImapThread(Thread): g_thrid = Column(BigInteger, nullable=True, index=True, unique=False) @classmethod - def from_gmail_message(cls, session, namespace_id, message): + def from_gmail_message( # noqa: ANN206 + cls, session, namespace_id, message + ): """ Threads are broken solely on Gmail's X-GM-THRID for now. (Subjects are not taken into account, even if they change.) @@ -367,7 +369,7 @@ def from_gmail_message(cls, session, namespace_id, message): return thread @classmethod - def from_imap_message(cls, session, namespace_id, message): + def from_imap_message(cls, session, namespace_id, message): # noqa: ANN206 if message.thread is not None: # If this message *already* has a thread associated with it, don't # create a new one. @@ -425,7 +427,7 @@ class ImapFolderSyncStatus( _metrics = Column(MutableDict.as_mutable(JSON), default={}, nullable=True) @property - def metrics(self): + def metrics(self): # noqa: ANN201 status = dict(name=self.folder.name, state=self.state) status.update(self._metrics or {}) @@ -441,7 +443,7 @@ def stop_sync(self) -> None: self._metrics["sync_end_time"] = datetime.utcnow() @property - def is_killed(self): + def is_killed(self): # noqa: ANN201 return self._metrics.get("run_state") == "killed" def update_metrics(self, metrics) -> None: @@ -466,7 +468,7 @@ def update_metrics(self, metrics) -> None: self._metrics = metrics @property - def sync_enabled(self): + def sync_enabled(self): # noqa: ANN201 # sync is enabled if the folder's run bit is set, and the account's # run bit is set. 
(this saves us needing to reproduce account-state # transition logic on the folder level, and gives us a comparison bit @@ -498,7 +500,7 @@ class LabelItem(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): ) @property - def namespace(self): + def namespace(self): # noqa: ANN201 return self.label.namespace diff --git a/inbox/models/backends/oauth.py b/inbox/models/backends/oauth.py index 638d160e6..39c986b57 100644 --- a/inbox/models/backends/oauth.py +++ b/inbox/models/backends/oauth.py @@ -17,7 +17,7 @@ log = get_logger() -def hash_token(token, prefix=None): +def hash_token(token, prefix=None): # noqa: ANN201 if not token: return None string = f"{prefix}:{token}" if prefix else token @@ -121,11 +121,11 @@ def scopes(self) -> list[str] | None: return None @declared_attr - def refresh_token_id(self): + def refresh_token_id(self): # noqa: ANN201 return Column(ForeignKey(Secret.id), nullable=False) @declared_attr - def secret(self): + def secret(self): # noqa: ANN201 return relationship( "Secret", cascade="all", @@ -156,7 +156,7 @@ def refresh_token(self, value: str | bytes) -> None: try: value.decode("utf-8") except UnicodeDecodeError: - raise ValueError("Invalid refresh_token") + raise ValueError("Invalid refresh_token") # noqa: B904 if b"\x00" in value: raise ValueError("Invalid refresh_token") @@ -173,7 +173,7 @@ def set_secret(self, secret_type: SecretType, secret_value: bytes) -> None: self.secret.type = secret_type.value self.secret.secret = secret_value - def get_client_info(self): + def get_client_info(self): # noqa: ANN201 """ Obtain the client ID and secret for this OAuth account. @@ -186,7 +186,7 @@ def get_client_info(self): else: raise OAuthError("No valid tokens.") - def new_token( + def new_token( # noqa: D417 self, force_refresh: bool = False, scopes: list[str] | None = None ) -> tuple[str, int]: """ @@ -202,7 +202,7 @@ def new_token( Raises: OAuthError: If no token could be obtained. 
- """ + """ # noqa: D401 try: return self.auth_handler.acquire_access_token( self, force_refresh=force_refresh, scopes=scopes diff --git a/inbox/models/backends/outlook.py b/inbox/models/backends/outlook.py index cc34bef49..ec9d07956 100644 --- a/inbox/models/backends/outlook.py +++ b/inbox/models/backends/outlook.py @@ -45,7 +45,7 @@ class OutlookAccount(CalendarSyncAccountMixin, ImapAccount, OAuthAccount): locale = Column(String(8)) @property - def email_scopes(self): + def email_scopes(self): # noqa: ANN201 return MICROSOFT_EMAIL_SCOPES @property @@ -53,15 +53,15 @@ def contacts_scopes(self) -> None: return None @property - def calendar_scopes(self): + def calendar_scopes(self): # noqa: ANN201 return MICROSOFT_CALENDAR_SCOPES @property - def scopes(self): + def scopes(self): # noqa: ANN201 return self.email_scopes @property - def provider(self): + def provider(self): # noqa: ANN201 return PROVIDER @property @@ -69,13 +69,13 @@ def category_type(self) -> CategoryType: return "folder" @property - def thread_cls(self): + def thread_cls(self): # noqa: ANN201 from inbox.models.backends.imap import ImapThread return ImapThread @property - def actionlog_cls(self): + def actionlog_cls(self): # noqa: ANN201 from inbox.models.action_log import ActionLog return ActionLog diff --git a/inbox/models/base.py b/inbox/models/base.py index 0f10eb309..ab8c61e28 100644 --- a/inbox/models/base.py +++ b/inbox/models/base.py @@ -15,11 +15,11 @@ class MailSyncBase(CreatedAtMixin): id = Column(BigInteger, primary_key=True, autoincrement=True) @declared_attr - def __tablename__(self): + def __tablename__(self): # noqa: ANN204 return self.__name__.lower() @declared_attr - def __table_args__(self): + def __table_args__(self): # noqa: ANN204 return {"extend_existing": True} def __repr__(self) -> str: diff --git a/inbox/models/block.py b/inbox/models/block.py index a5de3288e..93026c39f 100644 --- a/inbox/models/block.py +++ b/inbox/models/block.py @@ -102,7 +102,7 @@ def init_on_load(self) -> None: self.content_type = self._content_type_other @property - def data(self): + def data(self): # noqa: ANN201 value: bytes | None if self.size == 0: log.warning("Block size is 0") @@ -273,17 +273,17 @@ class Part(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): __table_args__ = (UniqueConstraint("message_id", "walk_index"),) @property - def thread_id(self): + def thread_id(self): # noqa: ANN201 if not self.message: return None return self.message.thread_id @property - def is_attachment(self): + def is_attachment(self): # noqa: ANN201 return self.content_disposition is not None @property - def is_embedded(self): + def is_embedded(self): # noqa: ANN201 return ( self.content_disposition is not None and self.content_disposition.lower() == "inline" diff --git a/inbox/models/category.py b/inbox/models/category.py index 0d33f2e96..e6501e003 100644 --- a/inbox/models/category.py +++ b/inbox/models/category.py @@ -25,7 +25,7 @@ EPOCH = datetime.utcfromtimestamp(0) -def sanitize_name(name): +def sanitize_name(name): # noqa: ANN201 return unicode_safe_truncate(name, MAX_INDEXABLE_LENGTH) @@ -52,7 +52,7 @@ class Category( MailSyncBase, HasRevisions, HasPublicID, UpdatedAtMixin, DeletedAtMixin ): @property - def API_OBJECT_NAME(self): + def API_OBJECT_NAME(self): # noqa: ANN201, N802 return self.type_ # Override the default `deleted_at` column with one that is NOT NULL -- @@ -81,7 +81,7 @@ def API_OBJECT_NAME(self): type_ = Column(Enum("folder", "label"), nullable=False, default="folder") @validates("display_name") - def 
validate_display_name(self, key, display_name): + def validate_display_name(self, key, display_name): # noqa: ANN201 sanitized_name = sanitize_name(display_name) if sanitized_name != display_name: log.warning( @@ -92,7 +92,9 @@ def validate_display_name(self, key, display_name): return sanitized_name @classmethod - def find_or_create(cls, session, namespace_id, name, display_name, type_): + def find_or_create( # noqa: ANN206 + cls, session, namespace_id, name, display_name, type_ + ): name = name or "" objects = ( @@ -136,7 +138,9 @@ def find_or_create(cls, session, namespace_id, name, display_name, type_): return obj @classmethod - def create(cls, session, namespace_id, name, display_name, type_): + def create( # noqa: ANN206 + cls, session, namespace_id, name, display_name, type_ + ): name = name or "" obj = cls( namespace_id=namespace_id, @@ -149,23 +153,23 @@ def create(cls, session, namespace_id, name, display_name, type_): return obj @property - def account(self): + def account(self): # noqa: ANN201 return self.namespace.account @property - def type(self): + def type(self): # noqa: ANN201 return self.account.category_type @hybrid_property - def lowercase_name(self): + def lowercase_name(self): # noqa: ANN201 return self.display_name.lower() @lowercase_name.comparator - def lowercase_name(self): + def lowercase_name(self): # noqa: ANN201 return CaseInsensitiveComparator(self.display_name) @property - def api_display_name(self): + def api_display_name(self): # noqa: ANN201 if self.namespace.account.provider == "gmail": if self.display_name.startswith("[Gmail]/"): return self.display_name[8:] @@ -182,7 +186,7 @@ def api_display_name(self): return self.display_name @property - def is_deleted(self): + def is_deleted(self): # noqa: ANN201 return self.deleted_at > EPOCH __table_args__ = ( diff --git a/inbox/models/contact.py b/inbox/models/contact.py index 41fc77d1e..07d27a24d 100644 --- a/inbox/models/contact.py +++ b/inbox/models/contact.py @@ -77,13 +77,13 @@ class Contact( ) @validates("raw_data") - def validate_text_column_length(self, key, value): + def validate_text_column_length(self, key, value): # noqa: ANN201 if value is None: return None return unicode_safe_truncate(value, MAX_TEXT_CHARS) @property - def versioned_relationships(self): + def versioned_relationships(self): # noqa: ANN201 return ["phone_numbers"] def merge_from(self, new_contact) -> None: diff --git a/inbox/models/data_processing.py b/inbox/models/data_processing.py index 3570ff8da..dcb8abdc2 100644 --- a/inbox/models/data_processing.py +++ b/inbox/models/data_processing.py @@ -23,7 +23,7 @@ class DataProcessingCache(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): contact_groups_last_updated = Column(DateTime) @property - def contact_rankings(self): + def contact_rankings(self): # noqa: ANN201 if self._contact_rankings is None: return None else: @@ -37,7 +37,7 @@ def contact_rankings(self, value): self.contact_rankings_last_updated = datetime.datetime.now() @property - def contact_groups(self): + def contact_groups(self): # noqa: ANN201 if self._contact_groups is None: return None else: diff --git a/inbox/models/event.py b/inbox/models/event.py index 087935d30..d858c91c2 100644 --- a/inbox/models/event.py +++ b/inbox/models/event.py @@ -83,20 +83,20 @@ class FlexibleDateTime(TypeDecorator): impl = DateTime - def process_bind_param(self, value, dialect): + def process_bind_param(self, value, dialect): # noqa: ANN201 if isinstance(value, arrow.arrow.Arrow): value = value.to("utc").naive if isinstance(value, datetime): 
value = arrow.get(value).to("utc").naive return value - def process_result_value(self, value, dialect): + def process_result_value(self, value, dialect): # noqa: ANN201 if value is None: return value else: return arrow.get(value).to("utc") - def compare_values(self, x, y): + def compare_values(self, x, y): # noqa: ANN201 if isinstance(x, datetime | int): x = arrow.get(x) if isinstance(y, datetime) or isinstance(x, int): @@ -216,13 +216,13 @@ class Event( "uid", "raw_data", ) - def validate_length(self, key, value): + def validate_length(self, key, value): # noqa: ANN201 if value is None: return None return unicode_safe_truncate(value, MAX_LENS[key]) @property - def when(self): + def when(self): # noqa: ANN201 if self.all_day: # Dates are stored as DateTimes so transform to dates here. start = arrow.get(self.start).to("utc").date() @@ -355,7 +355,7 @@ def update(self, event: "Event") -> None: self.sequence_number = event.sequence_number @property - def recurring(self): + def recurring(self): # noqa: ANN201 if self.recurrence and self.recurrence != "": try: r = ast.literal_eval(self.recurrence) @@ -372,7 +372,7 @@ def recurring(self): return [] @property - def organizer_email(self): + def organizer_email(self): # noqa: ANN201 # For historical reasons, the event organizer field is stored as # "Owner Name ". @@ -386,7 +386,7 @@ def organizer_email(self): return parsed_owner[1] @property - def organizer_name(self): + def organizer_name(self): # noqa: ANN201 parsed_owner = parseaddr(self.owner) if len(parsed_owner) == 0: @@ -398,15 +398,15 @@ def organizer_name(self): return parsed_owner[0] @property - def is_recurring(self): + def is_recurring(self): # noqa: ANN201 return self.recurrence is not None @property - def length(self): + def length(self): # noqa: ANN201 return self.when.delta @property - def cancelled(self): + def cancelled(self): # noqa: ANN201 return self.status == "cancelled" @cancelled.setter @@ -417,28 +417,28 @@ def cancelled(self, is_cancelled): self.status = "confirmed" @property - def calendar_event_link(self): + def calendar_event_link(self): # noqa: ANN201 try: return json.loads(self.raw_data)["htmlLink"] except (ValueError, KeyError): return None @property - def emails_from_description(self): + def emails_from_description(self): # noqa: ANN201 if self.description: return extract_emails_from_text(self.description) else: return [] @property - def emails_from_title(self): + def emails_from_title(self): # noqa: ANN201 if self.title: return extract_emails_from_text(self.title) else: return [] @classmethod - def create(cls, **kwargs): + def create(cls, **kwargs): # noqa: ANN206 # Decide whether or not to instantiate a RecurringEvent/Override # based on the kwargs we get. cls_ = cls @@ -497,7 +497,7 @@ def __init__(self, **kwargs) -> None: try: self.unwrap_rrule() except Exception as e: - log.error( + log.error( # noqa: G201 "Error parsing RRULE entry", event_id=self.id, error=e, @@ -510,14 +510,14 @@ def reconstruct(self) -> None: try: self.unwrap_rrule() except Exception as e: - log.error( + log.error( # noqa: G201 "Error parsing stored RRULE entry", event_id=self.id, error=e, exc_info=True, ) - def inflate(self, start=None, end=None): + def inflate(self, start=None, end=None): # noqa: ANN201 # Convert a RecurringEvent into a series of InflatedEvents # by expanding its RRULE into a series of start times. 
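Note: ANN206 is the classmethod counterpart of ANN201. For the Event.create factory above, the honest annotation is the base class, since the comment notes it may instantiate a RecurringEvent/Override subclass depending on the kwargs (sketch, not part of this patch):

    # ANN206: annotate the factory's return type instead of suppressing.
    # "Event" rather than typing.Self, because create() may return a
    # subclass chosen from the kwargs.
    @classmethod
    def create(cls, **kwargs) -> "Event":
        ...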
from inbox.events.recurring import get_start_times @@ -539,7 +539,7 @@ def unwrap_rrule(self) -> None: elif item.startswith("EXDATE"): self.exdate = item - def all_events(self, start=None, end=None): + def all_events(self, start=None, end=None): # noqa: ANN201 # Returns all inflated events along with overrides that match the # provided time range. overrides = self.overrides @@ -606,7 +606,7 @@ class RecurringEventOverride(Event): ) @validates("master_event_uid") - def validate_master_event_uid_length(self, key, value): + def validate_master_event_uid_length(self, key, value): # noqa: ANN201 if value is None: return None return unicode_safe_truncate(value, MAX_LENS[key]) @@ -625,7 +625,7 @@ class InflatedEvent(Event): on the fly when a recurring event is expanded. These are transient objects that should never be committed to the database. - """ + """ # noqa: D404 __mapper_args__ = {"polymorphic_identity": "inflatedevent"} __tablename__ = "event" diff --git a/inbox/models/folder.py b/inbox/models/folder.py index aab81aa60..f7f2f263f 100644 --- a/inbox/models/folder.py +++ b/inbox/models/folder.py @@ -51,7 +51,7 @@ class Folder(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): ) @property - def canonical_name(self): + def canonical_name(self): # noqa: ANN201 return self._canonical_name @canonical_name.setter @@ -72,7 +72,7 @@ def canonical_name(self, value): initial_sync_end = Column(DateTime, nullable=True) @validates("name") - def validate_name(self, key, name): + def validate_name(self, key, name): # noqa: ANN201 sanitized_name = sanitize_name(name) if sanitized_name != name: log.warning( @@ -83,7 +83,7 @@ def validate_name(self, key, name): return sanitized_name @classmethod - def find_or_create(cls, session, account, name, role=None): + def find_or_create(cls, session, account, name, role=None): # noqa: ANN206 q = ( session.query(cls) .filter(cls.account_id == account.id) @@ -112,7 +112,7 @@ def find_or_create(cls, session, account, name, role=None): return obj @classmethod - def get(cls, id_, session): + def get(cls, id_, session): # noqa: ANN206 q = session.query(cls) q = q.filter(cls.id == bindparam("id_")) return q.params(id_=id_).first() diff --git a/inbox/models/label.py b/inbox/models/label.py index 14483dbaa..a68b46e02 100644 --- a/inbox/models/label.py +++ b/inbox/models/label.py @@ -45,7 +45,7 @@ class Label(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): ) @validates("name") - def validate_name(self, key, name): + def validate_name(self, key, name): # noqa: ANN201 sanitized_name = sanitize_name(name) if sanitized_name != name: log.warning( @@ -56,7 +56,7 @@ def validate_name(self, key, name): return sanitized_name @classmethod - def find_or_create(cls, session, account, name, role=None): + def find_or_create(cls, session, account, name, role=None): # noqa: ANN206 q = session.query(cls).filter(cls.account_id == account.id) role = role or "" diff --git a/inbox/models/message.py b/inbox/models/message.py index 89f00281a..058ee219a 100644 --- a/inbox/models/message.py +++ b/inbox/models/message.py @@ -113,7 +113,7 @@ class Message( MailSyncBase, HasRevisions, HasPublicID, UpdatedAtMixin, DeletedAtMixin ): @property - def API_OBJECT_NAME(self) -> str: + def API_OBJECT_NAME(self) -> str: # noqa: N802 return "message" if not self.is_draft else "draft" namespace_id = Column(BigInteger, index=True, nullable=False) @@ -136,7 +136,7 @@ def API_OBJECT_NAME(self) -> str: ) @property - def thread(self): + def thread(self): # noqa: ANN201 return self._thread @thread.setter @@ -185,7 +185,7 @@ 
def thread(self, value): ) @property - def is_sending(self): + def is_sending(self): # noqa: ANN201 return self.version == MAX_MYSQL_INTEGER and not self.is_draft def mark_as_sending(self) -> None: @@ -196,7 +196,7 @@ def mark_as_sending(self) -> None: self.regenerate_nylas_uid() @property - def categories_changes(self): + def categories_changes(self): # noqa: ANN201 return self.state == "actions_pending" @categories_changes.setter @@ -243,7 +243,7 @@ def regenerate_nylas_uid(self) -> None: concatenated. Because the nylas_uid identifies the draft on the remote provider, we regenerate it on each draft revision so that we can delete the old draft and add the new one on the remote. - """ + """ # noqa: D401 from inbox.sendmail.message import generate_message_id_header self.nylas_uid = f"{self.public_id}-{self.version}" @@ -291,7 +291,7 @@ def sanitize_subject(self, key: Any, value: str | None) -> str | None: return value @classmethod - def create_from_synced( + def create_from_synced( # noqa: D417 cls, account: Account, imap_uid: int, @@ -316,7 +316,7 @@ def create_from_synced( body : bytes The full message including headers (encoded). - """ + """ # noqa: D401 # stop trickle-down bugs assert account.namespace is not None assert isinstance(body, bytes) @@ -480,7 +480,7 @@ def _parse_metadata( self.message_id_header: str | None = parsed.headers.get("Message-Id") if self.message_id_header and len(self.message_id_header) > 998: self.message_id_header = self.message_id_header[:998] - log.warning( + log.warning( # noqa: PLE1205 "Message-Id header too long. Truncating", parsed.headers.get("Message-Id"), logstash_tag="truncated_message_id", @@ -523,7 +523,7 @@ def _parse_mimepart( ) -> None: disposition, _ = mimepart.content_disposition content_id: str | None = mimepart.headers.get("Content-Id") - content_type, params = mimepart.content_type + content_type, params = mimepart.content_type # noqa: F841 filename: str | None = mimepart.detected_file_name if filename == "": @@ -691,7 +691,7 @@ def calculate_html_snippet(self, text: str) -> str: try: text = strip_tags(text) except HTMLParseError: - log.error( + log.error( # noqa: G201 "error stripping tags", message_nylas_uid=self.nylas_uid, exc_info=True, @@ -774,15 +774,15 @@ def api_attachment_metadata(self) -> list[dict[str, Any]]: return resp @property - def versioned_relationships(self): + def versioned_relationships(self): # noqa: ANN201 return ["parts", "messagecategories"] @property - def propagated_attributes(self): + def propagated_attributes(self): # noqa: ANN201 return ["is_read", "is_starred", "messagecategories"] @property - def has_attached_events(self): + def has_attached_events(self): # noqa: ANN201 return "text/calendar" in [p.block.content_type for p in self.parts] @property @@ -817,7 +817,7 @@ def from_public_id( return q.params(public_id=public_id, namespace_id=namespace_id).one() @classmethod - def api_loading_options(cls, expand=False): + def api_loading_options(cls, expand=False): # noqa: ANN206 columns = [ "public_id", "is_draft", @@ -930,7 +930,7 @@ class MessageCategory(MailSyncBase): ) @property - def namespace(self): + def namespace(self): # noqa: ANN201 return self.message.namespace diff --git a/inbox/models/mixins.py b/inbox/models/mixins.py index d4d4b263a..0202dc0c4 100644 --- a/inbox/models/mixins.py +++ b/inbox/models/mixins.py @@ -14,7 +14,7 @@ class HasRevisions(ABCMixin): """Mixin for tables that should be versioned in the transaction log.""" @property - def versioned_relationships(self): + def 
versioned_relationships(self): # noqa: ANN201 """ May be overriden by subclasses. This should be the list of relationship attribute names that should trigger an update revision @@ -25,7 +25,7 @@ def versioned_relationships(self): return [] @property - def propagated_attributes(self): + def propagated_attributes(self): # noqa: ANN201 """ May be overridden by subclasses. This is the list of attribute names that should trigger an update revision for a /related/ object - @@ -84,20 +84,20 @@ class HasPublicID: class AddressComparator(Comparator): - def __eq__(self, other): + def __eq__(self, other): # noqa: ANN204 return self.__clause_element__() == canonicalize_address(other) - def like(self, term, escape=None): + def like(self, term, escape=None): # noqa: ANN201 return self.__clause_element__().like(term, escape=escape) - def in_(self, addresses): + def in_(self, addresses): # noqa: ANN201 return self.__clause_element__().in_( [canonicalize_address(address) for address in addresses] ) class CaseInsensitiveComparator(Comparator): - def __eq__(self, other): + def __eq__(self, other): # noqa: ANN204 return func.lower(self.__clause_element__()) == func.lower(other) @@ -123,11 +123,11 @@ class HasEmailAddress: ) @hybrid_property - def email_address(self): + def email_address(self): # noqa: ANN201 return self._raw_address @email_address.comparator - def email_address(self): + def email_address(self): # noqa: ANN201 return AddressComparator(self._canonicalized_address) @email_address.setter diff --git a/inbox/models/namespace.py b/inbox/models/namespace.py index 15f9a672a..066a6d5af 100644 --- a/inbox/models/namespace.py +++ b/inbox/models/namespace.py @@ -29,19 +29,19 @@ def __str__(self) -> str: ) @property - def email_address(self): + def email_address(self): # noqa: ANN201 if self.account is not None: return self.account.email_address return None @classmethod - def get(cls, id_, session): + def get(cls, id_, session): # noqa: ANN206 q = session.query(cls) q = q.filter(cls.id == bindparam("id_")) return q.params(id_=id_).first() @classmethod - def from_public_id(cls, public_id, db_session): + def from_public_id(cls, public_id, db_session): # noqa: ANN206 q = db_session.query(Namespace) q = q.filter(Namespace.public_id == bindparam("public_id")) return q.params(public_id=public_id).one() diff --git a/inbox/models/secret.py b/inbox/models/secret.py index d458cb8eb..7a5459b0e 100644 --- a/inbox/models/secret.py +++ b/inbox/models/secret.py @@ -46,7 +46,7 @@ def secret(self, plaintext: str | bytes) -> None: self._secret, self.encryption_scheme = e_oracle.encrypt(plaintext) @validates("type") - def validate_type(self, k, type): + def validate_type(self, k, type): # noqa: ANN201 if type not in [x.value for x in SecretType]: raise TypeError("Invalid secret type.") diff --git a/inbox/models/session.py b/inbox/models/session.py index eba6abb89..3cbfd70bb 100644 --- a/inbox/models/session.py +++ b/inbox/models/session.py @@ -18,7 +18,7 @@ MAX_SANE_TRX_TIME_MS = 30000 -def two_phase_session(engine_map, versioned=True): +def two_phase_session(engine_map, versioned=True): # noqa: ANN201, D417 """ Returns a session that implements two-phase-commit. 
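Note: two_phase_session above collects three suppressions (ANN201, D417, D401): no return annotation, undocumented parameters, and a first docstring line that is not imperative. A compliant rewrite could look like the sketch below; the parameter descriptions are inferred from the session construction that follows and are illustrative only:

    def two_phase_session(engine_map, versioned=True) -> Session:
        """
        Return a session that implements two-phase commit.

        Arguments:
            engine_map: Mapping passed as the session's `binds`, giving
                the engine for each bound table/shard.
            versioned: Whether to enable revision tracking via
                configure_versioning().
        """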
@@ -29,7 +29,7 @@ def two_phase_session(engine_map, versioned=True): versioned: bool - """ + """ # noqa: D401 session = Session( binds=engine_map, twophase=True, autoflush=True, autocommit=False ) @@ -39,8 +39,8 @@ def two_phase_session(engine_map, versioned=True): return session -def new_session(engine, versioned=True): - """Returns a session bound to the given engine.""" +def new_session(engine, versioned=True): # noqa: ANN201 + """Returns a session bound to the given engine.""" # noqa: D401 session = Session(bind=engine, autoflush=True, autocommit=False) if versioned: @@ -93,7 +93,7 @@ def end(session): return session -def configure_versioning(session): +def configure_versioning(session): # noqa: ANN201 from inbox.models.transaction import ( bump_redis_txn_id, create_revisions, @@ -112,7 +112,7 @@ def after_flush(session, flush_context): Hook to log revision snapshots. Must be post-flush in order to grab object IDs on new objects. - """ + """ # noqa: D401 # Note: `bump_redis_txn_id` __must__ come first. `create_revisions` # creates new objects which haven't been flushed to the db yet. # `bump_redis_txn_id` looks at objects on the session and expects them @@ -128,7 +128,7 @@ def after_flush(session, flush_context): @contextmanager -def session_scope(id_, versioned=True): +def session_scope(id_, versioned=True): # noqa: ANN201 """ Provide a transactional scope around a series of operations. @@ -185,7 +185,7 @@ def session_scope(id_, versioned=True): "Encountered OperationalError on rollback", original_exception=type(exc), ) - raise exc + raise exc # noqa: B904 finally: if config.get("LOG_DB_SESSIONS"): lifetime = time.time() - start_time @@ -198,7 +198,7 @@ def session_scope(id_, versioned=True): @contextmanager -def session_scope_by_shard_id(shard_id, versioned=True): +def session_scope_by_shard_id(shard_id, versioned=True): # noqa: ANN201 key = shard_id << 48 with session_scope(key, versioned) as db_session: @@ -208,11 +208,11 @@ def session_scope_by_shard_id(shard_id, versioned=True): # GLOBAL (cross-shard) queries. USE WITH CAUTION. -def shard_chooser(mapper, instance, clause=None): +def shard_chooser(mapper, instance, clause=None): # noqa: ANN201 return str(engine_manager.shard_key_for_id(instance.id)) -def id_chooser(query, ident): +def id_chooser(query, ident): # noqa: ANN201 # STOPSHIP(emfree): is ident a tuple here??? # TODO[k]: What if len(list) > 1? 
if isinstance(ident, list) and len(ident) == 1: @@ -220,12 +220,12 @@ def id_chooser(query, ident): return [str(engine_manager.shard_key_for_id(ident))] -def query_chooser(query): +def query_chooser(query): # noqa: ANN201 return [str(k) for k in engine_manager.engines] @contextmanager -def global_session_scope(): +def global_session_scope(): # noqa: ANN201 shards = {str(k): v for k, v in engine_manager.engines.items()} session = ShardedSession( shard_chooser=shard_chooser, diff --git a/inbox/models/thread.py b/inbox/models/thread.py index 9c8712a51..ef150adfa 100644 --- a/inbox/models/thread.py +++ b/inbox/models/thread.py @@ -61,12 +61,12 @@ class Thread( version = Column(Integer, nullable=True, server_default="0") @validates("subject") - def compute_cleaned_up_subject(self, key, value): + def compute_cleaned_up_subject(self, key, value): # noqa: ANN201 self._cleaned_subject = cleanup_subject(value) return value @validates("messages") - def update_from_message(self, k, message): + def update_from_message(self, k, message): # noqa: ANN201 with object_session(self).no_autoflush: if message.is_draft: # Don't change subjectdate, recentdate, or unread/unseen based @@ -84,7 +84,7 @@ def update_from_message(self, k, message): return message @property - def most_recent_received_date(self): + def most_recent_received_date(self): # noqa: ANN201 received_recent_date: datetime.datetime | None = None for m in self.messages: if ( @@ -117,12 +117,12 @@ def most_recent_received_date(self): return received_recent_date @property - def most_recent_sent_date(self): + def most_recent_sent_date(self): # noqa: ANN201 """ This is the timestamp of the most recently *sent* message on this thread, as decided by whether the message is in the sent folder or not. Clients can use this to properly sort the Sent view. - """ + """ # noqa: D404 sent_recent_date = None sorted_messages = sorted( self.messages, key=lambda m: m.received_date, reverse=True @@ -140,19 +140,19 @@ def unread(self) -> bool: return not all(m.is_read for m in self.messages if not m.is_draft) @property - def starred(self): + def starred(self): # noqa: ANN201 return any(m.is_starred for m in self.messages if not m.is_draft) @property - def has_attachments(self): + def has_attachments(self): # noqa: ANN201 return any(m.attachments for m in self.messages if not m.is_draft) @property - def versioned_relationships(self): + def versioned_relationships(self): # noqa: ANN201 return ["messages"] @property - def participants(self): + def participants(self): # noqa: ANN201 """ Different messages in the thread may reference the same email address with different phrases. We partially deduplicate: if the same @@ -177,7 +177,7 @@ def participants(self): return p @property - def drafts(self): + def drafts(self): # noqa: ANN201 """ Return all drafts on this thread that don't have later revisions. 
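
The D401 and D404 suppressions mark docstring-style findings (pydocstyle rules surfaced through Ruff): D401 wants the first line in imperative mood, and D404 forbids starting with "This". A sketch of the rewording that would satisfy D404 for `most_recent_sent_date` above, instead of suppressing it (the phrasing is illustrative only, not applied by this patch):

    @property
    def most_recent_sent_date(self):
        """
        Return the timestamp of the most recently *sent* message on this
        thread, judged by whether the message is in the sent folder.
        Clients can use this to properly sort the Sent view.
        """
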
@@ -185,22 +185,22 @@ def drafts(self): return [m for m in self.messages if m.is_draft] @property - def attachments(self): + def attachments(self): # noqa: ANN201 return any(m.attachments for m in self.messages) @property - def account(self): + def account(self): # noqa: ANN201 return self.namespace.account @property - def categories(self): + def categories(self): # noqa: ANN201 categories = set() for m in self.messages: categories.update(m.categories) return categories @classmethod - def api_loading_options(cls, expand=False): + def api_loading_options(cls, expand=False): # noqa: ANN206 message_columns = [ "public_id", "is_draft", diff --git a/inbox/models/transaction.py b/inbox/models/transaction.py index bea0e9fee..2b23539e7 100644 --- a/inbox/models/transaction.py +++ b/inbox/models/transaction.py @@ -177,7 +177,7 @@ def increment_versions(session) -> None: def bump_redis_txn_id(session) -> None: """ Called from post-flush hook to bump the latest id stored in redis - """ + """ # noqa: D401 def get_namespace_public_id(namespace_id): # the namespace was just used to create the transaction, so it should diff --git a/inbox/models/util.py b/inbox/models/util.py index 7f61dbab7..3233914ee 100644 --- a/inbox/models/util.py +++ b/inbox/models/util.py @@ -88,7 +88,7 @@ def reconcile_message( return existing_message -def transaction_objects(): +def transaction_objects(): # noqa: ANN201 """ Return the mapping from API object name - which becomes the Transaction.object_type - for models that generate Transactions (i.e. @@ -121,7 +121,7 @@ def transaction_objects(): } -def get_accounts_to_delete(shard_id): +def get_accounts_to_delete(shard_id): # noqa: ANN201 ids_to_delete = [] with session_scope_by_shard_id(shard_id) as db_session: ids_to_delete = [ @@ -179,7 +179,9 @@ def delete_namespace(namespace_id, throttle=False, dry_run=False) -> None: .one() ) except NoResultFound: - raise AccountDeletionErrror("Could not find account in database") + raise AccountDeletionErrror( # noqa: B904 + "Could not find account in database" + ) if not account.is_marked_for_deletion: raise AccountDeletionErrror( @@ -289,7 +291,7 @@ def _batch_delete( ): (column, id_) = column_id_filters count = engine.execute( - f"SELECT COUNT(*) FROM {table} WHERE {column}={id_};" + f"SELECT COUNT(*) FROM {table} WHERE {column}={id_};" # noqa: S608 ).scalar() if count == 0: @@ -306,7 +308,7 @@ def _batch_delete( if table in ("message", "block"): query = "" else: - query = f"DELETE FROM {table} WHERE {column}={id_} LIMIT {CHUNK_SIZE};" + query = f"DELETE FROM {table} WHERE {column}={id_} LIMIT {CHUNK_SIZE};" # noqa: S608 log.info("deleting", account_id=account_id, table=table) @@ -376,7 +378,7 @@ def _batch_delete( log.info("Completed batch deletion", time=end - start, table=table) count = engine.execute( - f"SELECT COUNT(*) FROM {table} WHERE {column}={id_};" + f"SELECT COUNT(*) FROM {table} WHERE {column}={id_};" # noqa: S608 ).scalar() if dry_run is False: @@ -390,7 +392,7 @@ def check_throttle() -> bool: check_throttle is ignored entirely if the separate `throttle` flag is False (meaning that throttling is not done at all), but if throttling is enabled, this method determines when. 
- """ + """ # noqa: D401 return True @@ -406,12 +408,12 @@ def purge_transactions( if dry_run: offset = 0 query = ( - "SELECT id FROM transaction where created_at < " + "SELECT id FROM transaction where created_at < " # noqa: S608 f"DATE_SUB({start}, INTERVAL {days_ago} day) LIMIT {limit}" ) else: query = ( - f"DELETE FROM transaction where created_at < DATE_SUB({start}," + f"DELETE FROM transaction where created_at < DATE_SUB({start}," # noqa: S608 f" INTERVAL {days_ago} day) LIMIT {limit}" ) try: diff --git a/inbox/models/when.py b/inbox/models/when.py index cc69e9a81..ebb1b34da 100644 --- a/inbox/models/when.py +++ b/inbox/models/when.py @@ -16,7 +16,7 @@ def parse_as_when( ------ ValueError - """ + """ # noqa: D401 when_classes = [TimeSpan, Time, DateSpan, Date] keys_for_type = { tuple(sorted(cls_.json_keys)): cls_ for cls_ in when_classes @@ -48,7 +48,7 @@ class When: spanning = False @classmethod - def parse(cls, raw: dict[str, Any]): + def parse(cls, raw: dict[str, Any]): # noqa: ANN206 parsed_times = cls.parse_keys(raw) return cls(*parsed_times) @@ -60,7 +60,7 @@ def parse_keys(cls, raw: dict[str, Any]) -> list[arrow.Arrow]: time = parse_utc(raw[key]) times.append(time) except (AttributeError, ValueError, TypeError): - raise ValueError(f"'{key}' parameter invalid.") + raise ValueError(f"'{key}' parameter invalid.") # noqa: B904 return times def __init__( @@ -94,7 +94,7 @@ class SpanningWhen(When): singular_cls: type @classmethod - def parse(cls, raw: dict[str, Any]): + def parse(cls, raw: dict[str, Any]): # noqa: ANN206 # If initializing a span, we sanity check the timestamps and initialize # the singular form if they are equal. start, end = cls.parse_keys(raw) diff --git a/inbox/providers.py b/inbox/providers.py index 9c49b87a1..9c872f465 100644 --- a/inbox/providers.py +++ b/inbox/providers.py @@ -5,7 +5,7 @@ __all__ = ["provider_info", "providers"] -def provider_info(provider_name): +def provider_info(provider_name): # noqa: ANN201 """ Like providers[provider_name] except raises inbox.basicauth.NotSupportedError instead of KeyError when the provider is diff --git a/inbox/s3/backends/gmail.py b/inbox/s3/backends/gmail.py index 880cdeb29..0a28d751d 100644 --- a/inbox/s3/backends/gmail.py +++ b/inbox/s3/backends/gmail.py @@ -12,7 +12,7 @@ # We use the Google API so we don't have to worry about # the Gmail max IMAP connection limit. -def get_gmail_raw_contents(message): +def get_gmail_raw_contents(message): # noqa: ANN201 account = message.namespace.account auth_token = token_manager.get_token(account) @@ -33,7 +33,7 @@ def get_gmail_raw_contents(message): r = requests.get(url, auth=OAuthRequestsWrapper(auth_token)) if r.status_code != 200: - log.error( + log.error( # noqa: PLE1205 "Got an error when fetching raw email", r.status_code, r.text ) diff --git a/inbox/s3/backends/imap.py b/inbox/s3/backends/imap.py index ce6935415..aa0fe95b7 100644 --- a/inbox/s3/backends/imap.py +++ b/inbox/s3/backends/imap.py @@ -8,7 +8,7 @@ log = get_logger() -def get_imap_raw_contents(message): +def get_imap_raw_contents(message): # noqa: ANN201 account = message.namespace.account if len(message.imapuids) == 0: @@ -31,12 +31,12 @@ def get_imap_raw_contents(message): return uids[0].body except imapclient.IMAPClient.Error: - log.error( + log.error( # noqa: G201 "Error while fetching raw contents", exc_info=True, logstash_tag="fetching_error", ) - raise EmailFetchException( + raise EmailFetchException( # noqa: B904 "Couldn't get message from server. " "Please try again in a few minutes." 
) diff --git a/inbox/s3/base.py b/inbox/s3/base.py index 75a068eb5..d027c01bd 100644 --- a/inbox/s3/base.py +++ b/inbox/s3/base.py @@ -1,4 +1,4 @@ -def get_raw_from_provider(message): +def get_raw_from_provider(message): # noqa: ANN201 """Get the raw contents of a message from the provider.""" account = message.account return account.get_raw_message_contents(message) diff --git a/inbox/search/backends/gmail.py b/inbox/search/backends/gmail.py index 9062cc3f7..a7fbb8547 100644 --- a/inbox/search/backends/gmail.py +++ b/inbox/search/backends/gmail.py @@ -25,14 +25,16 @@ def __init__(self, account) -> None: self.auth_token = token_manager.get_token(self.account) db_session.expunge_all() except OAuthError: - raise SearchBackendException( + raise SearchBackendException( # noqa: B904 "This search can't be performed because the account's " "credentials are out of date. Please reauthenticate and try " "again.", 403, ) - def search_messages(self, db_session, search_query, offset=0, limit=40): + def search_messages( # noqa: ANN201 + self, db_session, search_query, offset=0, limit=40 + ): # We need to get the next limit + offset terms if we want to # offset results from the db. g_msgids = self._search(search_query, limit=limit + offset) @@ -57,7 +59,7 @@ def search_messages(self, db_session, search_query, offset=0, limit=40): # We're only issuing a single request to the Gmail API so there's # no need to stream it. - def stream_messages(self, search_query): + def stream_messages(self, search_query): # noqa: ANN201 def g(): encoder = APIEncoder() @@ -68,7 +70,9 @@ def g(): return g - def search_threads(self, db_session, search_query, offset=0, limit=40): + def search_threads( # noqa: ANN201 + self, db_session, search_query, offset=0, limit=40 + ): # We need to get the next limit + offset terms if we want to # offset results from the db. g_msgids = self._search(search_query, limit=limit + offset) @@ -94,7 +98,7 @@ def search_threads(self, db_session, search_query, offset=0, limit=40): return query.all() - def stream_threads(self, search_query): + def stream_threads(self, search_query): # noqa: ANN201 def g(): encoder = APIEncoder() diff --git a/inbox/search/backends/imap.py b/inbox/search/backends/imap.py index c9ecee6d5..88f20d59f 100644 --- a/inbox/search/backends/imap.py +++ b/inbox/search/backends/imap.py @@ -30,7 +30,7 @@ def _open_crispin_connection(self, db_session): account ) except (IMAPClient.Error, OSError, IMAP4.error): - raise SearchBackendException( + raise SearchBackendException( # noqa: B904 ( "Unable to connect to the IMAP " "server. 
Please retry in a " @@ -39,7 +39,7 @@ def _open_crispin_connection(self, db_session): 503, ) except ValidationError: - raise SearchBackendException( + raise SearchBackendException( # noqa: B904 ( "This search can't be performed " "because the account's credentials " @@ -68,7 +68,9 @@ def _open_crispin_connection(self, db_session): def _close_crispin_connection(self): self.crispin_client.logout() - def search_messages(self, db_session, search_query, offset=0, limit=40): + def search_messages( # noqa: ANN201 + self, db_session, search_query, offset=0, limit=40 + ): imap_uids = [] for uids in self._search(db_session, search_query): imap_uids.extend(uids) @@ -90,7 +92,7 @@ def search_messages(self, db_session, search_query, offset=0, limit=40): return query.all() - def stream_messages(self, search_query): + def stream_messages(self, search_query): # noqa: ANN201 def g(): encoder = APIEncoder() @@ -112,7 +114,9 @@ def g(): return g - def search_threads(self, db_session, search_query, offset=0, limit=40): + def search_threads( # noqa: ANN201 + self, db_session, search_query, offset=0, limit=40 + ): imap_uids = [] for uids in self._search(db_session, search_query): imap_uids.extend(uids) @@ -137,7 +141,7 @@ def search_threads(self, db_session, search_query, offset=0, limit=40): query = query.limit(limit) return query.all() - def stream_threads(self, search_query): + def stream_threads(self, search_query): # noqa: ANN201 def g(): encoder = APIEncoder() @@ -209,7 +213,7 @@ def _search_folder(self, folder, criteria, charset): self.log.warning("Won't search missing IMAP folder", exc_info=True) return [] except UidInvalid: - self.log.error( + self.log.error( # noqa: G201 ("Got Uidvalidity error when searching. Skipping."), exc_info=True, ) @@ -219,7 +223,7 @@ def _search_folder(self, folder, criteria, charset): uids = self.crispin_client.conn.search(criteria, charset=charset) except IMAP4.error: self.log.warning("Search error", exc_info=True) - raise SearchBackendException( + raise SearchBackendException( # noqa: B904 ("Unknown IMAP error when performing search."), 503 ) diff --git a/inbox/search/base.py b/inbox/search/base.py index c5c1ae9f3..f144302f3 100644 --- a/inbox/search/base.py +++ b/inbox/search/base.py @@ -1,4 +1,4 @@ -def get_search_client(account): +def get_search_client(account): # noqa: ANN201 from inbox.search.backends import module_registry search_mod = module_registry.get(account.provider) diff --git a/inbox/security/blobstorage.py b/inbox/security/blobstorage.py index 7ee89aa18..6fc08ee5d 100644 --- a/inbox/security/blobstorage.py +++ b/inbox/security/blobstorage.py @@ -11,7 +11,7 @@ values are 0 (no encryption) and 1 (encryption with a static key). The key version bytes can be used to rotate encryption keys. (Right now these are always just null bytes.) -""" +""" # noqa: D404 import struct import zlib diff --git a/inbox/security/oracles.py b/inbox/security/oracles.py index a27561c2f..624225413 100644 --- a/inbox/security/oracles.py +++ b/inbox/security/oracles.py @@ -14,7 +14,7 @@ class EncryptionScheme(enum.Enum): SECRETBOX_WITH_STATIC_KEY = 1 -def get_encryption_oracle(secret_name): +def get_encryption_oracle(secret_name): # noqa: ANN201 """ Return an encryption oracle for the given secret. """ @@ -38,7 +38,7 @@ class _EncryptionOracle: In the future, it may interface with a subprocess or a hardware security module. 
- """ + """ # noqa: D404 def __init__(self, secret_name) -> None: self._closed = False @@ -54,10 +54,10 @@ def __init__(self, secret_name) -> None: encoder=nacl.encoding.HexEncoder, ) - def __enter__(self): + def __enter__(self): # noqa: ANN204 return self - def __exit__(self, exc_type, exc_obj, exc_tb): + def __exit__(self, exc_type, exc_obj, exc_tb): # noqa: ANN204 self.close() def __del__(self) -> None: @@ -126,7 +126,7 @@ class _DecryptionOracle(_EncryptionOracle): In the future, it may interface with a subprocess or a hardware security module. - """ + """ # noqa: D404 def reencrypt( self, ciphertext, encryption_scheme, new_encryption_scheme=None diff --git a/inbox/sendmail/base.py b/inbox/sendmail/base.py index f377ba469..17b766c03 100644 --- a/inbox/sendmail/base.py +++ b/inbox/sendmail/base.py @@ -45,7 +45,7 @@ def __init__( super().__init__(message, http_code, server_error, failures) -def get_sendmail_client(account): +def get_sendmail_client(account): # noqa: ANN201 from inbox.sendmail import module_registry sendmail_mod = module_registry.get(account.provider) @@ -101,7 +101,7 @@ def create_draft_from_mime( return msg -def block_to_part(block, message, namespace): +def block_to_part(block, message, namespace): # noqa: ANN201 inline_image_uri = rf"cid:{block.public_id}" is_inline = re.search(inline_image_uri, message.body) is not None # Create a new Part object to associate to the message object. @@ -116,7 +116,9 @@ def block_to_part(block, message, namespace): return part -def create_message_from_json(data, namespace, db_session, is_draft): +def create_message_from_json( # noqa: ANN201 + data, namespace, db_session, is_draft +): """ Construct a Message instance from `data`, a dictionary representing the POST body of an API request. All new objects are added to the session, but @@ -257,7 +259,7 @@ def create_message_from_json(data, namespace, db_session, is_draft): return message -def update_draft( +def update_draft( # noqa: ANN201 db_session, account, draft, @@ -368,7 +370,7 @@ def delete_draft(db_session, account, draft) -> None: db_session.commit() -def generate_attachments(message, blocks): +def generate_attachments(message, blocks): # noqa: ANN201 attachment_dicts = [] for block in blocks: content_disposition = "attachment" diff --git a/inbox/sendmail/message.py b/inbox/sendmail/message.py index 9ff38c474..8b29370ba 100644 --- a/inbox/sendmail/message.py +++ b/inbox/sendmail/message.py @@ -36,7 +36,7 @@ # and garble the encoded messages when sending, unless you break the lines with # '=\r\n'. Their expectation seems to be technically correct, per RFC1521 # section 5.1. However, we opt to simply avoid this mess entirely. 
-def fallback_to_base64(charset, preferred_encoding, body): +def fallback_to_base64(charset, preferred_encoding, body): # noqa: ANN201 if charset in ("ascii", "iso8859=1", "us-ascii"): if mime.message.part.has_long_lines(body): # In the original implementation, this was @@ -55,7 +55,7 @@ def fallback_to_base64(charset, preferred_encoding, body): mime.message.part.choose_text_encoding = fallback_to_base64 -def create_email( +def create_email( # noqa: ANN201, D417 from_name, from_email, reply_to, @@ -96,7 +96,7 @@ def create_email( attachments: list of dicts, optional a list of dicts(filename, data, content_type, content_disposition) - """ + """ # noqa: D401 html = html if html else "" plaintext = html2text(html) @@ -186,7 +186,7 @@ def create_email( return rfcmsg -def encode_string(value, maxlinelen): +def encode_string(value, maxlinelen): # noqa: ANN201 try: header = Header(value.encode("ascii"), "ascii", maxlinelen) except UnicodeEncodeError: @@ -201,7 +201,7 @@ def _get_full_spec_without_validation(name, email): a Flanker address.EmailAddress object. This function exists because you can't construct a Flanker EmailAddress object with an invalid email address. - """ + """ # noqa: D401, D404 if name: encoded_name = smart_quote( encode_string(name, maxlinelen=MAX_ADDRESS_LENGTH) diff --git a/inbox/sendmail/smtp/postel.py b/inbox/sendmail/smtp/postel.py index 8398e6b7a..9dbb08ef5 100644 --- a/inbox/sendmail/smtp/postel.py +++ b/inbox/sendmail/smtp/postel.py @@ -7,17 +7,20 @@ from inbox.logging import get_logger log = get_logger() -from inbox.exceptions import OAuthError -from inbox.models.backends.generic import GenericAccount -from inbox.models.backends.imap import ImapAccount -from inbox.models.backends.oauth import token_manager -from inbox.models.session import session_scope -from inbox.providers import provider_info -from inbox.sendmail.base import SendMailException, generate_attachments -from inbox.sendmail.message import create_email -from inbox.util.blockstore import get_from_blockstore - -from .util import SMTP_ERRORS +from inbox.exceptions import OAuthError # noqa: E402 +from inbox.models.backends.generic import GenericAccount # noqa: E402 +from inbox.models.backends.imap import ImapAccount # noqa: E402 +from inbox.models.backends.oauth import token_manager # noqa: E402 +from inbox.models.session import session_scope # noqa: E402 +from inbox.providers import provider_info # noqa: E402 +from inbox.sendmail.base import ( # noqa: E402 + SendMailException, + generate_attachments, +) +from inbox.sendmail.message import create_email # noqa: E402 +from inbox.util.blockstore import get_from_blockstore # noqa: E402 + +from .util import SMTP_ERRORS # noqa: E402 # TODO[k]: Other types (LOGIN, XOAUTH, PLAIN-CLIENTTOKEN, CRAM-MD5) AUTH_EXTNS = {"oauth2": "XOAUTH2", "password": "PLAIN"} @@ -37,7 +40,7 @@ SMTP_TEMP_AUTH_FAIL_CODES = (421, 454) -class SMTP_SSL(smtplib.SMTP_SSL): +class SMTP_SSL(smtplib.SMTP_SSL): # noqa: N801 """ Derived class which correctly surfaces SMTP errors. """ @@ -57,7 +60,7 @@ def rset(self) -> None: basically obfuscates the actual server error. See also http://bugs.python.org/issue16005 - """ + """ # noqa: D402 try: smtplib.SMTP_SSL.rset(self) except smtplib.SMTPServerDisconnected: @@ -84,7 +87,7 @@ def rset(self) -> None: basically obfuscates the actual server error. 
See also http://bugs.python.org/issue16005 - """ + """ # noqa: D402 try: smtplib.SMTP.rset(self) except smtplib.SMTPServerDisconnected: @@ -140,10 +143,10 @@ def __init__( } self.setup() - def __enter__(self): + def __enter__(self): # noqa: ANN204 return self - def __exit__(self, type, value, traceback): + def __exit__(self, type, value, traceback): # noqa: ANN204 try: self.connection.quit() except smtplib.SMTPServerDisconnected: @@ -155,11 +158,11 @@ def _connect(self, host, port): self.connection.connect(host, port) except OSError as e: # 'Connection refused', SSL errors for non-TLS connections, etc. - log.error( + log.error( # noqa: G201 "SMTP connection error", exc_info=True, server_error=e.strerror ) msg = _transform_ssl_error(e.strerror) - raise SendMailException(msg, 503) + raise SendMailException(msg, 503) # noqa: B904 def setup(self) -> None: host, port = self.smtp_endpoint @@ -193,7 +196,7 @@ def _upgrade_connection(self): except ssl.SSLError as e: log.warning("STARTTLS supported but failed.", exc_info=True) msg = _transform_ssl_error(e.strerror) - raise SendMailException(msg, 503) + raise SendMailException(msg, 503) # noqa: B904 else: raise SendMailException( "Required SMTP STARTTLS not supported.", 403 @@ -255,7 +258,7 @@ def smtp_password(self) -> None: c.login(self.smtp_username, self.auth_token) except smtplib.SMTPAuthenticationError as e: self.log.error("SMTP login refused", exc=e) - raise SendMailException( + raise SendMailException( # noqa: B904 "Could not authenticate with the SMTP server.", 403 ) except smtplib.SMTPException as e: @@ -265,11 +268,11 @@ def smtp_password(self) -> None: self.log.error( "SMTP auth failed due to unsupported mechanism", exc=e ) - raise SendMailException(str(e), 403) + raise SendMailException(str(e), 403) # noqa: B904 self.log.info("SMTP Auth(Password) success") - def sendmail(self, recipients, msg): + def sendmail(self, recipients, msg): # noqa: ANN201 try: return self.connection.sendmail( self.email_address, recipients, msg @@ -281,7 +284,7 @@ def sendmail(self, recipients, msg): account_id=self.account_id, recipients=recipients, ) - raise SendMailException( + raise SendMailException( # noqa: B904 "Invalid character in recipient address", 402 ) @@ -310,7 +313,7 @@ def __init__(self, account) -> None: account, force_refresh=False, scopes=account.email_scopes ) except OAuthError: - raise SendMailException( + raise SendMailException( # noqa: B904 "Could not authenticate with the SMTP server.", 403 ) else: @@ -358,7 +361,9 @@ def _send(self, recipients, msg): ) except smtplib.SMTPException as err: last_error = err - self.log.error("Error sending", error=err, exc_info=True) + self.log.error( # noqa: G201 + "Error sending", error=err, exc_info=True + ) assert last_error is not None self.log.error( @@ -408,12 +413,12 @@ def _handle_sending_exception(self, err): "Sending failed", http_code=503, server_error=str(err) ) - def send_generated_email(self, recipients, raw_message): + def send_generated_email(self, recipients, raw_message): # noqa: ANN201 # A tiny wrapper over _send because the API differs # between SMTP and EAS. return self._send(recipients, raw_message) - def send_custom(self, draft, body, recipients) -> None: + def send_custom(self, draft, body, recipients) -> None: # noqa: D417 """ Turn a draft object into a MIME message, replacing the body with the provided body, and send it only to the provided recipients. 
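
B904 covers `raise` inside an `except` block without `from`, which discards the causing exception. The patch suppresses each site; the rule's preferred fix is explicit exception chaining. A sketch of the chained form for the `_connect` site above (behavior-preserving under the assumption that no caller inspects the exception's `__cause__`):

    def _connect(self, host, port):
        try:
            self.connection.connect(host, port)
        except OSError as e:
            log.error("SMTP connection error", exc_info=True, server_error=e.strerror)
            msg = _transform_ssl_error(e.strerror)
            # "from e" records the causing exception and satisfies B904.
            raise SendMailException(msg, 503) from e
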
diff --git a/inbox/sqlalchemy_ext/json_util.py b/inbox/sqlalchemy_ext/json_util.py index 7280e7982..b9d4866b4 100644 --- a/inbox/sqlalchemy_ext/json_util.py +++ b/inbox/sqlalchemy_ext/json_util.py @@ -33,17 +33,17 @@ EPOCH_NAIVE = datetime.datetime.utcfromtimestamp(0) -def dumps(obj, *args, **kwargs): +def dumps(obj, *args, **kwargs): # noqa: ANN201 """ Helper function that wraps :class:`json.dumps`. Recursive function that handles all datetime.datetime type. - """ + """ # noqa: D401 return json.dumps(_json_convert(obj), *args, **kwargs) -def loads(s, *args, **kwargs): - """Helper function that wraps :class:`json.loads`.""" +def loads(s, *args, **kwargs): # noqa: ANN201 + """Helper function that wraps :class:`json.loads`.""" # noqa: D401 kwargs["object_hook"] = lambda dct: object_hook(dct) return json.loads(s, *args, **kwargs) @@ -63,7 +63,7 @@ def _json_convert(obj): return obj -def object_hook(dct): +def object_hook(dct): # noqa: ANN201 if "$date" in dct: dtm = dct["$date"] secs = float(dtm) / 1000.0 @@ -71,7 +71,7 @@ def object_hook(dct): return dct -def default(obj): +def default(obj): # noqa: ANN201 if isinstance(obj, datetime.datetime): if obj.utcoffset() is not None: obj = obj - obj.utcoffset() diff --git a/inbox/sqlalchemy_ext/util.py b/inbox/sqlalchemy_ext/util.py index da66c8f55..899ba5caf 100644 --- a/inbox/sqlalchemy_ext/util.py +++ b/inbox/sqlalchemy_ext/util.py @@ -37,7 +37,7 @@ # that. Don't use this to silence any warnings in application code because # these warnings are an indicator of excessive lazy loading from the DB. @contextlib.contextmanager -def disabled_dubiously_many_queries_warning(): +def disabled_dubiously_many_queries_warning(): # noqa: ANN201 global should_log_dubiously_many_queries should_log_dubiously_many_queries = False yield @@ -104,11 +104,11 @@ def __init__(self, string_transform, *args, **kwargs) -> None: raise TypeError("`string_transform` must be callable") self._string_transform = string_transform - def process_bind_param(self, value, dialect): + def process_bind_param(self, value, dialect): # noqa: ANN201 return self._string_transform(value) - class comparator_factory(String.Comparator): - def __eq__(self, other): + class comparator_factory(String.Comparator): # noqa: N801 + def __eq__(self, other): # noqa: ANN204 other = self.type._string_transform(other) return self.operate(operators.eq, other) @@ -119,13 +119,13 @@ class JSON(TypeDecorator): impl = Text - def process_bind_param(self, value, dialect): + def process_bind_param(self, value, dialect): # noqa: ANN201 if value is None: return None return json_util.dumps(value) - def process_result_value(self, value, dialect): + def process_result_value(self, value, dialect): # noqa: ANN201 if not value: return None @@ -139,7 +139,7 @@ def process_result_value(self, value, dialect): log.error("ValueError on decoding JSON", value=value) -def json_field_too_long(value): +def json_field_too_long(value): # noqa: ANN201 return len(json_util.dumps(value)) > MAX_TEXT_CHARS @@ -176,7 +176,7 @@ def process_result_value( # (because these are simply called under the hood) class MutableDict(Mutable, dict): @classmethod - def coerce(cls, key, value): + def coerce(cls, key, value): # noqa: ANN206 """Convert plain dictionaries to MutableDict.""" if not isinstance(value, MutableDict): if isinstance(value, dict): @@ -202,16 +202,16 @@ def update(self, *args, **kwargs) -> None: self[k] = v # To support pickling: - def __getstate__(self): + def __getstate__(self): # noqa: ANN204 return dict(self) - def 
__setstate__(self, state): + def __setstate__(self, state): # noqa: ANN204 self.update(state) class MutableList(Mutable, list): @classmethod - def coerce(cls, key, value): + def coerce(cls, key, value): # noqa: ANN206 """Convert plain list to MutableList""" if not isinstance(value, MutableList): if isinstance(value, list): @@ -242,7 +242,7 @@ def extend(self, values) -> None: list.extend(self, values) self.changed() - def pop(self, *args, **kw): + def pop(self, *args, **kw): # noqa: ANN201 value = list.pop(self, *args, **kw) self.changed() return value @@ -271,12 +271,12 @@ def b36_to_bin(b36_string: str) -> bytes: returns binary 128 bit unsigned integer """ int128 = base36decode(b36_string) - MAX_INT64 = 0xFFFFFFFFFFFFFFFF + MAX_INT64 = 0xFFFFFFFFFFFFFFFF # noqa: N806 return struct.pack(">QQ", (int128 >> 64) & MAX_INT64, int128 & MAX_INT64) def generate_public_id() -> str: - """Returns a base-36 string UUID""" + """Returns a base-36 string UUID""" # noqa: D401 u = uuid.uuid4().bytes result = int128_to_b36(u) assert result @@ -350,7 +350,7 @@ def receive_connect(dbapi_connection, connection_record) -> None: dbapi_connection.encoding = "utf8-surrogate-fix" -def safer_yield_per(query, id_field, start_id, count): +def safer_yield_per(query, id_field, start_id, count): # noqa: ANN201, D417 """ Incautious execution of 'for result in query.yield_per(N):' may cause slowness or OOMing over large tables. This is a less general but less @@ -379,7 +379,7 @@ def safer_yield_per(query, id_field, start_id, count): cur_id = results[-1].id + 1 -def get_db_api_cursor_with_query(session, query): +def get_db_api_cursor_with_query(session, query): # noqa: ANN201 """ Return a DB-API cursor with the given SQLAlchemy query executed. diff --git a/inbox/sync/base_sync.py b/inbox/sync/base_sync.py index de7749ea9..9a01f60e8 100644 --- a/inbox/sync/base_sync.py +++ b/inbox/sync/base_sync.py @@ -74,7 +74,7 @@ def _run(self): ) except ValidationError: # Bad account credentials; exit. - self.log.error( + self.log.error( # noqa: G201 "Credential validation error; exiting", exc_info=True, logstash_tag="mark_invalid", @@ -91,14 +91,14 @@ def _run_impl(self): # If we get a connection or API permissions error, then sleep # 2x poll frequency. except ConnectionError: - self.log.error("Error while polling", exc_info=True) + self.log.error("Error while polling", exc_info=True) # noqa: G201 interruptible_threading.sleep( introduce_jitter(self.poll_frequency) ) interruptible_threading.sleep(introduce_jitter(self.poll_frequency)) def sync(self) -> Never: - """Subclasses should override this to do work""" + """Subclasses should override this to do work""" # noqa: D401 raise NotImplementedError def __repr__(self) -> str: diff --git a/inbox/transactions/actions.py b/inbox/transactions/actions.py index d2c2b334e..99544d738 100644 --- a/inbox/transactions/actions.py +++ b/inbox/transactions/actions.py @@ -76,11 +76,11 @@ } -def action_uses_crispin_client(action): +def action_uses_crispin_client(action): # noqa: ANN201 return action in MAIL_ACTION_FUNCTION_MAP -def function_for_action(action): +def function_for_action(action): # noqa: ANN201 if action in MAIL_ACTION_FUNCTION_MAP: return MAIL_ACTION_FUNCTION_MAP[action] return EVENT_ACTION_FUNCTION_MAP[action] @@ -168,7 +168,7 @@ def _has_recent_move_action(self, db_session, log_entries): Determines if we recently completed a move action. Since Nylas doesn't update local UID state after completing an action, we space non-optimistic actions apart so the sync process can catch up. 
- """ + """ # noqa: D401 if not log_entries: return False @@ -309,7 +309,7 @@ def _get_batch_task(self, db_session, log_entries, has_more): """ Helper for _batch_log_entries that returns the batch task for the given valid log entries. - """ + """ # noqa: D401 if not log_entries: return None namespace = log_entries[0].namespace @@ -395,7 +395,7 @@ def _batch_log_entries(self, db_session, log_entries): log_entry.status = "failed" db_session.commit() self.log.warning( - "Marking action as failed for {} account, older than grace period".format( + "Marking action as failed for {} account, older than grace period".format( # noqa: G001 sync_state ), account_id=account_id, @@ -570,14 +570,14 @@ def execute(self) -> None: # failed. break - def uses_crispin_client(self): + def uses_crispin_client(self): # noqa: ANN201 return any([task.uses_crispin_client() for task in self.tasks]) - def timeout(self, per_task_timeout): + def timeout(self, per_task_timeout): # noqa: ANN201 return len(self.tasks) * per_task_timeout @property - def action_log_ids(self): + def action_log_ids(self): # noqa: ANN201 return [entry for task in self.tasks for entry in task.action_log_ids] @@ -621,7 +621,7 @@ def __init__( self.retry_interval = retry_interval self.crispin_client = None - def try_merge_with(self, other): + def try_merge_with(self, other): # noqa: ANN201 if self.func != other.func: return None @@ -828,10 +828,10 @@ def _mark_action_as_failed(self, action_log_entry, db_session): event.deleted_at = datetime.now() db_session.commit() - def uses_crispin_client(self): + def uses_crispin_client(self): # noqa: ANN201 return action_uses_crispin_client(self.action_name) - def timeout(self, per_task_timeout): + def timeout(self, per_task_timeout): # noqa: ANN201 return per_task_timeout def execute(self) -> None: @@ -859,7 +859,7 @@ def _run(self): ): task.execute() except Exception: - self.log.error( + self.log.error( # noqa: G201 "SyncbackWorker caught exception", exc_info=True, account_id=task.account_id, diff --git a/inbox/transactions/delta_sync.py b/inbox/transactions/delta_sync.py index 523c33c7d..e5d9721d4 100644 --- a/inbox/transactions/delta_sync.py +++ b/inbox/transactions/delta_sync.py @@ -17,7 +17,9 @@ } -def get_transaction_cursor_near_timestamp(namespace_id, timestamp, db_session): +def get_transaction_cursor_near_timestamp( # noqa: ANN201 + namespace_id, timestamp, db_session +): """ Exchange a timestamp for a 'cursor' into the transaction log entry near to that timestamp in age. 
The cursor is the public_id of that transaction @@ -94,7 +96,7 @@ def _get_last_trx_id_for_namespace(namespace_id, db_session): return q.params(namespace_id=namespace_id).one()[0] -def format_transactions_after_pointer( +def format_transactions_after_pointer( # noqa: ANN201, D417 namespace, pointer, db_session, @@ -283,7 +285,7 @@ def format_transactions_after_pointer( pointer = transactions[-1].id -def streaming_change_generator( +def streaming_change_generator( # noqa: ANN201, D417 namespace, poll_interval, timeout, diff --git a/inbox/util/addr.py b/inbox/util/addr.py index 60f36bbfe..f402f7db0 100644 --- a/inbox/util/addr.py +++ b/inbox/util/addr.py @@ -74,6 +74,6 @@ def parse_mimepart_address_header( return sorted(list(elem) for elem in addresses) -def extract_emails_from_text(text): +def extract_emails_from_text(text): # noqa: ANN201 emails = EMAIL_FIND_RE.findall(text) return [email for email in emails if valid_email(email)] diff --git a/inbox/util/blockstore.py b/inbox/util/blockstore.py index 9b5295c71..79e34371b 100644 --- a/inbox/util/blockstore.py +++ b/inbox/util/blockstore.py @@ -16,8 +16,8 @@ # TODO: store AWS credentials in a better way. STORE_MSG_ON_S3 = config.get("STORE_MESSAGES_ON_S3", None) -import boto3 -import botocore.exceptions +import boto3 # noqa: E402 +import botocore.exceptions # noqa: E402 # https://github.com/facebook/zstd/blob/dev/doc/zstd_compression_format.md#zstandard-frames # > This value was selected to be less probable to find at the beginning of some random file. @@ -28,7 +28,7 @@ def _data_file_directory(h): - return os.path.join( + return os.path.join( # noqa: PTH118 config.get_required("MSG_PARTS_DIRECTORY"), h[0], h[1], @@ -40,7 +40,7 @@ def _data_file_directory(h): def _data_file_path(h): - return os.path.join(_data_file_directory(h), h) + return os.path.join(_data_file_directory(h), h) # noqa: PTH118 def maybe_compress_raw_mime( @@ -141,9 +141,9 @@ def save_to_blockstore( _save_to_s3(data_sha256, data, overwrite=overwrite) else: directory = _data_file_directory(data_sha256) - os.makedirs(directory, exist_ok=True) + os.makedirs(directory, exist_ok=True) # noqa: PTH103 - with open(_data_file_path(data_sha256), "wb") as f: + with open(_data_file_path(data_sha256), "wb") as f: # noqa: PTH123 f.write(data) @@ -162,7 +162,7 @@ def _save_to_s3( ) -def get_s3_bucket(bucket_name): +def get_s3_bucket(bucket_name): # noqa: ANN201 resource = boto3.resource( "s3", aws_access_key_id=config.get("AWS_ACCESS_KEY_ID"), @@ -331,7 +331,7 @@ def _get_from_disk(data_sha256): return None try: - with open(_data_file_path(data_sha256), "rb") as f: + with open(_data_file_path(data_sha256), "rb") as f: # noqa: PTH123 return f.read() except OSError: log.warning(f"No file with name: {data_sha256}!") @@ -372,7 +372,7 @@ def _delete_from_disk(data_sha256): return try: - os.remove(_data_file_path(data_sha256)) + os.remove(_data_file_path(data_sha256)) # noqa: PTH107 except OSError: log.warning(f"No file with name: {data_sha256}!") diff --git a/inbox/util/concurrency.py b/inbox/util/concurrency.py index a7b4cb60b..cdb7a0f51 100644 --- a/inbox/util/concurrency.py +++ b/inbox/util/concurrency.py @@ -40,7 +40,7 @@ ) -def retry( +def retry( # noqa: ANN201, D417 func, retry_classes=None, fail_classes=None, @@ -64,7 +64,7 @@ class filters. Configures what not to retry on. If specified, func is /not/ retried if one of these exceptions is raised. 
- """ + """ # noqa: D401 if ( fail_classes and retry_classes @@ -105,7 +105,7 @@ def wrapped(*args, **kwargs): return wrapped -def retry_with_logging( +def retry_with_logging( # noqa: ANN201 func, logger=None, retry_classes=None, diff --git a/inbox/util/db.py b/inbox/util/db.py index 5973eed36..ab18851d5 100644 --- a/inbox/util/db.py +++ b/inbox/util/db.py @@ -15,7 +15,7 @@ def drop_everything(engine, keep_tables=None, reset_columns=None) -> None: `reset_columns` is used to specify the columns that should be reset to default value in the tables that we're keeping - provided as a dict of table_name: list_of_column_names. - """ + """ # noqa: D401 keep_tables = keep_tables or [] reset_columns = reset_columns or {} conn = engine.connect() @@ -43,7 +43,7 @@ def drop_everything(engine, keep_tables=None, reset_columns=None) -> None: if c["name"] in column_names: assert c["default"] - q = "UPDATE {} SET {}={};".format( + q = "UPDATE {} SET {}={};".format( # noqa: S608 table_name, c["name"], c["default"] ) conn.execute(q) diff --git a/inbox/util/encoding.py b/inbox/util/encoding.py index 0d3a63d1b..59d9ab2c9 100644 --- a/inbox/util/encoding.py +++ b/inbox/util/encoding.py @@ -22,7 +22,7 @@ def unicode_safe_truncate(s: bytes | str | int, max_length: int) -> str: """ Implements unicode-safe truncation and trims whitespace for a given input string, number or unicode string. - """ + """ # noqa: D401 if isinstance(s, bytes): s = s.decode("utf-8", "ignore") else: diff --git a/inbox/util/file.py b/inbox/util/file.py index b1eed633d..3b9951df0 100644 --- a/inbox/util/file.py +++ b/inbox/util/file.py @@ -2,13 +2,15 @@ from collections.abc import Generator ROOT_PATH = os.path.normpath( - os.path.join(__file__, os.pardir, os.pardir, os.pardir) + os.path.join(__file__, os.pardir, os.pardir, os.pardir) # noqa: PTH118 ) def get_data(filename: str) -> bytes: """Read contents of a file relative to the project root folder""" - with open(os.path.join(ROOT_PATH, filename), "rb") as file: + with open( # noqa: PTH123 + os.path.join(ROOT_PATH, filename), "rb" # noqa: PTH118 + ) as file: return file.read() @@ -16,13 +18,15 @@ def iter_module_names(paths: list[str]) -> Generator[str, None, None]: """Iterate all Python module names in given paths""" for path in paths: for name in os.listdir(path): - isdirectory = os.path.isdir(os.path.join(path, name)) + isdirectory = os.path.isdir( # noqa: PTH112 + os.path.join(path, name) # noqa: PTH118 + ) if not isdirectory and name == "__init__.py": continue if not isdirectory and name.endswith(".py"): yield name[:-3] - elif isdirectory and os.path.isfile( - os.path.join(path, name, "__init__.py") + elif isdirectory and os.path.isfile( # noqa: PTH113 + os.path.join(path, name, "__init__.py") # noqa: PTH118 ): yield name diff --git a/inbox/util/html.py b/inbox/util/html.py index e7e50516e..96a1cc77f 100644 --- a/inbox/util/html.py +++ b/inbox/util/html.py @@ -10,7 +10,7 @@ class HTMLParseError(ValueError): # http://stackoverflow.com/questions/753052/strip-html-from-strings-in-python class HTMLTagStripper(HTMLParser): - strippedTags = ["title", "script", "style"] + strippedTags = ["title", "script", "style"] # noqa: N815 def __init__(self) -> None: self.reset() diff --git a/inbox/util/itert.py b/inbox/util/itert.py index caa1dcaa5..09514d6fa 100644 --- a/inbox/util/itert.py +++ b/inbox/util/itert.py @@ -1,7 +1,7 @@ import itertools -def chunk(iterable, size): +def chunk(iterable, size): # noqa: ANN201 """ Yield chunks of an iterable. 
diff --git a/inbox/util/misc.py b/inbox/util/misc.py index 633d9bf42..b38f0eed3 100644 --- a/inbox/util/misc.py +++ b/inbox/util/misc.py @@ -9,10 +9,10 @@ class DummyContextManager: - def __enter__(self): + def __enter__(self): # noqa: ANN204 return None - def __exit__(self, exc_type, exc_value, traceback): + def __exit__(self, exc_type, exc_value, traceback): # noqa: ANN204 return False @@ -20,14 +20,14 @@ class ProviderSpecificException(Exception): pass -def or_none(value, selector): +def or_none(value, selector): # noqa: ANN201 if value is None: return None else: return selector(value) -def parse_ml_headers(headers): +def parse_ml_headers(headers): # noqa: ANN201 """ Parse the mailing list headers described in RFC 4021, these headers are optional (RFC 2369). @@ -78,7 +78,7 @@ def parse_references(references: str, in_reply_to: str) -> list[str]: return reference_list -def dt_to_timestamp(dt): +def dt_to_timestamp(dt): # noqa: ANN201 return int((dt - datetime(1970, 1, 1)).total_seconds()) @@ -98,7 +98,7 @@ def get_internaldate(date: str | None, received: str | None) -> datetime: # Based on: http://stackoverflow.com/a/8556471 -def load_modules(base_name, base_path): +def load_modules(base_name, base_path): # noqa: ANN201 """ Imports all modules underneath `base_module` in the module tree. @@ -107,7 +107,7 @@ def load_modules(base_name, base_path): list All the modules in the base module tree. - """ + """ # noqa: D401 modules = [] for module_name in iter_module_names(base_path): @@ -121,7 +121,7 @@ def load_modules(base_name, base_path): return modules -def register_backends(base_name, base_path): +def register_backends(base_name, base_path): # noqa: ANN201 """ Dynamically loads all packages contained within thread backends module, including those by other module install paths @@ -144,7 +144,7 @@ def register_backends(base_name, base_path): return mod_for -def cleanup_subject(subject_str): +def cleanup_subject(subject_str): # noqa: ANN201 """ Clean-up a message subject-line, including whitespace. For instance, 'Re: Re: Re: Birthday party' becomes 'Birthday party' @@ -163,7 +163,7 @@ def cleanup_subject(subject_str): # IMAP doesn't support nested folders and instead encodes paths inside folder # names. # imap_folder_path converts a "/" delimited path to an IMAP compatible path. -def imap_folder_path(path, separator=".", prefix=""): +def imap_folder_path(path, separator=".", prefix=""): # noqa: ANN201 folders = [folder for folder in path.split("/") if folder != ""] res = None @@ -182,7 +182,7 @@ def imap_folder_path(path, separator=".", prefix=""): return res -def strip_prefix(path, prefix): +def strip_prefix(path, prefix): # noqa: ANN201 if path.startswith(prefix): return path[len(prefix) :] @@ -190,7 +190,7 @@ def strip_prefix(path, prefix): # fs_folder_path converts an IMAP compatible path to a "/" delimited path. -def fs_folder_path(path, separator=".", prefix=""): +def fs_folder_path(path, separator=".", prefix=""): # noqa: ANN201 if prefix: path = strip_prefix(path, prefix) diff --git a/inbox/util/rdb.py b/inbox/util/rdb.py index dfb45f82f..60c8a45a4 100644 --- a/inbox/util/rdb.py +++ b/inbox/util/rdb.py @@ -47,13 +47,13 @@ def interact(self, banner=None) -> None: to confuse this with the real interpreter -- since it's so close!). - """ + """ # noqa: D401 try: - sys.ps1 + sys.ps1 # noqa: B018 except AttributeError: sys.ps1 = ">>> " try: - sys.ps2 + sys.ps2 # noqa: B018 except AttributeError: sys.ps2 = "... " cprt = 'Type "help", "copyright", "credits" or "license" for more information.' 
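
B018 marks expression statements with no effect; the bare `sys.ps1` / `sys.ps2` accesses above are intentional existence probes (they raise AttributeError when the interactive prompt has never been set), so suppressing is apt. An equivalent that would not trigger the rule, if one preferred rewriting over suppressing:

    if not hasattr(sys, "ps1"):
        sys.ps1 = ">>> "
    if not hasattr(sys, "ps2"):
        sys.ps2 = "... "
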
@@ -99,7 +99,7 @@ def terminate(self) -> None: except OSError: return - def raw_input(self, prompt=""): + def raw_input(self, prompt=""): # noqa: ANN201 self.handle.write(prompt) self.handle.flush() return self.handle.readline() diff --git a/inbox/util/sharding.py b/inbox/util/sharding.py index be5dd8618..69368e640 100644 --- a/inbox/util/sharding.py +++ b/inbox/util/sharding.py @@ -4,11 +4,11 @@ from inbox.ignition import engine_manager -def get_shards(): +def get_shards(): # noqa: ANN201 return list(engine_manager.engines) -def get_open_shards(): +def get_open_shards(): # noqa: ANN201 # Can't use engine_manager.engines here because it does not track # shard state (open/ closed) database_hosts = config.get_required("DATABASE_HOSTS") @@ -23,7 +23,7 @@ def get_open_shards(): return open_shards -def get_shard_schemas(): +def get_shard_schemas(): # noqa: ANN201 # Can't use engine_manager.engines here because it does not track # shard schemas. shard_schemas = {} @@ -37,7 +37,7 @@ def get_shard_schemas(): return shard_schemas -def generate_open_shard_key(): +def generate_open_shard_key(): # noqa: ANN201 """ Return the key that can be passed into session_scope() for an open shard, picked at random. diff --git a/inbox/util/startup.py b/inbox/util/startup.py index 4f180ccea..5dea8f4c5 100644 --- a/inbox/util/startup.py +++ b/inbox/util/startup.py @@ -12,8 +12,9 @@ def _absolute_path(relative_path): - return os.path.join( - os.path.dirname(os.path.abspath(__file__)), relative_path + return os.path.join( # noqa: PTH118 + os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120 + relative_path, ) @@ -59,8 +60,8 @@ def load_overrides(file_path, loaded_config=config) -> None: file_path : the full path to a file containing valid JSON for configuration overrides - """ - with open(file_path) as data_file: + """ # noqa: D401 + with open(file_path) as data_file: # noqa: PTH123 try: overrides = json.load(data_file) except ValueError: diff --git a/inbox/util/stats.py b/inbox/util/stats.py index c1ae30623..02a221515 100644 --- a/inbox/util/stats.py +++ b/inbox/util/stats.py @@ -3,7 +3,7 @@ from inbox.config import config -def get_statsd_client(): +def get_statsd_client(): # noqa: ANN201 return statsd.StatsClient( str(config.get("STATSD_HOST", "localhost")), config.get("STATSD_PORT", 8125), diff --git a/inbox/util/testutils.py b/inbox/util/testutils.py index df76890ef..6bc1022e7 100644 --- a/inbox/util/testutils.py +++ b/inbox/util/testutils.py @@ -22,7 +22,7 @@ def create_test_db() -> None: - """Creates new, empty test databases.""" + """Creates new, empty test databases.""" # noqa: D401 from inbox.config import config database_hosts = config.get_required("DATABASE_HOSTS") @@ -58,7 +58,7 @@ def setup_test_db() -> None: Creates new, empty test databases with table structures generated from declarative model classes. 
- """ + """ # noqa: D401 from inbox.config import config from inbox.ignition import engine_manager, init_db @@ -86,7 +86,7 @@ def __init__(self) -> None: def _load_records(self, filename): self._registry = json.loads(get_data(filename)) - def query(self, domain, record_type): + def query(self, domain, record_type): # noqa: ANN201 record_type = record_type.lower() entry = self._registry[record_type][domain] if isinstance(entry, dict): @@ -100,7 +100,7 @@ def query(self, domain, record_type): @pytest.fixture -def mock_dns_resolver(monkeypatch): +def mock_dns_resolver(monkeypatch): # noqa: ANN201 dns_resolver = MockDNSResolver() monkeypatch.setattr("inbox.util.url.dns_resolver", dns_resolver) yield dns_resolver @@ -108,7 +108,7 @@ def mock_dns_resolver(monkeypatch): @pytest.fixture -def dump_dns_queries(monkeypatch): +def dump_dns_queries(monkeypatch): # noqa: ANN201 original_query = dns.resolver.Resolver.query query_results: dict[ Literal["mx", "ns"], dict[str, dict[Literal["error"], str] | list[str]] @@ -135,7 +135,7 @@ def mock_query(self, domain, record_type): monkeypatch.setattr("dns.resolver.Resolver.query", mock_query) yield - print(json.dumps(query_results, indent=4, sort_keys=True)) + print(json.dumps(query_results, indent=4, sort_keys=True)) # noqa: T201 class MockIMAPClient: @@ -164,23 +164,23 @@ def login(self, email, password) -> None: def logout(self) -> None: pass - def list_folders(self, directory="", pattern="*"): + def list_folders(self, directory="", pattern="*"): # noqa: ANN201 return [(b"\\All", b"/", "[Gmail]/All Mail")] def has_capability(self, capability) -> bool: return False - def idle_check(self, timeout=None): + def idle_check(self, timeout=None): # noqa: ANN201 return [] - def idle_done(self): + def idle_done(self): # noqa: ANN201 return ("Idle terminated", []) def add_folder_data(self, folder_name, uids) -> None: - """Adds fake UID data for the given folder.""" + """Adds fake UID data for the given folder.""" # noqa: D401 self._data[folder_name] = uids - def search(self, criteria): + def search(self, criteria): # noqa: ANN201 assert self.selected_folder is not None assert isinstance(criteria, list) uid_dict = self._data[self.selected_folder] @@ -208,11 +208,11 @@ def search(self, criteria): return [u for u, v in uid_dict.items() if v[criteria[0]] == thrid] raise ValueError(f"unsupported test criteria: {criteria!r}") - def select_folder(self, folder_name, readonly=False): + def select_folder(self, folder_name, readonly=False): # noqa: ANN201 self.selected_folder = folder_name return self.folder_status(folder_name) - def fetch(self, items, data, modifiers=None): + def fetch(self, items, data, modifiers=None): # noqa: ANN201 assert self.selected_folder is not None uid_dict = self._data[self.selected_folder] resp = {} @@ -265,10 +265,10 @@ def copy(self, matching_uids, folder_name) -> None: self._data[folder_name][u] = self._data[self.selected_folder][u] self.delete_messages(matching_uids) - def capabilities(self): + def capabilities(self): # noqa: ANN201 return [] - def folder_status(self, folder_name, data=None): + def folder_status(self, folder_name, data=None): # noqa: ANN201 folder_data = self._data[folder_name] lastuid = max(folder_data) if folder_data else 0 resp = {b"UIDNEXT": lastuid + 1, b"UIDVALIDITY": self.uidvalidity} @@ -296,7 +296,7 @@ def oauth2_login(self, email, token) -> None: @pytest.fixture -def mock_imapclient(monkeypatch): +def mock_imapclient(monkeypatch): # noqa: ANN201 conn = MockIMAPClient() monkeypatch.setattr( 
"inbox.crispin.CrispinConnectionPool._new_raw_connection", @@ -314,7 +314,7 @@ class MockSMTPClient: @pytest.fixture -def mock_smtp_get_connection(monkeypatch): +def mock_smtp_get_connection(monkeypatch): # noqa: ANN201 client = MockSMTPClient() @contextlib.contextmanager @@ -329,12 +329,12 @@ def get_connection(account): @pytest.fixture -def files(db): +def files(db): # noqa: ANN201 filenames = FILENAMES data = [] for filename in filenames: - path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), + path = os.path.join( # noqa: PTH118 + os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120 "..", "..", "tests", @@ -346,7 +346,7 @@ def files(db): @pytest.fixture -def uploaded_file_ids(api_client, files): +def uploaded_file_ids(api_client, files): # noqa: ANN201 file_ids = [] upload_path = "/files" for filename, path in files: @@ -359,7 +359,7 @@ def uploaded_file_ids(api_client, files): filename = "ἄνδρα μοι ἔννεπε" elif filename == "long-non-ascii-filename.txt": filename = 100 * "μ" - with open(path, "rb") as fp: + with open(path, "rb") as fp: # noqa: PTH123 data = {"file": (fp, filename)} r = api_client.post_raw(upload_path, data=data) assert r.status_code == 200 diff --git a/inbox/util/threading.py b/inbox/util/threading.py index bbe16e264..e97821927 100644 --- a/inbox/util/threading.py +++ b/inbox/util/threading.py @@ -11,7 +11,9 @@ MAX_MESSAGES_SCANNED = 20000 -def fetch_corresponding_thread(db_session, namespace_id, message): +def fetch_corresponding_thread( # noqa: ANN201 + db_session, namespace_id, message +): """ Fetch a thread matching the corresponding message. Returns None if there's no matching thread. diff --git a/inbox/util/url.py b/inbox/util/url.py index d3886da10..a71cff78b 100644 --- a/inbox/util/url.py +++ b/inbox/util/url.py @@ -10,7 +10,7 @@ log = get_logger("inbox.util.url") -from inbox.providers import providers +from inbox.providers import providers # noqa: E402 # http://www.regular-expressions.info/email.html EMAIL_REGEX = re.compile( @@ -46,7 +46,7 @@ def _fallback_get_mx_domains(domain): return [] -def get_mx_domains(domain, dns_resolver=_dns_resolver): +def get_mx_domains(domain, dns_resolver=_dns_resolver): # noqa: ANN201 """Retrieve and return the MX records for a domain.""" mx_records = [] try: @@ -91,7 +91,9 @@ def mx_match(mx_domains, match_domains) -> bool: return False -def provider_from_address(email_address, dns_resolver=_dns_resolver): +def provider_from_address( # noqa: ANN201 + email_address, dns_resolver=_dns_resolver +): if not EMAIL_REGEX.match(email_address): raise InvalidEmailAddressError("Invalid email address") @@ -139,7 +141,7 @@ def provider_from_address(email_address, dns_resolver=_dns_resolver): # From tornado.httputil -def url_concat(url, args, fragments=None): +def url_concat(url, args, fragments=None): # noqa: ANN201 """ Concatenate url and argument dictionary regardless of whether url has existing query parameters. @@ -168,18 +170,18 @@ def url_concat(url, args, fragments=None): return url + args_tail + fragment_tail -def resolve_hostname(addr): +def resolve_hostname(addr): # noqa: ANN201 try: return socket.gethostbyname(addr) except OSError: return None -def parent_domain(domain): +def parent_domain(domain): # noqa: ANN201 return tld_extract(domain).registered_domain -def naked_domain(url): +def naked_domain(url): # noqa: ANN201 # This function extracts the domain name part of an URL. # It works indiscriminately on URLs or plain domains. 
diff --git a/inbox/webhooks/google_notifications.py b/inbox/webhooks/google_notifications.py
index afd75d7d0..1cf0ea4e2 100644
--- a/inbox/webhooks/google_notifications.py
+++ b/inbox/webhooks/google_notifications.py
@@ -6,11 +6,11 @@
 from inbox.logging import get_logger

 log = get_logger()
-import limitlion
+import limitlion  # noqa: E402

-from inbox.models import Calendar
-from inbox.models.backends.gmail import GmailAccount
-from inbox.models.session import global_session_scope
+from inbox.models import Calendar  # noqa: E402
+from inbox.models.backends.gmail import GmailAccount  # noqa: E402
+from inbox.models.session import global_session_scope  # noqa: E402

 app = Blueprint("google_webhooks", "google_webhooks_api", url_prefix="/w")
@@ -19,7 +19,7 @@
 GOOGLE_RESOURCE_ID_STRING = "X-Goog-Resource-ID"


-def resp(http_code, message=None, **kwargs):
+def resp(http_code, message=None, **kwargs):  # noqa: ANN201
     resp = kwargs
     if message:
         resp["message"] = message
@@ -31,13 +31,13 @@


 @app.before_request
-def start():
+def start():  # noqa: ANN201
     try:
         watch_state = request.headers[GOOGLE_RESOURCE_STATE_STRING]
         g.watch_channel_id = request.headers[GOOGLE_CHANNEL_ID_STRING]
         g.watch_resource_id = request.headers[GOOGLE_RESOURCE_ID_STRING]
     except KeyError:
-        raise InputError("Malformed headers")
+        raise InputError("Malformed headers")  # noqa: B904

     request.environ.setdefault("log_context", {}).update(
         {
@@ -53,18 +53,18 @@ def start():


 @app.errorhandler(APIException)
-def handle_input_error(error):
+def handle_input_error(error):  # noqa: ANN201
     response = jsonify(message=error.message, type="invalid_request_error")
     response.status_code = error.status_code
     return response


 @app.route("/calendar_list_update/<account_public_id>", methods=["POST"])
-def calendar_update(account_public_id):
+def calendar_update(account_public_id):  # noqa: ANN201
     request.environ["log_context"]["account_public_id"] = account_public_id
     try:
         valid_public_id(account_public_id)
-        allowed, tokens, sleep = limitlion.throttle(
+        allowed, tokens, sleep = limitlion.throttle(  # noqa: F841
             f"gcal_account:{account_public_id}", rps=0.5
         )
         if allowed:
@@ -78,17 +78,19 @@ def calendar_update(account_public_id):
             db_session.commit()
         return resp(200)
     except ValueError:
-        raise InputError("Invalid public ID")
+        raise InputError("Invalid public ID")  # noqa: B904
     except NoResultFound:
-        raise NotFoundError(f"Couldn't find account `{account_public_id}`")
+        raise NotFoundError(  # noqa: B904
+            f"Couldn't find account `{account_public_id}`"
+        )


 @app.route("/calendar_update/<calendar_public_id>", methods=["POST"])
-def event_update(calendar_public_id):
+def event_update(calendar_public_id):  # noqa: ANN201
     request.environ["log_context"]["calendar_public_id"] = calendar_public_id
     try:
         valid_public_id(calendar_public_id)
-        allowed, tokens, sleep = limitlion.throttle(
+        allowed, tokens, sleep = limitlion.throttle(  # noqa: F841
             f"gcal_calendar:{calendar_public_id}", rps=0.5
         )
         if allowed:
@@ -103,6 +105,8 @@ def event_update(calendar_public_id):
             db_session.commit()
         return resp(200)
     except ValueError:
-        raise InputError("Invalid public ID")
+        raise InputError("Invalid public ID")  # noqa: B904
     except NoResultFound:
-        raise NotFoundError(f"Couldn't find calendar `{calendar_public_id}`")
+        raise NotFoundError(  # noqa: B904
+            f"Couldn't find calendar `{calendar_public_id}`"
+        )
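B904 flags a raise inside an except block without explicit chaining, which drops the original traceback. The unsuppressed fix chains the cause; a sketch using names from the handler above:

    try:
        valid_public_id(account_public_id)
    except ValueError as exc:
        # `from exc` preserves the cause; `from None` silences it on purpose.
        raise InputError("Invalid public ID") from exc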
diff --git a/inbox/webhooks/microsoft_notifications.py b/inbox/webhooks/microsoft_notifications.py
index b8b09d691..df34c98f7 100644
--- a/inbox/webhooks/microsoft_notifications.py
+++ b/inbox/webhooks/microsoft_notifications.py
@@ -21,7 +21,7 @@
 )


-def handle_initial_validation_response(view_function):
+def handle_initial_validation_response(view_function):  # noqa: ANN201
     @wraps(view_function)
     def _handle_initial_validation_response(*args, **kwargs):
         """
@@ -48,7 +48,7 @@ def _handle_initial_validation_response(*args, **kwargs):
     return _handle_initial_validation_response


-def validate_webhook_payload_factory(type: MsGraphType):
+def validate_webhook_payload_factory(type: MsGraphType):  # noqa: ANN201
     def validate_webhook_payload(view_function):
         @wraps(view_function)
         def _validate_webhook_payload(*args, **kwargs):
@@ -61,7 +61,7 @@ def _validate_webhook_payload(*args, **kwargs):
             event changes.
             """
             try:
-                request.json
+                request.json  # noqa: B018
             except UnsupportedMediaType:
                 return ("Malformed JSON payload", 415)
@@ -96,7 +96,7 @@ def _validate_webhook_payload(*args, **kwargs):
 @app.route("/calendar_list_update/<account_public_id>", methods=["POST"])
 @handle_initial_validation_response
 @validate_webhook_payload_factory("#Microsoft.Graph.Calendar")
-def calendar_update(account_public_id):
+def calendar_update(account_public_id):  # noqa: ANN201
     """Handle calendar list update for given account."""
     with global_session_scope() as db_session:
         try:
@@ -117,7 +117,7 @@ def calendar_update(account_public_id):
 @app.route("/calendar_update/<calendar_public_id>", methods=["POST"])
 @handle_initial_validation_response
 @validate_webhook_payload_factory("#Microsoft.Graph.Event")
-def event_update(calendar_public_id):
+def event_update(calendar_public_id):  # noqa: ANN201
     """Handle events update for given calendar."""
     with global_session_scope() as db_session:
         try:
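B018 ("useless expression") fires on the bare request.json above, but the expression is intentional: evaluating the property makes Flask parse the body, so a non-JSON payload raises UnsupportedMediaType early. A B018-clean sketch that keeps the side effect explicit:

    try:
        _ = request.json  # parse now; value deliberately unused
    except UnsupportedMediaType:
        return ("Malformed JSON payload", 415)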
diff --git a/migrations/versions/005_import_old_accounts.py b/migrations/versions/005_import_old_accounts.py
index 4ded28a60..db399845d 100644
--- a/migrations/versions/005_import_old_accounts.py
+++ b/migrations/versions/005_import_old_accounts.py
@@ -28,7 +28,7 @@ def upgrade() -> None:
     from inbox.models.backends.imap import ImapAccount

     # Assert we have the dump file
-    if not os.path.isfile(SQL_DUMP_FILENAME):
+    if not os.path.isfile(SQL_DUMP_FILENAME):  # noqa: PTH113
         print(
             "Can't find old user SQL dump at {}...\nMigration no users.".format(
                 SQL_DUMP_FILENAME
@@ -37,15 +37,15 @@ def upgrade() -> None:
         return

     # Imports to `imapaccount_old` table
-    with open(SQL_DUMP_FILENAME) as f:
+    with open(SQL_DUMP_FILENAME) as f:  # noqa: PTH123
         print("Importing old account data..."),
         op.execute(f.read())
     print("OK!")

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class ImapAccount_Old(Base):
+    class ImapAccount_Old(Base):  # noqa: N801
         __table__ = Base.metadata.tables["imapaccount_old"]

     with session_scope() as db_session:
diff --git a/migrations/versions/007_per_provider_table_split.py b/migrations/versions/007_per_provider_table_split.py
index f276f2c8f..77c9f54cd 100644
--- a/migrations/versions/007_per_provider_table_split.py
+++ b/migrations/versions/007_per_provider_table_split.py
@@ -35,10 +35,10 @@ def genericize_imapaccount() -> None:
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class ImapAccount_(Base):
+    class ImapAccount_(Base):  # noqa: N801
         __table__ = Base.metadata.tables["imapaccount"]

     # Get data from columns-to-be-dropped
@@ -81,10 +81,10 @@ def genericize_thread() -> None:
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class Thread_(Base):
+    class Thread_(Base):  # noqa: N801
         __table__ = Base.metadata.tables["thread"]

     # Get data from columns-to-be-dropped
@@ -183,10 +183,10 @@ def downgrade_imapaccount() -> None:
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class ImapAccount_(Base):
+    class ImapAccount_(Base):  # noqa: N801
         __table__ = Base.metadata.tables["imapaccount"]

     # Get data from table-to-be-dropped
@@ -252,10 +252,10 @@ def downgrade_imapthread() -> None:
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class ImapThread_(Base):
+    class ImapThread_(Base):  # noqa: N801
         __table__ = Base.metadata.tables["imapthread"]

     # Get data from table-to-be-dropped
diff --git a/migrations/versions/012_move_google_userinfo_fields_to_.py b/migrations/versions/012_move_google_userinfo_fields_to_.py
index 091bb7121..153a32b88 100644
--- a/migrations/versions/012_move_google_userinfo_fields_to_.py
+++ b/migrations/versions/012_move_google_userinfo_fields_to_.py
@@ -22,7 +22,7 @@ def upgrade() -> None:
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)
     # ADD:
     op.add_column(
@@ -55,7 +55,7 @@ def upgrade() -> None:
     )

     # MOVE:
-    class Account_(Base):
+    class Account_(Base):  # noqa: N801
         __table__ = Base.metadata.tables["account"]

     with session_scope() as db_session:
@@ -114,7 +114,7 @@ def downgrade() -> None:
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)
     # ADD:
     op.add_column(
@@ -144,7 +144,7 @@ def downgrade() -> None:
     )

     # MOVE:
-    class ImapAccount_(Base):
+    class ImapAccount_(Base):  # noqa: N801
         __table__ = Base.metadata.tables["imapaccount"]

     with session_scope() as db_session:
diff --git a/migrations/versions/019_blocks_to_parts.py b/migrations/versions/019_blocks_to_parts.py
index 8a7a0a656..f43c9f8ae 100644
--- a/migrations/versions/019_blocks_to_parts.py
+++ b/migrations/versions/019_blocks_to_parts.py
@@ -58,10 +58,12 @@ def upgrade() -> None:
     )

     print("Reflecting old block table schema")
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class Block_(Base):  # old schema, reflected from database table
+    class Block_(  # noqa: N801
+        Base
+    ):  # old schema, reflected from database table
         __table__ = Base.metadata.tables["block"]

     print("Adding namespace_id column to blocks ", end=" ")
@@ -82,7 +84,7 @@ class Block_(Base):  # old schema, reflected from database table
             p.content_disposition = block.content_disposition
             p.content_id = block.content_id
             p.misc_keyval = block.misc_keyval
-            p.is_inboxapp_attachment
+            p.is_inboxapp_attachment  # noqa: B018

             old_namespace = (
                 db_session.query(Namespace)
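Most of the N806/N801 hits in the migrations that follow come from one idiom: reflecting the live schema into throwaway mapped classes named like module-level constants. Renaming rather than suppressing would look roughly like this (a sketch; engine as in the migrations, class name hypothetical):

    from sqlalchemy.ext.declarative import declarative_base

    base = declarative_base()  # lowercase local name satisfies N806
    base.metadata.reflect(engine)

    class ImapAccountOld(base):  # CapWords name satisfies N801
        __table__ = base.metadata.tables["imapaccount_old"]

Leaving historical migrations untouched and suppressing instead is defensible: they are frozen code that should keep matching what once ran.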
diff --git a/migrations/versions/023_tighten_nullable_constraints_on_.py b/migrations/versions/023_tighten_nullable_constraints_on_.py
index c46694769..d3bc55e58 100644
--- a/migrations/versions/023_tighten_nullable_constraints_on_.py
+++ b/migrations/versions/023_tighten_nullable_constraints_on_.py
@@ -25,7 +25,7 @@ def upgrade() -> None:

     engine = main_engine(pool_size=1, max_overflow=0)

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class ImapUid(Base):
diff --git a/migrations/versions/024_remote_folders_and_inbox_tags_split.py b/migrations/versions/024_remote_folders_and_inbox_tags_split.py
index 3f6e4c3d9..f3e7da760 100644
--- a/migrations/versions/024_remote_folders_and_inbox_tags_split.py
+++ b/migrations/versions/024_remote_folders_and_inbox_tags_split.py
@@ -142,7 +142,7 @@ def upgrade() -> None:

     engine = main_engine(pool_size=1, max_overflow=0)

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easuid" in Base.metadata.tables:
@@ -174,7 +174,7 @@ def upgrade() -> None:
     )

     # Include our changes to the EASUid table:
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Folder(Base):
diff --git a/migrations/versions/026_add_audit_timestamps_to_all_objects.py b/migrations/versions/026_add_audit_timestamps_to_all_objects.py
index a63787cac..469345491 100644
--- a/migrations/versions/026_add_audit_timestamps_to_all_objects.py
+++ b/migrations/versions/026_add_audit_timestamps_to_all_objects.py
@@ -44,7 +44,7 @@ def add_eas_tables() -> None:
     from inbox.ignition import main_engine

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)
     for table_name in ["easuid", "easfoldersync"]:
         if table_name in Base.metadata.tables:
diff --git a/migrations/versions/028_tag_api_migration.py b/migrations/versions/028_tag_api_migration.py
index 164240105..09b11e0c3 100644
--- a/migrations/versions/028_tag_api_migration.py
+++ b/migrations/versions/028_tag_api_migration.py
@@ -27,7 +27,7 @@ def upgrade() -> None:

     engine = main_engine(pool_size=1, max_overflow=0)

-    Session = sessionmaker(bind=engine)
+    Session = sessionmaker(bind=engine)  # noqa: N806

     @contextmanager
     def basic_session():
@@ -76,7 +76,7 @@ def basic_session():
         "account", sa.Column("important_folder_id", sa.Integer, nullable=True)
     )

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Folder(Base):
diff --git a/migrations/versions/029_set_inbox_folder_exposed_name.py b/migrations/versions/029_set_inbox_folder_exposed_name.py
index f8bfbe34c..bba8014d8 100644
--- a/migrations/versions/029_set_inbox_folder_exposed_name.py
+++ b/migrations/versions/029_set_inbox_folder_exposed_name.py
@@ -19,7 +19,7 @@ def upgrade() -> None:
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Folder(Base):
diff --git a/migrations/versions/030_add_is_read_attribute_to_messages.py b/migrations/versions/030_add_is_read_attribute_to_messages.py
index a0eecdf6a..d5f40b640 100644
--- a/migrations/versions/030_add_is_read_attribute_to_messages.py
+++ b/migrations/versions/030_add_is_read_attribute_to_messages.py
@@ -46,7 +46,7 @@ def upgrade() -> None:
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Message(Base):
diff --git a/migrations/versions/031_add_indexes_to_timestamps.py b/migrations/versions/031_add_indexes_to_timestamps.py
index 67f2505c6..9ba04ee1f 100644
--- a/migrations/versions/031_add_indexes_to_timestamps.py
+++ b/migrations/versions/031_add_indexes_to_timestamps.py
@@ -19,7 +19,7 @@ def upgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     op.create_index(
@@ -355,7 +355,7 @@ def downgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easuid" in Base.metadata.tables:
diff --git a/migrations/versions/032_tighten_easuid.py b/migrations/versions/032_tighten_easuid.py
index b452bcb9b..4c3a38a48 100644
--- a/migrations/versions/032_tighten_easuid.py
+++ b/migrations/versions/032_tighten_easuid.py
@@ -20,7 +20,7 @@ def upgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easfoldersync" in Base.metadata.tables:
@@ -54,7 +54,7 @@ def downgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easfoldersync" in Base.metadata.tables:
diff --git a/migrations/versions/033_add_more_indexes.py b/migrations/versions/033_add_more_indexes.py
index 3a1e01261..57e1b45a4 100644
--- a/migrations/versions/033_add_more_indexes.py
+++ b/migrations/versions/033_add_more_indexes.py
@@ -19,7 +19,7 @@ def upgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easuid" in Base.metadata.tables:
@@ -40,7 +40,7 @@ def downgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easuid" in Base.metadata.tables:
diff --git a/migrations/versions/036_replace_usertag_by_generic_tag.py b/migrations/versions/036_replace_usertag_by_generic_tag.py
index 813d1fe84..ac7c6a76a 100644
--- a/migrations/versions/036_replace_usertag_by_generic_tag.py
+++ b/migrations/versions/036_replace_usertag_by_generic_tag.py
@@ -92,7 +92,7 @@ def upgrade() -> None:

     with session_scope(versioned=False) as db_session:
         # create canonical tags that don't already exist.
-        CANONICAL_TAG_NAMES = [
+        CANONICAL_TAG_NAMES = [  # noqa: N806
             "inbox",
             "all",
             "archive",
@@ -135,7 +135,7 @@ def upgrade() -> None:
         count = 0
         for folderitem in db_session.query(FolderItem).yield_per(500):
             folderitem.thread.also_set_tag(None, folderitem, False)
-            count += 1
+            count += 1  # noqa: SIM113
             if not count % 500:
                 db_session.commit()
diff --git a/migrations/versions/038_add_public_ids_to_transactions.py b/migrations/versions/038_add_public_ids_to_transactions.py
index 76966b9ba..cc19a11f7 100644
--- a/migrations/versions/038_add_public_ids_to_transactions.py
+++ b/migrations/versions/038_add_public_ids_to_transactions.py
@@ -38,7 +38,7 @@ def upgrade() -> None:
     from inbox.sqlalchemy_ext.util import b36_to_bin, generate_public_id

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Transaction(Base):
diff --git a/migrations/versions/039_change_easfoldersync_unique_constraint.py b/migrations/versions/039_change_easfoldersync_unique_constraint.py
index 050dd1961..d405d8594 100644
--- a/migrations/versions/039_change_easfoldersync_unique_constraint.py
+++ b/migrations/versions/039_change_easfoldersync_unique_constraint.py
@@ -19,7 +19,7 @@ def upgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easfoldersync" in Base.metadata.tables:
@@ -35,7 +35,7 @@ def downgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easfoldersync" in Base.metadata.tables:
diff --git a/migrations/versions/040_gmailaccount.py b/migrations/versions/040_gmailaccount.py
index 365bb5cf0..26ea023f7 100644
--- a/migrations/versions/040_gmailaccount.py
+++ b/migrations/versions/040_gmailaccount.py
@@ -53,7 +53,7 @@ def upgrade() -> None:
     engine = main_engine(pool_size=1, max_overflow=0)
     from inbox.models.session import session_scope

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Account(Base):
diff --git a/migrations/versions/041_add_sync_status_columns_to_foldersync.py b/migrations/versions/041_add_sync_status_columns_to_foldersync.py
index 1366f512b..3995f848a 100644
--- a/migrations/versions/041_add_sync_status_columns_to_foldersync.py
+++ b/migrations/versions/041_add_sync_status_columns_to_foldersync.py
@@ -22,7 +22,7 @@ def upgrade() -> None:
     engine = main_engine(pool_size=1, max_overflow=0)
     from inbox.sqlalchemy_ext.util import JSON, MutableDict

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     op.add_column(
@@ -45,7 +45,7 @@ def downgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     op.drop_column("foldersync", "_sync_status")
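SIM113, flagged on the counter loop in 036 above, suggests enumerate() over a manually incremented counter. The unsuppressed form would be roughly:

    for count, folderitem in enumerate(
        db_session.query(FolderItem).yield_per(500), start=1
    ):
        folderitem.thread.also_set_tag(None, folderitem, False)
        if not count % 500:
            db_session.commit()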
diff --git a/migrations/versions/045_new_password_storage.py b/migrations/versions/045_new_password_storage.py
index 2c5ffa14e..55b86a83d 100644
--- a/migrations/versions/045_new_password_storage.py
+++ b/migrations/versions/045_new_password_storage.py
@@ -26,7 +26,7 @@

 # Copied from deprecated inbox.util.cryptography module.
 # Needed to port passwords to new storage method.
-def decrypt_aes(ciphertext, key):
+def decrypt_aes(ciphertext, key):  # noqa: ANN201
     """
     Decrypts a ciphertext that was AES-encrypted with the given key.
     The function expects the ciphertext as a byte string and it returns the
@@ -52,14 +52,14 @@ def upgrade() -> None:

     from inbox.util.file import mkdirp

-    OriginalBase = sa.ext.declarative.declarative_base()
+    OriginalBase = sa.ext.declarative.declarative_base()  # noqa: N806
     OriginalBase.metadata.reflect(engine)

     if "easaccount" in OriginalBase.metadata.tables:
         op.add_column("easaccount", sa.Column("password", sa.String(256)))

         # Reflect again to pick up added column
-        Base = sa.ext.declarative.declarative_base()
+        Base = sa.ext.declarative.declarative_base()  # noqa: N806
         Base.metadata.reflect(engine)

         class Account(Base):
@@ -69,18 +69,18 @@ class EASAccount(Account):
             __table__ = Base.metadata.tables["easaccount"]

             @property
-            def _keyfile(self, create_dir=True):
+            def _keyfile(self, create_dir=True):  # noqa: PLR0206
                 assert self.key
                 assert KEY_DIR
                 if create_dir:
                     mkdirp(KEY_DIR)
                 key_filename = f"{sha256(self.key).hexdigest()}"
-                return os.path.join(KEY_DIR, key_filename)
+                return os.path.join(KEY_DIR, key_filename)  # noqa: PTH118

             def get_old_password(self):
                 if self.password_aes is not None:
-                    with open(self._keyfile) as f:
+                    with open(self._keyfile) as f:  # noqa: PTH123
                         key = f.read()

                     key = self.key + key
diff --git a/migrations/versions/050_imap_table_cleanups.py b/migrations/versions/050_imap_table_cleanups.py
index a1889ac22..86b1ed688 100644
--- a/migrations/versions/050_imap_table_cleanups.py
+++ b/migrations/versions/050_imap_table_cleanups.py
@@ -94,7 +94,7 @@ def upgrade() -> None:
     )

     # migrate data and add new constraints
-    Base = sa.ext.declarative.declarative_base()
+    Base = sa.ext.declarative.declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easfoldersync" in Base.metadata.tables:
diff --git a/migrations/versions/051_store_secrets_in_local_vault.py b/migrations/versions/051_store_secrets_in_local_vault.py
index 150b8daa8..3d362b18f 100644
--- a/migrations/versions/051_store_secrets_in_local_vault.py
+++ b/migrations/versions/051_store_secrets_in_local_vault.py
@@ -40,7 +40,7 @@ def upgrade() -> None:
         sa.Column("refresh_token_id", sa.Integer(), nullable=True),
     )

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Account(Base):
@@ -96,7 +96,7 @@ def downgrade() -> None:
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Account(Base):
diff --git a/migrations/versions/053_canonicalize_addresses.py b/migrations/versions/053_canonicalize_addresses.py
index 403faf5a3..f6c02a00c 100644
--- a/migrations/versions/053_canonicalize_addresses.py
+++ b/migrations/versions/053_canonicalize_addresses.py
@@ -66,7 +66,7 @@ def upgrade() -> None:

     from inbox.models.session import session_scope

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     def canonicalize_address(addr):
@@ -129,7 +129,7 @@ def downgrade() -> None:

     from inbox.models.session import session_scope

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Account(Base):
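The PTH codes ask for pathlib equivalents of os.path and the builtin open(). For the _keyfile property in 045 above (also flagged PLR0206, a property taking a parameter), the unsuppressed version might look like this sketch, assuming the directory should always be created as the default argument did:

    from pathlib import Path

    @property
    def _keyfile(self):  # dropping the parameter also resolves PLR0206
        assert self.key
        assert KEY_DIR
        key_dir = Path(KEY_DIR)
        key_dir.mkdir(parents=True, exist_ok=True)  # replaces mkdirp
        return key_dir / sha256(self.key).hexdigest()  # replaces os.path.join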
diff --git a/migrations/versions/057_consolidate_account_sync_status_columns.py b/migrations/versions/057_consolidate_account_sync_status_columns.py
index dde41d30d..5170c682a 100644
--- a/migrations/versions/057_consolidate_account_sync_status_columns.py
+++ b/migrations/versions/057_consolidate_account_sync_status_columns.py
@@ -38,7 +38,7 @@ def upgrade() -> None:
         ),
     )

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Account(Base):
diff --git a/migrations/versions/058_enforce_length_limit_of_255_on_message_.py b/migrations/versions/058_enforce_length_limit_of_255_on_message_.py
index eb01b5b99..e906f3959 100644
--- a/migrations/versions/058_enforce_length_limit_of_255_on_message_.py
+++ b/migrations/versions/058_enforce_length_limit_of_255_on_message_.py
@@ -33,7 +33,7 @@ def upgrade() -> None:

     from sqlalchemy.ext.declarative import declarative_base

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Message(Base):
diff --git a/migrations/versions/060_cascade_folder_deletes_to_easuid.py b/migrations/versions/060_cascade_folder_deletes_to_easuid.py
index fb2bfcf8d..36f98804b 100644
--- a/migrations/versions/060_cascade_folder_deletes_to_easuid.py
+++ b/migrations/versions/060_cascade_folder_deletes_to_easuid.py
@@ -20,7 +20,7 @@ def upgrade() -> None:
     engine = main_engine(pool_size=1, max_overflow=0)
     from sqlalchemy.ext.declarative import declarative_base

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easfoldersyncstatus" in Base.metadata.tables:
diff --git a/migrations/versions/061_remove_easfoldersyncstatus_folder_rows_.py b/migrations/versions/061_remove_easfoldersyncstatus_folder_rows_.py
index 09b1d68ce..628677bb8 100644
--- a/migrations/versions/061_remove_easfoldersyncstatus_folder_rows_.py
+++ b/migrations/versions/061_remove_easfoldersyncstatus_folder_rows_.py
@@ -15,7 +15,7 @@


 def upgrade() -> None:
-    if "easfoldersyncstatus" in Base.metadata.tables:
+    if "easfoldersyncstatus" in Base.metadata.tables:  # noqa: F821
         from inbox.ignition import main_engine

         engine = main_engine(pool_size=1, max_overflow=0)
@@ -24,7 +24,7 @@ def upgrade() -> None:

         from inbox.models.session import session_scope

-        Base = declarative_base()
+        Base = declarative_base()  # noqa: N806
         Base.metadata.reflect(engine)
         from inbox.models import Folder
         from inbox.models.backends.eas import EASFolderSyncStatus
diff --git a/migrations/versions/064_make_address_fields_non_null.py b/migrations/versions/064_make_address_fields_non_null.py
index 2826bb058..5f8633c3e 100644
--- a/migrations/versions/064_make_address_fields_non_null.py
+++ b/migrations/versions/064_make_address_fields_non_null.py
@@ -24,7 +24,7 @@ def upgrade() -> None:

     from inbox.models.session import session_scope

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Message(Base):
diff --git a/migrations/versions/066_kill_spoolmessage.py b/migrations/versions/066_kill_spoolmessage.py
index 88c8752f8..94c342786 100644
--- a/migrations/versions/066_kill_spoolmessage.py
+++ b/migrations/versions/066_kill_spoolmessage.py
@@ -64,7 +64,7 @@ def upgrade() -> None:
         "message_ibfk_3", "message", "message", ["parent_draft_id"], ["id"]
     )

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Message(Base):
diff --git a/migrations/versions/070_fix_folder_easfoldersyncstatus_unique_constraints.py b/migrations/versions/070_fix_folder_easfoldersyncstatus_unique_constraints.py
index ea3bf3cbc..1cdf795da 100644
--- a/migrations/versions/070_fix_folder_easfoldersyncstatus_unique_constraints.py
+++ b/migrations/versions/070_fix_folder_easfoldersyncstatus_unique_constraints.py
@@ -22,7 +22,7 @@ def upgrade() -> None:

     engine = main_engine(pool_size=1, max_overflow=0)

-    Base = sa.ext.declarative.declarative_base()
+    Base = sa.ext.declarative.declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     op.drop_constraint("folder_fk1", "folder", type_="foreignkey")
diff --git a/migrations/versions/072_recompute_snippets.py b/migrations/versions/072_recompute_snippets.py
index 8bab66064..f265529a8 100644
--- a/migrations/versions/072_recompute_snippets.py
+++ b/migrations/versions/072_recompute_snippets.py
@@ -15,8 +15,8 @@

 # solution from http://stackoverflow.com/a/1217947
-def page_query(q):
-    CHUNK_SIZE = 1000
+def page_query(q):  # noqa: ANN201
+    CHUNK_SIZE = 1000  # noqa: N806
     offset = 0
     while True:
         r = False
@@ -34,10 +34,10 @@ def upgrade() -> None:
     from inbox.util.html import strip_tags

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    SNIPPET_LENGTH = 191
+    SNIPPET_LENGTH = 191  # noqa: N806

     class Message(Base):
         __table__ = Base.metadata.tables["message"]
diff --git a/migrations/versions/073_generic_providers.py b/migrations/versions/073_generic_providers.py
index 4f803ec6a..d8f040324 100644
--- a/migrations/versions/073_generic_providers.py
+++ b/migrations/versions/073_generic_providers.py
@@ -37,7 +37,7 @@ def upgrade() -> None:
         sa.PrimaryKeyConstraint("id"),
     )

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Account(Base):
@@ -126,7 +126,7 @@ def downgrade() -> None:
         sa.PrimaryKeyConstraint("id"),
     )

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Account(Base):
@@ -147,7 +147,9 @@ class GenericAccount(Base):
     with session_scope(versioned=False) as db_session:
         for acct in db_session.query(GenericAccount):
             secret = (
-                db_session.query(Secret).filter_by(id=acct.password_id).one()
+                db_session.query(Secret)  # noqa: F821
+                .filter_by(id=acct.password_id)
+                .one()
             )

             if acct.provider == "yahoo":
diff --git a/migrations/versions/074_add_eas_thrid_index.py b/migrations/versions/074_add_eas_thrid_index.py
index 7ff1905e2..5d4a570b4 100644
--- a/migrations/versions/074_add_eas_thrid_index.py
+++ b/migrations/versions/074_add_eas_thrid_index.py
@@ -19,7 +19,7 @@ def upgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine()
-    Base = sa.ext.declarative.declarative_base()
+    Base = sa.ext.declarative.declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easthread" in Base.metadata.tables:
@@ -36,7 +36,7 @@ def downgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine()
-    Base = sa.ext.declarative.declarative_base()
+    Base = sa.ext.declarative.declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easthread" in Base.metadata.tables:
diff --git a/migrations/versions/079_events_longer_uids.py b/migrations/versions/079_events_longer_uids.py
index 0f89c40f7..70f50f1de 100644
--- a/migrations/versions/079_events_longer_uids.py
+++ b/migrations/versions/079_events_longer_uids.py
@@ -20,7 +20,7 @@ def upgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     # The model previously didn't reflect the migration, therefore
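Unlike the style codes, the F821 suppressions in 061 and 073 above mark genuinely undefined names in dead migration paths; --add-noqa keeps those modules lintable, but the code would still fail at runtime. In 073, for instance, the query would need a mapped class in scope, something like (a sketch; assuming a reflected "secret" table):

    class Secret(Base):
        __table__ = Base.metadata.tables["secret"]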
diff --git a/migrations/versions/086_event_date_times.py b/migrations/versions/086_event_date_times.py
index ea0a3797a..5025081b9 100644
--- a/migrations/versions/086_event_date_times.py
+++ b/migrations/versions/086_event_date_times.py
@@ -20,7 +20,7 @@ def upgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     # The model previously didn't reflect the migration, therefore
diff --git a/migrations/versions/088_calendar_descriptions.py b/migrations/versions/088_calendar_descriptions.py
index cca3739bc..f4bf4370d 100644
--- a/migrations/versions/088_calendar_descriptions.py
+++ b/migrations/versions/088_calendar_descriptions.py
@@ -60,7 +60,7 @@ def upgrade() -> None:
     #
     # Also, any already synced events are read only as nobody has created
     # events yet.
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Calendar(Base):
diff --git a/migrations/versions/089_revert_encryption.py b/migrations/versions/089_revert_encryption.py
index 6fc829eaa..30663a982 100644
--- a/migrations/versions/089_revert_encryption.py
+++ b/migrations/versions/089_revert_encryption.py
@@ -43,7 +43,7 @@ def upgrade() -> None:
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = sa.ext.declarative.declarative_base()
+    Base = sa.ext.declarative.declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     key = config.get_required("SECRET_ENCRYPTION_KEY")
diff --git a/migrations/versions/094_eas_passwords.py b/migrations/versions/094_eas_passwords.py
index eba978d11..0153aab3c 100644
--- a/migrations/versions/094_eas_passwords.py
+++ b/migrations/versions/094_eas_passwords.py
@@ -31,7 +31,7 @@ def upgrade() -> None:
         "easaccount",
         sa.Column("password_id", sa.Integer(), sa.ForeignKey("secret.id")),
     )
-    Base = sa.ext.declarative.declarative_base()
+    Base = sa.ext.declarative.declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     from inbox.models.session import session_scope
diff --git a/migrations/versions/096_migrate_secret_data.py b/migrations/versions/096_migrate_secret_data.py
index 6446750b2..50905f255 100644
--- a/migrations/versions/096_migrate_secret_data.py
+++ b/migrations/versions/096_migrate_secret_data.py
@@ -23,7 +23,7 @@ def upgrade() -> None:
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = sa.ext.declarative.declarative_base()
+    Base = sa.ext.declarative.declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Secret(Base):
@@ -60,7 +60,7 @@ class EASAccount(Base):
     for s in secrets:
         plain = (
             s.secret.encode("utf-8")
-            if isinstance(s.secret, unicode)
+            if isinstance(s.secret, unicode)  # noqa: F821
             else s.secret
         )
         if config.get_required("ENCRYPT_SECRETS"):
diff --git a/migrations/versions/107_drop_eas_state.py b/migrations/versions/107_drop_eas_state.py
index e84307dbe..c74f59c6a 100644
--- a/migrations/versions/107_drop_eas_state.py
+++ b/migrations/versions/107_drop_eas_state.py
@@ -21,7 +21,7 @@ def upgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine()
-    Base = sa.ext.declarative.declarative_base()
+    Base = sa.ext.declarative.declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easaccount" in Base.metadata.tables:
diff --git a/migrations/versions/108_easaccount_username.py b/migrations/versions/108_easaccount_username.py
index 0ee8f77de..8b6b43c6a 100644
--- a/migrations/versions/108_easaccount_username.py
+++ b/migrations/versions/108_easaccount_username.py
@@ -36,7 +36,7 @@ def upgrade() -> None:
         sa.Column("eas_auth", sa.String(MAX_INDEXABLE_LENGTH), nullable=True),
     )

-    Base = sa.ext.declarative.declarative_base()
+    Base = sa.ext.declarative.declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     from inbox.models.session import session_scope
diff --git a/migrations/versions/115_eas_twodevices_turn.py b/migrations/versions/115_eas_twodevices_turn.py
index 519f88215..9d6571ef7 100644
--- a/migrations/versions/115_eas_twodevices_turn.py
+++ b/migrations/versions/115_eas_twodevices_turn.py
@@ -29,7 +29,7 @@ def upgrade() -> None:

     from inbox.models.session import session_scope

-    Base = sa.ext.declarative.declarative_base()
+    Base = sa.ext.declarative.declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class EASAccount(Base):
diff --git a/migrations/versions/141_remote_remote_contacts.py b/migrations/versions/141_remote_remote_contacts.py
index 3234db148..05cb23b21 100644
--- a/migrations/versions/141_remote_remote_contacts.py
+++ b/migrations/versions/141_remote_remote_contacts.py
@@ -22,10 +22,10 @@ def upgrade() -> None:
     from inbox.models.session import session_scope

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

-    class Contact_Old(Base):
+    class Contact_Old(Base):  # noqa: N801
         __table__ = Base.metadata.tables["contact"]

     # Delete the "remote" contacts. This is just a server cache for comparing
diff --git a/migrations/versions/150_add_polymorphic_events.py b/migrations/versions/150_add_polymorphic_events.py
index 094a48e4c..34025846a 100644
--- a/migrations/versions/150_add_polymorphic_events.py
+++ b/migrations/versions/150_add_polymorphic_events.py
@@ -83,10 +83,10 @@ def populate() -> None:
         ):
             try:
                 raw_data = json.loads(e.raw_data)
-            except:
+            except:  # noqa: E722
                 try:
                     raw_data = ast.literal_eval(e.raw_data)
-                except:
+                except:  # noqa: E722
                     print(f"Could not load raw data for event {e.id}")
                     continue
             e.recurrence = raw_data["recurrence"]
@@ -117,10 +117,10 @@ def populate() -> None:
             try:
                 # Some raw data is str(dict), other is json.dumps
                 raw_data = json.loads(e.raw_data)
-            except:
+            except:  # noqa: E722
                 try:
                     raw_data = ast.literal_eval(e.raw_data)
-                except:
+                except:  # noqa: E722
                     print(f"Could not load raw data for event {e.id}")
                     continue
             rec_uid = raw_data.get("recurringEventId")
@@ -168,10 +168,10 @@ def populate() -> None:
             r.unwrap_rrule()
             try:
                 raw_data = json.loads(r.raw_data)
-            except:
+            except:  # noqa: E722
                 try:
                     raw_data = ast.literal_eval(r.raw_data)
-                except:
+                except:  # noqa: E722
                     print(f"Could not load raw data for event {r.id}")
                     continue
             r.start_timezone = raw_data["start"].get("timeZone")
diff --git a/migrations/versions/166_migrate_body_format.py b/migrations/versions/166_migrate_body_format.py
index 98e5fa82b..2e19d050b 100644
--- a/migrations/versions/166_migrate_body_format.py
+++ b/migrations/versions/166_migrate_body_format.py
@@ -24,7 +24,7 @@ def upgrade() -> None:
     from inbox.security.blobstorage import encode_blob

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class Message(Base):
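E722 marks the bare except: clauses in 150 above, which would also swallow KeyboardInterrupt and SystemExit. Naming the expected parse errors is the unsuppressed fix; a sketch of the first block (inside the migration's loop):

    try:
        raw_data = json.loads(e.raw_data)
    except (TypeError, ValueError):  # json.JSONDecodeError subclasses ValueError
        try:
            raw_data = ast.literal_eval(e.raw_data)
        except (ValueError, SyntaxError):
            print(f"Could not load raw data for event {e.id}")
            continue  # as in the migration loop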
diff --git a/migrations/versions/170_update_easuid_schema_2.py b/migrations/versions/170_update_easuid_schema_2.py
index 6e463b3cc..53f3589fa 100644
--- a/migrations/versions/170_update_easuid_schema_2.py
+++ b/migrations/versions/170_update_easuid_schema_2.py
@@ -23,7 +23,7 @@ def upgrade() -> None:
     engine = main_engine(pool_size=1, max_overflow=0)
     if not engine.has_table("easuid"):
         return
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
    Base.metadata.reflect(engine)

     class EASUid(Base):
diff --git a/migrations/versions/171_update_easuid_schema_3.py b/migrations/versions/171_update_easuid_schema_3.py
index da1ddee90..abbbd0b23 100644
--- a/migrations/versions/171_update_easuid_schema_3.py
+++ b/migrations/versions/171_update_easuid_schema_3.py
@@ -24,7 +24,7 @@ def upgrade() -> None:
     if not engine.has_table("easuid"):
         return

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class EASUid(Base):
diff --git a/migrations/versions/185_backfill_gmail_auth_credentials_table.py b/migrations/versions/185_backfill_gmail_auth_credentials_table.py
index 3f478bb78..61d195545 100644
--- a/migrations/versions/185_backfill_gmail_auth_credentials_table.py
+++ b/migrations/versions/185_backfill_gmail_auth_credentials_table.py
@@ -25,7 +25,7 @@ def upgrade() -> None:
     engine = main_engine()
     now = datetime.datetime.now()

-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     class GmailAccount(Base):
diff --git a/migrations/versions/187_migrate_data_for_folders_overhaul.py b/migrations/versions/187_migrate_data_for_folders_overhaul.py
index 5797608fa..3bd33c586 100644
--- a/migrations/versions/187_migrate_data_for_folders_overhaul.py
+++ b/migrations/versions/187_migrate_data_for_folders_overhaul.py
@@ -149,7 +149,7 @@ def migrate_messages(account_id) -> None:
         if not messages:
             return
         for message in messages:
-            try:
+            try:  # noqa: SIM105
                 message.update_metadata(message.is_draft)
             except IndexError:
                 # Can happen for messages without a folder.
diff --git a/migrations/versions/193_calculate_receivedrecentdate_for_threads.py b/migrations/versions/193_calculate_receivedrecentdate_for_threads.py
index 2243b8818..140ff98c9 100644
--- a/migrations/versions/193_calculate_receivedrecentdate_for_threads.py
+++ b/migrations/versions/193_calculate_receivedrecentdate_for_threads.py
@@ -13,8 +13,8 @@

 # solution from http://stackoverflow.com/a/1217947
-def page_query(q):
-    CHUNK_SIZE = 500
+def page_query(q):  # noqa: ANN201
+    CHUNK_SIZE = 500  # noqa: N806
     offset = 0
     while True:
         r = False
diff --git a/migrations/versions/194_extend_eas_folder_id.py b/migrations/versions/194_extend_eas_folder_id.py
index 5475d4f76..0d3ec72df 100644
--- a/migrations/versions/194_extend_eas_folder_id.py
+++ b/migrations/versions/194_extend_eas_folder_id.py
@@ -23,7 +23,7 @@ def upgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easfoldersyncstatus" in Base.metadata.tables:
@@ -42,7 +42,7 @@ def downgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easfoldersyncstatus" in Base.metadata.tables:
diff --git a/migrations/versions/196_create_outlook_account_column.py b/migrations/versions/196_create_outlook_account_column.py
index 5a2cade34..4f73a0493 100644
--- a/migrations/versions/196_create_outlook_account_column.py
+++ b/migrations/versions/196_create_outlook_account_column.py
@@ -23,7 +23,7 @@ def upgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easaccount" in Base.metadata.tables:
@@ -39,7 +39,7 @@ def downgrade() -> None:
     from inbox.ignition import main_engine

     engine = main_engine(pool_size=1, max_overflow=0)
-    Base = declarative_base()
+    Base = declarative_base()  # noqa: N806
     Base.metadata.reflect(engine)

     if "easaccount" in Base.metadata.tables:
diff --git a/tests/api/conftest.py b/tests/api/conftest.py
index 2379b9b02..dba473c19 100644
--- a/tests/api/conftest.py
+++ b/tests/api/conftest.py
@@ -1,4 +1,4 @@
-from pytest import fixture
+from pytest import fixture  # noqa: PT013

 from tests.api.base import new_api_client
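PT013 prefers importing the pytest module over importing names from it, so decorators read as @pytest.fixture at the use site. The unsuppressed form of the conftest above would be roughly (fixture body hypothetical; only the import appears in the diff):

    import pytest

    from tests.api.base import new_api_client

    @pytest.fixture
    def api_client(db, default_namespace):  # signature assumed, not shown
        return new_api_client(db, default_namespace)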
diff --git a/tests/api/test_drafts.py b/tests/api/test_drafts.py
index a269a6ba7..530f03919 100644
--- a/tests/api/test_drafts.py
+++ b/tests/api/test_drafts.py
@@ -49,8 +49,11 @@ def attachments(db):
     filenames = ["muir.jpg", "LetMeSendYouEmail.wav", "piece-jointe.jpg"]
     data = []
     for filename in filenames:
-        path = os.path.join(
-            os.path.dirname(os.path.abspath(__file__)), "..", "data", filename
+        path = os.path.join(  # noqa: PTH118
+            os.path.dirname(os.path.abspath(__file__)),  # noqa: PTH100, PTH120
+            "..",
+            "data",
+            filename,
         ).encode("utf-8")
         # Mac and linux fight over filesystem encodings if we store this
         # filename on the fs. Work around by changing the filename we upload
@@ -193,7 +196,7 @@ def test_create_draft_with_attachments(
     attachment_ids = []
     upload_path = "/files"
     for filename, path in attachments:
-        with open(path, "rb") as fp:
+        with open(path, "rb") as fp:  # noqa: PTH123
             data = {"file": (fp, filename)}
             r = api_client.post_raw(upload_path, data=data)
             assert r.status_code == 200
diff --git a/tests/api/test_event_when.py b/tests/api/test_event_when.py
index 7f1f347ab..9c0545b05 100644
--- a/tests/api/test_event_when.py
+++ b/tests/api/test_event_when.py
@@ -235,7 +235,7 @@ def test_api_invalid_event_when_time_bad_params(
         "when": {"object": "date", "time": 0},
     }

-    e_data
+    e_data  # noqa: B018
     with pytest.raises(CreateError):
         _verify_create(default_namespace.public_id, api_client, e_data)
diff --git a/tests/api/test_events_recurring.py b/tests/api/test_events_recurring.py
index 41d0ad632..73a5fb07a 100644
--- a/tests/api/test_events_recurring.py
+++ b/tests/api/test_events_recurring.py
@@ -182,7 +182,7 @@ def test_api_override_serialization(
     db.session.add(override)
     db.session.commit()

-    filter = "starts_after={}&ends_before={}".format(
+    filter = "starts_after={}&ends_before={}".format(  # noqa: A001
         urlsafe(event.start.shift(hours=-1)),
         urlsafe(event.start.shift(weeks=+1)),
     )
diff --git a/tests/api/test_files.py b/tests/api/test_files.py
index 9459b6787..5e25fe6a6 100644
--- a/tests/api/test_files.py
+++ b/tests/api/test_files.py
@@ -152,13 +152,13 @@ def test_download(api_client, uploaded_file_ids, filename) -> None:
     in_file = api_client.get_data(f"/files?filename={filename}")[0]
     data = api_client.get_raw("/files/{}/download".format(in_file["id"])).data

-    path = os.path.join(
-        os.path.dirname(os.path.abspath(__file__)),
+    path = os.path.join(  # noqa: PTH118
+        os.path.dirname(os.path.abspath(__file__)),  # noqa: PTH100, PTH120
         "..",
         "data",
         original_filename,
     )
-    with open(path, "rb") as fp:
+    with open(path, "rb") as fp:  # noqa: PTH123
         local_data = fp.read()
     local_md5 = md5(local_data).digest()
     dl_md5 = md5(data).digest()
@@ -195,14 +195,14 @@ def test_direct_fetching(
     monkeypatch.setattr("inbox.util.blockstore.save_to_blockstore", save_mock)

     # Mock the request to return the contents of an actual email.
-    path = os.path.join(
-        os.path.dirname(os.path.abspath(__file__)),
+    path = os.path.join(  # noqa: PTH118
+        os.path.dirname(os.path.abspath(__file__)),  # noqa: PTH100, PTH120
         "..",
         "data",
         "raw_message_with_filename_attachment.txt",
     )
     data = ""
-    with open(path, "rb") as fd:
+    with open(path, "rb") as fd:  # noqa: PTH123
         data = fd.read()

     raw_mock = mock.Mock(return_value=data)
diff --git a/tests/api/test_searching.py b/tests/api/test_searching.py
index d3395bfc6..ee0c9ccd0 100644
--- a/tests/api/test_searching.py
+++ b/tests/api/test_searching.py
@@ -4,7 +4,7 @@

 import pytest
 import requests
-from pytest import fixture
+from pytest import fixture  # noqa: PT013

 from inbox.models import Folder
 from inbox.search.backends.gmail import GmailSearchClient
diff --git a/tests/api/test_streaming.py b/tests/api/test_streaming.py
index 30a56ee07..873b20ee4 100644
--- a/tests/api/test_streaming.py
+++ b/tests/api/test_streaming.py
@@ -180,6 +180,6 @@ def test_longpoll_delta_timeout(db, api_client, default_namespace) -> None:
     assert end_time - start_time - test_timeout < EPSILON
     parsed_responses = json.loads(resp.data)
     assert len(parsed_responses["deltas"]) == 0
-    assert type(parsed_responses["deltas"]) == list
+    assert type(parsed_responses["deltas"]) == list  # noqa: E721
     assert parsed_responses["cursor_start"] == cursor
     assert parsed_responses["cursor_end"] == cursor
diff --git a/tests/auth/providers/mock_gmail.py b/tests/auth/providers/mock_gmail.py
index 3803ea32e..798b24ce2 100644
--- a/tests/auth/providers/mock_gmail.py
+++ b/tests/auth/providers/mock_gmail.py
@@ -67,8 +67,8 @@ def create_account(self, email_address, response):
         try:
             self.verify_config(account)
         except GmailSettingError as e:
-            print(e)
-            raise UserRecoverableConfigError(e)
+            print(e)  # noqa: T201
+            raise UserRecoverableConfigError(e)  # noqa: B904

         return account
diff --git a/tests/auth/test_generic_auth.py b/tests/auth/test_generic_auth.py
index 94d92047b..930c4f2c6 100644
--- a/tests/auth/test_generic_auth.py
+++ b/tests/auth/test_generic_auth.py
@@ -135,7 +135,7 @@ def test_double_auth(db, mock_imapclient) -> None:
     invalid_settings = attr.evolve(
         account_data, imap_password="invalid_password"
     )
-    with pytest.raises(ValidationError):
+    with pytest.raises(ValidationError):  # noqa: PT012
         account = handler.update_account(account, invalid_settings)
         handler.verify_account(account)
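PT012 wants a single statement inside a pytest.raises() block, so it is unambiguous which call must fail. Assuming update_account itself is not the call expected to raise (an assumption; the test above wraps both), the split would be:

    invalid_settings = attr.evolve(account_data, imap_password="invalid_password")
    account = handler.update_account(account, invalid_settings)  # assumed safe
    with pytest.raises(ValidationError):
        handler.verify_account(account)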
diff --git a/tests/conftest.py b/tests/conftest.py
index cb78f20d0..075df8a9a 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -4,7 +4,7 @@

 os.environ["NYLAS_ENV"] = "test"

-from pytest import fixture
+from pytest import fixture  # noqa: PT013

 from tests.api.base import TestAPIClient
diff --git a/tests/events/microsoft/conftest.py b/tests/events/microsoft/conftest.py
index 3d539fb40..978da8973 100644
--- a/tests/events/microsoft/conftest.py
+++ b/tests/events/microsoft/conftest.py
@@ -1,4 +1,4 @@
-import pytest
+import pytest  # noqa: INP001

 from inbox.events.microsoft.graph_client import MicrosoftGraphClient
diff --git a/tests/events/microsoft/test_events_provider.py b/tests/events/microsoft/test_events_provider.py
index 73c193305..3f407572d 100644
--- a/tests/events/microsoft/test_events_provider.py
+++ b/tests/events/microsoft/test_events_provider.py
@@ -1,4 +1,4 @@
-import datetime
+import datetime  # noqa: INP001
 from unittest import mock

 import pytest
diff --git a/tests/events/microsoft/test_graph_client.py b/tests/events/microsoft/test_graph_client.py
index d3df715c4..77eeadbfc 100644
--- a/tests/events/microsoft/test_graph_client.py
+++ b/tests/events/microsoft/test_graph_client.py
@@ -1,4 +1,4 @@
-import datetime
+import datetime  # noqa: INP001
 import json
 import unittest.mock
diff --git a/tests/events/microsoft/test_parse.py b/tests/events/microsoft/test_parse.py
index 9c63d73df..561af1d24 100644
--- a/tests/events/microsoft/test_parse.py
+++ b/tests/events/microsoft/test_parse.py
@@ -1,4 +1,4 @@
-import datetime
+import datetime  # noqa: INP001

 import ciso8601
 import dateutil
diff --git a/tests/events/test_google_events.py b/tests/events/test_google_events.py
index 89a78a535..66405be33 100644
--- a/tests/events/test_google_events.py
+++ b/tests/events/test_google_events.py
@@ -35,7 +35,9 @@ def cmp_event_attrs(event1, event2):
         "recurrence",
     ):
         if getattr(event1, attr) != getattr(event2, attr):
-            print(attr, getattr(event1, attr), getattr(event2, attr))
+            print(  # noqa: T201
+                attr, getattr(event1, attr), getattr(event2, attr)
+            )
     return all(
         getattr(event1, attr) == getattr(event2, attr)
         for attr in (
@@ -276,7 +278,7 @@ def test_event_parsing() -> None:
     assert found_cancelled_event

     for obtained, expected in zip(updates, expected_updates):
-        print(obtained, expected)
+        print(obtained, expected)  # noqa: T201
         assert cmp_event_attrs(obtained, expected)

     # Test read-only support
diff --git a/tests/events/test_ics_parsing.py b/tests/events/test_ics_parsing.py
index 00e60e16c..9340b91a9 100644
--- a/tests/events/test_ics_parsing.py
+++ b/tests/events/test_ics_parsing.py
@@ -27,7 +27,9 @@ def test_invalid_ical(db, default_account) -> None:

 def test_windows_tz_ical(db, default_account) -> None:
     data = None
-    with open(absolute_path(FIXTURES + "windows_event.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "windows_event.ics")
+    ) as fd:
         data = fd.read()

     events = events_from_ics(
@@ -48,7 +50,9 @@ def test_windows_tz_ical(db, default_account) -> None:

 def test_icloud_allday_event(db, default_account) -> None:
     data = None
-    with open(absolute_path(FIXTURES + "icloud_oneday_event.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "icloud_oneday_event.ics")
+    ) as fd:
         data = fd.read()

     events = events_from_ics(
@@ -70,7 +74,9 @@ def test_icloud_allday_event(db, default_account) -> None:

 def test_iphone_through_exchange(db, default_account) -> None:
     data = None
-    with open(absolute_path(FIXTURES + "iphone_through_exchange.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "iphone_through_exchange.ics")
+    ) as fd:
         data = fd.read()

     events = events_from_ics(
@@ -96,7 +102,7 @@ def test_event_update(db, default_account, message) -> None:
         read_only=True,
     )

-    with open(absolute_path(FIXTURES + "gcal_v1.ics")) as fd:
+    with open(absolute_path(FIXTURES + "gcal_v1.ics")) as fd:  # noqa: PTH123
         ics_data = fd.read()

     msg = add_fake_msg_with_calendar_part(
@@ -116,7 +122,7 @@ def test_event_update(db, default_account, message) -> None:
         "Olympia Hall, 28 Boulevard des Capucines, 75009 Paris, France"
     )

-    with open(absolute_path(FIXTURES + "gcal_v2.ics")) as fd:
+    with open(absolute_path(FIXTURES + "gcal_v2.ics")) as fd:  # noqa: PTH123
         ics_data = fd.read()

     msg = add_fake_msg_with_calendar_part(
@@ -157,7 +163,9 @@ def test_self_sent_update(db, default_account, message) -> None:
     )

     # Import the self-sent event.
-    with open(absolute_path(FIXTURES + "self_sent_v1.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "self_sent_v1.ics")
+    ) as fd:
         ics_data = fd.read()

     msg = add_fake_msg_with_calendar_part(
@@ -184,7 +192,9 @@ def test_self_sent_update(db, default_account, message) -> None:
     db.session.add(event_copy)
     db.session.commit()

-    with open(absolute_path(FIXTURES + "self_sent_v2.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "self_sent_v2.ics")
+    ) as fd:
         ics_data = fd.read()

     msg = add_fake_msg_with_calendar_part(
@@ -215,7 +225,9 @@ def test_self_sent_update(db, default_account, message) -> None:

 @pytest.mark.usefixtures("blockstore_backend")
 @pytest.mark.parametrize("blockstore_backend", ["disk", "s3"], indirect=True)
 def test_recurring_ical(db, default_account) -> None:
-    with open(absolute_path(FIXTURES + "gcal_recur.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "gcal_recur.ics")
+    ) as fd:
         ics_data = fd.read()

     msg = add_fake_msg_with_calendar_part(
@@ -240,7 +252,9 @@ def test_recurring_ical(db, default_account) -> None:

 def test_event_no_end_time(db, default_account) -> None:
     # With no end time, import should fail
-    with open(absolute_path(FIXTURES + "meetup_infinite.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "meetup_infinite.ics")
+    ) as fd:
         ics_data = fd.read()

     add_fake_msg_with_calendar_part(db.session, default_account, ics_data)
@@ -257,7 +271,7 @@ def test_event_no_end_time(db, default_account) -> None:

 def test_event_no_participants(db, default_account) -> None:
     data = None
-    with open(
+    with open(  # noqa: PTH123
         absolute_path(FIXTURES + "event_with_no_participants.ics")
     ) as fd:
         data = fd.read()
@@ -275,7 +289,9 @@ def test_event_no_participants(db, default_account) -> None:

 def test_multiple_events(db, default_account) -> None:
     data = None
-    with open(absolute_path(FIXTURES + "multiple_events.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "multiple_events.ics")
+    ) as fd:
         data = fd.read()

     events = events_from_ics(
@@ -303,7 +319,9 @@ def test_icalendar_import(db, generic_account, message) -> None:
         read_only=True,
     )

-    with open(absolute_path(FIXTURES + "invite_w_rsvps1.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "invite_w_rsvps1.ics")
+    ) as fd:
         ics_data = fd.read()

     msg = add_fake_msg_with_calendar_part(
@@ -349,7 +367,9 @@ def test_rsvp_merging(db, generic_account, message) -> None:
         read_only=True,
     )

-    with open(absolute_path(FIXTURES + "invite_w_rsvps1.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "invite_w_rsvps1.ics")
+    ) as fd:
         ics_data = fd.read()

     msg = add_fake_msg_with_calendar_part(
@@ -378,7 +398,9 @@ def test_rsvp_merging(db, generic_account, message) -> None:
     ev.public_id = "cccc"
     ev.calendar = cal2

-    with open(absolute_path(FIXTURES + "invite_w_rsvps2.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "invite_w_rsvps2.ics")
+    ) as fd:
         ics_data = fd.read()

     msg2 = add_fake_msg_with_calendar_part(
@@ -408,7 +430,9 @@ def test_rsvp_merging(db, generic_account, message) -> None:
         elif participant["email"] == "karim@example.com":
             assert participant["status"] == "noreply"

-    with open(absolute_path(FIXTURES + "invite_w_rsvps3.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "invite_w_rsvps3.ics")
+    ) as fd:
         ics_data = fd.read()

     msg3 = add_fake_msg_with_calendar_part(
@@ -445,7 +469,9 @@ def test_rsvp_merging(db, generic_account, message) -> None:
     # discarded.
     ev.sequence_number += 1

-    with open(absolute_path(FIXTURES + "invite_w_rsvps_4.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "invite_w_rsvps_4.ics")
+    ) as fd:
         ics_data = fd.read()

     add_fake_msg_with_calendar_part(db.session, generic_account, ics_data)
@@ -476,7 +502,9 @@ def test_rsvp_merging(db, generic_account, message) -> None:


 def test_cancelled_event(db, default_account) -> None:
-    with open(absolute_path(FIXTURES + "google_cancelled1.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "google_cancelled1.ics")
+    ) as fd:
         ics_data = fd.read()

     msg = add_fake_msg_with_calendar_part(
@@ -494,7 +522,9 @@ def test_cancelled_event(db, default_account) -> None:

     assert ev.status == "confirmed"

-    with open(absolute_path(FIXTURES + "google_cancelled2.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "google_cancelled2.ics")
+    ) as fd:
         ics_data = fd.read()

     msg2 = add_fake_msg_with_calendar_part(
@@ -514,7 +544,9 @@ def test_cancelled_event(db, default_account) -> None:


 def test_icloud_cancelled_event(db, default_account) -> None:
-    with open(absolute_path(FIXTURES + "icloud_cancelled1.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "icloud_cancelled1.ics")
+    ) as fd:
         ics_data = fd.read()

     msg = add_fake_msg_with_calendar_part(
@@ -532,7 +564,9 @@ def test_icloud_cancelled_event(db, default_account) -> None:

     assert ev.status == "confirmed"

-    with open(absolute_path(FIXTURES + "icloud_cancelled2.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "icloud_cancelled2.ics")
+    ) as fd:
         ics_data = fd.read()

     msg = add_fake_msg_with_calendar_part(
@@ -553,7 +587,9 @@ def test_icloud_cancelled_event(db, default_account) -> None:

 def test_multiple_summaries(db, default_account) -> None:
     data = None
-    with open(absolute_path(FIXTURES + "multiple_summaries.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "multiple_summaries.ics")
+    ) as fd:
         data = fd.read()

     events = events_from_ics(
@@ -569,7 +605,9 @@ def test_multiple_summaries(db, default_account) -> None:

 def test_invalid_rsvp(db, default_account) -> None:
     # Test that we don't save an RSVP reply with an invalid id.
     data = None
-    with open(absolute_path(FIXTURES + "invalid_rsvp.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "invalid_rsvp.ics")
+    ) as fd:
         data = fd.read()

     msg = add_fake_msg_with_calendar_part(db.session, default_account, data)
@@ -590,7 +628,9 @@ def test_rsvp_for_other_provider(db, default_account) -> None:
     # Test that we don't save RSVP replies which aren't replies to a Nylas
     # invite.
     data = None
-    with open(absolute_path(FIXTURES + "invalid_rsvp2.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "invalid_rsvp2.ics")
+    ) as fd:
         data = fd.read()

     msg = add_fake_msg_with_calendar_part(db.session, default_account, data)
@@ -609,7 +649,9 @@ def test_rsvp_for_other_provider(db, default_account) -> None:

 def test_truncate_bogus_sequence_numbers(db, default_account) -> None:
     data = None
-    with open(absolute_path(FIXTURES + "bogus_sequence_number.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "bogus_sequence_number.ics")
+    ) as fd:
         data = fd.read()

     msg = add_fake_msg_with_calendar_part(db.session, default_account, data)
@@ -629,7 +671,9 @@ def test_truncate_bogus_sequence_numbers(db, default_account) -> None:


 def test_handle_missing_sequence_number(db, default_account) -> None:
-    with open(absolute_path(FIXTURES + "event_without_sequence.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "event_without_sequence.ics")
+    ) as fd:
         data = fd.read()

     events = events_from_ics(
@@ -644,7 +688,7 @@ def test_handle_missing_sequence_number(db, default_account) -> None:


 def test_event_without_dtend_with_duration(db, default_account) -> None:
-    with open(
+    with open(  # noqa: PTH123
         absolute_path(FIXTURES + "event_without_dtend_with_duration.ics")
     ) as fd:
         data = fd.read()
@@ -663,7 +707,7 @@ def test_event_without_dtend_with_duration(db, default_account) -> None:


 def test_event_with_windows_timezone(db, default_account) -> None:
-    with open(
+    with open(  # noqa: PTH123
         absolute_path(FIXTURES + "event_with_windows_timezone.ics")
     ) as fd:
         data = fd.read()
@@ -684,7 +728,7 @@ def test_event_with_windows_timezone(db, default_account) -> None:


 def test_event_with_dtstamp_without_timezone(db, default_account) -> None:
-    with open(
+    with open(  # noqa: PTH123
         absolute_path(FIXTURES + "event_with_dtstamp_without_timezone.ics")
     ) as fd:
         data = fd.read()
@@ -702,7 +746,7 @@ def test_event_with_dtstamp_without_timezone(db, default_account) -> None:


 def test_event_with_status_repeated(db, default_account) -> None:
-    with open(
+    with open(  # noqa: PTH123
         absolute_path(FIXTURES + "event_with_status_repeated.ics")
     ) as fd:
         data = fd.read()
@@ -718,7 +762,7 @@ def test_event_with_status_repeated(db, default_account) -> None:


 def test_event_with_method_repeated(db, default_account) -> None:
-    with open(
+    with open(  # noqa: PTH123
         absolute_path(FIXTURES + "event_with_method_repeated.ics")
     ) as fd:
         data = fd.read()
@@ -734,7 +778,9 @@ def test_event_with_method_repeated(db, default_account) -> None:


 def test_event_with_dstart_only(db, default_account) -> None:
-    with open(absolute_path(FIXTURES + "event_with_dtstart_only.ics")) as fd:
+    with open(  # noqa: PTH123
+        absolute_path(FIXTURES + "event_with_dtstart_only.ics")
+    ) as fd:
         data = fd.read()

     events = events_from_ics(
@@ -753,7 +799,9 @@ def test_event_malformed_publish(db, default_account) -> None:
     # and so is malformed, but the calendar method is PUBLISH
     # so we don't need to process it at all because it does not contain
     # rsvps or invites.
- with open(absolute_path(FIXTURES + "event_malformed_publish.ics")) as fd: + with open( # noqa: PTH123 + absolute_path(FIXTURES + "event_malformed_publish.ics") + ) as fd: data = fd.read() events = events_from_ics( @@ -766,7 +814,9 @@ def test_event_malformed_publish(db, default_account) -> None: def test_event_with_organizer_list(db, default_account) -> None: - with open(absolute_path(FIXTURES + "event_with_organizer_list.ics")) as fd: + with open( # noqa: PTH123 + absolute_path(FIXTURES + "event_with_organizer_list.ics") + ) as fd: data = fd.read() events = events_from_ics( @@ -780,7 +830,9 @@ def test_event_with_organizer_list(db, default_account) -> None: def test_event_with_non_ascii_uid(db, default_account) -> None: - with open(absolute_path(FIXTURES + "event_with_non_ascii_uid.ics")) as fd: + with open( # noqa: PTH123 + absolute_path(FIXTURES + "event_with_non_ascii_uid.ics") + ) as fd: data = fd.read() events = events_from_ics( diff --git a/tests/events/test_recurrence.py b/tests/events/test_recurrence.py index dad5491e3..564a8e7af 100644 --- a/tests/events/test_recurrence.py +++ b/tests/events/test_recurrence.py @@ -274,7 +274,9 @@ def test_inflation_exceptions(db, default_account, calendar) -> None: assert i.start != arrow.get(2014, 9, 4, 13, 30, 0) -def test_inflate_across_DST(db, default_account, calendar) -> None: +def test_inflate_across_DST( # noqa: N802 + db, default_account, calendar +) -> None: # If we inflate a RRULE that covers a change to/from Daylight Savings Time, # adjust the base time accordingly to account for the new UTC offset. # Daylight Savings for US/PST: March 8, 2015 - Nov 1, 2015 @@ -411,7 +413,7 @@ def test_inflated_events_cant_persist(db, default_account, calendar) -> None: infl = event.inflate() for i in infl: db.session.add(i) - with pytest.raises(Exception) as excinfo: + with pytest.raises(Exception) as excinfo: # noqa: PT012 # FIXME "No handlers could be found for logger" - ensure this is only # a test issue or fix. db.session.commit() @@ -669,7 +671,7 @@ def test_made_recurring_then_cancelled(db, default_account, calendar) -> None: # the same time, we cancel it. 
     normal = recurring_event(db, default_account, calendar, None)
     # Check this is specifically an Event, not a RecurringEvent
-    assert type(normal) == Event
+    assert type(normal) == Event  # noqa: E721

     # Update with a recurrence rule *and* cancellation
     update = recurring_event(
diff --git a/tests/general/test_message_parsing.py b/tests/general/test_message_parsing.py
index e8dfa602d..b36ed8385 100644
--- a/tests/general/test_message_parsing.py
+++ b/tests/general/test_message_parsing.py
@@ -604,7 +604,7 @@ def test_attachments_emoji_filename_parsing(
     assert m.attachments[0].content_disposition == "attachment"


-def test_attachments_emoji_filename_parsing(
+def test_attachments_emoji_filename_parsing(  # noqa: F811
     db, default_account, raw_message_with_outlook_emoji_inline
 ):
     m = create_from_synced(
diff --git a/tests/general/test_mutable_json_type.py b/tests/general/test_mutable_json_type.py
index 7b38b282e..a058cb57e 100644
--- a/tests/general/test_mutable_json_type.py
+++ b/tests/general/test_mutable_json_type.py
@@ -25,7 +25,7 @@ def test_mutable_json_type(db, config, default_account, folder) -> None:
     updated_metrics = sync_status.metrics

     metrics.update(original_metrics)
-    assert (
+    assert (  # noqa: PT018
         updated_metrics != original_metrics and updated_metrics == metrics
     ), "metrics not updated correctly"

diff --git a/tests/general/test_namespace.py b/tests/general/test_namespace.py
index a4b57d1e4..7c4f77385 100644
--- a/tests/general/test_namespace.py
+++ b/tests/general/test_namespace.py
@@ -2,7 +2,7 @@

 import pytest
 from freezegun import freeze_time
-from pytest import fixture
+from pytest import fixture  # noqa: PT013
 from requests import Response

 from inbox.models.namespace import Namespace
@@ -283,7 +283,7 @@ def test_namespace_deletion(db, default_account) -> None:

     for m in models:
         c = db.session.query(m).filter(m.namespace_id == namespace_id).count()
-        print("count for", m, ":", c)
+        print("count for", m, ":", c)  # noqa: T201
         assert c != 0

     fake_account = add_generic_imap_account(db.session)
@@ -366,7 +366,7 @@ def test_namespace_delete_cascade(db, default_account) -> None:

     for m in models:
         c = db.session.query(m).filter(m.namespace_id == namespace_id).count()
-        print("count for", m, ":", c)
+        print("count for", m, ":", c)  # noqa: T201
         assert c != 0

     fake_account = add_generic_imap_account(db.session)
diff --git a/tests/general/test_thread_creation.py b/tests/general/test_thread_creation.py
index f24badf72..cb611808f 100644
--- a/tests/general/test_thread_creation.py
+++ b/tests/general/test_thread_creation.py
@@ -76,7 +76,7 @@ def test_threading_limit(db, folder_sync_engine, monkeypatch) -> None:
     from inbox.models import Message, Thread

     # Shorten bound to make test faster
-    MAX_THREAD_LENGTH = 10
+    MAX_THREAD_LENGTH = 10  # noqa: N806
     monkeypatch.setattr(
         "inbox.mailsync.backends.imap.generic.MAX_THREAD_LENGTH",
         MAX_THREAD_LENGTH,
diff --git a/tests/heartbeat/test_heartbeat.py b/tests/heartbeat/test_heartbeat.py
index b55ebb543..556a08002 100644
--- a/tests/heartbeat/test_heartbeat.py
+++ b/tests/heartbeat/test_heartbeat.py
@@ -103,7 +103,7 @@ def test_kill_device_multiple() -> None:

     folders = local_store.get_account_folders(1)
     assert len(folders) == 1
-    f, ts = folders[0]
+    f, ts = folders[0]  # noqa: F841
     assert f.decode() == "2"


diff --git a/tests/imap/data.py b/tests/imap/data.py
index bcc9b8a2d..77973b6ef 100644
--- a/tests/imap/data.py
+++ b/tests/imap/data.py
@@ -4,7 +4,7 @@
 aspects of IMAP sync.

 See https://hypothesis.readthedocs.org/en/latest/data.html for more
 information about how this works.
-"""
+"""  # noqa: D404

 import string
diff --git a/tests/imap/network/test_actions_syncback.py b/tests/imap/network/test_actions_syncback.py
index 21557a773..97d22b3ff 100644
--- a/tests/imap/network/test_actions_syncback.py
+++ b/tests/imap/network/test_actions_syncback.py
@@ -29,7 +29,7 @@ def test_archive_move_syncback(db, config) -> None:
     set_remote_archived(account, THREAD_ID, False, db.session)
     set_remote_archived(account, THREAD_ID, True, db.session)

-    assert (
+    assert (  # noqa: PT018
         account.inbox_folder_id and account.all_folder_id
     ), "`inbox_folder_id` and `all_folder_id` cannot be NULL"
     with crispin_client(account.id, account.provider) as client:
diff --git a/tests/imap/network/test_drafts_syncback.py b/tests/imap/network/test_drafts_syncback.py
index d1bda9243..ef8db6a44 100644
--- a/tests/imap/network/test_drafts_syncback.py
+++ b/tests/imap/network/test_drafts_syncback.py
@@ -71,7 +71,9 @@ def test_remote_save_draft(db, config, message) -> None:
         flags = c.conn.get_flags(draft_uids)
         for uid in draft_uids:
             f = flags.get(uid)
-            assert f and "\\Draft" in f, "Message missing '\\Draft' flag"
+            assert (  # noqa: PT018
+                f and "\\Draft" in f
+            ), "Message missing '\\Draft' flag"
         c.conn.delete_messages(draft_uids)
         c.conn.expunge()
diff --git a/tests/imap/test_actions.py b/tests/imap/test_actions.py
index bc65544e9..064e4b527 100644
--- a/tests/imap/test_actions.py
+++ b/tests/imap/test_actions.py
@@ -252,7 +252,7 @@ def test_failed_event_creation(
     )
     db.session.commit()

-    NUM_WORKERS = 2
+    NUM_WORKERS = 2  # noqa: N806
     service = SyncbackService(
         syncback_id=0,
         process_number=0,
diff --git a/tests/imap/test_crispin_client.py b/tests/imap/test_crispin_client.py
index 04e752ff4..0709db4b6 100644
--- a/tests/imap/test_crispin_client.py
+++ b/tests/imap/test_crispin_client.py
@@ -27,7 +27,7 @@


 class MockedIMAPClient(imapclient.IMAPClient):
-    def _create_IMAP4(self):
+    def _create_IMAP4(self):  # noqa: N802
         return mock.Mock()


diff --git a/tests/imap/test_delete_handling.py b/tests/imap/test_delete_handling.py
index b3058e2ac..5b1487999 100644
--- a/tests/imap/test_delete_handling.py
+++ b/tests/imap/test_delete_handling.py
@@ -54,9 +54,9 @@ def test_drafts_deleted_synchronously(
     remove_deleted_uids(default_account.id, folder.id, [msg_uid])
     db.session.expire_all()
     with pytest.raises(ObjectDeletedError):
-        message.id
+        message.id  # noqa: B018
     with pytest.raises(ObjectDeletedError):
-        thread.id
+        thread.id  # noqa: B018


 def test_deleting_from_a_message_with_multiple_uids(
@@ -115,9 +115,9 @@ def test_deletion_with_short_ttl(
     db.session.expire_all()
     # Check that objects were actually deleted
     with pytest.raises(ObjectDeletedError):
-        marked_deleted_message.id
+        marked_deleted_message.id  # noqa: B018
     with pytest.raises(ObjectDeletedError):
-        thread.id
+        thread.id  # noqa: B018


 def test_thread_deletion_with_short_ttl(
@@ -143,8 +143,8 @@ def test_thread_deletion_with_short_ttl(

     db.session.expire_all()
     with pytest.raises(ObjectDeletedError):
-        marked_deleted_message.id
-        thread.id
+        marked_deleted_message.id  # noqa: B018
+        thread.id  # noqa: B018

     assert thread.deleted_at is not None
     handler.check(thread.deleted_at + timedelta(seconds=121))
@@ -152,9 +152,9 @@ def test_thread_deletion_with_short_ttl(

     db.session.expire_all()
     with pytest.raises(ObjectDeletedError):
-        marked_deleted_message.id
+        marked_deleted_message.id  # noqa: B018
     with pytest.raises(ObjectDeletedError):
-        thread.id
+        thread.id  # noqa: B018


 def test_non_orphaned_messages_get_unmarked(
@@ -202,9 +202,9 @@ def test_threads_only_deleted_when_no_messages_left(
     db.session.expire_all()
     # Check that the orphaned message was deleted.
     with pytest.raises(ObjectDeletedError):
-        marked_deleted_message.id
+        marked_deleted_message.id  # noqa: B018
     # Would raise ObjectDeletedError if thread was deleted.
-    thread.id
+    thread.id  # noqa: B018


 def test_deletion_deferred_with_longer_ttl(
@@ -226,8 +226,8 @@ def test_deletion_deferred_with_longer_ttl(
     handler.check(marked_deleted_message.deleted_at + timedelta(seconds=1))

     # Would raise ObjectDeletedError if objects were deleted
-    marked_deleted_message.id
-    thread.id
+    marked_deleted_message.id  # noqa: B018
+    thread.id  # noqa: B018


 def test_deletion_creates_revision(
diff --git a/tests/imap/test_folder_sync.py b/tests/imap/test_folder_sync.py
index 1c9925a55..b49c447f9 100644
--- a/tests/imap/test_folder_sync.py
+++ b/tests/imap/test_folder_sync.py
@@ -200,7 +200,7 @@ def test_generic_flags_refresh_expunges_transient_uids(
     folder_sync_engine.last_slow_refresh = None
     folder_sync_engine.poll_impl()
     with pytest.raises(ObjectDeletedError):
-        transient_uid.id
+        transient_uid.id  # noqa: B018


 def test_handle_uidinvalid(
diff --git a/tests/imap/test_full_imap_enabled.py b/tests/imap/test_full_imap_enabled.py
index 7de862750..6331bd2fc 100644
--- a/tests/imap/test_full_imap_enabled.py
+++ b/tests/imap/test_full_imap_enabled.py
@@ -7,7 +7,7 @@ class MockIMAPClient(IMAPClient):
     def __init__(self) -> None:
         super().__init__("randomhost")

-    def _create_IMAP4(self):
+    def _create_IMAP4(self):  # noqa: N802
         return Mock()

     def logout(self) -> None:
diff --git a/tests/imap/test_labels.py b/tests/imap/test_labels.py
index 05ab3b0c1..d2d860421 100644
--- a/tests/imap/test_labels.py
+++ b/tests/imap/test_labels.py
@@ -219,7 +219,7 @@ def test_adding_inbox_adds_all_and_removes_trash_spam(
     # Verify a Gmail message in 'trash', 'spam' cannot have 'inbox'.
     # This time we test that adding 'inbox' to a message in the 'trash'/ 'spam'
     # moves it to 'all' in addition to adding 'inbox'.
-    folder_map, message_map = folder_and_message_maps
+    folder_map, message_map = folder_and_message_maps  # noqa: F841
     message = message_map[label]

     resp_data = api_client.get_data(f"/messages/{message.public_id}")
@@ -246,7 +246,7 @@ def test_adding_inbox_adds_all_and_removes_trash_spam(
 def test_adding_a_custom_label_preserves_other_labels(
     db, api_client, default_account, folder_and_message_maps, label
 ):
-    folder_map, message_map = folder_and_message_maps
+    folder_map, message_map = folder_and_message_maps  # noqa: F841
     message = message_map[label]

     resp_data = api_client.get_data(f"/messages/{message.public_id}")
@@ -274,7 +274,7 @@ def test_adding_a_custom_label_preserves_other_labels(
 def test_removing_a_mutually_exclusive_label_does_not_orphan_a_message(
     db, api_client, default_account, folder_and_message_maps, label
 ):
-    folder_map, message_map = folder_and_message_maps
+    folder_map, message_map = folder_and_message_maps  # noqa: F841
     message = message_map[label]

     resp_data = api_client.get_data(f"/messages/{message.public_id}")
diff --git a/tests/imap/test_update_metadata.py b/tests/imap/test_update_metadata.py
index ff5c25ccf..f3924efe5 100644
--- a/tests/imap/test_update_metadata.py
+++ b/tests/imap/test_update_metadata.py
@@ -111,7 +111,7 @@ def test_categories_from_multiple_imap_folders(
     appear in one of those folders depending on the order they were returned from the database.

     This makes it deterministic and more-correct because a message is likely in a folder it was added to last.
-    """
+    """  # noqa: D404
     thread = add_fake_thread(db.session, generic_account.namespace.id)
     message = add_fake_message(
         db.session, generic_account.namespace.id, thread
diff --git a/tests/security/test_secret.py b/tests/security/test_secret.py
index ce75c891b..595198cd5 100644
--- a/tests/security/test_secret.py
+++ b/tests/security/test_secret.py
@@ -82,7 +82,7 @@ def test_token(db, config, encrypt) -> None:
     ), "token encrypted when encryption disabled"

     decrypted_secret = secret.secret  # type: bytes
-    assert decrypted_secret == token.encode(
+    assert decrypted_secret == token.encode(  # noqa: PT018
         "utf-8"
     ) and account.refresh_token == decrypted_secret.decode(
         "utf-8"
diff --git a/tests/transactions/test_transaction_deletion.py b/tests/transactions/test_transaction_deletion.py
index 499170c23..3d7355278 100644
--- a/tests/transactions/test_transaction_deletion.py
+++ b/tests/transactions/test_transaction_deletion.py
@@ -57,7 +57,7 @@ def transactions(self, clear_redis, now, db, default_namespace):
         Creates transactions, some new and some old.

         Yields the newest transaction
-        """
+        """  # noqa: D401
         # Transactions created less than 30 days ago should not be deleted
         t0 = create_transaction(db, now, default_namespace.id)
         create_transaction(db, now - timedelta(days=29), default_namespace.id)
@@ -75,10 +75,8 @@ def test_transaction_deletion_dry_run(
         self, now, db, default_namespace
     ) -> None:
         shard_id = default_namespace.id >> 48
-        query = (
-            "SELECT count(id) FROM transaction WHERE namespace_id={}".format(
-                default_namespace.id
-            )
+        query = "SELECT count(id) FROM transaction WHERE namespace_id={}".format(  # noqa: S608
+            default_namespace.id
         )
         all_transactions = db.session.execute(query).scalar()

@@ -90,14 +88,12 @@ def test_transaction_deletion_30_days(
         self, now, db, default_namespace
     ) -> None:
         shard_id = default_namespace.id >> 48
-        query = (
-            "SELECT count(id) FROM transaction WHERE namespace_id={}".format(
-                default_namespace.id
-            )
+        query = "SELECT count(id) FROM transaction WHERE namespace_id={}".format(  # noqa: S608
+            default_namespace.id
         )
         all_transactions = db.session.execute(query).scalar()
         date_query = (
-            "SELECT count(id) FROM transaction WHERE created_at < "
+            "SELECT count(id) FROM transaction WHERE created_at < "  # noqa: S608
             f"DATE_SUB({format_datetime(now)}, INTERVAL 30 day)"
         )
         older_than_thirty_days = db.session.execute(date_query).scalar()
@@ -113,15 +109,13 @@ def test_transaction_deletion_one_day(
         self, now, transactions, db, default_namespace
     ) -> None:
         shard_id = default_namespace.id >> 48
-        query = (
-            "SELECT count(id) FROM transaction WHERE namespace_id={}".format(
-                default_namespace.id
-            )
+        query = "SELECT count(id) FROM transaction WHERE namespace_id={}".format(  # noqa: S608
+            default_namespace.id
         )
         all_transactions = db.session.execute(query).scalar()

         date_query = (
-            "SELECT count(id) FROM transaction WHERE created_at < "
+            "SELECT count(id) FROM transaction WHERE created_at < "  # noqa: S608
             f"DATE_SUB({format_datetime(now)}, INTERVAL 1 day)"
         )
         older_than_one_day = db.session.execute(date_query).scalar()
diff --git a/tests/util/base.py b/tests/util/base.py
index 16816b953..f67d9a14a 100644
--- a/tests/util/base.py
+++ b/tests/util/base.py
@@ -6,7 +6,7 @@

 from flanker import mime
 from mockredis import mock_strict_redis_client
-from pytest import fixture
+from pytest import fixture  # noqa: PT013

 from inbox.util.testutils import setup_test_db

@@ -16,16 +16,20 @@ def absolute_path(path):
     Returns the absolute path for a path specified as relative to the
     tests/ directory, needed for the dump file name in config.cfg

-    """
-    return os.path.abspath(
-        os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", path)
+    """  # noqa: D401
+    return os.path.abspath(  # noqa: PTH100
+        os.path.join(  # noqa: PTH118
+            os.path.dirname(os.path.realpath(__file__)),  # noqa: PTH120
+            "..",
+            path,
+        )
     )


 def make_config(tmpdir_factory):
     from inbox.config import config

-    assert (
+    assert (  # noqa: PT018
         "NYLAS_ENV" in os.environ and os.environ["NYLAS_ENV"] == "test"
     ), "NYLAS_ENV must be 'test' to run tests"
     # don't try to write test data to the module tree