diff --git a/bin/create-event-contact-associations.py b/bin/create-event-contact-associations.py
index 0b32b5681..7bfe5a1c4 100755
--- a/bin/create-event-contact-associations.py
+++ b/bin/create-event-contact-associations.py
@@ -19,7 +19,7 @@
log = get_logger(purpose="create-event-contact-associations")
-def process_shard(shard_id, dry_run, id_start=0) -> None:
+def process_shard(shard_id, dry_run, id_start: int = 0) -> None:
# At 500K events, we need to process 6 events per second to finish within a day.
batch_size = 100
rps = 6 / batch_size
diff --git a/bin/syncback-service.py b/bin/syncback-service.py
index bec289672..81968a3bb 100755
--- a/bin/syncback-service.py
+++ b/bin/syncback-service.py
@@ -68,7 +68,7 @@ def main(prod, config, process_num, syncback_id, enable_profiler) -> None:
os.environ.get("SYNCBACK_PROCESSES", 1) # noqa: PLW1508
)
- def start():
+ def start() -> None:
# Start the syncback service, and just hang out forever
syncback = SyncbackService(syncback_id, process_num, total_processes)
diff --git a/inbox/actions/backends/generic.py b/inbox/actions/backends/generic.py
index 4f0b58ccd..81eda3852 100644
--- a/inbox/actions/backends/generic.py
+++ b/inbox/actions/backends/generic.py
@@ -71,7 +71,9 @@ def _create_email(account, message):
return msg
-def _set_flag(crispin_client, account_id, message_id, flag_name, is_add):
+def _set_flag(
+ crispin_client, account_id, message_id, flag_name, is_add
+) -> None:
with session_scope(account_id) as db_session:
uids_for_message = uids_by_folder(message_id, db_session)
if not uids_for_message:
@@ -185,7 +187,7 @@ def remote_save_draft(crispin_client, account_id, message_id) -> None:
def remote_update_draft(
crispin_client, account_id, message_id, old_message_id_header
-):
+) -> None:
with session_scope(account_id) as db_session:
account = db_session.query(Account).get(account_id)
message = db_session.query(Message).get(message_id)
@@ -244,8 +246,11 @@ def remote_delete_draft(
def remote_delete_sent(
- crispin_client, account_id, message_id_header, delete_multiple=False
-):
+ crispin_client,
+ account_id,
+ message_id_header,
+ delete_multiple: bool = False,
+) -> None:
if "sent" not in crispin_client.folder_names():
log.warning(
"Account has no detected sent folder; not deleting message",
@@ -255,7 +260,7 @@ def remote_delete_sent(
crispin_client.delete_sent_message(message_id_header, delete_multiple)
-def remote_save_sent(crispin_client, account_id, message_id):
+def remote_save_sent(crispin_client, account_id, message_id) -> None:
with session_scope(account_id) as db_session:
account = db_session.query(Account).get(account_id)
message = db_session.query(Message).get(message_id)
diff --git a/inbox/actions/backends/gmail.py b/inbox/actions/backends/gmail.py
index 6f485cf76..08bf36645 100644
--- a/inbox/actions/backends/gmail.py
+++ b/inbox/actions/backends/gmail.py
@@ -21,7 +21,7 @@ def _encode_labels(labels):
def remote_change_labels(
crispin_client, account_id, message_ids, removed_labels, added_labels
-):
+) -> None:
uids_for_message: dict[str, list[str]] = {}
with session_scope(account_id) as db_session:
for message_id in message_ids:
diff --git a/inbox/api/filtering.py b/inbox/api/filtering.py
index 4b3821fd6..2b8b9bfbd 100644
--- a/inbox/api/filtering.py
+++ b/inbox/api/filtering.py
@@ -593,7 +593,7 @@ def recurring_events( # noqa: ANN201
ends_before,
ends_after,
db_session,
- show_cancelled=False,
+ show_cancelled: bool = False,
):
# Expands individual recurring events into full instances.
# If neither starts_before or ends_before is given, the recurring range
diff --git a/inbox/api/kellogs.py b/inbox/api/kellogs.py
index a3d604651..b94de2de1 100644
--- a/inbox/api/kellogs.py
+++ b/inbox/api/kellogs.py
@@ -75,7 +75,7 @@ def format_phone_numbers(phone_numbers): # noqa: ANN201
def encode( # noqa: ANN201
- obj, namespace_public_id=None, expand=False, is_n1=False
+ obj, namespace_public_id=None, expand: bool = False, is_n1: bool = False
):
try:
return _encode(obj, namespace_public_id, expand, is_n1=is_n1)
@@ -103,7 +103,7 @@ def _convert_timezone_to_iana_tz(original_tz):
def _encode( # noqa: D417
- obj, namespace_public_id=None, expand=False, is_n1=False
+ obj, namespace_public_id=None, expand: bool = False, is_n1: bool = False
):
"""
Returns a dictionary representation of a Nylas model object obj, or
@@ -455,13 +455,18 @@ class APIEncoder:
"""
def __init__(
- self, namespace_public_id=None, expand=False, is_n1=False
+ self,
+ namespace_public_id=None,
+ expand: bool = False,
+ is_n1: bool = False,
) -> None:
self.encoder_class = self._encoder_factory(
namespace_public_id, expand, is_n1=is_n1
)
- def _encoder_factory(self, namespace_public_id, expand, is_n1=False):
+ def _encoder_factory(
+ self, namespace_public_id, expand, is_n1: bool = False
+ ):
class InternalEncoder(JSONEncoder):
def default(self, obj):
custom_representation = encode(
@@ -474,7 +479,7 @@ def default(self, obj):
return InternalEncoder
- def cereal(self, obj, pretty=False): # noqa: ANN201, D417
+ def cereal(self, obj, pretty: bool = False): # noqa: ANN201, D417
"""
Returns the JSON string representation of obj.
diff --git a/inbox/auth/base.py b/inbox/auth/base.py
index 42adde2db..22393d868 100644
--- a/inbox/auth/base.py
+++ b/inbox/auth/base.py
@@ -62,7 +62,9 @@ def update_account(self, account, account_data) -> Never:
"""
raise NotImplementedError()
- def get_imap_connection(self, account, use_timeout=True): # noqa: ANN201
+ def get_imap_connection( # noqa: ANN201
+ self, account, use_timeout: bool = True
+ ):
host, port = account.imap_endpoint
try:
return create_imap_connection(host, port, use_timeout)
@@ -80,7 +82,7 @@ def authenticate_imap_connection(self, account, conn) -> Never:
raise NotImplementedError()
def get_authenticated_imap_connection( # noqa: ANN201
- self, account, use_timeout=True
+ self, account, use_timeout: bool = True
):
conn = self.get_imap_connection(account, use_timeout=use_timeout)
self.authenticate_imap_connection(account, conn)
diff --git a/inbox/auth/utils.py b/inbox/auth/utils.py
index 3b93b248d..aa4cfc73c 100644
--- a/inbox/auth/utils.py
+++ b/inbox/auth/utils.py
@@ -60,7 +60,9 @@ def auth_is_invalid(exc): # noqa: ANN201
)
-def create_imap_connection(host, port, use_timeout=True): # noqa: ANN201
+def create_imap_connection( # noqa: ANN201
+ host, port, use_timeout: bool = True
+):
"""
Return a connection to the IMAP server.
diff --git a/inbox/config.py b/inbox/config.py
index f2a8c4165..defc00b70 100644
--- a/inbox/config.py
+++ b/inbox/config.py
@@ -24,7 +24,7 @@
env = "prod"
-def is_live_env():
+def is_live_env() -> bool:
return env in ["prod", "staging"]
@@ -112,7 +112,7 @@ def _update_config_from_env(config, env):
raise
-def _update_config_from_env_variables(config):
+def _update_config_from_env_variables(config) -> None:
flags = (
os.environ.get("FEATURE_FLAGS", "") or config.get("FEATURE_FLAGS", "")
).split()
@@ -124,7 +124,7 @@ def _update_config_from_env_variables(config):
config["CALENDAR_POLL_FREQUENCY"] = calendar_poll_frequencey
-def _get_process_name(config):
+def _get_process_name(config) -> None:
if os.environ.get("PROCESS_NAME") is not None:
config["PROCESS_NAME"] = os.environ.get("PROCESS_NAME")
diff --git a/inbox/contacts/algorithms.py b/inbox/contacts/algorithms.py
index ebd339a6e..79b48cc44 100644
--- a/inbox/contacts/algorithms.py
+++ b/inbox/contacts/algorithms.py
@@ -54,7 +54,7 @@ def _get_participants(msg, excluded_emails=None):
# Not really an algorithm, but it seemed reasonable to put this here?
-def is_stale(last_updated, lifespan=14): # noqa: ANN201
+def is_stale(last_updated, lifespan: int = 14): # noqa: ANN201
"""
last_updated is a datetime.datetime object
lifespan is measured in days
@@ -70,7 +70,9 @@ def is_stale(last_updated, lifespan=14): # noqa: ANN201
##
-def calculate_contact_scores(messages, time_dependent=True): # noqa: ANN201
+def calculate_contact_scores( # noqa: ANN201
+ messages, time_dependent: bool = True
+):
now = datetime.datetime.now()
res: defaultdict[str, int] = defaultdict(int)
for message in messages:
@@ -152,7 +154,7 @@ def get_message_list_weight(message_ids):
# Helper functions for calculating group scores
-def _expand_molecule_pool(molecules_dict):
+def _expand_molecule_pool(molecules_dict) -> None:
mditems = [(set(g), msgs) for (g, msgs) in molecules_dict.items()]
for i in range(len(mditems)):
g1, m1 = mditems[i]
diff --git a/inbox/contacts/google.py b/inbox/contacts/google.py
index 8cbac48a1..168b2a8ce 100644
--- a/inbox/contacts/google.py
+++ b/inbox/contacts/google.py
@@ -50,7 +50,7 @@ def __init__(self, account_id, namespace_id) -> None:
provider=self.PROVIDER_NAME,
)
- def _get_google_client(self, retry_conn_errors=True):
+ def _get_google_client(self, retry_conn_errors: bool = True):
"""Return the Google API client."""
with session_scope(self.namespace_id) as db_session:
account = db_session.query(GmailAccount).get(self.account_id)
@@ -128,7 +128,9 @@ def _parse_contact_result(self, google_contact):
raw_data=raw_data,
)
- def get_items(self, sync_from_dt=None, max_results=100000): # noqa: ANN201
+ def get_items( # noqa: ANN201
+ self, sync_from_dt=None, max_results: int = 100000
+ ):
"""
Fetches and parses fresh contact data.
diff --git a/inbox/contacts/icloud.py b/inbox/contacts/icloud.py
index f230a297c..1ab7f91a6 100644
--- a/inbox/contacts/icloud.py
+++ b/inbox/contacts/icloud.py
@@ -66,7 +66,9 @@ def _x(key): # Ugly parsing helper for ugly formats
raw_data=cardstring,
)
- def get_items(self, sync_from_dt=None, max_results=100000): # noqa: ANN201
+ def get_items( # noqa: ANN201
+ self, sync_from_dt=None, max_results: int = 100000
+ ):
with session_scope(self.namespace_id) as db_session:
account = db_session.query(GenericAccount).get(self.account_id)
email_address = account.email_address
diff --git a/inbox/contacts/remote_sync.py b/inbox/contacts/remote_sync.py
index 11faa7cc2..8c9df79a1 100644
--- a/inbox/contacts/remote_sync.py
+++ b/inbox/contacts/remote_sync.py
@@ -51,7 +51,7 @@ def __init__(
provider_name,
account_id,
namespace_id,
- poll_frequency=300,
+ poll_frequency: int = 300,
) -> None:
bind_context(self, "contactsync", account_id)
self.provider_name = provider_name
diff --git a/inbox/contacts/vcard.py b/inbox/contacts/vcard.py
index 5a2043f5e..10e8114e8 100644
--- a/inbox/contacts/vcard.py
+++ b/inbox/contacts/vcard.py
@@ -232,7 +232,7 @@ class VCard(defaultdict):
2: some property was deleted
"""
- def __init__(self, ddict="") -> None:
+ def __init__(self, ddict: str = "") -> None:
if ddict == "":
defaultdict.__init__(self, list)
else:
@@ -250,7 +250,7 @@ def name(self): # noqa: ANN201
return str(self["N"][0][0]) if self["N"] else ""
@name.setter
- def name(self, value):
+ def name(self, value) -> None:
if not self["N"]:
self["N"] = [("", {})]
self["N"][0][0] = value
@@ -260,7 +260,7 @@ def fname(self): # noqa: ANN201
return str(self["FN"][0][0]) if self["FN"] else ""
@fname.setter
- def fname(self, value):
+ def fname(self, value) -> None:
self["FN"][0] = (value, {})
def alt_keys(self): # noqa: ANN201
diff --git a/inbox/crispin.py b/inbox/crispin.py
index d3964d89b..793af3381 100644
--- a/inbox/crispin.py
+++ b/inbox/crispin.py
@@ -169,7 +169,7 @@ def connection_pool(account_id, pool_size=None):
_writable_pool_map: dict[int, "CrispinConnectionPool"] = {}
-def writable_connection_pool(account_id, pool_size=1):
+def writable_connection_pool(account_id, pool_size: int = 1):
"""
Per-account crispin connection pool, with *read-write* connections.
@@ -242,7 +242,7 @@ def _should_timeout_connection(self):
# constituent SyncbackTasks.
return self.readonly
- def _logout(self, client):
+ def _logout(self, client) -> None:
try:
client.logout()
except Exception:
@@ -302,7 +302,7 @@ def get(self, *, timeout: "float | None" = None):
self._queue.put(client)
self._sem.release()
- def _set_account_info(self):
+ def _set_account_info(self) -> None:
with session_scope(self.account_id) as db_session:
account = db_session.query(ImapAccount).get(self.account_id)
self.sync_state = account.sync_state
@@ -349,7 +349,7 @@ def _new_connection(self):
)
-def _exc_callback(exc):
+def _exc_callback(exc) -> None:
log.info(
"Connection broken with error; retrying with new connection",
exc_info=True,
@@ -1125,7 +1125,7 @@ def find_by_header(self, header_name, header_value): # noqa: ANN201
return results
def delete_sent_message( # noqa: ANN201
- self, message_id_header, delete_multiple=False
+ self, message_id_header, delete_multiple: bool = False
):
"""
Delete a message in the sent folder, as identified by the Message-Id
@@ -1179,7 +1179,9 @@ def delete_draft(self, message_id_header): # noqa: ANN201
self._delete_message(message_id_header)
return draft_deleted
- def _delete_message(self, message_id_header, delete_multiple=False):
+ def _delete_message(
+ self, message_id_header, delete_multiple: bool = False
+ ) -> bool:
"""
Delete a message from the selected folder, using the Message-Id header
to locate it. Does nothing if no matching messages are found, or if
@@ -1644,7 +1646,7 @@ def delete_draft(self, message_id_header) -> bool:
return True
def delete_sent_message(
- self, message_id_header, delete_multiple=False
+ self, message_id_header, delete_multiple: bool = False
) -> bool:
"""
Delete a message in the sent folder, as identified by the Message-Id
diff --git a/inbox/events/google.py b/inbox/events/google.py
index f5d59abae..59c797eca 100644
--- a/inbox/events/google.py
+++ b/inbox/events/google.py
@@ -311,7 +311,7 @@ def delete_remote_event(self, calendar_uid, event_uid, **kwargs) -> None:
# -------- logic for push notification subscriptions -------- #
def _get_access_token_for_push_notifications(
- self, account, force_refresh=False
+ self, account, force_refresh: bool = False
):
if not self.webhook_notifications_enabled(account):
raise OAuthError("Account not enabled for push notifications.")
diff --git a/inbox/events/ical.py b/inbox/events/ical.py
index 7e8d1adaf..9ab70729c 100644
--- a/inbox/events/ical.py
+++ b/inbox/events/ical.py
@@ -541,7 +541,9 @@ def import_attached_events(
)
-def generate_icalendar_invite(event, invite_type="request"): # noqa: ANN201
+def generate_icalendar_invite( # noqa: ANN201
+ event, invite_type: str = "request"
+):
# Generates an iCalendar invite from an event.
assert invite_type in ["request", "cancel"]
@@ -615,7 +617,7 @@ def generate_icalendar_invite(event, invite_type="request"): # noqa: ANN201
def generate_invite_message( # noqa: ANN201
- ical_txt, event, account, invite_type="request"
+ ical_txt, event, account, invite_type: str = "request"
):
assert invite_type in ["request", "update", "cancel"]
html_body = event.description or ""
@@ -659,7 +661,9 @@ def generate_invite_message( # noqa: ANN201
return msg
-def send_invite(ical_txt, event, account, invite_type="request") -> None:
+def send_invite(
+ ical_txt, event, account, invite_type: str = "request"
+) -> None:
# We send those transactional emails through a separate domain.
MAILGUN_API_KEY = config.get("NOTIFICATIONS_MAILGUN_API_KEY") # noqa: N806
MAILGUN_DOMAIN = config.get("NOTIFICATIONS_MAILGUN_DOMAIN") # noqa: N806
diff --git a/inbox/events/microsoft/parse.py b/inbox/events/microsoft/parse.py
index db63fbf6f..a73810f3d 100644
--- a/inbox/events/microsoft/parse.py
+++ b/inbox/events/microsoft/parse.py
@@ -285,7 +285,7 @@ def parse_msgraph_range_start_and_until(
def convert_msgraph_patterned_recurrence_to_ical_rrule(
- event: MsGraphEvent, *, naive=False
+ event: MsGraphEvent, *, naive: bool = False
) -> str:
"""
Convert Microsoft Graph PatternedRecurrence to iCal RRULE.
diff --git a/inbox/heartbeat/status.py b/inbox/heartbeat/status.py
index 0f85f2e38..cf12290bc 100644
--- a/inbox/heartbeat/status.py
+++ b/inbox/heartbeat/status.py
@@ -17,7 +17,7 @@
def get_ping_status( # noqa: ANN201
- account_ids, host=None, port=6379, threshold=ALIVE_EXPIRY
+ account_ids, host=None, port: int = 6379, threshold=ALIVE_EXPIRY
):
# Query the indexes and not the per-folder info for faster lookup.
store = HeartbeatStore.store(host, port)
diff --git a/inbox/heartbeat/store.py b/inbox/heartbeat/store.py
index ec0b21d95..31b91e30c 100644
--- a/inbox/heartbeat/store.py
+++ b/inbox/heartbeat/store.py
@@ -70,7 +70,7 @@ def __init__(
folder_name=None,
email_address=None,
provider_name=None,
- device_id=0,
+ device_id: int = 0,
) -> None:
self.key = HeartbeatStatusKey(account_id, folder_id)
self.account_id = account_id
@@ -108,7 +108,7 @@ class HeartbeatStore:
_instances: dict[str | None, "HeartbeatStore"] = {}
- def __init__(self, host=None, port=6379) -> None:
+ def __init__(self, host=None, port: int = 6379) -> None:
self.host = host
self.port = port
diff --git a/inbox/ignition.py b/inbox/ignition.py
index 74d7add77..c1947a72b 100644
--- a/inbox/ignition.py
+++ b/inbox/ignition.py
@@ -53,7 +53,7 @@ def engine( # noqa: ANN201
pool_size=DB_POOL_SIZE,
max_overflow=DB_POOL_MAX_OVERFLOW,
pool_timeout=DB_POOL_TIMEOUT,
- echo=False,
+ echo: bool = False,
):
connect_args = {
"binary_prefix": True,
@@ -76,7 +76,7 @@ def engine( # noqa: ANN201
@event.listens_for(engine, "checkout")
def receive_checkout(
dbapi_connection, connection_record, connection_proxy
- ):
+ ) -> None:
"""Log checkedout and overflow when a connection is checked out"""
hostname = gethostname().replace(".", "-")
process_name = str(config.get("PROCESS_NAME", "main_process"))
@@ -123,7 +123,7 @@ def receive_checkout(
}
@event.listens_for(engine, "checkin")
- def receive_checkin(dbapi_connection, connection_record):
+ def receive_checkin(dbapi_connection, connection_record) -> None:
if dbapi_connection in pool_tracker:
del pool_tracker[dbapi_connection]
@@ -131,7 +131,9 @@ def receive_checkin(dbapi_connection, connection_record):
class EngineManager:
- def __init__(self, databases, users, include_disabled=False) -> None:
+ def __init__(
+ self, databases, users, include_disabled: bool = False
+ ) -> None:
self.engines = {}
self._engine_zones = {}
keys = set()
@@ -196,7 +198,7 @@ def shards_for_zone(self, zone): # noqa: ANN201
)
-def init_db(engine, key=0) -> None:
+def init_db(engine, key: int = 0) -> None:
"""
Make the tables.
@@ -261,7 +263,7 @@ def verify_db(engine, schema, key) -> None:
def reset_invalid_autoincrements( # noqa: ANN201
- engine, schema, key, dry_run=True
+ engine, schema, key, dry_run: bool = True
):
from inbox.models.base import MailSyncBase
diff --git a/inbox/instrumentation.py b/inbox/instrumentation.py
index 143957e1a..e220bd093 100644
--- a/inbox/instrumentation.py
+++ b/inbox/instrumentation.py
@@ -10,7 +10,7 @@ class ProfileCollector:
this uses signals, it only works on the main thread.
"""
- def __init__(self, interval=0.005) -> None:
+ def __init__(self, interval: float = 0.005) -> None:
self.interval = interval
self._started = None
self._stack_counts = collections.defaultdict(int)
@@ -26,7 +26,7 @@ def start(self) -> None:
signal.setitimer(signal.ITIMER_VIRTUAL, self.interval, 0)
- def _sample(self, signum, frame):
+ def _sample(self, signum, frame) -> None:
stack: list[str] = []
while frame is not None:
stack.append(self._format_frame(frame))
@@ -36,7 +36,7 @@ def _sample(self, signum, frame):
self._stack_counts[stack_str] += 1
signal.setitimer(signal.ITIMER_VIRTUAL, self.interval, 0)
- def _format_frame(self, frame):
+ def _format_frame(self, frame) -> str:
return "{}({})".format(
frame.f_code.co_name, frame.f_globals.get("__name__")
)
diff --git a/inbox/mailsync/backends/base.py b/inbox/mailsync/backends/base.py
index ff0dc0605..2a19a54f1 100644
--- a/inbox/mailsync/backends/base.py
+++ b/inbox/mailsync/backends/base.py
@@ -46,7 +46,7 @@ class BaseMailSyncMonitor(InterruptibleThread):
How often to check for commands.
"""
- def __init__(self, account, heartbeat=1) -> None:
+ def __init__(self, account, heartbeat: int = 1) -> None:
bind_context(self, "mailsyncmonitor", account.id)
self.shutdown = threading.Event()
# how often to check inbox, in seconds
@@ -101,7 +101,7 @@ def _run_impl(self):
def sync(self) -> Never:
raise NotImplementedError
- def _cleanup(self):
+ def _cleanup(self) -> None:
self.sync_thread.kill()
with session_scope(self.namespace_id) as mailsync_db_session:
for x in self.folder_monitors:
diff --git a/inbox/mailsync/backends/imap/common.py b/inbox/mailsync/backends/imap/common.py
index 1e4d2a8db..620cccf94 100644
--- a/inbox/mailsync/backends/imap/common.py
+++ b/inbox/mailsync/backends/imap/common.py
@@ -352,7 +352,7 @@ def create_imap_message(
return imapuid
-def _update_categories(db_session, message, synced_categories):
+def _update_categories(db_session, message, synced_categories) -> None:
now = datetime.utcnow()
# We make the simplifying assumption that only the latest syncback action
diff --git a/inbox/mailsync/backends/imap/generic.py b/inbox/mailsync/backends/imap/generic.py
index 9d982c3bd..8881ad872 100644
--- a/inbox/mailsync/backends/imap/generic.py
+++ b/inbox/mailsync/backends/imap/generic.py
@@ -250,7 +250,7 @@ def _run(self):
# eagerly signal the sync status
self.heartbeat_status.publish()
- def start_sync(saved_folder_status):
+ def start_sync(saved_folder_status) -> None:
# Ensure we don't cause an error if the folder was deleted.
sync_end_time = (
saved_folder_status.folder
@@ -359,7 +359,7 @@ def _run_impl(self):
# killed between the end of the handler and the commit.
if self.state != old_state:
- def update(status):
+ def update(status) -> None:
status.state = self.state
self.update_folder_sync_status(update)
@@ -397,12 +397,12 @@ def update_folder_sync_status(self, cb) -> None:
def set_stopped(self, db_session) -> None:
self.update_folder_sync_status(lambda s: s.stop_sync())
- def _report_initial_sync_start(self):
+ def _report_initial_sync_start(self) -> None:
with session_scope(self.namespace_id) as db_session:
q = db_session.query(Folder).get(self.folder_id)
q.initial_sync_start = datetime.utcnow()
- def _report_initial_sync_end(self):
+ def _report_initial_sync_end(self) -> None:
with session_scope(self.namespace_id) as db_session:
q = db_session.query(Folder).get(self.folder_id)
q.initial_sync_end = datetime.utcnow()
@@ -758,7 +758,7 @@ def download_and_commit_uids(self, crispin_client, uids): # noqa: ANN201
return len(new_uids)
- def _report_first_message(self):
+ def _report_first_message(self) -> None:
# Only record the "time to first message" in the inbox. Because users
# can add more folders at any time, "initial sync"-style metrics for
# other folders don't mean much.
@@ -782,7 +782,7 @@ def _report_first_message(self):
for metric in metrics:
statsd_client.timing(metric, latency)
- def _report_message_velocity(self, timedelta, num_uids):
+ def _report_message_velocity(self, timedelta, num_uids) -> None:
latency = (timedelta).total_seconds() * 1000
latency_per_uid = float(latency) / num_uids
metrics = [
@@ -1061,7 +1061,7 @@ def uidvalidity(self): # noqa: ANN201
return self._uidvalidity
@uidvalidity.setter
- def uidvalidity(self, value):
+ def uidvalidity(self, value) -> None:
self._update_imap_folder_info("uidvalidity", value)
self._uidvalidity = value
@@ -1072,7 +1072,7 @@ def uidnext(self): # noqa: ANN201
return self._uidnext
@uidnext.setter
- def uidnext(self, value):
+ def uidnext(self, value) -> None:
self._update_imap_folder_info("uidnext", value)
self._uidnext = value
@@ -1088,7 +1088,7 @@ def last_slow_refresh(self): # noqa: ANN201
return self._last_slow_refresh
@last_slow_refresh.setter
- def last_slow_refresh(self, value):
+ def last_slow_refresh(self, value) -> None:
self._update_imap_folder_info("last_slow_refresh", value)
self._last_slow_refresh = value
@@ -1099,7 +1099,7 @@ def highestmodseq(self): # noqa: ANN201
return self._highestmodseq
@highestmodseq.setter
- def highestmodseq(self, value):
+ def highestmodseq(self, value) -> None:
self._highestmodseq = value
self._update_imap_folder_info("highestmodseq", value)
@@ -1116,7 +1116,7 @@ def _load_imap_folder_info(self):
db_session.expunge(imapfolderinfo)
return imapfolderinfo
- def _update_imap_folder_info(self, attrname, value):
+ def _update_imap_folder_info(self, attrname, value) -> None:
with session_scope(self.namespace_id) as db_session:
imapfolderinfo = (
db_session.query(ImapFolderInfo)
diff --git a/inbox/mailsync/backends/imap/monitor.py b/inbox/mailsync/backends/imap/monitor.py
index d2e02344d..4f553deef 100644
--- a/inbox/mailsync/backends/imap/monitor.py
+++ b/inbox/mailsync/backends/imap/monitor.py
@@ -32,7 +32,9 @@ class ImapSyncMonitor(BaseMailSyncMonitor):
sync_engine_class: ClassVar[type[FolderSyncEngine]] = FolderSyncEngine
- def __init__(self, account, heartbeat=1, refresh_frequency=30) -> None:
+ def __init__(
+ self, account, heartbeat: int = 1, refresh_frequency: int = 30
+ ) -> None:
self.refresh_frequency = refresh_frequency
self.syncmanager_lock = BoundedSemaphore(1)
self.saved_remote_folders = None
diff --git a/inbox/mailsync/frontend.py b/inbox/mailsync/frontend.py
index f41d26514..50fcc95d1 100644
--- a/inbox/mailsync/frontend.py
+++ b/inbox/mailsync/frontend.py
@@ -36,7 +36,7 @@ def start(self) -> None:
{"request_handler": _QuietHandler},
)
- def _create_app_impl(self, app):
+ def _create_app_impl(self, app) -> None:
@app.route("/profile")
def profile():
if self.profiler is None:
@@ -47,7 +47,7 @@ def profile():
return resp
@app.route("/load")
- def load():
+ def load() -> str:
return "Load tracing disabled\n"
@app.route("/mem")
@@ -66,7 +66,7 @@ def __init__(self, sync_service, port, profile) -> None:
self.sync_service = sync_service
super().__init__(port, profile)
- def _create_app_impl(self, app):
+ def _create_app_impl(self, app) -> None:
super()._create_app_impl(app)
@app.route("/unassign", methods=["POST"])
@@ -93,5 +93,5 @@ def build_metadata():
class _QuietHandler(WSGIRequestHandler):
- def log_request(self, *args, **kwargs):
+ def log_request(self, *args, **kwargs) -> None:
"""Suppress request logging so as not to pollute application logs."""
diff --git a/inbox/mailsync/gc.py b/inbox/mailsync/gc.py
index 111ff70eb..d31c06bbd 100644
--- a/inbox/mailsync/gc.py
+++ b/inbox/mailsync/gc.py
@@ -76,7 +76,7 @@ def __init__(
self.name = f"{self.__class__.__name__}(account_id={account_id!r})"
- def _run(self):
+ def _run(self) -> None:
while True:
interruptible_threading.check_interrupted()
retry_with_logging(
@@ -85,7 +85,7 @@ def _run(self):
provider=self.provider_name,
)
- def _run_impl(self):
+ def _run_impl(self) -> None:
current_time = datetime.datetime.utcnow()
self.check(current_time)
self.gc_deleted_categories()
@@ -254,7 +254,7 @@ def _run(self):
interruptible_threading.check_interrupted()
return retry_with_logging(self._run_impl, account_id=self.account_id)
- def _run_impl(self):
+ def _run_impl(self) -> None:
self.log.info(
"Starting LabelRenameHandler", label_name=self.label_name
)
diff --git a/inbox/mailsync/service.py b/inbox/mailsync/service.py
index 501aa4a7e..6726ceb13 100644
--- a/inbox/mailsync/service.py
+++ b/inbox/mailsync/service.py
@@ -157,7 +157,7 @@ def run(self) -> None:
"stopped email sync monitors", count=len(self.email_sync_monitors)
)
- def _run_impl(self):
+ def _run_impl(self) -> None:
"""
Waits for notifications about Account migrations and checks for start/stop commands.
@@ -183,7 +183,7 @@ def _run_impl(self):
self._flush_private_queue()
self.poll()
- def _flush_private_queue(self):
+ def _flush_private_queue(self) -> None:
while True:
event = self.private_queue.receive_event(timeout=None)
if event is None:
diff --git a/inbox/models/account.py b/inbox/models/account.py
index d36d3da38..fd158696a 100644
--- a/inbox/models/account.py
+++ b/inbox/models/account.py
@@ -154,7 +154,7 @@ def emailed_events_calendar(self): # noqa: ANN201
return self._emailed_events_calendar
@emailed_events_calendar.setter
- def emailed_events_calendar(self, cal):
+ def emailed_events_calendar(self, cal) -> None:
self._emailed_events_calendar = cal
sync_host = Column(String(255), nullable=True)
@@ -279,7 +279,9 @@ def disable_sync(self, reason) -> None:
"USER", "unknown"
)
- def mark_invalid(self, reason="invalid credentials", scope="mail") -> None:
+ def mark_invalid(
+ self, reason: str = "invalid credentials", scope: str = "mail"
+ ) -> None:
"""
In the event that the credentials for this account are invalid,
update the status and sync flag accordingly. Should only be called
@@ -386,7 +388,7 @@ def should_send_event(obj): # noqa: ANN201
return hist.has_changes()
-def already_registered_listener(obj): # noqa: ANN201
+def already_registered_listener(obj) -> bool:
return getattr(obj, "_listener_state", None) is not None
@@ -405,7 +407,7 @@ def after_flush(session, flush_context) -> None:
)
def send_migration_events(obj_state):
- def f(session):
+ def f(session) -> None:
if obj_state["sent_event"]:
return
diff --git a/inbox/models/backends/imap.py b/inbox/models/backends/imap.py
index c80ea2edb..a773ecd77 100644
--- a/inbox/models/backends/imap.py
+++ b/inbox/models/backends/imap.py
@@ -59,7 +59,7 @@ def imap_endpoint(self): # noqa: ANN201
return self.provider_info["imap"]
@imap_endpoint.setter
- def imap_endpoint(self, endpoint):
+ def imap_endpoint(self, endpoint) -> None:
host, port = endpoint
self._imap_server_host = host
self._imap_server_port = int(port)
@@ -72,7 +72,7 @@ def smtp_endpoint(self): # noqa: ANN201
return self.provider_info["smtp"]
@smtp_endpoint.setter
- def smtp_endpoint(self, endpoint):
+ def smtp_endpoint(self, endpoint) -> None:
host, port = endpoint
self._smtp_server_host = host
self._smtp_server_port = int(port)
diff --git a/inbox/models/block.py b/inbox/models/block.py
index 93026c39f..923ac39a1 100644
--- a/inbox/models/block.py
+++ b/inbox/models/block.py
@@ -210,7 +210,7 @@ def data(self): # noqa: ANN201
return value
@data.setter
- def data(self, value):
+ def data(self, value) -> None:
assert value is not None
assert isinstance(value, bytes)
@@ -279,7 +279,7 @@ def thread_id(self): # noqa: ANN201
return self.message.thread_id
@property
- def is_attachment(self): # noqa: ANN201
+ def is_attachment(self) -> bool:
return self.content_disposition is not None
@property
diff --git a/inbox/models/data_processing.py b/inbox/models/data_processing.py
index dcb8abdc2..e3b4b2359 100644
--- a/inbox/models/data_processing.py
+++ b/inbox/models/data_processing.py
@@ -30,7 +30,7 @@ def contact_rankings(self): # noqa: ANN201
return json.loads(zlib.decompress(self._contact_rankings))
@contact_rankings.setter
- def contact_rankings(self, value):
+ def contact_rankings(self, value) -> None:
self._contact_rankings = zlib.compress(
json.dumps(value).encode("utf-8")
)
@@ -44,7 +44,7 @@ def contact_groups(self): # noqa: ANN201
return json.loads(zlib.decompress(self._contact_groups))
@contact_groups.setter
- def contact_groups(self, value):
+ def contact_groups(self, value) -> None:
self._contact_groups = zlib.compress(json.dumps(value).encode("utf-8"))
self.contact_groups_last_updated = datetime.datetime.now()
diff --git a/inbox/models/event.py b/inbox/models/event.py
index d858c91c2..52e4fe2bc 100644
--- a/inbox/models/event.py
+++ b/inbox/models/event.py
@@ -234,7 +234,7 @@ def when(self): # noqa: ANN201
return Time(start) if start == end else TimeSpan(start, end)
@when.setter
- def when(self, when):
+ def when(self, when) -> None:
if "time" in when:
self.start = self.end = time_parse(when["time"])
self.all_day = False
@@ -398,7 +398,7 @@ def organizer_name(self): # noqa: ANN201
return parsed_owner[0]
@property
- def is_recurring(self): # noqa: ANN201
+ def is_recurring(self) -> bool:
return self.recurrence is not None
@property
@@ -410,7 +410,7 @@ def cancelled(self): # noqa: ANN201
return self.status == "cancelled"
@cancelled.setter
- def cancelled(self, is_cancelled):
+ def cancelled(self, is_cancelled) -> None:
if is_cancelled:
self.status = "cancelled"
else:
diff --git a/inbox/models/folder.py b/inbox/models/folder.py
index f7f2f263f..9963f55d7 100644
--- a/inbox/models/folder.py
+++ b/inbox/models/folder.py
@@ -55,7 +55,7 @@ def canonical_name(self): # noqa: ANN201
return self._canonical_name
@canonical_name.setter
- def canonical_name(self, value):
+ def canonical_name(self, value) -> None:
value = value or ""
self._canonical_name = value
if self.category:
diff --git a/inbox/models/message.py b/inbox/models/message.py
index 058ee219a..4d8cad578 100644
--- a/inbox/models/message.py
+++ b/inbox/models/message.py
@@ -140,7 +140,7 @@ def thread(self): # noqa: ANN201
return self._thread
@thread.setter
- def thread(self, value):
+ def thread(self, value) -> None:
if value is not None and self._thread is not None:
self._thread.deleted_at = None
self._thread = value
@@ -200,7 +200,7 @@ def categories_changes(self): # noqa: ANN201
return self.state == "actions_pending"
@categories_changes.setter
- def categories_changes(self, has_changes):
+ def categories_changes(self, has_changes) -> None:
if has_changes is True:
self.state = "actions_pending"
else:
@@ -782,7 +782,7 @@ def propagated_attributes(self): # noqa: ANN201
return ["is_read", "is_starred", "messagecategories"]
@property
- def has_attached_events(self): # noqa: ANN201
+ def has_attached_events(self) -> bool:
return "text/calendar" in [p.block.content_type for p in self.parts]
@property
@@ -817,7 +817,7 @@ def from_public_id(
return q.params(public_id=public_id, namespace_id=namespace_id).one()
@classmethod
- def api_loading_options(cls, expand=False): # noqa: ANN206
+ def api_loading_options(cls, expand: bool = False): # noqa: ANN206
columns = [
"public_id",
"is_draft",
diff --git a/inbox/models/mixins.py b/inbox/models/mixins.py
index d87ddc762..4ad8b02c4 100644
--- a/inbox/models/mixins.py
+++ b/inbox/models/mixins.py
@@ -131,7 +131,7 @@ def email_address(cls): # noqa: ANN201, N805
return AddressComparator(cls._canonicalized_address)
@email_address.setter
- def email_address(self, value):
+ def email_address(self, value) -> None:
# Silently truncate if necessary. In practice, this may be too
# long if somebody put a super-long email into their contacts by
# mistake or something.
diff --git a/inbox/models/session.py b/inbox/models/session.py
index 3cbfd70bb..e4dd31e68 100644
--- a/inbox/models/session.py
+++ b/inbox/models/session.py
@@ -18,7 +18,9 @@
MAX_SANE_TRX_TIME_MS = 30000
-def two_phase_session(engine_map, versioned=True): # noqa: ANN201, D417
+def two_phase_session( # noqa: ANN201, D417
+ engine_map, versioned: bool = True
+):
"""
Returns a session that implements two-phase-commit.
@@ -39,7 +41,7 @@ def two_phase_session(engine_map, versioned=True): # noqa: ANN201, D417
return session
-def new_session(engine, versioned=True): # noqa: ANN201
+def new_session(engine, versioned: bool = True): # noqa: ANN201
"""Returns a session bound to the given engine.""" # noqa: D401
session = Session(bind=engine, autoflush=True, autocommit=False)
@@ -61,7 +63,7 @@ def new_session(engine, versioned=True): # noqa: ANN201
metric_name = f"db.{engine.url.database}.{modname}.{funcname}"
@event.listens_for(session, "after_begin")
- def after_begin(session, transaction, connection):
+ def after_begin(session, transaction, connection) -> None:
# It's okay to key on the session object here, because each session
# binds to only one engine/connection. If this changes in the
# future such that a session may encompass multiple engines, then
@@ -70,7 +72,7 @@ def after_begin(session, transaction, connection):
@event.listens_for(session, "after_commit")
@event.listens_for(session, "after_rollback")
- def end(session):
+ def end(session) -> None:
start_time = transaction_start_map.get(session)
if not start_time:
return
@@ -102,12 +104,12 @@ def configure_versioning(session): # noqa: ANN201
)
@event.listens_for(session, "before_flush")
- def before_flush(session, flush_context, instances):
+ def before_flush(session, flush_context, instances) -> None:
propagate_changes(session)
increment_versions(session)
@event.listens_for(session, "after_flush")
- def after_flush(session, flush_context):
+ def after_flush(session, flush_context) -> None:
"""
Hook to log revision snapshots. Must be post-flush in order to
grab object IDs on new objects.
@@ -128,7 +130,7 @@ def after_flush(session, flush_context):
@contextmanager
-def session_scope(id_, versioned=True): # noqa: ANN201
+def session_scope(id_, versioned: bool = True): # noqa: ANN201
"""
Provide a transactional scope around a series of operations.
@@ -198,7 +200,9 @@ def session_scope(id_, versioned=True): # noqa: ANN201
@contextmanager
-def session_scope_by_shard_id(shard_id, versioned=True): # noqa: ANN201
+def session_scope_by_shard_id( # noqa: ANN201
+ shard_id, versioned: bool = True
+):
key = shard_id << 48
with session_scope(key, versioned) as db_session:
diff --git a/inbox/models/thread.py b/inbox/models/thread.py
index ef150adfa..47dce6b10 100644
--- a/inbox/models/thread.py
+++ b/inbox/models/thread.py
@@ -200,7 +200,7 @@ def categories(self): # noqa: ANN201
return categories
@classmethod
- def api_loading_options(cls, expand=False): # noqa: ANN206
+ def api_loading_options(cls, expand: bool = False): # noqa: ANN206
message_columns = [
"public_id",
"is_draft",
diff --git a/inbox/models/util.py b/inbox/models/util.py
index 121b66fdd..c9822246d 100644
--- a/inbox/models/util.py
+++ b/inbox/models/util.py
@@ -137,7 +137,7 @@ class AccountDeletionErrror(Exception):
def batch_delete_namespaces(
- ids_to_delete, throttle=False, dry_run=False
+ ids_to_delete, throttle: bool = False, dry_run: bool = False
) -> None:
start = time.time()
@@ -160,7 +160,9 @@ def batch_delete_namespaces(
)
-def delete_namespace(namespace_id, throttle=False, dry_run=False) -> None:
+def delete_namespace(
+ namespace_id, throttle: bool = False, dry_run: bool = False
+) -> None:
"""
Delete all the data associated with a namespace from the database.
USE WITH CAUTION.
@@ -287,8 +289,13 @@ def delete_namespace(namespace_id, throttle=False, dry_run=False) -> None:
def _batch_delete(
- engine, table, column_id_filters, account_id, throttle=False, dry_run=False
-):
+ engine,
+ table,
+ column_id_filters,
+ account_id,
+ throttle: bool = False,
+ dry_run: bool = False,
+) -> None:
(column, id_) = column_id_filters
count = engine.execute(
f"SELECT COUNT(*) FROM {table} WHERE {column}={id_};" # noqa: S608
@@ -397,7 +404,12 @@ def check_throttle() -> bool:
def purge_transactions(
- shard_id, days_ago=60, limit=1000, throttle=False, dry_run=False, now=None
+ shard_id,
+ days_ago: int = 60,
+ limit: int = 1000,
+ throttle: bool = False,
+ dry_run: bool = False,
+ now=None,
) -> None:
start = "now()"
if now is not None:
diff --git a/inbox/scheduling/event_queue.py b/inbox/scheduling/event_queue.py
index 892fcbbb0..1ec910641 100644
--- a/inbox/scheduling/event_queue.py
+++ b/inbox/scheduling/event_queue.py
@@ -12,7 +12,7 @@
SOCKET_TIMEOUT = 30
-def _get_redis_client(host=None, port=6379, db=1):
+def _get_redis_client(host=None, port: int = 6379, db: int = 1):
return StrictRedis(
host=host,
port=port,
diff --git a/inbox/search/backends/gmail.py b/inbox/search/backends/gmail.py
index a7fbb8547..e5ed138f3 100644
--- a/inbox/search/backends/gmail.py
+++ b/inbox/search/backends/gmail.py
@@ -33,7 +33,7 @@ def __init__(self, account) -> None:
)
def search_messages( # noqa: ANN201
- self, db_session, search_query, offset=0, limit=40
+ self, db_session, search_query, offset: int = 0, limit: int = 40
):
# We need to get the next limit + offset terms if we want to
# offset results from the db.
@@ -71,7 +71,7 @@ def g():
return g
def search_threads( # noqa: ANN201
- self, db_session, search_query, offset=0, limit=40
+ self, db_session, search_query, offset: int = 0, limit: int = 40
):
# We need to get the next limit + offset terms if we want to
# offset results from the db.
diff --git a/inbox/search/backends/imap.py b/inbox/search/backends/imap.py
index 88f20d59f..310d53e2e 100644
--- a/inbox/search/backends/imap.py
+++ b/inbox/search/backends/imap.py
@@ -65,11 +65,11 @@ def _open_crispin_connection(self, db_session):
readonly=True,
)
- def _close_crispin_connection(self):
+ def _close_crispin_connection(self) -> None:
self.crispin_client.logout()
def search_messages( # noqa: ANN201
- self, db_session, search_query, offset=0, limit=40
+ self, db_session, search_query, offset: int = 0, limit: int = 40
):
imap_uids = []
for uids in self._search(db_session, search_query):
@@ -115,7 +115,7 @@ def g():
return g
def search_threads( # noqa: ANN201
- self, db_session, search_query, offset=0, limit=40
+ self, db_session, search_query, offset: int = 0, limit: int = 40
):
imap_uids = []
for uids in self._search(db_session, search_query):
diff --git a/inbox/security/oracles.py b/inbox/security/oracles.py
index 624225413..921d3d043 100644
--- a/inbox/security/oracles.py
+++ b/inbox/security/oracles.py
@@ -1,4 +1,5 @@
import enum # Python 3 style enums from enum34
+from types import TracebackType
import nacl.secret
import nacl.utils
@@ -57,7 +58,12 @@ def __init__(self, secret_name) -> None:
def __enter__(self): # noqa: ANN204
return self
- def __exit__(self, exc_type, exc_obj, exc_tb): # noqa: ANN204
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_obj: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> None:
self.close()
def __del__(self) -> None:
@@ -65,7 +71,7 @@ def __del__(self) -> None:
return
self.close()
- def close(self):
+ def close(self) -> None:
if self._closed:
# already closed
return
diff --git a/inbox/sendmail/base.py b/inbox/sendmail/base.py
index 17b766c03..b37cac838 100644
--- a/inbox/sendmail/base.py
+++ b/inbox/sendmail/base.py
@@ -276,7 +276,7 @@ def update_draft( # noqa: ANN201
Update draft with new attributes.
"""
- def update(attr, value=None):
+ def update(attr, value=None) -> None:
if value is not None:
setattr(draft, attr, value)
@@ -394,7 +394,7 @@ def generate_attachments(message, blocks): # noqa: ANN201
return attachment_dicts
-def _set_reply_headers(new_message, previous_message):
+def _set_reply_headers(new_message, previous_message) -> None:
"""
When creating a draft in reply to a thread, set the In-Reply-To and
References headers appropriately, if possible.
diff --git a/inbox/sendmail/smtp/postel.py b/inbox/sendmail/smtp/postel.py
index 9dbb08ef5..fdc32a9f6 100644
--- a/inbox/sendmail/smtp/postel.py
+++ b/inbox/sendmail/smtp/postel.py
@@ -3,6 +3,7 @@
import re
import smtplib
import ssl
+from types import TracebackType
from inbox.logging import get_logger
@@ -146,7 +147,12 @@ def __init__(
def __enter__(self): # noqa: ANN204
return self
- def __exit__(self, type, value, traceback): # noqa: ANN204
+ def __exit__(
+ self,
+ type: type[BaseException] | None,
+ value: BaseException | None,
+ traceback: TracebackType | None,
+ ) -> None:
try:
self.connection.quit()
except smtplib.SMTPServerDisconnected:
@@ -203,7 +209,7 @@ def _upgrade_connection(self):
)
# OAuth2 authentication
- def _smtp_oauth2_try_refresh(self):
+ def _smtp_oauth2_try_refresh(self) -> None:
with session_scope(self.account_id) as db_session:
account = db_session.query(ImapAccount).get(self.account_id)
self.auth_token = token_manager.get_token(
diff --git a/inbox/sqlalchemy_ext/util.py b/inbox/sqlalchemy_ext/util.py
index 899ba5caf..6af47b98b 100644
--- a/inbox/sqlalchemy_ext/util.py
+++ b/inbox/sqlalchemy_ext/util.py
@@ -205,7 +205,7 @@ def update(self, *args, **kwargs) -> None:
def __getstate__(self): # noqa: ANN204
return dict(self)
- def __setstate__(self, state): # noqa: ANN204
+ def __setstate__(self, state) -> None:
self.update(state)
diff --git a/inbox/sync/base_sync.py b/inbox/sync/base_sync.py
index 9a01f60e8..220fa51e9 100644
--- a/inbox/sync/base_sync.py
+++ b/inbox/sync/base_sync.py
@@ -35,7 +35,7 @@ def __init__(
folder_id,
folder_name,
provider_name,
- poll_frequency=1,
+ poll_frequency: int = 1,
scope=None,
) -> None:
self.account_id = account_id
@@ -59,7 +59,7 @@ def __init__(
self.name = f"{self.__class__.__name__}(account_id={account_id!r})"
- def _run(self):
+ def _run(self) -> None:
# Bind thread-local logging context.
self.log = self.log.new(account_id=self.account_id)
try:
@@ -83,7 +83,7 @@ def _run(self):
account = db_session.query(Account).get(self.account_id)
account.mark_invalid(scope=self.scope)
- def _run_impl(self):
+ def _run_impl(self) -> None:
try:
self.sync()
self.heartbeat_status.publish(state="poll")
diff --git a/inbox/transactions/actions.py b/inbox/transactions/actions.py
index 99544d738..5062cd55e 100644
--- a/inbox/transactions/actions.py
+++ b/inbox/transactions/actions.py
@@ -76,7 +76,7 @@
}
-def action_uses_crispin_client(action): # noqa: ANN201
+def action_uses_crispin_client(action) -> bool:
return action in MAIL_ACTION_FUNCTION_MAP
@@ -103,11 +103,11 @@ def __init__(
syncback_id,
process_number,
total_processes,
- poll_interval=1,
- retry_interval=120,
+ poll_interval: int = 1,
+ retry_interval: int = 120,
num_workers=NUM_PARALLEL_ACCOUNTS,
- batch_size=20,
- fetch_batch_size=100,
+ batch_size: int = 20,
+ fetch_batch_size: int = 100,
) -> None:
self.process_number = process_number
self.total_processes = total_processes
@@ -163,7 +163,7 @@ def __init__(
self.running_action_ids = set()
super().__init__()
- def _has_recent_move_action(self, db_session, log_entries):
+ def _has_recent_move_action(self, db_session, log_entries) -> bool:
"""
Determines if we recently completed a move action. Since Nylas doesn't
update local UID state after completing an action, we space
@@ -449,7 +449,7 @@ def _batch_log_entries(self, db_session, log_entries):
)
return batch_task
- def _process_log(self):
+ def _process_log(self) -> None:
for key in self.keys:
with session_scope_by_shard_id(key) as db_session:
# Get the list of namespace ids with pending actions
@@ -491,14 +491,14 @@ def _process_log(self):
if task is not None:
self.task_queue.put(task)
- def _restart_workers(self):
+ def _restart_workers(self) -> None:
while len(self.workers) < self.num_workers:
worker = SyncbackWorker(self)
self.workers.append(worker)
self.num_idle_workers += 1
worker.start()
- def _run_impl(self):
+ def _run_impl(self) -> None:
self._restart_workers()
self._process_log()
# Wait for a worker to finish or for the fixed poll_interval,
@@ -513,7 +513,7 @@ def stop(self) -> None:
self.keep_running = False
kill_all(self.workers)
- def _run(self):
+ def _run(self) -> None:
self.log.info(
"Starting syncback service",
process_num=self.process_number,
@@ -606,7 +606,7 @@ def __init__(
account_id,
provider,
service,
- retry_interval=30,
+ retry_interval: int = 30,
extra_args=None,
) -> None:
self.parent_service = weakref.ref(service)
@@ -658,7 +658,7 @@ def try_merge_with(self, other): # noqa: ANN201
)
return None
- def _log_to_statsd(self, action_log_status, latency=None):
+ def _log_to_statsd(self, action_log_status, latency=None) -> None:
metric_names = [
f"syncback.overall.{action_log_status}",
f"syncback.providers.{self.provider}.{action_log_status}",
@@ -801,7 +801,7 @@ def _mark_action_as_successful(
self._log_to_statsd(action_log_entry.status, latency)
return (latency, func_latency)
- def _mark_action_as_failed(self, action_log_entry, db_session):
+ def _mark_action_as_failed(self, action_log_entry, db_session) -> None:
self.log.critical("Max retries reached, giving up.", exc_info=True)
action_log_entry.status = "failed"
self._log_to_statsd(action_log_entry.status)
@@ -840,13 +840,13 @@ def execute(self) -> None:
class SyncbackWorker(InterruptibleThread):
- def __init__(self, parent_service, task_timeout=60) -> None:
+ def __init__(self, parent_service, task_timeout: int = 60) -> None:
self.parent_service = weakref.ref(parent_service)
self.task_timeout = task_timeout
self.log = logger.new(component="syncback-worker")
super().__init__()
- def _run(self):
+ def _run(self) -> None:
while self.parent_service().keep_running:
task = interruptible_threading.queue_get(
self.parent_service().task_queue
diff --git a/inbox/transactions/delta_sync.py b/inbox/transactions/delta_sync.py
index e5d9721d4..d913e1158 100644
--- a/inbox/transactions/delta_sync.py
+++ b/inbox/transactions/delta_sync.py
@@ -103,11 +103,11 @@ def format_transactions_after_pointer( # noqa: ANN201, D417
result_limit,
exclude_types=None,
include_types=None,
- exclude_folders=True,
- exclude_metadata=True,
- exclude_account=True,
- expand=False,
- is_n1=False,
+ exclude_folders: bool = True,
+ exclude_metadata: bool = True,
+ exclude_account: bool = True,
+ expand: bool = False,
+ is_n1: bool = False,
):
"""
Return a pair (deltas, new_pointer), where deltas is a list of change
@@ -292,11 +292,11 @@ def streaming_change_generator( # noqa: ANN201, D417
transaction_pointer,
exclude_types=None,
include_types=None,
- exclude_folders=True,
- exclude_metadata=True,
- exclude_account=True,
- expand=False,
- is_n1=False,
+ exclude_folders: bool = True,
+ exclude_metadata: bool = True,
+ exclude_account: bool = True,
+ expand: bool = False,
+ is_n1: bool = False,
):
"""
Poll the transaction log for the given `namespace_id` until `timeout`
diff --git a/inbox/util/blockstore.py b/inbox/util/blockstore.py
index 79e34371b..a5990282b 100644
--- a/inbox/util/blockstore.py
+++ b/inbox/util/blockstore.py
@@ -210,7 +210,9 @@ def _save_to_s3_bucket(
statsd_client.timing("s3_blockstore.save_latency", latency_millis)
-def get_from_blockstore(data_sha256, *, check_sha=True) -> bytes | None:
+def get_from_blockstore(
+ data_sha256, *, check_sha: bool = True
+) -> bytes | None:
if STORE_MSG_ON_S3:
value = _get_from_s3(data_sha256)
else:
@@ -367,7 +369,7 @@ def _delete_from_s3_bucket(
statsd_client.timing("s3_blockstore.delete_latency", latency_millis)
-def _delete_from_disk(data_sha256):
+def _delete_from_disk(data_sha256) -> None:
if not data_sha256:
return
diff --git a/inbox/util/concurrency.py b/inbox/util/concurrency.py
index cdb7a0f51..df85eb963 100644
--- a/inbox/util/concurrency.py
+++ b/inbox/util/concurrency.py
@@ -74,7 +74,7 @@ class filters.
"Can't include exception classes in both fail_on and retry_on"
)
- def should_retry_on(exc):
+ def should_retry_on(exc) -> bool:
if fail_classes and isinstance(exc, tuple(fail_classes)):
return False
if retry_classes and not isinstance(exc, tuple(retry_classes)):
@@ -119,7 +119,7 @@ def retry_with_logging( # noqa: ANN201
# http://stackoverflow.com/questions/7935966/python-overwriting-variables-in-nested-functions
occurrences = [0]
- def callback(e):
+ def callback(e) -> None:
is_transient = isinstance(e, TRANSIENT_NETWORK_ERRS)
mysql_error = None
diff --git a/inbox/util/misc.py b/inbox/util/misc.py
index b38f0eed3..46a4354f8 100644
--- a/inbox/util/misc.py
+++ b/inbox/util/misc.py
@@ -3,6 +3,7 @@
from datetime import datetime
from email.utils import mktime_tz, parsedate_tz
from importlib import import_module
+from types import TracebackType
from inbox.providers import providers
from inbox.util.file import iter_module_names
@@ -12,7 +13,12 @@ class DummyContextManager:
def __enter__(self): # noqa: ANN204
return None
- def __exit__(self, exc_type, exc_value, traceback): # noqa: ANN204
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: TracebackType | None,
+ ) -> bool:
return False
@@ -163,7 +169,9 @@ def cleanup_subject(subject_str): # noqa: ANN201
# IMAP doesn't support nested folders and instead encodes paths inside folder
# names.
# imap_folder_path converts a "/" delimited path to an IMAP compatible path.
-def imap_folder_path(path, separator=".", prefix=""): # noqa: ANN201
+def imap_folder_path( # noqa: ANN201
+ path, separator: str = ".", prefix: str = ""
+):
folders = [folder for folder in path.split("/") if folder != ""]
res = None
@@ -190,7 +198,9 @@ def strip_prefix(path, prefix): # noqa: ANN201
# fs_folder_path converts an IMAP compatible path to a "/" delimited path.
-def fs_folder_path(path, separator=".", prefix=""): # noqa: ANN201
+def fs_folder_path( # noqa: ANN201
+ path, separator: str = ".", prefix: str = ""
+):
if prefix:
path = strip_prefix(path, prefix)
diff --git a/inbox/util/rdb.py b/inbox/util/rdb.py
index 60c8a45a4..eaeefff16 100644
--- a/inbox/util/rdb.py
+++ b/inbox/util/rdb.py
@@ -99,13 +99,13 @@ def terminate(self) -> None:
except OSError:
return
- def raw_input(self, prompt=""): # noqa: ANN201
+ def raw_input(self, prompt: str = ""): # noqa: ANN201
self.handle.write(prompt)
self.handle.flush()
return self.handle.readline()
-def break_to_interpreter(host="localhost", port=None) -> None:
+def break_to_interpreter(host: str = "localhost", port=None) -> None:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
diff --git a/inbox/util/testutils.py b/inbox/util/testutils.py
index cd8164360..b7f405034 100644
--- a/inbox/util/testutils.py
+++ b/inbox/util/testutils.py
@@ -83,7 +83,7 @@ def __init__(self) -> None:
Literal["mx", "ns"], dict[str, dict[str, str] | list[str]]
] = {"mx": {}, "ns": {}}
- def _load_records(self, filename):
+ def _load_records(self, filename) -> None:
self._registry = json.loads(get_data(filename))
def query(self, domain, record_type): # noqa: ANN201
@@ -151,10 +151,10 @@ def __init__(self) -> None:
self.logins = {}
self.error_message = ""
- def _add_login(self, email, password):
+ def _add_login(self, email, password) -> None:
self.logins[email] = password
- def _set_error_message(self, message):
+ def _set_error_message(self, message) -> None:
self.error_message = message
def login(self, email, password) -> None:
@@ -164,7 +164,9 @@ def login(self, email, password) -> None:
def logout(self) -> None:
pass
- def list_folders(self, directory="", pattern="*"): # noqa: ANN201
+ def list_folders( # noqa: ANN201
+ self, directory: str = "", pattern: str = "*"
+ ):
return [(b"\\All", b"/", "[Gmail]/All Mail")]
def has_capability(self, capability) -> bool:
@@ -208,7 +210,9 @@ def search(self, criteria): # noqa: ANN201
return [u for u, v in uid_dict.items() if v[criteria[0]] == thrid]
raise ValueError(f"unsupported test criteria: {criteria!r}")
- def select_folder(self, folder_name, readonly=False): # noqa: ANN201
+ def select_folder( # noqa: ANN201
+ self, folder_name, readonly: bool = False
+ ):
self.selected_folder = folder_name
return self.folder_status(folder_name)
@@ -242,7 +246,13 @@ def fetch(self, items, data, modifiers=None): # noqa: ANN201
return resp
def append(
- self, folder_name, mimemsg, flags, date, x_gm_msgid=0, x_gm_thrid=0
+ self,
+ folder_name,
+ mimemsg,
+ flags,
+ date,
+ x_gm_msgid: int = 0,
+ x_gm_thrid: int = 0,
) -> None:
uid_dict = self._data[folder_name]
uidnext = max(uid_dict) if uid_dict else 1
@@ -278,7 +288,7 @@ def folder_status(self, folder_name, data=None): # noqa: ANN201
)
return resp
- def delete_messages(self, uids, silent=False) -> None:
+ def delete_messages(self, uids, silent: bool = False) -> None:
for u in uids:
del self._data[self.selected_folder][u]
diff --git a/migrations/versions/045_new_password_storage.py b/migrations/versions/045_new_password_storage.py
index 55b86a83d..79004fa34 100644
--- a/migrations/versions/045_new_password_storage.py
+++ b/migrations/versions/045_new_password_storage.py
@@ -69,7 +69,7 @@ class EASAccount(Account):
__table__ = Base.metadata.tables["easaccount"]
@property
- def _keyfile(self, create_dir=True): # noqa: PLR0206
+ def _keyfile(self, create_dir: bool = True): # noqa: PLR0206
assert self.key
assert KEY_DIR
diff --git a/migrations/versions/072_recompute_snippets.py b/migrations/versions/072_recompute_snippets.py
index f265529a8..f07beabad 100644
--- a/migrations/versions/072_recompute_snippets.py
+++ b/migrations/versions/072_recompute_snippets.py
@@ -42,7 +42,7 @@ def upgrade() -> None:
class Message(Base):
__table__ = Base.metadata.tables["message"]
- def calculate_html_snippet(msg, text):
+ def calculate_html_snippet(msg, text) -> None:
text = (
        text.replace("<br>", " ")
        .replace("<br/>", " ")
@@ -51,7 +51,7 @@ def calculate_html_snippet(msg, text):
text = strip_tags(text)
calculate_plaintext_snippet(msg, text)
- def calculate_plaintext_snippet(msg, text):
+ def calculate_plaintext_snippet(msg, text) -> None:
msg.snippet = " ".join(text.split())[:SNIPPET_LENGTH]
with session_scope(versioned=False) as db_session: