Format the codebase (#23)
AlecRosenbaum authored Aug 18, 2020
1 parent 7eb7ff7 commit 9022236
Showing 501 changed files with 24,504 additions and 18,294 deletions.
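
Nearly everything below is mechanical output from the black formatter: single-quoted strings become double-quoted, and calls that overflow black's 88-column limit are rewrapped. A minimal, self-contained sketch of both idioms (the query helper is a hypothetical stand-in, not code from this repository):

def query(*columns):
    # Hypothetical stand-in for a call such as db_session.query(...).
    return list(columns)

# Pre-black style, as seen on the removed (-) lines of this diff:
#     result = query('namespace_id',
#                    'display_name',
#                    'name')
# After black: double quotes, with arguments collapsed onto one line when
# the call fits within 88 columns, and rewrapped as a block when it does not.
result = query("namespace_id", "display_name", "name")
assert result == ["namespace_id", "display_name", "name"]
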
24 changes: 24 additions & 0 deletions .circleci/config.yml
@@ -1,5 +1,22 @@
 version: 2.1
 jobs:
+  static-code-analysis:
+    docker:
+      - image: circleci/python:3.6
+    working_directory: ~/code
+    steps:
+      - checkout
+
+      - run:
+          name: Prepare Environment
+          command: |
+            sudo -E pip install --no-deps -r requirements_lint.txt
+      - run:
+          name: black
+          when: always
+          command: black --check .
+
   build:
     machine:
       image: ubuntu-1604:201903-01
@@ -20,3 +37,10 @@ jobs:
             && bin/wait-for-it.sh mysql:3306 \
             && NYLAS_ENV=test py.test inbox/test/
             '
+workflows:
+  version: 2
+  workflow:
+    jobs:
+      - static-code-analysis
+      - build
111 changes: 70 additions & 41 deletions bin/backfix-duplicate-categories.py
@@ -16,7 +16,7 @@
 from sqlalchemy.sql import exists, and_

 configure_logging()
-log = get_logger(purpose='duplicate-category-backfill')
+log = get_logger(purpose="duplicate-category-backfill")


 def backfix_shard(shard_id, dry_run):
@@ -27,21 +27,28 @@ def backfix_shard(shard_id, dry_run):
         # is indeterminate. So we find the duplicate
         # (namespace_id, display_name, name) pairs and use them to query
         # for specific Category rows
-        category_query = db_session.query(Category.namespace_id,
-                                          Category.display_name,
-                                          Category.name)
-
-        duplicate_attrs = category_query. \
-            group_by(Category.display_name,
-                     Category.namespace_id,
-                     Category.name).having(
-                         func.count(Category.id) > 1).all()
+        category_query = db_session.query(
+            Category.namespace_id, Category.display_name, Category.name
+        )
+
+        duplicate_attrs = (
+            category_query.group_by(
+                Category.display_name, Category.namespace_id, Category.name
+            )
+            .having(func.count(Category.id) > 1)
+            .all()
+        )

     for namespace_id, display_name, name in duplicate_attrs:
-        duplicates = db_session.query(Category.id). \
-            filter(Category.namespace_id == namespace_id,
-                   Category.display_name == display_name,
-                   Category.name == name).all()
+        duplicates = (
+            db_session.query(Category.id)
+            .filter(
+                Category.namespace_id == namespace_id,
+                Category.display_name == display_name,
+                Category.name == name,
+            )
+            .all()
+        )

         # duplicates is an array of tuples where each tuple is
         # (Category.id,). We flatten the tuples here so that each item in
@@ -68,31 +75,37 @@ def backfix_shard(shard_id, dry_run):
         # Iterate over all of the duplicate categories except master
         for category_id in grouped_categories[1:]:
             with session_scope_by_shard_id(shard_id) as db_session:
-                associated_messages = db_session.query(exists().where(
-                    MessageCategory.category_id == category_id)).scalar()
+                associated_messages = db_session.query(
+                    exists().where(MessageCategory.category_id == category_id)
+                ).scalar()

                 # if category has messages, they need to be de-duped
                 # and consolidated
                 if associated_messages:
-                    log.info('Category has associated messages',
-                             category_id=category_id)
+                    log.info(
+                        "Category has associated messages", category_id=category_id
+                    )
                     categories_with_messages.append(category_id)

                 # if category does not have messages, it can be deleted
                 else:
                     categories_to_delete.append(category_id)
-                    log.info('Category does not have associated messages',
-                             category_id=category_id)
+                    log.info(
+                        "Category does not have associated messages",
+                        category_id=category_id,
+                    )

         if len(categories_with_messages) > 0:
-            log.info('Consolidating messages into category',
-                     category_id=master_id)
+            log.info("Consolidating messages into category", category_id=master_id)

             for category_id in categories_with_messages:
                 try:
                     with session_scope_by_shard_id(shard_id) as db_session:
-                        messagecategories = db_session.query(MessageCategory). \
-                            filter(MessageCategory.category_id == category_id).all()  # noqa
+                        messagecategories = (
+                            db_session.query(MessageCategory)
+                            .filter(MessageCategory.category_id == category_id)
+                            .all()
+                        )  # noqa

                         for mc in messagecategories:
                             # Its possible for a message to be associated with
@@ -103,33 +116,45 @@ def backfix_shard(shard_id, dry_run):
                             # see such an object exists. If it does, we
                             # point the MessageCategory to the master
                             # category. If it does not, then simply delete it
-                            mc_exists = db_session.query(exists().where(and_(
-                                MessageCategory.category_id == master_id,
-                                MessageCategory.message_id == mc.message_id)))\
-                                .scalar()
+                            mc_exists = db_session.query(
+                                exists().where(
+                                    and_(
+                                        MessageCategory.category_id == master_id,
+                                        MessageCategory.message_id == mc.message_id,
+                                    )
+                                )
+                            ).scalar()

                             if not dry_run:
                                 # If mc_exists == True, then there's a
                                 # MessageCategory associated with the master
                                 # and the current category, so we can delete
                                 # the current category
                                 if mc_exists:
-                                    db_session.query(MessageCategory).filter_by(id=mc.id).delete()
+                                    db_session.query(MessageCategory).filter_by(
+                                        id=mc.id
+                                    ).delete()
                                 else:
                                     # Master does not have a MessageCategory
                                     # for this message. Update this one to
                                     # point to the master
                                     mc.category_id = master_id
                                     db_session.commit()

-                            log.info('Updated MessageCategory', mc_id=mc.id,
-                                     old_category_id=mc.category_id,
-                                     new_category_id=master_id)
+                            log.info(
+                                "Updated MessageCategory",
+                                mc_id=mc.id,
+                                old_category_id=mc.category_id,
+                                new_category_id=master_id,
+                            )

                         categories_to_delete.append(category_id)
                 except Exception as e:
-                    log.critical('Exception encountered while consolidating'
-                                 ' messagecategories', e=str(e))
+                    log.critical(
+                        "Exception encountered while consolidating"
+                        " messagecategories",
+                        e=str(e),
+                    )
                     raise e

         # We REALLY don't want to delete the category we consolidated all
@@ -138,28 +163,32 @@ def backfix_shard(shard_id, dry_run):

         for category_id in categories_to_delete:
             if dry_run:
-                log.info('Delete category', category_id=category_id)
+                log.info("Delete category", category_id=category_id)
                 continue

             with session_scope_by_shard_id(shard_id) as db_session:
                 db_session.query(Category).filter_by(id=category_id).delete()
-                log.info('Deleted category', category_id=category_id)
+                log.info("Deleted category", category_id=category_id)

             categories_to_delete.remove(category_id)

-    log.info('Completed category migration on shard',
-             categories_affected=categories_affected, shard_id=shard_id)
+    log.info(
+        "Completed category migration on shard",
+        categories_affected=categories_affected,
+        shard_id=shard_id,
+    )


 @click.command()
-@click.option('--shard-id', type=int, default=None)
-@click.option('--dry-run', is_flag=True)
+@click.option("--shard-id", type=int, default=None)
+@click.option("--dry-run", is_flag=True)
 def main(shard_id, dry_run):
     if shard_id is not None:
         backfix_shard(shard_id, dry_run)
     else:
         for shard_id in engine_manager.engines:
             backfix_shard(shard_id, dry_run)

-if __name__ == '__main__':
+
+if __name__ == "__main__":
     main()
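
Stepping back from the formatting noise, the script's consolidation strategy is worth stating plainly: for each set of duplicate (namespace_id, display_name, name) categories it keeps the first as master, re-points each duplicate's MessageCategory rows at the master unless the master already covers that message, and deletes the emptied duplicates. A self-contained sketch of that logic over plain dicts (hypothetical data, no SQLAlchemy):

def consolidate(duplicate_ids, message_categories):
    # duplicate_ids: ids sharing one (namespace_id, display_name, name).
    # message_categories: list of {"category_id": ..., "message_id": ...}.
    master_id, rest = duplicate_ids[0], set(duplicate_ids[1:])
    master_messages = {
        mc["message_id"] for mc in message_categories
        if mc["category_id"] == master_id
    }
    surviving = []
    for mc in message_categories:
        if mc["category_id"] in rest:
            if mc["message_id"] in master_messages:
                continue  # master already covers this message: drop the row
            mc["category_id"] = master_id  # re-point to master
            master_messages.add(mc["message_id"])
        surviving.append(mc)
    return master_id, surviving

master, mcs = consolidate(
    [1, 2, 3],
    [{"category_id": 1, "message_id": 10},
     {"category_id": 2, "message_id": 10},
     {"category_id": 3, "message_id": 11}],
)
assert master == 1
assert mcs == [
    {"category_id": 1, "message_id": 10},
    {"category_id": 1, "message_id": 11},
]
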
30 changes: 17 additions & 13 deletions bin/backfix-generic-imap-separators.py
@@ -7,17 +7,20 @@

 from nylas.logging import get_logger, configure_logging
 from inbox.models.backends.generic import GenericAccount
-from inbox.models.session import (session_scope, global_session_scope,
-                                  session_scope_by_shard_id)
+from inbox.models.session import (
+    session_scope,
+    global_session_scope,
+    session_scope_by_shard_id,
+)

 configure_logging()
-log = get_logger(purpose='separator-backfix')
+log = get_logger(purpose="separator-backfix")


 @click.command()
-@click.option('--min-id', type=int, default=None)
-@click.option('--max-id', type=int, default=None)
-@click.option('--shard-id', type=int, default=None)
+@click.option("--min-id", type=int, default=None)
+@click.option("--max-id", type=int, default=None)
+@click.option("--shard-id", type=int, default=None)
 def main(min_id, max_id, shard_id):
     generic_accounts = []
     failed = []
@@ -26,15 +29,14 @@ def main(min_id, max_id, shard_id):
         # Get the list of running Gmail accounts.
         with global_session_scope() as db_session:
             generic_accounts = db_session.query(GenericAccount).filter(
-                GenericAccount.sync_state == 'running')
+                GenericAccount.sync_state == "running"
+            )

             if min_id is not None:
-                generic_accounts = generic_accounts.filter(
-                    GenericAccount.id > min_id)
+                generic_accounts = generic_accounts.filter(GenericAccount.id > min_id)

             if max_id is not None:
-                generic_accounts = generic_accounts.filter(
-                    GenericAccount.id <= max_id)
+                generic_accounts = generic_accounts.filter(GenericAccount.id <= max_id)

             generic_accounts = [acc.id for acc in generic_accounts]
@@ -43,7 +45,8 @@ def main(min_id, max_id, shard_id):
     elif shard_id is not None:
         with session_scope_by_shard_id(shard_id) as db_session:
             generic_accounts = db_session.query(GenericAccount).filter(
-                GenericAccount.sync_state == 'running')
+                GenericAccount.sync_state == "running"
+            )

             generic_accounts = [acc.id for acc in generic_accounts]
             db_session.expunge_all()
@@ -70,5 +73,6 @@ def main(min_id, max_id, shard_id):
     print "Failed accounts:"
     print failed

-if __name__ == '__main__':
+
+if __name__ == "__main__":
     main()
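
The --min-id/--max-id flags implement a half-open (min_id, max_id] window, matching the GenericAccount.id > min_id and GenericAccount.id <= max_id filters above, so successive runs can resume by passing the last processed id as --min-id. The same selection logic over plain ints (illustrative only):

def window(ids, min_id=None, max_id=None):
    # Mirrors the two filters above: strictly greater than min_id,
    # less than or equal to max_id.
    if min_id is not None:
        ids = [i for i in ids if i > min_id]
    if max_id is not None:
        ids = [i for i in ids if i <= max_id]
    return ids

assert window([1, 2, 3, 4, 5], min_id=2, max_id=4) == [3, 4]
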
44 changes: 28 additions & 16 deletions bin/create-event-contact-associations.py
@@ -16,7 +16,7 @@
 from sqlalchemy import asc

 configure_logging()
-log = get_logger(purpose='create-event-contact-associations')
+log = get_logger(purpose="create-event-contact-associations")


 def process_shard(shard_id, dry_run, id_start=0):
@@ -25,8 +25,9 @@ def process_shard(shard_id, dry_run, id_start=0):
     rps = 6 / batch_size
     window = 5

-    throttle = limitlion.throttle_wait('create-event-contact-associations',
-                                       rps=rps, window=window)
+    throttle = limitlion.throttle_wait(
+        "create-event-contact-associations", rps=rps, window=window
+    )

     with session_scope_by_shard_id(shard_id) as db_session:
         # NOTE: The session is implicitly autoflushed, which ensures no
@@ -52,39 +53,50 @@ def process_shard(shard_id, dry_run, id_start=0):
             id_start = event.id

             if n % batch_size == 0:
-                log.info('progress', shard_id=shard_id, id_start=id_start,
-                         n=n, n_skipped=n_skipped, n_updated=n_updated)
+                log.info(
+                    "progress",
+                    shard_id=shard_id,
+                    id_start=id_start,
+                    n=n,
+                    n_skipped=n_skipped,
+                    n_updated=n_updated,
+                )

             if event.contacts:
                 continue

             if not dry_run:
                 event.contacts = []
-                update_contacts_from_event(db_session, event,
-                                           event.namespace_id)
+                update_contacts_from_event(db_session, event, event.namespace_id)
                 n_updated += 1

                 if n_updated % batch_size == 0:
                     db_session.commit()
-                    log.info('committed', shard_id=shard_id, n=n,
-                             n_skipped=n_skipped, n_updated=n_updated)
+                    log.info(
+                        "committed",
+                        shard_id=shard_id,
+                        n=n,
+                        n_skipped=n_skipped,
+                        n_updated=n_updated,
+                    )
                     throttle()


-    log.info('finished', shard_id=shard_id, n=n, n_skipped=n_skipped,
-             n_updated=n_updated)
+    log.info(
+        "finished", shard_id=shard_id, n=n, n_skipped=n_skipped, n_updated=n_updated
+    )


 @click.command()
-@click.option('--shard-id', type=int, default=None)
-@click.option('--id-start', type=int, default=0)
-@click.option('--dry-run', is_flag=True)
+@click.option("--shard-id", type=int, default=None)
+@click.option("--id-start", type=int, default=0)
+@click.option("--dry-run", is_flag=True)
 def main(shard_id, id_start, dry_run):
     if shard_id is not None:
         process_shard(shard_id, dry_run, id_start)
     else:
         for shard_id in engine_manager.engines:
             process_shard(shard_id, dry_run, id_start)

-if __name__ == '__main__':
+
+if __name__ == "__main__":
     main()
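
The throttle object returned by limitlion.throttle_wait is called once per committed batch to keep the backfill at roughly rps operations per second. A local, sleep-based stand-in that mimics only the calling convention (the real library enforces the limit across processes; that machinery is not reproduced here):

import time

def throttle_wait(name, rps, window=5):
    # Simplified stand-in: block just long enough to keep the caller at
    # or below `rps` calls per second. `name` and `window` are kept for
    # signature parity and are otherwise unused in this sketch.
    interval = 1.0 / rps
    last = [0.0]

    def throttle():
        wait = last[0] + interval - time.time()
        if wait > 0:
            time.sleep(wait)
        last[0] = time.time()

    return throttle

throttle = throttle_wait("create-event-contact-associations", rps=2.0)
for _ in range(3):
    throttle()  # proceeds at most twice per second
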
4 changes: 3 additions & 1 deletion inbox/__init__.py
@@ -1,9 +1,11 @@
 # Allow out-of-tree submodules.
 from pkgutil import extend_path
+
 __path__ = extend_path(__path__, __name__)

 try:
     from inbox.client import APIClient
-    __all__ = ['APIClient']
+
+    __all__ = ["APIClient"]
 except ImportError:
     pass
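
A note on the context lines here: pkgutil.extend_path is what makes the "Allow out-of-tree submodules." comment true. It scans every sys.path entry for a directory named after the package and appends the matches to __path__, so a separately installed tree can contribute inbox.* submodules. A small illustration with hypothetical paths:

from pkgutil import extend_path

# If some other sys.path entry also contains an inbox/ directory
# (say /srv/plugins/inbox/extra.py), extend_path returns the original
# list plus that directory, and `import inbox.extra` then resolves even
# though the main tree ships no extra.py. Paths here are hypothetical.
print(extend_path(["/srv/app/inbox"], "inbox"))
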