From 13aaabe7c391e4ccc4b23ae8de027142f739496e Mon Sep 17 00:00:00 2001
From: Alec Rosenbaum
Date: Mon, 17 Aug 2020 10:30:22 -0400
Subject: [PATCH 1/4] add circle checking for formatting

---
 .circleci/config.yml  | 17 +++++++++++++++++
 requirements_lint.txt |  1 +
 2 files changed, 18 insertions(+)
 create mode 100644 requirements_lint.txt

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 457970508..5704137b6 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -1,5 +1,22 @@
 version: 2.1
 jobs:
+  static-code-analysis:
+    docker:
+      - image: circleci/python:3.6
+    working_directory: ~/code
+    steps:
+      - checkout
+
+      - run:
+          name: Prepare Environment
+          command: |
+            sudo -E pip install --no-deps -r requirements_lint.txt
+
+      - run:
+          name: black
+          when: always
+          command: black --check .
+
   build:
     machine:
       image: ubuntu-1604:201903-01
diff --git a/requirements_lint.txt b/requirements_lint.txt
new file mode 100644
index 000000000..fc3fb88b5
--- /dev/null
+++ b/requirements_lint.txt
@@ -0,0 +1 @@
+black==19.10b0
\ No newline at end of file

From e1ba3d7f66bad61403c4d5d3da4b7e5b5b910d99 Mon Sep 17 00:00:00 2001
From: Alec Rosenbaum
Date: Mon, 17 Aug 2020 10:34:05 -0400
Subject: [PATCH 2/4] add job to workflow

---
 .circleci/config.yml | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 5704137b6..33cc20511 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -37,3 +37,10 @@ jobs:
             && bin/wait-for-it.sh mysql:3306 \
             && NYLAS_ENV=test py.test inbox/test/
           '
+
+workflows:
+  version: 2
+  workflow:
+    jobs:
+      - static-code-analysis
+      - build

From db8120f4b379931bcf20f091dde289b4e8b4f90e Mon Sep 17 00:00:00 2001
From: Alec Rosenbaum
Date: Mon, 17 Aug 2020 10:38:01 -0400
Subject: [PATCH 3/4] add missing linter requirements

---
 requirements_lint.txt | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/requirements_lint.txt b/requirements_lint.txt
index fc3fb88b5..7b87b6e58 100644
--- a/requirements_lint.txt
+++ b/requirements_lint.txt
@@ -1 +1,6 @@
-black==19.10b0
\ No newline at end of file
+black==19.10b0
+click==7.1.2
+pathspec==0.8.0
+regex==2020.7.14
+toml==0.10.1
+typed-ast==1.4.1
\ No newline at end of file

From 295aa2dc6a61e2023b134db640bbdbff0fcec7f4 Mon Sep 17 00:00:00 2001
From: Alec Rosenbaum
Date: Mon, 17 Aug 2020 10:42:41 -0400
Subject: [PATCH 4/4] format files

---
 bin/backfix-duplicate-categories.py | 111 +-
 bin/backfix-generic-imap-separators.py | 30 +-
 bin/create-event-contact-associations.py | 44 +-
 inbox/__init__.py | 4 +-
 inbox/actions/backends/__init__.py | 2 +
 inbox/actions/backends/generic.py | 168 +-
 inbox/actions/backends/gmail.py | 18 +-
 inbox/actions/base.py | 122 +-
 inbox/api/err.py | 42 +-
 inbox/api/filtering.py | 692 ++++---
 inbox/api/kellogs.py | 397 ++--
 inbox/api/metrics_api.py | 196 +-
 inbox/api/ns_api.py | 1840 +++++++++--------
 inbox/api/sending.py | 15 +-
 inbox/api/srv.py | 295 +--
 inbox/api/update.py | 196 +-
 inbox/api/validation.py | 281 ++-
 inbox/api/wsgi.py | 20 +-
 inbox/auth/__init__.py | 1 +
 inbox/auth/base.py | 12 +-
 inbox/auth/generic.py | 331 +--
 inbox/auth/gmail.py | 194 +-
 inbox/auth/oauth.py | 131 +-
 inbox/basicauth.py | 1 -
 inbox/config.py | 66 +-
 inbox/console.py | 41 +-
 inbox/contacts/__init__.py | 1 +
 inbox/contacts/algorithms.py | 61 +-
 inbox/contacts/carddav.py | 30 +-
 inbox/contacts/crud.py | 15 +-
 inbox/contacts/google.py | 97 +-
 inbox/contacts/icloud.py | 55 +-
 inbox/contacts/processing.py | 82 +-
 inbox/contacts/remote_sync.py | 84 +-
inbox/contacts/search.py | 163 +- inbox/contacts/vcard.py | 216 +- inbox/crispin.py | 548 ++--- inbox/events/__init__.py | 1 + inbox/events/actions/backends/__init__.py | 2 + inbox/events/actions/backends/gmail.py | 11 +- inbox/events/actions/base.py | 62 +- inbox/events/google.py | 430 ++-- inbox/events/ical.py | 495 +++-- inbox/events/recurring.py | 84 +- inbox/events/remote_sync.py | 243 ++- inbox/events/timezones.py | 484 ++--- inbox/events/util.py | 46 +- inbox/folder_edge_cases.py | 86 +- inbox/heartbeat/config.py | 28 +- inbox/heartbeat/status.py | 14 +- inbox/heartbeat/store.py | 61 +- inbox/ignition.py | 210 +- inbox/instrumentation.py | 127 +- inbox/mailsync/backends/__init__.py | 2 + inbox/mailsync/backends/base.py | 43 +- inbox/mailsync/backends/gmail.py | 287 +-- inbox/mailsync/backends/imap/__init__.py | 6 +- inbox/mailsync/backends/imap/common.py | 120 +- inbox/mailsync/backends/imap/generic.py | 530 +++-- inbox/mailsync/backends/imap/monitor.py | 84 +- inbox/mailsync/frontend.py | 50 +- inbox/mailsync/gc.py | 112 +- inbox/mailsync/service.py | 215 +- inbox/models/__init__.py | 2 + inbox/models/account.py | 294 ++- inbox/models/action_log.py | 82 +- inbox/models/backends/__init__.py | 2 + inbox/models/backends/generic.py | 101 +- inbox/models/backends/gmail.py | 153 +- inbox/models/backends/imap.py | 232 ++- inbox/models/backends/oauth.py | 31 +- inbox/models/backends/outlook.py | 11 +- inbox/models/base.py | 11 +- inbox/models/block.py | 111 +- inbox/models/calendar.py | 82 +- inbox/models/category.py | 116 +- inbox/models/contact.py | 100 +- inbox/models/data_processing.py | 15 +- inbox/models/event.py | 270 +-- inbox/models/folder.py | 71 +- inbox/models/label.py | 51 +- inbox/models/message.py | 488 +++-- inbox/models/meta.py | 47 +- inbox/models/metadata.py | 30 +- inbox/models/mixins.py | 34 +- inbox/models/namespace.py | 38 +- inbox/models/roles.py | 84 +- inbox/models/search.py | 7 +- inbox/models/secret.py | 21 +- inbox/models/session.py | 76 +- inbox/models/thread.py | 138 +- inbox/models/transaction.py | 106 +- inbox/models/util.py | 281 ++- inbox/models/when.py | 15 +- inbox/providers.py | 836 +++++--- inbox/s3/__init__.py | 1 + inbox/s3/backends/__init__.py | 1 + inbox/s3/backends/gmail.py | 29 +- inbox/s3/backends/imap.py | 18 +- inbox/s3/exc.py | 2 + inbox/scheduling/deferred_migration.py | 67 +- inbox/scheduling/event_queue.py | 37 +- inbox/scheduling/queue.py | 63 +- inbox/search/backends/__init__.py | 2 + inbox/search/backends/generic.py | 6 +- inbox/search/backends/gmail.py | 84 +- inbox/search/backends/imap.py | 155 +- inbox/search/base.py | 4 +- inbox/security/blobstorage.py | 10 +- inbox/security/oracles.py | 37 +- inbox/sendmail/__init__.py | 2 + inbox/sendmail/base.py | 170 +- inbox/sendmail/generic.py | 6 +- inbox/sendmail/gmail.py | 6 +- inbox/sendmail/message.py | 130 +- inbox/sendmail/smtp/postel.py | 264 +-- inbox/sendmail/smtp/util.py | 80 +- inbox/sqlalchemy_ext/util.py | 44 +- inbox/sync/base_sync.py | 44 +- inbox/test/__init__.py | 1 + inbox/test/api/base.py | 17 +- inbox/test/api/test_account.py | 102 +- inbox/test/api/test_auth.py | 28 +- inbox/test/api/test_calendars.py | 89 +- inbox/test/api/test_contacts.py | 54 +- inbox/test/api/test_data_processing.py | 149 +- inbox/test/api/test_drafts.py | 406 ++-- inbox/test/api/test_event_participants.py | 513 +++-- inbox/test/api/test_event_when.py | 233 ++- inbox/test/api/test_events.py | 447 ++-- inbox/test/api/test_events_recurring.py | 175 +- inbox/test/api/test_files.py | 170 +- 
inbox/test/api/test_filtering.py | 371 ++-- inbox/test/api/test_folders.py | 48 +- inbox/test/api/test_folders_labels.py | 236 ++- inbox/test/api/test_invalid_account.py | 71 +- inbox/test/api/test_messages.py | 291 +-- inbox/test/api/test_searching.py | 516 ++--- inbox/test/api/test_sending.py | 1064 +++++----- inbox/test/api/test_streaming.py | 159 +- inbox/test/api/test_threads.py | 194 +- inbox/test/api/test_validation.py | 94 +- inbox/test/api/test_views.py | 43 +- inbox/test/auth/__init__.py | 1 + inbox/test/auth/providers/__init__.py | 1 + inbox/test/auth/providers/mock_gmail.py | 22 +- inbox/test/auth/test_generic_auth.py | 198 +- inbox/test/auth/test_gmail_auth.py | 65 +- .../test/auth/test_gmail_auth_credentials.py | 339 +-- inbox/test/auth/test_imap_smtp_auth.py | 110 +- inbox/test/auth/test_ssl_auth.py | 100 +- inbox/test/conftest.py | 18 +- inbox/test/contacts/test_process_mail.py | 111 +- inbox/test/contacts/test_remote_sync.py | 65 +- inbox/test/events/test_datetime.py | 91 +- inbox/test/events/test_events_util.py | 64 +- inbox/test/events/test_google_events.py | 858 ++++---- inbox/test/events/test_ics_parsing.py | 426 ++-- inbox/test/events/test_inviting.py | 42 +- inbox/test/events/test_merge.py | 42 +- inbox/test/events/test_recurrence.py | 510 +++-- inbox/test/events/test_rsvp.py | 20 +- inbox/test/events/test_sync.py | 316 ++- inbox/test/general/test_account.py | 15 +- .../general/test_address_canonicalization.py | 31 +- inbox/test/general/test_category.py | 30 +- inbox/test/general/test_concurrency.py | 56 +- inbox/test/general/test_draft_creation.py | 10 +- .../test/general/test_filename_truncation.py | 28 +- inbox/test/general/test_html_parsing.py | 20 +- inbox/test/general/test_ignition.py | 21 +- inbox/test/general/test_message_parsing.py | 499 +++-- inbox/test/general/test_mutable_json_type.py | 20 +- inbox/test/general/test_namespace.py | 145 +- inbox/test/general/test_paths.py | 39 +- .../test/general/test_provider_resolution.py | 86 +- inbox/test/general/test_relationships.py | 92 +- inbox/test/general/test_required_folders.py | 19 +- inbox/test/general/test_sync_engine_exit.py | 28 +- inbox/test/general/test_thread_creation.py | 73 +- inbox/test/general/test_threading.py | 105 +- inbox/test/general/test_util.py | 83 +- inbox/test/heartbeat/test_heartbeat.py | 48 +- inbox/test/imap/data.py | 64 +- .../imap/network/test_actions_syncback.py | 64 +- .../test/imap/network/test_drafts_syncback.py | 67 +- inbox/test/imap/network/test_send.py | 34 +- inbox/test/imap/test_actions.py | 197 +- inbox/test/imap/test_crispin_client.py | 784 ++++--- inbox/test/imap/test_delete_handling.py | 272 ++- inbox/test/imap/test_folder_state.py | 55 +- inbox/test/imap/test_folder_sync.py | 358 ++-- inbox/test/imap/test_full_imap_enabled.py | 31 +- inbox/test/imap/test_labels.py | 209 +- inbox/test/imap/test_pooling.py | 20 +- inbox/test/imap/test_save_folder_names.py | 213 +- inbox/test/imap/test_smtp.py | 101 +- inbox/test/imap/test_update_metadata.py | 137 +- inbox/test/providers/__init__.py | 2 + .../test/scheduling/test_sync_start_logic.py | 109 +- inbox/test/scheduling/test_syncback_logic.py | 118 +- inbox/test/search/conftest.py | 6 +- inbox/test/security/test_blobstorage.py | 11 +- inbox/test/security/test_secret.py | 91 +- inbox/test/security/test_smtp_ssl.py | 83 +- inbox/test/system/client.py | 7 +- inbox/test/system/conftest.py | 55 +- inbox/test/system/google_auth_helper.py | 51 +- inbox/test/system/outlook_auth_helper.py | 77 +- inbox/test/system/random_words.py | 46 +- 
inbox/test/system/test_auth.py | 17 +- inbox/test/system/test_drafts.py | 14 +- inbox/test/system/test_events.py | 32 +- inbox/test/system/test_google_events.py | 34 +- inbox/test/system/test_labels.py | 26 +- inbox/test/system/test_sending.py | 67 +- .../transactions/test_action_scheduling.py | 58 +- inbox/test/transactions/test_delta_sync.py | 193 +- .../transactions/test_thread_versioning.py | 14 +- .../transactions/test_transaction_creation.py | 327 +-- inbox/test/util/base.py | 406 ++-- inbox/test/util/crispin.py | 1 + .../test_gpush_calendar_notifications.py | 59 +- inbox/transactions/actions.py | 531 +++-- inbox/transactions/delta_sync.py | 172 +- inbox/transactions/search.py | 78 +- inbox/util/__init__.py | 1 + inbox/util/addr.py | 6 +- inbox/util/blockstore.py | 101 +- inbox/util/concurrency.py | 69 +- inbox/util/db.py | 13 +- inbox/util/debug.py | 5 +- inbox/util/encoding.py | 10 +- inbox/util/file.py | 14 +- inbox/util/fleet.py | 69 +- inbox/util/html.py | 57 +- inbox/util/logging_helper.py | 6 +- inbox/util/misc.py | 52 +- inbox/util/rdb.py | 14 +- inbox/util/sharding.py | 19 +- inbox/util/startup.py | 18 +- inbox/util/stats.py | 4 +- inbox/util/testutils.py | 193 +- inbox/util/threading.py | 45 +- inbox/util/url.py | 78 +- inbox/webhooks/gpush_notifications.py | 67 +- migrations/env.py | 36 +- .../000_g_msgid_g_thrid_as_integers.py | 18 +- ..._rename_message_id_to_message_id_header.py | 12 +- ...store_g_thrid_as_biginteger_instead_of_.py | 10 +- migrations/versions/003_expand_littlejson.py | 8 +- .../versions/004_drafts_as_required_folder.py | 10 +- .../versions/005_import_old_accounts.py | 57 +- migrations/versions/006_add_search_tokens.py | 21 +- .../versions/007_per_provider_table_split.py | 297 ++- .../versions/008_store_userinfo_from_oauth.py | 42 +- .../009_multiple_contact_providers.py | 42 +- .../versions/010_store_raw_contact_data.py | 8 +- migrations/versions/011_use_server_default.py | 278 ++- .../012_move_google_userinfo_fields_to_.py | 228 +- migrations/versions/013_add_spool_msg.py | 41 +- .../versions/014_contact_ranking_signals.py | 25 +- ...015_generalize_from_sender_header_field.py | 4 +- .../versions/016_extra_transaction_data.py | 11 +- migrations/versions/017_haspublicid.py | 98 +- .../018_message_contact_association.py | 32 +- migrations/versions/019_blocks_to_parts.py | 115 +- .../versions/020_store_webhook_parameters.py | 78 +- ..._add_references_column_to_message_table.py | 8 +- ...22_store_imapuid_msg_uid_as_biginteger_.py | 8 +- .../versions/022_webhooks_and_filters.py | 126 +- .../023_tighten_nullable_constraints_on_.py | 21 +- ...024_remote_folders_and_inbox_tags_split.py | 382 ++-- ...emove_user_sharedfolder_and_usersession.py | 24 +- ...026_add_audit_timestamps_to_all_objects.py | 105 +- .../versions/027_imapuid_soft_deletes.py | 6 +- migrations/versions/028_tag_api_migration.py | 168 +- .../029_set_inbox_folder_exposed_name.py | 14 +- .../030_add_is_read_attribute_to_messages.py | 64 +- .../versions/031_add_indexes_to_timestamps.py | 383 ++-- migrations/versions/032_tighten_easuid.py | 64 +- migrations/versions/033_add_more_indexes.py | 26 +- .../034_cascade_folder_deletes_to_imapuid.py | 187 +- .../035_add_columns_for_drafts_support_to_.py | 115 +- .../036_replace_usertag_by_generic_tag.py | 141 +- migrations/versions/037_shorten_addresses.py | 12 +- .../038_add_public_ids_to_transactions.py | 71 +- ..._change_easfoldersync_unique_constraint.py | 31 +- migrations/versions/040_gmailaccount.py | 144 +- 
...1_add_sync_status_columns_to_foldersync.py | 29 +- .../versions/042_simplify_tags_schema.py | 18 +- ...columns_for_sync_running_stopped_killed.py | 24 +- .../versions/044_update_drafts_schema.py | 91 +- .../versions/045_new_password_storage.py | 31 +- migrations/versions/046_yahoo.py | 20 +- .../versions/047_store_more_on_threads.py | 43 +- .../048_remove_storage_of_access_token.py | 23 +- .../049_store_less_on_threads_after_all.py | 10 +- .../versions/050_imap_table_cleanups.py | 234 ++- .../051_store_secrets_in_local_vault.py | 94 +- ...2_store_google_client_id_and_secret_on_.py | 19 +- .../versions/053_canonicalize_addresses.py | 122 +- .../054_dont_specially_store_mailing_list_.py | 18 +- .../versions/055_add_account_liveness.py | 10 +- .../versions/056_message_unique_constraint.py | 77 +- ...consolidate_account_sync_status_columns.py | 30 +- ...enforce_length_limit_of_255_on_message_.py | 31 +- migrations/versions/059_add_action_log.py | 59 +- .../060_cascade_folder_deletes_to_easuid.py | 24 +- ...remove_easfoldersyncstatus_folder_rows_.py | 29 +- ...max_length_of_message_message_id_header.py | 14 +- .../063_drop_misc_keyval_column_on_parts.py | 6 +- .../064_make_address_fields_non_null.py | 60 +- .../065_add_multi_column_transaction_index.py | 15 +- migrations/versions/066_kill_spoolmessage.py | 76 +- .../067_add_executed_status_to_action_log.py | 58 +- migrations/versions/068_outlook.py | 50 +- migrations/versions/069_aol.py | 20 +- ..._easfoldersyncstatus_unique_constraints.py | 29 +- migrations/versions/071_more_sync_states.py | 30 +- migrations/versions/072_recompute_snippets.py | 15 +- migrations/versions/073_generic_providers.py | 127 +- .../versions/074_add_eas_thrid_index.py | 21 +- .../075_drop_contacts_search_signals.py | 10 +- .../versions/076_add_thread_order_column.py | 9 +- .../077_add_supports_condstore_column_to_.py | 11 +- migrations/versions/078_events.py | 62 +- migrations/versions/079_events_longer_uids.py | 27 +- .../versions/080_longer_event_summaries.py | 8 +- ...move_imapfolder_highestmodseq_to_bigint.py | 30 +- migrations/versions/082_event_participants.py | 45 +- .../versions/083_calendars_event_owners.py | 137 +- migrations/versions/084_mutable_drafts.py | 50 +- migrations/versions/085_add_attachment_tag.py | 50 +- migrations/versions/086_event_date_times.py | 11 +- .../versions/087_fix_account_foreign_keys.py | 20 +- .../versions/088_calendar_descriptions.py | 98 +- migrations/versions/089_revert_encryption.py | 44 +- migrations/versions/090_parts_block_ids.py | 48 +- migrations/versions/091_remove_webhooks.py | 163 +- .../versions/092_fix_outlookaccount_typo.py | 20 +- .../versions/093_add_folder_identifier.py | 21 +- migrations/versions/094_eas_passwords.py | 37 +- migrations/versions/095_secret_storage.py | 50 +- .../versions/096_migrate_secret_data.py | 51 +- migrations/versions/097_secrets_endgame.py | 11 +- .../versions/098_add_throttling_support.py | 12 +- .../099_add_namespace_id_to_message.py | 24 +- .../100_make_message_namespace_id_nonnull.py | 14 +- .../versions/101_add_namespace_to_contacts.py | 28 +- .../versions/102_add_namespace_to_events.py | 28 +- .../103_add_namespace_to_calendars.py | 31 +- .../104_add_message_inbox_uid_index.py | 9 +- .../versions/105_add_subject_indexes.py | 12 +- migrations/versions/106_add_more_indexes.py | 21 +- migrations/versions/107_drop_eas_state.py | 9 +- .../versions/108_easaccount_username.py | 37 +- ...109_add_retries_column_to_the_actionlog.py | 28 +- migrations/versions/110_add_thread_index.py | 15 +- 
.../versions/111_add_account_name_column.py | 26 +- .../versions/112_imap_delete_cascades.py | 16 +- .../versions/113_add_custom_imap_overrides.py | 44 +- .../versions/114_eas_twodevices_pledge.py | 159 +- .../versions/115_eas_twodevices_turn.py | 95 +- .../versions/116_eas_twodevices_prestige.py | 44 +- .../117_fix_easuid_delete_cascades.py | 20 +- .../118_store_label_information_per_uid.py | 9 +- .../versions/119_store_full_message_body.py | 16 +- .../versions/120_simplify_transaction_log.py | 12 +- .../versions/121_add_searchindexcursor.py | 59 +- migrations/versions/122_add_easeventuid.py | 69 +- .../versions/123_remove_gmail_inbox_syncs.py | 14 +- .../124_remove_soft_deleted_objects.py | 8 +- .../125_refactor_participants_table.py | 10 +- .../126_add_account_sync_contacts_events.py | 30 +- migrations/versions/127_remove_easeventuid.py | 17 +- migrations/versions/128_fix_cascades.py | 12 +- .../129_make_folder_name_case_sensitive.py | 12 +- migrations/versions/130_add_message_index.py | 16 +- .../131_update_transaction_indices.py | 14 +- .../132_add_cascade_delete_part_block_id.py | 8 +- .../133_add_unique_account_constraint.py | 11 +- migrations/versions/134_add_message_index.py | 14 +- .../135_add_thread_tag_index_to_tagitem.py | 8 +- .../versions/136_add_actionlog_index.py | 11 +- migrations/versions/137_add_versions.py | 23 +- .../versions/138_add_participants_column.py | 4 +- .../139_add_ns_index_to_contact_and_event.py | 20 +- ..._relax_participants_by_email_constraint.py | 4 +- .../versions/141_remote_remote_contacts.py | 11 +- migrations/versions/142_add_sync_run_bit.py | 12 +- .../versions/143_add_reply_to_message_id.py | 42 +- .../versions/144_update_calendar_index.py | 37 +- .../versions/145_drop_event_constraint.py | 14 +- .../146_update_google_calendar_uids.py | 9 +- .../versions/147_add_cleaned_subject.py | 22 +- ...148_add_last_modified_column_for_events.py | 4 +- .../149_add_emailed_events_calendar.py | 36 +- .../versions/150_add_polymorphic_events.py | 79 +- .../151_remove_message_thread_order.py | 13 +- .../versions/152_add_message_id_to_event.py | 12 +- .../153_revert_account_unique_constraint.py | 24 +- .../versions/154_add_message_indices.py | 20 +- migrations/versions/155_add_status_column.py | 24 +- .../versions/156_drop_cancelled_column.py | 12 +- migrations/versions/157_update_eas_schema.py | 66 +- .../versions/158_update_eas_schema_part_2.py | 32 +- .../versions/159_update_eas_schema_part_3.py | 17 +- migrations/versions/160_split_actionlog.py | 57 +- .../161_update_eas_schema_part_3_for_prod.py | 25 +- .../162_update_folder_unique_constraint.py | 17 +- .../versions/163_drop_transaction_snapshot.py | 6 +- .../versions/164_add_decode_error_index.py | 11 +- migrations/versions/165_add_compacted_body.py | 11 +- .../versions/166_migrate_body_format.py | 20 +- ..._create_index_for_querying_messages_by_.py | 14 +- .../168_drop_message_sanitized_body.py | 6 +- .../versions/169_update_easuid_schema.py | 13 +- .../versions/170_update_easuid_schema_2.py | 32 +- .../versions/171_update_easuid_schema_3.py | 14 +- .../versions/172_update_easuid_schema_4.py | 13 +- migrations/versions/173_add_owner2.py | 9 +- migrations/versions/174_backfill_owner2.py | 4 +- .../175_fix_recurring_override_cascade.py | 45 +- .../176_add_run_state_folderstatus.py | 18 +- .../177_add_run_state_eas_folderstatus.py | 24 +- ..._add_reply_to_messagecontactassociation.py | 28 +- .../versions/179_longer_event_descriptions.py | 9 +- .../180_migrate_event_descriptions.py | 11 +- 
.../181_drop_short_event_descriptions.py | 6 +- .../182_add_data_processing_cache_table.py | 36 +- .../183_change_event_sync_timestamp.py | 15 +- ...184_create_gmail_auth_credentials_table.py | 44 +- ...5_backfill_gmail_auth_credentials_table.py | 48 +- .../186_new_tables_for_folders_overhaul.py | 219 +- .../187_migrate_data_for_folders_overhaul.py | 101 +- .../188_create_sequence_number_column.py | 19 +- .../189_add_initial_sync_start_end_column.py | 16 +- .../versions/190_eas_add_device_retirement.py | 25 +- .../191_add_new_events_and_calendars_flags.py | 35 +- ...dd_receivedrecentdate_column_to_threads.py | 36 +- ...alculate_receivedrecentdate_for_threads.py | 17 +- .../versions/194_extend_eas_folder_id.py | 26 +- .../195_remove_receivedrecentdate_column.py | 30 +- .../196_create_outlook_account_column.py | 10 +- ...7_add_message_categories_change_counter.py | 36 +- ..._eas_foldersyncstatus_startstop_columns.py | 28 +- migrations/versions/199_save_imap_uidnext.py | 9 +- .../versions/200_update_imapfolderinfo.py | 11 +- .../201_add_sync_email_bit_to_account.py | 16 +- .../versions/202_drop_sync_raw_data_column.py | 18 +- .../versions/203_deleted_at_constraint.py | 46 +- .../204_remove_deleted_at_constraint.py | 46 +- .../versions/205_fix_categories_cascade.py | 36 +- .../versions/206_add_phone_numbers_table.py | 48 +- ...dd_contact_search_index_service_cursor_.py | 90 +- .../208_drop_easuid_uniqueconstraint.py | 12 +- .../versions/209_recreate_easuid_index.py | 18 +- .../210_drop_message_full_body_id_fk.py | 6 +- .../versions/211_drop_message_full_body_id.py | 6 +- ...add_columns_for_smtp_imap_specific_auth.py | 50 +- migrations/versions/213_add_metadata_table.py | 104 +- .../214_introduce_accounttransaction.py | 92 +- .../215_add_actionlog_status_type_index.py | 9 +- ...dd_folder_separator_column_for_generic_.py | 12 +- .../217_add_genericaccount_ssl_required.py | 7 +- .../versions/218_modify_metadata_indexes.py | 14 +- ...accounttransaction_namespace_id_cascade.py | 32 +- .../versions/220_folder_separators_again.py | 34 +- .../221_fix_category_column_defaults.py | 40 +- .../222_remove_unused_transaction_indices.py | 21 +- migrations/versions/223_time_mixins_fix.py | 195 +- .../224_namespace_id_idx_transaction.py | 9 +- .../225_drop_messagecategory_foreign_keys.py | 36 +- ..._add_queryable_value_column_to_metadata.py | 21 +- .../227_remove_message_foreignkeys.py | 43 +- .../228_increase_gmailaccount_token_length.py | 12 +- .../229_drop_transaction_foreign_keys.py | 35 +- .../versions/230_drop_block_foreign_keys.py | 43 +- .../versions/231_drop_contact_foreign_keys.py | 52 +- .../versions/232_add_thread_deleted_at.py | 19 +- .../233_revert_drop_block_foreign_keys.py | 43 +- .../234_change_contact_uid_collation.py | 14 +- .../235_change_imapfolderinfo_column.py | 28 +- .../versions/236_add_desired_sync_host.py | 4 +- .../versions/237_add_new_contacts_index.py | 15 +- .../versions/238_add_message_id_header_idx.py | 17 +- .../versions/239_server_default_created_at.py | 52 +- .../versions/240_create_missing_indexes.py | 133 +- .../241_create_messagecategory_index.py | 14 +- migrations/versions/242_fix_indexes.py | 26 +- .../versions/243_fix_action_log_indexes.py | 28 +- migrations/versions/244_cursor_index.py | 17 +- migrations/versions/245_cascade_secrets.py | 16 +- .../246_create_message_actionlog_indexes.py | 22 +- .../versions/247_add_event_visibility.py | 10 +- .../versions/248_event_contact_association.py | 50 +- ...249_fix_contact_association_constraints.py | 28 +- setup.py | 92 +- 
499 files changed, 24474 insertions(+), 18294 deletions(-) diff --git a/bin/backfix-duplicate-categories.py b/bin/backfix-duplicate-categories.py index e09b14382..5617c9f68 100755 --- a/bin/backfix-duplicate-categories.py +++ b/bin/backfix-duplicate-categories.py @@ -16,7 +16,7 @@ from sqlalchemy.sql import exists, and_ configure_logging() -log = get_logger(purpose='duplicate-category-backfill') +log = get_logger(purpose="duplicate-category-backfill") def backfix_shard(shard_id, dry_run): @@ -27,21 +27,28 @@ def backfix_shard(shard_id, dry_run): # is indeterminate. So we find the duplicate # (namespace_id, display_name, name) pairs and use them to query # for specific Category rows - category_query = db_session.query(Category.namespace_id, - Category.display_name, - Category.name) - - duplicate_attrs = category_query. \ - group_by(Category.display_name, - Category.namespace_id, - Category.name).having( - func.count(Category.id) > 1).all() + category_query = db_session.query( + Category.namespace_id, Category.display_name, Category.name + ) + + duplicate_attrs = ( + category_query.group_by( + Category.display_name, Category.namespace_id, Category.name + ) + .having(func.count(Category.id) > 1) + .all() + ) for namespace_id, display_name, name in duplicate_attrs: - duplicates = db_session.query(Category.id). \ - filter(Category.namespace_id == namespace_id, - Category.display_name == display_name, - Category.name == name).all() + duplicates = ( + db_session.query(Category.id) + .filter( + Category.namespace_id == namespace_id, + Category.display_name == display_name, + Category.name == name, + ) + .all() + ) # duplicates is an array of tuples where each tuple is # (Category.id,). We flatten the tuples here so that each item in @@ -68,31 +75,37 @@ def backfix_shard(shard_id, dry_run): # Iterate over all of the duplicate categories except master for category_id in grouped_categories[1:]: with session_scope_by_shard_id(shard_id) as db_session: - associated_messages = db_session.query(exists().where( - MessageCategory.category_id == category_id)).scalar() + associated_messages = db_session.query( + exists().where(MessageCategory.category_id == category_id) + ).scalar() # if category has messages, they need to be de-duped # and consolidated if associated_messages: - log.info('Category has associated messages', - category_id=category_id) + log.info( + "Category has associated messages", category_id=category_id + ) categories_with_messages.append(category_id) # if category does not have messages, it can be deleted else: categories_to_delete.append(category_id) - log.info('Category does not have associated messages', - category_id=category_id) + log.info( + "Category does not have associated messages", + category_id=category_id, + ) if len(categories_with_messages) > 0: - log.info('Consolidating messages into category', - category_id=master_id) + log.info("Consolidating messages into category", category_id=master_id) for category_id in categories_with_messages: try: with session_scope_by_shard_id(shard_id) as db_session: - messagecategories = db_session.query(MessageCategory).\ - filter(MessageCategory.category_id == category_id).all() # noqa + messagecategories = ( + db_session.query(MessageCategory) + .filter(MessageCategory.category_id == category_id) + .all() + ) # noqa for mc in messagecategories: # Its possible for a message to be associated with @@ -103,10 +116,14 @@ def backfix_shard(shard_id, dry_run): # see such an object exists. 
If it does, we # point the MessageCategory to the master # category. If it does not, then simply delete it - mc_exists = db_session.query(exists().where(and_( - MessageCategory.category_id == master_id, - MessageCategory.message_id == mc.message_id)))\ - .scalar() + mc_exists = db_session.query( + exists().where( + and_( + MessageCategory.category_id == master_id, + MessageCategory.message_id == mc.message_id, + ) + ) + ).scalar() if not dry_run: # If mc_exists == True, then there's a @@ -114,7 +131,9 @@ def backfix_shard(shard_id, dry_run): # and the current category, so we can delete # the current category if mc_exists: - db_session.query(MessageCategory).filter_by(id=mc.id).delete() + db_session.query(MessageCategory).filter_by( + id=mc.id + ).delete() else: # Master does not have a MessageCategory # for this message. Update this one to @@ -122,14 +141,20 @@ def backfix_shard(shard_id, dry_run): mc.category_id = master_id db_session.commit() - log.info('Updated MessageCategory', mc_id=mc.id, - old_category_id=mc.category_id, - new_category_id=master_id) + log.info( + "Updated MessageCategory", + mc_id=mc.id, + old_category_id=mc.category_id, + new_category_id=master_id, + ) categories_to_delete.append(category_id) except Exception as e: - log.critical('Exception encountered while consolidating' - ' messagecategories', e=str(e)) + log.critical( + "Exception encountered while consolidating" + " messagecategories", + e=str(e), + ) raise e # We REALLY don't want to delete the category we consolidated all @@ -138,22 +163,25 @@ def backfix_shard(shard_id, dry_run): for category_id in categories_to_delete: if dry_run: - log.info('Delete category', category_id=category_id) + log.info("Delete category", category_id=category_id) continue with session_scope_by_shard_id(shard_id) as db_session: db_session.query(Category).filter_by(id=category_id).delete() - log.info('Deleted category', category_id=category_id) + log.info("Deleted category", category_id=category_id) categories_to_delete.remove(category_id) - log.info('Completed category migration on shard', - categories_affected=categories_affected, shard_id=shard_id) + log.info( + "Completed category migration on shard", + categories_affected=categories_affected, + shard_id=shard_id, + ) @click.command() -@click.option('--shard-id', type=int, default=None) -@click.option('--dry-run', is_flag=True) +@click.option("--shard-id", type=int, default=None) +@click.option("--dry-run", is_flag=True) def main(shard_id, dry_run): if shard_id is not None: backfix_shard(shard_id, dry_run) @@ -161,5 +189,6 @@ def main(shard_id, dry_run): for shard_id in engine_manager.engines: backfix_shard(shard_id, dry_run) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/bin/backfix-generic-imap-separators.py b/bin/backfix-generic-imap-separators.py index 63b294853..0a0df458c 100755 --- a/bin/backfix-generic-imap-separators.py +++ b/bin/backfix-generic-imap-separators.py @@ -7,17 +7,20 @@ from nylas.logging import get_logger, configure_logging from inbox.models.backends.generic import GenericAccount -from inbox.models.session import (session_scope, global_session_scope, - session_scope_by_shard_id) +from inbox.models.session import ( + session_scope, + global_session_scope, + session_scope_by_shard_id, +) configure_logging() -log = get_logger(purpose='separator-backfix') +log = get_logger(purpose="separator-backfix") @click.command() -@click.option('--min-id', type=int, default=None) -@click.option('--max-id', type=int, default=None) 
-@click.option('--shard-id', type=int, default=None) +@click.option("--min-id", type=int, default=None) +@click.option("--max-id", type=int, default=None) +@click.option("--shard-id", type=int, default=None) def main(min_id, max_id, shard_id): generic_accounts = [] failed = [] @@ -26,15 +29,14 @@ def main(min_id, max_id, shard_id): # Get the list of running Gmail accounts. with global_session_scope() as db_session: generic_accounts = db_session.query(GenericAccount).filter( - GenericAccount.sync_state == 'running') + GenericAccount.sync_state == "running" + ) if min_id is not None: - generic_accounts = generic_accounts.filter( - GenericAccount.id > min_id) + generic_accounts = generic_accounts.filter(GenericAccount.id > min_id) if max_id is not None: - generic_accounts = generic_accounts.filter( - GenericAccount.id <= max_id) + generic_accounts = generic_accounts.filter(GenericAccount.id <= max_id) generic_accounts = [acc.id for acc in generic_accounts] @@ -43,7 +45,8 @@ def main(min_id, max_id, shard_id): elif shard_id is not None: with session_scope_by_shard_id(shard_id) as db_session: generic_accounts = db_session.query(GenericAccount).filter( - GenericAccount.sync_state == 'running') + GenericAccount.sync_state == "running" + ) generic_accounts = [acc.id for acc in generic_accounts] db_session.expunge_all() @@ -70,5 +73,6 @@ def main(min_id, max_id, shard_id): print "Failed accounts:" print failed -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/bin/create-event-contact-associations.py b/bin/create-event-contact-associations.py index 6b794d85b..6aef3b1e2 100755 --- a/bin/create-event-contact-associations.py +++ b/bin/create-event-contact-associations.py @@ -16,7 +16,7 @@ from sqlalchemy import asc configure_logging() -log = get_logger(purpose='create-event-contact-associations') +log = get_logger(purpose="create-event-contact-associations") def process_shard(shard_id, dry_run, id_start=0): @@ -25,8 +25,9 @@ def process_shard(shard_id, dry_run, id_start=0): rps = 6 / batch_size window = 5 - throttle = limitlion.throttle_wait('create-event-contact-associations', - rps=rps, window=window) + throttle = limitlion.throttle_wait( + "create-event-contact-associations", rps=rps, window=window + ) with session_scope_by_shard_id(shard_id) as db_session: # NOTE: The session is implicitly autoflushed, which ensures no @@ -52,33 +53,43 @@ def process_shard(shard_id, dry_run, id_start=0): id_start = event.id if n % batch_size == 0: - log.info('progress', shard_id=shard_id, id_start=id_start, - n=n, n_skipped=n_skipped, n_updated=n_updated) + log.info( + "progress", + shard_id=shard_id, + id_start=id_start, + n=n, + n_skipped=n_skipped, + n_updated=n_updated, + ) if event.contacts: continue if not dry_run: event.contacts = [] - update_contacts_from_event(db_session, event, - event.namespace_id) + update_contacts_from_event(db_session, event, event.namespace_id) n_updated += 1 if n_updated % batch_size == 0: db_session.commit() - log.info('committed', shard_id=shard_id, n=n, - n_skipped=n_skipped, n_updated=n_updated) + log.info( + "committed", + shard_id=shard_id, + n=n, + n_skipped=n_skipped, + n_updated=n_updated, + ) throttle() - - log.info('finished', shard_id=shard_id, n=n, n_skipped=n_skipped, - n_updated=n_updated) + log.info( + "finished", shard_id=shard_id, n=n, n_skipped=n_skipped, n_updated=n_updated + ) @click.command() -@click.option('--shard-id', type=int, default=None) -@click.option('--id-start', type=int, default=0) -@click.option('--dry-run', 
is_flag=True) +@click.option("--shard-id", type=int, default=None) +@click.option("--id-start", type=int, default=0) +@click.option("--dry-run", is_flag=True) def main(shard_id, id_start, dry_run): if shard_id is not None: process_shard(shard_id, dry_run, id_start) @@ -86,5 +97,6 @@ def main(shard_id, id_start, dry_run): for shard_id in engine_manager.engines: process_shard(shard_id, dry_run, id_start) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/inbox/__init__.py b/inbox/__init__.py index 436a277ad..2f87315ce 100644 --- a/inbox/__init__.py +++ b/inbox/__init__.py @@ -1,9 +1,11 @@ # Allow out-of-tree submodules. from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) try: from inbox.client import APIClient - __all__ = ['APIClient'] + + __all__ = ["APIClient"] except ImportError: pass diff --git a/inbox/actions/backends/__init__.py b/inbox/actions/backends/__init__.py index c07640e26..68c8b4d8d 100644 --- a/inbox/actions/backends/__init__.py +++ b/inbox/actions/backends/__init__.py @@ -9,6 +9,8 @@ """ # Allow out-of-tree action submodules. from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) from inbox.util.misc import register_backends + module_registry = register_backends(__name__, __path__) diff --git a/inbox/actions/backends/generic.py b/inbox/actions/backends/generic.py index b2fc6ad17..e4fbcc989 100644 --- a/inbox/actions/backends/generic.py +++ b/inbox/actions/backends/generic.py @@ -15,11 +15,18 @@ log = get_logger() -PROVIDER = 'generic' - -__all__ = ['set_remote_starred', 'set_remote_unread', 'remote_move', - 'remote_save_draft', 'remote_delete_draft', 'remote_create_folder', - 'remote_update_folder', 'remote_delete_folder'] +PROVIDER = "generic" + +__all__ = [ + "set_remote_starred", + "set_remote_unread", + "remote_move", + "remote_save_draft", + "remote_delete_draft", + "remote_create_folder", + "remote_update_folder", + "remote_delete_folder", +] # STOPSHIP(emfree): # * should update local UID state here after action succeeds, instead of @@ -28,8 +35,12 @@ def uids_by_folder(message_id, db_session): - results = db_session.query(ImapUid.msg_uid, Folder.name).join(Folder). 
\ - filter(ImapUid.message_id == message_id).all() + results = ( + db_session.query(ImapUid.msg_uid, Folder.name) + .join(Folder) + .filter(ImapUid.message_id == message_id) + .all() + ) mapping = defaultdict(list) for uid, folder_name in results: mapping[folder_name].append(uid) @@ -40,27 +51,28 @@ def _create_email(account, message): blocks = [p.block for p in message.attachments] attachments = generate_attachments(message, blocks) from_name, from_email = message.from_addr[0] - msg = create_email(from_name=from_name, - from_email=from_email, - reply_to=message.reply_to, - nylas_uid=message.nylas_uid, - to_addr=message.to_addr, - cc_addr=message.cc_addr, - bcc_addr=message.bcc_addr, - subject=message.subject, - html=message.body, - in_reply_to=message.in_reply_to, - references=message.references, - attachments=attachments) + msg = create_email( + from_name=from_name, + from_email=from_email, + reply_to=message.reply_to, + nylas_uid=message.nylas_uid, + to_addr=message.to_addr, + cc_addr=message.cc_addr, + bcc_addr=message.bcc_addr, + subject=message.subject, + html=message.body, + in_reply_to=message.in_reply_to, + references=message.references, + attachments=attachments, + ) return msg -def _set_flag(crispin_client, account_id, message_id, flag_name, - is_add): +def _set_flag(crispin_client, account_id, message_id, flag_name, is_add): with session_scope(account_id) as db_session: uids_for_message = uids_by_folder(message_id, db_session) if not uids_for_message: - log.warning('No UIDs found for message', message_id=message_id) + log.warning("No UIDs found for message", message_id=message_id) return for folder_name, uids in uids_for_message.items(): @@ -72,21 +84,18 @@ def _set_flag(crispin_client, account_id, message_id, flag_name, def set_remote_starred(crispin_client, account, message_id, starred): - _set_flag(crispin_client, account, message_id, '\\Flagged', - starred) + _set_flag(crispin_client, account, message_id, "\\Flagged", starred) def set_remote_unread(crispin_client, account, message_id, unread): - _set_flag(crispin_client, account, message_id, '\\Seen', - not unread) + _set_flag(crispin_client, account, message_id, "\\Seen", not unread) -def remote_move(crispin_client, account_id, message_id, - destination): +def remote_move(crispin_client, account_id, message_id, destination): with session_scope(account_id) as db_session: uids_for_message = uids_by_folder(message_id, db_session) if not uids_for_message: - log.warning('No UIDs found for message', message_id=message_id) + log.warning("No UIDs found for message", message_id=message_id) return for folder_name, uids in uids_for_message.items(): @@ -104,17 +113,18 @@ def remote_create_folder(crispin_client, account_id, category_id): crispin_client.conn.create_folder(display_name) -def remote_update_folder(crispin_client, account_id, category_id, old_name, - new_name): +def remote_update_folder(crispin_client, account_id, category_id, old_name, new_name): with session_scope(account_id) as db_session: account = db_session.query(Account).get(account_id) account_provider = account.provider - if account_provider not in ['gmail', 'eas']: + if account_provider not in ["gmail", "eas"]: new_display_name = imap_folder_path( - new_name, separator=crispin_client.folder_separator, - prefix=crispin_client.folder_prefix) + new_name, + separator=crispin_client.folder_separator, + prefix=crispin_client.folder_prefix, + ) else: new_display_name = new_name @@ -160,17 +170,18 @@ def remote_save_draft(crispin_client, account_id, message_id): message = 
db_session.query(Message).get(message_id) mimemsg = _create_email(account, message) - if 'drafts' not in crispin_client.folder_names(): - log.info('Account has no detected drafts folder; not saving draft', - account_id=account_id) + if "drafts" not in crispin_client.folder_names(): + log.info( + "Account has no detected drafts folder; not saving draft", + account_id=account_id, + ) return - folder_name = crispin_client.folder_names()['drafts'][0] + folder_name = crispin_client.folder_names()["drafts"][0] crispin_client.select_folder_if_necessary(folder_name, uidvalidity_cb) crispin_client.save_draft(mimemsg) -def remote_update_draft(crispin_client, account_id, message_id, - old_message_id_header): +def remote_update_draft(crispin_client, account_id, message_id, old_message_id_header): with session_scope(account_id) as db_session: account = db_session.query(Account).get(account_id) message = db_session.query(Message).get(message_id) @@ -181,47 +192,54 @@ def remote_update_draft(crispin_client, account_id, message_id, # 1. Create the new message, unless it's somehow already there # 2. Delete the old message the API user is updating - if 'drafts' not in crispin_client.folder_names(): - log.warning('Account has no drafts folder. Will not save draft.', - account_id=account_id) + if "drafts" not in crispin_client.folder_names(): + log.warning( + "Account has no drafts folder. Will not save draft.", account_id=account_id + ) return - folder_name = crispin_client.folder_names()['drafts'][0] + folder_name = crispin_client.folder_names()["drafts"][0] crispin_client.select_folder_if_necessary(folder_name, uidvalidity_cb) - existing_new_draft = crispin_client.find_by_header( - 'Message-Id', message_id_header) + existing_new_draft = crispin_client.find_by_header("Message-Id", message_id_header) if not existing_new_draft: crispin_client.save_draft(mimemsg) else: - log.info('Draft has been saved, will not create a duplicate.', - message_id_header=message_id_header) + log.info( + "Draft has been saved, will not create a duplicate.", + message_id_header=message_id_header, + ) # Check for an older version and delete it. (We can stop once we find # one, to reduce the latency of this operation.). Note that the old # draft does not always have a message id, in which case we can't # replace it. 
if old_message_id_header: - old_version_deleted = crispin_client.delete_draft( - old_message_id_header) + old_version_deleted = crispin_client.delete_draft(old_message_id_header) if old_version_deleted: - log.info('Cleaned up old draft', - old_message_id_header=old_message_id_header, - message_id_header=message_id_header) - - -def remote_delete_draft(crispin_client, account_id, nylas_uid, - message_id_header): - if 'drafts' not in crispin_client.folder_names(): - log.warning('Account has no detected drafts folder; not deleting draft', - account_id=account_id) + log.info( + "Cleaned up old draft", + old_message_id_header=old_message_id_header, + message_id_header=message_id_header, + ) + + +def remote_delete_draft(crispin_client, account_id, nylas_uid, message_id_header): + if "drafts" not in crispin_client.folder_names(): + log.warning( + "Account has no detected drafts folder; not deleting draft", + account_id=account_id, + ) return crispin_client.delete_draft(message_id_header) -def remote_delete_sent(crispin_client, account_id, message_id_header, - delete_multiple=False): - if 'sent' not in crispin_client.folder_names(): - log.warning('Account has no detected sent folder; not deleting message', - account_id=account_id) +def remote_delete_sent( + crispin_client, account_id, message_id_header, delete_multiple=False +): + if "sent" not in crispin_client.folder_names(): + log.warning( + "Account has no detected sent folder; not deleting message", + account_id=account_id, + ) return crispin_client.delete_sent_message(message_id_header, delete_multiple) @@ -231,21 +249,25 @@ def remote_save_sent(crispin_client, account_id, message_id): account = db_session.query(Account).get(account_id) message = db_session.query(Message).get(message_id) if message is None: - log.info('tried to create nonexistent message', - message_id=message_id, account_id=account_id) + log.info( + "tried to create nonexistent message", + message_id=message_id, + account_id=account_id, + ) return mimemsg = _create_email(account, message) - if 'sent' not in crispin_client.folder_names(): - log.warning('Account has no detected sent folder; not saving message', - account_id=account_id) + if "sent" not in crispin_client.folder_names(): + log.warning( + "Account has no detected sent folder; not saving message", + account_id=account_id, + ) return # If there are multiple sent roles we should at least have a warning about it. 
- sent_folder_names = crispin_client.folder_names()['sent'] + sent_folder_names = crispin_client.folder_names()["sent"] if len(sent_folder_names) > 1: - log.warning("Multiple sent folders found for account", - account_id=account_id) + log.warning("Multiple sent folders found for account", account_id=account_id) folder_name = sent_folder_names[0] crispin_client.select_folder_if_necessary(folder_name, uidvalidity_cb) diff --git a/inbox/actions/backends/gmail.py b/inbox/actions/backends/gmail.py index 275347877..39b21d746 100644 --- a/inbox/actions/backends/gmail.py +++ b/inbox/actions/backends/gmail.py @@ -7,17 +7,18 @@ from inbox.models.session import session_scope from imaplib import IMAP4 -PROVIDER = 'gmail' +PROVIDER = "gmail" -__all__ = ['remote_create_label', 'remote_update_label', 'remote_delete_label'] +__all__ = ["remote_create_label", "remote_update_label", "remote_delete_label"] def _encode_labels(labels): return map(imapclient.imap_utf7.encode, labels) -def remote_change_labels(crispin_client, account_id, message_ids, - removed_labels, added_labels): +def remote_change_labels( + crispin_client, account_id, message_ids, removed_labels, added_labels +): uids_for_message = {} with session_scope(account_id) as db_session: for message_id in message_ids: @@ -31,10 +32,12 @@ def remote_change_labels(crispin_client, account_id, message_ids, crispin_client.select_folder_if_necessary(folder_name, uidvalidity_cb) if len(added_labels) > 0: crispin_client.conn.add_gmail_labels( - uids, _encode_labels(added_labels), silent=True) + uids, _encode_labels(added_labels), silent=True + ) if len(removed_labels) > 0: crispin_client.conn.remove_gmail_labels( - uids, _encode_labels(removed_labels), silent=True) + uids, _encode_labels(removed_labels), silent=True + ) def remote_create_label(crispin_client, account_id, category_id): @@ -46,8 +49,7 @@ def remote_create_label(crispin_client, account_id, category_id): crispin_client.conn.create_folder(display_name) -def remote_update_label(crispin_client, account_id, category_id, old_name, - new_name): +def remote_update_label(crispin_client, account_id, category_id, old_name, new_name): crispin_client.conn.rename_folder(old_name, new_name) diff --git a/inbox/actions/base.py b/inbox/actions/base.py index b49624bb2..2104df49c 100644 --- a/inbox/actions/base.py +++ b/inbox/actions/base.py @@ -23,52 +23,58 @@ at-least-once semantics. 
""" -from inbox.actions.backends.generic import (set_remote_unread, - set_remote_starred, - remote_move, - remote_save_draft, - remote_update_draft, - remote_delete_draft, - remote_save_sent, - remote_create_folder, - remote_update_folder, - remote_delete_folder, - remote_delete_sent) -from inbox.actions.backends.gmail import (remote_change_labels, - remote_create_label, - remote_update_label, - remote_delete_label) +from inbox.actions.backends.generic import ( + set_remote_unread, + set_remote_starred, + remote_move, + remote_save_draft, + remote_update_draft, + remote_delete_draft, + remote_save_sent, + remote_create_folder, + remote_update_folder, + remote_delete_folder, + remote_delete_sent, +) +from inbox.actions.backends.gmail import ( + remote_change_labels, + remote_create_label, + remote_update_label, + remote_delete_label, +) from inbox.models import Message from inbox.models.session import session_scope from nylas.logging import get_logger + log = get_logger() def can_handle_multiple_records(action_name): - return action_name == 'change_labels' + return action_name == "change_labels" def mark_unread(crispin_client, account_id, message_id, args): - unread = args['unread'] + unread = args["unread"] set_remote_unread(crispin_client, account_id, message_id, unread) def mark_starred(crispin_client, account_id, message_id, args): - starred = args['starred'] + starred = args["starred"] set_remote_starred(crispin_client, account_id, message_id, starred) def move(crispin_client, account_id, message_id, args): - destination = args['destination'] + destination = args["destination"] remote_move(crispin_client, account_id, message_id, destination) def change_labels(crispin_client, account_id, message_ids, args): - added_labels = args['added_labels'] - removed_labels = args['removed_labels'] - remote_change_labels(crispin_client, account_id, message_ids, - removed_labels, added_labels) + added_labels = args["added_labels"] + removed_labels = args["removed_labels"] + remote_change_labels( + crispin_client, account_id, message_ids, removed_labels, added_labels + ) def create_folder(crispin_client, account_id, category_id): @@ -76,10 +82,9 @@ def create_folder(crispin_client, account_id, category_id): def update_folder(crispin_client, account_id, category_id, args): - old_name = args['old_name'] - new_name = args['new_name'] - remote_update_folder(crispin_client, account_id, category_id, - old_name, new_name) + old_name = args["old_name"] + new_name = args["new_name"] + remote_update_folder(crispin_client, account_id, category_id, old_name, new_name) def delete_folder(crispin_client, account_id, category_id): @@ -91,10 +96,9 @@ def create_label(crispin_client, account_id, category_id): def update_label(crispin_client, account_id, category_id, args): - old_name = args['old_name'] - new_name = args['new_name'] - remote_update_label(crispin_client, account_id, category_id, - old_name, new_name) + old_name = args["old_name"] + new_name = args["new_name"] + remote_update_label(crispin_client, account_id, category_id, old_name, new_name) def delete_label(crispin_client, account_id, category_id): @@ -105,18 +109,23 @@ def save_draft(crispin_client, account_id, message_id, args): """ Sync a new draft back to the remote backend. 
""" with session_scope(account_id) as db_session: message = db_session.query(Message).get(message_id) - version = args.get('version') + version = args.get("version") if message is None: - log.info('tried to save nonexistent message as draft', - message_id=message_id, account_id=account_id) + log.info( + "tried to save nonexistent message as draft", + message_id=message_id, + account_id=account_id, + ) return if not message.is_draft: - log.warning('tried to save non-draft message as draft', - message_id=message_id, - account_id=account_id) + log.warning( + "tried to save non-draft message as draft", + message_id=message_id, + account_id=account_id, + ) return if version != message.version: - log.warning('tried to save outdated version of draft') + log.warning("tried to save outdated version of draft") return remote_save_draft(crispin_client, account_id, message_id) @@ -126,24 +135,28 @@ def update_draft(crispin_client, account_id, message_id, args): """ Sync an updated draft back to the remote backend. """ with session_scope(account_id) as db_session: message = db_session.query(Message).get(message_id) - version = args.get('version') - old_message_id_header = args.get('old_message_id_header') + version = args.get("version") + old_message_id_header = args.get("old_message_id_header") if message is None: - log.info('tried to save nonexistent message as draft', - message_id=message_id, account_id=account_id) + log.info( + "tried to save nonexistent message as draft", + message_id=message_id, + account_id=account_id, + ) return if not message.is_draft: - log.warning('tried to save non-draft message as draft', - message_id=message_id, - account_id=account_id) + log.warning( + "tried to save non-draft message as draft", + message_id=message_id, + account_id=account_id, + ) return if version != message.version: - log.warning('tried to save outdated version of draft') + log.warning("tried to save outdated version of draft") return - remote_update_draft(crispin_client, account_id, message_id, - old_message_id_header) + remote_update_draft(crispin_client, account_id, message_id, old_message_id_header) def delete_draft(crispin_client, account_id, draft_id, args): @@ -153,11 +166,10 @@ def delete_draft(crispin_client, account_id, draft_id, args): "the backend. """ - nylas_uid = args.get('nylas_uid') - message_id_header = args.get('message_id_header') - assert nylas_uid or message_id_header, 'Need at least one header value' - remote_delete_draft(crispin_client, account_id, nylas_uid, - message_id_header) + nylas_uid = args.get("nylas_uid") + message_id_header = args.get("message_id_header") + assert nylas_uid or message_id_header, "Need at least one header value" + remote_delete_draft(crispin_client, account_id, nylas_uid, message_id_header) def save_sent_email(crispin_client, account_id, message_id): @@ -172,6 +184,6 @@ def delete_sent_email(crispin_client, account_id, message_id, args): """ Delete an email on the remote backend, in the sent folder. 
""" - message_id_header = args.get('message_id_header') - assert message_id_header, 'Need the message_id_header' + message_id_header = args.get("message_id_header") + assert message_id_header, "Need the message_id_header" remote_delete_sent(crispin_client, account_id, message_id_header) diff --git a/inbox/api/err.py b/inbox/api/err.py index 6eeaa3cba..5d69e1da1 100644 --- a/inbox/api/err.py +++ b/inbox/api/err.py @@ -4,13 +4,14 @@ from flask import jsonify, make_response, request from nylas.logging.sentry import sentry_alert from nylas.logging.log import get_logger, create_error_log_context + log = get_logger() from inbox.config import is_live_env def get_request_uid(headers): - return headers.get('X-Unique-ID') + return headers.get("X-Unique-ID") def log_exception(exc_info, send_to_sentry=True, **kwargs): @@ -33,12 +34,15 @@ def log_exception(exc_info, send_to_sentry=True, **kwargs): # guard against programming errors overriding log fields (confusing!) if set(new_log_context.keys()).intersection( - set(request.environ.get('log_context', {}))): - log.warning("attempt to log more than one error to HTTP request", - request_uid=get_request_uid(request.headers), - **new_log_context) + set(request.environ.get("log_context", {})) + ): + log.warning( + "attempt to log more than one error to HTTP request", + request_uid=get_request_uid(request.headers), + **new_log_context + ) else: - request.environ.setdefault('log_context', {}).update(new_log_context) + request.environ.setdefault("log_context", {}).update(new_log_context) class APIException(Exception): @@ -48,6 +52,7 @@ class APIException(Exception): class InputError(APIException): """Raised on invalid user input (missing required parameter, value too long, etc.)""" + status_code = 400 def __init__(self, message): @@ -57,6 +62,7 @@ def __init__(self, message): class NotFoundError(APIException): """Raised when a requested resource doesn't exist.""" + status_code = 404 def __init__(self, message): @@ -74,22 +80,29 @@ def __init__(self, message): class AccountInvalidError(APIException): """ Raised when an account's credentials are not valid. """ + status_code = 403 - message = "This action can't be performed because the account's " \ - "credentials are out of date. Please reauthenticate and try " \ - "again." + message = ( + "This action can't be performed because the account's " + "credentials are out of date. Please reauthenticate and try " + "again." + ) class AccountStoppedError(APIException): """ Raised when an account has been stopped. """ + status_code = 403 - message = "This action can't be performed because the account's sync " \ - "has been stopped. Please contact support@nylas.com to resume " \ - "sync." + message = ( + "This action can't be performed because the account's sync " + "has been stopped. Please contact support@nylas.com to resume " + "sync." + ) class AccountDoesNotExistError(APIException): """ Raised when an account does not exist (for example, if it was deleted). """ + status_code = 404 message = "The account does not exist." @@ -97,9 +110,6 @@ class AccountDoesNotExistError(APIException): def err(http_code, message, **kwargs): """ Handle unexpected errors, including sending the traceback to Sentry. 
""" log_exception(sys.exc_info(), user_error_message=message, **kwargs) - resp = { - 'type': 'api_error', - 'message': message - } + resp = {"type": "api_error", "message": message} resp.update(kwargs) return make_response(jsonify(resp), http_code) diff --git a/inbox/api/filtering.py b/inbox/api/filtering.py index 5e3587898..19e60524c 100644 --- a/inbox/api/filtering.py +++ b/inbox/api/filtering.py @@ -2,9 +2,20 @@ from sqlalchemy.orm import subqueryload, contains_eager from inbox.api.err import InputError from inbox.api.validation import valid_public_id -from inbox.models import (Contact, Event, EventContactAssociation, Calendar, - Message, MessageContactAssociation, Thread, - Block, Part, MessageCategory, Category, Metadata) +from inbox.models import ( + Contact, + Event, + EventContactAssociation, + Calendar, + Message, + MessageContactAssociation, + Thread, + Block, + Part, + MessageCategory, + Category, + Metadata, +) from inbox.models.event import RecurringEvent from inbox.sqlalchemy_ext.util import bakery from inbox.ignition import engine_manager @@ -12,24 +23,46 @@ def contact_subquery(db_session, namespace_id, email_address, field): - return db_session.query(Message.thread_id) \ - .join(MessageContactAssociation) \ - .join(Contact, MessageContactAssociation.contact_id == Contact.id)\ + return ( + db_session.query(Message.thread_id) + .join(MessageContactAssociation) + .join(Contact, MessageContactAssociation.contact_id == Contact.id) .filter( Contact.email_address == email_address, Contact.namespace_id == namespace_id, - MessageContactAssociation.field == field) \ + MessageContactAssociation.field == field, + ) .subquery() - - -def threads(namespace_id, subject, from_addr, to_addr, cc_addr, bcc_addr, - any_email, message_id_header, thread_public_id, started_before, - started_after, last_message_before, last_message_after, filename, - in_, unread, starred, limit, offset, view, db_session): - - if view == 'count': + ) + + +def threads( + namespace_id, + subject, + from_addr, + to_addr, + cc_addr, + bcc_addr, + any_email, + message_id_header, + thread_public_id, + started_before, + started_after, + last_message_before, + last_message_after, + filename, + in_, + unread, + starred, + limit, + offset, + view, + db_session, +): + + if view == "count": query = db_session.query(func.count(Thread.id)) - elif view == 'ids': + elif view == "ids": query = db_session.query(Thread.public_id) else: query = db_session.query(Thread) @@ -56,45 +89,48 @@ def threads(namespace_id, subject, from_addr, to_addr, cc_addr, bcc_addr, query = query.filter(*filters) if from_addr is not None: - from_query = contact_subquery(db_session, namespace_id, - from_addr, 'from_addr') + from_query = contact_subquery(db_session, namespace_id, from_addr, "from_addr") query = query.filter(Thread.id.in_(from_query)) if to_addr is not None: - to_query = contact_subquery(db_session, namespace_id, - to_addr, 'to_addr') + to_query = contact_subquery(db_session, namespace_id, to_addr, "to_addr") query = query.filter(Thread.id.in_(to_query)) if cc_addr is not None: - cc_query = contact_subquery(db_session, namespace_id, - cc_addr, 'cc_addr') + cc_query = contact_subquery(db_session, namespace_id, cc_addr, "cc_addr") query = query.filter(Thread.id.in_(cc_query)) if bcc_addr is not None: - bcc_query = contact_subquery(db_session, namespace_id, - bcc_addr, 'bcc_addr') + bcc_query = contact_subquery(db_session, namespace_id, bcc_addr, "bcc_addr") query = query.filter(Thread.id.in_(bcc_query)) if any_email is not None: - 
any_contact_query = db_session.query(Message.thread_id) \ - .join(MessageContactAssociation) \ - .join(Contact, MessageContactAssociation.contact_id == Contact.id)\ - .filter(Contact.email_address.in_(any_email), - Contact.namespace_id == namespace_id)\ + any_contact_query = ( + db_session.query(Message.thread_id) + .join(MessageContactAssociation) + .join(Contact, MessageContactAssociation.contact_id == Contact.id) + .filter( + Contact.email_address.in_(any_email), + Contact.namespace_id == namespace_id, + ) .subquery() + ) query = query.filter(Thread.id.in_(any_contact_query)) if message_id_header is not None: - message_id_query = db_session.query(Message.thread_id). \ - filter(Message.message_id_header == message_id_header) + message_id_query = db_session.query(Message.thread_id).filter( + Message.message_id_header == message_id_header + ) query = query.filter(Thread.id.in_(message_id_query)) if filename is not None: - files_query = db_session.query(Message.thread_id). \ - join(Part).join(Block). \ - filter(Block.filename == filename, - Block.namespace_id == namespace_id). \ - subquery() + files_query = ( + db_session.query(Message.thread_id) + .join(Part) + .join(Block) + .filter(Block.filename == filename, Block.namespace_id == namespace_id) + .subquery() + ) query = query.filter(Thread.id.in_(files_query)) if in_ is not None: @@ -104,33 +140,40 @@ def threads(namespace_id, subject, from_addr, to_addr, cc_addr, bcc_addr, category_filters.append(Category.public_id == in_) except InputError: pass - category_query = db_session.query(Message.thread_id). \ - prefix_with('STRAIGHT_JOIN'). \ - join(Message.messagecategories).join(MessageCategory.category). \ - filter(Category.namespace_id == namespace_id, - or_(*category_filters)).subquery() + category_query = ( + db_session.query(Message.thread_id) + .prefix_with("STRAIGHT_JOIN") + .join(Message.messagecategories) + .join(MessageCategory.category) + .filter(Category.namespace_id == namespace_id, or_(*category_filters)) + .subquery() + ) query = query.filter(Thread.id.in_(category_query)) if unread is not None: read = not unread - unread_query = db_session.query(Message.thread_id).filter( - Message.namespace_id == namespace_id, - Message.is_read == read).subquery() + unread_query = ( + db_session.query(Message.thread_id) + .filter(Message.namespace_id == namespace_id, Message.is_read == read) + .subquery() + ) query = query.filter(Thread.id.in_(unread_query)) if starred is not None: - starred_query = db_session.query(Message.thread_id).filter( - Message.namespace_id == namespace_id, - Message.is_starred == starred).subquery() + starred_query = ( + db_session.query(Message.thread_id) + .filter(Message.namespace_id == namespace_id, Message.is_starred == starred) + .subquery() + ) query = query.filter(Thread.id.in_(starred_query)) - if view == 'count': + if view == "count": return {"count": query.one()[0]} # Eager-load some objects in order to make constructing API # representations faster. 
- if view != 'ids': - expand = (view == 'expanded') + if view != "ids": + expand = view == "expanded" query = query.options(*Thread.api_loading_options(expand)) query = query.order_by(desc(Thread.recentdate)).limit(limit) @@ -138,18 +181,37 @@ def threads(namespace_id, subject, from_addr, to_addr, cc_addr, bcc_addr, if offset: query = query.offset(offset) - if view == 'ids': + if view == "ids": return [x[0] for x in query.all()] return query.all() -def messages_or_drafts(namespace_id, drafts, subject, from_addr, to_addr, - cc_addr, bcc_addr, any_email, thread_public_id, - started_before, started_after, last_message_before, - last_message_after, received_before, received_after, - filename, in_, unread, starred, limit, offset, view, - db_session): +def messages_or_drafts( + namespace_id, + drafts, + subject, + from_addr, + to_addr, + cc_addr, + bcc_addr, + any_email, + thread_public_id, + started_before, + started_after, + last_message_before, + last_message_after, + received_before, + received_after, + filename, + in_, + unread, + starred, + limit, + offset, + view, + db_session, +): # Warning: complexities ahead. This function sets up the query that gets # results for the /messages API. It loads from several tables, supports a # variety of views and filters, and is performance-critical for the API. As @@ -163,32 +225,32 @@ def messages_or_drafts(namespace_id, drafts, subject, from_addr, to_addr, # call query.spoil() on those code paths. param_dict = { - 'namespace_id': namespace_id, - 'drafts': drafts, - 'subject': subject, - 'from_addr': from_addr, - 'to_addr': to_addr, - 'cc_addr': cc_addr, - 'bcc_addr': bcc_addr, - 'any_email': any_email, - 'thread_public_id': thread_public_id, - 'received_before': received_before, - 'received_after': received_after, - 'started_before': started_before, - 'started_after': started_after, - 'last_message_before': last_message_before, - 'last_message_after': last_message_after, - 'filename': filename, - 'in_': in_, - 'unread': unread, - 'starred': starred, - 'limit': limit, - 'offset': offset + "namespace_id": namespace_id, + "drafts": drafts, + "subject": subject, + "from_addr": from_addr, + "to_addr": to_addr, + "cc_addr": cc_addr, + "bcc_addr": bcc_addr, + "any_email": any_email, + "thread_public_id": thread_public_id, + "received_before": received_before, + "received_after": received_after, + "started_before": started_before, + "started_after": started_after, + "last_message_before": last_message_before, + "last_message_after": last_message_after, + "filename": filename, + "in_": in_, + "unread": unread, + "starred": starred, + "limit": limit, + "offset": offset, } - if view == 'count': + if view == "count": query = bakery(lambda s: s.query(func.count(Message.id))) - elif view == 'ids': + elif view == "ids": query = bakery(lambda s: s.query(Message.public_id)) else: query = bakery(lambda s: s.query(Message)) @@ -199,127 +261,170 @@ def messages_or_drafts(namespace_id, drafts, subject, from_addr, to_addr, # the namespace_id, is_draft and then order by received_date. # For other "exotic" queries, we let the MySQL query planner # pick the right index. 
- if all(v is None for v in [subject, from_addr, to_addr, cc_addr, - bcc_addr, any_email, thread_public_id, - filename, in_, started_before, - started_after, last_message_before, - last_message_after]): + if all( + v is None + for v in [ + subject, + from_addr, + to_addr, + cc_addr, + bcc_addr, + any_email, + thread_public_id, + filename, + in_, + started_before, + started_after, + last_message_before, + last_message_after, + ] + ): query += lambda q: q.with_hint( Message, - 'FORCE INDEX (ix_message_ns_id_is_draft_received_date)', - 'mysql') + "FORCE INDEX (ix_message_ns_id_is_draft_received_date)", + "mysql", + ) query += lambda q: q.join(Thread, Message.thread_id == Thread.id) query += lambda q: q.filter( - Message.namespace_id == bindparam('namespace_id'), - Message.is_draft == bindparam('drafts'), - Thread.deleted_at == None) + Message.namespace_id == bindparam("namespace_id"), + Message.is_draft == bindparam("drafts"), + Thread.deleted_at == None, + ) if subject is not None: - query += lambda q: q.filter(Message.subject == bindparam('subject')) + query += lambda q: q.filter(Message.subject == bindparam("subject")) if unread is not None: - query += lambda q: q.filter(Message.is_read != bindparam('unread')) + query += lambda q: q.filter(Message.is_read != bindparam("unread")) if starred is not None: - query += lambda q: q.filter(Message.is_starred == bindparam('starred')) + query += lambda q: q.filter(Message.is_starred == bindparam("starred")) if thread_public_id is not None: - query += lambda q: q.filter( - Thread.public_id == bindparam('thread_public_id')) + query += lambda q: q.filter(Thread.public_id == bindparam("thread_public_id")) # TODO: deprecate thread-oriented date filters on message endpoints. if started_before is not None: query += lambda q: q.filter( - Thread.subjectdate < bindparam('started_before'), - Thread.namespace_id == bindparam('namespace_id')) + Thread.subjectdate < bindparam("started_before"), + Thread.namespace_id == bindparam("namespace_id"), + ) if started_after is not None: query += lambda q: q.filter( - Thread.subjectdate > bindparam('started_after'), - Thread.namespace_id == bindparam('namespace_id')) + Thread.subjectdate > bindparam("started_after"), + Thread.namespace_id == bindparam("namespace_id"), + ) if last_message_before is not None: query += lambda q: q.filter( - Thread.recentdate < bindparam('last_message_before'), - Thread.namespace_id == bindparam('namespace_id')) + Thread.recentdate < bindparam("last_message_before"), + Thread.namespace_id == bindparam("namespace_id"), + ) if last_message_after is not None: query += lambda q: q.filter( - Thread.recentdate > bindparam('last_message_after'), - Thread.namespace_id == bindparam('namespace_id')) + Thread.recentdate > bindparam("last_message_after"), + Thread.namespace_id == bindparam("namespace_id"), + ) if received_before is not None: query += lambda q: q.filter( - Message.received_date <= bindparam('received_before')) + Message.received_date <= bindparam("received_before") + ) if received_after is not None: - query += lambda q: q.filter( - Message.received_date > bindparam('received_after')) + query += lambda q: q.filter(Message.received_date > bindparam("received_after")) if to_addr is not None: query.spoil() - to_query = db_session.query(MessageContactAssociation.message_id) \ - .join(Contact, MessageContactAssociation.contact_id == Contact.id)\ + to_query = ( + db_session.query(MessageContactAssociation.message_id) + .join(Contact, MessageContactAssociation.contact_id == Contact.id) .filter( - 
MessageContactAssociation.field == 'to_addr', + MessageContactAssociation.field == "to_addr", Contact.email_address == to_addr, - Contact.namespace_id == bindparam('namespace_id')).subquery() + Contact.namespace_id == bindparam("namespace_id"), + ) + .subquery() + ) query += lambda q: q.filter(Message.id.in_(to_query)) if from_addr is not None: query.spoil() - from_query = db_session.query(MessageContactAssociation.message_id) \ - .join(Contact, MessageContactAssociation.contact_id == Contact.id)\ + from_query = ( + db_session.query(MessageContactAssociation.message_id) + .join(Contact, MessageContactAssociation.contact_id == Contact.id) .filter( - MessageContactAssociation.field == 'from_addr', + MessageContactAssociation.field == "from_addr", Contact.email_address == from_addr, - Contact.namespace_id == bindparam('namespace_id')).subquery() + Contact.namespace_id == bindparam("namespace_id"), + ) + .subquery() + ) query += lambda q: q.filter(Message.id.in_(from_query)) if cc_addr is not None: query.spoil() - cc_query = db_session.query(MessageContactAssociation.message_id) \ - .join(Contact, MessageContactAssociation.contact_id == Contact.id)\ + cc_query = ( + db_session.query(MessageContactAssociation.message_id) + .join(Contact, MessageContactAssociation.contact_id == Contact.id) .filter( - MessageContactAssociation.field == 'cc_addr', + MessageContactAssociation.field == "cc_addr", Contact.email_address == cc_addr, - Contact.namespace_id == bindparam('namespace_id')).subquery() + Contact.namespace_id == bindparam("namespace_id"), + ) + .subquery() + ) query += lambda q: q.filter(Message.id.in_(cc_query)) if bcc_addr is not None: query.spoil() - bcc_query = db_session.query(MessageContactAssociation.message_id) \ - .join(Contact, MessageContactAssociation.contact_id == Contact.id)\ + bcc_query = ( + db_session.query(MessageContactAssociation.message_id) + .join(Contact, MessageContactAssociation.contact_id == Contact.id) .filter( - MessageContactAssociation.field == 'bcc_addr', + MessageContactAssociation.field == "bcc_addr", Contact.email_address == bcc_addr, - Contact.namespace_id == bindparam('namespace_id')).subquery() + Contact.namespace_id == bindparam("namespace_id"), + ) + .subquery() + ) query += lambda q: q.filter(Message.id.in_(bcc_query)) if any_email is not None: query.spoil() - any_email_query = db_session.query( - MessageContactAssociation.message_id) \ - .join(Contact, MessageContactAssociation.contact_id == Contact.id)\ + any_email_query = ( + db_session.query(MessageContactAssociation.message_id) + .join(Contact, MessageContactAssociation.contact_id == Contact.id) .filter( Contact.email_address.in_(any_email), - Contact.namespace_id == bindparam('namespace_id')) \ + Contact.namespace_id == bindparam("namespace_id"), + ) .subquery() + ) query += lambda q: q.filter(Message.id.in_(any_email_query)) if filename is not None: - query += lambda q: q.join(Part).join(Block). 
\ - filter(Block.filename == bindparam('filename'), - Block.namespace_id == bindparam('namespace_id')) + query += ( + lambda q: q.join(Part) + .join(Block) + .filter( + Block.filename == bindparam("filename"), + Block.namespace_id == bindparam("namespace_id"), + ) + ) if in_ is not None: query.spoil() - category_filters = [Category.name == bindparam('in_'), - Category.display_name == bindparam('in_')] + category_filters = [ + Category.name == bindparam("in_"), + Category.display_name == bindparam("in_"), + ] try: valid_public_id(in_) - category_filters.append(Category.public_id == bindparam('in_id')) + category_filters.append(Category.public_id == bindparam("in_id")) # Type conversion and bindparams interact poorly -- you can't do # e.g. # query.filter(or_(Category.name == bindparam('in_'), @@ -327,24 +432,26 @@ def messages_or_drafts(namespace_id, drafts, subject, from_addr, to_addr, # because the binary conversion defined by Category.public_id will # be applied to the bound value prior to its insertion in the # query. So we define another bindparam for the public_id: - param_dict['in_id'] = in_ + param_dict["in_id"] = in_ except InputError: pass - query += lambda q: q.prefix_with('STRAIGHT_JOIN'). \ - join(Message.messagecategories).join(MessageCategory.category). \ - filter(Category.namespace_id == namespace_id, - or_(*category_filters)) - - if view == 'count': + query += ( + lambda q: q.prefix_with("STRAIGHT_JOIN") + .join(Message.messagecategories) + .join(MessageCategory.category) + .filter(Category.namespace_id == namespace_id, or_(*category_filters)) + ) + + if view == "count": res = query(db_session).params(**param_dict).one()[0] return {"count": res} query += lambda q: q.order_by(desc(Message.received_date)) - query += lambda q: q.limit(bindparam('limit')) + query += lambda q: q.limit(bindparam("limit")) if offset: - query += lambda q: q.offset(bindparam('offset')) + query += lambda q: q.offset(bindparam("offset")) - if view == 'ids': + if view == "ids": res = query(db_session).params(**param_dict).all() return [x[0] for x in res] @@ -354,21 +461,29 @@ def messages_or_drafts(namespace_id, drafts, subject, from_addr, to_addr, # thread table. We should eventually try to simplify this. 
query += lambda q: q.options( contains_eager(Message.thread), - subqueryload(Message.messagecategories).joinedload('category', - 'created_at'), + subqueryload(Message.messagecategories).joinedload("category", "created_at"), subqueryload(Message.parts).joinedload(Part.block), - subqueryload(Message.events)) + subqueryload(Message.events), + ) prepared = query(db_session).params(**param_dict) return prepared.all() -def files(namespace_id, message_public_id, filename, content_type, - limit, offset, view, db_session): +def files( + namespace_id, + message_public_id, + filename, + content_type, + limit, + offset, + view, + db_session, +): - if view == 'count': + if view == "count": query = db_session.query(func.count(Block.id)) - elif view == 'ids': + elif view == "ids": query = db_session.query(Block.public_id) else: query = db_session.query(Block) @@ -378,22 +493,24 @@ def files(namespace_id, message_public_id, filename, content_type, # limit to actual attachments (no content-disposition == not a real # attachment) query = query.outerjoin(Part) - query = query.filter(or_(Part.id.is_(None), - Part.content_disposition.isnot(None))) + query = query.filter(or_(Part.id.is_(None), Part.content_disposition.isnot(None))) if content_type is not None: - query = query.filter(or_(Block._content_type_common == content_type, - Block._content_type_other == content_type)) + query = query.filter( + or_( + Block._content_type_common == content_type, + Block._content_type_other == content_type, + ) + ) if filename is not None: query = query.filter(Block.filename == filename) # Handle the case of fetching attachments on a particular message. if message_public_id is not None: - query = query.join(Message) \ - .filter(Message.public_id == message_public_id) + query = query.join(Message).filter(Message.public_id == message_public_id) - if view == 'count': + if view == "count": return {"count": query.one()[0]} query = query.order_by(asc(Block.id)).distinct().limit(limit) @@ -401,46 +518,63 @@ def files(namespace_id, message_public_id, filename, content_type, if offset: query = query.offset(offset) - if view == 'ids': + if view == "ids": return [x[0] for x in query.all()] else: return query.all() -def filter_event_query(query, event_cls, namespace_id, event_public_id, - calendar_public_id, title, description, location, busy): +def filter_event_query( + query, + event_cls, + namespace_id, + event_public_id, + calendar_public_id, + title, + description, + location, + busy, +): query = query.filter(event_cls.namespace_id == namespace_id).filter( - event_cls.deleted_at == None) # noqa + event_cls.deleted_at == None + ) # noqa if event_public_id: query = query.filter(event_cls.public_id == event_public_id) if calendar_public_id is not None: - query = query.join(Calendar). 
\ - filter(Calendar.public_id == calendar_public_id, - Calendar.namespace_id == namespace_id) + query = query.join(Calendar).filter( + Calendar.public_id == calendar_public_id, + Calendar.namespace_id == namespace_id, + ) if title is not None: - query = query.filter(event_cls.title.like(u'%{}%'.format(title))) + query = query.filter(event_cls.title.like(u"%{}%".format(title))) if description is not None: - query = query.filter(event_cls.description.like(u'%{}%' - .format(description))) + query = query.filter(event_cls.description.like(u"%{}%".format(description))) if location is not None: - query = query.filter(event_cls.location.like(u'%{}%'.format(location))) + query = query.filter(event_cls.location.like(u"%{}%".format(location))) if busy is not None: query = query.filter(event_cls.busy == busy) - query = query.filter(event_cls.source == 'local') + query = query.filter(event_cls.source == "local") return query -def recurring_events(filters, starts_before, starts_after, ends_before, - ends_after, db_session, show_cancelled=False): +def recurring_events( + filters, + starts_before, + starts_after, + ends_before, + ends_after, + db_session, + show_cancelled=False, +): # Expands individual recurring events into full instances. # If neither starts_before or ends_before is given, the recurring range # defaults to now + 1 year (see events/recurring.py) @@ -449,7 +583,7 @@ def recurring_events(filters, starts_before, starts_after, ends_before, recur_query = filter_event_query(recur_query, RecurringEvent, *filters) if show_cancelled is False: - recur_query = recur_query.filter(RecurringEvent.status != 'cancelled') + recur_query = recur_query.filter(RecurringEvent.status != "cancelled") before_criteria = [] if starts_before: @@ -460,11 +594,13 @@ def recurring_events(filters, starts_before, starts_after, ends_before, recur_query = recur_query.filter(and_(*before_criteria)) after_criteria = [] if starts_after: - after_criteria.append(or_(RecurringEvent.until > starts_after, - RecurringEvent.until == None)) # noqa + after_criteria.append( + or_(RecurringEvent.until > starts_after, RecurringEvent.until == None) + ) # noqa if ends_after: - after_criteria.append(or_(RecurringEvent.until > ends_after, - RecurringEvent.until == None)) # noqa + after_criteria.append( + or_(RecurringEvent.until > ends_after, RecurringEvent.until == None) + ) # noqa recur_query = recur_query.filter(and_(*after_criteria)) @@ -482,22 +618,48 @@ def recurring_events(filters, starts_before, starts_after, ends_before, return recur_instances -def events(namespace_id, event_public_id, calendar_public_id, title, - description, location, busy, title_email, description_email, - owner_email, participant_email, any_email, starts_before, - starts_after, ends_before, ends_after, limit, offset, view, - expand_recurring, show_cancelled, db_session): +def events( + namespace_id, + event_public_id, + calendar_public_id, + title, + description, + location, + busy, + title_email, + description_email, + owner_email, + participant_email, + any_email, + starts_before, + starts_after, + ends_before, + ends_after, + limit, + offset, + view, + expand_recurring, + show_cancelled, + db_session, +): query = db_session.query(Event) if not expand_recurring: - if view == 'count': + if view == "count": query = db_session.query(func.count(Event.id)) - elif view == 'ids': + elif view == "ids": query = db_session.query(Event.public_id) - filters = [namespace_id, event_public_id, calendar_public_id, - title, description, location, busy] + filters = [ + 
namespace_id, + event_public_id, + calendar_public_id, + title, + description, + location, + busy, + ] query = filter_event_query(query, Event, *filters) event_criteria = [] @@ -516,7 +678,7 @@ def events(namespace_id, event_public_id, calendar_public_id, title, if not show_cancelled: if expand_recurring: - event_criteria.append(Event.status != 'cancelled') + event_criteria.append(Event.status != "cancelled") else: # It doesn't make sense to hide cancelled events # when we're not expanding recurring events, @@ -524,75 +686,102 @@ def events(namespace_id, event_public_id, calendar_public_id, title, # We still need to show cancelled recurringevents # for those users who want to do event expansion themselves. event_criteria.append( - (Event.discriminator == 'recurringeventoverride') | - ((Event.status != 'cancelled') & (Event.discriminator != - 'recurringeventoverride'))) + (Event.discriminator == "recurringeventoverride") + | ( + (Event.status != "cancelled") + & (Event.discriminator != "recurringeventoverride") + ) + ) if title_email is not None: - title_email_query = db_session.query(EventContactAssociation.event_id) \ - .join(Contact, EventContactAssociation.contact_id == Contact.id)\ - .filter(Contact.email_address == title_email, - Contact.namespace_id == namespace_id, - EventContactAssociation.field == 'title')\ + title_email_query = ( + db_session.query(EventContactAssociation.event_id) + .join(Contact, EventContactAssociation.contact_id == Contact.id) + .filter( + Contact.email_address == title_email, + Contact.namespace_id == namespace_id, + EventContactAssociation.field == "title", + ) .subquery() + ) event_criteria.append(Event.id.in_(title_email_query)) if description_email is not None: - description_email_query = db_session.query(EventContactAssociation.event_id) \ - .join(Contact, EventContactAssociation.contact_id == Contact.id)\ - .filter(Contact.email_address == description_email, - Contact.namespace_id == namespace_id, - EventContactAssociation.field == 'description')\ + description_email_query = ( + db_session.query(EventContactAssociation.event_id) + .join(Contact, EventContactAssociation.contact_id == Contact.id) + .filter( + Contact.email_address == description_email, + Contact.namespace_id == namespace_id, + EventContactAssociation.field == "description", + ) .subquery() + ) event_criteria.append(Event.id.in_(description_email_query)) if owner_email is not None: - owner_email_query = db_session.query(EventContactAssociation.event_id) \ - .join(Contact, EventContactAssociation.contact_id == Contact.id)\ - .filter(Contact.email_address == owner_email, - Contact.namespace_id == namespace_id, - EventContactAssociation.field == 'owner')\ + owner_email_query = ( + db_session.query(EventContactAssociation.event_id) + .join(Contact, EventContactAssociation.contact_id == Contact.id) + .filter( + Contact.email_address == owner_email, + Contact.namespace_id == namespace_id, + EventContactAssociation.field == "owner", + ) .subquery() + ) event_criteria.append(Event.id.in_(owner_email_query)) if participant_email is not None: - participant_email_query = db_session.query(EventContactAssociation.event_id) \ - .join(Contact, EventContactAssociation.contact_id == Contact.id)\ - .filter(Contact.email_address == participant_email, - Contact.namespace_id == namespace_id, - EventContactAssociation.field == 'participant')\ + participant_email_query = ( + db_session.query(EventContactAssociation.event_id) + .join(Contact, EventContactAssociation.contact_id == Contact.id) + .filter( + 
Contact.email_address == participant_email, + Contact.namespace_id == namespace_id, + EventContactAssociation.field == "participant", + ) .subquery() + ) event_criteria.append(Event.id.in_(participant_email_query)) if any_email is not None: - any_email_query = db_session.query(EventContactAssociation.event_id) \ - .join(Contact, EventContactAssociation.contact_id == Contact.id)\ - .filter(Contact.email_address == any_email, - Contact.namespace_id == namespace_id)\ + any_email_query = ( + db_session.query(EventContactAssociation.event_id) + .join(Contact, EventContactAssociation.contact_id == Contact.id) + .filter( + Contact.email_address == any_email, Contact.namespace_id == namespace_id + ) .subquery() + ) event_criteria.append(Event.id.in_(any_email_query)) event_predicate = and_(*event_criteria) query = query.filter(event_predicate) if expand_recurring: - expanded = recurring_events(filters, starts_before, starts_after, - ends_before, ends_after, db_session, - show_cancelled=show_cancelled) + expanded = recurring_events( + filters, + starts_before, + starts_after, + ends_before, + ends_after, + db_session, + show_cancelled=show_cancelled, + ) # Combine non-recurring events with expanded recurring ones - all_events = query.filter(Event.discriminator == 'event').all() + \ - expanded + all_events = query.filter(Event.discriminator == "event").all() + expanded - if view == 'count': + if view == "count": return {"count": len(all_events)} all_events = sorted(all_events, key=lambda e: e.start) if limit: offset = offset or 0 - all_events = all_events[offset:offset + limit] + all_events = all_events[offset : offset + limit] else: - if view == 'count': + if view == "count": return {"count": query.one()[0]} query = query.order_by(asc(Event.start)).limit(limit) if offset: @@ -601,22 +790,28 @@ def events(namespace_id, event_public_id, calendar_public_id, title, # representations faster. 
all_events = query.all() - if view == 'ids': + if view == "ids": return [x[0] for x in all_events] else: return all_events def messages_for_contact_scores(db_session, namespace_id, starts_after=None): - query = (db_session.query( - Message.to_addr, Message.cc_addr, Message.bcc_addr, - Message.id, Message.received_date.label('date')) + query = ( + db_session.query( + Message.to_addr, + Message.cc_addr, + Message.bcc_addr, + Message.id, + Message.received_date.label("date"), + ) .join(MessageCategory.message) .join(MessageCategory.category) .filter(Message.namespace_id == namespace_id) - .filter(Category.name == 'sent') + .filter(Category.name == "sent") .filter(~Message.is_draft) - .filter(Category.namespace_id == namespace_id)) + .filter(Category.namespace_id == namespace_id) + ) if starts_after: query = query.filter(Message.received_date > starts_after) @@ -624,23 +819,21 @@ def messages_for_contact_scores(db_session, namespace_id, starts_after=None): return query.all() -def metadata(namespace_id, app_id, view, limit, offset, - db_session): +def metadata(namespace_id, app_id, view, limit, offset, db_session): - if view == 'count': + if view == "count": query = db_session.query(func.count(Metadata.id)) - elif view == 'ids': + elif view == "ids": query = db_session.query(Metadata.object_public_id) else: query = db_session.query(Metadata) - filters = [Metadata.namespace_id == namespace_id, - Metadata.value.isnot(None)] + filters = [Metadata.namespace_id == namespace_id, Metadata.value.isnot(None)] if app_id is not None: filters.append(Metadata.app_id == app_id) query = query.filter(*filters) - if view == 'count': + if view == "count": return {"count": query.scalar()} query = query.order_by(desc(Metadata.id)).limit(limit) @@ -648,7 +841,7 @@ def metadata(namespace_id, app_id, view, limit, offset, if offset: query = query.offset(offset) - if view == 'ids': + if view == "ids": return [x[0] for x in query.all()] return query.all() @@ -656,7 +849,7 @@ def metadata(namespace_id, app_id, view, limit, offset, def metadata_for_app(app_id, limit, last, query_value, query_type, db_session): if app_id is None: - raise ValueError('Must specify an app_id') + raise ValueError("Must specify an app_id") query = db_session.query(Metadata).filter(Metadata.app_id == app_id) if last is not None: @@ -665,8 +858,9 @@ def metadata_for_app(app_id, limit, last, query_value, query_type, db_session): if query_type is not None: if query_type not in METADATA_QUERY_OPERATORS: raise ValueError( - 'Invalid query operator for metadata query_type. Must be ' - 'one of {}'.format(', '.join(METADATA_QUERY_OPERATORS.keys()))) + "Invalid query operator for metadata query_type. 
Must be " + "one of {}".format(", ".join(METADATA_QUERY_OPERATORS.keys())) + ) operator_filter = METADATA_QUERY_OPERATORS[query_type](query_value) query = query.filter(operator_filter) @@ -699,13 +893,14 @@ def page_over_shards(Model, cursor, limit, get_results=lambda q: q.all()): if latest_results: results.extend(latest_results) last = latest_results[-1] - if hasattr(last, 'id'): + if hasattr(last, "id"): next_cursor = last.id - elif 'id' in last: - next_cursor = last['id'] + elif "id" in last: + next_cursor = last["id"] else: - raise ValueError('Results returned from get_query must' - 'have an id') + raise ValueError( + "Results returned from get_query must" "have an id" + ) # Handle invalid ids cursor_implied_shard = next_cursor >> 48 @@ -715,11 +910,12 @@ def page_over_shards(Model, cursor, limit, get_results=lambda q: q.all()): remaining_limit -= len(latest_results) return results, str(next_cursor) + METADATA_QUERY_OPERATORS = { - '>': lambda v: Metadata.queryable_value > v, - '>=': lambda v: Metadata.queryable_value >= v, - '<': lambda v: Metadata.queryable_value < v, - '<=': lambda v: Metadata.queryable_value <= v, - '==': lambda v: Metadata.queryable_value == v, - '!=': lambda v: Metadata.queryable_value != v, + ">": lambda v: Metadata.queryable_value > v, + ">=": lambda v: Metadata.queryable_value >= v, + "<": lambda v: Metadata.queryable_value < v, + "<=": lambda v: Metadata.queryable_value <= v, + "==": lambda v: Metadata.queryable_value == v, + "!=": lambda v: Metadata.queryable_value != v, } diff --git a/inbox/api/kellogs.py b/inbox/api/kellogs.py index 97adbb846..f0bb18b3b 100644 --- a/inbox/api/kellogs.py +++ b/inbox/api/kellogs.py @@ -4,45 +4,64 @@ from json import JSONEncoder, dumps from flask import Response -from inbox.models import (Message, Contact, Calendar, Event, When, - Thread, Namespace, Block, Category, Account, - Metadata) -from inbox.models.event import (RecurringEvent, RecurringEventOverride, - InflatedEvent) +from inbox.models import ( + Message, + Contact, + Calendar, + Event, + When, + Thread, + Namespace, + Block, + Category, + Account, + Metadata, +) +from inbox.models.event import RecurringEvent, RecurringEventOverride, InflatedEvent from nylas.logging import get_logger + log = get_logger() def format_address_list(addresses): if addresses is None: return [] - return [{'name': name, 'email': email} for name, email in addresses] + return [{"name": name, "email": email} for name, email in addresses] def format_categories(categories): if categories is None: return [] - return [{'id': category.public_id, 'name': category.name or None, - 'display_name': category.api_display_name} for category in - categories] + return [ + { + "id": category.public_id, + "name": category.name or None, + "display_name": category.api_display_name, + } + for category in categories + ] def format_messagecategories(messagecategories): if messagecategories is None: return [] - return [{'id': mc.category.public_id, 'name': mc.category.name or None, - 'display_name': mc.category.api_display_name, - 'created_timestamp': mc.created_at} for mc in - messagecategories] + return [ + { + "id": mc.category.public_id, + "name": mc.category.name or None, + "display_name": mc.category.api_display_name, + "created_timestamp": mc.created_at, + } + for mc in messagecategories + ] def format_phone_numbers(phone_numbers): formatted_phone_numbers = [] for number in phone_numbers: - formatted_phone_numbers.append({ - 'type': number.type, - 'number': number.number, - }) + formatted_phone_numbers.append( 
+ {"type": number.type, "number": number.number,} + ) return formatted_phone_numbers @@ -54,7 +73,7 @@ def encode(obj, namespace_public_id=None, expand=False, is_n1=False): "id": getattr(obj, "id", None), "cls": str(getattr(obj, "__class__", None)), "exception": e, - "exc_info": True + "exc_info": True, } log.error("object encoding failure", **error_context) @@ -80,6 +99,7 @@ def _encode(obj, namespace_public_id=None, expand=False, is_n1=False): dictionary or None """ + def _get_namespace_public_id(obj): return namespace_public_id or obj.namespace.public_id @@ -88,7 +108,7 @@ def _format_participant_data(participant): This function returns a dict with only the data we want to make public.""" dct = {} - for attribute in ['name', 'status', 'email', 'comment']: + for attribute in ["name", "status", "email", "comment"]: dct[attribute] = participant.get(attribute) return dct @@ -110,25 +130,25 @@ def _get_lowercase_class_name(obj): if isinstance(obj, Namespace): # These are now "accounts" acc_state = obj.account.sync_state if acc_state is None: - acc_state = 'running' + acc_state = "running" - if is_n1 and acc_state not in ['running', 'invalid']: - acc_state = 'running' + if is_n1 and acc_state not in ["running", "invalid"]: + acc_state = "running" resp = { - 'id': obj.public_id, - 'object': 'account', - 'account_id': obj.public_id, - 'email_address': obj.account.email_address if obj.account else '', - 'name': obj.account.name, - 'provider': obj.account.provider, - 'organization_unit': obj.account.category_type, - 'sync_state': acc_state + "id": obj.public_id, + "object": "account", + "account_id": obj.public_id, + "email_address": obj.account.email_address if obj.account else "", + "name": obj.account.name, + "provider": obj.account.provider, + "organization_unit": obj.account.category_type, + "sync_state": acc_state, } # Gmail accounts do not set the `server_settings` if expand and obj.account.server_settings: - resp['server_settings'] = obj.account.server_settings + resp["server_settings"] = obj.account.server_settings return resp elif isinstance(obj, Account): @@ -140,77 +160,76 @@ def _get_lowercase_class_name(obj): thread_public_id = obj.thread.public_id resp = { - 'id': obj.public_id, - 'object': 'message', - 'account_id': _get_namespace_public_id(obj), - 'subject': obj.subject, - 'from': format_address_list(obj.from_addr), - 'reply_to': format_address_list(obj.reply_to), - 'to': format_address_list(obj.to_addr), - 'cc': format_address_list(obj.cc_addr), - 'bcc': format_address_list(obj.bcc_addr), - 'date': obj.received_date, - 'thread_id': thread_public_id, - 'snippet': obj.snippet, - 'body': obj.body, - 'unread': not obj.is_read, - 'starred': obj.is_starred, - 'files': obj.api_attachment_metadata, - 'events': [encode(e) for e in obj.events] + "id": obj.public_id, + "object": "message", + "account_id": _get_namespace_public_id(obj), + "subject": obj.subject, + "from": format_address_list(obj.from_addr), + "reply_to": format_address_list(obj.reply_to), + "to": format_address_list(obj.to_addr), + "cc": format_address_list(obj.cc_addr), + "bcc": format_address_list(obj.bcc_addr), + "date": obj.received_date, + "thread_id": thread_public_id, + "snippet": obj.snippet, + "body": obj.body, + "unread": not obj.is_read, + "starred": obj.is_starred, + "files": obj.api_attachment_metadata, + "events": [encode(e) for e in obj.events], } categories = format_messagecategories(obj.messagecategories) - if obj.namespace.account.category_type == 'folder': - resp['folder'] = categories[0] if categories 
else None + if obj.namespace.account.category_type == "folder": + resp["folder"] = categories[0] if categories else None else: - resp['labels'] = categories + resp["labels"] = categories # If the message is a draft (Nylas-created or otherwise): if obj.is_draft: - resp['object'] = 'draft' - resp['version'] = obj.version + resp["object"] = "draft" + resp["version"] = obj.version if obj.reply_to_message is not None: - resp['reply_to_message_id'] = obj.reply_to_message.public_id + resp["reply_to_message_id"] = obj.reply_to_message.public_id else: - resp['reply_to_message_id'] = None + resp["reply_to_message_id"] = None if expand: - resp['headers'] = { - 'Message-Id': obj.message_id_header, - 'In-Reply-To': obj.in_reply_to, - 'References': obj.references + resp["headers"] = { + "Message-Id": obj.message_id_header, + "In-Reply-To": obj.in_reply_to, + "References": obj.references, } return resp elif isinstance(obj, Thread): base = { - 'id': obj.public_id, - 'object': 'thread', - 'account_id': _get_namespace_public_id(obj), - 'subject': obj.subject, - 'participants': format_address_list(obj.participants), - 'last_message_timestamp': obj.recentdate, - 'last_message_received_timestamp': obj.most_recent_received_date, - 'last_message_sent_timestamp': obj.most_recent_sent_date, - 'first_message_timestamp': obj.subjectdate, - 'snippet': obj.snippet, - 'unread': obj.unread, - 'starred': obj.starred, - 'has_attachments': obj.has_attachments, - 'version': obj.version, + "id": obj.public_id, + "object": "thread", + "account_id": _get_namespace_public_id(obj), + "subject": obj.subject, + "participants": format_address_list(obj.participants), + "last_message_timestamp": obj.recentdate, + "last_message_received_timestamp": obj.most_recent_received_date, + "last_message_sent_timestamp": obj.most_recent_sent_date, + "first_message_timestamp": obj.subjectdate, + "snippet": obj.snippet, + "unread": obj.unread, + "starred": obj.starred, + "has_attachments": obj.has_attachments, + "version": obj.version, } categories = format_categories(obj.categories) - if obj.namespace.account.category_type == 'folder': - base['folders'] = categories + if obj.namespace.account.category_type == "folder": + base["folders"] = categories else: - base['labels'] = categories + base["labels"] = categories if not expand: - base['message_ids'] = \ - [m.public_id for m in obj.messages if not m.is_draft] - base['draft_ids'] = [m.public_id for m in obj.drafts] + base["message_ids"] = [m.public_id for m in obj.messages if not m.is_draft] + base["draft_ids"] = [m.public_id for m in obj.drafts] return base # Expand messages within threads @@ -218,165 +237,166 @@ def _get_lowercase_class_name(obj): all_expanded_drafts = [] for msg in obj.messages: resp = { - 'id': msg.public_id, - 'object': 'message', - 'account_id': _get_namespace_public_id(msg), - 'subject': msg.subject, - 'from': format_address_list(msg.from_addr), - 'reply_to': format_address_list(msg.reply_to), - 'to': format_address_list(msg.to_addr), - 'cc': format_address_list(msg.cc_addr), - 'bcc': format_address_list(msg.bcc_addr), - 'date': msg.received_date, - 'thread_id': obj.public_id, - 'snippet': msg.snippet, - 'unread': not msg.is_read, - 'starred': msg.is_starred, - 'files': msg.api_attachment_metadata + "id": msg.public_id, + "object": "message", + "account_id": _get_namespace_public_id(msg), + "subject": msg.subject, + "from": format_address_list(msg.from_addr), + "reply_to": format_address_list(msg.reply_to), + "to": format_address_list(msg.to_addr), + "cc": 
format_address_list(msg.cc_addr), + "bcc": format_address_list(msg.bcc_addr), + "date": msg.received_date, + "thread_id": obj.public_id, + "snippet": msg.snippet, + "unread": not msg.is_read, + "starred": msg.is_starred, + "files": msg.api_attachment_metadata, } - resp['headers'] = { - 'Message-Id': msg.message_id_header, - 'In-Reply-To': msg.in_reply_to, - 'References': msg.references + resp["headers"] = { + "Message-Id": msg.message_id_header, + "In-Reply-To": msg.in_reply_to, + "References": msg.references, } categories = format_messagecategories(msg.messagecategories) - if obj.namespace.account.category_type == 'folder': - resp['folder'] = categories[0] if categories else None + if obj.namespace.account.category_type == "folder": + resp["folder"] = categories[0] if categories else None else: - resp['labels'] = categories + resp["labels"] = categories if msg.is_draft: - resp['object'] = 'draft' - resp['version'] = msg.version + resp["object"] = "draft" + resp["version"] = msg.version if msg.reply_to_message is not None: - resp['reply_to_message_id'] = \ - msg.reply_to_message.public_id + resp["reply_to_message_id"] = msg.reply_to_message.public_id else: - resp['reply_to_message_id'] = None + resp["reply_to_message_id"] = None all_expanded_drafts.append(resp) else: all_expanded_messages.append(resp) - base['messages'] = all_expanded_messages - base['drafts'] = all_expanded_drafts + base["messages"] = all_expanded_messages + base["drafts"] = all_expanded_drafts return base elif isinstance(obj, Contact): return { - 'id': obj.public_id, - 'object': 'contact', - 'account_id': _get_namespace_public_id(obj), - 'name': obj.name, - 'email': obj.email_address, - 'phone_numbers': format_phone_numbers(obj.phone_numbers) + "id": obj.public_id, + "object": "contact", + "account_id": _get_namespace_public_id(obj), + "name": obj.name, + "email": obj.email_address, + "phone_numbers": format_phone_numbers(obj.phone_numbers), } elif isinstance(obj, Event): resp = { - 'id': obj.public_id, - 'object': 'event', - 'account_id': _get_namespace_public_id(obj), - 'calendar_id': obj.calendar.public_id if obj.calendar else None, - 'message_id': obj.message.public_id if obj.message else None, - 'title': obj.title, - 'email_addresses_from_title': obj.emails_from_title, - 'description': obj.description, - 'email_addresses_from_description': obj.emails_from_description, - 'owner': obj.owner, - 'is_owner': obj.is_owner, - 'participants': [_format_participant_data(participant) - for participant in obj.participants], - 'read_only': obj.read_only, - 'location': obj.location, - 'when': encode(obj.when), - 'busy': obj.busy, - 'status': obj.status, - 'visibility': obj.visibility, - 'uid': obj.uid, - 'calendar_event_link': obj.calendar_event_link, + "id": obj.public_id, + "object": "event", + "account_id": _get_namespace_public_id(obj), + "calendar_id": obj.calendar.public_id if obj.calendar else None, + "message_id": obj.message.public_id if obj.message else None, + "title": obj.title, + "email_addresses_from_title": obj.emails_from_title, + "description": obj.description, + "email_addresses_from_description": obj.emails_from_description, + "owner": obj.owner, + "is_owner": obj.is_owner, + "participants": [ + _format_participant_data(participant) + for participant in obj.participants + ], + "read_only": obj.read_only, + "location": obj.location, + "when": encode(obj.when), + "busy": obj.busy, + "status": obj.status, + "visibility": obj.visibility, + "uid": obj.uid, + "calendar_event_link": obj.calendar_event_link, } if 
isinstance(obj, RecurringEvent): - resp['recurrence'] = { - 'rrule': obj.recurring, - 'timezone': obj.start_timezone + resp["recurrence"] = { + "rrule": obj.recurring, + "timezone": obj.start_timezone, } if isinstance(obj, RecurringEventOverride): - resp['original_start_time'] = encode(obj.original_start_time) - resp['master_event_uid'] = obj.master_event_uid + resp["original_start_time"] = encode(obj.original_start_time) + resp["master_event_uid"] = obj.master_event_uid if obj.master: - resp['master_event_id'] = obj.master.public_id + resp["master_event_id"] = obj.master.public_id if isinstance(obj, InflatedEvent): - del resp['message_id'] + del resp["message_id"] if obj.master: - resp['master_event_id'] = obj.master.public_id + resp["master_event_id"] = obj.master.public_id if obj.master.calendar: - resp['calendar_id'] = obj.master.calendar.public_id + resp["calendar_id"] = obj.master.calendar.public_id return resp elif isinstance(obj, Calendar): return { - 'id': obj.public_id, - 'object': 'calendar', - 'account_id': _get_namespace_public_id(obj), - 'name': obj.name, - 'description': obj.description, - 'read_only': obj.read_only, - 'uid': obj.uid, + "id": obj.public_id, + "object": "calendar", + "account_id": _get_namespace_public_id(obj), + "name": obj.name, + "description": obj.description, + "read_only": obj.read_only, + "uid": obj.uid, } elif isinstance(obj, When): # Get time dictionary e.g. 'start_time': x, 'end_time': y or 'date': z times = obj.get_time_dict() resp = {k: encode(v) for k, v in times.iteritems()} - resp['object'] = _get_lowercase_class_name(obj) + resp["object"] = _get_lowercase_class_name(obj) return resp elif isinstance(obj, Block): # ie: Attachments/Files resp = { - 'id': obj.public_id, - 'object': 'file', - 'account_id': _get_namespace_public_id(obj), - 'content_type': obj.content_type, - 'size': obj.size, - 'filename': obj.filename, + "id": obj.public_id, + "object": "file", + "account_id": _get_namespace_public_id(obj), + "content_type": obj.content_type, + "size": obj.size, + "filename": obj.filename, } if len(obj.parts): # if obj is actually a message attachment (and not merely an # uploaded file), set additional properties - resp.update({ - 'message_ids': [p.message.public_id for p in obj.parts]}) + resp.update({"message_ids": [p.message.public_id for p in obj.parts]}) - content_ids = list({p.content_id for p in obj.parts - if p.content_id is not None}) + content_ids = list( + {p.content_id for p in obj.parts if p.content_id is not None} + ) content_id = None if len(content_ids) > 0: content_id = content_ids[0] - resp.update({'content_id': content_id}) + resp.update({"content_id": content_id}) return resp elif isinstance(obj, Category): # 'object' is set to 'folder' or 'label' resp = { - 'id': obj.public_id, - 'object': obj.type, - 'account_id': _get_namespace_public_id(obj), - 'name': obj.name or None, - 'display_name': obj.api_display_name + "id": obj.public_id, + "object": obj.type, + "account_id": _get_namespace_public_id(obj), + "name": obj.name or None, + "display_name": obj.api_display_name, } return resp elif isinstance(obj, Metadata): resp = { - 'id': obj.public_id, - 'account_id': _get_namespace_public_id(obj), - 'application_id': obj.app_client_id, - 'object_type': obj.object_type, - 'object_id': obj.object_public_id, - 'version': obj.version, - 'value': obj.value + "id": obj.public_id, + "account_id": _get_namespace_public_id(obj), + "application_id": obj.app_client_id, + "object_type": obj.object_type, + "object_id": obj.object_public_id, + 
"version": obj.version, + "value": obj.value, } return resp @@ -398,19 +418,21 @@ class APIEncoder(object): """ def __init__(self, namespace_public_id=None, expand=False, is_n1=False): - self.encoder_class = self._encoder_factory(namespace_public_id, expand, is_n1=is_n1) + self.encoder_class = self._encoder_factory( + namespace_public_id, expand, is_n1=is_n1 + ) def _encoder_factory(self, namespace_public_id, expand, is_n1=False): class InternalEncoder(JSONEncoder): - def default(self, obj): - custom_representation = encode(obj, - namespace_public_id, - expand=expand, is_n1=is_n1) + custom_representation = encode( + obj, namespace_public_id, expand=expand, is_n1=is_n1 + ) if custom_representation is not None: return custom_representation # Let the base class default method raise the TypeError return JSONEncoder.default(self, obj) + return InternalEncoder def cereal(self, obj, pretty=False): @@ -430,11 +452,13 @@ def cereal(self, obj, pretty=False): """ if pretty: - return dumps(obj, - sort_keys=True, - indent=4, - separators=(',', ': '), - cls=self.encoder_class) + return dumps( + obj, + sort_keys=True, + indent=4, + separators=(",", ": "), + cls=self.encoder_class, + ) return dumps(obj, cls=self.encoder_class) def jsonify(self, obj): @@ -452,5 +476,4 @@ def jsonify(self, obj): If obj is not serializable. """ - return Response(self.cereal(obj, pretty=True), - mimetype='application/json') + return Response(self.cereal(obj, pretty=True), mimetype="application/json") diff --git a/inbox/api/metrics_api.py b/inbox/api/metrics_api.py index b5db1e213..a759df4b1 100644 --- a/inbox/api/metrics_api.py +++ b/inbox/api/metrics_api.py @@ -14,10 +14,7 @@ from inbox.models.session import global_session_scope -app = Blueprint( - 'metrics_api', - __name__, - url_prefix='/metrics') +app = Blueprint("metrics_api", __name__, url_prefix="/metrics") def _get_calendar_data(db_session, namespace): @@ -28,7 +25,8 @@ def _get_calendar_data(db_session, namespace): calendars = calendars.options( joinedload(Calendar.namespace) .load_only(Namespace.account_id) - .noload(Namespace.account)) + .noload(Namespace.account) + ) calendar_data = defaultdict(list) for calendar in calendars: @@ -37,16 +35,18 @@ def _get_calendar_data(db_session, namespace): state = None if calendar.can_sync(): if calendar.last_synced: - state = 'running' + state = "running" else: - state = 'initial' + state = "initial" - calendar_data[account_id].append({ - 'uid': calendar.uid, - 'name': calendar.name, - 'last_synced': calendar.last_synced, - 'state': state, - }) + calendar_data[account_id].append( + { + "uid": calendar.uid, + "name": calendar.name, + "last_synced": calendar.last_synced, + "state": state, + } + ) return calendar_data @@ -57,13 +57,14 @@ def _get_folder_data(db_session, accounts): # metrics for a specific account, and 2) fetching metrics for all accounts. 
if len(accounts) == 1: folder_sync_statuses = folder_sync_statuses.filter( - ImapFolderSyncStatus.account_id==accounts[0].id) + ImapFolderSyncStatus.account_id == accounts[0].id + ) folder_sync_statuses = folder_sync_statuses.join(Folder).with_entities( ImapFolderSyncStatus.account_id, ImapFolderSyncStatus.folder_id, Folder.name, ImapFolderSyncStatus.state, - ImapFolderSyncStatus._metrics + ImapFolderSyncStatus._metrics, ) folder_data = defaultdict(dict) @@ -71,40 +72,44 @@ def _get_folder_data(db_session, accounts): for folder_sync_status in folder_sync_statuses: account_id, folder_id, folder_name, state, metrics = folder_sync_status folder_data[account_id][folder_id] = { - 'remote_uid_count': metrics.get('remote_uid_count'), - 'download_uid_count': metrics.get('download_uid_count'), - 'state': state, - 'name': folder_name, - 'alive': False, - 'heartbeat_at': None, - 'run_state': metrics.get('run_state'), - 'sync_error': metrics.get('sync_error'), + "remote_uid_count": metrics.get("remote_uid_count"), + "download_uid_count": metrics.get("download_uid_count"), + "state": state, + "name": folder_name, + "alive": False, + "heartbeat_at": None, + "run_state": metrics.get("run_state"), + "sync_error": metrics.get("sync_error"), } return folder_data -@app.route('/') +@app.route("/") def index(): with global_session_scope() as db_session: - if 'namespace_id' in request.args: + if "namespace_id" in request.args: try: - namespace = db_session.query(Namespace).filter( - Namespace.public_id == request.args['namespace_id']).one() + namespace = ( + db_session.query(Namespace) + .filter(Namespace.public_id == request.args["namespace_id"]) + .one() + ) except NoResultFound: return APIEncoder().jsonify([]) else: namespace = None - accounts = db_session.query(ImapAccount) \ - .with_polymorphic([GenericAccount]) + accounts = db_session.query(ImapAccount).with_polymorphic([GenericAccount]) if namespace: accounts = accounts.filter(Account.namespace == namespace) else: # Get all account IDs that aren't deleted - account_ids = [result[0] for result in - db_session.query(ImapAccount.id, ImapAccount._sync_status) - if result[1].get('sync_disabled_reason') != 'account deleted'] + account_ids = [ + result[0] + for result in db_session.query(ImapAccount.id, ImapAccount._sync_status) + if result[1].get("sync_disabled_reason") != "account deleted" + ] # This is faster than fetching all accounts. 
accounts = accounts.filter(ImapAccount.id.in_(account_ids)) @@ -128,14 +133,16 @@ def index(): for folder_status in account_heartbeat.folders: folder_status_id = int(folder_status.id) if folder_status_id in account_folder_data: - account_folder_data[folder_status_id].update({ - 'alive': folder_status.alive, - 'heartbeat_at': folder_status.timestamp - }) + account_folder_data[folder_status_id].update( + { + "alive": folder_status.alive, + "heartbeat_at": folder_status.timestamp, + } + ) elif folder_status_id == EVENT_SYNC_FOLDER_ID: events_alive = folder_status.alive - email_alive = all(f['alive'] for f in account_folder_data.values()) + email_alive = all(f["alive"] for f in account_folder_data.values()) alive = True if account.sync_email and not email_alive: @@ -143,16 +150,22 @@ def index(): if account.sync_events and not events_alive: alive = False - email_initial_sync = any(f['state'] == 'initial' - for f in account_folder_data.values()) - events_initial_sync = any(c['state'] == 'initial' - for c in account_calendar_data) + email_initial_sync = any( + f["state"] == "initial" for f in account_folder_data.values() + ) + events_initial_sync = any( + c["state"] == "initial" for c in account_calendar_data + ) initial_sync = email_initial_sync or events_initial_sync - total_uids = sum(f['remote_uid_count'] or 0 for f in account_folder_data.values()) - remaining_uids = sum(f['download_uid_count'] or 0 for f in account_folder_data.values()) + total_uids = sum( + f["remote_uid_count"] or 0 for f in account_folder_data.values() + ) + remaining_uids = sum( + f["download_uid_count"] or 0 for f in account_folder_data.values() + ) if total_uids: - progress = 100. / total_uids * (total_uids - remaining_uids) + progress = 100.0 / total_uids * (total_uids - remaining_uids) else: progress = None else: @@ -163,53 +176,63 @@ def index(): progress = None sync_status = account.sync_status - is_running = sync_status['state'] == 'running' - if is_running and not sync_status.get('sync_start_time') and not sync_status.get('sync_error'): - sync_status_str = 'starting' + is_running = sync_status["state"] == "running" + if ( + is_running + and not sync_status.get("sync_start_time") + and not sync_status.get("sync_error") + ): + sync_status_str = "starting" elif is_running and alive: if initial_sync: - sync_status_str = 'initial' + sync_status_str = "initial" else: - sync_status_str = 'running' + sync_status_str = "running" elif is_running: # Nylas is syncing, but not all heartbeats are reporting. - sync_status_str = 'delayed' + sync_status_str = "delayed" else: # Nylas is no longer syncing this account. 
- sync_status_str = 'dead' - - data.append({ - 'account_private_id': account.id, - 'namespace_private_id': account.namespace.id, - 'account_id': account.public_id, - 'namespace_id': account.namespace.public_id, - 'events_alive': events_alive, - 'email_alive': email_alive, - 'alive': alive, - 'email_initial_sync': email_initial_sync, - 'events_initial_sync': events_initial_sync, - 'initial_sync': initial_sync, - 'provider_name': account.provider, - 'email_address': account.email_address, - 'folders': sorted(folder_data[account.id].values(), key=itemgetter('name')), - 'calendars': sorted(calendar_data[account.id], key=itemgetter('name')), - 'sync_email': account.sync_email, - 'sync_events': account.sync_events, - 'sync_status': sync_status_str, - 'sync_error': sync_status.get('sync_error'), - 'sync_end_time': sync_status.get('sync_end_time'), - 'sync_disabled_reason': sync_status.get('sync_disabled_reason'), - 'sync_host': account.sync_host, - 'progress': progress, - 'throttled': account.throttled, - 'created_at': account.created_at, - 'updated_at': account.updated_at, - }) + sync_status_str = "dead" + + data.append( + { + "account_private_id": account.id, + "namespace_private_id": account.namespace.id, + "account_id": account.public_id, + "namespace_id": account.namespace.public_id, + "events_alive": events_alive, + "email_alive": email_alive, + "alive": alive, + "email_initial_sync": email_initial_sync, + "events_initial_sync": events_initial_sync, + "initial_sync": initial_sync, + "provider_name": account.provider, + "email_address": account.email_address, + "folders": sorted( + folder_data[account.id].values(), key=itemgetter("name") + ), + "calendars": sorted( + calendar_data[account.id], key=itemgetter("name") + ), + "sync_email": account.sync_email, + "sync_events": account.sync_events, + "sync_status": sync_status_str, + "sync_error": sync_status.get("sync_error"), + "sync_end_time": sync_status.get("sync_end_time"), + "sync_disabled_reason": sync_status.get("sync_disabled_reason"), + "sync_host": account.sync_host, + "progress": progress, + "throttled": account.throttled, + "created_at": account.created_at, + "updated_at": account.updated_at, + } + ) return APIEncoder().jsonify(data) -@app.route('/global-deltas') +@app.route("/global-deltas") def global_deltas(): """ Return the namespaces with recent transactions. 
@@ -220,23 +243,24 @@ def global_deltas(): """ from inbox.ignition import redis_txn from inbox.models.transaction import TXN_REDIS_KEY - txnid = request.args.get('txnid', '0') + + txnid = request.args.get("txnid", "0") try: start_pointer = int(txnid) except ValueError: - raise InputError('Invalid cursor parameter') + raise InputError("Invalid cursor parameter") txns = redis_txn.zrangebyscore( TXN_REDIS_KEY, - '({}'.format(start_pointer), # don't include start pointer + "({}".format(start_pointer), # don't include start pointer "+inf", withscores=True, score_cast_func=int, ) response = { - 'txnid_start': start_pointer, - 'txnid_end': max([t[1] for t in txns] or [start_pointer]), - 'deltas': [t[0] for t in txns], + "txnid_start": start_pointer, + "txnid_end": max([t[1] for t in txns] or [start_pointer]), + "deltas": [t[0] for t in txns], } return APIEncoder().jsonify(response) diff --git a/inbox/api/ns_api.py b/inbox/api/ns_api.py index 09853d5e3..c202a05ae 100644 --- a/inbox/api/ns_api.py +++ b/inbox/api/ns_api.py @@ -10,8 +10,7 @@ from datetime import datetime from collections import namedtuple -from flask import (request, g, Blueprint, make_response, Response, - stream_with_context) +from flask import request, g, Blueprint, make_response, Response, stream_with_context from flask import jsonify as flask_jsonify from flask.ext.restful import reqparse from sqlalchemy import asc, func @@ -19,43 +18,90 @@ from sqlalchemy.orm.exc import NoResultFound from sqlalchemy.orm import joinedload, load_only -from inbox.models import (Message, Block, Part, Thread, Namespace, - Contact, Calendar, Event, Transaction, - DataProcessingCache, Category, MessageCategory) +from inbox.models import ( + Message, + Block, + Part, + Thread, + Namespace, + Contact, + Calendar, + Event, + Transaction, + DataProcessingCache, + Category, + MessageCategory, +) from inbox.models.event import RecurringEvent, RecurringEventOverride from inbox.models.category import EPOCH from inbox.models.backends.generic import GenericAccount -from inbox.api.sending import (send_draft, send_raw_mime, send_draft_copy, - update_draft_on_send) +from inbox.api.sending import ( + send_draft, + send_raw_mime, + send_draft_copy, + update_draft_on_send, +) from inbox.api.update import update_message, update_thread from inbox.api.kellogs import APIEncoder from inbox.api import filtering -from inbox.api.validation import (valid_account, get_attachments, get_calendar, - get_recipients, get_draft, valid_public_id, - valid_event, valid_event_update, timestamp, - bounded_str, view, strict_parse_args, - limit, offset, ValidatableArgument, - strict_bool, validate_draft_recipients, - valid_delta_object_types, valid_display_name, - noop_event_update, valid_category_type, - comma_separated_email_list, - get_sending_draft) +from inbox.api.validation import ( + valid_account, + get_attachments, + get_calendar, + get_recipients, + get_draft, + valid_public_id, + valid_event, + valid_event_update, + timestamp, + bounded_str, + view, + strict_parse_args, + limit, + offset, + ValidatableArgument, + strict_bool, + validate_draft_recipients, + valid_delta_object_types, + valid_display_name, + noop_event_update, + valid_category_type, + comma_separated_email_list, + get_sending_draft, +) from inbox.config import config -from inbox.contacts.algorithms import (calculate_contact_scores, - calculate_group_scores, - calculate_group_counts, is_stale) +from inbox.contacts.algorithms import ( + calculate_contact_scores, + calculate_group_scores, + calculate_group_counts, + 
is_stale, +) import inbox.contacts.crud from inbox.contacts.search import ContactSearchClient -from inbox.sendmail.base import (create_message_from_json, update_draft, - delete_draft, create_draft_from_mime, - SendMailException) +from inbox.sendmail.base import ( + create_message_from_json, + update_draft, + delete_draft, + create_draft_from_mime, + SendMailException, +) from inbox.ignition import engine_manager from inbox.models.action_log import schedule_action from inbox.models.session import new_session, session_scope -from inbox.search.base import get_search_client, SearchBackendException, SearchStoreException +from inbox.search.base import ( + get_search_client, + SearchBackendException, + SearchStoreException, +) from inbox.transactions import delta_sync -from inbox.api.err import (err, APIException, NotFoundError, InputError, - AccountDoesNotExistError, log_exception) +from inbox.api.err import ( + err, + APIException, + NotFoundError, + InputError, + AccountDoesNotExistError, + log_exception, +) from inbox.events.ical import generate_rsvp, send_rsvp from inbox.events.util import removed_participants from inbox.util import blockstore @@ -63,8 +109,11 @@ from inbox.actions.backends.generic import remote_delete_sent from inbox.crispin import writable_connection_pool from inbox.s3.base import get_raw_from_provider -from inbox.s3.exc import (EmailFetchException, TemporaryEmailFetchException, - EmailDeletedException) +from inbox.s3.exc import ( + EmailFetchException, + TemporaryEmailFetchException, + EmailDeletedException, +) from inbox.util.stats import statsd_client try: @@ -75,6 +124,7 @@ from nylas.logging import get_logger + log = get_logger() DEFAULT_LIMIT = 100 @@ -82,10 +132,7 @@ LONG_POLL_POLL_INTERVAL = 1 SEND_TIMEOUT = 60 -app = Blueprint( - 'namespace_api', - __name__, - url_prefix='') +app = Blueprint("namespace_api", __name__, url_prefix="") app.log_exception = log_exception @@ -93,36 +140,36 @@ # TODO perhaps expand to encompass non-standard mimetypes too # see python mimetypes library common_extensions = {} -mt_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), - 'mime_types.txt') -with open(mt_path, 'r') as f: +mt_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "mime_types.txt") +with open(mt_path, "r") as f: for x in f: x = x.strip() - if not x or x.startswith('#'): + if not x or x.startswith("#"): continue m = x.split() mime_type, extensions = m[0], m[1:] - assert extensions, 'Must have at least one extension per mimetype' + assert extensions, "Must have at least one extension per mimetype" common_extensions[mime_type.lower()] = extensions[0] -if config.get('DEBUG_PROFILING_ON'): +if config.get("DEBUG_PROFILING_ON"): from inbox.util.debug import attach_pyinstrument_profiler + attach_pyinstrument_profiler() -APIFeatures = namedtuple('APIFeatures', ['optimistic_updates']) +APIFeatures = namedtuple("APIFeatures", ["optimistic_updates"]) # The Nylas API supports versioning to be fully compatible with # older clients and apps. Users can specify the version of the # API they want to work with by setting the Api-Version API # header. API versions are defined as dates and stored in the # API_VERSIONS list. 
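A minimal client-side sketch of that versioning scheme, assuming a deployment reachable at a placeholder URL with placeholder credentials (neither is defined by this patch): a caller pins behaviour by sending one of the dates from API_VERSIONS in the Api-Version header, and the before_request handler below falls back to API_VERSIONS[0] when the header is missing or unrecognized.

    import requests

    # Pin this request to the older API behaviour. A missing or unknown
    # Api-Version value is coerced to API_VERSIONS[0] server-side.
    response = requests.get(
        "https://sync-engine.example.com/threads/",  # placeholder host
        headers={"Api-Version": "2016-03-07"},
        auth=("<access-token>", ""),  # placeholder auth
    )
    print(response.status_code)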
-API_VERSIONS = ['2016-03-07', '2016-08-09'] +API_VERSIONS = ["2016-03-07", "2016-08-09"] @app.before_request def start(): - g.api_version = request.headers.get('Api-Version', API_VERSIONS[0]) + g.api_version = request.headers.get("Api-Version", API_VERSIONS[0]) if g.api_version not in API_VERSIONS: g.api_version = API_VERSIONS[0] @@ -132,10 +179,10 @@ def start(): else: g.api_features = APIFeatures(optimistic_updates=False) - request.environ['log_context'] = { - 'endpoint': request.endpoint, - 'api_version': g.api_version, - 'namespace_id': g.namespace_id, + request.environ["log_context"] = { + "endpoint": request.endpoint, + "api_version": g.api_version, + "namespace_id": g.namespace_id, } engine = engine_manager.get_for_id(g.namespace_id) @@ -147,17 +194,16 @@ def start(): # was deleted, but the API access cache entry has not been expired yet. raise AccountDoesNotExistError() - request.environ['log_context']['account_id'] = g.namespace.account_id - if hasattr(g, 'application_id'): - request.environ['log_context']['application_id'] = g.application_id + request.environ["log_context"]["account_id"] = g.namespace.account_id + if hasattr(g, "application_id"): + request.environ["log_context"]["application_id"] = g.application_id - is_n1 = request.environ.get('IS_N1', False) + is_n1 = request.environ.get("IS_N1", False) g.encoder = APIEncoder(g.namespace.public_id, is_n1=is_n1) g.parser = reqparse.RequestParser(argument_class=ValidatableArgument) - g.parser.add_argument('limit', default=DEFAULT_LIMIT, type=limit, - location='args') - g.parser.add_argument('offset', default=0, type=offset, location='args') + g.parser.add_argument("limit", default=DEFAULT_LIMIT, type=limit, location="args") + g.parser.add_argument("offset", default=0, type=offset, location="args") @app.before_request @@ -173,17 +219,17 @@ def before_remote_request(): """ # Search uses 'GET', all the other requests we care about use a write # HTTP method. - if (request.endpoint in ('namespace_api.message_search_api', - 'namespace_api.thread_search_api', - 'namespace_api.message_streaming_search_api', - 'namespace_api.thread_streaming_search_api') or - request.method in ('POST', 'PUT', 'PATCH', 'DELETE')): + if request.endpoint in ( + "namespace_api.message_search_api", + "namespace_api.thread_search_api", + "namespace_api.message_streaming_search_api", + "namespace_api.thread_streaming_search_api", + ) or request.method in ("POST", "PUT", "PATCH", "DELETE"): if g.namespace: # Logging provider here to ensure that the provider is only logged for # requests that modify data or are proxied to remote servers. - request.environ['log_context']['provider'] = \ - g.namespace.account.provider + request.environ["log_context"]["provider"] = g.namespace.account.provider # Disable validation so we can perform requests on paused accounts. 
# valid_account(g.namespace) @@ -191,9 +237,9 @@ def before_remote_request(): @app.after_request def finish(response): - if response.status_code == 200 and hasattr(g, 'db_session'): # be cautious + if response.status_code == 200 and hasattr(g, "db_session"): # be cautious g.db_session.commit() - if hasattr(g, 'db_session'): + if hasattr(g, "db_session"): g.db_session.close() return response @@ -201,22 +247,23 @@ def finish(response): @app.errorhandler(OperationalError) def handle_operational_error(error): rule = request.url_rule - if 'send' in rule.rule and 'rsvp' not in rule.rule: + if "send" in rule.rule and "rsvp" not in rule.rule: message = "A temporary database error prevented us from serving this request. Your message has NOT been sent. Please try again in a few minutes." else: message = "A temporary database error prevented us from serving this request. Please try again." - log.error('MySQL OperationalError', exc_info=True) - response = flask_jsonify(message=message, type='database_error') + log.error("MySQL OperationalError", exc_info=True) + response = flask_jsonify(message=message, type="database_error") response.status_code = 503 return response @app.errorhandler(NotImplementedError) def handle_not_implemented_error(error): - request.environ['log_context']['error'] = 'NotImplementedError' - response = flask_jsonify(message="API endpoint not yet implemented", - type='api_error') + request.environ["log_context"]["error"] = "NotImplementedError" + response = flask_jsonify( + message="API endpoint not yet implemented", type="api_error" + ) response.status_code = 501 return response @@ -224,10 +271,9 @@ def handle_not_implemented_error(error): @app.errorhandler(APIException) def handle_input_error(error): # these "errors" are normal, so we don't need to save a traceback - request.environ['log_context']['error'] = error.__class__.__name__ - request.environ['log_context']['error_message'] = error.message - response = flask_jsonify(message=error.message, - type='invalid_request_error') + request.environ["log_context"]["error"] = error.__class__.__name__ + request.environ["log_context"]["error_message"] = error.message + response = flask_jsonify(message=error.message, type="invalid_request_error") response.status_code = error.status_code return response @@ -235,25 +281,28 @@ def handle_input_error(error): @app.errorhandler(Exception) def handle_generic_error(error): log_exception(sys.exc_info()) - response = flask_jsonify(message="An internal error occured. If this issue persists, please contact support@nylas.com and include this request_uid: {}".format( - request.headers.get('X-Unique-ID'), type='api_error')) + response = flask_jsonify( + message="An internal error occured. If this issue persists, please contact support@nylas.com and include this request_uid: {}".format( + request.headers.get("X-Unique-ID"), type="api_error" + ) + ) response.status_code = 500 return response -@app.route('/account') +@app.route("/account") def one_account(): - g.parser.add_argument('view', type=view, location='args') + g.parser.add_argument("view", type=view, location="args") args = strict_parse_args(g.parser, request.args) # Use a new encoder object with the expand parameter set. 
- encoder = APIEncoder(g.namespace.public_id, args['view'] == 'expanded') + encoder = APIEncoder(g.namespace.public_id, args["view"] == "expanded") return encoder.jsonify(g.namespace) # # Sync status (enable/disable account / throttling) # -@app.route('/status/', methods=['GET', 'PUT']) +@app.route("/status/", methods=["GET", "PUT"]) def status(): account = g.namespace.account @@ -261,150 +310,149 @@ def status(): if account.is_marked_for_deletion: raise AccountDoesNotExistError() - if request.method == 'PUT': + if request.method == "PUT": data = request.get_json(force=True) - if 'sync_should_run' in data: - if data['sync_should_run']: - sync_host = data.get('sync_host', None) + if "sync_should_run" in data: + if data["sync_should_run"]: + sync_host = data.get("sync_host", None) account.enable_sync(sync_host=sync_host) else: - reason = data.get('disable_reason', None) + reason = data.get("disable_reason", None) account.disable_sync(reason) - if 'throttled' in data: - if data['throttled']: + if "throttled" in data: + if data["throttled"]: account.throttled = True else: account.throttled = False - return g.encoder.jsonify({ - 'sync_status': account.sync_status, - 'throttled': account.throttled, - }) + return g.encoder.jsonify( + {"sync_status": account.sync_status, "throttled": account.throttled,} + ) # # Threads # -@app.route('/threads/') +@app.route("/threads/") def thread_query_api(): - g.parser.add_argument('subject', type=bounded_str, location='args') - g.parser.add_argument('to', type=bounded_str, location='args') - g.parser.add_argument('from', type=bounded_str, location='args') - g.parser.add_argument('cc', type=bounded_str, location='args') - g.parser.add_argument('bcc', type=bounded_str, location='args') - g.parser.add_argument('any_email', type=comma_separated_email_list, - location='args') - g.parser.add_argument('message_id_header', type=bounded_str, location='args') - g.parser.add_argument('started_before', type=timestamp, location='args') - g.parser.add_argument('started_after', type=timestamp, location='args') - g.parser.add_argument('last_message_before', type=timestamp, - location='args') - g.parser.add_argument('last_message_after', type=timestamp, - location='args') - g.parser.add_argument('filename', type=bounded_str, location='args') - g.parser.add_argument('in', type=bounded_str, location='args') - g.parser.add_argument('thread_id', type=valid_public_id, location='args') - g.parser.add_argument('unread', type=strict_bool, location='args') - g.parser.add_argument('starred', type=strict_bool, location='args') - g.parser.add_argument('view', type=view, location='args') + g.parser.add_argument("subject", type=bounded_str, location="args") + g.parser.add_argument("to", type=bounded_str, location="args") + g.parser.add_argument("from", type=bounded_str, location="args") + g.parser.add_argument("cc", type=bounded_str, location="args") + g.parser.add_argument("bcc", type=bounded_str, location="args") + g.parser.add_argument("any_email", type=comma_separated_email_list, location="args") + g.parser.add_argument("message_id_header", type=bounded_str, location="args") + g.parser.add_argument("started_before", type=timestamp, location="args") + g.parser.add_argument("started_after", type=timestamp, location="args") + g.parser.add_argument("last_message_before", type=timestamp, location="args") + g.parser.add_argument("last_message_after", type=timestamp, location="args") + g.parser.add_argument("filename", type=bounded_str, location="args") + g.parser.add_argument("in", 
type=bounded_str, location="args") + g.parser.add_argument("thread_id", type=valid_public_id, location="args") + g.parser.add_argument("unread", type=strict_bool, location="args") + g.parser.add_argument("starred", type=strict_bool, location="args") + g.parser.add_argument("view", type=view, location="args") args = strict_parse_args(g.parser, request.args) threads = filtering.threads( namespace_id=g.namespace.id, - subject=args['subject'], - thread_public_id=args['thread_id'], - to_addr=args['to'], - from_addr=args['from'], - cc_addr=args['cc'], - bcc_addr=args['bcc'], - any_email=args['any_email'], - message_id_header=args['message_id_header'], - started_before=args['started_before'], - started_after=args['started_after'], - last_message_before=args['last_message_before'], - last_message_after=args['last_message_after'], - filename=args['filename'], - unread=args['unread'], - starred=args['starred'], - in_=args['in'], - limit=args['limit'], - offset=args['offset'], - view=args['view'], - db_session=g.db_session) + subject=args["subject"], + thread_public_id=args["thread_id"], + to_addr=args["to"], + from_addr=args["from"], + cc_addr=args["cc"], + bcc_addr=args["bcc"], + any_email=args["any_email"], + message_id_header=args["message_id_header"], + started_before=args["started_before"], + started_after=args["started_after"], + last_message_before=args["last_message_before"], + last_message_after=args["last_message_after"], + filename=args["filename"], + unread=args["unread"], + starred=args["starred"], + in_=args["in"], + limit=args["limit"], + offset=args["offset"], + view=args["view"], + db_session=g.db_session, + ) # Use a new encoder object with the expand parameter set. - encoder = APIEncoder(g.namespace.public_id, - args['view'] == 'expanded') + encoder = APIEncoder(g.namespace.public_id, args["view"] == "expanded") return encoder.jsonify(threads) -@app.route('/threads/search', methods=['GET']) +@app.route("/threads/search", methods=["GET"]) def thread_search_api(): - g.parser.add_argument('q', type=bounded_str, location='args') + g.parser.add_argument("q", type=bounded_str, location="args") args = strict_parse_args(g.parser, request.args) - if not args['q']: - err_string = ('GET HTTP method must include query' - ' url parameter') + if not args["q"]: + err_string = "GET HTTP method must include query" " url parameter" raise InputError(err_string) try: search_client = get_search_client(g.namespace.account) - results = search_client.search_threads(g.db_session, args['q'], - offset=args['offset'], - limit=args['limit']) + results = search_client.search_threads( + g.db_session, args["q"], offset=args["offset"], limit=args["limit"] + ) return g.encoder.jsonify(results) except SearchBackendException as exc: kwargs = {} if exc.server_error: - kwargs['server_error'] = exc.server_error + kwargs["server_error"] = exc.server_error return err(exc.http_code, exc.message, **kwargs) except SearchStoreException as exc: store_status = STORE_STATUS_CODES.get(str(exc.err_code)) kwargs = {} if store_status.requires_user_action: - kwargs['server_error'] = store_status.resolution + kwargs["server_error"] = store_status.resolution return err(store_status.http_code, store_status.meaning, **kwargs) -@app.route('/threads/search/streaming', methods=['GET']) +@app.route("/threads/search/streaming", methods=["GET"]) def thread_streaming_search_api(): - g.parser.add_argument('q', type=bounded_str, location='args') + g.parser.add_argument("q", type=bounded_str, location="args") args = strict_parse_args(g.parser, 
request.args) - if not args['q']: - err_string = 'GET HTTP method must include query url parameter' + if not args["q"]: + err_string = "GET HTTP method must include query url parameter" raise InputError(err_string) try: search_client = get_search_client(g.namespace.account) - generator = search_client.stream_threads(args['q']) + generator = search_client.stream_threads(args["q"]) - return Response(stream_with_context(generator()), - mimetype='text/json-stream') + return Response(stream_with_context(generator()), mimetype="text/json-stream") except SearchBackendException as exc: kwargs = {} if exc.server_error: - kwargs['server_error'] = exc.server_error + kwargs["server_error"] = exc.server_error return err(exc.http_code, exc.message, **kwargs) except SearchStoreException as exc: store_status = STORE_STATUS_CODES.get(str(exc.err_code)) kwargs = {} if store_status.requires_user_action: - kwargs['server_error'] = store_status.resolution + kwargs["server_error"] = store_status.resolution return err(store_status.http_code, store_status.meaning, **kwargs) -@app.route('/threads/') +@app.route("/threads/") def thread_api(public_id): - g.parser.add_argument('view', type=view, location='args') + g.parser.add_argument("view", type=view, location="args") args = strict_parse_args(g.parser, request.args) # Use a new encoder object with the expand parameter set. - encoder = APIEncoder(g.namespace.public_id, args['view'] == 'expanded') + encoder = APIEncoder(g.namespace.public_id, args["view"] == "expanded") try: valid_public_id(public_id) - thread = g.db_session.query(Thread).filter( - Thread.public_id == public_id, # noqa - Thread.deleted_at == None, # noqa - Thread.namespace_id == g.namespace.id).one() + thread = ( + g.db_session.query(Thread) + .filter( + Thread.public_id == public_id, # noqa + Thread.deleted_at == None, # noqa + Thread.namespace_id == g.namespace.id, + ) + .one() + ) return encoder.jsonify(thread) except NoResultFound: raise NotFoundError("Couldn't find thread `{0}`".format(public_id)) @@ -413,22 +461,26 @@ def thread_api(public_id): # # Update thread # -@app.route('/threads/', methods=['PUT', 'PATCH']) +@app.route("/threads/", methods=["PUT", "PATCH"]) def thread_api_update(public_id): try: valid_public_id(public_id) - thread = g.db_session.query(Thread).filter( - Thread.public_id == public_id, # noqa - Thread.deleted_at == None, # noqa - Thread.namespace_id == g.namespace.id).one() + thread = ( + g.db_session.query(Thread) + .filter( + Thread.public_id == public_id, # noqa + Thread.deleted_at == None, # noqa + Thread.namespace_id == g.namespace.id, + ) + .one() + ) except NoResultFound: raise NotFoundError("Couldn't find thread `{0}` ".format(public_id)) data = request.get_json(force=True) if not isinstance(data, dict): - raise InputError('Invalid request body') + raise InputError("Invalid request body") - update_thread(thread, data, g.db_session, - g.api_features.optimistic_updates) + update_thread(thread, data, g.db_session, g.api_features.optimistic_updates) return g.encoder.jsonify(thread) @@ -436,7 +488,7 @@ def thread_api_update(public_id): # # Delete thread # -@app.route('/threads/', methods=['DELETE']) +@app.route("/threads/", methods=["DELETE"]) def thread_api_delete(public_id): """ Moves the thread to the trash """ raise NotImplementedError @@ -445,167 +497,175 @@ def thread_api_delete(public_id): ## # Messages ## -@app.route('/messages/') +@app.route("/messages/") def message_query_api(): - g.parser.add_argument('subject', type=bounded_str, location='args') - 
g.parser.add_argument('to', type=bounded_str, location='args') - g.parser.add_argument('from', type=bounded_str, location='args') - g.parser.add_argument('cc', type=bounded_str, location='args') - g.parser.add_argument('bcc', type=bounded_str, location='args') - g.parser.add_argument('any_email', type=comma_separated_email_list, - location='args') - g.parser.add_argument('started_before', type=timestamp, location='args') - g.parser.add_argument('started_after', type=timestamp, location='args') - g.parser.add_argument('last_message_before', type=timestamp, - location='args') - g.parser.add_argument('last_message_after', type=timestamp, - location='args') - g.parser.add_argument('received_before', type=timestamp, - location='args') - g.parser.add_argument('received_after', type=timestamp, - location='args') - g.parser.add_argument('filename', type=bounded_str, location='args') - g.parser.add_argument('in', type=bounded_str, location='args') - g.parser.add_argument('thread_id', type=valid_public_id, location='args') - g.parser.add_argument('unread', type=strict_bool, location='args') - g.parser.add_argument('starred', type=strict_bool, location='args') - g.parser.add_argument('view', type=view, location='args') + g.parser.add_argument("subject", type=bounded_str, location="args") + g.parser.add_argument("to", type=bounded_str, location="args") + g.parser.add_argument("from", type=bounded_str, location="args") + g.parser.add_argument("cc", type=bounded_str, location="args") + g.parser.add_argument("bcc", type=bounded_str, location="args") + g.parser.add_argument("any_email", type=comma_separated_email_list, location="args") + g.parser.add_argument("started_before", type=timestamp, location="args") + g.parser.add_argument("started_after", type=timestamp, location="args") + g.parser.add_argument("last_message_before", type=timestamp, location="args") + g.parser.add_argument("last_message_after", type=timestamp, location="args") + g.parser.add_argument("received_before", type=timestamp, location="args") + g.parser.add_argument("received_after", type=timestamp, location="args") + g.parser.add_argument("filename", type=bounded_str, location="args") + g.parser.add_argument("in", type=bounded_str, location="args") + g.parser.add_argument("thread_id", type=valid_public_id, location="args") + g.parser.add_argument("unread", type=strict_bool, location="args") + g.parser.add_argument("starred", type=strict_bool, location="args") + g.parser.add_argument("view", type=view, location="args") args = strict_parse_args(g.parser, request.args) messages = filtering.messages_or_drafts( namespace_id=g.namespace.id, drafts=False, - subject=args['subject'], - thread_public_id=args['thread_id'], - to_addr=args['to'], - from_addr=args['from'], - cc_addr=args['cc'], - bcc_addr=args['bcc'], - any_email=args['any_email'], - started_before=args['started_before'], - started_after=args['started_after'], - last_message_before=args['last_message_before'], - last_message_after=args['last_message_after'], - received_before=args['received_before'], - received_after=args['received_after'], - filename=args['filename'], - in_=args['in'], - unread=args['unread'], - starred=args['starred'], - limit=args['limit'], - offset=args['offset'], - view=args['view'], - db_session=g.db_session) + subject=args["subject"], + thread_public_id=args["thread_id"], + to_addr=args["to"], + from_addr=args["from"], + cc_addr=args["cc"], + bcc_addr=args["bcc"], + any_email=args["any_email"], + started_before=args["started_before"], + 
started_after=args["started_after"], + last_message_before=args["last_message_before"], + last_message_after=args["last_message_after"], + received_before=args["received_before"], + received_after=args["received_after"], + filename=args["filename"], + in_=args["in"], + unread=args["unread"], + starred=args["starred"], + limit=args["limit"], + offset=args["offset"], + view=args["view"], + db_session=g.db_session, + ) # Use a new encoder object with the expand parameter set. - encoder = APIEncoder(g.namespace.public_id, args['view'] == 'expanded') + encoder = APIEncoder(g.namespace.public_id, args["view"] == "expanded") return encoder.jsonify(messages) -@app.route('/messages/search', methods=['GET']) +@app.route("/messages/search", methods=["GET"]) def message_search_api(): - g.parser.add_argument('q', type=bounded_str, location='args') + g.parser.add_argument("q", type=bounded_str, location="args") args = strict_parse_args(g.parser, request.args) - if not args['q']: - err_string = 'GET HTTP method must include query url parameter' + if not args["q"]: + err_string = "GET HTTP method must include query url parameter" raise InputError(err_string) try: search_client = get_search_client(g.namespace.account) - results = search_client.search_messages(g.db_session, args['q'], - offset=args['offset'], - limit=args['limit']) + results = search_client.search_messages( + g.db_session, args["q"], offset=args["offset"], limit=args["limit"] + ) return g.encoder.jsonify(results) except SearchBackendException as exc: kwargs = {} if exc.server_error: - kwargs['server_error'] = exc.server_error + kwargs["server_error"] = exc.server_error return err(exc.http_code, exc.message, **kwargs) except SearchStoreException as exc: store_status = STORE_STATUS_CODES.get(str(exc.err_code)) kwargs = {} if store_status.requires_user_action: - kwargs['server_error'] = store_status.resolution + kwargs["server_error"] = store_status.resolution return err(store_status.http_code, store_status.meaning, **kwargs) -@app.route('/messages/search/streaming', methods=['GET']) +@app.route("/messages/search/streaming", methods=["GET"]) def message_streaming_search_api(): - g.parser.add_argument('q', type=bounded_str, location='args') + g.parser.add_argument("q", type=bounded_str, location="args") args = strict_parse_args(g.parser, request.args) - if not args['q']: - err_string = 'GET HTTP method must include query url parameter' + if not args["q"]: + err_string = "GET HTTP method must include query url parameter" raise InputError(err_string) try: search_client = get_search_client(g.namespace.account) - generator = search_client.stream_messages(args['q']) + generator = search_client.stream_messages(args["q"]) - return Response(stream_with_context(generator()), - mimetype='text/json-stream') + return Response(stream_with_context(generator()), mimetype="text/json-stream") except SearchBackendException as exc: kwargs = {} if exc.server_error: - kwargs['server_error'] = exc.server_error + kwargs["server_error"] = exc.server_error return err(exc.http_code, exc.message, **kwargs) except SearchStoreException as exc: store_status = STORE_STATUS_CODES.get(str(exc.err_code)) kwargs = {} if store_status.requires_user_action: - kwargs['server_error'] = store_status.resolution + kwargs["server_error"] = store_status.resolution return err(store_status.http_code, store_status.meaning, **kwargs) -@app.route('/messages/', methods=['GET']) +@app.route("/messages/", methods=["GET"]) def message_read_api(public_id): - g.parser.add_argument('view', type=view, 
location='args') + g.parser.add_argument("view", type=view, location="args") args = strict_parse_args(g.parser, request.args) - encoder = APIEncoder(g.namespace.public_id, args['view'] == 'expanded') + encoder = APIEncoder(g.namespace.public_id, args["view"] == "expanded") try: valid_public_id(public_id) - message = Message.from_public_id(public_id, g.namespace.id, - g.db_session) + message = Message.from_public_id(public_id, g.namespace.id, g.db_session) except NoResultFound: raise NotFoundError("Couldn't find message {0}".format(public_id)) - if request.headers.get('Accept', None) == 'message/rfc822': + if request.headers.get("Accept", None) == "message/rfc822": raw_message = blockstore.get_from_blockstore(message.data_sha256) if raw_message is not None: - return Response(raw_message, mimetype='message/rfc822') + return Response(raw_message, mimetype="message/rfc822") else: # Try getting the message from the email provider. account = g.namespace.account - statsd_string = 'api.direct_fetching.{}.{}'\ - .format(account.provider, account.id) + statsd_string = "api.direct_fetching.{}.{}".format( + account.provider, account.id + ) try: - with statsd_client.timer('{}.provider_latency'.format( - statsd_string)): + with statsd_client.timer("{}.provider_latency".format(statsd_string)): contents = get_raw_from_provider(message) - statsd_client.incr('{}.successes'.format(statsd_string)) + statsd_client.incr("{}.successes".format(statsd_string)) except TemporaryEmailFetchException: - statsd_client.incr( - '{}.temporary_failure'.format(statsd_string)) - log.warning('Exception when fetching email', - account_id=account.id, provider=account.provider, - logstash_tag='direct_fetching', exc_info=True) - - return err(503, "Email server returned a temporary error. " - "Please try again in a few minutes.") + statsd_client.incr("{}.temporary_failure".format(statsd_string)) + log.warning( + "Exception when fetching email", + account_id=account.id, + provider=account.provider, + logstash_tag="direct_fetching", + exc_info=True, + ) + + return err( + 503, + "Email server returned a temporary error. " + "Please try again in a few minutes.", + ) except EmailDeletedException: - statsd_client.incr('{}.deleted'.format(statsd_string)) - log.warning('Exception when fetching email', - account_id=account.id, provider=account.provider, - logstash_tag='direct_fetching', exc_info=True) + statsd_client.incr("{}.deleted".format(statsd_string)) + log.warning( + "Exception when fetching email", + account_id=account.id, + provider=account.provider, + logstash_tag="direct_fetching", + exc_info=True, + ) return err(404, "The data was deleted on the email server.") except EmailFetchException: - statsd_client.incr('{}.failures'.format(statsd_string)) - log.warning('Exception when fetching email', - account_id=account.id, provider=account.provider, - logstash_tag='direct_fetching', exc_info=True) + statsd_client.incr("{}.failures".format(statsd_string)) + log.warning( + "Exception when fetching email", + account_id=account.id, + provider=account.provider, + logstash_tag="direct_fetching", + exc_info=True, + ) return err(404, "Couldn't find data on the email server.") @@ -615,70 +675,74 @@ def message_read_api(public_id): blockstore.save_to_blockstore(data_sha256, contents) return contents - request.environ['log_context']['message_id'] = message.id + request.environ["log_context"]["message_id"] = message.id raise NotFoundError( "Couldn't find raw contents for message `{0}`. " - "Please try again in a few minutes." 
- .format(public_id)) + "Please try again in a few minutes.".format(public_id) + ) return encoder.jsonify(message) -@app.route('/messages/', methods=['PUT', 'PATCH']) +@app.route("/messages/", methods=["PUT", "PATCH"]) def message_update_api(public_id): try: valid_public_id(public_id) - message = g.db_session.query(Message).filter( - Message.public_id == public_id, - Message.namespace_id == g.namespace.id).one() + message = ( + g.db_session.query(Message) + .filter( + Message.public_id == public_id, Message.namespace_id == g.namespace.id + ) + .one() + ) except NoResultFound: raise NotFoundError("Couldn't find message {0} ".format(public_id)) data = request.get_json(force=True) if not isinstance(data, dict): - raise InputError('Invalid request body') + raise InputError("Invalid request body") - update_message(message, data, g.db_session, - g.api_features.optimistic_updates) + update_message(message, data, g.db_session, g.api_features.optimistic_updates) return g.encoder.jsonify(message) # Folders / Labels -@app.route('/folders') -@app.route('/labels') +@app.route("/folders") +@app.route("/labels") def folders_labels_query_api(): category_type = g.namespace.account.category_type rule = request.url_rule.rule valid_category_type(category_type, rule) - g.parser.add_argument('view', type=bounded_str, location='args') + g.parser.add_argument("view", type=bounded_str, location="args") args = strict_parse_args(g.parser, request.args) - if args['view'] == 'count': + if args["view"] == "count": results = g.db_session.query(func.count(Category.id)) - elif args['view'] == 'ids': + elif args["view"] == "ids": results = g.db_session.query(Category.public_id) else: results = g.db_session.query(Category) - results = results.filter(Category.namespace_id == g.namespace.id, - Category.deleted_at == EPOCH) # noqa + results = results.filter( + Category.namespace_id == g.namespace.id, Category.deleted_at == EPOCH + ) # noqa results = results.order_by(asc(Category.id)) - if args['view'] == 'count': + if args["view"] == "count": return g.encoder.jsonify({"count": results.scalar()}) - results = results.limit(args['limit']).offset(args['offset']).all() - if args['view'] == 'ids': + results = results.limit(args["limit"]).offset(args["offset"]).all() + if args["view"] == "ids": return g.encoder.jsonify([r for r, in results]) return g.encoder.jsonify(results) -@app.route('/folders/') +@app.route("/folders/") def folder_api(public_id): return folders_labels_api_impl(public_id) -@app.route('/labels/') +@app.route("/labels/") def label_api(public_id): return folders_labels_api_impl(public_id) @@ -689,41 +753,50 @@ def folders_labels_api_impl(public_id): valid_category_type(category_type, rule) valid_public_id(public_id) try: - category = g.db_session.query(Category).filter( - Category.namespace_id == g.namespace.id, - Category.public_id == public_id, - Category.deleted_at == EPOCH).one() # noqa + category = ( + g.db_session.query(Category) + .filter( + Category.namespace_id == g.namespace.id, + Category.public_id == public_id, + Category.deleted_at == EPOCH, + ) + .one() + ) # noqa except NoResultFound: - raise NotFoundError('Object not found') + raise NotFoundError("Object not found") return g.encoder.jsonify(category) -@app.route('/folders', methods=['POST']) -@app.route('/labels', methods=['POST']) +@app.route("/folders", methods=["POST"]) +@app.route("/labels", methods=["POST"]) def folders_labels_create_api(): category_type = g.namespace.account.category_type rule = request.url_rule.rule 
valid_category_type(category_type, rule) data = request.get_json(force=True) - display_name = data.get('display_name') + display_name = data.get("display_name") # Validates the display_name and checks if there is a non-deleted Category # with this display_name already. If so, we do not allow creating a # duplicate. - valid_display_name(g.namespace.id, category_type, display_name, - g.db_session) + valid_display_name(g.namespace.id, category_type, display_name, g.db_session) - if g.namespace.account.provider not in ['gmail', 'eas']: + if g.namespace.account.provider not in ["gmail", "eas"]: # Translate the name of the folder to an actual IMAP name # (e.g: "Accounting/Taxes" becomes "Accounting.Taxes") display_name = imap_folder_path( display_name, separator=g.namespace.account.folder_separator, - prefix=g.namespace.account.folder_prefix) + prefix=g.namespace.account.folder_prefix, + ) - category = Category.find_or_create(g.db_session, g.namespace.id, - name=None, display_name=display_name, - type_=category_type) + category = Category.find_or_create( + g.db_session, + g.namespace.id, + name=None, + display_name=display_name, + type_=category_type, + ) if category.is_deleted: # The existing category is soft-deleted and will be hard-deleted, # so it is okay to create a new category with the same (display_name, @@ -732,51 +805,58 @@ def folders_labels_create_api(): # its `deleted_at`=EPOCH, because doing so would not be consistent with # the API's semantics -- we want the newly created object to have a # different ID. - category = Category.create(g.db_session, namespace_id=g.namespace.id, - name=None, display_name=display_name, - type_=category_type) + category = Category.create( + g.db_session, + namespace_id=g.namespace.id, + name=None, + display_name=display_name, + type_=category_type, + ) g.db_session.add(category) g.db_session.flush() - if category_type == 'folder': - schedule_action('create_folder', category, g.namespace.id, - g.db_session) + if category_type == "folder": + schedule_action("create_folder", category, g.namespace.id, g.db_session) else: - schedule_action('create_label', category, g.namespace.id, g.db_session) + schedule_action("create_label", category, g.namespace.id, g.db_session) return g.encoder.jsonify(category) -@app.route('/folders/', methods=['PUT', 'PATCH']) -@app.route('/labels/', methods=['PUT', 'PATCH']) +@app.route("/folders/", methods=["PUT", "PATCH"]) +@app.route("/labels/", methods=["PUT", "PATCH"]) def folder_label_update_api(public_id): category_type = g.namespace.account.category_type rule = request.url_rule.rule valid_category_type(category_type, rule) valid_public_id(public_id) try: - category = g.db_session.query(Category).filter( - Category.namespace_id == g.namespace.id, - Category.public_id == public_id, - Category.deleted_at == EPOCH).one() # noqa + category = ( + g.db_session.query(Category) + .filter( + Category.namespace_id == g.namespace.id, + Category.public_id == public_id, + Category.deleted_at == EPOCH, + ) + .one() + ) # noqa except NoResultFound: - raise InputError("Couldn't find {} {}".format( - category_type, public_id)) + raise InputError("Couldn't find {} {}".format(category_type, public_id)) if category.name: raise InputError("Cannot modify a standard {}".format(category_type)) data = request.get_json(force=True) - display_name = data.get('display_name') - valid_display_name(g.namespace.id, category_type, display_name, - g.db_session) + display_name = data.get("display_name") + valid_display_name(g.namespace.id, category_type, 
display_name, g.db_session) - if g.namespace.account.provider not in ['gmail', 'eas']: + if g.namespace.account.provider not in ["gmail", "eas"]: # Translate the name of the folder to an actual IMAP name # (e.g: "Accounting/Taxes" becomes "Accounting.Taxes") display_name = imap_folder_path( display_name, separator=g.namespace.account.folder_separator, - prefix=g.namespace.account.folder_prefix) + prefix=g.namespace.account.folder_prefix, + ) current_name = category.display_name @@ -785,55 +865,76 @@ def folder_label_update_api(public_id): category.display_name = display_name g.db_session.flush() - if category_type == 'folder': - schedule_action('update_folder', category, g.namespace.id, - g.db_session, old_name=current_name, - new_name=display_name) + if category_type == "folder": + schedule_action( + "update_folder", + category, + g.namespace.id, + g.db_session, + old_name=current_name, + new_name=display_name, + ) else: - schedule_action('update_label', category, g.namespace.id, - g.db_session, old_name=current_name, - new_name=display_name) + schedule_action( + "update_label", + category, + g.namespace.id, + g.db_session, + old_name=current_name, + new_name=display_name, + ) return g.encoder.jsonify(category) -@app.route('/folders/', methods=['DELETE']) -@app.route('/labels/', methods=['DELETE']) +@app.route("/folders/", methods=["DELETE"]) +@app.route("/labels/", methods=["DELETE"]) def folder_label_delete_api(public_id): category_type = g.namespace.account.category_type rule = request.url_rule.rule valid_category_type(category_type, rule) valid_public_id(public_id) try: - category = g.db_session.query(Category).filter( - Category.namespace_id == g.namespace.id, - Category.public_id == public_id, - Category.deleted_at == EPOCH).one() # noqa + category = ( + g.db_session.query(Category) + .filter( + Category.namespace_id == g.namespace.id, + Category.public_id == public_id, + Category.deleted_at == EPOCH, + ) + .one() + ) # noqa except NoResultFound: - raise InputError("Couldn't find {} {}".format( - category_type, public_id)) + raise InputError("Couldn't find {} {}".format(category_type, public_id)) if category.name: raise InputError("Cannot modify a standard {}".format(category_type)) - if category.type_ == 'folder': - messages_with_category = g.db_session.query(MessageCategory).filter( - MessageCategory.category_id == category.id).exists() + if category.type_ == "folder": + messages_with_category = ( + g.db_session.query(MessageCategory) + .filter(MessageCategory.category_id == category.id) + .exists() + ) messages_exist = g.db_session.query(messages_with_category).scalar() if messages_exist: raise InputError( - "Folder {} cannot be deleted because it contains messages.". 
- format(public_id)) + "Folder {} cannot be deleted because it contains messages.".format( + public_id + ) + ) if g.api_features.optimistic_updates: deleted_at = datetime.utcnow() category.deleted_at = deleted_at - folders = category.folders if g.namespace.account.discriminator \ - != 'easaccount' else category.easfolders + folders = ( + category.folders + if g.namespace.account.discriminator != "easaccount" + else category.easfolders + ) for folder in folders: folder.deleted_at = deleted_at - schedule_action('delete_folder', category, g.namespace.id, - g.db_session) + schedule_action("delete_folder", category, g.namespace.id, g.db_session) else: if g.api_features.optimistic_updates: deleted_at = datetime.utcnow() @@ -841,8 +942,7 @@ def folder_label_delete_api(public_id): for label in category.labels: label.deleted_at = deleted_at - schedule_action('delete_label', category, g.namespace.id, - g.db_session) + schedule_action("delete_label", category, g.namespace.id, g.db_session) g.db_session.commit() @@ -852,59 +952,59 @@ def folder_label_delete_api(public_id): # # Contacts ## -@app.route('/contacts/', methods=['GET']) +@app.route("/contacts/", methods=["GET"]) def contact_api(): - g.parser.add_argument('filter', type=bounded_str, default='', - location='args') - g.parser.add_argument('view', type=bounded_str, location='args') + g.parser.add_argument("filter", type=bounded_str, default="", location="args") + g.parser.add_argument("view", type=bounded_str, location="args") args = strict_parse_args(g.parser, request.args) - if args['view'] == 'count': + if args["view"] == "count": results = g.db_session.query(func.count(Contact.id)) - elif args['view'] == 'ids': + elif args["view"] == "ids": results = g.db_session.query(Contact.public_id) else: results = g.db_session.query(Contact) results = results.filter(Contact.namespace_id == g.namespace.id) - if args['filter']: - results = results.filter(Contact.email_address == args['filter']) - results = results.with_hint( - Contact, 'USE INDEX (idx_namespace_created)')\ - .order_by(asc(Contact.created_at)) + if args["filter"]: + results = results.filter(Contact.email_address == args["filter"]) + results = results.with_hint(Contact, "USE INDEX (idx_namespace_created)").order_by( + asc(Contact.created_at) + ) - if args['view'] == 'count': + if args["view"] == "count": return g.encoder.jsonify({"count": results.scalar()}) - if args['view'] != 'ids': - results = results.options(load_only('public_id', '_raw_address', 'name'), - joinedload(Contact.phone_numbers)) + if args["view"] != "ids": + results = results.options( + load_only("public_id", "_raw_address", "name"), + joinedload(Contact.phone_numbers), + ) - results = results.limit(args['limit']).offset(args['offset']).all() - if args['view'] == 'ids': + results = results.limit(args["limit"]).offset(args["offset"]).all() + if args["view"] == "ids": return g.encoder.jsonify([r for r, in results]) return g.encoder.jsonify(results) -@app.route('/contacts/search', methods=['GET']) +@app.route("/contacts/search", methods=["GET"]) def contact_search_api(): - g.parser.add_argument('q', type=bounded_str, location='args') + g.parser.add_argument("q", type=bounded_str, location="args") args = strict_parse_args(g.parser, request.args) - if not args['q']: - err_string = ('GET HTTP method must include query' - ' url parameter') + if not args["q"]: + err_string = "GET HTTP method must include query" " url parameter" raise InputError(err_string) search_client = ContactSearchClient(g.namespace.id) - results = 
search_client.search_contacts(g.db_session, args['q'], - offset=args['offset'], - limit=args['limit']) + results = search_client.search_contacts( + g.db_session, args["q"], offset=args["offset"], limit=args["limit"] + ) return g.encoder.jsonify(results) -@app.route('/contacts/', methods=['GET']) +@app.route("/contacts/", methods=["GET"]) def contact_read_api(public_id): # Get all data for an existing contact. valid_public_id(public_id) @@ -917,97 +1017,95 @@ def contact_read_api(public_id): ## # Events ## -@app.route('/events/', methods=['GET']) +@app.route("/events/", methods=["GET"]) def event_api(): - g.parser.add_argument('event_id', type=valid_public_id, location='args') - g.parser.add_argument('calendar_id', type=valid_public_id, location='args') - g.parser.add_argument('title', type=bounded_str, location='args') - g.parser.add_argument('description', type=bounded_str, location='args') - g.parser.add_argument('location', type=bounded_str, location='args') - g.parser.add_argument('busy', type=strict_bool, location='args') - g.parser.add_argument('starts_before', type=timestamp, location='args') - g.parser.add_argument('starts_after', type=timestamp, location='args') - g.parser.add_argument('ends_before', type=timestamp, location='args') - g.parser.add_argument('ends_after', type=timestamp, location='args') - g.parser.add_argument('view', type=bounded_str, location='args') - g.parser.add_argument('expand_recurring', type=strict_bool, - location='args') - g.parser.add_argument('show_cancelled', type=strict_bool, location='args') - g.parser.add_argument('title_email', type=bounded_str, location='args') - g.parser.add_argument('description_email', type=bounded_str, location='args') - g.parser.add_argument('owner_email', type=bounded_str, location='args') - g.parser.add_argument('participant_email', type=bounded_str, location='args') - g.parser.add_argument('any_email', type=bounded_str, location='args') + g.parser.add_argument("event_id", type=valid_public_id, location="args") + g.parser.add_argument("calendar_id", type=valid_public_id, location="args") + g.parser.add_argument("title", type=bounded_str, location="args") + g.parser.add_argument("description", type=bounded_str, location="args") + g.parser.add_argument("location", type=bounded_str, location="args") + g.parser.add_argument("busy", type=strict_bool, location="args") + g.parser.add_argument("starts_before", type=timestamp, location="args") + g.parser.add_argument("starts_after", type=timestamp, location="args") + g.parser.add_argument("ends_before", type=timestamp, location="args") + g.parser.add_argument("ends_after", type=timestamp, location="args") + g.parser.add_argument("view", type=bounded_str, location="args") + g.parser.add_argument("expand_recurring", type=strict_bool, location="args") + g.parser.add_argument("show_cancelled", type=strict_bool, location="args") + g.parser.add_argument("title_email", type=bounded_str, location="args") + g.parser.add_argument("description_email", type=bounded_str, location="args") + g.parser.add_argument("owner_email", type=bounded_str, location="args") + g.parser.add_argument("participant_email", type=bounded_str, location="args") + g.parser.add_argument("any_email", type=bounded_str, location="args") args = strict_parse_args(g.parser, request.args) results = filtering.events( namespace_id=g.namespace.id, - event_public_id=args['event_id'], - calendar_public_id=args['calendar_id'], - title=args['title'], - description=args['description'], - location=args['location'], - 
busy=args['busy'], - title_email=args['title_email'], - description_email=args['description_email'], - owner_email=args['owner_email'], - participant_email=args['participant_email'], - any_email=args['any_email'], - starts_before=args['starts_before'], - starts_after=args['starts_after'], - ends_before=args['ends_before'], - ends_after=args['ends_after'], - limit=args['limit'], - offset=args['offset'], - view=args['view'], - expand_recurring=args['expand_recurring'], - show_cancelled=args['show_cancelled'], - db_session=g.db_session) + event_public_id=args["event_id"], + calendar_public_id=args["calendar_id"], + title=args["title"], + description=args["description"], + location=args["location"], + busy=args["busy"], + title_email=args["title_email"], + description_email=args["description_email"], + owner_email=args["owner_email"], + participant_email=args["participant_email"], + any_email=args["any_email"], + starts_before=args["starts_before"], + starts_after=args["starts_after"], + ends_before=args["ends_before"], + ends_after=args["ends_after"], + limit=args["limit"], + offset=args["offset"], + view=args["view"], + expand_recurring=args["expand_recurring"], + show_cancelled=args["show_cancelled"], + db_session=g.db_session, + ) return g.encoder.jsonify(results) -@app.route('/events/', methods=['POST']) +@app.route("/events/", methods=["POST"]) def event_create_api(): - g.parser.add_argument('notify_participants', type=strict_bool, - location='args') + g.parser.add_argument("notify_participants", type=strict_bool, location="args") args = strict_parse_args(g.parser, request.args) - notify_participants = args['notify_participants'] + notify_participants = args["notify_participants"] data = request.get_json(force=True) - calendar = get_calendar(data.get('calendar_id'), - g.namespace, g.db_session) + calendar = get_calendar(data.get("calendar_id"), g.namespace, g.db_session) if calendar.read_only: raise InputError("Can't create events on read_only calendar.") valid_event(data) - title = data.get('title', '') - description = data.get('description') - location = data.get('location') - when = data.get('when') - busy = data.get('busy') + title = data.get("title", "") + description = data.get("description") + location = data.get("location") + when = data.get("when") + busy = data.get("busy") # client libraries can send explicit key = None automagically if busy is None: busy = True - participants = data.get('participants') + participants = data.get("participants") if participants is None: participants = [] for p in participants: - p['email'] = p['email'].lower() - if 'status' not in p: - p['status'] = 'noreply' + p["email"] = p["email"].lower() + if "status" not in p: + p["status"] = "noreply" event = Event( calendar=calendar, namespace=g.namespace, uid=uuid.uuid4().hex, provider_name=g.namespace.account.provider, - raw_data='', + raw_data="", title=title, description=description, location=location, @@ -1017,58 +1115,71 @@ def event_create_api(): is_owner=True, participants=participants, sequence_number=0, - source='local') + source="local", + ) g.db_session.add(event) g.db_session.flush() - schedule_action('create_event', event, g.namespace.id, g.db_session, - calendar_uid=event.calendar.uid, - notify_participants=notify_participants) + schedule_action( + "create_event", + event, + g.namespace.id, + g.db_session, + calendar_uid=event.calendar.uid, + notify_participants=notify_participants, + ) return g.encoder.jsonify(event) -@app.route('/events/', methods=['GET']) +@app.route("/events/", 
methods=["GET"]) def event_read_api(public_id): """Get all data for an existing event.""" valid_public_id(public_id) try: - event = g.db_session.query(Event).filter( - Event.namespace_id == g.namespace.id, - Event.public_id == public_id, - Event.deleted_at == None).one() # noqa + event = ( + g.db_session.query(Event) + .filter( + Event.namespace_id == g.namespace.id, + Event.public_id == public_id, + Event.deleted_at == None, + ) + .one() + ) # noqa except NoResultFound: raise NotFoundError("Couldn't find event id {0}".format(public_id)) return g.encoder.jsonify(event) -@app.route('/events/', methods=['PUT', 'PATCH']) +@app.route("/events/", methods=["PUT", "PATCH"]) def event_update_api(public_id): - g.parser.add_argument('notify_participants', type=strict_bool, - location='args') + g.parser.add_argument("notify_participants", type=strict_bool, location="args") args = strict_parse_args(g.parser, request.args) - notify_participants = args['notify_participants'] + notify_participants = args["notify_participants"] valid_public_id(public_id) try: - event = g.db_session.query(Event).filter( - Event.public_id == public_id, - Event.namespace_id == g.namespace.id, - Event.deleted_at == None).one() # noqa + event = ( + g.db_session.query(Event) + .filter( + Event.public_id == public_id, + Event.namespace_id == g.namespace.id, + Event.deleted_at == None, + ) + .one() + ) # noqa except NoResultFound: raise NotFoundError("Couldn't find event {0}".format(public_id)) # iCalendar-imported files are read-only by default but let's give a # slightly more helpful error message. if event.calendar == g.namespace.account.emailed_events_calendar: - raise InputError( - 'Can not update an event imported from an iCalendar file.') + raise InputError("Can not update an event imported from an iCalendar file.") if event.read_only: - raise InputError('Cannot update read_only event.') + raise InputError("Cannot update read_only event.") - if (isinstance(event, RecurringEvent) or - isinstance(event, RecurringEventOverride)): - raise InputError('Cannot update a recurring event yet.') + if isinstance(event, RecurringEvent) or isinstance(event, RecurringEventOverride): + raise InputError("Cannot update a recurring event yet.") data = request.get_json(force=True) account = g.namespace.account @@ -1077,20 +1188,24 @@ def event_update_api(public_id): # A list of participants we need to send cancellation invites to. cancelled_participants = [] - if 'participants' in data: - for p in data['participants']: - p['email'] = p['email'].lower() - if 'status' not in p: - p['status'] = 'noreply' + if "participants" in data: + for p in data["participants"]: + p["email"] = p["email"].lower() + if "status" not in p: + p["status"] = "noreply" - cancelled_participants = removed_participants(event.participants, - data['participants']) + cancelled_participants = removed_participants( + event.participants, data["participants"] + ) # We're going to save this data into a JSON-like TEXT field in the # db. With MySQL, this means that the column will be 64k. # Drop the latest participants until it fits in the column. while len(json.dumps(cancelled_participants)) > 63000: - log.warning("Truncating cancelled participants", cancelled_participants=cancelled_participants) + log.warning( + "Truncating cancelled participants", + cancelled_participants=cancelled_participants, + ) cancelled_participants.pop() # Don't update an event if we don't need to. 
@@ -1105,54 +1220,65 @@ def event_update_api(public_id): event.sequence_number += 1 g.db_session.commit() - schedule_action('update_event', event, g.namespace.id, g.db_session, - calendar_uid=event.calendar.uid, - cancelled_participants=cancelled_participants, - notify_participants=notify_participants) + schedule_action( + "update_event", + event, + g.namespace.id, + g.db_session, + calendar_uid=event.calendar.uid, + cancelled_participants=cancelled_participants, + notify_participants=notify_participants, + ) else: # This isn't an optimistic update, so we need to store the # updated attributes inside the ActionLog entry. # Once we've update the event on the backend, we'll be able # to propagate the changes to our datastore. - kwargs = dict(calendar_uid=event.calendar.uid, - event_data=data, - cancelled_participants=cancelled_participants, - notify_participants=notify_participants) + kwargs = dict( + calendar_uid=event.calendar.uid, + event_data=data, + cancelled_participants=cancelled_participants, + notify_participants=notify_participants, + ) if len(json.dumps(kwargs)) > 2 ** 16 - 12: - raise InputError( - 'Event update too big --- please break it in parts.') + raise InputError("Event update too big --- please break it in parts.") if event.calendar != account.emailed_events_calendar: - schedule_action('update_event', event, g.namespace.id, g.db_session, - **kwargs) + schedule_action( + "update_event", event, g.namespace.id, g.db_session, **kwargs + ) return g.encoder.jsonify(event) -@app.route('/events/', methods=['DELETE']) +@app.route("/events/", methods=["DELETE"]) def event_delete_api(public_id): - g.parser.add_argument('notify_participants', type=strict_bool, - location='args') + g.parser.add_argument("notify_participants", type=strict_bool, location="args") args = strict_parse_args(g.parser, request.args) - notify_participants = args['notify_participants'] + notify_participants = args["notify_participants"] valid_public_id(public_id) try: - event = g.db_session.query(Event).filter( - Event.public_id == public_id, - Event.namespace_id == g.namespace.id, - Event.deleted_at == None).one() # noqa + event = ( + g.db_session.query(Event) + .filter( + Event.public_id == public_id, + Event.namespace_id == g.namespace.id, + Event.deleted_at == None, + ) + .one() + ) # noqa except NoResultFound: raise NotFoundError("Couldn't find event {0}".format(public_id)) if event.calendar == g.namespace.account.emailed_events_calendar: - raise InputError( - 'Can not update an event imported from an iCalendar file.') + raise InputError("Can not update an event imported from an iCalendar file.") if event.calendar.read_only: - raise InputError('Cannot delete event {} from read_only calendar.'. 
- format(public_id)) + raise InputError( + "Cannot delete event {} from read_only calendar.".format(public_id) + ) if g.api_features.optimistic_updates: # Set the local event status to 'cancelled' rather than deleting it, @@ -1160,42 +1286,49 @@ def event_delete_api(public_id): # remote, and consequently return them through the events, delta sync # APIs event.sequence_number += 1 - event.status = 'cancelled' + event.status = "cancelled" g.db_session.commit() - schedule_action('delete_event', event, g.namespace.id, g.db_session, - event_uid=event.uid, calendar_name=event.calendar.name, - calendar_uid=event.calendar.uid, - notify_participants=notify_participants) + schedule_action( + "delete_event", + event, + g.namespace.id, + g.db_session, + event_uid=event.uid, + calendar_name=event.calendar.name, + calendar_uid=event.calendar.uid, + notify_participants=notify_participants, + ) return g.encoder.jsonify(None) -@app.route('/send-rsvp', methods=['POST']) +@app.route("/send-rsvp", methods=["POST"]) def event_rsvp_api(): data = request.get_json(force=True) - event_id = data.get('event_id') + event_id = data.get("event_id") valid_public_id(event_id) try: - event = g.db_session.query(Event).filter( - Event.public_id == event_id, - Event.namespace_id == g.namespace.id).one() + event = ( + g.db_session.query(Event) + .filter(Event.public_id == event_id, Event.namespace_id == g.namespace.id) + .one() + ) except NoResultFound: raise NotFoundError("Couldn't find event {0}".format(event_id)) if event.message is None: - raise InputError('This is not a message imported ' - 'from an iCalendar invite.') + raise InputError("This is not a message imported " "from an iCalendar invite.") - status = data.get('status') + status = data.get("status") if not status: - raise InputError('You must define a status to RSVP.') + raise InputError("You must define a status to RSVP.") - if status not in ['yes', 'no', 'maybe']: - raise InputError('Invalid status %s' % status) + if status not in ["yes", "no", "maybe"]: + raise InputError("Invalid status %s" % status) - comment = data.get('comment', '') + comment = data.get("comment", "") # Note: this assumes that the email invite was directly addressed to us # (i.e: that there's no email alias to redirect ben.bitdiddle@nylas @@ -1206,16 +1339,15 @@ def event_rsvp_api(): email = account.email_address if email not in participants: - raise InputError('Cannot find %s among the participants' % email) + raise InputError("Cannot find %s among the participants" % email) p = participants[email] # Make this API idempotent. if p["status"] == status: - if 'comment' not in p and 'comment' not in data: + if "comment" not in p and "comment" not in data: return g.encoder.jsonify(event) - elif ('comment' in p and 'comment' in data and - p['comment'] == data['comment']): + elif "comment" in p and "comment" in data and p["comment"] == data["comment"]: return g.encoder.jsonify(event) participant = {"email": email, "status": status, "comment": comment} @@ -1231,9 +1363,9 @@ def event_rsvp_api(): except SendMailException as exc: kwargs = {} if exc.failures: - kwargs['failures'] = exc.failures + kwargs["failures"] = exc.failures if exc.server_error: - kwargs['server_error'] = exc.server_error + kwargs["server_error"] = exc.server_error return err(exc.http_code, exc.message, **kwargs) # Update the participants status too. 
@@ -1258,50 +1390,60 @@ def event_rsvp_api(): # # Files # -@app.route('/files/', methods=['GET']) +@app.route("/files/", methods=["GET"]) def files_api(): - g.parser.add_argument('filename', type=bounded_str, location='args') - g.parser.add_argument('message_id', type=valid_public_id, location='args') - g.parser.add_argument('content_type', type=bounded_str, location='args') - g.parser.add_argument('view', type=view, location='args') + g.parser.add_argument("filename", type=bounded_str, location="args") + g.parser.add_argument("message_id", type=valid_public_id, location="args") + g.parser.add_argument("content_type", type=bounded_str, location="args") + g.parser.add_argument("view", type=view, location="args") args = strict_parse_args(g.parser, request.args) files = filtering.files( namespace_id=g.namespace.id, - message_public_id=args['message_id'], - filename=args['filename'], - content_type=args['content_type'], - limit=args['limit'], - offset=args['offset'], - view=args['view'], - db_session=g.db_session) + message_public_id=args["message_id"], + filename=args["filename"], + content_type=args["content_type"], + limit=args["limit"], + offset=args["offset"], + view=args["view"], + db_session=g.db_session, + ) return g.encoder.jsonify(files) -@app.route('/files/', methods=['GET']) +@app.route("/files/", methods=["GET"]) def file_read_api(public_id): valid_public_id(public_id) try: - f = g.db_session.query(Block).filter( - Block.public_id == public_id, - Block.namespace_id == g.namespace.id).one() + f = ( + g.db_session.query(Block) + .filter(Block.public_id == public_id, Block.namespace_id == g.namespace.id) + .one() + ) return g.encoder.jsonify(f) except NoResultFound: raise NotFoundError("Couldn't find file {0} ".format(public_id)) -@app.route('/files/', methods=['DELETE']) +@app.route("/files/", methods=["DELETE"]) def file_delete_api(public_id): valid_public_id(public_id) try: - f = g.db_session.query(Block).filter( - Block.public_id == public_id, - Block.namespace_id == g.namespace.id).one() - - if g.db_session.query(Block).join(Part) \ - .filter(Block.public_id == public_id).first() is not None: + f = ( + g.db_session.query(Block) + .filter(Block.public_id == public_id, Block.namespace_id == g.namespace.id) + .one() + ) + + if ( + g.db_session.query(Block) + .join(Part) + .filter(Block.public_id == public_id) + .first() + is not None + ): raise InputError("Can't delete file that is attachment.") g.db_session.delete(f) @@ -1319,11 +1461,11 @@ def file_delete_api(public_id): # You can test with # $ curl http://localhost:5555/n/4s4iz36h36w17kumggi36ha2b/files \ # --form upload=@dancingbaby.gif -@app.route('/files/', methods=['POST']) +@app.route("/files/", methods=["POST"]) def file_upload_api(): all_files = [] for name, uploaded in request.files.iteritems(): - request.environ['log_context'].setdefault('filenames', []).append(name) + request.environ["log_context"].setdefault("filenames", []).append(name) f = Block() f.namespace = g.namespace f.content_type = uploaded.content_type @@ -1340,13 +1482,15 @@ def file_upload_api(): # # File downloads # -@app.route('/files//download') +@app.route("/files//download") def file_download_api(public_id): valid_public_id(public_id) try: - f = g.db_session.query(Block).filter( - Block.public_id == public_id, - Block.namespace_id == g.namespace.id).one() + f = ( + g.db_session.query(Block) + .filter(Block.public_id == public_id, Block.namespace_id == g.namespace.id) + .one() + ) except NoResultFound: raise NotFoundError("Couldn't find file {0} 
".format(public_id)) @@ -1359,104 +1503,121 @@ def file_download_api(public_id): else: # TODO Detect the content-type using the magic library # and set ct = the content type, which is used below - request.environ['log_context']['no_content_type'] = True - ct = 'text/plain' - request.environ['log_context']['content_type'] = ct + request.environ["log_context"]["no_content_type"] = True + ct = "text/plain" + request.environ["log_context"]["content_type"] = ct if f.filename: name = f.filename else: - request.environ['log_context']['no_filename'] = True + request.environ["log_context"]["no_filename"] = True if ct in common_extensions: - name = 'attachment.{0}'.format(common_extensions[ct]) + name = "attachment.{0}".format(common_extensions[ct]) else: # HACK just append the major part of the content type - name = 'attachment.{0}'.format(ct.split('/')[0]) + name = "attachment.{0}".format(ct.split("/")[0]) # TODO the part.data object should really behave like a stream we can read # & write to try: account = g.namespace.account - statsd_string = 'api.direct_fetching.{}.{}'.format(account.provider, - account.id) + statsd_string = "api.direct_fetching.{}.{}".format(account.provider, account.id) response = make_response(f.data) - statsd_client.incr('{}.successes'.format(statsd_string)) + statsd_client.incr("{}.successes".format(statsd_string)) except TemporaryEmailFetchException: - statsd_client.incr('{}.temporary_failure'.format(statsd_string)) - log.warning('Exception when fetching email', - account_id=account.id, provider=account.provider, - logstash_tag='direct_fetching', exc_info=True) - - return err(503, "Email server returned a temporary error. " - "Please try again in a few minutes.") + statsd_client.incr("{}.temporary_failure".format(statsd_string)) + log.warning( + "Exception when fetching email", + account_id=account.id, + provider=account.provider, + logstash_tag="direct_fetching", + exc_info=True, + ) + + return err( + 503, + "Email server returned a temporary error. " + "Please try again in a few minutes.", + ) except EmailDeletedException: - statsd_client.incr('{}.deleted'.format(statsd_string)) - log.warning('Exception when fetching email', - account_id=account.id, provider=account.provider, - logstash_tag='direct_fetching', exc_info=True) + statsd_client.incr("{}.deleted".format(statsd_string)) + log.warning( + "Exception when fetching email", + account_id=account.id, + provider=account.provider, + logstash_tag="direct_fetching", + exc_info=True, + ) return err(404, "The data was deleted on the email server.") except EmailFetchException: - statsd_client.incr('{}.failures'.format(statsd_string)) - log.warning('Exception when fetching email', - logstash_tag='direct_fetching', exc_info=True) + statsd_client.incr("{}.failures".format(statsd_string)) + log.warning( + "Exception when fetching email", + logstash_tag="direct_fetching", + exc_info=True, + ) return err(404, "Couldn't find data on email server.") - response.headers['Content-Type'] = 'application/octet-stream' # ct + response.headers["Content-Type"] = "application/octet-stream" # ct # Werkzeug will try to encode non-ascii header values as latin-1. Try that # first; if it fails, use RFC2047/MIME encoding. See # https://tools.ietf.org/html/rfc7230#section-3.2.4. try: - name = name.encode('latin-1') + name = name.encode("latin-1") except UnicodeEncodeError: - name = '=?utf-8?b?' 
+ base64.b64encode(name.encode('utf-8')) + '?=' - response.headers['Content-Disposition'] = \ - 'attachment; filename={0}'.format(name) + name = "=?utf-8?b?" + base64.b64encode(name.encode("utf-8")) + "?=" + response.headers["Content-Disposition"] = "attachment; filename={0}".format(name) - request.environ['log_context']['headers'] = response.headers + request.environ["log_context"]["headers"] = response.headers return response ## # Calendars ## -@app.route('/calendars/', methods=['GET']) +@app.route("/calendars/", methods=["GET"]) def calendar_api(): - g.parser.add_argument('view', type=view, location='args') + g.parser.add_argument("view", type=view, location="args") args = strict_parse_args(g.parser, request.args) - if args['view'] == 'count': + if args["view"] == "count": query = g.db_session.query(func.count(Calendar.id)) - elif args['view'] == 'ids': + elif args["view"] == "ids": query = g.db_session.query(Calendar.public_id) else: query = g.db_session.query(Calendar) - results = query.filter(Calendar.namespace_id == g.namespace.id). \ - order_by(asc(Calendar.id)) + results = query.filter(Calendar.namespace_id == g.namespace.id).order_by( + asc(Calendar.id) + ) - if args['view'] == 'count': + if args["view"] == "count": return g.encoder.jsonify({"count": results.scalar()}) - results = results.limit(args['limit']).offset(args['offset']).all() - if args['view'] == 'ids': + results = results.limit(args["limit"]).offset(args["offset"]).all() + if args["view"] == "ids": return g.encoder.jsonify([r for r, in results]) return g.encoder.jsonify(results) -@app.route('/calendars/', methods=['GET']) +@app.route("/calendars/", methods=["GET"]) def calendar_read_api(public_id): """Get all data for an existing calendar.""" valid_public_id(public_id) try: - calendar = g.db_session.query(Calendar).filter( - Calendar.public_id == public_id, - Calendar.namespace_id == g.namespace.id).one() + calendar = ( + g.db_session.query(Calendar) + .filter( + Calendar.public_id == public_id, Calendar.namespace_id == g.namespace.id + ) + .one() + ) except NoResultFound: raise NotFoundError("Couldn't find calendar {0}".format(public_id)) return g.encoder.jsonify(calendar) @@ -1469,214 +1630,227 @@ def calendar_read_api(public_id): # TODO(emfree, kavya): Systematically validate user input, and return # meaningful errors for invalid input. 
-@app.route('/drafts/', methods=['GET']) + +@app.route("/drafts/", methods=["GET"]) def draft_query_api(): - g.parser.add_argument('subject', type=bounded_str, location='args') - g.parser.add_argument('to', type=bounded_str, location='args') - g.parser.add_argument('cc', type=bounded_str, location='args') - g.parser.add_argument('bcc', type=bounded_str, location='args') - g.parser.add_argument('any_email', type=comma_separated_email_list, - location='args') - g.parser.add_argument('started_before', type=timestamp, location='args') - g.parser.add_argument('started_after', type=timestamp, location='args') - g.parser.add_argument('last_message_before', type=timestamp, - location='args') - g.parser.add_argument('last_message_after', type=timestamp, - location='args') - g.parser.add_argument('received_before', type=timestamp, - location='args') - g.parser.add_argument('received_after', type=timestamp, - location='args') - g.parser.add_argument('filename', type=bounded_str, location='args') - g.parser.add_argument('in', type=bounded_str, location='args') - g.parser.add_argument('thread_id', type=valid_public_id, location='args') - g.parser.add_argument('unread', type=strict_bool, location='args') - g.parser.add_argument('starred', type=strict_bool, location='args') - g.parser.add_argument('view', type=view, location='args') + g.parser.add_argument("subject", type=bounded_str, location="args") + g.parser.add_argument("to", type=bounded_str, location="args") + g.parser.add_argument("cc", type=bounded_str, location="args") + g.parser.add_argument("bcc", type=bounded_str, location="args") + g.parser.add_argument("any_email", type=comma_separated_email_list, location="args") + g.parser.add_argument("started_before", type=timestamp, location="args") + g.parser.add_argument("started_after", type=timestamp, location="args") + g.parser.add_argument("last_message_before", type=timestamp, location="args") + g.parser.add_argument("last_message_after", type=timestamp, location="args") + g.parser.add_argument("received_before", type=timestamp, location="args") + g.parser.add_argument("received_after", type=timestamp, location="args") + g.parser.add_argument("filename", type=bounded_str, location="args") + g.parser.add_argument("in", type=bounded_str, location="args") + g.parser.add_argument("thread_id", type=valid_public_id, location="args") + g.parser.add_argument("unread", type=strict_bool, location="args") + g.parser.add_argument("starred", type=strict_bool, location="args") + g.parser.add_argument("view", type=view, location="args") args = strict_parse_args(g.parser, request.args) drafts = filtering.messages_or_drafts( namespace_id=g.namespace.id, drafts=True, - subject=args['subject'], - thread_public_id=args['thread_id'], - to_addr=args['to'], + subject=args["subject"], + thread_public_id=args["thread_id"], + to_addr=args["to"], from_addr=None, - cc_addr=args['cc'], - bcc_addr=args['bcc'], - any_email=args['any_email'], - started_before=args['started_before'], - started_after=args['started_after'], - last_message_before=args['last_message_before'], - last_message_after=args['last_message_after'], - received_before=args['received_before'], - received_after=args['received_after'], - filename=args['filename'], - in_=args['in'], - unread=args['unread'], - starred=args['starred'], - limit=args['limit'], - offset=args['offset'], - view=args['view'], - db_session=g.db_session) + cc_addr=args["cc"], + bcc_addr=args["bcc"], + any_email=args["any_email"], + started_before=args["started_before"], + 
started_after=args["started_after"], + last_message_before=args["last_message_before"], + last_message_after=args["last_message_after"], + received_before=args["received_before"], + received_after=args["received_after"], + filename=args["filename"], + in_=args["in"], + unread=args["unread"], + starred=args["starred"], + limit=args["limit"], + offset=args["offset"], + view=args["view"], + db_session=g.db_session, + ) return g.encoder.jsonify(drafts) -@app.route('/drafts/', methods=['GET']) +@app.route("/drafts/", methods=["GET"]) def draft_get_api(public_id): valid_public_id(public_id) - draft = g.db_session.query(Message).filter( - Message.public_id == public_id, - Message.namespace_id == g.namespace.id).first() + draft = ( + g.db_session.query(Message) + .filter(Message.public_id == public_id, Message.namespace_id == g.namespace.id) + .first() + ) if draft is None: raise NotFoundError("Couldn't find draft {}".format(public_id)) return g.encoder.jsonify(draft) -@app.route('/drafts/', methods=['POST']) +@app.route("/drafts/", methods=["POST"]) def draft_create_api(): data = request.get_json(force=True) - draft = create_message_from_json(data, g.namespace, g.db_session, - is_draft=True) + draft = create_message_from_json(data, g.namespace, g.db_session, is_draft=True) return g.encoder.jsonify(draft) -@app.route('/drafts/', methods=['PUT', 'PATCH']) +@app.route("/drafts/", methods=["PUT", "PATCH"]) def draft_update_api(public_id): data = request.get_json(force=True) - original_draft = get_draft(public_id, data.get('version'), g.namespace.id, - g.db_session) + original_draft = get_draft( + public_id, data.get("version"), g.namespace.id, g.db_session + ) # TODO(emfree): what if you try to update a draft on a *thread* that's been # deleted? data = request.get_json(force=True) - to = get_recipients(data.get('to'), 'to') - cc = get_recipients(data.get('cc'), 'cc') - bcc = get_recipients(data.get('bcc'), 'bcc') - from_addr = get_recipients(data.get('from_addr'), 'from_addr') - reply_to = get_recipients(data.get('reply_to'), 'reply_to') + to = get_recipients(data.get("to"), "to") + cc = get_recipients(data.get("cc"), "cc") + bcc = get_recipients(data.get("bcc"), "bcc") + from_addr = get_recipients(data.get("from_addr"), "from_addr") + reply_to = get_recipients(data.get("reply_to"), "reply_to") if from_addr and len(from_addr) > 1: raise InputError("from_addr field can have at most one item") if reply_to and len(reply_to) > 1: raise InputError("reply_to field can have at most one item") - subject = data.get('subject') - body = data.get('body') - files = get_attachments(data.get('file_ids'), g.namespace.id, g.db_session) - - draft = update_draft(g.db_session, g.namespace.account, original_draft, - to, subject, body, files, cc, bcc, from_addr, - reply_to) + subject = data.get("subject") + body = data.get("body") + files = get_attachments(data.get("file_ids"), g.namespace.id, g.db_session) + + draft = update_draft( + g.db_session, + g.namespace.account, + original_draft, + to, + subject, + body, + files, + cc, + bcc, + from_addr, + reply_to, + ) return g.encoder.jsonify(draft) -@app.route('/drafts/', methods=['DELETE']) +@app.route("/drafts/", methods=["DELETE"]) def draft_delete_api(public_id): data = request.get_json(force=True) # Validate draft id, version, etc. 
- draft = get_draft(public_id, data.get('version'), g.namespace.id, - g.db_session) + draft = get_draft(public_id, data.get("version"), g.namespace.id, g.db_session) result = delete_draft(g.db_session, g.namespace.account, draft) return g.encoder.jsonify(result) -@app.route('/send', methods=['POST']) -@app.route('/send-with-features', methods=['POST']) # TODO deprecate this URL +@app.route("/send", methods=["POST"]) +@app.route("/send-with-features", methods=["POST"]) # TODO deprecate this URL def draft_send_api(): request_started = time.time() account = g.namespace.account if request.content_type == "message/rfc822": - draft = create_draft_from_mime(account, request.data, - g.db_session) + draft = create_draft_from_mime(account, request.data, g.db_session) validate_draft_recipients(draft) if isinstance(account, GenericAccount): - schedule_action('save_sent_email', draft, draft.namespace.id, - g.db_session) + schedule_action("save_sent_email", draft, draft.namespace.id, g.db_session) resp = send_raw_mime(account, g.db_session, draft) return resp data = request.get_json(force=True) # Check if using tracking - tracking_options = data.get('tracking', {}) + tracking_options = data.get("tracking", {}) - draft_public_id = data.get('draft_id') + draft_public_id = data.get("draft_id") if draft_public_id is not None: - draft = get_draft(draft_public_id, data.get('version'), - g.namespace.id, g.db_session) + draft = get_draft( + draft_public_id, data.get("version"), g.namespace.id, g.db_session + ) else: - draft = create_message_from_json(data, g.namespace, - g.db_session, is_draft=False) + draft = create_message_from_json( + data, g.namespace, g.db_session, is_draft=False + ) validate_draft_recipients(draft) if tracking_options: # Open/Link/Reply tracking set try: from redwood.api.tracking import handle_tracking_options except ImportError: - return err(501, - 'Tracking is not implemented in the open source ' - 'Nylas Cloud API. See our hosted version for this ' - 'feature. https://nylas.com/cloud') + return err( + 501, + "Tracking is not implemented in the open source " + "Nylas Cloud API. See our hosted version for this " + "feature. https://nylas.com/cloud", + ) - assert hasattr(g, 'application_id'), \ - 'Tracking requires application ID' + assert hasattr(g, "application_id"), "Tracking requires application ID" handle_tracking_options( - mailsync_db_session=g.db_session, - tracking_options=tracking_options, - draft=draft, - application_id=g.application_id) + mailsync_db_session=g.db_session, + tracking_options=tracking_options, + draft=draft, + application_id=g.application_id, + ) if isinstance(account, GenericAccount): - schedule_action('save_sent_email', draft, draft.namespace.id, - g.db_session) + schedule_action("save_sent_email", draft, draft.namespace.id, g.db_session) if time.time() - request_started > SEND_TIMEOUT: # Preemptively time out the request if we got stuck doing database work # -- we don't want clients to disconnect and then still send the # message. 
- return err(504, 'Request timed out.') + return err(504, "Request timed out.") resp = send_draft(account, draft, g.db_session) # Only delete the draft once we know it has been sent if draft_public_id is not None and resp.status_code == 200: - schedule_action('delete_draft', draft, draft.namespace.id, - g.db_session, nylas_uid=draft.nylas_uid, - message_id_header=draft.message_id_header) + schedule_action( + "delete_draft", + draft, + draft.namespace.id, + g.db_session, + nylas_uid=draft.nylas_uid, + message_id_header=draft.message_id_header, + ) return resp -@app.route('/send-multiple', methods=['POST']) +@app.route("/send-multiple", methods=["POST"]) def multi_send_create(): """Initiates a multi-send session by creating a new multi-send draft.""" account = g.namespace.account - if account.discriminator == 'easaccount': - raise InputError('Multiple send is not supported for this provider.') + if account.discriminator == "easaccount": + raise InputError("Multiple send is not supported for this provider.") data = request.get_json(force=True) # Make a new draft and don't save it to the remote (by passing # is_draft=False) - draft = create_message_from_json(data, g.namespace, - g.db_session, is_draft=False) + draft = create_message_from_json(data, g.namespace, g.db_session, is_draft=False) validate_draft_recipients(draft) # Mark the draft as sending, which ensures that it cannot be modified. draft.mark_as_sending() g.db_session.add(draft) - request.environ['log_context']['draft_public_id'] = draft.public_id + request.environ["log_context"]["draft_public_id"] = draft.public_id return g.encoder.jsonify(draft) -@app.route('/send-multiple/', methods=['POST']) +@app.route("/send-multiple/", methods=["POST"]) def multi_send(draft_id): """Performs a single send operation in an individualized multi-send session. Sends a copy of the draft at draft_id to the specified address @@ -1686,65 +1860,69 @@ def multi_send(draft_id): request_started = time.time() account = g.namespace.account - if account.discriminator == 'easaccount': - raise InputError('Multiple send is not supported for this provider.') + if account.discriminator == "easaccount": + raise InputError("Multiple send is not supported for this provider.") data = request.get_json(force=True) valid_public_id(draft_id) - body = data.get('body') - send_to = get_recipients([data.get('send_to')], 'to')[0] + body = data.get("body") + send_to = get_recipients([data.get("send_to")], "to")[0] draft = get_sending_draft(draft_id, g.namespace.id, g.db_session) if not draft.is_sending: - raise InputError('Invalid draft, not part of a multi-send transaction') + raise InputError("Invalid draft, not part of a multi-send transaction") - emails = {email for name, email in itertools.chain(draft.to_addr, - draft.cc_addr, - draft.bcc_addr)} + emails = { + email + for name, email in itertools.chain(draft.to_addr, draft.cc_addr, draft.bcc_addr) + } if send_to[1] not in emails: - raise InputError('Invalid send_to, not present in message recipients') + raise InputError("Invalid send_to, not present in message recipients") if time.time() - request_started > SEND_TIMEOUT: # Preemptively time out the request if we got stuck doing database work # -- we don't want clients to disconnect and then still send the # message. 
- return err(504, 'Request timed out.') + return err(504, "Request timed out.") start_time = time.time() # Send a copy of the draft with the new body to the send_to address resp = send_draft_copy(account, draft, body, send_to) - request.environ['log_context']["time_to_send"] = time.time() - start_time + request.environ["log_context"]["time_to_send"] = time.time() - start_time return resp -@app.route('/send-multiple/', methods=['DELETE']) +@app.route("/send-multiple/", methods=["DELETE"]) def multi_send_finish(draft_id): """Closes out a multi-send session by marking the sending draft as sent and moving it to the user's Sent folder.""" account = g.namespace.account - if account.discriminator == 'easaccount': - raise InputError('Multiple send is not supported for this provider.') + if account.discriminator == "easaccount": + raise InputError("Multiple send is not supported for this provider.") valid_public_id(draft_id) draft = get_sending_draft(draft_id, g.namespace.id, g.db_session) if not draft.is_sending: - raise InputError('Invalid draft, not part of a multi-send transaction') + raise InputError("Invalid draft, not part of a multi-send transaction") # Synchronously delete any matching messages from the sent folder, left # over from the send calls (in gmail only) if not isinstance(account, GenericAccount): try: with writable_connection_pool(account.id).get() as crispin_client: - remote_delete_sent(crispin_client, account.id, - draft.message_id_header, - delete_multiple=True) + remote_delete_sent( + crispin_client, + account.id, + draft.message_id_header, + delete_multiple=True, + ) except Exception: # Even if this fails, we need to finish off the multi-send session log_exception(sys.exc_info(), draft_public_id=draft.public_id) @@ -1753,7 +1931,7 @@ def multi_send_finish(draft_id): update_draft_on_send(account, draft, g.db_session) # Save the sent message with its existing body to the user's sent folder - schedule_action('save_sent_email', draft, draft.namespace.id, g.db_session) + schedule_action("save_sent_email", draft, draft.namespace.id, g.db_session) return g.encoder.jsonify(draft) @@ -1761,57 +1939,69 @@ def multi_send_finish(draft_id): ## # Client syncing ## -@app.route('/delta') -@app.route('/delta/longpoll') +@app.route("/delta") +@app.route("/delta/longpoll") def sync_deltas(): - g.parser.add_argument('cursor', type=valid_public_id, location='args', - required=True) - g.parser.add_argument('exclude_types', type=valid_delta_object_types, - location='args') - g.parser.add_argument('include_types', type=valid_delta_object_types, - location='args') - g.parser.add_argument('timeout', type=int, - default=LONG_POLL_REQUEST_TIMEOUT, location='args') - g.parser.add_argument('view', type=view, location='args') + g.parser.add_argument( + "cursor", type=valid_public_id, location="args", required=True + ) + g.parser.add_argument( + "exclude_types", type=valid_delta_object_types, location="args" + ) + g.parser.add_argument( + "include_types", type=valid_delta_object_types, location="args" + ) + g.parser.add_argument( + "timeout", type=int, default=LONG_POLL_REQUEST_TIMEOUT, location="args" + ) + g.parser.add_argument("view", type=view, location="args") # - Begin shim - # Remove after folders and labels exposed in the Delta API for everybody, # right now, only expose for Edgehill. # Same for the account object. 
- g.parser.add_argument('exclude_folders', type=strict_bool, location='args') - g.parser.add_argument('exclude_account', type=strict_bool, location='args', - default=True) + g.parser.add_argument("exclude_folders", type=strict_bool, location="args") + g.parser.add_argument( + "exclude_account", type=strict_bool, location="args", default=True + ) # - End shim - # Metadata has restricted access - only N1 can make a request with this # arg included. For everyone else, set exclude_metadata to True by default. - g.parser.add_argument('exclude_metadata', type=strict_bool, - location='args', default=True) + g.parser.add_argument( + "exclude_metadata", type=strict_bool, location="args", default=True + ) args = strict_parse_args(g.parser, request.args) - exclude_types = args.get('exclude_types') - include_types = args.get('include_types') - expand = args.get('view') == 'expanded' - exclude_metadata = args.get('exclude_metadata') + exclude_types = args.get("exclude_types") + include_types = args.get("include_types") + expand = args.get("view") == "expanded" + exclude_metadata = args.get("exclude_metadata") # - Begin shim - - exclude_folders = args.get('exclude_folders') + exclude_folders = args.get("exclude_folders") if exclude_folders is None: exclude_folders = True - exclude_account = args.get('exclude_account') + exclude_account = args.get("exclude_account") # - End shim - - cursor = args['cursor'] - timeout = args['timeout'] + cursor = args["cursor"] + timeout = args["timeout"] if include_types and exclude_types: - raise InputError("Invalid Request. Cannot specify both include_types" - "and exclude_types") + raise InputError( + "Invalid Request. Cannot specify both include_types" "and exclude_types" + ) - if cursor == '0': + if cursor == "0": start_pointer = 0 else: try: - start_pointer, = g.db_session.query(Transaction.id). \ - filter(Transaction.public_id == cursor, - Transaction.namespace_id == g.namespace.id).one() + (start_pointer,) = ( + g.db_session.query(Transaction.id) + .filter( + Transaction.public_id == cursor, + Transaction.namespace_id == g.namespace.id, + ) + .one() + ) except NoResultFound: - raise InputError('Invalid cursor parameter') + raise InputError("Invalid cursor parameter") # The client wants us to wait until there are changes g.db_session.expunge(g.namespace) @@ -1822,119 +2012,143 @@ def sync_deltas(): while time.time() - start_time < timeout: with session_scope(g.namespace.id) as db_session: deltas, end_pointer = delta_sync.format_transactions_after_pointer( - g.namespace, start_pointer, db_session, args['limit'], - exclude_types, include_types, exclude_folders, - exclude_metadata, exclude_account, expand=expand) + g.namespace, + start_pointer, + db_session, + args["limit"], + exclude_types, + include_types, + exclude_folders, + exclude_metadata, + exclude_account, + expand=expand, + ) response = { - 'cursor_start': cursor, - 'deltas': deltas, + "cursor_start": cursor, + "deltas": deltas, } if deltas: end_transaction = g.db_session.query(Transaction).get(end_pointer) - response['cursor_end'] = deltas[-1]['cursor'] - response['timestamp'] = end_transaction.created_at + response["cursor_end"] = deltas[-1]["cursor"] + response["timestamp"] = end_transaction.created_at return g.encoder.jsonify(response) # No changes. 
perhaps wait - elif '/delta/longpoll' in request.url_rule.rule: + elif "/delta/longpoll" in request.url_rule.rule: gevent.sleep(poll_interval) else: # Return immediately - response['cursor_end'] = cursor - response['timestamp'] = datetime.utcnow() + response["cursor_end"] = cursor + response["timestamp"] = datetime.utcnow() return g.encoder.jsonify(response) # If nothing happens until timeout, just return the end of the cursor - response['cursor_end'] = cursor + response["cursor_end"] = cursor return g.encoder.jsonify(response) # TODO Deprecate this -@app.route('/delta/generate_cursor', methods=['POST']) +@app.route("/delta/generate_cursor", methods=["POST"]) def generate_cursor(): data = request.get_json(force=True) - if data.keys() != ['start'] or not isinstance(data['start'], int): - raise InputError('generate_cursor request body must have the format ' - '{"start": (seconds)}') + if data.keys() != ["start"] or not isinstance(data["start"], int): + raise InputError( + "generate_cursor request body must have the format " + '{"start": (seconds)}' + ) - timestamp = int(data['start']) + timestamp = int(data["start"]) try: datetime.utcfromtimestamp(timestamp) except ValueError: - raise InputError('generate_cursor request body must have the format ' - '{"start": (seconds)}') + raise InputError( + "generate_cursor request body must have the format " + '{"start": (seconds)}' + ) cursor = delta_sync.get_transaction_cursor_near_timestamp( - g.namespace.id, timestamp, g.db_session) - return g.encoder.jsonify({'cursor': cursor}) + g.namespace.id, timestamp, g.db_session + ) + return g.encoder.jsonify({"cursor": cursor}) -@app.route('/delta/latest_cursor', methods=['POST']) +@app.route("/delta/latest_cursor", methods=["POST"]) def latest_cursor(): cursor = delta_sync.get_transaction_cursor_near_timestamp( - g.namespace.id, - int(time.time()), - g.db_session) - return g.encoder.jsonify({'cursor': cursor}) + g.namespace.id, int(time.time()), g.db_session + ) + return g.encoder.jsonify({"cursor": cursor}) ## # Streaming ## -@app.route('/delta/streaming') + +@app.route("/delta/streaming") def stream_changes(): - g.parser.add_argument('timeout', type=float, location='args') - g.parser.add_argument('cursor', type=valid_public_id, location='args', - required=True) - g.parser.add_argument('exclude_types', type=valid_delta_object_types, - location='args') - g.parser.add_argument('include_types', type=valid_delta_object_types, - location='args') - g.parser.add_argument('view', type=view, location='args') + g.parser.add_argument("timeout", type=float, location="args") + g.parser.add_argument( + "cursor", type=valid_public_id, location="args", required=True + ) + g.parser.add_argument( + "exclude_types", type=valid_delta_object_types, location="args" + ) + g.parser.add_argument( + "include_types", type=valid_delta_object_types, location="args" + ) + g.parser.add_argument("view", type=view, location="args") # - Begin shim - # Remove after folders and labels exposed in the Delta API for everybody, # right now, only expose for Edgehill. # Same for the account object. - g.parser.add_argument('exclude_folders', type=strict_bool, location='args') - g.parser.add_argument('exclude_account', type=strict_bool, location='args', - default=True) + g.parser.add_argument("exclude_folders", type=strict_bool, location="args") + g.parser.add_argument( + "exclude_account", type=strict_bool, location="args", default=True + ) # - End shim - # Metadata has restricted access - only N1 can make a request with this # arg included. 
For everyone else, set exclude_metadata to True by default. - g.parser.add_argument('exclude_metadata', type=strict_bool, - location='args', default=True) + g.parser.add_argument( + "exclude_metadata", type=strict_bool, location="args", default=True + ) args = strict_parse_args(g.parser, request.args) - timeout = args['timeout'] or 1800 + timeout = args["timeout"] or 1800 transaction_pointer = None - cursor = args['cursor'] - exclude_types = args.get('exclude_types') - include_types = args.get('include_types') - expand = args.get('view') == 'expanded' - exclude_metadata = args.get('exclude_metadata') + cursor = args["cursor"] + exclude_types = args.get("exclude_types") + include_types = args.get("include_types") + expand = args.get("view") == "expanded" + exclude_metadata = args.get("exclude_metadata") # Begin shim # - exclude_folders = args.get('exclude_folders') + exclude_folders = args.get("exclude_folders") if exclude_folders is None: exclude_folders = True - exclude_account = args.get('exclude_account') + exclude_account = args.get("exclude_account") # End shim # if include_types and exclude_types: - raise InputError("Invalid Request. Cannot specify both include_types" - "and exclude_types") + raise InputError( + "Invalid Request. Cannot specify both include_types" "and exclude_types" + ) - if cursor == '0': + if cursor == "0": transaction_pointer = 0 else: - query_result = g.db_session.query(Transaction.id).filter( - Transaction.namespace_id == g.namespace.id, - Transaction.public_id == cursor).first() + query_result = ( + g.db_session.query(Transaction.id) + .filter( + Transaction.namespace_id == g.namespace.id, + Transaction.public_id == cursor, + ) + .first() + ) if query_result is None: - raise InputError('Invalid cursor {}'.format(args['cursor'])) + raise InputError("Invalid cursor {}".format(args["cursor"])) transaction_pointer = query_result[0] # Hack to not keep a database session open for the entire (long) request @@ -1942,47 +2156,58 @@ def stream_changes(): g.db_session.expunge(g.namespace) g.db_session.close() - poll_interval = config.get('STREAMING_API_POLL_INTERVAL', 1) + poll_interval = config.get("STREAMING_API_POLL_INTERVAL", 1) # TODO make transaction log support the `expand` feature - is_n1 = request.environ.get('IS_N1', False) + is_n1 = request.environ.get("IS_N1", False) generator = delta_sync.streaming_change_generator( - g.namespace, transaction_pointer=transaction_pointer, - poll_interval=poll_interval, timeout=timeout, - exclude_types=exclude_types, include_types=include_types, + g.namespace, + transaction_pointer=transaction_pointer, + poll_interval=poll_interval, + timeout=timeout, + exclude_types=exclude_types, + include_types=include_types, exclude_folders=exclude_folders, - exclude_metadata=exclude_metadata, exclude_account=exclude_account, - expand=expand, is_n1=is_n1) - return Response(stream_with_context(generator), - mimetype='text/event-stream') + exclude_metadata=exclude_metadata, + exclude_account=exclude_account, + expand=expand, + is_n1=is_n1, + ) + return Response(stream_with_context(generator), mimetype="text/event-stream") ## # Groups and Contact Rankings ## -@app.route('/groups/intrinsic') + +@app.route("/groups/intrinsic") def groups_intrinsic(): - g.parser.add_argument('force_recalculate', type=strict_bool, - location='args') + g.parser.add_argument("force_recalculate", type=strict_bool, location="args") args = strict_parse_args(g.parser, request.args) try: - dpcache = g.db_session.query(DataProcessingCache).filter( - 
DataProcessingCache.namespace_id == g.namespace.id).one() + dpcache = ( + g.db_session.query(DataProcessingCache) + .filter(DataProcessingCache.namespace_id == g.namespace.id) + .one() + ) except NoResultFound: dpcache = DataProcessingCache(namespace_id=g.namespace.id) last_updated = dpcache.contact_groups_last_updated cached_data = dpcache.contact_groups - use_cached_data = (not (is_stale(last_updated) or cached_data is None) and - args['force_recalculate'] is not True) + use_cached_data = ( + not (is_stale(last_updated) or cached_data is None) + and args["force_recalculate"] is not True + ) if not use_cached_data: last_updated = None messages = filtering.messages_for_contact_scores( - g.db_session, g.namespace.id, last_updated) + g.db_session, g.namespace.id, last_updated + ) from_email = g.namespace.email_address @@ -2004,28 +2229,33 @@ def groups_intrinsic(): return g.encoder.jsonify(result) -@app.route('/contacts/rankings') +@app.route("/contacts/rankings") def contact_rankings(): - g.parser.add_argument('force_recalculate', type=strict_bool, - location='args') + g.parser.add_argument("force_recalculate", type=strict_bool, location="args") args = strict_parse_args(g.parser, request.args) try: - dpcache = g.db_session.query(DataProcessingCache).filter( - DataProcessingCache.namespace_id == g.namespace.id).one() + dpcache = ( + g.db_session.query(DataProcessingCache) + .filter(DataProcessingCache.namespace_id == g.namespace.id) + .one() + ) except NoResultFound: dpcache = DataProcessingCache(namespace_id=g.namespace.id) last_updated = dpcache.contact_rankings_last_updated cached_data = dpcache.contact_rankings - use_cached_data = (not (is_stale(last_updated) or cached_data is None) and - args['force_recalculate'] is not True) + use_cached_data = ( + not (is_stale(last_updated) or cached_data is None) + and args["force_recalculate"] is not True + ) if not use_cached_data: last_updated = None messages = filtering.messages_for_contact_scores( - g.db_session, g.namespace.id, last_updated) + g.db_session, g.namespace.id, last_updated + ) if use_cached_data: new_guys = calculate_contact_scores(messages, time_dependent=False) diff --git a/inbox/api/sending.py b/inbox/api/sending.py index a5090f83d..75555210f 100644 --- a/inbox/api/sending.py +++ b/inbox/api/sending.py @@ -3,6 +3,7 @@ from inbox.api.err import err from inbox.api.kellogs import APIEncoder, encode from inbox.sendmail.base import get_sendmail_client, SendMailException + log = get_logger() @@ -20,9 +21,9 @@ def send_draft(account, draft, db_session): except SendMailException as exc: kwargs = {} if exc.failures: - kwargs['failures'] = exc.failures + kwargs["failures"] = exc.failures if exc.server_error: - kwargs['server_error'] = exc.server_error + kwargs["server_error"] = exc.server_error return err(exc.http_code, exc.message, **kwargs) return response_on_success @@ -41,7 +42,7 @@ def send_draft_copy(account, draft, custom_body, recipient): # body (which we still need to retain in the draft for when it's saved to # the sent folder). response_on_success = encode(draft) - response_on_success['body'] = custom_body + response_on_success["body"] = custom_body response_on_success = APIEncoder().jsonify(response_on_success) # Now send the draft to the specified recipient. 
The send_custom method @@ -53,9 +54,9 @@ def send_draft_copy(account, draft, custom_body, recipient): except SendMailException as exc: kwargs = {} if exc.failures: - kwargs['failures'] = exc.failures + kwargs["failures"] = exc.failures if exc.server_error: - kwargs['server_error'] = exc.server_error + kwargs["server_error"] = exc.server_error return err(exc.http_code, exc.message, **kwargs) return response_on_success @@ -83,9 +84,9 @@ def send_raw_mime(account, db_session, msg): except SendMailException as exc: kwargs = {} if exc.failures: - kwargs['failures'] = exc.failures + kwargs["failures"] = exc.failures if exc.server_error: - kwargs['server_error'] = exc.server_error + kwargs["server_error"] = exc.server_error return err(exc.http_code, exc.message, **kwargs) return response_on_success diff --git a/inbox/api/srv.py b/inbox/api/srv.py index e0639f6d8..2ce477ac6 100644 --- a/inbox/api/srv.py +++ b/inbox/api/srv.py @@ -1,4 +1,3 @@ - from flask import Flask, request, jsonify, make_response, g from flask.ext.restful import reqparse from werkzeug.exceptions import default_exceptions, HTTPException @@ -13,8 +12,12 @@ from inbox.models.backends.gmail import GmailAccount, GOOGLE_EMAIL_SCOPE from inbox.models.session import global_session_scope from inbox.api.err import APIException, InputError, NotFoundError -from inbox.api.validation import (bounded_str, ValidatableArgument, - strict_parse_args, limit) +from inbox.api.validation import ( + bounded_str, + ValidatableArgument, + strict_parse_args, + limit, +) from inbox.api.validation import valid_public_id from metrics_api import app as metrics_api @@ -32,9 +35,10 @@ reconfigure_logging() + @app.errorhandler(APIException) def handle_input_error(error): - response = jsonify(message=error.message, type='invalid_request_error') + response = jsonify(message=error.message, type="invalid_request_error") response.status_code = error.status_code return response @@ -42,13 +46,12 @@ def handle_input_error(error): def default_json_error(ex): """ Exception -> flask JSON responder """ logger = get_logger() - logger.error('Uncaught error thrown by Flask/Werkzeug', exc_info=ex) - response = jsonify(message=str(ex), type='api_error') - response.status_code = (ex.code - if isinstance(ex, HTTPException) - else 500) + logger.error("Uncaught error thrown by Flask/Werkzeug", exc_info=ex) + response = jsonify(message=str(ex), type="api_error") + response.status_code = ex.code if isinstance(ex, HTTPException) else 500 return response + # Patch all error handlers in werkzeug for code in default_exceptions.iterkeys(): app.error_handler_spec[None][code] = default_json_error @@ -57,26 +60,30 @@ def default_json_error(ex): @app.before_request def auth(): """ Check for account ID on all non-root URLS """ - if request.path == '/' \ - or request.path.startswith('/accounts') \ - or request.path.startswith('/w/') \ - or request.path.startswith('/metrics'): + if ( + request.path == "/" + or request.path.startswith("/accounts") + or request.path.startswith("/w/") + or request.path.startswith("/metrics") + ): return if not request.authorization or not request.authorization.username: - AUTH_ERROR_MSG = ("Could not verify access credential.", 401, - {'WWW-Authenticate': 'Basic realm="API ' - 'Access Token Required"'}) + AUTH_ERROR_MSG = ( + "Could not verify access credential.", + 401, + {"WWW-Authenticate": 'Basic realm="API ' 'Access Token Required"'}, + ) - auth_header = request.headers.get('Authorization', None) + auth_header = request.headers.get("Authorization", None) if 
not auth_header: return make_response(AUTH_ERROR_MSG) parts = auth_header.split() - if (len(parts) != 2 or parts[0].lower() != 'bearer' or not parts[1]): + if len(parts) != 2 or parts[0].lower() != "bearer" or not parts[1]: return make_response(AUTH_ERROR_MSG) namespace_public_id = parts[1] @@ -86,31 +93,37 @@ def auth(): with global_session_scope() as db_session: try: valid_public_id(namespace_public_id) - namespace = db_session.query(Namespace) \ - .filter(Namespace.public_id == namespace_public_id).one() + namespace = ( + db_session.query(Namespace) + .filter(Namespace.public_id == namespace_public_id) + .one() + ) g.namespace_id = namespace.id g.account_id = namespace.account.id except NoResultFound: - return make_response(( - "Could not verify access credential.", 401, - {'WWW-Authenticate': 'Basic realm="API ' - 'Access Token Required"'})) + return make_response( + ( + "Could not verify access credential.", + 401, + {"WWW-Authenticate": 'Basic realm="API ' 'Access Token Required"'}, + ) + ) @app.after_request def finish(response): - origin = request.headers.get('origin') + origin = request.headers.get("origin") if origin: # means it's just a regular request - response.headers['Access-Control-Allow-Origin'] = origin - response.headers['Access-Control-Allow-Headers'] = \ - 'Authorization,Content-Type' - response.headers['Access-Control-Allow-Methods'] = \ - 'GET,PUT,POST,DELETE,OPTIONS,PATCH' - response.headers['Access-Control-Allow-Credentials'] = 'true' + response.headers["Access-Control-Allow-Origin"] = origin + response.headers["Access-Control-Allow-Headers"] = "Authorization,Content-Type" + response.headers[ + "Access-Control-Allow-Methods" + ] = "GET,PUT,POST,DELETE,OPTIONS,PATCH" + response.headers["Access-Control-Allow-Credentials"] = "true" return response -@app.route('/accounts/', methods=['GET']) +@app.route("/accounts/", methods=["GET"]) def ns_all(): """ Return all namespaces """ # We do this outside the blueprint to support the case of an empty @@ -118,72 +131,75 @@ def ns_all(): # to make our own session with global_session_scope() as db_session: parser = reqparse.RequestParser(argument_class=ValidatableArgument) - parser.add_argument('limit', default=DEFAULT_LIMIT, type=limit, - location='args') - parser.add_argument('offset', default=0, type=int, location='args') - parser.add_argument('email_address', type=bounded_str, location='args') + parser.add_argument("limit", default=DEFAULT_LIMIT, type=limit, location="args") + parser.add_argument("offset", default=0, type=int, location="args") + parser.add_argument("email_address", type=bounded_str, location="args") args = strict_parse_args(parser, request.args) query = db_session.query(Namespace) - if args['email_address']: + if args["email_address"]: query = query.join(Account) - query = query.filter_by(email_address=args['email_address']) + query = query.filter_by(email_address=args["email_address"]) - query = query.limit(args['limit']) - if args['offset']: - query = query.offset(args['offset']) + query = query.limit(args["limit"]) + if args["offset"]: + query = query.offset(args["offset"]) namespaces = query.all() encoder = APIEncoder() return encoder.jsonify(namespaces) -@app.route('/accounts/', methods=['POST']) +@app.route("/accounts/", methods=["POST"]) def create_account(): """ Create a new account """ data = request.get_json(force=True) - provider = data.get('provider', 'custom') - email_address = data['email_address'] + provider = data.get("provider", "custom") + email_address = data["email_address"] - sync_email = 
data.get('sync_email', True) - sync_calendar = data.get('sync_calendar', False) + sync_email = data.get("sync_email", True) + sync_calendar = data.get("sync_calendar", False) - if data['type'] == 'generic': + if data["type"] == "generic": auth_handler = GenericAuthHandler(provider) - account = auth_handler.create_account(email_address, { - 'name': '', - 'email': email_address, - 'imap_server_host': data['imap_server_host'], - 'imap_server_port': data['imap_server_port'], - 'imap_username': data['imap_username'], - 'imap_password': data['imap_password'], - - # Make Nylas happy with dummy values - 'smtp_server_host': 'localhost', - 'smtp_server_port': 25, - 'smtp_username': 'dummy', - 'smtp_password': 'dummy', - - 'sync_email': sync_email, - }) - - elif data['type'] == 'gmail': - scopes = data.get('scopes', GOOGLE_EMAIL_SCOPE) + account = auth_handler.create_account( + email_address, + { + "name": "", + "email": email_address, + "imap_server_host": data["imap_server_host"], + "imap_server_port": data["imap_server_port"], + "imap_username": data["imap_username"], + "imap_password": data["imap_password"], + # Make Nylas happy with dummy values + "smtp_server_host": "localhost", + "smtp_server_port": 25, + "smtp_username": "dummy", + "smtp_password": "dummy", + "sync_email": sync_email, + }, + ) + + elif data["type"] == "gmail": + scopes = data.get("scopes", GOOGLE_EMAIL_SCOPE) auth_handler = GmailAuthHandler(provider) - account = auth_handler.create_account(email_address, { - 'name': '', - 'email': email_address, - 'refresh_token': data['refresh_token'], - 'scope': scopes, - 'id_token': '', - 'contacts': False, - 'sync_email': sync_email, - 'events': sync_calendar, - }) + account = auth_handler.create_account( + email_address, + { + "name": "", + "email": email_address, + "refresh_token": data["refresh_token"], + "scope": scopes, + "id_token": "", + "contacts": False, + "sync_email": sync_email, + "events": sync_calendar, + }, + ) else: - raise ValueError('Account type not supported.') + raise ValueError("Account type not supported.") with global_session_scope() as db_session: # By default, don't enable accounts so we have the ability to set a @@ -195,7 +211,8 @@ def create_account(): encoder = APIEncoder() return encoder.jsonify(account.namespace) -@app.route('/accounts//', methods=['PUT']) + +@app.route("/accounts//", methods=["PUT"]) def modify_account(namespace_public_id): """ Modify an existing account @@ -205,66 +222,77 @@ def modify_account(namespace_public_id): data = request.get_json(force=True) - provider = data.get('provider', 'custom') - email_address = data['email_address'] + provider = data.get("provider", "custom") + email_address = data["email_address"] - sync_email = data.get('sync_email', True) - sync_calendar = data.get('sync_calendar', False) + sync_email = data.get("sync_email", True) + sync_calendar = data.get("sync_calendar", False) with global_session_scope() as db_session: - namespace = db_session.query(Namespace) \ - .filter(Namespace.public_id == namespace_public_id).one() + namespace = ( + db_session.query(Namespace) + .filter(Namespace.public_id == namespace_public_id) + .one() + ) account = namespace.account if isinstance(account, GenericAccount): - if 'refresh_token' in data: - raise InputError('Cannot change the refresh token on a password account.') + if "refresh_token" in data: + raise InputError( + "Cannot change the refresh token on a password account." 
+ ) auth_handler = GenericAuthHandler(provider) - auth_handler.update_account(account, { - 'name': '', - 'email': email_address, - 'imap_server_host': data['imap_server_host'], - 'imap_server_port': data['imap_server_port'], - 'imap_username': data['imap_username'], - 'imap_password': data['imap_password'], - - # Make Nylas happy with dummy values - 'smtp_server_host': 'localhost', - 'smtp_server_port': 25, - 'smtp_username': 'dummy', - 'smtp_password': 'dummy', - - 'sync_email': sync_email, - }) + auth_handler.update_account( + account, + { + "name": "", + "email": email_address, + "imap_server_host": data["imap_server_host"], + "imap_server_port": data["imap_server_port"], + "imap_username": data["imap_username"], + "imap_password": data["imap_password"], + # Make Nylas happy with dummy values + "smtp_server_host": "localhost", + "smtp_server_port": 25, + "smtp_username": "dummy", + "smtp_password": "dummy", + "sync_email": sync_email, + }, + ) elif isinstance(account, GmailAccount): - scopes = data.get('scopes', GOOGLE_EMAIL_SCOPE) + scopes = data.get("scopes", GOOGLE_EMAIL_SCOPE) auth_handler = GmailAuthHandler(provider) - if 'refresh_token' in data: - account = auth_handler.update_account(account, { - 'name': '', - 'email': email_address, - 'refresh_token': data['refresh_token'], - 'scope': scopes, - 'id_token': '', - 'sync_email': sync_email, - 'contacts': False, - 'events': sync_calendar, - }) + if "refresh_token" in data: + account = auth_handler.update_account( + account, + { + "name": "", + "email": email_address, + "refresh_token": data["refresh_token"], + "scope": scopes, + "id_token": "", + "sync_email": sync_email, + "contacts": False, + "events": sync_calendar, + }, + ) else: - if 'imap_server_host' in data or \ - 'imap_server_port' in data or \ - 'imap_username' in data or \ - 'imap_password' in data: - raise InputError('Cannot change IMAP fields on a Gmail account.') + if ( + "imap_server_host" in data + or "imap_server_port" in data + or "imap_username" in data + or "imap_password" in data + ): + raise InputError("Cannot change IMAP fields on a Gmail account.") else: - raise ValueError('Account type not supported.') + raise ValueError("Account type not supported.") # By default, don't enable accounts so we have the ability to set a # custom sync host. - account.disable_sync('modified-account') + account.disable_sync("modified-account") db_session.add(account) db_session.commit() @@ -272,13 +300,16 @@ def modify_account(namespace_public_id): return encoder.jsonify(account.namespace) -@app.route('/accounts//', methods=['DELETE']) +@app.route("/accounts//", methods=["DELETE"]) def delete_account(namespace_public_id): """ Mark an existing account for deletion. 
""" try: with global_session_scope() as db_session: - namespace = db_session.query(Namespace) \ - .filter(Namespace.public_id == namespace_public_id).one() + namespace = ( + db_session.query(Namespace) + .filter(Namespace.public_id == namespace_public_id) + .one() + ) account = namespace.account account.mark_for_deletion() db_session.commit() @@ -289,18 +320,22 @@ def delete_account(namespace_public_id): return encoder.jsonify({}) -@app.route('/') +@app.route("/") def home(): return "Nylas ready.\n" -@app.route('/logout') + +@app.route("/logout") def logout(): """ Utility function used to force browsers to reset cached HTTP Basic Auth credentials """ - return make_response(( - ".", - 401, - {'WWW-Authenticate': 'Basic realm="API Access Token Required"'})) + return make_response( + ( + ".", + 401, + {"WWW-Authenticate": 'Basic realm="API Access Token Required"'}, + ) + ) app.register_blueprint(metrics_api) diff --git a/inbox/api/update.py b/inbox/api/update.py index d44b2e4a6..d1a59e3a5 100644 --- a/inbox/api/update.py +++ b/inbox/api/update.py @@ -3,16 +3,18 @@ from sqlalchemy.orm.exc import NoResultFound from nylas.logging import get_logger + log = get_logger() from inbox.models import Category, MessageCategory from inbox.models.action_log import schedule_action from inbox.api.validation import valid_public_id from inbox.api.err import InputError + # STOPSHIP(emfree): better naming/structure for this module def update_message(message, request_data, db_session, optimistic): - accept_labels = message.namespace.account.provider == 'gmail' + accept_labels = message.namespace.account.provider == "gmail" # Update flags (message.{is_read, is_starred}) unread, starred = parse_flags(request_data) update_message_flags(message, db_session, optimistic, unread, starred) @@ -22,17 +24,17 @@ def update_message(message, request_data, db_session, optimistic): if labels is not None: added_labels = labels - set(message.categories) removed_labels = set(message.categories) - labels - update_message_labels(message, db_session, added_labels, - removed_labels, optimistic) + update_message_labels( + message, db_session, added_labels, removed_labels, optimistic + ) else: folder = parse_folder(request_data, db_session, message.namespace_id) if folder is not None: - update_message_folder(message, db_session, folder, - optimistic) + update_message_folder(message, db_session, folder, optimistic) def update_thread(thread, request_data, db_session, optimistic): - accept_labels = thread.namespace.account.provider == 'gmail' + accept_labels = thread.namespace.account.provider == "gmail" unread, starred, = parse_flags(request_data) if accept_labels: @@ -40,8 +42,7 @@ def update_thread(thread, request_data, db_session, optimistic): else: folder = parse_folder(request_data, db_session, thread.namespace_id) if request_data: - raise InputError(u'Unexpected attribute: {}'. - format(request_data.keys()[0])) + raise InputError(u"Unexpected attribute: {}".format(request_data.keys()[0])) if accept_labels: if labels is not None: @@ -50,74 +51,84 @@ def update_thread(thread, request_data, db_session, optimistic): for message in thread.messages: if not message.is_draft: - update_message_labels(message, db_session, new_labels, - removed_labels, optimistic) + update_message_labels( + message, db_session, new_labels, removed_labels, optimistic + ) elif folder is not None: for message in thread.messages: # Exclude drafts and sent messages from thread-level moves. 
- if (not message.is_draft and not message.is_sent and - 'sent' not in {c.name for c in message.categories}): - update_message_folder(message, db_session, folder, - optimistic) + if ( + not message.is_draft + and not message.is_sent + and "sent" not in {c.name for c in message.categories} + ): + update_message_folder(message, db_session, folder, optimistic) for message in thread.messages: if not message.is_draft: - update_message_flags(message, db_session, optimistic, unread, - starred) + update_message_flags(message, db_session, optimistic, unread, starred) + ## FLAG UPDATES ## def parse_flags(request_data): - unread = request_data.pop('unread', None) + unread = request_data.pop("unread", None) if unread is not None and not isinstance(unread, bool): raise InputError('"unread" must be true or false') - starred = request_data.pop('starred', None) + starred = request_data.pop("starred", None) if starred is not None and not isinstance(starred, bool): raise InputError('"starred" must be true or false') return unread, starred -def update_message_flags(message, db_session, optimistic, unread=None, - starred=None): +def update_message_flags(message, db_session, optimistic, unread=None, starred=None): if unread is not None: if optimistic: message.is_read = not unread - schedule_action('mark_unread', message, message.namespace_id, - db_session, unread=unread) + schedule_action( + "mark_unread", message, message.namespace_id, db_session, unread=unread + ) if starred is not None: if optimistic: message.is_starred = starred - schedule_action('mark_starred', message, message.namespace_id, - db_session, starred=starred) + schedule_action( + "mark_starred", message, message.namespace_id, db_session, starred=starred + ) + ## FOLDER UPDATES ## def parse_folder(request_data, db_session, namespace_id): # TODO deprecate being able to post "folder" and not "folder_id" - if 'folder_id' not in request_data and 'folder' not in request_data: + if "folder_id" not in request_data and "folder" not in request_data: return - folder_public_id = request_data.pop('folder_id', None) or \ - request_data.pop('folder', None) + folder_public_id = request_data.pop("folder_id", None) or request_data.pop( + "folder", None + ) if folder_public_id is None: # One of 'folder_id'/ 'folder' was present AND set to None. # Not allowed. - raise InputError('Removing all folders is not allowed.') + raise InputError("Removing all folders is not allowed.") valid_public_id(folder_public_id) try: - return db_session.query(Category). \ - filter(Category.namespace_id == namespace_id, - Category.public_id == folder_public_id).one() + return ( + db_session.query(Category) + .filter( + Category.namespace_id == namespace_id, + Category.public_id == folder_public_id, + ) + .one() + ) except NoResultFound: - raise InputError(u'The folder {} does not exist'. 
- format(folder_public_id)) + raise InputError(u"The folder {} does not exist".format(folder_public_id)) def update_message_folder(message, db_session, category, optimistic): @@ -126,24 +137,31 @@ def update_message_folder(message, db_session, category, optimistic): message.categories = [category] message.categories_changes = True - schedule_action('move', message, message.namespace_id, db_session, - destination=category.display_name) + schedule_action( + "move", + message, + message.namespace_id, + db_session, + destination=category.display_name, + ) + ### LABEL UPDATES ### def parse_labels(request_data, db_session, namespace_id): # TODO deprecate being able to post "labels" and not "label_ids" - if 'label_ids' not in request_data and 'labels' not in request_data: + if "label_ids" not in request_data and "labels" not in request_data: return - label_public_ids = request_data.pop('label_ids', []) or \ - request_data.pop('labels', []) + label_public_ids = request_data.pop("label_ids", []) or request_data.pop( + "labels", [] + ) if not label_public_ids: # One of 'label_ids'/ 'labels' was present AND set to []. # Not allowed. - raise InputError('Removing all labels is not allowed.') + raise InputError("Removing all labels is not allowed.") # TODO(emfree): Use a real JSON schema validator for this sort of thing. if not isinstance(label_public_ids, list): @@ -155,23 +173,28 @@ def parse_labels(request_data, db_session, namespace_id): labels = set() for id_ in label_public_ids: try: - category = db_session.query(Category).filter( - Category.namespace_id == namespace_id, - Category.public_id == id_).one() + category = ( + db_session.query(Category) + .filter( + Category.namespace_id == namespace_id, Category.public_id == id_ + ) + .one() + ) labels.add(category) except NoResultFound: - raise InputError(u'The label {} does not exist'.format(id_)) + raise InputError(u"The label {} does not exist".format(id_)) return labels -def update_message_labels(message, db_session, added_categories, - removed_categories, optimistic): +def update_message_labels( + message, db_session, added_categories, removed_categories, optimistic +): special_label_map = { - 'inbox': '\\Inbox', - 'important': '\\Important', - 'all': '\\All', # STOPSHIP(emfree): verify - 'trash': '\\Trash', - 'spam': '\\Spam' + "inbox": "\\Inbox", + "important": "\\Important", + "all": "\\All", # STOPSHIP(emfree): verify + "trash": "\\Trash", + "spam": "\\Spam", } validate_labels(db_session, added_categories, removed_categories) @@ -181,18 +204,16 @@ def update_message_labels(message, db_session, added_categories, for category in added_categories: if category.name in special_label_map: added_labels.append(special_label_map[category.name]) - elif category.name in ('drafts', 'sent'): - raise InputError('The "{}" label cannot be changed'. - format(category.name)) + elif category.name in ("drafts", "sent"): + raise InputError('The "{}" label cannot be changed'.format(category.name)) else: added_labels.append(category.display_name) for category in removed_categories: if category.name in special_label_map: removed_labels.append(special_label_map[category.name]) - elif category.name in ('drafts', 'sent'): - raise InputError('The "{}" label cannot be changed'. 
- format(category.name)) + elif category.name in ("drafts", "sent"): + raise InputError('The "{}" label cannot be changed'.format(category.name)) else: removed_labels.append(category.display_name) @@ -212,8 +233,7 @@ def update_message_labels(message, db_session, added_categories, # created_at value. Taken from # https://docs.sqlalchemy.org/en/13/orm/extensions/ # associationproxy.html#simplifying-association-objects - MessageCategory(category=cat, message=message, - created_at=update_time) + MessageCategory(category=cat, message=message, created_at=update_time) for cat in removed_categories: # Removing '\\All'/ \\Trash'/ '\\Spam' does not do anything on Gmail @@ -221,7 +241,7 @@ def update_message_labels(message, db_session, added_categories, # discard the corresponding category yet. # If one of these has been *added* too, apply_gmail_label_rules() # will do the right thing to ensure mutual exclusion. - if cat.name not in ('all', 'trash', 'spam'): + if cat.name not in ("all", "trash", "spam"): message.categories.discard(cat) # Update the message updated_at field so that it can be used in @@ -237,16 +257,22 @@ def update_message_labels(message, db_session, added_categories, if removed_categories or added_categories: message.updated_at = update_time - apply_gmail_label_rules(db_session, message, added_categories, removed_categories) + apply_gmail_label_rules( + db_session, message, added_categories, removed_categories + ) if removed_labels or added_labels: message.categories_changes = True if removed_labels or added_labels: - schedule_action('change_labels', message, message.namespace_id, - removed_labels=removed_labels, - added_labels=added_labels, - db_session=db_session) + schedule_action( + "change_labels", + message, + message.namespace_id, + removed_labels=removed_labels, + added_labels=added_labels, + db_session=db_session, + ) def validate_labels(db_session, added_categories, removed_categories): @@ -257,19 +283,19 @@ def validate_labels(db_session, added_categories, removed_categories): """ add = {c.name for c in added_categories if c.name} - add_all = ('all' in add) - add_trash = ('trash' in add) - add_spam = ('spam' in add) + add_all = "all" in add + add_trash = "trash" in add + add_spam = "spam" in add if (add_all and (add_trash or add_spam)) or (add_trash and add_spam): raise InputError('Only one of "all", "trash" or "spam" can be added') remove = {c.name for c in removed_categories if c.name} - remove_all = ('all' in remove) - remove_trash = ('trash' in remove) - remove_spam = ('spam' in remove) + remove_all = "all" in remove + remove_trash = "trash" in remove + remove_spam = "spam" in remove - if (remove_all and remove_trash and remove_spam): + if remove_all and remove_trash and remove_spam: raise InputError('"all", "trash" and "spam" cannot all be removed') @@ -293,31 +319,35 @@ def apply_gmail_label_rules(db_session, message, added_categories, removed_categ categories = {c.name: c for c in message.categories if c.name} for cat in added_categories: - if cat.name == 'all': + if cat.name == "all": # Adding the 'all' label should remove the 'trash'/'spam' and # preserve all else. - discard = {'trash', 'spam'} - elif cat.name == 'trash': + discard = {"trash", "spam"} + elif cat.name == "trash": # Adding the 'trash' label should remove the 'all'/'spam' and 'inbox', # and preserve all else. 
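The validate_labels() hunk above enforces two mutual-exclusion rules on Gmail's special labels. A standalone restatement, with plain name strings standing in for Category objects:

def check_label_changes(added_names, removed_names):
    # At most one of "all"/"trash"/"spam" may be added in a single update.
    if len(set(added_names) & {"all", "trash", "spam"}) > 1:
        raise ValueError('Only one of "all", "trash" or "spam" can be added')
    # And the three of them may not all be removed at once.
    if {"all", "trash", "spam"} <= set(removed_names):
        raise ValueError('"all", "trash" and "spam" cannot all be removed')

check_label_changes(["inbox", "all"], ["trash"])  # allowed
try:
    check_label_changes(["trash", "spam"], [])
except ValueError as exc:
    print(exc)  # Only one of "all", "trash" or "spam" can be added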
- discard = {'all', 'spam', 'inbox'} - elif cat.name == 'spam': + discard = {"all", "spam", "inbox"} + elif cat.name == "spam": # Adding the 'spam' label should remove the 'all'/'trash' and 'inbox', # and preserve all else. - discard = {'all', 'trash', 'inbox'} - elif cat.name == 'inbox': + discard = {"all", "trash", "inbox"} + elif cat.name == "inbox": # Adding the 'inbox' label should remove the 'trash'/ 'spam', # adding 'all' if needed, and preserve all else. - add = {'all'} - discard = {'trash', 'spam'} + add = {"all"} + discard = {"trash", "spam"} # Adding any other label does not change the associated folder # so nothing additional needs to be done. for name in add: if name not in categories: - category = db_session.query(Category).filter( - Category.namespace_id == message.namespace_id, - Category.name == name).one() + category = ( + db_session.query(Category) + .filter( + Category.namespace_id == message.namespace_id, Category.name == name + ) + .one() + ) message.categories.add(category) for name in discard: diff --git a/inbox/api/validation.py b/inbox/api/validation.py index 5b6592522..c96b45802 100644 --- a/inbox/api/validation.py +++ b/inbox/api/validation.py @@ -9,8 +9,13 @@ from inbox.models.when import parse_as_when from inbox.models.category import EPOCH from inbox.models.constants import MAX_INDEXABLE_LENGTH -from inbox.api.err import (InputError, NotFoundError, ConflictError, - AccountInvalidError, AccountStoppedError) +from inbox.api.err import ( + InputError, + NotFoundError, + ConflictError, + AccountInvalidError, + AccountStoppedError, +) from inbox.api.kellogs import encode from inbox.util.addr import valid_email @@ -18,49 +23,51 @@ class ValidatableArgument(reqparse.Argument): - def handle_validation_error(self, error): raise InputError(unicode(error)) # Custom parameter types + def bounded_str(value, key): if len(value) > 255: - raise ValueError('Value {} for {} is too long'.format(value, key)) + raise ValueError("Value {} for {} is too long".format(value, key)) return value def comma_separated_email_list(value, key): - addresses = value.split(',') + addresses = value.split(",") # Note that something like "foo,bar"@example.com is technical a valid # email address, but in practice nobody does this (and they shouldn't!) if len(addresses) > 25: # arbitrary limit - raise InputError(u'Too many emails. The current limit is 25') + raise InputError(u"Too many emails. The current limit is 25") good_emails = [] for unvalidated_address in addresses: parsed = address.parse(unvalidated_address, addr_spec_only=True) if not isinstance(parsed, address.EmailAddress): - raise InputError(u'Invalid recipient address {}'. 
- format(unvalidated_address)) + raise InputError( + u"Invalid recipient address {}".format(unvalidated_address) + ) good_emails.append(parsed.address) return good_emails def strict_bool(value, key): - if value.lower() not in ['true', 'false']: - raise ValueError('Value must be "true" or "false" (not "{}") for {}' - .format(value, key)) - return value.lower() == 'true' + if value.lower() not in ["true", "false"]: + raise ValueError( + 'Value must be "true" or "false" (not "{}") for {}'.format(value, key) + ) + return value.lower() == "true" def view(value, key): allowed_views = ["count", "ids", "expanded"] if value not in allowed_views: - raise ValueError('Unknown view type {}.'.format(value)) + raise ValueError("Unknown view type {}.".format(value)) return value @@ -68,12 +75,13 @@ def limit(value): try: value = int(value) except ValueError: - raise ValueError('Limit parameter must be an integer.') + raise ValueError("Limit parameter must be an integer.") if value < 0: - raise ValueError('Limit parameter must be nonnegative.') + raise ValueError("Limit parameter must be nonnegative.") if value > MAX_LIMIT: - raise ValueError('Cannot request more than {} resources at once.'. - format(MAX_LIMIT)) + raise ValueError( + "Cannot request more than {} resources at once.".format(MAX_LIMIT) + ) return value @@ -81,9 +89,9 @@ def offset(value): try: value = int(value) except ValueError: - raise ValueError('Offset parameter must be an integer.') + raise ValueError("Offset parameter must be an integer.") if value < 0: - raise ValueError('Offset parameter must be nonnegative.') + raise ValueError("Offset parameter must be nonnegative.") return value @@ -93,22 +101,22 @@ def valid_public_id(value): # raise TypeError if an integer is passed in int(value, 36) except (TypeError, ValueError): - raise InputError(u'Invalid id: {}'.format(value)) + raise InputError(u"Invalid id: {}".format(value)) return value def valid_account(namespace): - if namespace.account.sync_state == 'invalid': + if namespace.account.sync_state == "invalid": raise AccountInvalidError() - if namespace.account.sync_state == 'stopped': + if namespace.account.sync_state == "stopped": raise AccountStoppedError() def valid_category_type(category_type, rule): if category_type not in rule: - if category_type == 'label': + if category_type == "label": raise NotFoundError("GMail accounts don't support folders") - elif category_type == 'folder': + elif category_type == "folder": raise NotFoundError("Non-GMail accounts don't support labels") return category_type @@ -117,10 +125,9 @@ def timestamp(value, key): try: return arrow.get(value).datetime except ValueError: - raise ValueError('Invalid timestamp value {} for {}'. 
- format(value, key)) + raise ValueError("Invalid timestamp value {} for {}".format(value, key)) except ParserError: - raise ValueError('Invalid datetime value {} for {}'.format(value, key)) + raise ValueError("Invalid datetime value {} for {}".format(value, key)) def strict_parse_args(parser, raw_args): @@ -130,51 +137,61 @@ def strict_parse_args(parser, raw_args): """ args = parser.parse_args() - unexpected_params = (set(raw_args) - {allowed_arg.name for allowed_arg in - parser.args}) + unexpected_params = set(raw_args) - { + allowed_arg.name for allowed_arg in parser.args + } if unexpected_params: - raise InputError('Unexpected query parameters {}'.format( - unexpected_params)) + raise InputError("Unexpected query parameters {}".format(unexpected_params)) return args def get_sending_draft(draft_public_id, namespace_id, db_session): valid_public_id(draft_public_id) try: - draft = db_session.query(Message).filter( - Message.public_id == draft_public_id, - Message.namespace_id == namespace_id).one() + draft = ( + db_session.query(Message) + .filter( + Message.public_id == draft_public_id, + Message.namespace_id == namespace_id, + ) + .one() + ) except NoResultFound: - raise NotFoundError("Couldn't find multi-send draft {}" - .format(draft_public_id)) + raise NotFoundError("Couldn't find multi-send draft {}".format(draft_public_id)) if draft.is_sent or not draft.is_sending: - raise InputError('Message {} is not a multi-send draft' - .format(draft_public_id)) + raise InputError("Message {} is not a multi-send draft".format(draft_public_id)) return draft def get_draft(draft_public_id, version, namespace_id, db_session): valid_public_id(draft_public_id) if version is None: - raise InputError('Must specify draft version') + raise InputError("Must specify draft version") try: version = int(version) except ValueError: - raise InputError('Invalid draft version') + raise InputError("Invalid draft version") try: - draft = db_session.query(Message).filter( - Message.public_id == draft_public_id, - Message.namespace_id == namespace_id).one() + draft = ( + db_session.query(Message) + .filter( + Message.public_id == draft_public_id, + Message.namespace_id == namespace_id, + ) + .one() + ) except NoResultFound: raise NotFoundError("Couldn't find draft {}".format(draft_public_id)) if draft.is_sent or not draft.is_draft: - raise InputError('Message {} is not a draft'.format(draft_public_id)) + raise InputError("Message {} is not a draft".format(draft_public_id)) if draft.version != version: raise ConflictError( - 'Draft {0}.{1} has already been updated to version {2}'. - format(draft_public_id, version, draft.version)) + "Draft {0}.{1} has already been updated to version {2}".format( + draft_public_id, version, draft.version + ) + ) return draft @@ -183,21 +200,24 @@ def get_attachments(block_public_ids, namespace_id, db_session): if block_public_ids is None: return attachments if not isinstance(block_public_ids, list): - raise InputError('{} is not a list of block ids'. - format(block_public_ids)) + raise InputError("{} is not a list of block ids".format(block_public_ids)) for block_public_id in block_public_ids: # Validate public ids before querying with them valid_public_id(block_public_id) try: - block = db_session.query(Block). 
\ - filter(Block.public_id == block_public_id, - Block.namespace_id == namespace_id).one() + block = ( + db_session.query(Block) + .filter( + Block.public_id == block_public_id, + Block.namespace_id == namespace_id, + ) + .one() + ) # In the future we may consider discovering the filetype from the # data by using #magic.from_buffer(data, mime=True)) attachments.add(block) except NoResultFound: - raise InputError('Invalid block public id {}'. - format(block_public_id)) + raise InputError("Invalid block public id {}".format(block_public_id)) return attachments @@ -206,12 +226,16 @@ def get_message(message_public_id, namespace_id, db_session): return None valid_public_id(message_public_id) try: - return db_session.query(Message). \ - filter(Message.public_id == message_public_id, - Message.namespace_id == namespace_id).one() + return ( + db_session.query(Message) + .filter( + Message.public_id == message_public_id, + Message.namespace_id == namespace_id, + ) + .one() + ) except NoResultFound: - raise InputError('Invalid message public id {}'. - format(message_public_id)) + raise InputError("Invalid message public id {}".format(message_public_id)) def get_thread(thread_public_id, namespace_id, db_session): @@ -219,39 +243,49 @@ def get_thread(thread_public_id, namespace_id, db_session): return None valid_public_id(thread_public_id) try: - return db_session.query(Thread). \ - filter(Thread.public_id == thread_public_id, - Thread.deleted_at == None, - Thread.namespace_id == namespace_id).one() + return ( + db_session.query(Thread) + .filter( + Thread.public_id == thread_public_id, + Thread.deleted_at == None, + Thread.namespace_id == namespace_id, + ) + .one() + ) except NoResultFound: - raise InputError('Invalid thread public id {}'. - format(thread_public_id)) + raise InputError("Invalid thread public id {}".format(thread_public_id)) def get_recipients(recipients, field): if recipients is None: return None if not isinstance(recipients, list): - raise InputError('Invalid {} field'.format(field)) + raise InputError("Invalid {} field".format(field)) for r in recipients: - if not (isinstance(r, dict) and 'email' in r and - isinstance(r['email'], basestring)): - raise InputError('Invalid {} field'.format(field)) - if 'name' in r and not isinstance(r['name'], basestring): - raise InputError('Invalid {} field'.format(field)) + if not ( + isinstance(r, dict) and "email" in r and isinstance(r["email"], basestring) + ): + raise InputError("Invalid {} field".format(field)) + if "name" in r and not isinstance(r["name"], basestring): + raise InputError("Invalid {} field".format(field)) - return [(r.get('name', ''), r.get('email', '')) for r in recipients] + return [(r.get("name", ""), r.get("email", "")) for r in recipients] def get_calendar(calendar_public_id, namespace, db_session): valid_public_id(calendar_public_id) try: - return db_session.query(Calendar). 
\ - filter(Calendar.public_id == calendar_public_id, - Calendar.namespace_id == namespace.id).one() + return ( + db_session.query(Calendar) + .filter( + Calendar.public_id == calendar_public_id, + Calendar.namespace_id == namespace.id, + ) + .one() + ) except NoResultFound: - raise NotFoundError('Calendar {} not found'.format(calendar_public_id)) + raise NotFoundError("Calendar {} not found".format(calendar_public_id)) def valid_when(when): @@ -262,47 +296,49 @@ def valid_when(when): def valid_event(event): - if 'when' not in event: + if "when" not in event: raise InputError("Must specify 'when' when creating an event.") - valid_when(event['when']) + valid_when(event["when"]) - if 'busy' in event and event.get('busy') is not None: + if "busy" in event and event.get("busy") is not None: # client libraries can send busy: None - if not isinstance(event.get('busy'), bool): + if not isinstance(event.get("busy"), bool): raise InputError("'busy' must be true or false") - participants = event.get('participants') + participants = event.get("participants") if participants is None: participants = [] for p in participants: - if 'email' not in p: + if "email" not in p: raise InputError("'participants' must must have email") - if not valid_email(p['email']): - raise InputError("'{}' is not a valid email".format(p['email'])) + if not valid_email(p["email"]): + raise InputError("'{}' is not a valid email".format(p["email"])) - if 'status' in p: - if p['status'] not in ('yes', 'no', 'maybe', 'noreply'): - raise InputError("'participants' status must be one of: " - "yes, no, maybe, noreply") + if "status" in p: + if p["status"] not in ("yes", "no", "maybe", "noreply"): + raise InputError( + "'participants' status must be one of: " "yes, no, maybe, noreply" + ) def valid_event_update(event, namespace, db_session): - if 'when' in event: - valid_when(event['when']) + if "when" in event: + valid_when(event["when"]) - if 'busy' in event and not isinstance(event.get('busy'), bool): + if "busy" in event and not isinstance(event.get("busy"), bool): raise InputError("'busy' must be true or false") - participants = event.get('participants', []) + participants = event.get("participants", []) for p in participants: - if 'email' not in p: + if "email" not in p: raise InputError("'participants' must have email") - if 'status' in p: - if p['status'] not in ('yes', 'no', 'maybe', 'noreply'): - raise InputError("'participants' status must be one of: " - "yes, no, maybe, noreply") + if "status" in p: + if p["status"] not in ("yes", "no", "maybe", "noreply"): + raise InputError( + "'participants' status must be one of: " "yes, no, maybe, noreply" + ) def noop_event_update(event, data): @@ -324,7 +360,7 @@ def noop_event_update(event, data): for attr in Event.API_MODIFIABLE_FIELDS: # We have to handle participants a bit differently because # it's a list which can be permuted. 
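valid_event() and valid_event_update() above apply the same participant checks. The sketch below pulls them out on their own; the "@" test is a stand-in for inbox.util.addr.valid_email, which is what the real code calls.

def check_participants(participants):
    for p in participants or []:
        if "email" not in p:
            raise ValueError("'participants' must have email")
        if "@" not in p["email"]:  # stand-in for valid_email()
            raise ValueError("'{}' is not a valid email".format(p["email"]))
        if "status" in p and p["status"] not in ("yes", "no", "maybe", "noreply"):
            raise ValueError(
                "'participants' status must be one of: yes, no, maybe, noreply"
            )

check_participants([{"email": "a@example.com", "status": "maybe"}])  # passes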
- if attr == 'participants': + if attr == "participants": continue event_value = e1.get(attr) @@ -332,8 +368,8 @@ def noop_event_update(event, data): if event_value != e_value: return False - e_participants = {p['email']: p for p in e.participants} - event_participants = {p['email']: p for p in event.participants} + e_participants = {p["email"]: p for p in e.participants} + event_participants = {p["email"]: p for p in event.participants} if len(e_participants.keys()) != len(event_participants.keys()): return False @@ -344,13 +380,13 @@ def noop_event_update(event, data): p1 = e_participants[email] p2 = event_participants[email] - p1_status = p1.get('status') - p2_status = p2.get('status') + p1_status = p1.get("status") + p2_status = p2.get("status") if p1_status != p2_status: return False - p1_comment = p1.get('comment') - p2_comment = p2.get('comment') + p1_comment = p1.get("comment") + p2_comment = p2.get("comment") if p1_comment != p2_comment: return False @@ -358,12 +394,21 @@ def noop_event_update(event, data): def valid_delta_object_types(types_arg): - types = [item.strip() for item in types_arg.split(',')] - allowed_types = ('contact', 'message', 'event', 'file', - 'thread', 'calendar', 'draft', 'folder', 'label') + types = [item.strip() for item in types_arg.split(",")] + allowed_types = ( + "contact", + "message", + "event", + "file", + "thread", + "calendar", + "draft", + "folder", + "label", + ) for type_ in types: if type_ not in allowed_types: - raise InputError('Invalid object type {}'.format(type_)) + raise InputError("Invalid object type {}".format(type_)) return types @@ -374,14 +419,15 @@ def validate_draft_recipients(draft): """ if not any((draft.to_addr, draft.bcc_addr, draft.cc_addr)): - raise InputError('No recipients specified') + raise InputError("No recipients specified") for field in draft.to_addr, draft.bcc_addr, draft.cc_addr: if field is not None: for _, email_address in field: parsed = address.parse(email_address, addr_spec_only=True) if not isinstance(parsed, address.EmailAddress): - raise InputError(u'Invalid recipient address {}'. 
- format(email_address)) + raise InputError( + u"Invalid recipient address {}".format(email_address) + ) def valid_display_name(namespace_id, category_type, display_name, db_session): @@ -393,12 +439,19 @@ def valid_display_name(namespace_id, category_type, display_name, db_session): # Set as MAX_FOLDER_LENGTH, MAX_LABEL_LENGTH raise InputError('"display_name" is too long') - if db_session.query(Category).filter( + if ( + db_session.query(Category) + .filter( Category.namespace_id == namespace_id, Category.lowercase_name == display_name, Category.type_ == category_type, - Category.deleted_at == EPOCH).first() is not None: - raise InputError('{} with name "{}" already exists'.format( - category_type, display_name)) + Category.deleted_at == EPOCH, + ) + .first() + is not None + ): + raise InputError( + '{} with name "{}" already exists'.format(category_type, display_name) + ) return display_name diff --git a/inbox/api/wsgi.py b/inbox/api/wsgi.py index 7574c9c91..2dd4359be 100644 --- a/inbox/api/wsgi.py +++ b/inbox/api/wsgi.py @@ -2,18 +2,22 @@ import nylas.api.wsgi -from nylas.api.wsgi import (NylasWSGIHandler, NylasWSGIWorker, - NylasGunicornLogger) +from nylas.api.wsgi import NylasWSGIHandler, NylasWSGIWorker, NylasGunicornLogger -nylas.api.wsgi.MAX_BLOCKING_TIME = config.get('MAX_BLOCKING_TIME', - nylas.api.wsgi.MAX_BLOCKING_TIME) -nylas.api.wsgi.LOGLEVEL = config.get('LOGLEVEL', - nylas.api.wsgi.LOGLEVEL) +nylas.api.wsgi.MAX_BLOCKING_TIME = config.get( + "MAX_BLOCKING_TIME", nylas.api.wsgi.MAX_BLOCKING_TIME +) +nylas.api.wsgi.LOGLEVEL = config.get("LOGLEVEL", nylas.api.wsgi.LOGLEVEL) # legacy names for backcompat InboxWSGIWorker = NylasWSGIWorker GunicornLogger = NylasGunicornLogger -__all__ = ['NylasWSGIHandler', 'NylasWSGIWorker', 'NylasGunicornLogger', - 'InboxWSGIWorker', 'GunicornLogger'] +__all__ = [ + "NylasWSGIHandler", + "NylasWSGIWorker", + "NylasGunicornLogger", + "InboxWSGIWorker", + "GunicornLogger", +] diff --git a/inbox/auth/__init__.py b/inbox/auth/__init__.py index 84cea2d83..de31fa0e5 100644 --- a/inbox/auth/__init__.py +++ b/inbox/auth/__init__.py @@ -13,5 +13,6 @@ # Allow out-of-tree auth submodules. 
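valid_display_name() above rejects names that are too long or that collide (by lowercase name) with an existing, non-deleted category of the same type. An in-memory approximation, with the length limit hard-coded here as an assumption (the real value comes from inbox.models.constants.MAX_INDEXABLE_LENGTH):

MAX_INDEXABLE_LENGTH = 191  # assumed value, for illustration only

def check_display_name(display_name, existing_names):
    if len(display_name) > MAX_INDEXABLE_LENGTH:
        raise ValueError('"display_name" is too long')
    if any(name.lower() == display_name.lower() for name in existing_names):
        raise ValueError('a category named "{}" already exists'.format(display_name))
    return display_name

check_display_name("Receipts", existing_names=["INBOX", "Archive"])  # ok
try:
    check_display_name("inbox", existing_names=["INBOX", "Archive"])
except ValueError as exc:
    print(exc)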
from pkgutil import extend_path from inbox.util.misc import register_backends + __path__ = extend_path(__path__, __name__) module_registry = register_backends(__name__, __path__) diff --git a/inbox/auth/base.py b/inbox/auth/base.py index 679f6e553..83f86556a 100644 --- a/inbox/auth/base.py +++ b/inbox/auth/base.py @@ -25,18 +25,19 @@ def handler_from_provider(provider_name): """ from inbox.auth import module_registry + auth_mod = module_registry.get(provider_name) if auth_mod is None: # Try to get a generic provider info = providers.get(provider_name, None) if info: - provider_type = info.get('type', None) + provider_type = info.get("type", None) if provider_type: - auth_mod = module_registry.get('generic') + auth_mod = module_registry.get("generic") if auth_mod is None: - raise NotSupportedError('Nylas does not support the email provider.') + raise NotSupportedError("Nylas does not support the email provider.") auth_handler_class = getattr(auth_mod, auth_mod.AUTH_HANDLER_CLS) auth_handler = auth_handler_class(provider_name=provider_name) @@ -64,8 +65,9 @@ def account_or_none(target, cls, email_address): shard_id = target << 48 with session_scope(shard_id) as db_session: try: - account = db_session.query(cls).filter( - cls.email_address == email_address).one() + account = ( + db_session.query(cls).filter(cls.email_address == email_address).one() + ) except NoResultFound: return db_session.expunge(account) diff --git a/inbox/auth/generic.py b/inbox/auth/generic.py index e87033e52..8a0cecaba 100644 --- a/inbox/auth/generic.py +++ b/inbox/auth/generic.py @@ -6,24 +6,28 @@ from OpenSSL._util import lib as ossllib from nylas.logging import get_logger + log = get_logger() from inbox.auth.base import AuthHandler, account_or_none -from inbox.basicauth import (ValidationError, UserRecoverableConfigError, - SSLNotSupportedError, SettingUpdateError, - AppPasswordError) +from inbox.basicauth import ( + ValidationError, + UserRecoverableConfigError, + SSLNotSupportedError, + SettingUpdateError, + AppPasswordError, +) from inbox.models import Namespace from inbox.models.backends.generic import GenericAccount from inbox.sendmail.smtp.postel import SMTPClient from inbox.util.url import matching_subdomains from inbox.crispin import CrispinClient -PROVIDER = 'generic' -AUTH_HANDLER_CLS = 'GenericAuthHandler' +PROVIDER = "generic" +AUTH_HANDLER_CLS = "GenericAuthHandler" class GenericAuthHandler(AuthHandler): - def get_account(self, target, email_address, response): account = account_or_none(target, GenericAccount, email_address) if not account: @@ -42,56 +46,66 @@ def create_account(self, email_address, response): # The server endpoints can ONLY be set at account creation and # CANNOT be subsequently changed in order to prevent MITM attacks. account.provider = self.provider_name - if self.provider_name == 'custom': - account.imap_endpoint = (response['imap_server_host'], - response['imap_server_port']) - account.smtp_endpoint = (response['smtp_server_host'], - response['smtp_server_port']) + if self.provider_name == "custom": + account.imap_endpoint = ( + response["imap_server_host"], + response["imap_server_port"], + ) + account.smtp_endpoint = ( + response["smtp_server_host"], + response["smtp_server_port"], + ) account.create_emailed_events_calendar() # Shim for back-compatability with legacy auth # The old API does NOT send these but authentication now uses them # so set them (included here, set in update_account()). 
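handler_from_provider() above looks the provider up in the auth module registry and falls back to the generic handler when the providers table only knows the provider's type. The toy registry below mimics that lookup; the dict contents are illustrative.

module_registry = {"gmail": "GmailAuthHandler", "generic": "GenericAuthHandler"}
providers = {"fastmail": {"type": "generic"}}  # toy stand-in for inbox.providers

def handler_name_for(provider_name):
    handler = module_registry.get(provider_name)
    if handler is None:
        info = providers.get(provider_name)
        if info and info.get("type"):
            handler = module_registry.get("generic")
    if handler is None:
        raise ValueError("Nylas does not support the email provider.")
    return handler

assert handler_name_for("gmail") == "GmailAuthHandler"
assert handler_name_for("fastmail") == "GenericAuthHandler"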
- for username in ['imap_username', 'smtp_username']: + for username in ["imap_username", "smtp_username"]: if username not in response: response[username] = email_address - for password in ['imap_password', 'smtp_password']: + for password in ["imap_password", "smtp_password"]: if password not in response: - response[password] = response['password'] + response[password] = response["password"] return self.update_account(account, response) def update_account(self, account, response): - account.email_address = response['email'] - for attribute in ['name', 'imap_username', 'imap_password', - 'smtp_username', 'smtp_password', 'password']: + account.email_address = response["email"] + for attribute in [ + "name", + "imap_username", + "imap_password", + "smtp_username", + "smtp_password", + "password", + ]: if response.get(attribute): setattr(account, attribute, response[attribute]) # Shim for back-compatability with legacy auth - if response.get('imap_password'): + if response.get("imap_password"): # The new API sends separate IMAP/ SMTP credentials but we need to # set the legacy password attribute. # TODO[k]: Remove once column in dropped. - account.password = response['imap_password'] + account.password = response["imap_password"] else: # The old API does NOT send these but authentication now uses them # so update them. - for attr in ('imap_username', 'smtp_username'): + for attr in ("imap_username", "smtp_username"): if attr not in response: - setattr(account, attr, response['email']) - for attr in ('imap_password', 'smtp_password'): + setattr(account, attr, response["email"]) + for attr in ("imap_password", "smtp_password"): if attr not in response: - setattr(account, attr, response['password']) + setattr(account, attr, response["password"]) account.date = datetime.datetime.utcnow() - if self.provider_name == 'custom': - for attribute in ('imap_server_host', 'smtp_server_host'): - old_value = getattr(account, '_{}'.format(attribute), None) + if self.provider_name == "custom": + for attribute in ("imap_server_host", "smtp_server_host"): + old_value = getattr(account, "_{}".format(attribute), None) new_value = response.get(attribute) - if (new_value and old_value and new_value != old_value): + if new_value and old_value and new_value != old_value: """ # Before updating the domain name, check if: # 1/ they have the same parent domain @@ -106,11 +120,11 @@ def update_account(self, account, response): """ # If all those conditions are met, update the address. - setattr(account, '_{}'.format(attribute), new_value) + setattr(account, "_{}".format(attribute), new_value) - account.ssl_required = response.get('ssl_required', True) + account.ssl_required = response.get("ssl_required", True) - account.sync_email = response.get('sync_email', True) + account.sync_email = response.get("sync_email", True) # Ensure account has sync enabled after authing. 
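The create_account()/update_account() hunks above shim legacy auth payloads, which carry only "email" and "password", into the newer per-protocol credential fields. The same idea as a standalone function:

def apply_legacy_credential_shim(response):
    # Fill imap_*/smtp_* fields from the legacy email/password keys when absent.
    for username in ["imap_username", "smtp_username"]:
        response.setdefault(username, response["email"])
    for password in ["imap_password", "smtp_password"]:
        response.setdefault(password, response["password"])
    return response

legacy = {"email": "user@example.com", "password": "hunter2"}
apply_legacy_credential_shim(legacy)
assert legacy["imap_username"] == "user@example.com"
assert legacy["smtp_password"] == "hunter2"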
account.enable_sync() @@ -131,38 +145,44 @@ def connect_account(self, account, use_timeout=True): host, port = account.imap_endpoint ssl_required = account.ssl_required try: - conn = create_imap_connection(host, port, ssl_required, - use_timeout) + conn = create_imap_connection(host, port, ssl_required, use_timeout) except (IMAPClient.Error, socket.error) as exc: - log.error('Error instantiating IMAP connection', - account_id=account.id, - host=host, - port=port, - ssl_required=ssl_required, - error=exc) + log.error( + "Error instantiating IMAP connection", + account_id=account.id, + host=host, + port=port, + ssl_required=ssl_required, + error=exc, + ) raise try: conn.login(account.imap_username, account.imap_password) except IMAPClient.Error as exc: if _auth_is_invalid(exc): - log.error('IMAP login failed', - account_id=account.id, - host=host, port=port, - ssl_required=ssl_required, - error=exc) + log.error( + "IMAP login failed", + account_id=account.id, + host=host, + port=port, + ssl_required=ssl_required, + error=exc, + ) raise ValidationError(exc) elif _auth_requires_app_password(exc): raise AppPasswordError(exc) else: - log.error('IMAP login failed for an unknown reason. Check _auth_is_invalid', - account_id=account.id, - host=host, - port=port, - ssl_required=ssl_required, - error=exc) + log.error( + "IMAP login failed for an unknown reason. Check _auth_is_invalid", + account_id=account.id, + host=host, + port=port, + ssl_required=ssl_required, + error=exc, + ) raise - if 'ID' in conn.capabilities(): + if "ID" in conn.capabilities(): # Try to issue an IMAP ID command. Some whacky servers # (163.com) require this, but it's an encouraged practice in any # case. Since this isn't integral to the sync in general, don't @@ -170,15 +190,22 @@ def connect_account(self, account, use_timeout=True): # (Note that as of May 2015, this depends on a patched imapclient # that implements the ID command.) try: - conn.id_({'name': 'Nylas Sync Engine', 'vendor': 'Nylas', - 'contact': 'support@nylas.com'}) + conn.id_( + { + "name": "Nylas Sync Engine", + "vendor": "Nylas", + "contact": "support@nylas.com", + } + ) except Exception as exc: - log.warning('Error issuing IMAP ID command; continuing', - account_id=account.id, - host=host, - port=port, - ssl_required=ssl_required, - error=exc) + log.warning( + "Error issuing IMAP ID command; continuing", + account_id=account.id, + host=host, + port=port, + ssl_required=ssl_required, + error=exc, + ) return conn @@ -213,8 +240,9 @@ def verify_account(self, account): """ # Verify IMAP login conn = self.connect_account(account) - crispin = CrispinClient(account.id, account.provider_info, - account.email_address, conn) + crispin = CrispinClient( + account.id, account.provider_info, account.email_address, conn + ) info = account.provider_info if "condstore" not in info: @@ -225,12 +253,14 @@ def verify_account(self, account): account.folder_separator = crispin.folder_separator account.folder_prefix = crispin.folder_prefix except Exception as e: - log.error("account_folder_list_failed", - account_id=account.id, - error=e.message) - error_message = ("Full IMAP support is not enabled for this account. " - "Please contact your domain " - "administrator and try again.") + log.error( + "account_folder_list_failed", account_id=account.id, error=e.message + ) + error_message = ( + "Full IMAP support is not enabled for this account. " + "Please contact your domain " + "administrator and try again." 
+ ) raise UserRecoverableConfigError(error_message) finally: conn.logout() @@ -243,72 +273,81 @@ def verify_account(self, account): with smtp_client._get_connection(): pass except socket.gaierror as exc: - log.error('Failed to resolve SMTP server domain', - account_id=account.id, - error=exc) - error_message = ("Couldn't resolve the SMTP server domain name. " - "Please check that your SMTP settings are correct.") + log.error( + "Failed to resolve SMTP server domain", account_id=account.id, error=exc + ) + error_message = ( + "Couldn't resolve the SMTP server domain name. " + "Please check that your SMTP settings are correct." + ) raise UserRecoverableConfigError(error_message) except socket.timeout as exc: - log.error('TCP timeout when connecting to SMTP server', - account_id=account.id, - error=exc) - - error_message = ("Connection timeout when connecting to SMTP server. " - "Please check that your SMTP settings are correct.") + log.error( + "TCP timeout when connecting to SMTP server", + account_id=account.id, + error=exc, + ) + + error_message = ( + "Connection timeout when connecting to SMTP server. " + "Please check that your SMTP settings are correct." + ) raise UserRecoverableConfigError(error_message) except Exception as exc: - log.error('Failed to establish an SMTP connection', - smtp_endpoint=account.smtp_endpoint, - account_id=account.id, - error=exc) - raise UserRecoverableConfigError("Please check that your SMTP " - "settings are correct.") + log.error( + "Failed to establish an SMTP connection", + smtp_endpoint=account.smtp_endpoint, + account_id=account.id, + error=exc, + ) + raise UserRecoverableConfigError( + "Please check that your SMTP " "settings are correct." + ) # Reset the sync_state to 'running' on a successful re-auth. # Necessary for API requests to proceed and an account modify delta to # be returned to delta/ streaming clients. # NOTE: Setting this does not restart the sync. Sync scheduling occurs # via the sync_should_run bit (set to True in update_account() above). 
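verify_account() above maps low-level socket failures from the SMTP connection attempt to user-facing messages. A rough standalone equivalent using the standard library's smtplib instead of the internal SMTPClient, so this is an approximation rather than the code the patch touches:

import smtplib
import socket

def check_smtp_reachable(host, port, timeout=10):
    try:
        conn = smtplib.SMTP(host, port, timeout=timeout)
        conn.quit()
    except socket.gaierror:
        raise RuntimeError(
            "Couldn't resolve the SMTP server domain name. "
            "Please check that your SMTP settings are correct."
        )
    except socket.timeout:
        raise RuntimeError(
            "Connection timeout when connecting to SMTP server. "
            "Please check that your SMTP settings are correct."
        )
    except Exception:
        raise RuntimeError("Please check that your SMTP settings are correct.")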
- account.sync_state = ('running' if account.sync_state else - account.sync_state) + account.sync_state = "running" if account.sync_state else account.sync_state return True def interactive_auth(self, email_address): response = dict(email=email_address) - if self.provider_name == 'custom': - imap_server_host = raw_input('IMAP server host: ').strip() - imap_server_port = raw_input('IMAP server port: ').strip() or 993 - imap_um = 'IMAP username (empty for same as email address): ' + if self.provider_name == "custom": + imap_server_host = raw_input("IMAP server host: ").strip() + imap_server_port = raw_input("IMAP server port: ").strip() or 993 + imap_um = "IMAP username (empty for same as email address): " imap_user = raw_input(imap_um).strip() or email_address - imap_pwm = 'IMAP password for {0}: ' + imap_pwm = "IMAP password for {0}: " imap_p = getpass.getpass(imap_pwm.format(email_address)) - smtp_server_host = raw_input('SMTP server host: ').strip() - smtp_server_port = raw_input('SMTP server port: ').strip() or 587 - smtp_um = 'SMTP username (empty for same as email address): ' + smtp_server_host = raw_input("SMTP server host: ").strip() + smtp_server_port = raw_input("SMTP server port: ").strip() or 587 + smtp_um = "SMTP username (empty for same as email address): " smtp_user = raw_input(smtp_um).strip() or email_address - smtp_pwm = 'SMTP password for {0} (empty for same as IMAP): ' + smtp_pwm = "SMTP password for {0} (empty for same as IMAP): " smtp_p = getpass.getpass(smtp_pwm.format(email_address)) or imap_p - ssl_required = raw_input('Require SSL? [Y/n] ').strip().\ - lower() != 'n' - - response.update(imap_server_host=imap_server_host, - imap_server_port=imap_server_port, - imap_username=imap_user, - imap_password=imap_p, - smtp_server_host=smtp_server_host, - smtp_server_port=smtp_server_port, - smtp_username=smtp_user, - smtp_password=smtp_p, - ssl_required=ssl_required) + ssl_required = raw_input("Require SSL? [Y/n] ").strip().lower() != "n" + + response.update( + imap_server_host=imap_server_host, + imap_server_port=imap_server_port, + imap_username=imap_user, + imap_password=imap_p, + smtp_server_host=smtp_server_host, + smtp_server_port=smtp_server_port, + smtp_username=smtp_user, + smtp_password=smtp_p, + ssl_required=ssl_required, + ) else: - password_message = 'Password for {0} (hidden): ' - pw = '' + password_message = "Password for {0} (hidden): " + pw = "" while not pw: pw = getpass.getpass(password_message.format(email_address)) response.update(password=pw) @@ -320,12 +359,11 @@ def _auth_requires_app_password(exc): # Some servers require an application specific password, token, or # authorization code to login PREFIXES = ( - 'Please using authorized code to login.', # http://service.mail.qq.com/cgi-bin/help?subtype=1&&id=28&&no=1001256 - 'Authorized code is incorrect', # http://service.mail.qq.com/cgi-bin/help?subtype=1&&id=28&&no=1001256 - 'Login fail. Please using weixin token', # http://service.exmail.qq.com/cgi-bin/help?subtype=1&no=1001023&id=23. + "Please using authorized code to login.", # http://service.mail.qq.com/cgi-bin/help?subtype=1&&id=28&&no=1001256 + "Authorized code is incorrect", # http://service.mail.qq.com/cgi-bin/help?subtype=1&&id=28&&no=1001256 + "Login fail. Please using weixin token", # http://service.exmail.qq.com/cgi-bin/help?subtype=1&no=1001023&id=23. 
) - return any(exc.message.lower().startswith(msg.lower()) for msg in - PREFIXES) + return any(exc.message.lower().startswith(msg.lower()) for msg in PREFIXES) def _auth_is_invalid(exc): @@ -334,28 +372,29 @@ def _auth_is_invalid(exc): # couldn't log in because the credentials are invalid, or because of some # temporary server error. AUTH_INVALID_PREFIXES = ( - '[authenticationfailed]', - 'incorrect username or password', - 'invalid login or password', - 'login login error password error', - '[auth] authentication failed.', - 'invalid login credentials', - '[ALERT] Please log in via your web browser', - 'LOGIN Authentication failed', - 'authentication failed', - '[ALERT] Invalid credentials(Failure)', - 'Invalid email login', - 'failed: Re-Authentication Failure', - 'Invalid', - 'Login incorrect', - 'LOGIN GroupWise login failed', - 'authentication failed', - 'LOGIN bad', # LOGIN bad username or password - '[AUTHORIZATIONFAILED]', - 'incorrect password', + "[authenticationfailed]", + "incorrect username or password", + "invalid login or password", + "login login error password error", + "[auth] authentication failed.", + "invalid login credentials", + "[ALERT] Please log in via your web browser", + "LOGIN Authentication failed", + "authentication failed", + "[ALERT] Invalid credentials(Failure)", + "Invalid email login", + "failed: Re-Authentication Failure", + "Invalid", + "Login incorrect", + "LOGIN GroupWise login failed", + "authentication failed", + "LOGIN bad", # LOGIN bad username or password + "[AUTHORIZATIONFAILED]", + "incorrect password", + ) + return any( + exc.message.lower().startswith(msg.lower()) for msg in AUTH_INVALID_PREFIXES ) - return any(exc.message.lower().startswith(msg.lower()) for msg in - AUTH_INVALID_PREFIXES) def create_imap_connection(host, port, ssl_required, use_timeout=True): @@ -372,24 +411,28 @@ def create_imap_connection(host, port, ssl_required, use_timeout=True): # TODO: certificate pinning for well known sites context = create_default_context() - conn = IMAPClient(host, port=port, use_uid=True, - ssl=use_ssl, ssl_context=context, timeout=timeout) + conn = IMAPClient( + host, port=port, use_uid=True, ssl=use_ssl, ssl_context=context, timeout=timeout + ) if not use_ssl: # If STARTTLS is available, always use it. If it's not/ it fails, use # `ssl_required` to determine whether to fail or continue with # plaintext authentication. 
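_auth_is_invalid() and _auth_requires_app_password() above both classify IMAP error strings by case-insensitive prefix. The same check in isolation, with a shortened prefix list taken from the one in the patch:

AUTH_INVALID_PREFIXES = (
    "[authenticationfailed]",
    "invalid login or password",
    "login incorrect",
)

def matches_any_prefix(message, prefixes):
    lowered = message.lower()
    return any(lowered.startswith(prefix.lower()) for prefix in prefixes)

assert matches_any_prefix("Invalid login or password (Failure)", AUTH_INVALID_PREFIXES)
assert not matches_any_prefix("Temporary system problem", AUTH_INVALID_PREFIXES)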
- if conn.has_capability('STARTTLS'): + if conn.has_capability("STARTTLS"): try: conn.starttls(context) except Exception: if not ssl_required: - log.warning('STARTTLS supported but failed for SSL NOT ' - 'required authentication', exc_info=True) + log.warning( + "STARTTLS supported but failed for SSL NOT " + "required authentication", + exc_info=True, + ) else: raise elif ssl_required: - raise SSLNotSupportedError('Required IMAP STARTTLS not supported.') + raise SSLNotSupportedError("Required IMAP STARTTLS not supported.") return conn @@ -428,8 +471,10 @@ def create_default_context(): context.options |= ossllib.SSL_OP_SINGLE_DH_USE context.options |= ossllib.SSL_OP_SINGLE_ECDH_USE - context._ctx.set_mode(ossllib.SSL_MODE_ENABLE_PARTIAL_WRITE | - ossllib.SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER | - ossllib.SSL_MODE_AUTO_RETRY) + context._ctx.set_mode( + ossllib.SSL_MODE_ENABLE_PARTIAL_WRITE + | ossllib.SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER + | ossllib.SSL_MODE_AUTO_RETRY + ) return context diff --git a/inbox/auth/gmail.py b/inbox/auth/gmail.py index bca1cc6e2..a024e1e49 100644 --- a/inbox/auth/gmail.py +++ b/inbox/auth/gmail.py @@ -15,29 +15,32 @@ from inbox.crispin import GmailCrispinClient from nylas.logging import get_logger + log = get_logger() -PROVIDER = 'gmail' -AUTH_HANDLER_CLS = 'GmailAuthHandler' +PROVIDER = "gmail" +AUTH_HANDLER_CLS = "GmailAuthHandler" # Google OAuth app credentials -OAUTH_CLIENT_ID = config.get_required('GOOGLE_OAUTH_CLIENT_ID') -OAUTH_CLIENT_SECRET = config.get_required('GOOGLE_OAUTH_CLIENT_SECRET') -OAUTH_REDIRECT_URI = config.get_required('GOOGLE_OAUTH_REDIRECT_URI') +OAUTH_CLIENT_ID = config.get_required("GOOGLE_OAUTH_CLIENT_ID") +OAUTH_CLIENT_SECRET = config.get_required("GOOGLE_OAUTH_CLIENT_SECRET") +OAUTH_REDIRECT_URI = config.get_required("GOOGLE_OAUTH_REDIRECT_URI") -OAUTH_AUTHENTICATE_URL = 'https://accounts.google.com/o/oauth2/auth' -OAUTH_ACCESS_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token' -OAUTH_TOKEN_VALIDATION_URL = 'https://www.googleapis.com/oauth2/v2/tokeninfo' -OAUTH_USER_INFO_URL = 'https://www.googleapis.com/oauth2/v1/userinfo' +OAUTH_AUTHENTICATE_URL = "https://accounts.google.com/o/oauth2/auth" +OAUTH_ACCESS_TOKEN_URL = "https://www.googleapis.com/oauth2/v4/token" +OAUTH_TOKEN_VALIDATION_URL = "https://www.googleapis.com/oauth2/v2/tokeninfo" +OAUTH_USER_INFO_URL = "https://www.googleapis.com/oauth2/v1/userinfo" # NOTE: urls for email address and G+ profile are deprecated -OAUTH_SCOPE = ' '.join([ - 'https://www.googleapis.com/auth/userinfo.email', # email address - 'https://www.googleapis.com/auth/userinfo.profile', # G+ profile - 'https://mail.google.com/', # email - 'https://www.google.com/m8/feeds', # contacts - 'https://www.googleapis.com/auth/calendar' # calendar -]) +OAUTH_SCOPE = " ".join( + [ + "https://www.googleapis.com/auth/userinfo.email", # email address + "https://www.googleapis.com/auth/userinfo.profile", # G+ profile + "https://mail.google.com/", # email + "https://www.google.com/m8/feeds", # contacts + "https://www.googleapis.com/auth/calendar", # calendar + ] +) class GmailAuthHandler(OAuthAuthHandler): @@ -64,31 +67,38 @@ def _authenticate_IMAP_connection(self, account, conn): exc = _process_imap_exception(exc) # Raise all imap disabled errors except authentication_failed # error, swhich we handle differently - if isinstance(exc, ImapSupportDisabledError) and \ - exc.reason != 'authentication_failed': + if ( + isinstance(exc, ImapSupportDisabledError) + and exc.reason != "authentication_failed" + ): raise exc - 
log.error('Error during IMAP XOAUTH2 login', - account_id=account.id, - host=host, port=port, error=exc) + log.error( + "Error during IMAP XOAUTH2 login", + account_id=account.id, + host=host, + port=port, + error=exc, + ) if not isinstance(exc, ImapSupportDisabledError): raise # Unknown IMAPClient error, reraise # If we got an AUTHENTICATIONFAILED response, force a token refresh # and try again. If IMAP auth still fails, it's likely that IMAP # access is disabled, so propagate that errror. - token = g_token_manager.get_token_for_email( - account, force_refresh=True) + token = g_token_manager.get_token_for_email(account, force_refresh=True) try: conn.oauth2_login(account.email_address, token) except IMAPClient.Error as exc: exc = _process_imap_exception(exc) - if not isinstance(exc, ImapSupportDisabledError) or \ - exc.reason != 'authentication_failed': + if ( + not isinstance(exc, ImapSupportDisabledError) + or exc.reason != "authentication_failed" + ): raise exc else: # Instead of authentication_failed, report imap disabled - raise ImapSupportDisabledError('imap_disabled_for_account') + raise ImapSupportDisabledError("imap_disabled_for_account") def get_account(self, target, email_address, response): account = account_or_none(target, GmailAccount, email_address) @@ -108,63 +118,70 @@ def create_account(self, email_address, response): return self.update_account(account, response) def update_account(self, account, response): - email_address = response.get('email') + email_address = response.get("email") # We only get refresh tokens on initial login (or failed credentials) # otherwise, we don't force the login screen and therefore don't get a # refresh token back from google. - new_refresh_token = response.get('refresh_token') + new_refresh_token = response.get("refresh_token") if new_refresh_token: account.refresh_token = new_refresh_token else: - if (len(account.valid_auth_credentials) == 0 or - account.sync_state == 'invalid'): + if ( + len(account.valid_auth_credentials) == 0 + or account.sync_state == "invalid" + ): # We got a new auth without a refresh token, so we need to back # out and force the auth flow, since we don't already have # a refresh (or the ones we have don't work.) 
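The _authenticate_IMAP_connection() hunk above retries the XOAUTH2 login once with a force-refreshed token before deciding that IMAP access is disabled. A simplified sketch of that retry using the public imapclient API; get_token stands in for g_token_manager.get_token_for_email, and the ImapSupportDisabledError bookkeeping is omitted:

from imapclient import IMAPClient

def oauth2_login_with_retry(conn, email_address, get_token):
    try:
        conn.oauth2_login(email_address, get_token(force_refresh=False))
        return
    except IMAPClient.Error:
        pass
    # Second attempt with a force-refreshed access token, as in the patch.
    conn.oauth2_login(email_address, get_token(force_refresh=True))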
- raise OAuthError('No valid refresh tokens') + raise OAuthError("No valid refresh tokens") account.email_address = email_address - account.family_name = response.get('family_name') - account.given_name = response.get('given_name') - account.name = response.get('name') - account.gender = response.get('gender') - account.g_id = response.get('id') - account.g_user_id = response.get('user_id') - account.link = response.get('link') - account.locale = response.get('locale') - account.picture = response.get('picture') - account.home_domain = response.get('hd') - account.sync_email = response.get('sync_email', True) - account.sync_contacts = (account.sync_contacts or - response.get('contacts', True)) - account.sync_events = response.get('events', False) + account.family_name = response.get("family_name") + account.given_name = response.get("given_name") + account.name = response.get("name") + account.gender = response.get("gender") + account.g_id = response.get("id") + account.g_user_id = response.get("user_id") + account.link = response.get("link") + account.locale = response.get("locale") + account.picture = response.get("picture") + account.home_domain = response.get("hd") + account.sync_email = response.get("sync_email", True) + account.sync_contacts = account.sync_contacts or response.get("contacts", True) + account.sync_events = response.get("events", False) # These values are deprecated and should not be used, along # with the account's refresh_token. Access all these values # through the GmailAuthCredentials objects instead. - account.client_id = response.get('client_id') - account.client_secret = response.get('client_secret') - account.scope = response.get('scope') - account.g_id_token = response.get('id_token') + account.client_id = response.get("client_id") + account.client_secret = response.get("client_secret") + account.scope = response.get("scope") + account.g_id_token = response.get("id_token") # Don't need to actually save these now # tok = response.get('access_token') # expires_in = response.get('expires_in') - client_id = response.get('client_id') or OAUTH_CLIENT_ID - client_secret = response.get('client_secret') or OAUTH_CLIENT_SECRET + client_id = response.get("client_id") or OAUTH_CLIENT_ID + client_secret = response.get("client_secret") or OAUTH_CLIENT_SECRET if new_refresh_token: # See if we already have credentials for this client_id/secret # pair. If those don't exist, make a new GmailAuthCredentials auth_creds = next( - (auth_creds for auth_creds in account.auth_credentials - if (auth_creds.client_id == client_id and - auth_creds.client_secret == client_secret)), - GmailAuthCredentials()) + ( + auth_creds + for auth_creds in account.auth_credentials + if ( + auth_creds.client_id == client_id + and auth_creds.client_secret == client_secret + ) + ), + GmailAuthCredentials(), + ) auth_creds.gmailaccount = account - auth_creds.scopes = response.get('scope') - auth_creds.g_id_token = response.get('id_token') + auth_creds.scopes = response.get("scope") + auth_creds.g_id_token = response.get("id_token") auth_creds.client_id = client_id auth_creds.client_secret = client_secret auth_creds.refresh_token = new_refresh_token @@ -190,11 +207,13 @@ def verify_account(self, account): # Verify login. conn = self.connect_account(account) # Verify configuration. 
- client = GmailCrispinClient(account.id, - provider_info('gmail'), - account.email_address, - conn, - readonly=True) + client = GmailCrispinClient( + account.id, + provider_info("gmail"), + account.email_address, + conn, + readonly=True, + ) client.sync_folders() conn.logout() except ImapSupportDisabledError: @@ -206,39 +225,41 @@ def verify_account(self, account): # be returned to delta/ streaming clients. # NOTE: Setting this does not restart the sync. Sync scheduling occurs # via the sync_should_run bit (set to True in update_account() above). - account.sync_state = ('running' if account.sync_state else - account.sync_state) + account.sync_state = "running" if account.sync_state else account.sync_state return True def validate_token(self, access_token): - response = requests.get(self.OAUTH_TOKEN_VALIDATION_URL, - params={'access_token': access_token}) + response = requests.get( + self.OAUTH_TOKEN_VALIDATION_URL, params={"access_token": access_token} + ) validation_dict = response.json() - if 'error' in validation_dict: - raise OAuthError(validation_dict['error']) + if "error" in validation_dict: + raise OAuthError(validation_dict["error"]) return validation_dict def interactive_auth(self, email_address=None): - url_args = {'redirect_uri': self.OAUTH_REDIRECT_URI, - 'client_id': self.OAUTH_CLIENT_ID, - 'response_type': 'code', - 'scope': self.OAUTH_SCOPE, - 'access_type': 'offline'} + url_args = { + "redirect_uri": self.OAUTH_REDIRECT_URI, + "client_id": self.OAUTH_CLIENT_ID, + "response_type": "code", + "scope": self.OAUTH_SCOPE, + "access_type": "offline", + } if email_address: - url_args['login_hint'] = email_address + url_args["login_hint"] = email_address url = url_concat(self.OAUTH_AUTHENTICATE_URL, url_args) - print 'To authorize Nylas, visit this URL and follow the directions:' - print '\n{}'.format(url) + print "To authorize Nylas, visit this URL and follow the directions:" + print "\n{}".format(url) while True: - auth_code = raw_input('Enter authorization code: ').strip() + auth_code = raw_input("Enter authorization code: ").strip() try: auth_response = self._get_authenticated_user(auth_code) - auth_response['contacts'] = True - auth_response['events'] = True + auth_response["contacts"] = True + auth_response["events"] = True return auth_response except OAuthError: print "\nInvalid authorization code, try again...\n" @@ -246,15 +267,16 @@ def interactive_auth(self, email_address=None): def _process_imap_exception(exc): - if 'Lookup failed' in exc.message: + if "Lookup failed" in exc.message: # Gmail is disabled for this apps account - return ImapSupportDisabledError('gmail_disabled_for_domain') - elif 'IMAP access is disabled for your domain.' in exc.message: + return ImapSupportDisabledError("gmail_disabled_for_domain") + elif "IMAP access is disabled for your domain." 
in exc.message: # IMAP is disabled for this domain - return ImapSupportDisabledError('imap_disabled_for_domain') - elif exc.message.startswith('[AUTHENTICATIONFAILED] Invalid credentials ' - '(Failure)'): - return ImapSupportDisabledError('authentication_failed') + return ImapSupportDisabledError("imap_disabled_for_domain") + elif exc.message.startswith( + "[AUTHENTICATIONFAILED] Invalid credentials " "(Failure)" + ): + return ImapSupportDisabledError("authentication_failed") else: # Unknown IMAPClient error return exc diff --git a/inbox/auth/oauth.py b/inbox/auth/oauth.py index c41ae28d9..229a28733 100644 --- a/inbox/auth/oauth.py +++ b/inbox/auth/oauth.py @@ -6,6 +6,7 @@ from imapclient import IMAPClient from nylas.logging import get_logger + log = get_logger() from inbox.auth.base import AuthHandler from inbox.auth.generic import create_imap_connection @@ -14,7 +15,6 @@ class OAuthAuthHandler(AuthHandler): - def connect_account(self, account, use_timeout=True): """ Returns an authenticated IMAP connection for the given account. @@ -37,14 +37,17 @@ def connect_account(self, account, use_timeout=True): def _get_IMAP_connection(self, account, use_timeout=True): host, port = account.imap_endpoint try: - conn = create_imap_connection(host, port, ssl_required=True, - use_timeout=use_timeout) + conn = create_imap_connection( + host, port, ssl_required=True, use_timeout=use_timeout + ) except (IMAPClient.Error, socket.error) as exc: - log.error('Error instantiating IMAP connection', - account_id=account.id, - imap_host=host, - imap_port=port, - error=exc) + log.error( + "Error instantiating IMAP connection", + account_id=account.id, + imap_host=host, + imap_port=port, + error=exc, + ) raise return conn @@ -55,11 +58,13 @@ def _authenticate_IMAP_connection(self, account, conn): token = token_manager.get_token(account) conn.oauth2_login(account.email_address, token) except IMAPClient.Error as exc: - log.error('Error during IMAP XOAUTH2 login', - account_id=account.id, - host=host, - port=port, - error=exc) + log.error( + "Error during IMAP XOAUTH2 login", + account_id=account.id, + host=host, + port=port, + error=exc, + ) raise def verify_account(self, account): @@ -70,7 +75,7 @@ def verify_account(self, account): def new_token(self, refresh_token, client_id=None, client_secret=None): if not refresh_token: - raise OAuthError('refresh_token required') + raise OAuthError("refresh_token required") # If these aren't set on the Account object, use the values from # config so that the dev version of the sync engine continues to work. 
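new_token, whose body continues in the next hunk, performs the standard OAuth 2.0 refresh_token grant against the provider's token endpoint. A minimal standalone version of that exchange, assuming only requests; the endpoint is Google's (as configured above) and the credentials are placeholders:

    import requests

    TOKEN_URL = "https://www.googleapis.com/oauth2/v4/token"

    def refresh_access_token(client_id, client_secret, refresh_token):
        resp = requests.post(TOKEN_URL, data={
            "client_id": client_id,
            "client_secret": client_secret,
            "refresh_token": refresh_token,
            "grant_type": "refresh_token",
        })
        payload = resp.json()
        if "error" in payload:
            # "invalid_grant" here usually means the user revoked access.
            raise RuntimeError(payload["error"])
        return payload["access_token"], payload["expires_in"]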
@@ -78,66 +83,68 @@ def new_token(self, refresh_token, client_id=None, client_secret=None): client_secret = client_secret or self.OAUTH_CLIENT_SECRET access_token_url = self.OAUTH_ACCESS_TOKEN_URL - data = urllib.urlencode({ - 'refresh_token': refresh_token, - 'client_id': client_id, - 'client_secret': client_secret, - 'grant_type': 'refresh_token' - }) - headers = {'Content-type': 'application/x-www-form-urlencoded', - 'Accept': 'text/plain'} + data = urllib.urlencode( + { + "refresh_token": refresh_token, + "client_id": client_id, + "client_secret": client_secret, + "grant_type": "refresh_token", + } + ) + headers = { + "Content-type": "application/x-www-form-urlencoded", + "Accept": "text/plain", + } try: - response = requests.post(access_token_url, data=data, - headers=headers) + response = requests.post(access_token_url, data=data, headers=headers) except requests.exceptions.ConnectionError as e: - log.error('Network error renewing access token', error=e) + log.error("Network error renewing access token", error=e) raise ConnectionError() try: session_dict = response.json() except JSONDecodeError: - log.error('Invalid JSON renewing on renewing token', - response=response.text) - raise ConnectionError('Invalid JSON response on renewing token') + log.error("Invalid JSON renewing on renewing token", response=response.text) + raise ConnectionError("Invalid JSON response on renewing token") - if 'error' in session_dict: - if session_dict['error'] == 'invalid_grant': + if "error" in session_dict: + if session_dict["error"] == "invalid_grant": # This is raised if the user has revoked access to the # application (or if the refresh token is otherwise invalid). - raise OAuthError('invalid_grant') - elif session_dict['error'] == 'deleted_client': + raise OAuthError("invalid_grant") + elif session_dict["error"] == "deleted_client": # If the developer has outright deleted their Google OAuth app # ID. We treat this too as a case of 'invalid credentials'. - raise OAuthError('deleted_client') + raise OAuthError("deleted_client") else: # You can also get e.g. 
{"error": "internal_failure"} - log.error('Error renewing access token', - session_dict=session_dict) - raise ConnectionError('Server error renewing access token') + log.error("Error renewing access token", session_dict=session_dict) + raise ConnectionError("Server error renewing access token") - return session_dict['access_token'], session_dict['expires_in'] + return session_dict["access_token"], session_dict["expires_in"] def _get_authenticated_user(self, authorization_code): args = { - 'client_id': self.OAUTH_CLIENT_ID, - 'client_secret': self.OAUTH_CLIENT_SECRET, - 'redirect_uri': self.OAUTH_REDIRECT_URI, - 'code': authorization_code, - 'grant_type': 'authorization_code' + "client_id": self.OAUTH_CLIENT_ID, + "client_secret": self.OAUTH_CLIENT_SECRET, + "redirect_uri": self.OAUTH_REDIRECT_URI, + "code": authorization_code, + "grant_type": "authorization_code", } - headers = {'Content-type': 'application/x-www-form-urlencoded', - 'Accept': 'text/plain'} + headers = { + "Content-type": "application/x-www-form-urlencoded", + "Accept": "text/plain", + } data = urllib.urlencode(args) - resp = requests.post(self.OAUTH_ACCESS_TOKEN_URL, data=data, - headers=headers) + resp = requests.post(self.OAUTH_ACCESS_TOKEN_URL, data=data, headers=headers) session_dict = resp.json() - if u'error' in session_dict: - raise OAuthError(session_dict['error']) + if u"error" in session_dict: + raise OAuthError(session_dict["error"]) - access_token = session_dict['access_token'] + access_token = session_dict["access_token"] validation_dict = self.validate_token(access_token) userinfo_dict = self._get_user_info(access_token) @@ -149,21 +156,25 @@ def _get_authenticated_user(self, authorization_code): def _get_user_info(self, access_token): try: - response = requests.get(self.OAUTH_USER_INFO_URL, - params={'access_token': access_token}) + response = requests.get( + self.OAUTH_USER_INFO_URL, params={"access_token": access_token} + ) except requests.exceptions.ConnectionError as e: - log.error('user_info_fetch_failed', error=e) + log.error("user_info_fetch_failed", error=e) raise ConnectionError() userinfo_dict = response.json() - if 'error' in userinfo_dict: - assert userinfo_dict['error'] == 'invalid_token' - log.error('user_info_fetch_failed', - error=userinfo_dict['error'], - error_description=userinfo_dict['error_description']) - log.error('%s - %s' % (userinfo_dict['error'], - userinfo_dict['error_description'])) + if "error" in userinfo_dict: + assert userinfo_dict["error"] == "invalid_token" + log.error( + "user_info_fetch_failed", + error=userinfo_dict["error"], + error_description=userinfo_dict["error_description"], + ) + log.error( + "%s - %s" % (userinfo_dict["error"], userinfo_dict["error_description"]) + ) raise OAuthError() return userinfo_dict @@ -176,5 +187,5 @@ def __init__(self, token): self.token = token def __call__(self, r): - r.headers['Authorization'] = 'Bearer {}'.format(self.token) + r.headers["Authorization"] = "Bearer {}".format(self.token) return r diff --git a/inbox/basicauth.py b/inbox/basicauth.py index 13b1962be..819c5ebb9 100644 --- a/inbox/basicauth.py +++ b/inbox/basicauth.py @@ -43,7 +43,6 @@ class GmailSettingError(ValidationError): class ImapSupportDisabledError(ValidationError): - def __init__(self, reason=None): super(ImapSupportDisabledError, self).__init__(reason) self.reason = reason diff --git a/inbox/config.py b/inbox/config.py index 81cfc3b1d..c15031c8a 100644 --- a/inbox/config.py +++ b/inbox/config.py @@ -7,51 +7,58 @@ # more secure than the default python ssl module 
in python 2.7.4 import requests import urllib3.contrib.pyopenssl + urllib3.contrib.pyopenssl.inject_into_urllib3() urllib3.disable_warnings() from requests.packages.urllib3.exceptions import InsecureRequestWarning + requests.packages.urllib3.disable_warnings(InsecureRequestWarning) # TODO[mike]: This shold be removed once we've updated our base OS. openssl 1.0.1 doesn't support cross-signed certs # https://github.com/certifi/python-certifi/issues/26#issuecomment-138322515 import certifi + os.environ["REQUESTS_CA_BUNDLE"] = certifi.old_where() -__all__ = ['config'] +__all__ = ["config"] -if 'NYLAS_ENV' in os.environ: - assert os.environ['NYLAS_ENV'] in ('dev', 'test', 'staging', 'prod'), \ - "NYLAS_ENV must be either 'dev', 'test', staging, or 'prod'" - env = os.environ['NYLAS_ENV'] +if "NYLAS_ENV" in os.environ: + assert os.environ["NYLAS_ENV"] in ( + "dev", + "test", + "staging", + "prod", + ), "NYLAS_ENV must be either 'dev', 'test', staging, or 'prod'" + env = os.environ["NYLAS_ENV"] else: - env = 'prod' + env = "prod" def is_live_env(): - return env == 'prod' or env == 'staging' + return env == "prod" or env == "staging" class ConfigError(Exception): - def __init__(self, error=None, help=None): - self.error = error or '' - self.help = help or \ - 'Run `sudo cp etc/config-dev.json /etc/inboxapp/config.json` and retry.' + self.error = error or "" + self.help = ( + help + or "Run `sudo cp etc/config-dev.json /etc/inboxapp/config.json` and retry." + ) def __str__(self): - return '{0} {1}'.format(self.error, self.help) + return "{0} {1}".format(self.error, self.help) class Configuration(dict): - def __init__(self, *args, **kwargs): dict.__init__(self, *args, **kwargs) def get_required(self, key): if key not in self: - raise ConfigError('Missing config value for {0}.'.format(key)) + raise ConfigError("Missing config value for {0}.".format(key)) return self[key] @@ -81,22 +88,22 @@ def _update_config_from_env(config, env): Missing files in the path will be ignored. 
""" - srcdir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..') + srcdir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..") - if env in ['prod', 'staging']: + if env in ["prod", "staging"]: base_cfg_path = [ - '/etc/inboxapp/secrets.yml', - '/etc/inboxapp/config.json', + "/etc/inboxapp/secrets.yml", + "/etc/inboxapp/config.json", ] else: - v = {'env': env, 'srcdir': srcdir} + v = {"env": env, "srcdir": srcdir} base_cfg_path = [ - '{srcdir}/etc/secrets-{env}.yml'.format(**v), - '{srcdir}/etc/config-{env}.json'.format(**v), + "{srcdir}/etc/secrets-{env}.yml".format(**v), + "{srcdir}/etc/config-{env}.json".format(**v), ] - if 'SYNC_ENGINE_CFG_PATH' in os.environ: - cfg_path = os.environ.get('SYNC_ENGINE_CFG_PATH', '').split(os.path.pathsep) + if "SYNC_ENGINE_CFG_PATH" in os.environ: + cfg_path = os.environ.get("SYNC_ENGINE_CFG_PATH", "").split(os.path.pathsep) cfg_path = list(p.strip() for p in cfg_path if p.strip()) else: cfg_path = [] @@ -116,16 +123,17 @@ def _update_config_from_env(config, env): def _get_local_feature_flags(config): - if os.environ.get('FEATURE_FLAGS') is not None: - flags = os.environ.get('FEATURE_FLAGS').split() + if os.environ.get("FEATURE_FLAGS") is not None: + flags = os.environ.get("FEATURE_FLAGS").split() else: - flags = config.get('FEATURE_FLAGS', '').split() - config['FEATURE_FLAGS'] = flags + flags = config.get("FEATURE_FLAGS", "").split() + config["FEATURE_FLAGS"] = flags def _get_process_name(config): - if os.environ.get('PROCESS_NAME') is not None: - config['PROCESS_NAME'] = os.environ.get("PROCESS_NAME") + if os.environ.get("PROCESS_NAME") is not None: + config["PROCESS_NAME"] = os.environ.get("PROCESS_NAME") + config = Configuration() _update_config_from_env(config, env) diff --git a/inbox/console.py b/inbox/console.py index ecb362489..ad503a35a 100755 --- a/inbox/console.py +++ b/inbox/console.py @@ -9,8 +9,9 @@ def user_console(user_email_address): with global_session_scope() as db_session: - result = db_session.query(Account).filter_by( - email_address=user_email_address).all() + result = ( + db_session.query(Account).filter_by(email_address=user_email_address).all() + ) account = None @@ -19,9 +20,12 @@ def user_console(user_email_address): elif len(result) > 1: print "\n{} accounts found for that email.\n".format(len(result)) for idx, acc in enumerate(result): - print "[{}] - {} {} {}".format(idx, acc.provider, - acc.namespace.email_address, - acc.namespace.public_id) + print "[{}] - {} {} {}".format( + idx, + acc.provider, + acc.namespace.email_address, + acc.namespace.public_id, + ) choice = int(raw_input("\nWhich # do you want to select? ")) account = result[choice] @@ -29,19 +33,22 @@ def user_console(user_email_address): print "No account found with email '{}'".format(user_email_address) return - if account.provider == 'eas': + if account.provider == "eas": banner = """ You can access the account instance with the 'account' variable. 
""" IPython.embed(banner1=banner) else: - with writable_connection_pool(account.id, pool_size=1).get()\ - as crispin_client: - if account.provider == 'gmail' \ - and 'all' in crispin_client.folder_names(): + with writable_connection_pool( + account.id, pool_size=1 + ).get() as crispin_client: + if ( + account.provider == "gmail" + and "all" in crispin_client.folder_names() + ): crispin_client.select_folder( - crispin_client.folder_names()['all'][0], - uidvalidity_cb) + crispin_client.folder_names()["all"][0], uidvalidity_cb + ) banner = """ You can access the crispin instance with the 'crispin_client' variable, @@ -69,8 +76,10 @@ def start_client_console(user_email_address=None): try: from tests.system.client import NylasTestClient except ImportError: - sys.exit("You need to have the Nylas Python SDK installed to use this" - " option.") + sys.exit( + "You need to have the Nylas Python SDK installed to use this" " option." + ) client = NylasTestClient(user_email_address) # noqa - IPython.embed(banner1=("You can access a Nylas API client " - "using the 'client' variable.")) + IPython.embed( + banner1=("You can access a Nylas API client " "using the 'client' variable.") + ) diff --git a/inbox/contacts/__init__.py b/inbox/contacts/__init__.py index b88765afd..1e6dbf001 100644 --- a/inbox/contacts/__init__.py +++ b/inbox/contacts/__init__.py @@ -1,3 +1,4 @@ # Allow out-of-tree backend submodules. from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) diff --git a/inbox/contacts/algorithms.py b/inbox/contacts/algorithms.py index 37c26658d..680027c53 100644 --- a/inbox/contacts/algorithms.py +++ b/inbox/contacts/algorithms.py @@ -1,20 +1,20 @@ import datetime from collections import defaultdict -''' +""" This file currently contains algorithms for the contacts/rankings endpoint and the groups/intrinsic endpoint. -''' +""" # For calculating message weights LOOKBACK_TIME = 63072000.0 # datetime.timedelta(days=2*365).total_seconds() -MIN_MESSAGE_WEIGHT = .01 +MIN_MESSAGE_WEIGHT = 0.01 # For calculate_group_scores MIN_GROUP_SIZE = 2 MIN_MESSAGE_COUNT = 2.5 # Might want to tune this param. (1.5, 2.5?) SELF_IDENTITY_THRESHOLD = 0.3 # Also tunable -JACCARD_THRESHOLD = .35 # probably shouldn't tune this +JACCARD_THRESHOLD = 0.35 # probably shouldn't tune this SOCIAL_MOLECULE_EXPANSION_LIMIT = 1000 # Don't add too many molecules! SOCIAL_MOLECULE_LIMIT = 5000 # Give up if there are too many messages @@ -40,8 +40,17 @@ def _get_participants(msg, excluded_emails=[]): emails addresses that msg was sent to (including cc and bcc) """ participants = msg.to_addr + msg.cc_addr + msg.bcc_addr - return sorted(list(set([email.lower() for _, email in participants - if email not in excluded_emails]))) + return sorted( + list( + set( + [ + email.lower() + for _, email in participants + if email not in excluded_emails + ] + ) + ) + ) # Not really an algorithm, but it seemed reasonable to put this here? @@ -59,6 +68,7 @@ def is_stale(last_updated, lifespan=14): # The actual algorithms for contact rankings and groupings! 
## + def calculate_contact_scores(messages, time_dependent=True): now = datetime.datetime.now() res = defaultdict(int) @@ -81,7 +91,7 @@ def calculate_group_counts(messages, user_email): for msg in messages: participants = _get_participants(msg, [user_email]) if len(participants) >= MIN_GROUP_SIZE: - res[', '.join(participants)] += 1 + res[", ".join(participants)] += 1 return res @@ -107,8 +117,7 @@ def get_message_list_weight(message_ids): participants = _get_participants(msg, [user_email]) if len(participants) >= MIN_GROUP_SIZE: molecules_dict[tuple(participants)].add(msg.id) - message_ids_to_scores[msg.id] = \ - _get_message_weight(now, msg.date) + message_ids_to_scores[msg.id] = _get_message_weight(now, msg.date) if len(molecules_dict) > SOCIAL_MOLECULE_LIMIT: return {} # Not worth the calculation @@ -119,19 +128,21 @@ def get_message_list_weight(message_ids): _expand_molecule_pool(molecules_dict) # Filter out infrequent molecules - molecules_list = [(set(emails), set(msgs)) - for (emails, msgs) in molecules_dict.iteritems() - if get_message_list_weight(msgs) >= MIN_MESSAGE_COUNT] + molecules_list = [ + (set(emails), set(msgs)) + for (emails, msgs) in molecules_dict.iteritems() + if get_message_list_weight(msgs) >= MIN_MESSAGE_COUNT + ] # Subsets get absorbed by supersets (if minimal info lost) - molecules_list = _subsume_molecules( - molecules_list, get_message_list_weight) + molecules_list = _subsume_molecules(molecules_list, get_message_list_weight) molecules_list = _combine_similar_molecules(molecules_list) # Give a score to each group. - return {', '.join(sorted(g)): get_message_list_weight(m) - for (g, m) in molecules_list} + return { + ", ".join(sorted(g)): get_message_list_weight(m) for (g, m) in molecules_list + } # Helper functions for calculating group scores @@ -143,8 +154,9 @@ def _expand_molecule_pool(molecules_dict): g2, m2 = mditems[j] new_molecule = tuple(sorted(list(g1.intersection(g2)))) if len(new_molecule) >= MIN_GROUP_SIZE: - molecules_dict[new_molecule] = \ + molecules_dict[new_molecule] = ( molecules_dict[new_molecule].union(m1).union(m2) + ) def _subsume_molecules(molecules_list, get_message_list_weight): @@ -161,8 +173,11 @@ def _subsume_molecules(molecules_list, get_message_list_weight): g2, m2 = molecules_list[j] # Bigger group m2_size = mol_weights[j] if g1.issubset(g2): - sharing_error = ((len(g2) - len(g1)) * (m1_size - m2_size) / - (1.0 * (len(g2) * m1_size))) + sharing_error = ( + (len(g2) - len(g1)) + * (m1_size - m2_size) + / (1.0 * (len(g2) * m1_size)) + ) if sharing_error < SELF_IDENTITY_THRESHOLD: is_subsumed[i] = True break @@ -187,9 +202,11 @@ def _combine_similar_molecules(molecules_list): combined[i], combined[j] = True, True break - molecules_list = [molecule for molecule, was_combined - in zip(molecules_list, combined) - if not was_combined] + molecules_list = [ + molecule + for molecule, was_combined in zip(molecules_list, combined) + if not was_combined + ] new_guys_start_idx = len(molecules_list) molecules_list.extend(new_guys) diff --git a/inbox/contacts/carddav.py b/inbox/contacts/carddav.py index 9f5aad582..d7a93b113 100644 --- a/inbox/contacts/carddav.py +++ b/inbox/contacts/carddav.py @@ -25,19 +25,20 @@ # Fake it till you make it -USER_AGENT = "User-Agent: DAVKit/4.0.1 (730); CalendarStore/4.0.1 " + \ - "(973); iCal/4.0.1 (1374); Mac OS X/10.6.2 (10C540)" +USER_AGENT = ( + "User-Agent: DAVKit/4.0.1 (730); CalendarStore/4.0.1 " + + "(973); iCal/4.0.1 (1374); Mac OS X/10.6.2 (10C540)" +) def supports_carddav(url): """ Basic 
verification that the endpoint supports CardDav """ response = requests.request( - 'OPTIONS', url, - headers={'User-Agent': USER_AGENT, - 'Depth': '1'}) - response.raise_for_status() # if not 2XX status - if 'addressbook' not in response.headers.get('DAV', ''): + "OPTIONS", url, headers={"User-Agent": USER_AGENT, "Depth": "1"} + ) + response.raise_for_status() # if not 2XX status + if "addressbook" not in response.headers.get("DAV", ""): raise Exception("URL is not a CardDAV resource") @@ -48,8 +49,7 @@ def __init__(self, email_address, password, base_url): self.session = requests.Session() self.session.auth = (email_address, password) self.session.verify = True # verify SSL certs - self.session.headers.update({'User-Agent': USER_AGENT, - 'Depth': '1'}) + self.session.headers.update({"User-Agent": USER_AGENT, "Depth": "1"}) self.base_url = base_url def get_principal_url(self): @@ -62,9 +62,7 @@ def get_principal_url(self): """ - response = self.session.request('PROPFIND', - self.base_url, - data=payload) + response = self.session.request("PROPFIND", self.base_url, data=payload) response.raise_for_status() xml = response.content @@ -84,9 +82,7 @@ def get_address_book_home(self, url): """ - response = self.session.request('PROPFIND', - url, - data=payload) + response = self.session.request("PROPFIND", url, data=payload) response.raise_for_status() xml = response.content element = ET.XML(xml) @@ -124,9 +120,7 @@ def get_cards(self, url): """ - response = self.session.request('REPORT', - url, - data=payload,) + response = self.session.request("REPORT", url, data=payload,) response.raise_for_status() return response.content diff --git a/inbox/contacts/crud.py b/inbox/contacts/crud.py index 01c7fec89..9947116e0 100644 --- a/inbox/contacts/crud.py +++ b/inbox/contacts/crud.py @@ -4,7 +4,7 @@ from inbox.models import Contact -INBOX_PROVIDER_NAME = 'inbox' +INBOX_PROVIDER_NAME = "inbox" def create(namespace, db_session, name, email): @@ -13,16 +13,21 @@ def create(namespace, db_session, name, email): provider_name=INBOX_PROVIDER_NAME, uid=uuid.uuid4().hex, name=name, - email_address=email) + email_address=email, + ) db_session.add(contact) db_session.commit() return contact def read(namespace, db_session, contact_public_id): - return db_session.query(Contact).filter( - Contact.public_id == contact_public_id, - Contact.namespace_id == namespace.id).first() + return ( + db_session.query(Contact) + .filter( + Contact.public_id == contact_public_id, Contact.namespace_id == namespace.id + ) + .first() + ) def update(namespace, db_session, contact_public_id, name, email): diff --git a/inbox/contacts/google.py b/inbox/contacts/google.py index 5f7fc9281..8c29f94e7 100644 --- a/inbox/contacts/google.py +++ b/inbox/contacts/google.py @@ -9,6 +9,7 @@ import gdata.contacts.client from nylas.logging import get_logger + logger = get_logger() from inbox.basicauth import ConnectionError, ValidationError from inbox.basicauth import OAuthError @@ -17,7 +18,7 @@ from inbox.models.backends.gmail import GmailAccount, g_token_manager from inbox.models.backends.gmail import GmailAuthCredentials -SOURCE_APP_NAME = 'Nylas Sync Engine' +SOURCE_APP_NAME = "Nylas Sync Engine" class GoogleContactsProvider(object): @@ -41,13 +42,17 @@ class GoogleContactsProvider(object): Logging handler. 
""" - PROVIDER_NAME = 'google' + + PROVIDER_NAME = "google" def __init__(self, account_id, namespace_id): self.account_id = account_id self.namespace_id = namespace_id - self.log = logger.new(account_id=account_id, component='contacts sync', - provider=self.PROVIDER_NAME) + self.log = logger.new( + account_id=account_id, + component="contacts sync", + provider=self.PROVIDER_NAME, + ) def _get_google_client(self, retry_conn_errors=True): """Return the Google API client.""" @@ -56,11 +61,11 @@ def _get_google_client(self, retry_conn_errors=True): with session_scope(self.namespace_id) as db_session: try: account = db_session.query(GmailAccount).get(self.account_id) - access_token, auth_creds_id = \ - g_token_manager.get_token_and_auth_creds_id_for_contacts( - account) - auth_creds = db_session.query(GmailAuthCredentials) \ - .get(auth_creds_id) + ( + access_token, + auth_creds_id, + ) = g_token_manager.get_token_and_auth_creds_id_for_contacts(account) + auth_creds = db_session.query(GmailAuthCredentials).get(auth_creds_id) two_legged_oauth_token = gdata.gauth.OAuth2Token( client_id=auth_creds.client_id, @@ -68,25 +73,29 @@ def _get_google_client(self, retry_conn_errors=True): scope=auth_creds.scopes, # FIXME: string not list? user_agent=SOURCE_APP_NAME, access_token=access_token, - refresh_token=auth_creds.refresh_token) + refresh_token=auth_creds.refresh_token, + ) google_client = gdata.contacts.client.ContactsClient( - source=SOURCE_APP_NAME) + source=SOURCE_APP_NAME + ) google_client.auth_token = two_legged_oauth_token return google_client - except (gdata.client.BadAuthentication, - gdata.client.Unauthorized, OAuthError): + except ( + gdata.client.BadAuthentication, + gdata.client.Unauthorized, + OAuthError, + ): if not retry_conn_errors: # end of the line raise ValidationError # If there are no valid refresh_tokens, will raise an # OAuthError, stopping the sync - g_token_manager.get_token_for_contacts( - account, force_refresh=True) + g_token_manager.get_token_for_contacts(account, force_refresh=True) return self._google_client(retry_conn_errors=False) except ConnectionError: - self.log.error('Connection error') + self.log.error("Connection error") raise def _parse_contact_result(self, google_contact): @@ -107,11 +116,12 @@ def _parse_contact_result(self, google_contact): AttributeError If the contact data could not be parsed correctly. """ - email_addresses = [email for email in google_contact.email if - email.primary] + email_addresses = [email for email in google_contact.email if email.primary] if email_addresses and len(email_addresses) > 1: - self.log.error("Should not have more than one email per entry!", - num_email=len(email_addresses)) + self.log.error( + "Should not have more than one email per entry!", + num_email=len(email_addresses), + ) try: # The id.text field of a ContactEntry object takes the form @@ -119,26 +129,31 @@ def _parse_contact_result(self, google_contact): # We only want the part. raw_google_id = google_contact.id.text _, g_id = posixpath.split(raw_google_id) - name = (google_contact.name.full_name.text - if (google_contact.name and google_contact.name.full_name) - else None) - email_address = (email_addresses[0].address if email_addresses else - None) + name = ( + google_contact.name.full_name.text + if (google_contact.name and google_contact.name.full_name) + else None + ) + email_address = email_addresses[0].address if email_addresses else None # The entirety of the raw contact data in XML string # representation. 
raw_data = google_contact.to_string() except AttributeError as e: - self.log.error('Something is wrong with contact', - contact=google_contact) + self.log.error("Something is wrong with contact", contact=google_contact) raise e deleted = google_contact.deleted is not None - return Contact(namespace_id=self.namespace_id, uid=g_id, name=name, - provider_name=self.PROVIDER_NAME, - email_address=email_address, deleted=deleted, - raw_data=raw_data) + return Contact( + namespace_id=self.namespace_id, + uid=g_id, + name=name, + provider_name=self.PROVIDER_NAME, + email_address=email_address, + deleted=deleted, + raw_data=raw_data, + ) def get_items(self, sync_from_dt=None, max_results=100000): """ @@ -171,29 +186,23 @@ def get_items(self, sync_from_dt=None, max_results=100000): # number by default. query.max_results = max_results if sync_from_dt: - query.updated_min = datetime.isoformat(sync_from_dt) + 'Z' + query.updated_min = datetime.isoformat(sync_from_dt) + "Z" query.showdeleted = True while True: try: google_client = self._get_google_client() results = google_client.GetContacts(q=query).entry - return [self._parse_contact_result(result) for result in - results] + return [self._parse_contact_result(result) for result in results] except gdata.client.RequestError as e: if e.status == 503: - self.log.info('Ran into Google bot detection. Sleeping.', - message=e) + self.log.info("Ran into Google bot detection. Sleeping.", message=e) gevent.sleep(5 * 60 + random.randrange(0, 60)) else: - self.log.info('contact sync request failure; retrying', - message=e) + self.log.info("contact sync request failure; retrying", message=e) gevent.sleep(30 + random.randrange(0, 60)) except gdata.client.Unauthorized: - self.log.warning( - 'Invalid access token; refreshing and retrying') + self.log.warning("Invalid access token; refreshing and retrying") # Raises an OAuth error if no valid token exists with session_scope(self.namespace_id) as db_session: - account = db_session.query(GmailAccount).get( - self.account_id) - g_token_manager.get_token_for_contacts( - account, force_refresh=True) + account = db_session.query(GmailAccount).get(self.account_id) + g_token_manager.get_token_for_contacts(account, force_refresh=True) diff --git a/inbox/contacts/icloud.py b/inbox/contacts/icloud.py index 0652588bd..7fa45c163 100644 --- a/inbox/contacts/icloud.py +++ b/inbox/contacts/icloud.py @@ -1,6 +1,7 @@ """Provide iCloud contacts""" from nylas.logging import get_logger + logger = get_logger() from carddav import CardDav @@ -13,7 +14,7 @@ from inbox.models.backends.generic import GenericAccount -ICLOUD_CONTACTS_URL = 'https://contacts.icloud.com' +ICLOUD_CONTACTS_URL = "https://contacts.icloud.com" class ICloudContactsProvider(object): @@ -21,14 +22,17 @@ class ICloudContactsProvider(object): Base class to fetch and parse iCloud contacts """ - PROVIDER_NAME = 'icloud' + PROVIDER_NAME = "icloud" def __init__(self, account_id, namespace_id): supports_carddav(ICLOUD_CONTACTS_URL) self.account_id = account_id self.namespace_id = namespace_id - self.log = logger.new(account_id=account_id, component='contacts sync', - provider=self.PROVIDER_NAME) + self.log = logger.new( + account_id=account_id, + component="contacts sync", + provider=self.PROVIDER_NAME, + ) def _vCard_raw_to_contact(self, cardstring): card = vcard_from_string(cardstring) @@ -41,33 +45,37 @@ def _x(key): # Ugly parsing helper for ugly formats pass # Skip contact groups for now - if _x('X-ADDRESSBOOKSERVER-KIND') == 'group': + if _x("X-ADDRESSBOOKSERVER-KIND") == 
"group": return None - uid = _x('UID') - name = _x('FN') - email_address = _x('EMAIL') + uid = _x("UID") + name = _x("FN") + email_address = _x("EMAIL") # TODO add these later # street_address = _x('ADR') # phone_number = _x('TEL') # organization = _x('ORG') - return Contact(namespace_id=self.namespace_id, - provider_name=self.PROVIDER_NAME, - uid=uid, - name=name, - email_address=email_address, - raw_data=cardstring) + return Contact( + namespace_id=self.namespace_id, + provider_name=self.PROVIDER_NAME, + uid=uid, + name=name, + email_address=email_address, + raw_data=cardstring, + ) def get_items(self, sync_from_dt=None, max_results=100000): with session_scope(self.namespace_id) as db_session: account = db_session.query(GenericAccount).get(self.account_id) email_address = account.email_address password = account.password - if account.provider != 'icloud': - self.log.error("Can't sync contacts for non iCloud provider", - account_id=account.id, - provider=account.provider) + if account.provider != "icloud": + self.log.error( + "Can't sync contacts for non iCloud provider", + account_id=account.id, + provider=account.provider, + ) return [] c = CardDav(email_address, password, ICLOUD_CONTACTS_URL) @@ -81,7 +89,7 @@ def get_items(self, sync_from_dt=None, max_results=100000): self.log.debug("Requesting cards for user") # This request is limited to returning 5000 items - returned_cards = c.get_cards(home_url + 'card/') + returned_cards = c.get_cards(home_url + "card/") root = ET.XML(returned_cards) @@ -95,14 +103,15 @@ def get_items(self, sync_from_dt=None, max_results=100000): # Currently if there are over 5000 contacts, it trigger the # response number-of-matches-within-limits # TODO add paging for requesting all - self.log.error("Error parsing CardDav response into contact: " - "{}".format(ET.tostring(refprop))) + self.log.error( + "Error parsing CardDav response into contact: " + "{}".format(ET.tostring(refprop)) + ) continue new_contact = self._vCard_raw_to_contact(cardstring) if new_contact: all_contacts.append(new_contact) - self.log.info("Saving {} contacts from iCloud sync" - .format(len(all_contacts))) + self.log.info("Saving {} contacts from iCloud sync".format(len(all_contacts))) return all_contacts diff --git a/inbox/contacts/processing.py b/inbox/contacts/processing.py index 0978de465..5b2d07951 100644 --- a/inbox/contacts/processing.py +++ b/inbox/contacts/processing.py @@ -2,11 +2,7 @@ from inbox.util.addr import valid_email from inbox.util.addr import canonicalize_address as canonicalize -from inbox.models import ( - Contact, - EventContactAssociation, - MessageContactAssociation -) +from inbox.models import Contact, EventContactAssociation, MessageContactAssociation from inbox.contacts.crud import INBOX_PROVIDER_NAME @@ -16,24 +12,31 @@ def _get_contact_map(db_session, namespace_id, all_addresses): Retrieves or creates contacts for the given address pairs, returning a dict with the canonicalized emails mapped to Contact objects. 
""" - canonicalized_addresses = [canonicalize(addr) for _, addr in - all_addresses] + canonicalized_addresses = [canonicalize(addr) for _, addr in all_addresses] if not canonicalized_addresses: return {} - existing_contacts = db_session.query(Contact).filter( - Contact._canonicalized_address.in_(canonicalized_addresses), - Contact.namespace_id == namespace_id).all() + existing_contacts = ( + db_session.query(Contact) + .filter( + Contact._canonicalized_address.in_(canonicalized_addresses), + Contact.namespace_id == namespace_id, + ) + .all() + ) contact_map = {c._canonicalized_address: c for c in existing_contacts} for name, email_address in all_addresses: canonicalized_address = canonicalize(email_address) if canonicalized_address not in contact_map: - new_contact = Contact(name=name, email_address=email_address, - namespace_id=namespace_id, - provider_name=INBOX_PROVIDER_NAME, - uid=uuid.uuid4().hex) + new_contact = Contact( + name=name, + email_address=email_address, + namespace_id=namespace_id, + provider_name=INBOX_PROVIDER_NAME, + uid=uuid.uuid4().hex, + ) contact_map[canonicalized_address] = new_contact return contact_map @@ -50,7 +53,7 @@ def _get_contact_from_map(contact_map, name, email_address): # "Christine Spang (via Google Drive) DOC_UPLOAD_CHUNK_SIZE: doc_service.upload_documents( - documents=json.dumps(docs), - contentType='application/json') + documents=json.dumps(docs), contentType="application/json" + ) indexed += len(docs) docs = [] @@ -209,22 +218,24 @@ def index_namespace(namespace_id): deleted_records = set(previous_records).difference(current_records) for id_ in deleted_records: log.info("deleting", contact_id=id_) - docs.append({'type': 'delete', 'id': id_}) + docs.append({"type": "delete", "id": id_}) if docs: doc_service.upload_documents( - documents=json.dumps(docs), - contentType='application/json') + documents=json.dumps(docs), contentType="application/json" + ) - log.info("namespace index complete", - namespace_id=namespace_id, - total_contacts_indexed=indexed, - total_contacts_deleted=len(deleted_records)) + log.info( + "namespace index complete", + namespace_id=namespace_id, + total_contacts_indexed=indexed, + total_contacts_deleted=len(deleted_records), + ) def delete_namespace_indexes(namespace_ids): if not search_service_url or not doc_service_url: - raise Exception('CloudSearch not configured; cannot update index') + raise Exception("CloudSearch not configured; cannot update index") else: doc_service = get_doc_service() @@ -233,25 +244,27 @@ def delete_namespace_indexes(namespace_ids): record_ids = search_client.fetch_all_matching_ids() - log.info("deleting all record_ids", - namespace_id=namespace_id, - total=len(record_ids), - ids=record_ids) + log.info( + "deleting all record_ids", + namespace_id=namespace_id, + total=len(record_ids), + ids=record_ids, + ) # Keep upload under 5 MB if each delete doc is about 265 bytes. 
chunk_size = 18000 docs = [] for id_ in record_ids: - docs.append({'type': 'delete', 'id': id_}) + docs.append({"type": "delete", "id": id_}) if len(docs) > chunk_size: doc_service.upload_documents( - documents=json.dumps(docs), - contentType='application/json') + documents=json.dumps(docs), contentType="application/json" + ) docs = [] if docs: doc_service.upload_documents( - documents=json.dumps(docs), - contentType='application/json') + documents=json.dumps(docs), contentType="application/json" + ) diff --git a/inbox/contacts/vcard.py b/inbox/contacts/vcard.py index 745be3428..e938d0c4e 100644 --- a/inbox/contacts/vcard.py +++ b/inbox/contacts/vcard.py @@ -42,50 +42,103 @@ def list_clean(string): returns: list() """ - string = string.split(',') + string = string.split(",") rstring = list() for element in string: - rstring.append(element.strip(' ')) + rstring.append(element.strip(" ")) return rstring NO_STRINGS = [u"n", "n", u"no", "no"] YES_STRINGS = [u"y", "y", u"yes", "yes"] -PROPERTIES = ['EMAIL', 'TEL'] -PROPS_ALL = ['FN', 'N', 'VERSION', 'NICKNAME', 'PHOTO', 'BDAY', 'ADR', - 'LABEL', 'TEL', 'EMAIL', 'MAILER', 'TZ', 'GEO', 'TITLE', 'ROLE', - 'LOGO', 'AGENT', 'ORG', 'NOTE', 'REV', 'SOUND', 'URL', 'UID', - 'KEY', 'CATEGORIES', 'PRODID', 'REV', 'SORT-STRING', 'SOUND', - 'URL', 'VERSION', 'UTC-OFFSET'] -PROPS_ALLOWED = ['NICKNAME', 'BDAY', 'ADR', 'LABEL', 'TEL', 'EMAIL', - 'MAILER', 'TZ', 'GEO', 'TITLE', 'ROLE', 'AGENT', - 'ORG', 'NOTE', 'REV', 'SOUND', 'URL', 'UID', 'KEY', - 'CATEGORIES', 'PRODID', 'REV', 'SORT-STRING', 'SOUND', - 'URL', 'VERSION', 'UTC-OFFSET'] -PROPS_ONCE = ['FN', 'N', 'VERSION'] -PROPS_LIST = ['NICKNAME', 'CATEGORIES'] -PROPS_BIN = ['PHOTO', 'LOGO', 'SOUND', 'KEY'] - - -RTEXT = '\x1b[7m' -NTEXT = '\x1b[0m' -BTEXT = '\x1b[1m' +PROPERTIES = ["EMAIL", "TEL"] +PROPS_ALL = [ + "FN", + "N", + "VERSION", + "NICKNAME", + "PHOTO", + "BDAY", + "ADR", + "LABEL", + "TEL", + "EMAIL", + "MAILER", + "TZ", + "GEO", + "TITLE", + "ROLE", + "LOGO", + "AGENT", + "ORG", + "NOTE", + "REV", + "SOUND", + "URL", + "UID", + "KEY", + "CATEGORIES", + "PRODID", + "REV", + "SORT-STRING", + "SOUND", + "URL", + "VERSION", + "UTC-OFFSET", +] +PROPS_ALLOWED = [ + "NICKNAME", + "BDAY", + "ADR", + "LABEL", + "TEL", + "EMAIL", + "MAILER", + "TZ", + "GEO", + "TITLE", + "ROLE", + "AGENT", + "ORG", + "NOTE", + "REV", + "SOUND", + "URL", + "UID", + "KEY", + "CATEGORIES", + "PRODID", + "REV", + "SORT-STRING", + "SOUND", + "URL", + "VERSION", + "UTC-OFFSET", +] +PROPS_ONCE = ["FN", "N", "VERSION"] +PROPS_LIST = ["NICKNAME", "CATEGORIES"] +PROPS_BIN = ["PHOTO", "LOGO", "SOUND", "KEY"] + + +RTEXT = "\x1b[7m" +NTEXT = "\x1b[0m" +BTEXT = "\x1b[1m" def get_names(display_name): - first_name, last_name = '', display_name + first_name, last_name = "", display_name - if display_name.find(',') > 0: + if display_name.find(",") > 0: # Parsing something like 'Doe, John Abraham' - last_name, first_name = display_name.split(',') + last_name, first_name = display_name.split(",") - elif display_name.find(' '): + elif display_name.find(" "): # Parsing something like 'John Abraham Doe' # TODO: This fails for compound names. What is the most common case? 
- name_list = display_name.split(' ') - last_name = ''.join(name_list[-1]) - first_name = ' '.join(name_list[:-1]) + name_list = display_name.split(" ") + last_name = "".join(name_list[-1]) + first_name = " ".join(name_list[:-1]) return first_name.strip().capitalize(), last_name.strip().capitalize() @@ -97,11 +150,11 @@ def fix_vobject(vcard): :type vcard: vobject.base.Component (vobject based vcard) """ - if 'fn' not in vcard.contents: - logging.debug('vcard has no formatted name, reconstructing...') - fname = vcard.contents['n'][0].valueRepr() + if "fn" not in vcard.contents: + logging.debug("vcard has no formatted name, reconstructing...") + fname = vcard.contents["n"][0].valueRepr() fname = fname.strip() - vcard.add('fn') + vcard.add("fn") vcard.fn.value = fname return vcard @@ -118,14 +171,13 @@ def vcard_from_vobject(vcard): property_value = line.value try: - if line.ENCODING_paramlist == [u'b'] or \ - line.ENCODING_paramlist == [u'B']: + if line.ENCODING_paramlist == [u"b"] or line.ENCODING_paramlist == [u"B"]: property_value = base64.b64encode(line.value) except AttributeError: pass if isinstance(property_value, list): - property_value = (',').join(property_value) + property_value = (",").join(property_value) vdict[property_name].append((property_value, line.params,)) return vdict @@ -146,13 +198,13 @@ def vcard_from_string(vcard_string): def vcard_from_email(display_name, email): fname, lname = get_names(display_name) vcard = vobject.vCard() - vcard.add('n') + vcard.add("n") vcard.n.value = vobject.vcard.Name(family=lname, given=fname) - vcard.add('fn') + vcard.add("fn") vcard.fn.value = display_name - vcard.add('email') + vcard.add("email") vcard.email.value = email - vcard.email.type_param = 'INTERNET' + vcard.email.type_param = "INTERNET" return vcard_from_vobject(vcard) @@ -182,15 +234,15 @@ class VCard(defaultdict): 2: some property was deleted """ - def __init__(self, ddict=''): + def __init__(self, ddict=""): - if ddict == '': + if ddict == "": defaultdict.__init__(self, list) else: defaultdict.__init__(self, list, ddict) - self.href = '' - self.account = '' - self.etag = '' + self.href = "" + self.account = "" + self.etag = "" self.edited = 0 def serialize(self): @@ -198,25 +250,25 @@ def serialize(self): @property def name(self): - return unicode(self['N'][0][0]) if self['N'] else '' + return unicode(self["N"][0][0]) if self["N"] else "" @name.setter def name(self, value): - if not self['N']: - self['N'] = [('', {})] - self['N'][0][0] = value + if not self["N"]: + self["N"] = [("", {})] + self["N"][0][0] = value @property def fname(self): - return unicode(self['FN'][0][0]) if self['FN'] else '' + return unicode(self["FN"][0][0]) if self["FN"] else "" @fname.setter def fname(self, value): - self['FN'][0] = (value, {}) + self["FN"][0] = (value, {}) def alt_keys(self): keylist = self.keys() - for one in [x for x in ['FN', 'N', 'VERSION'] if x in keylist]: + for one in [x for x in ["FN", "N", "VERSION"] if x in keylist]: keylist.remove(one) keylist.sort() return keylist @@ -225,29 +277,29 @@ def print_email(self): """prints only name, email and type for use with mutt""" collector = list() try: - for one in self['EMAIL']: + for one in self["EMAIL"]: try: - typelist = ','.join(one[1][u'TYPE']) + typelist = ",".join(one[1][u"TYPE"]) except KeyError: - typelist = '' + typelist = "" collector.append(one[0] + "\t" + self.fname + "\t" + typelist) - return '\n'.join(collector) + return "\n".join(collector) except KeyError: - return '' + return "" def print_tel(self): """prints only 
name, email and type for use with mutt""" collector = list() try: - for one in self['TEL']: + for one in self["TEL"]: try: - typelist = ','.join(one[1][u'TYPE']) + typelist = ",".join(one[1][u"TYPE"]) except KeyError: - typelist = '' + typelist = "" collector.append(self.fname + "\t" + one[0] + "\t" + typelist) - return '\n'.join(collector) + return "\n".join(collector) except KeyError: - return '' + return "" @property def pretty(self): @@ -255,32 +307,32 @@ def pretty(self): @property def pretty_min(self): - return self._pretty_base(['TEL', 'EMAIL']) + return self._pretty_base(["TEL", "EMAIL"]) def _pretty_base(self, keylist): collector = list() if sys.stdout.isatty(): - collector.append('\n' + BTEXT + 'Name: ' + self.fname + NTEXT) + collector.append("\n" + BTEXT + "Name: " + self.fname + NTEXT) else: - collector.append('\n' + 'Name: ' + self.fname) + collector.append("\n" + "Name: " + self.fname) for key in keylist: for value in self[key]: try: - types = ' (' + ', '.join(value[1]['TYPE']) + ')' + types = " (" + ", ".join(value[1]["TYPE"]) + ")" except KeyError: - types = '' - line = key + types + ': ' + value[0] + types = "" + line = key + types + ": " + value[0] collector.append(line) - return '\n'.join(collector) + return "\n".join(collector) def _line_helper(self, line): collector = list() for key in line[1].keys(): - collector.append(key + '=' + ','.join(line[1][key])) + collector.append(key + "=" + ",".join(line[1][key])) if collector == list(): - return '' + return "" else: - return (';' + ';'.join(collector)) + return ";" + ";".join(collector) @property def vcf(self): @@ -295,21 +347,21 @@ def generate_random_uid(): """generate a random uid, when random isn't broken, getting a random UID from a pool of roughly 10^56 should be good enough""" choice = string.ascii_uppercase + string.digits - return ''.join([random.choice(choice) for _ in range(36)]) + return "".join([random.choice(choice) for _ in range(36)]) - if 'UID' not in self.keys(): - self['UID'] = [(generate_random_uid(), dict())] + if "UID" not in self.keys(): + self["UID"] = [(generate_random_uid(), dict())] collector = list() - collector.append('BEGIN:VCARD') - collector.append('VERSION:3.0') - for key in ['FN', 'N']: + collector.append("BEGIN:VCARD") + collector.append("VERSION:3.0") + for key in ["FN", "N"]: try: - collector.append(key + ':' + self[key][0][0]) + collector.append(key + ":" + self[key][0][0]) except IndexError: # broken vcard without FN or N - collector.append(key + ':') + collector.append(key + ":") for prop in self.alt_keys(): for line in self[prop]: types = self._line_helper(line) - collector.append(prop + types + ':' + line[0]) - collector.append('END:VCARD') - return '\n'.join(collector) + collector.append(prop + types + ":" + line[0]) + collector.append("END:VCARD") + return "\n".join(collector) diff --git a/inbox/crispin.py b/inbox/crispin.py index 51b831cbb..f9c7a62cc 100644 --- a/inbox/crispin.py +++ b/inbox/crispin.py @@ -13,14 +13,15 @@ # return one digit. Fun times. 
imaplib.InternalDate = re.compile( r'.*INTERNALDATE "' - r'(?P[ 0123]?[0-9])-' # insert that `?` to make first digit optional - r'(?P[A-Z][a-z][a-z])-' - r'(?P[0-9][0-9][0-9][0-9])' - r' (?P[0-9][0-9]):' - r'(?P[0-9][0-9]):' - r'(?P[0-9][0-9])' - r' (?P[-+])(?P[0-9][0-9])(?P[0-9][0-9])' - r'"') + r"(?P[ 0123]?[0-9])-" # insert that `?` to make first digit optional + r"(?P[A-Z][a-z][a-z])-" + r"(?P[0-9][0-9][0-9][0-9])" + r" (?P[0-9][0-9]):" + r"(?P[0-9][0-9]):" + r"(?P[0-9][0-9])" + r" (?P[-+])(?P[0-9][0-9])(?P[0-9][0-9])" + r'"' +) import functools import threading @@ -46,20 +47,21 @@ from inbox.models.backends.gmail import GmailAccount from inbox.folder_edge_cases import localized_folder_names from nylas.logging import get_logger + log = get_logger() -__all__ = ['CrispinClient', 'GmailCrispinClient'] +__all__ = ["CrispinClient", "GmailCrispinClient"] # Unify flags API across IMAP and Gmail -Flags = namedtuple('Flags', 'flags modseq') +Flags = namedtuple("Flags", "flags modseq") # Flags includes labels on Gmail because Gmail doesn't use \Draft. -GmailFlags = namedtuple('GmailFlags', 'flags labels modseq') -GMetadata = namedtuple('GMetadata', 'g_msgid g_thrid size') +GmailFlags = namedtuple("GmailFlags", "flags labels modseq") +GMetadata = namedtuple("GMetadata", "g_msgid g_thrid size") RawMessage = namedtuple( - 'RawImapMessage', - 'uid internaldate flags body g_thrid g_msgid g_labels') -RawFolder = namedtuple('RawFolder', 'display_name role') + "RawImapMessage", "uid internaldate flags body g_thrid g_msgid g_labels" +) +RawFolder = namedtuple("RawFolder", "display_name role") # Lazily-initialized map of account ids to lock objects. # This prevents multiple greenlets from concurrently creating duplicate @@ -74,13 +76,17 @@ CONN_RETRY_EXC_CLASSES = CONN_NETWORK_EXC_CLASSES + (imaplib.IMAP4.error,) # Exception classes on which connections should be discarded. -CONN_DISCARD_EXC_CLASSES = CONN_NETWORK_EXC_CLASSES + \ - (ssl.CertificateError, imaplib.IMAP4.error) +CONN_DISCARD_EXC_CLASSES = CONN_NETWORK_EXC_CLASSES + ( + ssl.CertificateError, + imaplib.IMAP4.error, +) # Exception classes which indicate the IMAP connection has become # unusable. -CONN_UNUSABLE_EXC_CLASSES = CONN_NETWORK_EXC_CLASSES + \ - (ssl.CertificateError, imaplib.IMAP4.abort) +CONN_UNUSABLE_EXC_CLASSES = CONN_NETWORK_EXC_CLASSES + ( + ssl.CertificateError, + imaplib.IMAP4.abort, +) class FolderMissingError(Exception): @@ -95,8 +101,8 @@ def _get_connection_pool(account_id, pool_size, pool_map, readonly): with _lock_map[account_id]: if account_id not in pool_map: pool_map[account_id] = CrispinConnectionPool( - account_id, num_connections=pool_size, - readonly=readonly) + account_id, num_connections=pool_size, readonly=readonly + ) return pool_map[account_id] @@ -154,8 +160,11 @@ class CrispinConnectionPool(object): """ def __init__(self, account_id, num_connections, readonly): - log.info('Creating Crispin connection pool', - account_id=account_id, num_connections=num_connections) + log.info( + "Creating Crispin connection pool", + account_id=account_id, + num_connections=num_connections, + ) self.account_id = account_id self.readonly = readonly self._queue = Queue(num_connections, items=num_connections * [None]) @@ -172,7 +181,7 @@ def _logout(self, client): try: client.logout() except Exception: - log.info('Error on IMAP logout', exc_info=True) + log.info("Error on IMAP logout", exc_info=True) @contextlib.contextmanager def get(self): @@ -201,10 +210,8 @@ def get(self): # isn't always necessary, since if you got e.g. 
a FETCH failure you # could reuse the same connection. But for now it's the simplest # thing to do. - log.info('IMAP connection error; discarding connection', - exc_info=True) - if client is not None and \ - not isinstance(exc, CONN_UNUSABLE_EXC_CLASSES): + log.info("IMAP connection error; discarding connection", exc_info=True) + if client is not None and not isinstance(exc, CONN_UNUSABLE_EXC_CLASSES): self._logout(client) client = None raise exc @@ -222,7 +229,7 @@ def _set_account_info(self): self.provider_info = account.provider_info self.email_address = account.email_address self.auth_handler = account.auth_handler - if account.provider == 'gmail': + if account.provider == "gmail": self.client_cls = GmailCrispinClient else: self.client_cls = CrispinClient @@ -230,35 +237,47 @@ def _set_account_info(self): def _new_raw_connection(self): """Returns a new, authenticated IMAPClient instance for the account.""" from inbox.auth.gmail import GmailAuthHandler + with session_scope(self.account_id) as db_session: if isinstance(self.auth_handler, GmailAuthHandler): - account = db_session.query(GmailAccount).options( - joinedload(GmailAccount.auth_credentials)).get( - self.account_id) + account = ( + db_session.query(GmailAccount) + .options(joinedload(GmailAccount.auth_credentials)) + .get(self.account_id) + ) else: - account = db_session.query(GenericAccount).options( - joinedload(GenericAccount.imap_secret) - ).get(self.account_id) + account = ( + db_session.query(GenericAccount) + .options(joinedload(GenericAccount.imap_secret)) + .get(self.account_id) + ) db_session.expunge(account) return self.auth_handler.connect_account( - account, self._should_timeout_connection()) + account, self._should_timeout_connection() + ) def _new_connection(self): conn = self._new_raw_connection() - return self.client_cls(self.account_id, self.provider_info, - self.email_address, conn, - readonly=self.readonly) + return self.client_cls( + self.account_id, + self.provider_info, + self.email_address, + conn, + readonly=self.readonly, + ) def _exc_callback(exc): - log.info('Connection broken with error; retrying with new connection', - exc_info=True) + log.info( + "Connection broken with error; retrying with new connection", exc_info=True + ) gevent.sleep(5) retry_crispin = functools.partial( - retry, retry_classes=CONN_RETRY_EXC_CLASSES, exc_callback=_exc_callback) + retry, retry_classes=CONN_RETRY_EXC_CLASSES, exc_callback=_exc_callback +) class CrispinClient(object): @@ -300,8 +319,7 @@ class CrispinClient(object): """ - def __init__(self, account_id, provider_info, email_address, conn, - readonly=True): + def __init__(self, account_id, provider_info, email_address, conn, readonly=True): self.account_id = account_id self.provider_info = provider_info self.email_address = email_address @@ -366,18 +384,19 @@ def select_folder(self, folder, uidvalidity_cb): cached/out-of-date values for HIGHESTMODSEQ from the IMAP server. 
""" try: - select_info = self.conn.select_folder( - folder, readonly=self.readonly) + select_info = self.conn.select_folder(folder, readonly=self.readonly) except imapclient.IMAPClient.Error as e: # Specifically point out folders that come back as missing by # checking for Yahoo / Gmail / Outlook (Hotmail) specific errors: # TODO: match with FolderSyncEngine.get_new_uids - if '[NONEXISTENT] Unknown Mailbox:' in e.message or \ - 'does not exist' in e.message or \ - "doesn't exist" in e.message: + if ( + "[NONEXISTENT] Unknown Mailbox:" in e.message + or "does not exist" in e.message + or "doesn't exist" in e.message + ): raise FolderMissingError(folder) - if 'Access denied' in e.message: + if "Access denied" in e.message: # TODO: This is not the best exception name, but it does the # expected thing here: We stop syncing the folder (but would # attempt selecting the folder again later). @@ -387,11 +406,10 @@ def select_folder(self, folder, uidvalidity_cb): # being deleted, as other connection errors could occur - but we # want to make sure we keep track of different providers' # "nonexistent" messages, so log this event. - log.error("IMAPClient error selecting folder. May be deleted", - error=str(e)) + log.error("IMAPClient error selecting folder. May be deleted", error=str(e)) raise - select_info['UIDVALIDITY'] = long(select_info['UIDVALIDITY']) + select_info["UIDVALIDITY"] = long(select_info["UIDVALIDITY"]) self.selected_folder = (folder, select_info) # Don't propagate cached information from previous session self._folder_names = None @@ -407,11 +425,11 @@ def selected_folder_info(self): @property def selected_uidvalidity(self): - return or_none(self.selected_folder_info, lambda i: i['UIDVALIDITY']) + return or_none(self.selected_folder_info, lambda i: i["UIDVALIDITY"]) @property def selected_uidnext(self): - return or_none(self.selected_folder_info, lambda i: i.get('UIDNEXT')) + return or_none(self.selected_folder_info, lambda i: i.get("UIDNEXT")) @property def folder_separator(self): @@ -419,7 +437,7 @@ def folder_separator(self): folders_list = self.conn.list_folders() if len(folders_list) == 0: - return '.' + return "." return folders_list[0][1] @@ -427,11 +445,11 @@ def folder_separator(self): def folder_prefix(self): # Unfortunately, some servers don't support the NAMESPACE command. # In this case, assume that there's no folder prefix. - if self.conn.has_capability('NAMESPACE'): + if self.conn.has_capability("NAMESPACE"): folder_prefix, folder_separator = self.conn.namespace()[0][0] return folder_prefix else: - return '' + return "" def sync_folders(self): """ @@ -449,15 +467,15 @@ def sync_folders(self): to_sync = [] have_folders = self.folder_names() - assert 'inbox' in have_folders, \ - "Missing required 'inbox' folder for account_id: {}".\ - format(self.account_id) + assert ( + "inbox" in have_folders + ), "Missing required 'inbox' folder for account_id: {}".format(self.account_id) # Sync inbox folder first, then sent, then others. 
- to_sync = have_folders['inbox'] - to_sync.extend(have_folders.get('sent', [])) + to_sync = have_folders["inbox"] + to_sync.extend(have_folders.get("sent", [])) for role, folder_names in have_folders.items(): - if role == 'inbox' or role == 'sent': + if role == "inbox" or role == "sent": continue to_sync.extend(folder_names) @@ -505,12 +523,15 @@ def folders(self): raw_folders = [] # Folders that provide basic functionality of email - system_role_names = ['inbox', 'sent', 'trash', 'spam'] + system_role_names = ["inbox", "sent", "trash", "spam"] folders = self._fetch_folder_list() for flags, delimiter, name in folders: - if u'\\Noselect' in flags or u'\\NoSelect' in flags \ - or u'\\NonExistent' in flags: + if ( + u"\\Noselect" in flags + or u"\\NoSelect" in flags + or u"\\NonExistent" in flags + ): # Special folders that can't contain messages continue @@ -518,22 +539,22 @@ def folders(self): raw_folders.append(raw_folder) # Check to see if we have to guess the roles for any system role - missing_roles = self._get_missing_roles(raw_folders, - system_role_names) - guessed_roles = [self._guess_role(folder.display_name) - for folder in raw_folders] + missing_roles = self._get_missing_roles(raw_folders, system_role_names) + guessed_roles = [ + self._guess_role(folder.display_name) for folder in raw_folders + ] for role in missing_roles: if guessed_roles.count(role) == 1: guess_index = guessed_roles.index(role) raw_folders[guess_index] = RawFolder( - display_name=raw_folders[guess_index].display_name, - role=role) + display_name=raw_folders[guess_index].display_name, role=role + ) return raw_folders def _get_missing_roles(self, folders, roles): - ''' + """ Given a list of folders, and a list of roles, returns a list a list of roles that did not appear in the list of folders @@ -543,7 +564,7 @@ def _get_missing_roles(self, folders, roles): Returns: a list of roles that did not appear as a role in folders - ''' + """ assert len(folders) > 0 assert len(roles) > 0 @@ -557,7 +578,7 @@ def _get_missing_roles(self, folders, roles): return missing_roles.keys() def _guess_role(self, folder): - ''' + """ Given a folder, guess the system role that corresponds to that folder Parameters: @@ -565,7 +586,7 @@ def _guess_role(self, folder): Returns: string representing role that most likely correpsonds to folder - ''' + """ # localized_folder_names is an external map of folders we have seen # in the wild with implicit roles that we were unable to determine # because they had missing flags. We've manually gone through the @@ -590,25 +611,32 @@ def _process_folder(self, display_name, flags): # we have a default map for common name mapping, additional # mappings can be provided via the provider configuration file default_folder_map = { - 'inbox': 'inbox', - 'drafts': 'drafts', - 'draft': 'drafts', - u'entw\xfcrfe': 'drafts', - 'junk': 'spam', - 'spam': 'spam', - 'archive': 'archive', - 'sent': 'sent', - 'sent items': 'sent', - 'trash': 'trash'} + "inbox": "inbox", + "drafts": "drafts", + "draft": "drafts", + u"entw\xfcrfe": "drafts", + "junk": "spam", + "spam": "spam", + "archive": "archive", + "sent": "sent", + "sent items": "sent", + "trash": "trash", + } # Additionally we provide a custom mapping for providers that # don't fit into the defaults. 
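A compact sketch of how these layered mappings can resolve a folder's role. The exact precedence lives in _process_folder below; this helper and its argument names are illustrative only.

def resolve_role(display_name, flags, provider_map, default_map, flag_map):
    # Common display names first, then per-provider overrides from the
    # configuration file, then the special-use flags some servers attach.
    role = default_map.get(display_name.lower())
    if role is None:
        role = provider_map.get(display_name.lower())
    if role is None:
        for flag in flags:
            if flag in flag_map:
                return flag_map[flag]
    return role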
- folder_map = self.provider_info.get('folder_map', {}) + folder_map = self.provider_info.get("folder_map", {}) # Some providers also provide flags to determine common folders # Here we read these flags and apply the mapping - flag_map = {'\\Trash': 'trash', '\\Sent': 'sent', '\\Drafts': 'drafts', - '\\Junk': 'spam', '\\Inbox': 'inbox', '\\Spam': 'spam'} + flag_map = { + "\\Trash": "trash", + "\\Sent": "sent", + "\\Drafts": "drafts", + "\\Junk": "spam", + "\\Inbox": "inbox", + "\\Spam": "spam", + } role = default_folder_map.get(display_name.lower()) @@ -630,10 +658,10 @@ def condstore_supported(self): # Technically QRESYNC implies CONDSTORE, although this is unlikely to # matter in practice. capabilities = self.conn.capabilities() - return 'CONDSTORE' in capabilities or 'QRESYNC' in capabilities + return "CONDSTORE" in capabilities or "QRESYNC" in capabilities def idle_supported(self): - return 'IDLE' in self.conn.capabilities() + return "IDLE" in self.conn.capabilities() def search_uids(self, criteria): """ @@ -661,29 +689,35 @@ def all_uids(self): try: t = time.time() - fetch_result = self.conn.search(['ALL']) + fetch_result = self.conn.search(["ALL"]) except imaplib.IMAP4.error as e: - if e.message.find('UID SEARCH wrong arguments passed') >= 0: + if e.message.find("UID SEARCH wrong arguments passed") >= 0: # Search query must not have parentheses for Mail2World servers - log.debug("Getting UIDs failed when using 'UID SEARCH " - "(ALL)'. Switching to alternative 'UID SEARCH " - "ALL", exception=e) + log.debug( + "Getting UIDs failed when using 'UID SEARCH " + "(ALL)'. Switching to alternative 'UID SEARCH " + "ALL", + exception=e, + ) t = time.time() - fetch_result = self.conn._search(['ALL'], None) - elif e.message.find('UID SEARCH failed: Internal error') >= 0: + fetch_result = self.conn._search(["ALL"], None) + elif e.message.find("UID SEARCH failed: Internal error") >= 0: # Oracle Beehive fails for some folders - log.debug("Getting UIDs failed when using 'UID SEARCH " - "ALL'. Switching to alternative 'UID SEARCH " - "1:*", exception=e) + log.debug( + "Getting UIDs failed when using 'UID SEARCH " + "ALL'. Switching to alternative 'UID SEARCH " + "1:*", + exception=e, + ) t = time.time() - fetch_result = self.conn.search(['1:*']) + fetch_result = self.conn.search(["1:*"]) else: raise elapsed = time.time() - t - log.debug('Requested all UIDs', - search_time=elapsed, - total_uids=len(fetch_result)) + log.debug( + "Requested all UIDs", search_time=elapsed, total_uids=len(fetch_result) + ) return sorted([long(uid) for uid in fetch_result]) def uids(self, uids): @@ -698,22 +732,29 @@ def uids(self, uids): # back at the first try. 
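The retry behaviour described in that comment reduces to a small loop; sketched here with a generic fetch callable instead of the IMAPClient connection, and a made-up helper name.

def fetch_with_retries(fetch, uid, attempts=3):
    # Flaky servers sometimes return nothing for a UID on the first try;
    # a couple of retries usually brings the message back.
    for _ in range(attempts):
        result = fetch(uid)
        if uid in result:
            return result[uid]
    return None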
for n in range(3): result = self.conn.fetch( - uid, ['BODY.PEEK[]', 'INTERNALDATE', 'FLAGS']) + uid, ["BODY.PEEK[]", "INTERNALDATE", "FLAGS"] + ) if uid in result: raw_messages[uid] = result[uid] break except imapclient.IMAPClient.Error as e: - if ('[UNAVAILABLE] UID FETCH Server error ' - 'while fetching messages') in str(e): - log.info('Got an exception while requesting an UID', - uid=uid, error=e, - logstash_tag='imap_download_exception') + if ( + "[UNAVAILABLE] UID FETCH Server error " "while fetching messages" + ) in str(e): + log.info( + "Got an exception while requesting an UID", + uid=uid, + error=e, + logstash_tag="imap_download_exception", + ) continue else: - log.info(('Got an unhandled exception while ' - 'requesting an UID'), - uid=uid, error=e, - logstash_tag='imap_download_exception') + log.info( + ("Got an unhandled exception while " "requesting an UID"), + uid=uid, + error=e, + logstash_tag="imap_download_exception", + ) raise for uid in sorted(raw_messages.iterkeys(), key=long): @@ -721,18 +762,23 @@ def uids(self, uids): if uid not in uid_set: continue msg = raw_messages[uid] - if msg.keys() == ['SEQ']: - log.error('No data returned for UID, skipping', uid=uid) + if msg.keys() == ["SEQ"]: + log.error("No data returned for UID, skipping", uid=uid) continue - messages.append(RawMessage(uid=long(uid), - internaldate=msg['INTERNALDATE'], - flags=msg['FLAGS'], - body=msg['BODY[]'], - # TODO: use data structure that isn't - # Gmail-specific - g_thrid=None, g_msgid=None, - g_labels=None)) + messages.append( + RawMessage( + uid=long(uid), + internaldate=msg["INTERNALDATE"], + flags=msg["FLAGS"], + body=msg["BODY[]"], + # TODO: use data structure that isn't + # Gmail-specific + g_thrid=None, + g_msgid=None, + g_labels=None, + ) + ) return messages def flags(self, uids): @@ -740,13 +786,16 @@ def flags(self, uids): # Some backends abort the connection if you give them a really # long sequence set of individual UIDs, so instead fetch flags for # all UIDs greater than or equal to min(uids). 
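Sketch of the widen-then-filter idea from that comment, using a plain dict in place of a FETCH response; the threshold and helper names are illustrative, not the actual constants.

def flags_seqset(uids, threshold=100):
    # Ask for an open-ended range when the UID list is long, so the request
    # stays short; the response is filtered back down afterwards.
    return "{}:*".format(min(uids)) if len(uids) > threshold else uids

def filter_to_requested(fetch_result, uids):
    wanted = set(uids)
    return {uid: ret for uid, ret in fetch_result.items() if uid in wanted}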
- seqset = '{}:*'.format(min(uids)) + seqset = "{}:*".format(min(uids)) else: seqset = uids - data = self.conn.fetch(seqset, ['FLAGS']) + data = self.conn.fetch(seqset, ["FLAGS"]) uid_set = set(uids) - return {uid: Flags(ret['FLAGS'], None) - for uid, ret in data.items() if uid in uid_set} + return { + uid: Flags(ret["FLAGS"], None) + for uid, ret in data.items() + if uid in uid_set + } def delete_uids(self, uids): uids = [str(u) for u in uids] @@ -755,24 +804,25 @@ def delete_uids(self, uids): def set_starred(self, uids, starred): if starred: - self.conn.add_flags(uids, ['\\Flagged'], silent=True) + self.conn.add_flags(uids, ["\\Flagged"], silent=True) else: - self.conn.remove_flags(uids, ['\\Flagged'], silent=True) + self.conn.remove_flags(uids, ["\\Flagged"], silent=True) def set_unread(self, uids, unread): uids = [str(u) for u in uids] if unread: - self.conn.remove_flags(uids, ['\\Seen'], silent=True) + self.conn.remove_flags(uids, ["\\Seen"], silent=True) else: - self.conn.add_flags(uids, ['\\Seen'], silent=True) + self.conn.add_flags(uids, ["\\Seen"], silent=True) def save_draft(self, message, date=None): - assert self.selected_folder_name in self.folder_names()['drafts'], \ - 'Must select a drafts folder first ({0})'.\ - format(self.selected_folder_name) + assert ( + self.selected_folder_name in self.folder_names()["drafts"] + ), "Must select a drafts folder first ({0})".format(self.selected_folder_name) - self.conn.append(self.selected_folder_name, message, ['\\Draft', - '\\Seen'], date) + self.conn.append( + self.selected_folder_name, message, ["\\Draft", "\\Seen"], date + ) def create_message(self, message, date=None): """ @@ -780,12 +830,11 @@ def create_message(self, message, date=None): like iCloud not saving Sent messages. """ - assert self.selected_folder_name in self.folder_names()['sent'], \ - 'Must select sent folder first ({0})'.\ - format(self.selected_folder_name) + assert ( + self.selected_folder_name in self.folder_names()["sent"] + ), "Must select sent folder first ({0})".format(self.selected_folder_name) - return self.conn.append(self.selected_folder_name, message, ['\\Seen'], - date) + return self.conn.append(self.selected_folder_name, message, ["\\Seen"], date) def fetch_headers(self, uids): """ @@ -796,8 +845,7 @@ def fetch_headers(self, uids): """ headers = {} for uid_chunk in chunk(uids, 100): - headers.update(self.conn.fetch( - uid_chunk, ['BODY.PEEK[HEADER]'])) + headers.update(self.conn.fetch(uid_chunk, ["BODY.PEEK[HEADER]"])) return headers def find_by_header(self, header_name, header_value): @@ -812,7 +860,7 @@ def find_by_header(self, header_name, header_value): matching_draft_headers = self.fetch_headers(all_uids) results = [] for uid, response in matching_draft_headers.iteritems(): - headers = response['BODY[HEADER]'] + headers = response["BODY[HEADER]"] parser = HeaderParser() header = parser.parsestr(headers).get(header_name) if header == header_value: @@ -829,13 +877,12 @@ def delete_sent_message(self, message_id_header, delete_multiple=False): Leaves the Trash folder selected at the end of the method. 
""" - log.info('Trying to delete sent message', - message_id_header=message_id_header) - sent_folder_name = self.folder_names()['sent'][0] + log.info("Trying to delete sent message", message_id_header=message_id_header) + sent_folder_name = self.folder_names()["sent"][0] self.conn.select_folder(sent_folder_name) msg_deleted = self._delete_message(message_id_header, delete_multiple) if msg_deleted: - trash_folder_name = self.folder_names()['trash'][0] + trash_folder_name = self.folder_names()["trash"][0] self.conn.select_folder(trash_folder_name) self._delete_message(message_id_header, delete_multiple) return msg_deleted @@ -849,13 +896,16 @@ def delete_draft(self, message_id_header): Leaves the Trash folder selected at the end of the method. """ - drafts_folder_name = self.folder_names()['drafts'][0] - log.info('Trying to delete draft', - message_id_header=message_id_header, folder=drafts_folder_name) + drafts_folder_name = self.folder_names()["drafts"][0] + log.info( + "Trying to delete draft", + message_id_header=message_id_header, + folder=drafts_folder_name, + ) self.conn.select_folder(drafts_folder_name) draft_deleted = self._delete_message(message_id_header) if draft_deleted: - trash_folder_name = self.folder_names()['trash'][0] + trash_folder_name = self.folder_names()["trash"][0] self.conn.select_folder(trash_folder_name) self._delete_message(message_id_header) return draft_deleted @@ -867,15 +917,19 @@ def _delete_message(self, message_id_header, delete_multiple=False): more than one matching message is found. """ - matching_uids = self.find_by_header('Message-Id', message_id_header) + matching_uids = self.find_by_header("Message-Id", message_id_header) if not matching_uids: - log.error('No remote messages found to delete', - message_id_header=message_id_header) + log.error( + "No remote messages found to delete", + message_id_header=message_id_header, + ) return False if len(matching_uids) > 1 and not delete_multiple: - log.error('Multiple remote messages found to delete', - message_id_header=message_id_header, - uids=matching_uids) + log.error( + "Multiple remote messages found to delete", + message_id_header=message_id_header, + uids=matching_uids, + ) return False self.conn.delete_messages(matching_uids, silent=True) self.conn.expunge() @@ -898,15 +952,17 @@ def idle(self, timeout): return r def condstore_changed_flags(self, modseq): - data = self.conn.fetch('1:*', ['FLAGS'], - modifiers=['CHANGEDSINCE {}'.format(modseq)]) - return {uid: Flags(ret['FLAGS'], ret['MODSEQ'][0] - if 'MODSEQ' in ret else None) - for uid, ret in data.items()} + data = self.conn.fetch( + "1:*", ["FLAGS"], modifiers=["CHANGEDSINCE {}".format(modseq)] + ) + return { + uid: Flags(ret["FLAGS"], ret["MODSEQ"][0] if "MODSEQ" in ret else None) + for uid, ret in data.items() + } class GmailCrispinClient(CrispinClient): - PROVIDER = 'gmail' + PROVIDER = "gmail" def sync_folders(self): """ @@ -924,17 +980,17 @@ def sync_folders(self): """ present_folders = self.folder_names() - if 'all' not in present_folders: + if "all" not in present_folders: raise GmailSettingError( "Account {} is missing the 'All Mail' folder. This is " "probably due to 'Show in IMAP' being disabled. " "See https://support.nylas.com/hc/en-us/articles/217562277 " - "for more details." - .format(self.email_address)) + "for more details.".format(self.email_address) + ) # If the account has Trash, Spam folders, sync those too. 
to_sync = [] - for folder in ['all', 'trash', 'spam']: + for folder in ["all", "trash", "spam"]: if folder in present_folders: to_sync.append(present_folders[folder][0]) return to_sync @@ -949,31 +1005,40 @@ def flags(self, uids): Mapping of `uid` : GmailFlags. """ - data = self.conn.fetch(uids, ['FLAGS', 'X-GM-LABELS']) + data = self.conn.fetch(uids, ["FLAGS", "X-GM-LABELS"]) uid_set = set(uids) - return {uid: GmailFlags(ret['FLAGS'], - self._decode_labels(ret['X-GM-LABELS']), - ret['MODSEQ'][0] if 'MODSEQ' in ret else None) - for uid, ret in data.items() if uid in uid_set} + return { + uid: GmailFlags( + ret["FLAGS"], + self._decode_labels(ret["X-GM-LABELS"]), + ret["MODSEQ"][0] if "MODSEQ" in ret else None, + ) + for uid, ret in data.items() + if uid in uid_set + } def condstore_changed_flags(self, modseq): - data = self.conn.fetch('1:*', ['FLAGS', 'X-GM-LABELS'], - modifiers=['CHANGEDSINCE {}'.format(modseq)]) + data = self.conn.fetch( + "1:*", + ["FLAGS", "X-GM-LABELS"], + modifiers=["CHANGEDSINCE {}".format(modseq)], + ) results = {} for uid, ret in data.items(): - if 'FLAGS' not in ret or 'X-GM-LABELS' not in ret: + if "FLAGS" not in ret or "X-GM-LABELS" not in ret: # We might have gotten an unsolicited fetch response that # doesn't have all the data we asked for -- if so, explicitly # fetch flags and labels for that UID. - log.info('Got incomplete response in flags fetch', uid=uid, - ret=str(ret)) - data_for_uid = self.conn.fetch(uid, ['FLAGS', 'X-GM-LABELS']) + log.info( + "Got incomplete response in flags fetch", uid=uid, ret=str(ret) + ) + data_for_uid = self.conn.fetch(uid, ["FLAGS", "X-GM-LABELS"]) if not data_for_uid: continue ret = data_for_uid[uid] - results[uid] = GmailFlags(ret['FLAGS'], - self._decode_labels(ret['X-GM-LABELS']), - ret['MODSEQ'][0]) + results[uid] = GmailFlags( + ret["FLAGS"], self._decode_labels(ret["X-GM-LABELS"]), ret["MODSEQ"][0] + ) return results def g_msgids(self, uids): @@ -986,10 +1051,9 @@ def g_msgids(self, uids): Mapping of `uid` (long) : `g_msgid` (long) """ - data = self.conn.fetch(uids, ['X-GM-MSGID']) + data = self.conn.fetch(uids, ["X-GM-MSGID"]) uid_set = set(uids) - return {uid: ret['X-GM-MSGID'] - for uid, ret in data.items() if uid in uid_set} + return {uid: ret["X-GM-MSGID"] for uid, ret in data.items() if uid in uid_set} def g_msgid_to_uids(self, g_msgid): """ @@ -1000,8 +1064,7 @@ def g_msgid_to_uids(self, g_msgid): ------- list """ - uids = [long(uid) for uid in - self.conn.search(['X-GM-MSGID', g_msgid])] + uids = [long(uid) for uid in self.conn.search(["X-GM-MSGID", g_msgid])] # UIDs ascend over time; return in order most-recent first return sorted(uids, reverse=True) @@ -1045,20 +1108,25 @@ def _process_folder(self, display_name, flags): RawFolder representing the folder """ - flag_map = {'\\Drafts': 'drafts', '\\Important': 'important', - '\\Sent': 'sent', '\\Junk': 'spam', '\\Flagged': 'starred', - '\\Trash': 'trash'} + flag_map = { + "\\Drafts": "drafts", + "\\Important": "important", + "\\Sent": "sent", + "\\Junk": "spam", + "\\Flagged": "starred", + "\\Trash": "trash", + } role = None - if '\\All' in flags: - role = 'all' - elif display_name.lower() == 'inbox': + if "\\All" in flags: + role = "all" + elif display_name.lower() == "inbox": # Special-case the display name here. In Gmail, the inbox # folder shows up in the folder list as 'INBOX', and in sync as # the label '\\Inbox'. We're just always going to give it the # display name 'Inbox'. 
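The same role resolution, pulled out as a standalone sketch; the function name is illustrative and flag_map is the mapping shown above.

def gmail_folder_role(display_name, flags, flag_map):
    # Gmail's "All Mail" is identified by the \All flag, the inbox by name,
    # and everything else by the system flags in flag_map (or no role at all).
    if "\\All" in flags:
        return "all"
    if display_name.lower() == "inbox":
        return "inbox"
    for flag in flags:
        if flag in flag_map:
            return flag_map[flag]
    return None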
- role = 'inbox' - display_name = 'Inbox' + role = "inbox" + display_name = "Inbox" else: for flag in flags: if flag in flag_map: @@ -1068,9 +1136,17 @@ def _process_folder(self, display_name, flags): return RawFolder(display_name=display_name, role=role) def uids(self, uids): - raw_messages = self.conn.fetch(uids, ['BODY.PEEK[]', 'INTERNALDATE', - 'FLAGS', 'X-GM-THRID', - 'X-GM-MSGID', 'X-GM-LABELS']) + raw_messages = self.conn.fetch( + uids, + [ + "BODY.PEEK[]", + "INTERNALDATE", + "FLAGS", + "X-GM-THRID", + "X-GM-MSGID", + "X-GM-LABELS", + ], + ) messages = [] uid_set = set(uids) @@ -1080,13 +1156,16 @@ def uids(self, uids): continue msg = raw_messages[uid] messages.append( - RawMessage(uid=long(uid), - internaldate=msg['INTERNALDATE'], - flags=msg['FLAGS'], - body=msg['BODY[]'], - g_thrid=long(msg['X-GM-THRID']), - g_msgid=long(msg['X-GM-MSGID']), - g_labels=self._decode_labels(msg['X-GM-LABELS']))) + RawMessage( + uid=long(uid), + internaldate=msg["INTERNALDATE"], + flags=msg["FLAGS"], + body=msg["BODY[]"], + g_thrid=long(msg["X-GM-THRID"]), + g_msgid=long(msg["X-GM-MSGID"]), + g_labels=self._decode_labels(msg["X-GM-LABELS"]), + ) + ) return messages def g_metadata(self, uids): @@ -1105,13 +1184,14 @@ def g_metadata(self, uids): """ # Super long sets of uids may fail with BAD ['Could not parse command'] # In that case, just fetch metadata for /all/ uids. - seqset = uids if len(uids) < 1e6 else '1:*' - data = self.conn.fetch(seqset, ['X-GM-MSGID', 'X-GM-THRID', - 'RFC822.SIZE']) + seqset = uids if len(uids) < 1e6 else "1:*" + data = self.conn.fetch(seqset, ["X-GM-MSGID", "X-GM-THRID", "RFC822.SIZE"]) uid_set = set(uids) - return {uid: GMetadata(ret['X-GM-MSGID'], ret['X-GM-THRID'], - ret['RFC822.SIZE']) - for uid, ret in data.items() if uid in uid_set} + return { + uid: GMetadata(ret["X-GM-MSGID"], ret["X-GM-THRID"], ret["RFC822.SIZE"]) + for uid, ret in data.items() + if uid in uid_set + } def expand_thread(self, g_thrid): """ @@ -1122,13 +1202,12 @@ def expand_thread(self, g_thrid): ------- list """ - uids = [long(uid) for uid in - self.conn.search(['X-GM-THRID', g_thrid])] + uids = [long(uid) for uid in self.conn.search(["X-GM-THRID", g_thrid])] # UIDs ascend over time; return in order most-recent first return sorted(uids, reverse=True) def find_by_header(self, header_name, header_value): - return self.conn.search(['HEADER', header_name, header_value]) + return self.conn.search(["HEADER", header_name, header_value]) def _decode_labels(self, labels): return map(imapclient.imap_utf7.decode, labels) @@ -1146,11 +1225,10 @@ def delete_draft(self, message_id_header): Leaves the Trash folder selected at the end of the method. 
""" - log.info('Trying to delete gmail draft', - message_id_header=message_id_header) - drafts_folder_name = self.folder_names()['drafts'][0] - trash_folder_name = self.folder_names()['trash'][0] - sent_folder_name = self.folder_names()['sent'][0] + log.info("Trying to delete gmail draft", message_id_header=message_id_header) + drafts_folder_name = self.folder_names()["drafts"][0] + trash_folder_name = self.folder_names()["trash"][0] + sent_folder_name = self.folder_names()["sent"][0] # There's a race condition in how Gmail reconciles sent messages # which sometimes causes us to delete both the sent and draft @@ -1161,34 +1239,33 @@ def delete_draft(self, message_id_header): # First find the message in the sent folder self.conn.select_folder(sent_folder_name) - matching_uids = self.find_by_header('Message-Id', message_id_header) + matching_uids = self.find_by_header("Message-Id", message_id_header) if len(matching_uids) == 0: - raise DraftDeletionException( - "Couldn't find sent message in sent folder.") + raise DraftDeletionException("Couldn't find sent message in sent folder.") sent_gm_msgids = self.g_msgids(matching_uids) if len(sent_gm_msgids) != 1: - raise DraftDeletionException( - "Only one message should have this msgid") + raise DraftDeletionException("Only one message should have this msgid") # Then find the draft in the draft folder self.conn.select_folder(drafts_folder_name) - matching_uids = self.find_by_header('Message-Id', message_id_header) + matching_uids = self.find_by_header("Message-Id", message_id_header) if not matching_uids: return False # Make sure to remove the \\Draft flags so that Gmail removes it from # the draft folder. - self.conn.remove_flags(matching_uids, ['\\Draft']) - self.conn.remove_gmail_labels(matching_uids, ['\\Draft']) + self.conn.remove_flags(matching_uids, ["\\Draft"]) + self.conn.remove_gmail_labels(matching_uids, ["\\Draft"]) gm_msgids = self.g_msgids(matching_uids) for msgid in gm_msgids.values(): if msgid == sent_gm_msgids.values()[0]: raise DraftDeletionException( "Send and draft should have been reconciled as " - "different messages.") + "different messages." + ) self.conn.copy(matching_uids, trash_folder_name) self.conn.select_folder(trash_folder_name) @@ -1213,13 +1290,12 @@ def delete_sent_message(self, message_id_header, delete_multiple=False): Leaves the Trash folder selected at the end of the method. """ - log.info('Trying to delete sent message', - message_id_header=message_id_header) - sent_folder_name = self.folder_names()['sent'][0] - trash_folder_name = self.folder_names()['trash'][0] + log.info("Trying to delete sent message", message_id_header=message_id_header) + sent_folder_name = self.folder_names()["sent"][0] + trash_folder_name = self.folder_names()["trash"][0] # First find the message in Sent self.conn.select_folder(sent_folder_name) - matching_uids = self.find_by_header('Message-Id', message_id_header) + matching_uids = self.find_by_header("Message-Id", message_id_header) if not matching_uids: return False diff --git a/inbox/events/__init__.py b/inbox/events/__init__.py index b88765afd..1e6dbf001 100644 --- a/inbox/events/__init__.py +++ b/inbox/events/__init__.py @@ -1,3 +1,4 @@ # Allow out-of-tree backend submodules. 
from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) diff --git a/inbox/events/actions/backends/__init__.py b/inbox/events/actions/backends/__init__.py index e1f148b21..77ca7e7c9 100644 --- a/inbox/events/actions/backends/__init__.py +++ b/inbox/events/actions/backends/__init__.py @@ -1,5 +1,7 @@ # Allow out-of-tree action submodules. from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) from inbox.util.misc import register_backends + module_registry = register_backends(__name__, __path__) diff --git a/inbox/events/actions/backends/gmail.py b/inbox/events/actions/backends/gmail.py index e63d1f7bc..1e758e78d 100644 --- a/inbox/events/actions/backends/gmail.py +++ b/inbox/events/actions/backends/gmail.py @@ -2,9 +2,9 @@ from inbox.events.google import GoogleEventsProvider -PROVIDER = 'gmail' +PROVIDER = "gmail" -__all__ = ['remote_create_event', 'remote_update_event', 'remote_delete_event'] +__all__ = ["remote_create_event", "remote_update_event", "remote_delete_event"] def remote_create_event(account, event, db_session, extra_args): @@ -12,7 +12,7 @@ def remote_create_event(account, event, db_session, extra_args): result = provider.create_remote_event(event, **extra_args) # The events crud API assigns a random uid to an event when creating it. # We need to update it to the value returned by the Google calendar API. - event.uid = result['id'] + event.uid = result["id"] db_session.commit() @@ -21,7 +21,8 @@ def remote_update_event(account, event, db_session, extra_args): provider.update_remote_event(event, **extra_args) -def remote_delete_event(account, event_uid, calendar_name, calendar_uid, - db_session, extra_args): +def remote_delete_event( + account, event_uid, calendar_name, calendar_uid, db_session, extra_args +): provider = GoogleEventsProvider(account.id, account.namespace.id) provider.delete_remote_event(calendar_uid, event_uid, **extra_args) diff --git a/inbox/events/actions/base.py b/inbox/events/actions/base.py index 3d79472f6..9f50debc0 100644 --- a/inbox/events/actions/base.py +++ b/inbox/events/actions/base.py @@ -2,36 +2,36 @@ from inbox.models.event import Event from inbox.models.session import session_scope from inbox.events.actions.backends import module_registry -from inbox.events.ical import (generate_icalendar_invite, send_invite) +from inbox.events.ical import generate_icalendar_invite, send_invite def create_event(account_id, event_id, extra_args): with session_scope(account_id) as db_session: account = db_session.query(Account).get(account_id) event = db_session.query(Event).get(event_id) - remote_create_event = module_registry[account.provider]. \ - remote_create_event + remote_create_event = module_registry[account.provider].remote_create_event remote_create_event(account, event, db_session, extra_args) - notify_participants = extra_args.get('notify_participants', False) - cancelled_participants = extra_args.get('cancelled_participants', []) + notify_participants = extra_args.get("notify_participants", False) + cancelled_participants = extra_args.get("cancelled_participants", []) # Do we need to send an RSVP message? # We use gmail's sendNotification API for google accounts. # but we need create and send an iCalendar invite ourselves # for non-gmail accounts. 
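register_backends() (imported above) builds module_registry by scanning the backends package; roughly, the idea is the sketch below, keyed on each module's PROVIDER constant. This is a simplified reconstruction for illustration, not the real helper.

import importlib
import pkgutil

def build_registry(package_name, package_path):
    # Import every submodule in the backends package and index it by the
    # PROVIDER name it declares, e.g. {"gmail": <gmail backend module>}.
    registry = {}
    for _, modname, _ in pkgutil.iter_modules(package_path):
        module = importlib.import_module(package_name + "." + modname)
        provider = getattr(module, "PROVIDER", None)
        if provider is not None:
            registry[provider] = module
    return registry

# Dispatch then looks like the call used below:
#   module_registry[account.provider].remote_create_event(account, event, db_session, extra_args)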
- if notify_participants and account.provider != 'gmail': + if notify_participants and account.provider != "gmail": ical_file = generate_icalendar_invite(event).to_ical() - send_invite(ical_file, event, account, invite_type='request') + send_invite(ical_file, event, account, invite_type="request") if cancelled_participants != []: # Some people got removed from the event. Send them a # cancellation email. - event.status = 'cancelled' + event.status = "cancelled" event.participants = cancelled_participants - ical_file = generate_icalendar_invite(event, - invite_type='cancel').to_ical() - send_invite(ical_file, event, account, invite_type='cancel') + ical_file = generate_icalendar_invite( + event, invite_type="cancel" + ).to_ical() + send_invite(ical_file, event, account, invite_type="cancel") def update_event(account_id, event_id, extra_args): @@ -40,10 +40,10 @@ def update_event(account_id, event_id, extra_args): event = db_session.query(Event).get(event_id) # Update our copy of the event before sending it. - if 'event_data' in extra_args: - data = extra_args['event_data'] + if "event_data" in extra_args: + data = extra_args["event_data"] for attr in Event.API_MODIFIABLE_FIELDS: - if attr in extra_args['event_data']: + if attr in extra_args["event_data"]: setattr(event, attr, data[attr]) event.sequence_number += 1 @@ -53,16 +53,15 @@ def update_event(account_id, event_id, extra_args): if event.calendar == account.emailed_events_calendar: return - remote_update_event = module_registry[account.provider]. \ - remote_update_event + remote_update_event = module_registry[account.provider].remote_update_event remote_update_event(account, event, db_session, extra_args) - notify_participants = extra_args.get('notify_participants', False) + notify_participants = extra_args.get("notify_participants", False) - if notify_participants and account.provider != 'gmail': + if notify_participants and account.provider != "gmail": ical_file = generate_icalendar_invite(event).to_ical() - send_invite(ical_file, event, account, invite_type='update') + send_invite(ical_file, event, account, invite_type="update") db_session.commit() @@ -71,29 +70,28 @@ def delete_event(account_id, event_id, extra_args): with session_scope(account_id) as db_session: account = db_session.query(Account).get(account_id) event = db_session.query(Event).get(event_id) - notify_participants = extra_args.get('notify_participants', False) + notify_participants = extra_args.get("notify_participants", False) - remote_delete_event = module_registry[account.provider]. \ - remote_delete_event - event_uid = extra_args.pop('event_uid', None) - calendar_name = extra_args.pop('calendar_name', None) + remote_delete_event = module_registry[account.provider].remote_delete_event + event_uid = extra_args.pop("event_uid", None) + calendar_name = extra_args.pop("calendar_name", None) # The calendar_uid argument is required for some providers, like EAS. - calendar_uid = extra_args.pop('calendar_uid', None) + calendar_uid = extra_args.pop("calendar_uid", None) if event.calendar == account.emailed_events_calendar: return - remote_delete_event(account, event_uid, calendar_name, calendar_uid, - db_session, extra_args) + remote_delete_event( + account, event_uid, calendar_name, calendar_uid, db_session, extra_args + ) # Finally, update the event. 
event.sequence_number += 1 - event.status = 'cancelled' + event.status = "cancelled" db_session.commit() - if notify_participants and account.provider != 'gmail': - ical_file = generate_icalendar_invite(event, - invite_type='cancel').to_ical() + if notify_participants and account.provider != "gmail": + ical_file = generate_icalendar_invite(event, invite_type="cancel").to_ical() - send_invite(ical_file, event, account, invite_type='cancel') + send_invite(ical_file, event, account, invite_type="cancel") diff --git a/inbox/events/google.py b/inbox/events/google.py index 502e1265f..a93b12cbd 100644 --- a/inbox/events/google.py +++ b/inbox/events/google.py @@ -10,6 +10,7 @@ import arrow from nylas.logging import get_logger + log = get_logger() from inbox.auth.oauth import OAuthRequestsWrapper from inbox.basicauth import AccessNotEnabledError @@ -18,24 +19,31 @@ from inbox.models.event import Event, EVENT_STATUSES from inbox.models.session import session_scope from inbox.models.backends.gmail import g_token_manager -from inbox.events.util import (google_to_event_time, parse_google_time, - parse_datetime, CalendarSyncResponse) +from inbox.events.util import ( + google_to_event_time, + parse_google_time, + parse_datetime, + CalendarSyncResponse, +) -CALENDARS_URL = 'https://www.googleapis.com/calendar/v3/users/me/calendarList' -STATUS_MAP = {'accepted': 'yes', 'needsAction': 'noreply', - 'declined': 'no', 'tentative': 'maybe'} +CALENDARS_URL = "https://www.googleapis.com/calendar/v3/users/me/calendarList" +STATUS_MAP = { + "accepted": "yes", + "needsAction": "noreply", + "declined": "no", + "tentative": "maybe", +} -URL_PREFIX = config.get('API_URL', 'https://api.nylas.com') +URL_PREFIX = config.get("API_URL", "https://api.nylas.com") -PUSH_ENABLED_CLIENT_IDS = config.get('PUSH_ENABLED_CLIENT_IDS', []) +PUSH_ENABLED_CLIENT_IDS = config.get("PUSH_ENABLED_CLIENT_IDS", []) -CALENDAR_LIST_WEBHOOK_URL = URL_PREFIX + '/w/calendar_list_update/{}' -EVENTS_LIST_WEHOOK_URL = URL_PREFIX + '/w/calendar_update/{}' +CALENDAR_LIST_WEBHOOK_URL = URL_PREFIX + "/w/calendar_list_update/{}" +EVENTS_LIST_WEHOOK_URL = URL_PREFIX + "/w/calendar_update/{}" -WATCH_CALENDARS_URL = CALENDARS_URL + '/watch' -WATCH_EVENTS_URL = \ - 'https://www.googleapis.com/calendar/v3/calendars/{}/events/watch' +WATCH_CALENDARS_URL = CALENDARS_URL + "/watch" +WATCH_EVENTS_URL = "https://www.googleapis.com/calendar/v3/calendars/{}/events/watch" class GoogleEventsProvider(object): @@ -47,7 +55,7 @@ class GoogleEventsProvider(object): def __init__(self, account_id, namespace_id): self.account_id = account_id self.namespace_id = namespace_id - self.log = log.new(account_id=account_id, component='calendar sync') + self.log = log.new(account_id=account_id, component="calendar sync") # A hash to store whether a calendar is read-only or not. 
# This is a bit of a hack because this isn't exposed at the event level @@ -65,11 +73,11 @@ def sync_calendars(self): updates = [] items = self._get_raw_calendars() for item in items: - if item.get('deleted'): - deletes.append(item['id']) + if item.get("deleted"): + deletes.append(item["id"]) else: cal = parse_calendar_response(item) - self.calendars_table[item['id']] = cal.read_only + self.calendars_table[item["id"]] = cal.read_only updates.append(cal) return CalendarSyncResponse(deletes, updates) @@ -100,8 +108,7 @@ def sync_events(self, calendar_uid, sync_from_time=None): parsed = parse_event_response(item, read_only_calendar) updates.append(parsed) except arrow.parser.ParserError: - log.warning('Skipping unparseable event', exc_info=True, - raw=item) + log.warning("Skipping unparseable event", exc_info=True, raw=item) return updates @@ -126,10 +133,11 @@ def _get_raw_events(self, calendar_uid, sync_from_time=None): """ if sync_from_time is not None: # Note explicit offset is required by Google calendar API. - sync_from_time = datetime.datetime.isoformat(sync_from_time) + 'Z' + sync_from_time = datetime.datetime.isoformat(sync_from_time) + "Z" - url = 'https://www.googleapis.com/calendar/v3/' \ - 'calendars/{}/events'.format(urllib.quote(calendar_uid)) + url = "https://www.googleapis.com/calendar/v3/" "calendars/{}/events".format( + urllib.quote(calendar_uid) + ) try: return self._get_resource_list(url, updatedMin=sync_from_time) except requests.exceptions.HTTPError as exc: @@ -147,88 +155,99 @@ def _get_access_token(self, force_refresh=False): # This will raise OAuthError if OAuth access was revoked. The # BaseSyncMonitor loop will catch this, clean up, and exit. return g_token_manager.get_token_for_calendars( - acc, force_refresh=force_refresh) + acc, force_refresh=force_refresh + ) def _get_resource_list(self, url, **params): """Handles response pagination.""" token = self._get_access_token() items = [] next_page_token = None - params['showDeleted'] = True + params["showDeleted"] = True while True: if next_page_token is not None: - params['pageToken'] = next_page_token + params["pageToken"] = next_page_token try: - r = requests.get(url, params=params, - auth=OAuthRequestsWrapper(token)) + r = requests.get(url, params=params, auth=OAuthRequestsWrapper(token)) r.raise_for_status() data = r.json() - items += data['items'] - next_page_token = data.get('nextPageToken') + items += data["items"] + next_page_token = data.get("nextPageToken") if next_page_token is None: return items except requests.exceptions.SSLError: self.log.warning( - 'SSLError making Google Calendar API request, retrying.', - url=url, exc_info=True) + "SSLError making Google Calendar API request, retrying.", + url=url, + exc_info=True, + ) gevent.sleep(30 + random.randrange(0, 60)) continue except requests.HTTPError: self.log.warning( - 'HTTP error making Google Calendar API request', url=r.url, - response=r.content, status=r.status_code) + "HTTP error making Google Calendar API request", + url=r.url, + response=r.content, + status=r.status_code, + ) if r.status_code == 401: self.log.warning( - 'Invalid access token; refreshing and retrying', - url=r.url, response=r.content, status=r.status_code) + "Invalid access token; refreshing and retrying", + url=r.url, + response=r.content, + status=r.status_code, + ) token = self._get_access_token(force_refresh=True) continue elif r.status_code in (500, 503): - log.warning('Backend error in calendar API; retrying') + log.warning("Backend error in calendar API; retrying") 
gevent.sleep(30 + random.randrange(0, 60)) continue elif r.status_code == 403: try: - reason = r.json()['error']['errors'][0]['reason'] + reason = r.json()["error"]["errors"][0]["reason"] except (KeyError, ValueError): - log.error("Couldn't parse API error response", - response=r.content, status=r.status_code) + log.error( + "Couldn't parse API error response", + response=r.content, + status=r.status_code, + ) r.raise_for_status() - if reason == 'userRateLimitExceeded': - log.warning('API request was rate-limited; retrying') + if reason == "userRateLimitExceeded": + log.warning("API request was rate-limited; retrying") gevent.sleep(30 + random.randrange(0, 60)) continue - elif reason == 'accessNotConfigured': - log.warning('API not enabled; returning empty result') + elif reason == "accessNotConfigured": + log.warning("API not enabled; returning empty result") raise AccessNotEnabledError() # Unexpected error; raise. raise - def _make_event_request(self, method, calendar_uid, event_uid=None, - **kwargs): + def _make_event_request(self, method, calendar_uid, event_uid=None, **kwargs): """ Makes a POST/PUT/DELETE request for a particular event. """ - event_uid = event_uid or '' - url = 'https://www.googleapis.com/calendar/v3/' \ - 'calendars/{}/events/{}'.format(urllib.quote(calendar_uid), - urllib.quote(event_uid)) + event_uid = event_uid or "" + url = "https://www.googleapis.com/calendar/v3/" "calendars/{}/events/{}".format( + urllib.quote(calendar_uid), urllib.quote(event_uid) + ) token = self._get_access_token() - response = requests.request(method, url, - auth=OAuthRequestsWrapper(token), - **kwargs) + response = requests.request( + method, url, auth=OAuthRequestsWrapper(token), **kwargs + ) return response def create_remote_event(self, event, **kwargs): data = _dump_event(event) params = {} - if kwargs.get('notify_participants') is True: + if kwargs.get("notify_participants") is True: params["sendNotifications"] = "true" else: params["sendNotifications"] = "false" - response = self._make_event_request('post', event.calendar.uid, - json=data, params=params) + response = self._make_event_request( + "post", event.calendar.uid, json=data, params=params + ) # All non-200 statuses are considered errors response.raise_for_status() @@ -238,14 +257,14 @@ def update_remote_event(self, event, **kwargs): data = _dump_event(event) params = {} - if kwargs.get('notify_participants') is True: + if kwargs.get("notify_participants") is True: params["sendNotifications"] = "true" else: params["sendNotifications"] = "false" - response = self._make_event_request('put', event.calendar.uid, - event.uid, json=data, - params=params) + response = self._make_event_request( + "put", event.calendar.uid, event.uid, json=data, params=params + ) # All non-200 statuses are considered errors response.raise_for_status() @@ -253,37 +272,44 @@ def update_remote_event(self, event, **kwargs): def delete_remote_event(self, calendar_uid, event_uid, **kwargs): params = {} - if kwargs.get('notify_participants') is True: + if kwargs.get("notify_participants") is True: params["sendNotifications"] = "true" else: params["sendNotifications"] = "false" - response = self._make_event_request('delete', calendar_uid, event_uid, - params=params) + response = self._make_event_request( + "delete", calendar_uid, event_uid, params=params + ) if response.status_code == 410: # The Google API returns an 'HTTPError: 410 Client Error: Gone' # for an event that no longer exists on the remote - log.warning('Event no longer exists on remote', - 
calendar_uid=calendar_uid, event_uid=event_uid) + log.warning( + "Event no longer exists on remote", + calendar_uid=calendar_uid, + event_uid=event_uid, + ) else: # All other non-200 statuses are considered errors response.raise_for_status() # -------- logic for push notification subscriptions -------- # - def _get_access_token_for_push_notifications(self, - account, - force_refresh=False): + def _get_access_token_for_push_notifications(self, account, force_refresh=False): # Raises an OAuthError if no such token exists return g_token_manager.get_token_for_calendars_restrict_ids( - account, PUSH_ENABLED_CLIENT_IDS, force_refresh) + account, PUSH_ENABLED_CLIENT_IDS, force_refresh + ) def push_notifications_enabled(self, account): push_enabled_creds = next( - (creds for creds in account.valid_auth_credentials - if creds.client_id in PUSH_ENABLED_CLIENT_IDS), - None) + ( + creds + for creds in account.valid_auth_credentials + if creds.client_id in PUSH_ENABLED_CLIENT_IDS + ), + None, + ) return push_enabled_creds is not None def watch_calendar_list(self, account): @@ -301,7 +327,8 @@ def watch_calendar_list(self, account): """ token = self._get_access_token_for_push_notifications(account) receiving_url = CALENDAR_LIST_WEBHOOK_URL.format( - urllib.quote(account.public_id)) + urllib.quote(account.public_id) + ) one_week = datetime.timedelta(weeks=1) in_a_week = datetime.datetime.utcnow() + one_week @@ -315,17 +342,17 @@ def watch_calendar_list(self, account): "address": receiving_url, "expiration": expiration_date, } - headers = { - 'content-type': 'application/json' - } - r = requests.post(WATCH_CALENDARS_URL, - data=json.dumps(data), - headers=headers, - auth=OAuthRequestsWrapper(token)) + headers = {"content-type": "application/json"} + r = requests.post( + WATCH_CALENDARS_URL, + data=json.dumps(data), + headers=headers, + auth=OAuthRequestsWrapper(token), + ) if r.status_code == 200: data = r.json() - return data.get('expiration') + return data.get("expiration") else: # Handle error and return None self.handle_watch_errors(r) @@ -348,8 +375,7 @@ def watch_calendar(self, account, calendar): """ token = self._get_access_token_for_push_notifications(account) watch_url = WATCH_EVENTS_URL.format(urllib.quote(calendar.uid)) - receiving_url = EVENTS_LIST_WEHOOK_URL.format( - urllib.quote(calendar.public_id)) + receiving_url = EVENTS_LIST_WEHOOK_URL.format(urllib.quote(calendar.public_id)) one_week = datetime.timedelta(weeks=1) in_a_week = datetime.datetime.utcnow() + one_week @@ -363,64 +389,79 @@ def watch_calendar(self, account, calendar): "address": receiving_url, "expiration": expiration_date, } - headers = { - 'content-type': 'application/json' - } + headers = {"content-type": "application/json"} try: - r = requests.post(watch_url, - data=json.dumps(data), - headers=headers, - auth=OAuthRequestsWrapper(token)) + r = requests.post( + watch_url, + data=json.dumps(data), + headers=headers, + auth=OAuthRequestsWrapper(token), + ) except requests.exceptions.SSLError: self.log.warning( - 'SSLError subscribing to Google push notifications', - url=watch_url, exc_info=True) + "SSLError subscribing to Google push notifications", + url=watch_url, + exc_info=True, + ) return if r.status_code == 200: data = r.json() - return data.get('expiration') + return data.get("expiration") else: # Handle error and return None self.handle_watch_errors(r) def handle_watch_errors(self, r): self.log.warning( - 'Error subscribing to Google push notifications', - url=r.url, response=r.content, status=r.status_code) + 
"Error subscribing to Google push notifications", + url=r.url, + response=r.content, + status=r.status_code, + ) if r.status_code == 400: - reason = r.json()['error']['errors'][0]['reason'] - self.log.warning('Invalid request', - status=r.status_code, reason=reason) - if reason == 'pushNotSupportedForRequestedResource': + reason = r.json()["error"]["errors"][0]["reason"] + self.log.warning("Invalid request", status=r.status_code, reason=reason) + if reason == "pushNotSupportedForRequestedResource": raise AccessNotEnabledError() elif r.status_code == 401: - self.log.warning('Invalid: could be invalid auth credentials', - url=r.url, response=r.content, status=r.status_code) + self.log.warning( + "Invalid: could be invalid auth credentials", + url=r.url, + response=r.content, + status=r.status_code, + ) elif r.status_code in (500, 503): - self.log.warning('Backend error in calendar API', status=r.status_code) + self.log.warning("Backend error in calendar API", status=r.status_code) elif r.status_code == 403: try: - reason = r.json()['error']['errors'][0]['reason'] + reason = r.json()["error"]["errors"][0]["reason"] except (KeyError, ValueError): - self.log.error("Couldn't parse API error response", - response=r.content, status=r.status_code) - if reason == 'userRateLimitExceeded': + self.log.error( + "Couldn't parse API error response", + response=r.content, + status=r.status_code, + ) + if reason == "userRateLimitExceeded": # Sleep before proceeding (naive backoff) gevent.sleep(30 + random.randrange(0, 60)) - self.log.warning('API request was rate-limited') - elif reason == 'accessNotConfigured': - self.log.warning('API not enabled.') + self.log.warning("API request was rate-limited") + elif reason == "accessNotConfigured": + self.log.warning("API not enabled.") raise AccessNotEnabledError() elif r.status_code == 404: # Resource deleted! - self.log.warning('Raising exception for status', - status_code=r.status_code, response=r.content) + self.log.warning( + "Raising exception for status", + status_code=r.status_code, + response=r.content, + ) r.raise_for_status() else: - self.log.warning('Unexpected error', response=r.content, - status=r.status_code) + self.log.warning( + "Unexpected error", response=r.content, status=r.status_code + ) def parse_calendar_response(calendar): @@ -437,19 +478,16 @@ def parse_calendar_response(calendar): ------- A corresponding Calendar instance. """ - uid = calendar['id'] - name = calendar['summary'] + uid = calendar["id"] + name = calendar["summary"] - role = calendar['accessRole'] + role = calendar["accessRole"] read_only = True if role == "owner" or role == "writer": read_only = False - description = calendar.get('description', None) - return Calendar(uid=uid, - name=name, - read_only=read_only, - description=description) + description = calendar.get("description", None) + return Calendar(uid=uid, name=name, read_only=read_only, description=description) def parse_event_response(event, read_only_calendar): @@ -466,54 +504,57 @@ def parse_event_response(event, read_only_calendar): A corresponding Event instance. This instance is not committed or added to a session. """ - uid = str(event['id']) + uid = str(event["id"]) # The entirety of the raw event data in json representation. 
raw_data = json.dumps(event) - title = event.get('summary', '') + title = event.get("summary", "") # Timing data - _start = event['start'] - _end = event['end'] - _original = event.get('originalStartTime', {}) + _start = event["start"] + _end = event["end"] + _original = event.get("originalStartTime", {}) event_time = google_to_event_time(_start, _end) original_start = parse_google_time(_original) - start_tz = _start.get('timeZone') + start_tz = _start.get("timeZone") - last_modified = parse_datetime(event.get('updated')) + last_modified = parse_datetime(event.get("updated")) - description = event.get('description') - location = event.get('location') - busy = event.get('transparency') != 'transparent' - sequence = event.get('sequence', 0) + description = event.get("description") + location = event.get("location") + busy = event.get("transparency") != "transparent" + sequence = event.get("sequence", 0) # We're lucky because event statuses follow the icalendar # spec. - event_status = event.get('status', 'confirmed') + event_status = event.get("status", "confirmed") assert event_status in EVENT_STATUSES # Ownership, read_only information - creator = event.get('creator') + creator = event.get("creator") if creator: - owner = u'{} <{}>'.format( - creator.get('displayName', ''), creator.get('email', '')) + owner = u"{} <{}>".format( + creator.get("displayName", ""), creator.get("email", "") + ) else: - owner = '' + owner = "" participants = [] - attendees = event.get('attendees', []) + attendees = event.get("attendees", []) for attendee in attendees: - status = STATUS_MAP[attendee.get('responseStatus')] - participants.append({ - 'email': attendee.get('email'), - 'name': attendee.get('displayName'), - 'status': status, - 'notes': attendee.get('comment') - }) - - organizer = event.get('organizer') - is_owner = bool(organizer and organizer.get('self')) + status = STATUS_MAP[attendee.get("responseStatus")] + participants.append( + { + "email": attendee.get("email"), + "name": attendee.get("displayName"), + "status": status, + "notes": attendee.get("comment"), + } + ) + + organizer = event.get("organizer") + is_owner = bool(organizer and organizer.get("self")) # FIXME @karim: The right thing here would be to use Google's ACL API. 
# There's some obscure cases, like an autoimported event which guests can @@ -523,42 +564,44 @@ def parse_event_response(event, read_only_calendar): read_only = False # Recurring master or override info - recurrence = event.get('recurrence') - master_uid = event.get('recurringEventId') - cancelled = (event.get('status') == 'cancelled') + recurrence = event.get("recurrence") + master_uid = event.get("recurringEventId") + cancelled = event.get("status") == "cancelled" - visibility = event.get('visibility') + visibility = event.get("visibility") # Rewrite some values documented in # https://developers.google.com/calendar/v3/reference/events - if visibility == 'default': + if visibility == "default": visibility = None - elif visibility == 'confidential': - visibility = 'private' - - return Event(uid=uid, - raw_data=raw_data, - title=title, - description=description, - location=location, - busy=busy, - start=event_time.start, - end=event_time.end, - all_day=event_time.all_day, - owner=owner, - is_owner=is_owner, - read_only=read_only, - participants=participants, - recurrence=recurrence, - last_modified=last_modified, - original_start_tz=start_tz, - original_start_time=original_start, - master_event_uid=master_uid, - cancelled=cancelled, - status=event_status, - sequence_number=sequence, - source='local', - visibility=visibility) + elif visibility == "confidential": + visibility = "private" + + return Event( + uid=uid, + raw_data=raw_data, + title=title, + description=description, + location=location, + busy=busy, + start=event_time.start, + end=event_time.end, + all_day=event_time.all_day, + owner=owner, + is_owner=is_owner, + read_only=read_only, + participants=participants, + recurrence=recurrence, + last_modified=last_modified, + original_start_tz=start_tz, + original_start_time=original_start, + master_event_uid=master_uid, + cancelled=cancelled, + status=event_status, + sequence_number=sequence, + source="local", + visibility=visibility, + ) def _dump_event(event): @@ -569,32 +612,29 @@ def _dump_event(event): dump["location"] = event.location # Whether the event blocks time on the calendar. 
- dump['transparency'] = 'opaque' if event.busy else 'transparent' + dump["transparency"] = "opaque" if event.busy else "transparent" if event.all_day: - dump["start"] = {"date": event.start.strftime('%Y-%m-%d')} - dump["end"] = {"date": event.end.strftime('%Y-%m-%d')} + dump["start"] = {"date": event.start.strftime("%Y-%m-%d")} + dump["end"] = {"date": event.end.strftime("%Y-%m-%d")} else: - dump["start"] = {"dateTime": event.start.isoformat('T'), - "timeZone": "UTC"} - dump["end"] = {"dateTime": event.end.isoformat('T'), - "timeZone": "UTC"} + dump["start"] = {"dateTime": event.start.isoformat("T"), "timeZone": "UTC"} + dump["end"] = {"dateTime": event.end.isoformat("T"), "timeZone": "UTC"} if event.participants: - dump['attendees'] = [] + dump["attendees"] = [] inverse_status_map = {value: key for key, value in STATUS_MAP.items()} for participant in event.participants: attendee = {} - if 'name' in participant: - attendee['displayName'] = participant['name'] - if 'status' in participant: - attendee['responseStatus'] = inverse_status_map[ - participant['status']] - if 'email' in participant: - attendee['email'] = participant['email'] - if 'guests' in participant: - attendee['additionalGuests'] = participant['guests'] + if "name" in participant: + attendee["displayName"] = participant["name"] + if "status" in participant: + attendee["responseStatus"] = inverse_status_map[participant["status"]] + if "email" in participant: + attendee["email"] = participant["email"] + if "guests" in participant: + attendee["additionalGuests"] = participant["guests"] if attendee: - dump['attendees'].append(attendee) + dump["attendees"].append(attendee) return dump diff --git a/inbox/events/ical.py b/inbox/events/ical.py index f39a33473..6eb8af632 100644 --- a/inbox/events/ical.py +++ b/inbox/events/ical.py @@ -20,13 +20,16 @@ from inbox.models.action_log import schedule_action from nylas.logging import get_logger + log = get_logger() -STATUS_MAP = {'NEEDS-ACTION': 'noreply', - 'ACCEPTED': 'yes', - 'DECLINED': 'no', - 'TENTATIVE': 'maybe'} +STATUS_MAP = { + "NEEDS-ACTION": "noreply", + "ACCEPTED": "yes", + "DECLINED": "no", + "TENTATIVE": "maybe", +} INVERTED_STATUS_MAP = {value: key for key, value in STATUS_MAP.iteritems()} @@ -43,18 +46,17 @@ def events_from_ics(namespace, calendar, ics_str): for component in cal.walk(): if component.name == "VCALENDAR": - calendar_method = component.get('method') + calendar_method = component.get("method") if component.name == "VTIMEZONE": - tzname = component.get('TZID') - assert tzname in timezones_table,\ - "Non-UTC timezone should be in table" + tzname = component.get("TZID") + assert tzname in timezones_table, "Non-UTC timezone should be in table" if component.name == "VEVENT": # Make sure the times are in UTC. 
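The normalization that follows boils down to: attach the event's declared zone to any naive datetime, then convert to UTC. A minimal sketch with pytz; the zone name in the example is illustrative.

import datetime
import pytz

def to_utc(dt, tzname):
    # Naive datetimes get the declared zone attached first;
    # aware datetimes are simply converted.
    if dt.tzinfo is None:
        dt = pytz.timezone(tzname).localize(dt)
    return dt.astimezone(pytz.UTC)

# to_utc(datetime.datetime(2020, 8, 17, 9, 30), "Europe/Paris")
# -> 2020-08-17 07:30:00+00:00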
try: - original_start = component.get('dtstart').dt - original_end = component.get('dtend').dt + original_start = component.get("dtstart").dt + original_end = component.get("dtend").dt except AttributeError: raise MalformedEventError("Event lacks start and/or end time") @@ -69,9 +71,10 @@ def events_from_ics(namespace, calendar, ics_str): original_start_tz = timezones_table[tzid] if original_start.tzinfo is None: - tzid = component.get('dtstart').params.get('TZID', None) - assert tzid in timezones_table,\ - "Non-UTC timezone should be in table" + tzid = component.get("dtstart").params.get("TZID", None) + assert ( + tzid in timezones_table + ), "Non-UTC timezone should be in table" corresponding_tz = timezones_table[tzid] original_start_tz = corresponding_tz @@ -80,9 +83,10 @@ def events_from_ics(namespace, calendar, ics_str): original_start = local_timezone.localize(original_start) if original_end.tzinfo is None: - tzid = component.get('dtend').params.get('TZID', None) - assert tzid in timezones_table,\ - "Non-UTC timezone should be in table" + tzid = component.get("dtend").params.get("TZID", None) + assert ( + tzid in timezones_table + ), "Non-UTC timezone should be in table" corresponding_tz = timezones_table[tzid] local_timezone = pytz.timezone(corresponding_tz) @@ -97,13 +101,14 @@ def events_from_ics(namespace, calendar, ics_str): start = arrow.get(start) end = arrow.get(end) - assert isinstance(start, type(end)), "Start and end should be of "\ - "the same type" + assert isinstance(start, type(end)), ( + "Start and end should be of " "the same type" + ) # Get the last modification date. # Exchange uses DtStamp, iCloud and Gmail LAST-MODIFIED. - component_dtstamp = component.get('dtstamp') - component_last_modified = component.get('last-modified') + component_dtstamp = component.get("dtstamp") + component_last_modified = component.get("last-modified") last_modified = None if component_dtstamp is not None: @@ -114,8 +119,9 @@ def events_from_ics(namespace, calendar, ics_str): if component_dtstamp.dt.tzinfo is not None: last_modified = component_dtstamp.dt else: - raise NotImplementedError("We don't support arcane Windows" - " timezones in timestamps yet") + raise NotImplementedError( + "We don't support arcane Windows" " timezones in timestamps yet" + ) elif component_last_modified is not None: # Try to look for a LAST-MODIFIED element instead. # Note: LAST-MODIFIED is always in UTC. @@ -123,63 +129,64 @@ def events_from_ics(namespace, calendar, ics_str): last_modified = component_last_modified.dt title = None - summaries = component.get('summary', []) + summaries = component.get("summary", []) if not isinstance(summaries, list): summaries = [summaries] if summaries != []: title = " - ".join(summaries) - description = component.get('description') + description = component.get("description") if description is not None: description = unicode(description) - event_status = component.get('status') + event_status = component.get("status") if event_status is not None: event_status = event_status.lower() else: # Some providers (e.g: iCloud) don't use the status field. # Instead they use the METHOD field to signal cancellations. 
- method = component.get('method') - if method and method.lower() == 'cancel': - event_status = 'cancelled' - elif calendar_method and calendar_method.lower() == 'cancel': + method = component.get("method") + if method and method.lower() == "cancel": + event_status = "cancelled" + elif calendar_method and calendar_method.lower() == "cancel": # So, this particular event was not cancelled. Maybe the # whole calendar was. - event_status = 'cancelled' + event_status = "cancelled" else: # Otherwise assume the event has been confirmed. - event_status = 'confirmed' + event_status = "confirmed" assert event_status in EVENT_STATUSES - recur = component.get('rrule') + recur = component.get("rrule") if recur: recur = "RRULE:{}".format(recur.to_ical()) participants = [] - organizer = component.get('organizer') + organizer = component.get("organizer") organizer_name = None organizer_email = None if organizer: organizer_email = unicode(organizer) - if organizer_email.lower().startswith('mailto:'): + if organizer_email.lower().startswith("mailto:"): organizer_email = organizer_email[7:] - if 'CN' in organizer.params: - organizer_name = organizer.params['CN'] + if "CN" in organizer.params: + organizer_name = organizer.params["CN"] owner = formataddr([organizer_name, organizer_email.lower()]) else: owner = None is_owner = False - if owner is not None and (namespace.account.email_address == - canonicalize_address(organizer_email)): + if owner is not None and ( + namespace.account.email_address == canonicalize_address(organizer_email) + ): is_owner = True - attendees = component.get('attendee', []) + attendees = component.get("attendee", []) # the iCalendar python module doesn't return a list when # there's only one attendee. Go figure. @@ -189,40 +196,46 @@ def events_from_ics(namespace, calendar, ics_str): for attendee in attendees: email = unicode(attendee) # strip mailto: if it exists - if email.lower().startswith('mailto:'): + if email.lower().startswith("mailto:"): email = email[7:] try: - name = attendee.params['CN'] + name = attendee.params["CN"] except KeyError: name = None - status_map = {'NEEDS-ACTION': 'noreply', - 'ACCEPTED': 'yes', - 'DECLINED': 'no', - 'TENTATIVE': 'maybe'} - status = 'noreply' + status_map = { + "NEEDS-ACTION": "noreply", + "ACCEPTED": "yes", + "DECLINED": "no", + "TENTATIVE": "maybe", + } + status = "noreply" try: - a_status = attendee.params['PARTSTAT'] + a_status = attendee.params["PARTSTAT"] status = status_map[a_status] except KeyError: pass notes = None try: - guests = attendee.params['X-NUM-GUESTS'] + guests = attendee.params["X-NUM-GUESTS"] notes = u"Guests: {}".format(guests) except KeyError: pass - participants.append({'email': email.lower(), - 'name': name, - 'status': status, - 'notes': notes, - 'guests': []}) + participants.append( + { + "email": email.lower(), + "name": name, + "status": status, + "notes": notes, + "guests": [], + } + ) - location = component.get('location') - uid = str(component.get('uid')) - sequence_number = int(component.get('sequence', 0)) + location = component.get("location") + uid = str(component.get("uid")) + sequence_number = int(component.get("sequence", 0)) # Some services (I'm looking at you, http://www.foogi.me/) # don't follow the spec and generate icalendar files with @@ -235,7 +248,7 @@ def events_from_ics(namespace, calendar, ics_str): namespace=namespace, calendar=calendar, uid=uid, - provider_name='ics', + provider_name="ics", raw_data=component.to_ical(), title=title, description=description, @@ -251,17 +264,18 @@ def 
events_from_ics(namespace, calendar, ics_str): is_owner=is_owner, last_modified=last_modified, original_start_tz=original_start_tz, - source='local', + source="local", status=event_status, sequence_number=sequence_number, - participants=participants) + participants=participants, + ) # We need to distinguish between invites/updates/cancellations # and RSVPs. - if calendar_method == 'REQUEST' or calendar_method == 'CANCEL': - events['invites'].append(event) - elif calendar_method == 'REPLY': - events['rsvps'].append(event) + if calendar_method == "REQUEST" or calendar_method == "CANCEL": + events["invites"].append(event) + elif calendar_method == "REPLY": + events["rsvps"].append(event) return events @@ -272,10 +286,15 @@ def process_invites(db_session, message, account, invites): # Get the list of events which share a uid with those we received. # Note that we're limiting this query to events in the 'emailed events' # calendar, because that's where all the invites go. - existing_events = db_session.query(Event).filter( - Event.calendar_id == account.emailed_events_calendar_id, - Event.namespace_id == account.namespace.id, - Event.uid.in_(new_uids)).all() + existing_events = ( + db_session.query(Event) + .filter( + Event.calendar_id == account.emailed_events_calendar_id, + Event.namespace_id == account.namespace.id, + Event.uid.in_(new_uids), + ) + .all() + ) existing_events_table = {event.uid: event for event in existing_events} @@ -300,8 +319,7 @@ def process_invites(db_session, message, account, invites): existing_event = existing_events_table[event.uid] if existing_event.sequence_number <= event.sequence_number: - merged_participants = existing_event.\ - _partial_participants_merge(event) + merged_participants = existing_event._partial_participants_merge(event) existing_event.update(event) existing_event.message = message @@ -317,13 +335,14 @@ def process_invites(db_session, message, account, invites): db_session.flush() existing_event.contacts = [] - update_contacts_from_event(db_session, existing_event, - account.namespace.id) + update_contacts_from_event( + db_session, existing_event, account.namespace.id + ) def _cleanup_nylas_uid(uid): uid = uid.lower() - if '@nylas.com' in uid: + if "@nylas.com" in uid: return uid[:-10] return uid @@ -334,8 +353,9 @@ def process_nylas_rsvps(db_session, message, account, rsvps): # `public_id@nylas.com`. We couldn't use Event.uid for this because # it wouldn't work with Exchange (Exchange uids are of the form # 1:2323 and aren't guaranteed to be unique). - new_uids = [_cleanup_nylas_uid(event.uid) for event in rsvps - if '@nylas.com' in event.uid] + new_uids = [ + _cleanup_nylas_uid(event.uid) for event in rsvps if "@nylas.com" in event.uid + ] # Drop uids which aren't base36 uids. new_uids = [uid for uid in new_uids if valid_base36(uid)] @@ -343,13 +363,17 @@ def process_nylas_rsvps(db_session, message, account, rsvps): # Get the list of events which share a uid with those we received. # Note that we're excluding events from "Emailed events" because # we don't want to process RSVPs to invites we received. 
- existing_events = db_session.query(Event).filter( - Event.namespace_id == account.namespace.id, - Event.calendar_id != account.emailed_events_calendar_id, - Event.public_id.in_(new_uids)).all() - - existing_events_table = {event.public_id: event - for event in existing_events} + existing_events = ( + db_session.query(Event) + .filter( + Event.namespace_id == account.namespace.id, + Event.calendar_id != account.emailed_events_calendar_id, + Event.public_id.in_(new_uids), + ) + .all() + ) + + existing_events_table = {event.public_id: event for event in existing_events} for event in rsvps: event_uid = _cleanup_nylas_uid(event.uid) @@ -363,8 +387,7 @@ def process_nylas_rsvps(db_session, message, account, rsvps): # Is the current event an update? if existing_event.sequence_number == event.sequence_number: - merged_participants = existing_event.\ - _partial_participants_merge(event) + merged_participants = existing_event._partial_participants_merge(event) # We have to do this mumbo-jumbo because MutableList does # not register changes to nested elements. @@ -377,9 +400,13 @@ def process_nylas_rsvps(db_session, message, account, rsvps): # We need to sync back changes to the event manually if existing_event.calendar != account.emailed_events_calendar: - schedule_action('update_event', existing_event, - existing_event.namespace.id, db_session, - calendar_uid=existing_event.calendar.uid) + schedule_action( + "update_event", + existing_event, + existing_event.namespace.id, + db_session, + calendar_uid=existing_event.calendar.uid, + ) db_session.flush() @@ -389,37 +416,49 @@ def import_attached_events(db_session, account, message): assert account is not None for part in message.attached_event_files: - part_data = '' + part_data = "" try: part_data = part.block.data - if part_data == '': + if part_data == "": continue - new_events = events_from_ics(account.namespace, - account.emailed_events_calendar, - part_data) + new_events = events_from_ics( + account.namespace, account.emailed_events_calendar, part_data + ) except MalformedEventError: - log.error('Attached event parsing error', - account_id=account.id, message_id=message.id, - logstash_tag='icalendar_autoimport', - event_part_id=part.id) + log.error( + "Attached event parsing error", + account_id=account.id, + message_id=message.id, + logstash_tag="icalendar_autoimport", + event_part_id=part.id, + ) continue - except (AssertionError, TypeError, RuntimeError, - AttributeError, ValueError, UnboundLocalError, - LookupError, ImportError, NameError): + except ( + AssertionError, + TypeError, + RuntimeError, + AttributeError, + ValueError, + UnboundLocalError, + LookupError, + ImportError, + NameError, + ): # Kind of ugly but we don't want to derail message # creation because of an error in the attached calendar. 
- log.error('Unhandled exception during message parsing', - message_id=message.id, - event_part_id=part.id, - logstash_tag='icalendar_autoimport', - traceback=traceback.format_exception( - sys.exc_info()[0], - sys.exc_info()[1], - sys.exc_info()[2])) + log.error( + "Unhandled exception during message parsing", + message_id=message.id, + event_part_id=part.id, + logstash_tag="icalendar_autoimport", + traceback=traceback.format_exception( + sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2] + ), + ) continue - process_invites(db_session, message, account, new_events['invites']) + process_invites(db_session, message, account, new_events["invites"]) # Gmail has a very very annoying feature: it doesn't use email to RSVP # to an invite sent by another gmail account. This makes it impossible @@ -427,130 +466,127 @@ def import_attached_events(db_session, account, message): # Gmail API handle invite sending. For other providers we process this # ourselves. # - karim - if account.provider != 'gmail': - process_nylas_rsvps(db_session, message, account, - new_events['rsvps']) + if account.provider != "gmail": + process_nylas_rsvps(db_session, message, account, new_events["rsvps"]) -def generate_icalendar_invite(event, invite_type='request'): +def generate_icalendar_invite(event, invite_type="request"): # Generates an iCalendar invite from an event. - assert invite_type in ['request', 'cancel'] + assert invite_type in ["request", "cancel"] cal = iCalendar() - cal.add('PRODID', '-//Nylas sync engine//nylas.com//') + cal.add("PRODID", "-//Nylas sync engine//nylas.com//") - if invite_type in ['request', 'update']: - cal.add('METHOD', 'REQUEST') - elif invite_type == 'cancel': - cal.add('METHOD', 'CANCEL') + if invite_type in ["request", "update"]: + cal.add("METHOD", "REQUEST") + elif invite_type == "cancel": + cal.add("METHOD", "CANCEL") - cal.add('VERSION', '2.0') - cal.add('CALSCALE', 'GREGORIAN') + cal.add("VERSION", "2.0") + cal.add("CALSCALE", "GREGORIAN") icalendar_event = icalendar.Event() account = event.namespace.account - organizer = icalendar.vCalAddress(u"MAILTO:{}".format( - account.email_address)) - if account.name is not None and account.name != '': - organizer.params['CN'] = account.name - - icalendar_event['organizer'] = organizer - icalendar_event['sequence'] = str(event.sequence_number) - icalendar_event['X-MICROSOFT-CDO-APPT-SEQUENCE'] = \ - icalendar_event['sequence'] - - if invite_type == 'cancel': - icalendar_event['status'] = 'CANCELLED' + organizer = icalendar.vCalAddress(u"MAILTO:{}".format(account.email_address)) + if account.name is not None and account.name != "": + organizer.params["CN"] = account.name + + icalendar_event["organizer"] = organizer + icalendar_event["sequence"] = str(event.sequence_number) + icalendar_event["X-MICROSOFT-CDO-APPT-SEQUENCE"] = icalendar_event["sequence"] + + if invite_type == "cancel": + icalendar_event["status"] = "CANCELLED" else: - icalendar_event['status'] = 'CONFIRMED' - - icalendar_event['uid'] = u"{}@nylas.com".format(event.public_id) - icalendar_event['description'] = event.description or '' - icalendar_event['summary'] = event.title or '' - icalendar_event['last-modified'] = serialize_datetime(event.updated_at) - icalendar_event['dtstamp'] = icalendar_event['last-modified'] - icalendar_event['created'] = serialize_datetime(event.created_at) - icalendar_event['dtstart'] = serialize_datetime(event.start) - icalendar_event['dtend'] = serialize_datetime(event.end) - icalendar_event['transp'] = 'OPAQUE' if event.busy else 'TRANSPARENT' 
- icalendar_event['location'] = event.location or '' + icalendar_event["status"] = "CONFIRMED" + + icalendar_event["uid"] = u"{}@nylas.com".format(event.public_id) + icalendar_event["description"] = event.description or "" + icalendar_event["summary"] = event.title or "" + icalendar_event["last-modified"] = serialize_datetime(event.updated_at) + icalendar_event["dtstamp"] = icalendar_event["last-modified"] + icalendar_event["created"] = serialize_datetime(event.created_at) + icalendar_event["dtstart"] = serialize_datetime(event.start) + icalendar_event["dtend"] = serialize_datetime(event.end) + icalendar_event["transp"] = "OPAQUE" if event.busy else "TRANSPARENT" + icalendar_event["location"] = event.location or "" attendees = [] for participant in event.participants: - email = participant.get('email', None) + email = participant.get("email", None) # FIXME @karim: handle the case where a participant has no address. # We may have to patch the iCalendar module for this. assert email is not None and email != "" attendee = icalendar.vCalAddress(u"MAILTO:{}".format(email)) - name = participant.get('name', None) + name = participant.get("name", None) if name is not None: - attendee.params['CN'] = name + attendee.params["CN"] = name - attendee.params['RSVP'] = 'TRUE' - attendee.params['ROLE'] = 'REQ-PARTICIPANT' - attendee.params['CUTYPE'] = 'INDIVIDUAL' + attendee.params["RSVP"] = "TRUE" + attendee.params["ROLE"] = "REQ-PARTICIPANT" + attendee.params["CUTYPE"] = "INDIVIDUAL" - status = participant.get('status', 'noreply') - attendee.params['PARTSTAT'] = INVERTED_STATUS_MAP.get(status) + status = participant.get("status", "noreply") + attendee.params["PARTSTAT"] = INVERTED_STATUS_MAP.get(status) attendees.append(attendee) if attendees != []: - icalendar_event.add('ATTENDEE', attendees) + icalendar_event.add("ATTENDEE", attendees) cal.add_component(icalendar_event) return cal -def generate_invite_message(ical_txt, event, account, invite_type='request'): - assert invite_type in ['request', 'update', 'cancel'] - html_body = event.description or '' +def generate_invite_message(ical_txt, event, account, invite_type="request"): + assert invite_type in ["request", "update", "cancel"] + html_body = event.description or "" text_body = html2text(html_body) - msg = mime.create.multipart('mixed') + msg = mime.create.multipart("mixed") - body = mime.create.multipart('alternative') + body = mime.create.multipart("alternative") - if invite_type in ['request', 'update']: + if invite_type in ["request", "update"]: body.append( - mime.create.text('plain', text_body), - mime.create.text('html', html_body), - mime.create.text('calendar; method=REQUEST', - ical_txt, charset='utf8')) + mime.create.text("plain", text_body), + mime.create.text("html", html_body), + mime.create.text("calendar; method=REQUEST", ical_txt, charset="utf8"), + ) msg.append(body) - elif invite_type == 'cancel': + elif invite_type == "cancel": body.append( - mime.create.text('plain', text_body), - mime.create.text('html', html_body), - mime.create.text('calendar; method=CANCEL', - ical_txt, charset='utf8')) + mime.create.text("plain", text_body), + mime.create.text("html", html_body), + mime.create.text("calendar; method=CANCEL", ical_txt, charset="utf8"), + ) msg.append(body) # From should match our mailsend provider (mailgun) so it doesn't confuse # spam filters - msg.headers['From'] = "automated@notifications.nylas.com" - msg.headers['Reply-To'] = account.email_address + msg.headers["From"] = "automated@notifications.nylas.com" + 
msg.headers["Reply-To"] = account.email_address - if invite_type == 'request': - msg.headers['Subject'] = u'Invitation: {}'.format(event.title) - elif invite_type == 'update': - msg.headers['Subject'] = u'Updated Invitation: {}'.format(event.title) - elif invite_type == 'cancel': - msg.headers['Subject'] = u'Cancelled: {}'.format(event.title) + if invite_type == "request": + msg.headers["Subject"] = u"Invitation: {}".format(event.title) + elif invite_type == "update": + msg.headers["Subject"] = u"Updated Invitation: {}".format(event.title) + elif invite_type == "cancel": + msg.headers["Subject"] = u"Cancelled: {}".format(event.title) return msg -def send_invite(ical_txt, event, account, invite_type='request'): +def send_invite(ical_txt, event, account, invite_type="request"): # We send those transactional emails through a separate domain. - MAILGUN_API_KEY = config.get('NOTIFICATIONS_MAILGUN_API_KEY') - MAILGUN_DOMAIN = config.get('NOTIFICATIONS_MAILGUN_DOMAIN') + MAILGUN_API_KEY = config.get("NOTIFICATIONS_MAILGUN_API_KEY") + MAILGUN_DOMAIN = config.get("NOTIFICATIONS_MAILGUN_DOMAIN") assert MAILGUN_DOMAIN is not None and MAILGUN_API_KEY is not None for participant in event.participants: - email = participant.get('email', None) + email = participant.get("email", None) if email is None: continue @@ -561,18 +597,26 @@ def send_invite(ical_txt, event, account, invite_type='request'): continue msg = generate_invite_message(ical_txt, event, account, invite_type) - msg.headers['To'] = email + msg.headers["To"] = email final_message = msg.to_string() - mg_url = 'https://api.mailgun.net/v3/{}/messages.mime'.format(MAILGUN_DOMAIN) - r = requests.post(mg_url, auth=("api", MAILGUN_API_KEY), - data={"to": email}, - files={"message": final_message}) + mg_url = "https://api.mailgun.net/v3/{}/messages.mime".format(MAILGUN_DOMAIN) + r = requests.post( + mg_url, + auth=("api", MAILGUN_API_KEY), + data={"to": email}, + files={"message": final_message}, + ) if r.status_code != 200: - log.error("Couldnt send invite email for", email_address=email, - event_id=event.id, account_id=account.id, - logstash_tag='invite_sending', status_code=r.status_code) + log.error( + "Couldnt send invite email for", + email_address=email, + event_id=event.id, + account_id=account.id, + logstash_tag="invite_sending", + status_code=r.status_code, + ) def _generate_rsvp(status, account, event): @@ -580,45 +624,44 @@ def _generate_rsvp(status, account, event): # in the RVSP reply. I suppose it's for reconciling the reply with the # invite. 
- karim cal = iCalendar() - cal.add('PRODID', '-//Nylas sync engine//nylas.com//') - cal.add('METHOD', 'REPLY') - cal.add('VERSION', '2.0') - cal.add('CALSCALE', 'GREGORIAN') + cal.add("PRODID", "-//Nylas sync engine//nylas.com//") + cal.add("METHOD", "REPLY") + cal.add("VERSION", "2.0") + cal.add("CALSCALE", "GREGORIAN") icalevent = icalendar.Event() - icalevent['uid'] = event.uid + icalevent["uid"] = event.uid if event.organizer_email is not None: - icalevent['organizer'] = event.organizer_email + icalevent["organizer"] = event.organizer_email - icalevent['sequence'] = event.sequence_number - icalevent['X-MICROSOFT-CDO-APPT-SEQUENCE'] = icalevent['sequence'] + icalevent["sequence"] = event.sequence_number + icalevent["X-MICROSOFT-CDO-APPT-SEQUENCE"] = icalevent["sequence"] - if event.status == 'confirmed': - icalevent['status'] = 'CONFIRMED' + if event.status == "confirmed": + icalevent["status"] = "CONFIRMED" - icalevent['dtstamp'] = serialize_datetime(datetime.utcnow()) + icalevent["dtstamp"] = serialize_datetime(datetime.utcnow()) if event.start is not None: - icalevent['dtstart'] = serialize_datetime(event.start) + icalevent["dtstart"] = serialize_datetime(event.start) if event.end is not None: - icalevent['dtend'] = serialize_datetime(event.end) + icalevent["dtend"] = serialize_datetime(event.end) if event.description is not None: - icalevent['description'] = event.description + icalevent["description"] = event.description if event.location is not None: - icalevent['location'] = event.location + icalevent["location"] = event.location if event.title is not None: - icalevent['summary'] = event.title + icalevent["summary"] = event.title - attendee = icalendar.vCalAddress(u'MAILTO:{}'.format( - account.email_address)) - attendee.params['cn'] = account.name - attendee.params['partstat'] = status - icalevent.add('attendee', attendee, encode=0) + attendee = icalendar.vCalAddress(u"MAILTO:{}".format(account.email_address)) + attendee.params["cn"] = account.name + attendee.params["partstat"] = status + icalevent.add("attendee", attendee, encode=0) cal.add_component(icalevent) ret = {} @@ -643,13 +686,13 @@ def rsvp_recipient(event): # A stupid bug made us create some db entries of the # form "None ". 
- if event.organizer_email not in [None, 'None']: + if event.organizer_email not in [None, "None"]: return event.organizer_email if event.message is not None: if event.message.from_addr is not None and len(event.message.from_addr) == 1: from_addr = event.message.from_addr[0][1] - if from_addr is not None and from_addr != '': + if from_addr is not None and from_addr != "": return from_addr return None @@ -667,28 +710,30 @@ def send_rsvp(ical_data, event, body_text, status, account): sendmail_client = get_sendmail_client(account) - msg = mime.create.multipart('mixed') + msg = mime.create.multipart("mixed") - body = mime.create.multipart('alternative') + body = mime.create.multipart("alternative") body.append( - mime.create.text('plain', ''), - mime.create.text('calendar;method=REPLY', ical_txt)) + mime.create.text("plain", ""), + mime.create.text("calendar;method=REPLY", ical_txt), + ) msg.append(body) - msg.headers['Reply-To'] = account.email_address - msg.headers['From'] = account.email_address - msg.headers['To'] = rsvp_to + msg.headers["Reply-To"] = account.email_address + msg.headers["From"] = account.email_address + msg.headers["To"] = rsvp_to - assert status in ['yes', 'no', 'maybe'] + assert status in ["yes", "no", "maybe"] - if status == 'yes': - msg.headers['Subject'] = u'Accepted: {}'.format(event.message.subject) - elif status == 'maybe': - msg.headers['Subject'] = u'Tentatively accepted: {}'.format( - event.message.subject) - elif status == 'no': - msg.headers['Subject'] = u'Declined: {}'.format(event.message.subject) + if status == "yes": + msg.headers["Subject"] = u"Accepted: {}".format(event.message.subject) + elif status == "maybe": + msg.headers["Subject"] = u"Tentatively accepted: {}".format( + event.message.subject + ) + elif status == "no": + msg.headers["Subject"] = u"Declined: {}".format(event.message.subject) final_message = msg.to_string() diff --git a/inbox/events/recurring.py b/inbox/events/recurring.py index 027bb507f..523fe24f1 100644 --- a/inbox/events/recurring.py +++ b/inbox/events/recurring.py @@ -1,12 +1,12 @@ import arrow -from dateutil.rrule import (rrulestr, rrule, rruleset, - MO, TU, WE, TH, FR, SA, SU) +from dateutil.rrule import rrulestr, rrule, rruleset, MO, TU, WE, TH, FR, SA, SU from inbox.models.event import RecurringEvent, RecurringEventOverride from inbox.events.util import parse_rrule_datetime from timezones import timezones_table from nylas.logging import get_logger + log = get_logger() # How far in the future to expand recurring events @@ -25,11 +25,16 @@ def link_events(db_session, event): def link_overrides(db_session, event): # Find event instances which override this specific # RecurringEvent instance. 
- overrides = db_session.query(RecurringEventOverride).\ - filter_by(namespace_id=event.namespace_id, - calendar_id=event.calendar_id, - master_event_uid=event.uid, - source=event.source).all() + overrides = ( + db_session.query(RecurringEventOverride) + .filter_by( + namespace_id=event.namespace_id, + calendar_id=event.calendar_id, + master_event_uid=event.uid, + source=event.source, + ) + .all() + ) for o in overrides: if not o.master: o.master = event @@ -42,11 +47,16 @@ def link_master(db_session, event): # been synced yet) if not event.master: if event.master_event_uid: - master = db_session.query(RecurringEvent).\ - filter_by(namespace_id=event.namespace_id, - calendar_id=event.calendar_id, - uid=event.master_event_uid, - source=event.source).first() + master = ( + db_session.query(RecurringEvent) + .filter_by( + namespace_id=event.namespace_id, + calendar_id=event.calendar_id, + uid=event.master_event_uid, + source=event.source, + ) + .first() + ) if master: event.master = master return event.master # This may be None. @@ -56,32 +66,34 @@ def parse_rrule(event): # Parse the RRULE string and return a dateutil.rrule.rrule object if event.rrule is not None: if event.all_day: - start = event.start.to('utc').naive + start = event.start.to("utc").naive ignoretz = True else: start = event.start.datetime ignoretz = False try: - rule = rrulestr(event.rrule, dtstart=start, ignoretz=ignoretz, - compatible=True) + rule = rrulestr( + event.rrule, dtstart=start, ignoretz=ignoretz, compatible=True + ) return rule except Exception as e: - log.error("Error parsing RRULE entry", event_id=event.id, - error=e, exc_info=True) + log.error( + "Error parsing RRULE entry", event_id=event.id, error=e, exc_info=True + ) def parse_exdate(event): # Parse the EXDATE string and return a list of arrow datetimes excl_dates = [] if event.exdate: - name, values = event.exdate.split(':', 1) - tzinfo = 'UTC' - for p in name.split(';'): + name, values = event.exdate.split(":", 1) + tzinfo = "UTC" + for p in name.split(";"): # Handle TZID in EXDATE (TODO: submit PR to python-dateutil) - if p.startswith('TZID'): + if p.startswith("TZID"): tzinfo = p[5:] - for v in values.split(','): + for v in values.split(","): # convert to timezone-aware dates t = parse_rrule_datetime(v, tzinfo) excl_dates.append(t) @@ -119,8 +131,7 @@ def get_start_times(event, start=None, end=None): rrules = parse_rrule(event) if not rrules: - log.warn('Tried to expand a non-recurring event', - event_id=event.id) + log.warn("Tried to expand a non-recurring event", event_id=event.id) return [event.start] excl_dates = parse_exdate(event) @@ -139,13 +150,13 @@ def get_start_times(event, start=None, end=None): if event.all_day: # compare naive times, since date handling in rrulestr is naive # when UNTIL takes the form YYYYMMDD - start = start.to('utc').naive - end = end.to('utc').naive + start = start.to("utc").naive + end = end.to("utc").naive start_times = rrules.between(start, end, inc=True) # Convert back to UTC, which covers daylight savings differences - start_times = [arrow.get(t).to('utc') for t in start_times] + start_times = [arrow.get(t).to("utc") for t in start_times] return start_times @@ -153,13 +164,7 @@ def get_start_times(event, start=None, end=None): # rrule constant values -freq_map = ('YEARLY', - 'MONTHLY', - 'WEEKLY', - 'DAILY', - 'HOURLY', - 'MINUTELY', - 'SECONDLY') +freq_map = ("YEARLY", "MONTHLY", "WEEKLY", "DAILY", "HOURLY", "MINUTELY", "SECONDLY") weekday_map = (MO, TU, WE, TH, FR, SA, SU) @@ -172,17 +177,16 @@ def 
rrule_to_json(r): for field, value in info.iteritems(): if isinstance(value, tuple) and len(value) == 1: value = value[0] - if field[0] == '_': + if field[0] == "_": fieldname = field[1:] else: continue - if fieldname.startswith('by') and value is not None: - if fieldname == 'byweekday': + if fieldname.startswith("by") and value is not None: + if fieldname == "byweekday": value = str(weekday_map[value]) j[fieldname] = value - elif fieldname == 'freq': + elif fieldname == "freq": j[fieldname] = freq_map[value] - elif fieldname in ['dtstart', 'interval', 'wkst', - 'count', 'until']: # tzinfo? + elif fieldname in ["dtstart", "interval", "wkst", "count", "until"]: # tzinfo? j[fieldname] = value return j diff --git a/inbox/events/remote_sync.py b/inbox/events/remote_sync.py index b4599a777..c55410b5f 100644 --- a/inbox/events/remote_sync.py +++ b/inbox/events/remote_sync.py @@ -2,6 +2,7 @@ from requests.exceptions import HTTPError from nylas.logging import get_logger + logger = get_logger() from inbox.basicauth import AccessNotEnabledError, OAuthError @@ -20,10 +21,10 @@ EVENT_SYNC_FOLDER_ID = -2 -EVENT_SYNC_FOLDER_NAME = 'Events' +EVENT_SYNC_FOLDER_NAME = "Events" # Update frequency for accounts without push notifications -POLL_FREQUENCY = config.get('CALENDAR_POLL_FREQUENCY', 300) +POLL_FREQUENCY = config.get("CALENDAR_POLL_FREQUENCY", 300) # Update frequency for accounts with push notifications (accounts are only # updated if there was a recent push notification). @@ -37,42 +38,51 @@ class EventSync(BaseSyncMonitor): """Per-account event sync engine.""" - def __init__(self, email_address, provider_name, account_id, namespace_id, - poll_frequency=POLL_FREQUENCY): - bind_context(self, 'eventsync', account_id) + def __init__( + self, + email_address, + provider_name, + account_id, + namespace_id, + poll_frequency=POLL_FREQUENCY, + ): + bind_context(self, "eventsync", account_id) # Only Google for now, can easily parametrize by provider later. self.provider = GoogleEventsProvider(account_id, namespace_id) - self.log = logger.new(account_id=account_id, component='calendar sync') - - BaseSyncMonitor.__init__(self, - account_id, - namespace_id, - email_address, - EVENT_SYNC_FOLDER_ID, - EVENT_SYNC_FOLDER_NAME, - provider_name, - poll_frequency=poll_frequency, - scope='calendar') + self.log = logger.new(account_id=account_id, component="calendar sync") + + BaseSyncMonitor.__init__( + self, + account_id, + namespace_id, + email_address, + EVENT_SYNC_FOLDER_ID, + EVENT_SYNC_FOLDER_NAME, + provider_name, + poll_frequency=poll_frequency, + scope="calendar", + ) def sync(self): """Query a remote provider for updates and persist them to the database. This function runs every `self.poll_frequency`. 
""" - self.log.debug('syncing events') + self.log.debug("syncing events") try: deleted_uids, calendar_changes = self.provider.sync_calendars() except AccessNotEnabledError: self.log.warning( - 'Access to provider calendar API not enabled; bypassing sync') + "Access to provider calendar API not enabled; bypassing sync" + ) return with session_scope(self.namespace_id) as db_session: - handle_calendar_deletes(self.namespace_id, deleted_uids, - self.log, db_session) - calendar_uids_and_ids = handle_calendar_updates(self.namespace_id, - calendar_changes, - self.log, - db_session) + handle_calendar_deletes( + self.namespace_id, deleted_uids, self.log, db_session + ) + calendar_uids_and_ids = handle_calendar_updates( + self.namespace_id, calendar_changes, self.log, db_session + ) db_session.commit() for (uid, id_) in calendar_uids_and_ids: @@ -80,22 +90,24 @@ def sync(self): # miss remote updates that happen while the poll loop is executing. sync_timestamp = datetime.utcnow() with session_scope(self.namespace_id) as db_session: - last_sync = db_session.query(Calendar.last_synced).filter( - Calendar.id == id_).scalar() + last_sync = ( + db_session.query(Calendar.last_synced) + .filter(Calendar.id == id_) + .scalar() + ) - event_changes = self.provider.sync_events( - uid, sync_from_time=last_sync) + event_changes = self.provider.sync_events(uid, sync_from_time=last_sync) with session_scope(self.namespace_id) as db_session: - handle_event_updates(self.namespace_id, id_, event_changes, - self.log, db_session) + handle_event_updates( + self.namespace_id, id_, event_changes, self.log, db_session + ) cal = db_session.query(Calendar).get(id_) cal.last_synced = sync_timestamp db_session.commit() -def handle_calendar_deletes(namespace_id, deleted_calendar_uids, log, - db_session): +def handle_calendar_deletes(namespace_id, deleted_calendar_uids, log, db_session): """ Delete any local Calendar rows with uid in `deleted_calendar_uids`. 
This delete cascades to associated events (if the calendar is gone, so are all @@ -104,13 +116,15 @@ def handle_calendar_deletes(namespace_id, deleted_calendar_uids, log, """ deleted_count = 0 for uid in deleted_calendar_uids: - local_calendar = db_session.query(Calendar).filter( - Calendar.namespace_id == namespace_id, - Calendar.uid == uid).first() + local_calendar = ( + db_session.query(Calendar) + .filter(Calendar.namespace_id == namespace_id, Calendar.uid == uid) + .first() + ) if local_calendar is not None: _delete_calendar(db_session, local_calendar) deleted_count += 1 - log.info('deleted calendars', deleted=deleted_count) + log.info("deleted calendars", deleted=deleted_count) def handle_calendar_updates(namespace_id, calendars, log, db_session): @@ -119,11 +133,13 @@ def handle_calendar_updates(namespace_id, calendars, log, db_session): added_count = 0 updated_count = 0 for calendar in calendars: - assert calendar.uid is not None, 'Got remote item with null uid' + assert calendar.uid is not None, "Got remote item with null uid" - local_calendar = db_session.query(Calendar).filter( - Calendar.namespace_id == namespace_id, - Calendar.uid == calendar.uid).first() + local_calendar = ( + db_session.query(Calendar) + .filter(Calendar.namespace_id == namespace_id, Calendar.uid == calendar.uid) + .first() + ) if local_calendar is not None: local_calendar.update(calendar) @@ -137,8 +153,9 @@ def handle_calendar_updates(namespace_id, calendars, log, db_session): db_session.commit() ids_.append((local_calendar.uid, local_calendar.id)) - log.info('synced added and updated calendars', added=added_count, - updated=updated_count) + log.info( + "synced added and updated calendars", added=added_count, updated=updated_count + ) return ids_ @@ -146,31 +163,40 @@ def handle_event_updates(namespace_id, calendar_id, events, log, db_session): """Persists new or updated Event objects to the database.""" added_count = 0 updated_count = 0 - existing_event_query = db_session.query(Event).filter( - Event.namespace_id == namespace_id, - Event.calendar_id == calendar_id).exists() + existing_event_query = ( + db_session.query(Event) + .filter(Event.namespace_id == namespace_id, Event.calendar_id == calendar_id) + .exists() + ) events_exist = db_session.query(existing_event_query).scalar() for event in events: - assert event.uid is not None, 'Got remote item with null uid' + assert event.uid is not None, "Got remote item with null uid" local_event = None if events_exist: # Skip this lookup if there are no local events at all, for faster # first sync. - local_event = db_session.query(Event).filter( - Event.namespace_id == namespace_id, - Event.calendar_id == calendar_id, - Event.uid == event.uid).first() + local_event = ( + db_session.query(Event) + .filter( + Event.namespace_id == namespace_id, + Event.calendar_id == calendar_id, + Event.uid == event.uid, + ) + .first() + ) if local_event is not None: # We also need to mark all overrides as cancelled if we're # cancelling a recurring event. However, note the original event # may not itself be recurring (recurrence may have been added). 
- if isinstance(local_event, RecurringEvent) and \ - event.status == 'cancelled' and \ - local_event.status != 'cancelled': + if ( + isinstance(local_event, RecurringEvent) + and event.status == "cancelled" + and local_event.status != "cancelled" + ): for override in local_event.overrides: - override.status = 'cancelled' + override.status = "cancelled" local_event.update(event) local_event.participants = event.participants @@ -190,29 +216,31 @@ def handle_event_updates(namespace_id, calendar_id, events, log, db_session): # If we just updated/added a recurring event or override, make sure # we link it to the right master event. - if isinstance(event, RecurringEvent) or \ - isinstance(event, RecurringEventOverride): + if isinstance(event, RecurringEvent) or isinstance( + event, RecurringEventOverride + ): link_events(db_session, event) # Batch commits to avoid long transactions that may lock calendar rows. if (added_count + updated_count) % 10 == 0: db_session.commit() - log.info('synced added and updated events', - calendar_id=calendar_id, - added=added_count, - updated=updated_count) + log.info( + "synced added and updated events", + calendar_id=calendar_id, + added=added_count, + updated=updated_count, + ) class GoogleEventSync(EventSync): - def __init__(self, *args, **kwargs): super(GoogleEventSync, self).__init__(*args, **kwargs) with session_scope(self.namespace_id) as db_session: account = db_session.query(Account).get(self.account_id) if ( - self.provider.push_notifications_enabled(account) and - kwargs.get('poll_frequency') is None + self.provider.push_notifications_enabled(account) + and kwargs.get("poll_frequency") is None ): # Run the sync loop more frequently if push notifications are # enabled. Note that we'll only update the calendar if a @@ -229,27 +257,29 @@ def sync(self): currently subscribed to push notificaitons and haven't heard anything new from Google. """ - self.log.debug('syncing events') + self.log.debug("syncing events") try: self._refresh_gpush_subscriptions() except AccessNotEnabledError: self.log.warning( - 'Access to provider calendar API not enabled; ' - 'cannot sign up for push notifications') + "Access to provider calendar API not enabled; " + "cannot sign up for push notifications" + ) except OAuthError: # Not enough of a reason to halt the sync! self.log.warning( - 'Not authorized to set up push notifications for account' - '(Safe to ignore this message if not recurring.)', - account_id=self.account_id) + "Not authorized to set up push notifications for account" + "(Safe to ignore this message if not recurring.)", + account_id=self.account_id, + ) try: self._sync_data() except AccessNotEnabledError: self.log.warning( - 'Access to provider calendar API not enabled; ' - 'bypassing sync') + "Access to provider calendar API not enabled; " "bypassing sync" + ) def _refresh_gpush_subscriptions(self): with session_scope(self.namespace_id) as db_session: @@ -263,8 +293,9 @@ def _refresh_gpush_subscriptions(self): if expir is not None: account.new_calendar_list_watch(expir) - cals_to_update = (cal for cal in account.namespace.calendars - if cal.needs_new_watch()) + cals_to_update = ( + cal for cal in account.namespace.calendars if cal.needs_new_watch() + ) for cal in cals_to_update: try: expir = self.provider.watch_calendar(account, cal) @@ -273,31 +304,37 @@ def _refresh_gpush_subscriptions(self): except HTTPError as exc: if exc.response.status_code == 404: self.log.warning( - 'Tried to subscribe to push notifications' - ' for a deleted or inaccessible calendar. 
Deleting' - ' local calendar', - calendar_id=cal.id, calendar_uid=cal.uid) + "Tried to subscribe to push notifications" + " for a deleted or inaccessible calendar. Deleting" + " local calendar", + calendar_id=cal.id, + calendar_uid=cal.uid, + ) _delete_calendar(db_session, cal) else: self.log.error( - 'Error while updating calendar push notification ' - 'subscription', cal_id=cal.id, calendar_uid=cal.uid, - status_code=exc.response.status_code) + "Error while updating calendar push notification " + "subscription", + cal_id=cal.id, + calendar_uid=cal.uid, + status_code=exc.response.status_code, + ) raise exc def _sync_data(self): with session_scope(self.namespace_id) as db_session: account = db_session.query(Account).get(self.account_id) - if ( - account.should_update_calendars( - MAX_TIME_WITHOUT_SYNC, timedelta(seconds=POLL_FREQUENCY)) + if account.should_update_calendars( + MAX_TIME_WITHOUT_SYNC, timedelta(seconds=POLL_FREQUENCY) ): self._sync_calendar_list(account, db_session) stale_calendars = ( - cal for cal in account.namespace.calendars - if cal.should_update_events(MAX_TIME_WITHOUT_SYNC, - timedelta(seconds=POLL_FREQUENCY)) + cal + for cal in account.namespace.calendars + if cal.should_update_events( + MAX_TIME_WITHOUT_SYNC, timedelta(seconds=POLL_FREQUENCY) + ) ) # Sync user's primary calendar first. Note that the UID of the @@ -313,27 +350,29 @@ def _sync_data(self): except HTTPError as exc: if exc.response.status_code == 404: self.log.warning( - 'Tried to sync a deleted calendar.' - 'Deleting local calendar.', - calendar_id=cal.id, calendar_uid=cal.uid) + "Tried to sync a deleted calendar." + "Deleting local calendar.", + calendar_id=cal.id, + calendar_uid=cal.uid, + ) _delete_calendar(db_session, cal) else: self.log.error( - 'Error while syncing calendar', - cal_id=cal.id, calendar_uid=cal.uid, - status_code=exc.response.status_code) + "Error while syncing calendar", + cal_id=cal.id, + calendar_uid=cal.uid, + status_code=exc.response.status_code, + ) raise exc def _sync_calendar_list(self, account, db_session): sync_timestamp = datetime.utcnow() deleted_uids, calendar_changes = self.provider.sync_calendars() - handle_calendar_deletes(self.namespace_id, deleted_uids, - self.log, db_session) - handle_calendar_updates(self.namespace_id, - calendar_changes, - self.log, - db_session) + handle_calendar_deletes(self.namespace_id, deleted_uids, self.log, db_session) + handle_calendar_updates( + self.namespace_id, calendar_changes, self.log, db_session + ) account.last_calendar_list_sync = sync_timestamp db_session.commit() @@ -341,10 +380,12 @@ def _sync_calendar_list(self, account, db_session): def _sync_calendar(self, calendar, db_session): sync_timestamp = datetime.utcnow() event_changes = self.provider.sync_events( - calendar.uid, sync_from_time=calendar.last_synced) + calendar.uid, sync_from_time=calendar.last_synced + ) - handle_event_updates(self.namespace_id, calendar.id, - event_changes, self.log, db_session) + handle_event_updates( + self.namespace_id, calendar.id, event_changes, self.log, db_session + ) calendar.last_synced = sync_timestamp db_session.commit() diff --git a/inbox/events/timezones.py b/inbox/events/timezones.py index 0fa033e11..2e7b3a014 100644 --- a/inbox/events/timezones.py +++ b/inbox/events/timezones.py @@ -4,261 +4,263 @@ import pytz windows_timezones = { - 'AUS Central Standard Time': 'Australia/Darwin', - 'AUS Eastern Standard Time': 'Australia/Sydney', - 'Afghanistan Standard Time': 'Asia/Kabul', - 'Alaskan Standard Time': 'America/Anchorage', - 'Arab 
Standard Time': 'Asia/Riyadh', - 'Arabian Standard Time': 'Asia/Dubai', - 'Arabic Standard Time': 'Asia/Baghdad', - 'Argentina Standard Time': 'America/Buenos_Aires', - 'Atlantic Standard Time': 'America/Halifax', - 'Azerbaijan Standard Time': 'Asia/Baku', - 'Azores Standard Time': 'Atlantic/Azores', - 'Bahia Standard Time': 'America/Bahia', - 'Bangladesh Standard Time': 'Asia/Dhaka', - 'Belarus Standard Time': 'Europe/Minsk', - 'Canada Central Standard Time': 'America/Regina', - 'Cape Verde Standard Time': 'Atlantic/Cape_Verde', - 'Caucasus Standard Time': 'Asia/Yerevan', - 'Cen. Australia Standard Time': 'Australia/Adelaide', - 'Central America Standard Time': 'America/Guatemala', - 'Central Asia Standard Time': 'Asia/Almaty', - 'Central Brazilian Standard Time': 'America/Cuiaba', - 'Central Europe Standard Time': 'Europe/Budapest', - 'Central European Standard Time': 'Europe/Warsaw', - 'Central Pacific Standard Time': 'Pacific/Guadalcanal', - 'Central Standard Time': 'America/Chicago', - 'Central Standard Time (Mexico)': 'America/Mexico_City', - 'China Standard Time': 'Asia/Shanghai', - 'Dateline Standard Time': 'Etc/GMT+12', - 'E. Africa Standard Time': 'Africa/Nairobi', - 'E. Australia Standard Time': 'Australia/Brisbane', - 'E. Europe Standard Time': 'Europe/Chisinau', - 'E. South America Standard Time': 'America/Sao_Paulo', - 'Eastern Standard Time': 'America/New_York', - 'Eastern Standard Time (Mexico)': 'America/Cancun', - 'Egypt Standard Time': 'Africa/Cairo', - 'Ekaterinburg Standard Time': 'Asia/Yekaterinburg', - 'FLE Standard Time': 'Europe/Kiev', - 'Fiji Standard Time': 'Pacific/Fiji', - 'GMT Standard Time': 'Europe/London', - 'GTB Standard Time': 'Europe/Bucharest', - 'Georgian Standard Time': 'Asia/Tbilisi', - 'Greenland Standard Time': 'America/Godthab', - 'Greenwich Standard Time': 'Atlantic/Reykjavik', - 'Hawaiian Standard Time': 'Pacific/Honolulu', - 'India Standard Time': 'Asia/Calcutta', - 'Iran Standard Time': 'Asia/Tehran', - 'Israel Standard Time': 'Asia/Jerusalem', - 'Jordan Standard Time': 'Asia/Amman', - 'Kaliningrad Standard Time': 'Europe/Kaliningrad', - 'Korea Standard Time': 'Asia/Seoul', - 'Libya Standard Time': 'Africa/Tripoli', - 'Line Islands Standard Time': 'Pacific/Kiritimati', - 'Magadan Standard Time': 'Asia/Magadan', - 'Mauritius Standard Time': 'Indian/Mauritius', - 'Middle East Standard Time': 'Asia/Beirut', - 'Montevideo Standard Time': 'America/Montevideo', - 'Morocco Standard Time': 'Africa/Casablanca', - 'Mountain Standard Time': 'America/Denver', - 'Mountain Standard Time (Mexico)': 'America/Chihuahua', - 'Myanmar Standard Time': 'Asia/Rangoon', - 'N. 
Central Asia Standard Time': 'Asia/Novosibirsk', - 'Namibia Standard Time': 'Africa/Windhoek', - 'Nepal Standard Time': 'Asia/Katmandu', - 'New Zealand Standard Time': 'Pacific/Auckland', - 'Newfoundland Standard Time': 'America/St_Johns', - 'North Asia East Standard Time': 'Asia/Irkutsk', - 'North Asia Standard Time': 'Asia/Krasnoyarsk', - 'North Korea Standard Time': 'Asia/Pyongyang', - 'Pacific SA Standard Time': 'America/Santiago', - 'Pacific Standard Time': 'America/Los_Angeles', - 'Pacific Standard Time (Mexico)': 'America/Santa_Isabel', - 'Pakistan Standard Time': 'Asia/Karachi', - 'Paraguay Standard Time': 'America/Asuncion', - 'Romance Standard Time': 'Europe/Paris', - 'Russia Time Zone 10': 'Asia/Srednekolymsk', - 'Russia Time Zone 11': 'Asia/Kamchatka', - 'Russia Time Zone 3': 'Europe/Samara', - 'Russian Standard Time': 'Europe/Moscow', - 'SA Eastern Standard Time': 'America/Cayenne', - 'SA Pacific Standard Time': 'America/Bogota', - 'SA Western Standard Time': 'America/La_Paz', - 'SE Asia Standard Time': 'Asia/Bangkok', - 'Samoa Standard Time': 'Pacific/Apia', - 'Singapore Standard Time': 'Asia/Singapore', - 'South Africa Standard Time': 'Africa/Johannesburg', - 'Sri Lanka Standard Time': 'Asia/Colombo', - 'Syria Standard Time': 'Asia/Damascus', - 'Taipei Standard Time': 'Asia/Taipei', - 'Tasmania Standard Time': 'Australia/Hobart', - 'Tokyo Standard Time': 'Asia/Tokyo', - 'Tonga Standard Time': 'Pacific/Tongatapu', - 'Turkey Standard Time': 'Europe/Istanbul', - 'US Eastern Standard Time': 'America/Indianapolis', - 'US Mountain Standard Time': 'America/Phoenix', - 'UTC': 'Etc/GMT', - 'UTC+12': 'Etc/GMT-12', - 'UTC-02': 'Etc/GMT+2', - 'UTC-11': 'Etc/GMT+11', - 'Ulaanbaatar Standard Time': 'Asia/Ulaanbaatar', - 'Venezuela Standard Time': 'America/Caracas', - 'Vladivostok Standard Time': 'Asia/Vladivostok', - 'W. Australia Standard Time': 'Australia/Perth', - 'W. Central Africa Standard Time': 'Africa/Lagos', - 'W. Europe Standard Time': 'Europe/Berlin', - 'West Asia Standard Time': 'Asia/Tashkent', - 'West Pacific Standard Time': 'Pacific/Port_Moresby', - 'Yakutsk Standard Time': 'Asia/Yakutsk' + "AUS Central Standard Time": "Australia/Darwin", + "AUS Eastern Standard Time": "Australia/Sydney", + "Afghanistan Standard Time": "Asia/Kabul", + "Alaskan Standard Time": "America/Anchorage", + "Arab Standard Time": "Asia/Riyadh", + "Arabian Standard Time": "Asia/Dubai", + "Arabic Standard Time": "Asia/Baghdad", + "Argentina Standard Time": "America/Buenos_Aires", + "Atlantic Standard Time": "America/Halifax", + "Azerbaijan Standard Time": "Asia/Baku", + "Azores Standard Time": "Atlantic/Azores", + "Bahia Standard Time": "America/Bahia", + "Bangladesh Standard Time": "Asia/Dhaka", + "Belarus Standard Time": "Europe/Minsk", + "Canada Central Standard Time": "America/Regina", + "Cape Verde Standard Time": "Atlantic/Cape_Verde", + "Caucasus Standard Time": "Asia/Yerevan", + "Cen. Australia Standard Time": "Australia/Adelaide", + "Central America Standard Time": "America/Guatemala", + "Central Asia Standard Time": "Asia/Almaty", + "Central Brazilian Standard Time": "America/Cuiaba", + "Central Europe Standard Time": "Europe/Budapest", + "Central European Standard Time": "Europe/Warsaw", + "Central Pacific Standard Time": "Pacific/Guadalcanal", + "Central Standard Time": "America/Chicago", + "Central Standard Time (Mexico)": "America/Mexico_City", + "China Standard Time": "Asia/Shanghai", + "Dateline Standard Time": "Etc/GMT+12", + "E. Africa Standard Time": "Africa/Nairobi", + "E. 
Australia Standard Time": "Australia/Brisbane", + "E. Europe Standard Time": "Europe/Chisinau", + "E. South America Standard Time": "America/Sao_Paulo", + "Eastern Standard Time": "America/New_York", + "Eastern Standard Time (Mexico)": "America/Cancun", + "Egypt Standard Time": "Africa/Cairo", + "Ekaterinburg Standard Time": "Asia/Yekaterinburg", + "FLE Standard Time": "Europe/Kiev", + "Fiji Standard Time": "Pacific/Fiji", + "GMT Standard Time": "Europe/London", + "GTB Standard Time": "Europe/Bucharest", + "Georgian Standard Time": "Asia/Tbilisi", + "Greenland Standard Time": "America/Godthab", + "Greenwich Standard Time": "Atlantic/Reykjavik", + "Hawaiian Standard Time": "Pacific/Honolulu", + "India Standard Time": "Asia/Calcutta", + "Iran Standard Time": "Asia/Tehran", + "Israel Standard Time": "Asia/Jerusalem", + "Jordan Standard Time": "Asia/Amman", + "Kaliningrad Standard Time": "Europe/Kaliningrad", + "Korea Standard Time": "Asia/Seoul", + "Libya Standard Time": "Africa/Tripoli", + "Line Islands Standard Time": "Pacific/Kiritimati", + "Magadan Standard Time": "Asia/Magadan", + "Mauritius Standard Time": "Indian/Mauritius", + "Middle East Standard Time": "Asia/Beirut", + "Montevideo Standard Time": "America/Montevideo", + "Morocco Standard Time": "Africa/Casablanca", + "Mountain Standard Time": "America/Denver", + "Mountain Standard Time (Mexico)": "America/Chihuahua", + "Myanmar Standard Time": "Asia/Rangoon", + "N. Central Asia Standard Time": "Asia/Novosibirsk", + "Namibia Standard Time": "Africa/Windhoek", + "Nepal Standard Time": "Asia/Katmandu", + "New Zealand Standard Time": "Pacific/Auckland", + "Newfoundland Standard Time": "America/St_Johns", + "North Asia East Standard Time": "Asia/Irkutsk", + "North Asia Standard Time": "Asia/Krasnoyarsk", + "North Korea Standard Time": "Asia/Pyongyang", + "Pacific SA Standard Time": "America/Santiago", + "Pacific Standard Time": "America/Los_Angeles", + "Pacific Standard Time (Mexico)": "America/Santa_Isabel", + "Pakistan Standard Time": "Asia/Karachi", + "Paraguay Standard Time": "America/Asuncion", + "Romance Standard Time": "Europe/Paris", + "Russia Time Zone 10": "Asia/Srednekolymsk", + "Russia Time Zone 11": "Asia/Kamchatka", + "Russia Time Zone 3": "Europe/Samara", + "Russian Standard Time": "Europe/Moscow", + "SA Eastern Standard Time": "America/Cayenne", + "SA Pacific Standard Time": "America/Bogota", + "SA Western Standard Time": "America/La_Paz", + "SE Asia Standard Time": "Asia/Bangkok", + "Samoa Standard Time": "Pacific/Apia", + "Singapore Standard Time": "Asia/Singapore", + "South Africa Standard Time": "Africa/Johannesburg", + "Sri Lanka Standard Time": "Asia/Colombo", + "Syria Standard Time": "Asia/Damascus", + "Taipei Standard Time": "Asia/Taipei", + "Tasmania Standard Time": "Australia/Hobart", + "Tokyo Standard Time": "Asia/Tokyo", + "Tonga Standard Time": "Pacific/Tongatapu", + "Turkey Standard Time": "Europe/Istanbul", + "US Eastern Standard Time": "America/Indianapolis", + "US Mountain Standard Time": "America/Phoenix", + "UTC": "Etc/GMT", + "UTC+12": "Etc/GMT-12", + "UTC-02": "Etc/GMT+2", + "UTC-11": "Etc/GMT+11", + "Ulaanbaatar Standard Time": "Asia/Ulaanbaatar", + "Venezuela Standard Time": "America/Caracas", + "Vladivostok Standard Time": "Asia/Vladivostok", + "W. Australia Standard Time": "Australia/Perth", + "W. Central Africa Standard Time": "Africa/Lagos", + "W. 
Europe Standard Time": "Europe/Berlin", + "West Asia Standard Time": "Asia/Tashkent", + "West Pacific Standard Time": "Pacific/Port_Moresby", + "Yakutsk Standard Time": "Asia/Yakutsk", } # Also add the friendly timezone names UTC_friendly_timezones = { - '(UTC-12:00) International Date Line West': 'Etc/GMT+12', - '(UTC-11:00) Coordinated Universal Time-11': 'Etc/GMT+11', - '(UTC-10:00) Hawaii': 'Pacific/Honolulu', - '(UTC-09:00) Alaska': 'America/Anchorage', - '(UTC-08:00) Baja California': 'America/Los_Angeles', - '(UTC-08:00) Pacific Time (US & Canada)': 'America/Los_Angeles', - '(UTC-07:00) Arizona': 'America/Phoenix', - '(UTC-07:00) Chihuahua, La Paz, Mazatlan': 'America/Chihuahua', - '(UTC-07:00) Mountain Time (US & Canada)': 'America/Denver', - '(UTC-06:00) Central America': 'America/Guatemala', - '(UTC-06:00) Central Time (US & Canada)': 'America/Chicago', - '(UTC-06:00) Guadalajara, Mexico City, Monterrey': 'America/Mexico_City', - '(UTC-06:00) Saskatchewan': 'America/Regina', - '(UTC-05:00) Bogota, Lima, Quito': 'America/Bogota', - '(UTC-05:00) Bogota, Lima, Quito, Rio Branco': 'America/Bogota', - '(UTC-05:00) Eastern Time (US & Canada)': 'America/New_York', - '(UTC-05:00) Indiana (East)': 'America/Indianapolis', - '(UTC-04:30) Caracas': 'America/Caracas', - '(UTC-04:00) Asuncion': 'America/Asuncion', - '(UTC-04:00) Atlantic Time (Canada)': 'America/Halifax', - '(UTC-04:00) Cuiaba': 'America/Cuiaba', - '(UTC-04:00) Georgetown, La Paz, Manaus, San Juan': 'America/La_Paz', - '(UTC-03:00) Santiago': 'America/Santiago', - '(UTC-03:30) Newfoundland': 'America/St_Johns', - '(UTC-03:30) Newfoundland and Labrador': 'America/St_Johns', - '(UTC-03:00) Brasilia': 'America/Sao_Paulo', - '(UTC-03:00) Buenos Aires': 'America/Buenos_Aires', - '(UTC-03:00) Buenos Aires, Georgetown': 'America/Buenos_Aires', - '(UTC-03:00) Cayenne, Fortaleza': 'America/Cayenne', - '(UTC-03:00) Greenland': 'America/Godthab', - '(UTC-03:00) Montevideo': 'America/Montevideo', - '(UTC-03:00) Salvador': 'America/Bahia', - '(UTC-02:00) Coordinated Universal Time-02': 'America/Noronha', - '(UTC-02:00) Mid-Atlantic': 'Etc/GMT-2', - '(UTC-01:00) Azores': 'Atlantic/Azores', - '(UTC-01:00) Cabo Verde Is.': 'Atlantic/Cape_Verde', - '(UTC-01:00) Cape Verde Islands': 'Atlantic/Cape_Verde', - '(UTC) Casablanca': 'Africa/Casablanca', - '(UTC) Coordinated Universal Time': 'Etc/GMT', - '(UTC) Greenwich Mean Time: Dublin, Edinburgh, Lisbon, London': 'Etc/GMT', - '(UTC) Dublin, Edinburgh, Lisbon, London': 'Europe/London', - '(UTC) Monrovia, Reykjavik': 'Atlantic/Reykjavik', - '(UTC+01:00) Amsterdam, Berlin, Bern, Rome, Stockholm, Vienna': 'Europe/Berlin', - '(UTC+01:00) Belgrade, Bratislava, Budapest, Ljubljana, Prague': 'Europe/Budapest', - '(UTC+01:00) Brussels, Copenhagen, Madrid, Paris': 'Europe/Paris', - '(UTC+01:00) Sarajevo, Skopje, Warsaw, Zagreb': 'Europe/Warsaw', - '(UTC+01:00) West Central Africa': 'Africa/Lagos', - '(UTC+01:00) Windhoek': 'Africa/Windhoek', - '(UTC+02:00) Amman': 'Asia/Amman', - '(UTC+02:00) Athens, Bucharest': 'Europe/Bucharest', - '(UTC+02:00) Athens, Istanbul, Minsk': 'Europe/Bucharest', - '(UTC+02:00) Bucharest': 'Europe/Bucharest', - '(UTC+02:00) Beirut': 'Asia/Beirut', - '(UTC+02:00) Cairo': 'Africa/Cairo', - '(UTC+02:00) Damascus': 'Asia/Damascus', - '(UTC+02:00) E. 
Europe': 'Europe/Chisinau', - '(UTC+02:00) Harare, Pretoria': 'Africa/Johannesburg', - '(UTC+02:00) Helsinki, Kyiv, Riga, Sofia, Tallinn, Vilnius': 'Europe/Kiev', - '(UTC+02:00) Helsinki, Kiev, Riga, Sofia, Tallinn, Vilnius': 'Europe/Kiev', - '(UTC+02:00) Istanbul': 'Europe/Istanbul', - '(UTC+02:00) Jerusalem': 'Asia/Jerusalem', - '(UTC+02:00) Kaliningrad (RTZ 1)': 'Europe/Kaliningrad', - '(UTC+02:00) Tripoli': 'Africa/Tripoli', - '(UTC+03:00) Baghdad': 'Asia/Baghdad', - '(UTC+03:00) Kuwait, Riyadh': 'Asia/Riyadh', - '(UTC+03:00) Minsk': 'Europe/Minsk', - '(UTC+03:00) Moscow, St. Petersburg, Volgograd': 'Europe/Moscow', - '(UTC+03:00) Moscow, St. Petersburg, Volgograd (RTZ 2)': 'Europe/Moscow', - '(UTC+03:00) Nairobi': 'Africa/Nairobi', - '(UTC+03:30) Tehran': 'Asia/Tehran', - '(UTC+04:00) Abu Dhabi, Muscat': 'Asia/Dubai', - '(UTC+04:00) Baku': 'Asia/Baku', - '(UTC+04:00) Baku, Tbilisi, Yerevan': 'Asia/Baku', - '(UTC+04:00) Izhevsk, Samara (RTZ 3)': 'Europe/Samara', - '(UTC+04:00) Port Louis': 'Indian/Mauritius', - '(UTC+04:00) Tbilisi': 'Asia/Tbilisi', - '(UTC+04:00) Yerevan': 'Asia/Yerevan', - '(UTC+04:30) Kabul': 'Asia/Kabul', - '(UTC+05:00) Ashgabat, Tashkent': 'Asia/Tashkent', - '(UTC+05:00) Ekaterinburg': 'Asia/Yekaterinburg', - '(UTC+05:00) Ekaterinburg (RTZ 4)': 'Asia/Yekaterinburg', - '(UTC+05:00) Islamabad, Karachi': 'Asia/Karachi', - '(UTC+05:30) Chennai, Kolkata, Mumbai, New Delhi': 'Asia/Calcutta', - '(UTC+06:00) Sri Jayawardenepura': 'Asia/Colombo', - '(UTC+05:45) Kathmandu': 'Asia/Katmandu', - '(UTC+06:00) Astana': 'Asia/Almaty', - '(UTC+06:00) Astana, Dhaka': 'Asia/Almaty', - '(UTC+06:00) Dhaka': 'Asia/Dhaka', - '(UTC+06:00) Almaty, Novosibirsk': 'Asia/Novosibirsk', - '(UTC+06:00) Novosibirsk (RTZ 5)': 'Asia/Novosibirsk', - '(UTC+06:30) Yangon Rangoon': 'Asia/Rangoon', - '(UTC+06:30) Yangon (Rangoon)': 'Asia/Rangoon', - '(UTC+07:00) Bangkok, Hanoi, Jakarta': 'Asia/Bangkok', - '(UTC+07:00) Krasnoyarsk': 'Asia/Krasnoyarsk', - '(UTC+07:00) Krasnoyarsk (RTZ 6)': 'Asia/Krasnoyarsk', - '(UTC+08:00) Beijing, Chongqing, Hong Kong, Urumqi': 'Asia/Shanghai', - '(UTC+08:00) Irkutsk, Ulaanbaatar': 'Asia/Irkutsk', - '(UTC+08:00) Irkutsk (RTZ 7)': 'Asia/Irkutsk', - '(UTC+08:00) Kuala Lumpur, Singapore': 'Asia/Singapore', - '(UTC+08:00) Perth': 'Australia/Perth', - '(UTC+08:00) Taipei': 'Asia/Taipei', - '(UTC+08:00) Ulaanbaatar': 'Asia/Ulaanbaatar', - '(UTC+09:00) Osaka, Sapporo, Tokyo': 'Asia/Tokyo', - '(UTC+09:00) Seoul': 'Asia/Seoul', - '(UTC+09:00) Yakutsk': 'Asia/Yakutsk', - '(UTC+09:00) Yakutsk (RTZ 8)': 'Asia/Yakutsk', - '(UTC+09:30) Adelaide': 'Australia/Adelaide', - '(UTC+09:30) Darwin': 'Australia/Darwin', - '(UTC+10:00) Brisbane': 'Australia/Brisbane', - '(UTC+10:00) Canberra, Melborune, Sydney': 'Australia/Sydney', - '(UTC+10:00) Guam, Port Moresby': 'Pacific/Port_Moresby', - '(UTC+10:00) Hobart': 'Australia/Hobart', - '(UTC+10:00) Magadan': 'Asia/Magadan', - '(UTC+10:00) Vladivostok': 'Asia/Vladivostok', - '(UTC+10:00) Vladivostok, Magadan (RTZ 9)': 'Asia/Vladivostok', - '(UTC+11:00) Chokurdakh (RTZ 10)': 'Asia/Srednekolymsk', - '(UTC+11:00) Solomon Is., New Caledonia': 'Pacific/Guadalcanal', - '(UTC+12:00) Auckland, Wellington': 'Pacific/Auckland', - '(UTC+12:00) Coordinated Universal Time+12': 'Pacific/Tarawa', - '(UTC+12:00) Fiji': 'Pacific/Fiji', - '(UTC+12:00) Anadyr, Petropavlovsk-Kamchatsky': 'Asia/Kamchatka', - '(UTC+12:00) Anadyr, Petropavlovsk-Kamchatsky (RTZ 11)': 'Asia/Kamchatka', - '(UTC+13:00) Nuku\'alofa': 'Pacific/Tongatapu', - '(UTC+13:00) Samoa': 
'Pacific/Apia', - '(UTC+14:00) Kiritimati Island': 'Pacific/Kiritimati' + "(UTC-12:00) International Date Line West": "Etc/GMT+12", + "(UTC-11:00) Coordinated Universal Time-11": "Etc/GMT+11", + "(UTC-10:00) Hawaii": "Pacific/Honolulu", + "(UTC-09:00) Alaska": "America/Anchorage", + "(UTC-08:00) Baja California": "America/Los_Angeles", + "(UTC-08:00) Pacific Time (US & Canada)": "America/Los_Angeles", + "(UTC-07:00) Arizona": "America/Phoenix", + "(UTC-07:00) Chihuahua, La Paz, Mazatlan": "America/Chihuahua", + "(UTC-07:00) Mountain Time (US & Canada)": "America/Denver", + "(UTC-06:00) Central America": "America/Guatemala", + "(UTC-06:00) Central Time (US & Canada)": "America/Chicago", + "(UTC-06:00) Guadalajara, Mexico City, Monterrey": "America/Mexico_City", + "(UTC-06:00) Saskatchewan": "America/Regina", + "(UTC-05:00) Bogota, Lima, Quito": "America/Bogota", + "(UTC-05:00) Bogota, Lima, Quito, Rio Branco": "America/Bogota", + "(UTC-05:00) Eastern Time (US & Canada)": "America/New_York", + "(UTC-05:00) Indiana (East)": "America/Indianapolis", + "(UTC-04:30) Caracas": "America/Caracas", + "(UTC-04:00) Asuncion": "America/Asuncion", + "(UTC-04:00) Atlantic Time (Canada)": "America/Halifax", + "(UTC-04:00) Cuiaba": "America/Cuiaba", + "(UTC-04:00) Georgetown, La Paz, Manaus, San Juan": "America/La_Paz", + "(UTC-03:00) Santiago": "America/Santiago", + "(UTC-03:30) Newfoundland": "America/St_Johns", + "(UTC-03:30) Newfoundland and Labrador": "America/St_Johns", + "(UTC-03:00) Brasilia": "America/Sao_Paulo", + "(UTC-03:00) Buenos Aires": "America/Buenos_Aires", + "(UTC-03:00) Buenos Aires, Georgetown": "America/Buenos_Aires", + "(UTC-03:00) Cayenne, Fortaleza": "America/Cayenne", + "(UTC-03:00) Greenland": "America/Godthab", + "(UTC-03:00) Montevideo": "America/Montevideo", + "(UTC-03:00) Salvador": "America/Bahia", + "(UTC-02:00) Coordinated Universal Time-02": "America/Noronha", + "(UTC-02:00) Mid-Atlantic": "Etc/GMT-2", + "(UTC-01:00) Azores": "Atlantic/Azores", + "(UTC-01:00) Cabo Verde Is.": "Atlantic/Cape_Verde", + "(UTC-01:00) Cape Verde Islands": "Atlantic/Cape_Verde", + "(UTC) Casablanca": "Africa/Casablanca", + "(UTC) Coordinated Universal Time": "Etc/GMT", + "(UTC) Greenwich Mean Time: Dublin, Edinburgh, Lisbon, London": "Etc/GMT", + "(UTC) Dublin, Edinburgh, Lisbon, London": "Europe/London", + "(UTC) Monrovia, Reykjavik": "Atlantic/Reykjavik", + "(UTC+01:00) Amsterdam, Berlin, Bern, Rome, Stockholm, Vienna": "Europe/Berlin", + "(UTC+01:00) Belgrade, Bratislava, Budapest, Ljubljana, Prague": "Europe/Budapest", + "(UTC+01:00) Brussels, Copenhagen, Madrid, Paris": "Europe/Paris", + "(UTC+01:00) Sarajevo, Skopje, Warsaw, Zagreb": "Europe/Warsaw", + "(UTC+01:00) West Central Africa": "Africa/Lagos", + "(UTC+01:00) Windhoek": "Africa/Windhoek", + "(UTC+02:00) Amman": "Asia/Amman", + "(UTC+02:00) Athens, Bucharest": "Europe/Bucharest", + "(UTC+02:00) Athens, Istanbul, Minsk": "Europe/Bucharest", + "(UTC+02:00) Bucharest": "Europe/Bucharest", + "(UTC+02:00) Beirut": "Asia/Beirut", + "(UTC+02:00) Cairo": "Africa/Cairo", + "(UTC+02:00) Damascus": "Asia/Damascus", + "(UTC+02:00) E. 
Europe": "Europe/Chisinau", + "(UTC+02:00) Harare, Pretoria": "Africa/Johannesburg", + "(UTC+02:00) Helsinki, Kyiv, Riga, Sofia, Tallinn, Vilnius": "Europe/Kiev", + "(UTC+02:00) Helsinki, Kiev, Riga, Sofia, Tallinn, Vilnius": "Europe/Kiev", + "(UTC+02:00) Istanbul": "Europe/Istanbul", + "(UTC+02:00) Jerusalem": "Asia/Jerusalem", + "(UTC+02:00) Kaliningrad (RTZ 1)": "Europe/Kaliningrad", + "(UTC+02:00) Tripoli": "Africa/Tripoli", + "(UTC+03:00) Baghdad": "Asia/Baghdad", + "(UTC+03:00) Kuwait, Riyadh": "Asia/Riyadh", + "(UTC+03:00) Minsk": "Europe/Minsk", + "(UTC+03:00) Moscow, St. Petersburg, Volgograd": "Europe/Moscow", + "(UTC+03:00) Moscow, St. Petersburg, Volgograd (RTZ 2)": "Europe/Moscow", + "(UTC+03:00) Nairobi": "Africa/Nairobi", + "(UTC+03:30) Tehran": "Asia/Tehran", + "(UTC+04:00) Abu Dhabi, Muscat": "Asia/Dubai", + "(UTC+04:00) Baku": "Asia/Baku", + "(UTC+04:00) Baku, Tbilisi, Yerevan": "Asia/Baku", + "(UTC+04:00) Izhevsk, Samara (RTZ 3)": "Europe/Samara", + "(UTC+04:00) Port Louis": "Indian/Mauritius", + "(UTC+04:00) Tbilisi": "Asia/Tbilisi", + "(UTC+04:00) Yerevan": "Asia/Yerevan", + "(UTC+04:30) Kabul": "Asia/Kabul", + "(UTC+05:00) Ashgabat, Tashkent": "Asia/Tashkent", + "(UTC+05:00) Ekaterinburg": "Asia/Yekaterinburg", + "(UTC+05:00) Ekaterinburg (RTZ 4)": "Asia/Yekaterinburg", + "(UTC+05:00) Islamabad, Karachi": "Asia/Karachi", + "(UTC+05:30) Chennai, Kolkata, Mumbai, New Delhi": "Asia/Calcutta", + "(UTC+06:00) Sri Jayawardenepura": "Asia/Colombo", + "(UTC+05:45) Kathmandu": "Asia/Katmandu", + "(UTC+06:00) Astana": "Asia/Almaty", + "(UTC+06:00) Astana, Dhaka": "Asia/Almaty", + "(UTC+06:00) Dhaka": "Asia/Dhaka", + "(UTC+06:00) Almaty, Novosibirsk": "Asia/Novosibirsk", + "(UTC+06:00) Novosibirsk (RTZ 5)": "Asia/Novosibirsk", + "(UTC+06:30) Yangon Rangoon": "Asia/Rangoon", + "(UTC+06:30) Yangon (Rangoon)": "Asia/Rangoon", + "(UTC+07:00) Bangkok, Hanoi, Jakarta": "Asia/Bangkok", + "(UTC+07:00) Krasnoyarsk": "Asia/Krasnoyarsk", + "(UTC+07:00) Krasnoyarsk (RTZ 6)": "Asia/Krasnoyarsk", + "(UTC+08:00) Beijing, Chongqing, Hong Kong, Urumqi": "Asia/Shanghai", + "(UTC+08:00) Irkutsk, Ulaanbaatar": "Asia/Irkutsk", + "(UTC+08:00) Irkutsk (RTZ 7)": "Asia/Irkutsk", + "(UTC+08:00) Kuala Lumpur, Singapore": "Asia/Singapore", + "(UTC+08:00) Perth": "Australia/Perth", + "(UTC+08:00) Taipei": "Asia/Taipei", + "(UTC+08:00) Ulaanbaatar": "Asia/Ulaanbaatar", + "(UTC+09:00) Osaka, Sapporo, Tokyo": "Asia/Tokyo", + "(UTC+09:00) Seoul": "Asia/Seoul", + "(UTC+09:00) Yakutsk": "Asia/Yakutsk", + "(UTC+09:00) Yakutsk (RTZ 8)": "Asia/Yakutsk", + "(UTC+09:30) Adelaide": "Australia/Adelaide", + "(UTC+09:30) Darwin": "Australia/Darwin", + "(UTC+10:00) Brisbane": "Australia/Brisbane", + "(UTC+10:00) Canberra, Melborune, Sydney": "Australia/Sydney", + "(UTC+10:00) Guam, Port Moresby": "Pacific/Port_Moresby", + "(UTC+10:00) Hobart": "Australia/Hobart", + "(UTC+10:00) Magadan": "Asia/Magadan", + "(UTC+10:00) Vladivostok": "Asia/Vladivostok", + "(UTC+10:00) Vladivostok, Magadan (RTZ 9)": "Asia/Vladivostok", + "(UTC+11:00) Chokurdakh (RTZ 10)": "Asia/Srednekolymsk", + "(UTC+11:00) Solomon Is., New Caledonia": "Pacific/Guadalcanal", + "(UTC+12:00) Auckland, Wellington": "Pacific/Auckland", + "(UTC+12:00) Coordinated Universal Time+12": "Pacific/Tarawa", + "(UTC+12:00) Fiji": "Pacific/Fiji", + "(UTC+12:00) Anadyr, Petropavlovsk-Kamchatsky": "Asia/Kamchatka", + "(UTC+12:00) Anadyr, Petropavlovsk-Kamchatsky (RTZ 11)": "Asia/Kamchatka", + "(UTC+13:00) Nuku'alofa": "Pacific/Tongatapu", + "(UTC+13:00) Samoa": 
"Pacific/Apia", + "(UTC+14:00) Kiritimati Island": "Pacific/Kiritimati", } # UTC and GMT are equivalent, and some servers can send the GMT version -GMT_friendly_timezones = {k.replace('(UTC', '(GMT'): v for k, v in UTC_friendly_timezones.items()} +GMT_friendly_timezones = { + k.replace("(UTC", "(GMT"): v for k, v in UTC_friendly_timezones.items() +} # Also add the Olson DB timezones (e.g: Europe/Paris, etc.) olson_timezones = {tz: tz for tz in pytz.all_timezones} # Also add really strange formats we've seen in calendar events awful_timezones = { - 'Pacific': 'America/Los_Angeles', - 'US/PST': 'America/Los_Angeles', - 'Mountain': 'America/Denver', - 'US/MT': 'America/Denver', - 'Central': 'America/Chicago', - 'US/CST': 'America/Chicago', - 'Eastern': 'America/New_York', - 'US/EST': 'America/New_York' + "Pacific": "America/Los_Angeles", + "US/PST": "America/Los_Angeles", + "Mountain": "America/Denver", + "US/MT": "America/Denver", + "Central": "America/Chicago", + "US/CST": "America/Chicago", + "Eastern": "America/New_York", + "US/EST": "America/New_York", } timezones_table = {} diff --git a/inbox/events/util.py b/inbox/events/util.py index 58c3b74d3..b8042f972 100644 --- a/inbox/events/util.py +++ b/inbox/events/util.py @@ -17,29 +17,29 @@ def parse_datetime(datetime): # http://crsmithdev.com/arrow/ if datetime is not None: if isinstance(datetime, int): - return arrow.get(datetime).to('utc') - return arrow.get(parse(datetime)).to('utc') + return arrow.get(datetime).to("utc") + return arrow.get(parse(datetime)).to("utc") def parse_rrule_datetime(datetime, tzinfo=None): # format: 20140904T133000Z (datetimes) or 20140904 (dates) - if datetime[-1] == 'Z': - tzinfo = 'UTC' + if datetime[-1] == "Z": + tzinfo = "UTC" datetime = datetime[:-1] if len(datetime) == 8: - dt = arrow.get(datetime, 'YYYYMMDD').to('utc') + dt = arrow.get(datetime, "YYYYMMDD").to("utc") else: - dt = arrow.get(datetime, 'YYYYMMDDTHHmmss') - if tzinfo and tzinfo != 'UTC': + dt = arrow.get(datetime, "YYYYMMDDTHHmmss") + if tzinfo and tzinfo != "UTC": dt = arrow.get(dt.datetime, tzinfo) return dt def serialize_datetime(d): - return d.strftime('%Y%m%dT%H%M%SZ') + return d.strftime("%Y%m%dT%H%M%SZ") -EventTime = namedtuple('EventTime', ['start', 'end', 'all_day']) +EventTime = namedtuple("EventTime", ["start", "end", "all_day"]) def when_to_event_time(raw): @@ -51,7 +51,7 @@ def parse_google_time(d): # google dictionaries contain either 'date' or 'dateTime' & 'timeZone' # 'dateTime' is in ISO format so is UTC-aware, 'date' is just a date for key, dt in d.iteritems(): - if key != 'timeZone': + if key != "timeZone": return arrow.get(dt) @@ -61,13 +61,13 @@ def google_to_event_time(start_raw, end_raw): if start > end: start, end = (end, start) - if 'date' in start_raw: + if "date" in start_raw: # Google all-day events normally end a 'day' later than they should, # but not always if they were created by a third-party client. 
end = max(start, end.replace(days=-1)) - d = {'start_date': start, 'end_date': end} + d = {"start_date": start, "end_date": end} else: - d = {'start_time': start, 'end_time': end} + d = {"start_time": start, "end_time": end} event_time = when_to_event_time(d) @@ -82,10 +82,16 @@ def valid_base36(uid): def removed_participants(original_participants, update_participants): """Returns the name and addresses of the participants which have been removed.""" - original_table = {part['email'].lower(): part.get('name') for part in original_participants - if 'email' in part} - update_table = {part['email'].lower(): part.get('name') for part in update_participants - if 'email' in part} + original_table = { + part["email"].lower(): part.get("name") + for part in original_participants + if "email" in part + } + update_table = { + part["email"].lower(): part.get("name") + for part in update_participants + if "email" in part + } ret = [] for email in original_table: @@ -99,6 +105,6 @@ def removed_participants(original_participants, update_participants): # all together, but we want to handle deletions separately in our persistence # logic. deleted_uids should be a list of uids, and updated_objects should be a # list of (un-added, uncommitted) model instances. -CalendarSyncResponse = namedtuple('CalendarSyncResponse', - ['deleted_uids', - 'updated_objects']) +CalendarSyncResponse = namedtuple( + "CalendarSyncResponse", ["deleted_uids", "updated_objects"] +) diff --git a/inbox/folder_edge_cases.py b/inbox/folder_edge_cases.py index 9877f4b4a..25446afe2 100644 --- a/inbox/folder_edge_cases.py +++ b/inbox/folder_edge_cases.py @@ -1,33 +1,69 @@ -''' +""" This folder list was generated by aggregating examples of user folders that were not properly labeled as trash, sent, or spam. This list was constructed semi automatically, and manuallly verified. Should we encounter problems with account folders in the future, add them below to test for them. 
-''' +""" localized_folder_names = { - 'trash': {'Gel\xc3\xb6scht', 'Papierkorb', - '\xd0\x9a\xd0\xbe\xd1\x80\xd0\xb7\xd0\xb8\xd0\xbd\xd0\xb0', - '[Imap]/Trash', 'Papelera', 'Borradores', - '[Imap]/\xd0\x9a\xd0\xbe\xd1\x80' - '\xd0\xb7\xd0\xb8\xd0\xbd\xd0\xb0', 'Deleted Items', - '\xd0\xa1\xd0\xbc\xd1\x96\xd1\x82\xd1\x82\xd1\x8f', - 'Papierkorb/Trash', 'Gel\xc3\xb6schte Elemente', - 'Deleted Messages', '[Gmail]/Trash', 'INBOX/Trash', 'Trash', - 'mail/TRASH', 'INBOX.Trash', 'INBOX.\xc9l\xe9ments supprim\xe9s', - 'INBOX.INBOX.Trash'}, - 'spam': {'Roskaposti', 'INBOX.spam', 'INBOX.Spam', 'Skr\xc3\xa4ppost', - 'Spamverdacht', 'spam', 'Spam', '[Gmail]/Spam', '[Imap]/Spam', - '\xe5\x9e\x83\xe5\x9c\xbe\xe9\x82\xae\xe4\xbb\xb6', 'Junk', - 'Junk Mail', 'Junk E-Mail'}, - 'inbox': {u'INBOX'}, - 'sent': {'Postausgang', 'INBOX.Gesendet', '[Gmail]/Sent Mail', - '\xeb\xb3\xb4\xeb\x82\xbc\xed\x8e\xb8\xec\xa7\x80\xed\x95\xa8' - 'Elementos enviados', 'Sent', 'Sent Items', 'Sent Messages', - 'INBOX.Sent Messages', 'Odeslan\xc3\xa9', 'mail/sent-mail', - 'Ko\xc5\xa1', 'INBOX.SentMail', 'Gesendet', - 'Ko\xc5\xa1/Sent Items', 'Gesendete Elemente', - 'INBOX.\xc9l\xe9ments envoy\xe9s', 'INBOX.INBOX.Sent', - 'Objets envoy\xe9s', }} + "trash": { + "Gel\xc3\xb6scht", + "Papierkorb", + "\xd0\x9a\xd0\xbe\xd1\x80\xd0\xb7\xd0\xb8\xd0\xbd\xd0\xb0", + "[Imap]/Trash", + "Papelera", + "Borradores", + "[Imap]/\xd0\x9a\xd0\xbe\xd1\x80" "\xd0\xb7\xd0\xb8\xd0\xbd\xd0\xb0", + "Deleted Items", + "\xd0\xa1\xd0\xbc\xd1\x96\xd1\x82\xd1\x82\xd1\x8f", + "Papierkorb/Trash", + "Gel\xc3\xb6schte Elemente", + "Deleted Messages", + "[Gmail]/Trash", + "INBOX/Trash", + "Trash", + "mail/TRASH", + "INBOX.Trash", + "INBOX.\xc9l\xe9ments supprim\xe9s", + "INBOX.INBOX.Trash", + }, + "spam": { + "Roskaposti", + "INBOX.spam", + "INBOX.Spam", + "Skr\xc3\xa4ppost", + "Spamverdacht", + "spam", + "Spam", + "[Gmail]/Spam", + "[Imap]/Spam", + "\xe5\x9e\x83\xe5\x9c\xbe\xe9\x82\xae\xe4\xbb\xb6", + "Junk", + "Junk Mail", + "Junk E-Mail", + }, + "inbox": {u"INBOX"}, + "sent": { + "Postausgang", + "INBOX.Gesendet", + "[Gmail]/Sent Mail", + "\xeb\xb3\xb4\xeb\x82\xbc\xed\x8e\xb8\xec\xa7\x80\xed\x95\xa8" + "Elementos enviados", + "Sent", + "Sent Items", + "Sent Messages", + "INBOX.Sent Messages", + "Odeslan\xc3\xa9", + "mail/sent-mail", + "Ko\xc5\xa1", + "INBOX.SentMail", + "Gesendet", + "Ko\xc5\xa1/Sent Items", + "Gesendete Elemente", + "INBOX.\xc9l\xe9ments envoy\xe9s", + "INBOX.INBOX.Sent", + "Objets envoy\xe9s", + }, +} diff --git a/inbox/heartbeat/config.py b/inbox/heartbeat/config.py index d65c8afd9..821efa020 100644 --- a/inbox/heartbeat/config.py +++ b/inbox/heartbeat/config.py @@ -4,12 +4,12 @@ STATUS_DATABASE = 1 -ALIVE_EXPIRY = int(config.get('BASE_ALIVE_THRESHOLD', 480)) -REDIS_SHARDS = config.get('REDIS_SHARDS') -REDIS_PORT = int(config.get('REDIS_PORT')) +ALIVE_EXPIRY = int(config.get("BASE_ALIVE_THRESHOLD", 480)) +REDIS_SHARDS = config.get("REDIS_SHARDS") +REDIS_PORT = int(config.get("REDIS_PORT")) -CONTACTS_FOLDER_ID = '-1' -EVENTS_FOLDER_ID = '-2' +CONTACTS_FOLDER_ID = "-1" +EVENTS_FOLDER_ID = "-2" MAX_CONNECTIONS = 70 WAIT_TIMEOUT = 15 @@ -29,9 +29,13 @@ def _get_redis_connection_pool(host, port, db): connection_pool = connection_pool_map.get(host) if connection_pool is None: connection_pool = BlockingConnectionPool( - host=host, port=port, db=db, - max_connections=MAX_CONNECTIONS, timeout=WAIT_TIMEOUT, - socket_timeout=SOCKET_TIMEOUT) + host=host, + port=port, + db=db, + max_connections=MAX_CONNECTIONS, + timeout=WAIT_TIMEOUT, + 
socket_timeout=SOCKET_TIMEOUT, + ) connection_pool_map[host] = connection_pool return connection_pool @@ -45,7 +49,7 @@ def get_redis_client(account_id): account_shard_number = account_redis_shard_number(account_id) host = REDIS_SHARDS[account_shard_number] - connection_pool = _get_redis_connection_pool(host, REDIS_PORT, - STATUS_DATABASE) - return StrictRedis(host, REDIS_PORT, STATUS_DATABASE, - connection_pool=connection_pool) + connection_pool = _get_redis_connection_pool(host, REDIS_PORT, STATUS_DATABASE) + return StrictRedis( + host, REDIS_PORT, STATUS_DATABASE, connection_pool=connection_pool + ) diff --git a/inbox/heartbeat/status.py b/inbox/heartbeat/status.py index 519c52774..08c4c59a3 100644 --- a/inbox/heartbeat/status.py +++ b/inbox/heartbeat/status.py @@ -13,8 +13,8 @@ # More lightweight statuses (dead/alive signals only) - placeholder name Pings -AccountPing = namedtuple('AccountPing', ['id', 'folders']) -FolderPing = namedtuple('FolderPing', ['id', 'alive', 'timestamp']) +AccountPing = namedtuple("AccountPing", ["id", "folders"]) +FolderPing = namedtuple("FolderPing", ["id", "alive", "timestamp"]) def get_ping_status(account_ids, host=None, port=6379, threshold=ALIVE_EXPIRY): @@ -26,8 +26,9 @@ def get_ping_status(account_ids, host=None, port=6379, threshold=ALIVE_EXPIRY): # Get a single account's heartbeat account_id = account_ids[0] folder_heartbeats = store.get_account_folders(account_id) - folders = [FolderPing(int(aid), ts > expiry, ts) - for (aid, ts) in folder_heartbeats] + folders = [ + FolderPing(int(aid), ts > expiry, ts) for (aid, ts) in folder_heartbeats + ] account = AccountPing(account_id, folders) return {account_id: account} else: @@ -37,8 +38,9 @@ def get_ping_status(account_ids, host=None, port=6379, threshold=ALIVE_EXPIRY): for account_id in account_ids: account_id = int(account_id) folder_heartbeats = all_folder_heartbeats[account_id] - folders = [FolderPing(int(aid), ts > expiry, ts) - for (aid, ts) in folder_heartbeats] + folders = [ + FolderPing(int(aid), ts > expiry, ts) for (aid, ts) in folder_heartbeats + ] account = AccountPing(account_id, folders) accounts[account_id] = account diff --git a/inbox/heartbeat/store.py b/inbox/heartbeat/store.py index 45f9dda39..e6f330ac8 100644 --- a/inbox/heartbeat/store.py +++ b/inbox/heartbeat/store.py @@ -3,13 +3,14 @@ from inbox.util.itert import chunk from nylas.logging import get_logger + log = get_logger() # We're doing this weird rename import to make it easier to monkeypatch # get_redis_client. That's the only way we have to test our very brittle # status code. 
import inbox.heartbeat.config as heartbeat_config -from inbox.heartbeat.config import (CONTACTS_FOLDER_ID, EVENTS_FOLDER_ID) +from inbox.heartbeat.config import CONTACTS_FOLDER_ID, EVENTS_FOLDER_ID def safe_failure(f): @@ -17,17 +18,16 @@ def wrapper(*args, **kwargs): try: return f(*args, **kwargs) except Exception: - log.error('Error interacting with heartbeats', - exc_info=True) + log.error("Error interacting with heartbeats", exc_info=True) + return wrapper class HeartbeatStatusKey(object): - def __init__(self, account_id, folder_id): self.account_id = account_id self.folder_id = folder_id - self.key = '{}:{}'.format(self.account_id, self.folder_id) + self.key = "{}:{}".format(self.account_id, self.folder_id) def __repr__(self): return self.key @@ -38,12 +38,11 @@ def __lt__(self, other): return self.folder_id < other.folder_id def __eq__(self, other): - return self.account_id == other.account_id and \ - self.folder_id == other.folder_id + return self.account_id == other.account_id and self.folder_id == other.folder_id @classmethod def all_folders(cls, account_id): - return cls(account_id, '*') + return cls(account_id, "*") @classmethod def contacts(cls, account_id): @@ -55,14 +54,20 @@ def events(cls, account_id): @classmethod def from_string(cls, string_key): - account_id, folder_id = map(int, string_key.split(':')) + account_id, folder_id = map(int, string_key.split(":")) return cls(account_id, folder_id) class HeartbeatStatusProxy(object): - - def __init__(self, account_id, folder_id, folder_name=None, - email_address=None, provider_name=None, device_id=0): + def __init__( + self, + account_id, + folder_id, + folder_name=None, + email_address=None, + provider_name=None, + device_id=0, + ): self.key = HeartbeatStatusKey(account_id, folder_id) self.account_id = account_id self.folder_id = folder_id @@ -76,21 +81,23 @@ def publish(self, **kwargs): self.store.publish(self.key, self.heartbeat_at) except Exception: log = get_logger() - log.error('Error while writing the heartbeat status', - account_id=self.key.account_id, - folder_id=self.key.folder_id, - device_id=self.device_id, - exc_info=True) + log.error( + "Error while writing the heartbeat status", + account_id=self.key.account_id, + folder_id=self.key.folder_id, + device_id=self.device_id, + exc_info=True, + ) @safe_failure def clear(self): - self.store.remove_folders(self.account_id, self.folder_id, - self.device_id) + self.store.remove_folders(self.account_id, self.folder_id, self.device_id) class HeartbeatStore(object): """ Store that proxies requests to Redis with handlers that also update indexes and handle scanning through results. """ + _instances = {} def __init__(self, host=None, port=6379): @@ -158,23 +165,24 @@ def update_accounts_index(self, key): # Find the oldest heartbeat from the account-folder index try: client = heartbeat_config.get_redis_client(key.account_id) - f, oldest_heartbeat = client.zrange(key.account_id, 0, 0, - withscores=True).pop() - client.zadd('account_index', oldest_heartbeat, key.account_id) + f, oldest_heartbeat = client.zrange( + key.account_id, 0, 0, withscores=True + ).pop() + client.zadd("account_index", oldest_heartbeat, key.account_id) except: # If all heartbeats were deleted at the same time as this, the pop # will fail -- ignore it. 
pass def remove_from_folder_index(self, key, client): - client.zrem('folder_index', key) + client.zrem("folder_index", key) if isinstance(key, str): key = HeartbeatStatusKey.from_string(key) client.zrem(key.account_id, key.folder_id) def remove_from_account_index(self, account_id, client): client.delete(account_id) - client.zrem('account_index', account_id) + client.zrem("account_index", account_id) def get_account_folders(self, account_id): client = heartbeat_config.get_redis_client(account_id) @@ -191,7 +199,10 @@ def get_accounts_folders(self, account_ids): # http://stackoverflow.com/questions/8793772/how-to-split-a-sequence-according-to-a-predicate shard_num = heartbeat_config.account_redis_shard_number account_ids_grouped_by_shards = [ - list(v[1]) for v in itertools.groupby(sorted(account_ids, key=shard_num), key=shard_num) + list(v[1]) + for v in itertools.groupby( + sorted(account_ids, key=shard_num), key=shard_num + ) ] results = dict() diff --git a/inbox/ignition.py b/inbox/ignition.py index de21b5d04..bd5059663 100644 --- a/inbox/ignition.py +++ b/inbox/ignition.py @@ -7,20 +7,23 @@ from urllib import quote_plus as urlquote from sqlalchemy import create_engine, event -from inbox.sqlalchemy_ext.util import (ForceStrictMode, - disabled_dubiously_many_queries_warning) +from inbox.sqlalchemy_ext.util import ( + ForceStrictMode, + disabled_dubiously_many_queries_warning, +) from inbox.config import config from inbox.util.stats import statsd_client from nylas.logging import get_logger, find_first_app_frame_and_name from warnings import filterwarnings -filterwarnings('ignore', message='Invalid utf8mb4 character string') + +filterwarnings("ignore", message="Invalid utf8mb4 character string") log = get_logger() -DB_POOL_SIZE = config.get_required('DB_POOL_SIZE') +DB_POOL_SIZE = config.get_required("DB_POOL_SIZE") # Sane default of max overflow=5 if value missing in config. 
-DB_POOL_MAX_OVERFLOW = config.get('DB_POOL_MAX_OVERFLOW') or 5 -DB_POOL_TIMEOUT = config.get('DB_POOL_TIMEOUT') or 60 +DB_POOL_MAX_OVERFLOW = config.get("DB_POOL_MAX_OVERFLOW") or 5 +DB_POOL_TIMEOUT = config.get("DB_POOL_TIMEOUT") or 60 pool_tracker = weakref.WeakKeyDictionary() @@ -33,64 +36,78 @@ def gevent_waiter(fd, hub=gevent.hub.get_hub()): def build_uri(username, password, hostname, port, database_name): - uri_template = 'mysql+mysqldb://{username}:{password}@{hostname}' \ - ':{port}/{database_name}?charset=utf8mb4' - return uri_template.format(username=urlquote(username), - password=urlquote(password), - hostname=urlquote(hostname), - port=port, - database_name=urlquote(database_name)) - - -def engine(database_name, database_uri, pool_size=DB_POOL_SIZE, - max_overflow=DB_POOL_MAX_OVERFLOW, pool_timeout=DB_POOL_TIMEOUT, - echo=False): - engine = create_engine(database_uri, - listeners=[ForceStrictMode()], - isolation_level='READ COMMITTED', - echo=echo, - pool_size=pool_size, - pool_timeout=pool_timeout, - pool_recycle=3600, - max_overflow=max_overflow, - connect_args={'binary_prefix': True, - 'charset': 'utf8mb4', - 'waiter': gevent_waiter, - 'connect_timeout': 60}) - - @event.listens_for(engine, 'checkout') - def receive_checkout(dbapi_connection, connection_record, - connection_proxy): - '''Log checkedout and overflow when a connection is checked out''' + uri_template = ( + "mysql+mysqldb://{username}:{password}@{hostname}" + ":{port}/{database_name}?charset=utf8mb4" + ) + return uri_template.format( + username=urlquote(username), + password=urlquote(password), + hostname=urlquote(hostname), + port=port, + database_name=urlquote(database_name), + ) + + +def engine( + database_name, + database_uri, + pool_size=DB_POOL_SIZE, + max_overflow=DB_POOL_MAX_OVERFLOW, + pool_timeout=DB_POOL_TIMEOUT, + echo=False, +): + engine = create_engine( + database_uri, + listeners=[ForceStrictMode()], + isolation_level="READ COMMITTED", + echo=echo, + pool_size=pool_size, + pool_timeout=pool_timeout, + pool_recycle=3600, + max_overflow=max_overflow, + connect_args={ + "binary_prefix": True, + "charset": "utf8mb4", + "waiter": gevent_waiter, + "connect_timeout": 60, + }, + ) + + @event.listens_for(engine, "checkout") + def receive_checkout(dbapi_connection, connection_record, connection_proxy): + """Log checkedout and overflow when a connection is checked out""" hostname = gethostname().replace(".", "-") process_name = str(config.get("PROCESS_NAME", "main_process")) - if config.get('ENABLE_DB_TXN_METRICS', False): - statsd_client.gauge(".".join( - ["dbconn", database_name, hostname, process_name, - "checkedout"]), - connection_proxy._pool.checkedout()) + if config.get("ENABLE_DB_TXN_METRICS", False): + statsd_client.gauge( + ".".join( + ["dbconn", database_name, hostname, process_name, "checkedout"] + ), + connection_proxy._pool.checkedout(), + ) - statsd_client.gauge(".".join( - ["dbconn", database_name, hostname, process_name, - "overflow"]), - connection_proxy._pool.overflow()) + statsd_client.gauge( + ".".join(["dbconn", database_name, hostname, process_name, "overflow"]), + connection_proxy._pool.overflow(), + ) # Keep track of where and why this connection was checked out. 
log = get_logger() context = log._context._dict.copy() - f, name = find_first_app_frame_and_name(ignores=['sqlalchemy', - 'inbox.ignition', - 'nylas.logging']) - source = '{}:{}'.format(name, f.f_lineno) + f, name = find_first_app_frame_and_name( + ignores=["sqlalchemy", "inbox.ignition", "nylas.logging"] + ) + source = "{}:{}".format(name, f.f_lineno) pool_tracker[dbapi_connection] = { - 'source': source, - 'context': context, - 'checkedout_at': time.time() + "source": source, + "context": context, + "checkedout_at": time.time(), } - @event.listens_for(engine, 'checkin') + @event.listens_for(engine, "checkin") def receive_checkin(dbapi_connection, connection_record): if dbapi_connection in pool_tracker: del pool_tracker[dbapi_connection] @@ -99,43 +116,49 @@ def receive_checkin(dbapi_connection, connection_record): class EngineManager(object): - def __init__(self, databases, users, include_disabled=False): self.engines = {} self._engine_zones = {} keys = set() schema_names = set() - use_proxysql = config.get('USE_PROXYSQL', False) + use_proxysql = config.get("USE_PROXYSQL", False) for database in databases: - hostname = '127.0.0.1' if use_proxysql else database['HOSTNAME'] - port = database['PORT'] - username = users[hostname]['USER'] - password = users[hostname]['PASSWORD'] - zone = database.get('ZONE') - for shard in database['SHARDS']: - schema_name = shard['SCHEMA_NAME'] - key = shard['ID'] + hostname = "127.0.0.1" if use_proxysql else database["HOSTNAME"] + port = database["PORT"] + username = users[hostname]["USER"] + password = users[hostname]["PASSWORD"] + zone = database.get("ZONE") + for shard in database["SHARDS"]: + schema_name = shard["SCHEMA_NAME"] + key = shard["ID"] # Perform some sanity checks on the configuration. assert isinstance(key, int) - assert key not in keys, \ - 'Shard key collision: key {} is repeated'.format(key) - assert schema_name not in schema_names, \ - 'Shard name collision: {} is repeated'.format(schema_name) + assert ( + key not in keys + ), "Shard key collision: key {} is repeated".format(key) + assert ( + schema_name not in schema_names + ), "Shard name collision: {} is repeated".format(schema_name) keys.add(key) schema_names.add(schema_name) - if shard.get('DISABLED') and not include_disabled: - log.info('Not creating engine for disabled shard', - schema_name=schema_name, hostname=hostname, - key=key) + if shard.get("DISABLED") and not include_disabled: + log.info( + "Not creating engine for disabled shard", + schema_name=schema_name, + hostname=hostname, + key=key, + ) continue - uri = build_uri(username=username, - password=password, - database_name=schema_name, - hostname=hostname, - port=port) + uri = build_uri( + username=username, + password=password, + database_name=schema_name, + hostname=hostname, + port=port, + ) self.engines[key] = engine(schema_name, uri) self._engine_zones[key] = zone @@ -151,8 +174,10 @@ def zone_for_id(self, id_): def shards_for_zone(self, zone): return [k for k, z in self._engine_zones.items() if z == zone] -engine_manager = EngineManager(config.get_required('DATABASE_HOSTS'), - config.get_required('DATABASE_USERS')) + +engine_manager = EngineManager( + config.get_required("DATABASE_HOSTS"), config.get_required("DATABASE_USERS") +) def init_db(engine, key=0): @@ -174,9 +199,15 @@ def init_db(engine, key=0): # STOPSHIP(emfree): verify increment = (key << 48) + 1 for table in MailSyncBase.metadata.tables.values(): - event.listen(table, 'after_create', - DDL('ALTER TABLE {tablename} AUTO_INCREMENT={increment}'. 
- format(tablename=table, increment=increment))) + event.listen( + table, + "after_create", + DDL( + "ALTER TABLE {tablename} AUTO_INCREMENT={increment}".format( + tablename=table, increment=increment + ) + ), + ) with disabled_dubiously_many_queries_warning(): MailSyncBase.metadata.create_all(engine) @@ -191,14 +222,14 @@ def verify_db(engine, schema, key): for table in MailSyncBase.metadata.sorted_tables: # ContactSearchIndexCursor does not need to be checked because there's # only one row in the table - if str(table) == 'contactsearchindexcursor': + if str(table) == "contactsearchindexcursor": continue increment = engine.execute(query.format(schema, table)).scalar() if increment is not None: - assert (increment >> 48) == key, \ - 'table: {}, increment: {}, key: {}'.format( - table, increment, key) + assert (increment >> 48) == key, "table: {}, increment: {}, key: {}".format( + table, increment, key + ) else: # We leverage the following invariants about the sync # schema to make the assertion below: one, in the sync @@ -209,7 +240,7 @@ def verify_db(engine, schema, key): # '*thread' '*actionlog', 'recurringevent*'), because # their id column is instead a foreign_key on their # parent's id column. - parent = list(table.columns['id'].foreign_keys)[0].column.table + parent = list(table.columns["id"].foreign_keys)[0].column.table assert parent in verified verified.add(table) @@ -226,8 +257,9 @@ def reset_invalid_autoincrements(engine, schema, key, dry_run=True): if increment is not None: if (increment >> 48) != key: if not dry_run: - reset_query = "ALTER TABLE {} AUTO_INCREMENT={}". \ - format(table, (key << 48) + 1) + reset_query = "ALTER TABLE {} AUTO_INCREMENT={}".format( + table, (key << 48) + 1 + ) engine.execute(reset_query) reset.add(str(table)) return reset @@ -246,7 +278,5 @@ def reset_invalid_autoincrements(engine, schema, key, dry_run=True): # these are _required_. nylas shouldn't start if these aren't present. 
redis_txn = redis.Redis( - config["TXN_REDIS_HOSTNAME"], - int(config["REDIS_PORT"]), - db=config["TXN_REDIS_DB"], + config["TXN_REDIS_HOSTNAME"], int(config["REDIS_PORT"]), db=config["TXN_REDIS_DB"], ) diff --git a/inbox/instrumentation.py b/inbox/instrumentation.py index e5214eb05..85d647fe1 100644 --- a/inbox/instrumentation.py +++ b/inbox/instrumentation.py @@ -37,7 +37,7 @@ def start(self): try: signal.signal(signal.SIGVTALRM, self._sample) except ValueError: - raise ValueError('Can only sample on the main thread') + raise ValueError("Can only sample on the main thread") signal.setitimer(signal.ITIMER_VIRTUAL, self.interval, 0) @@ -47,25 +47,23 @@ def _sample(self, signum, frame): stack.append(self._format_frame(frame)) frame = frame.f_back - stack = ';'.join(reversed(stack)) + stack = ";".join(reversed(stack)) self._stack_counts[stack] += 1 signal.setitimer(signal.ITIMER_VIRTUAL, self.interval, 0) def _format_frame(self, frame): - return '{}({})'.format(frame.f_code.co_name, - frame.f_globals.get('__name__')) + return "{}({})".format(frame.f_code.co_name, frame.f_globals.get("__name__")) def stats(self): if self._started is None: - return '' + return "" elapsed = time.time() - self._started - lines = ['elapsed {}'.format(elapsed), - 'granularity {}'.format(self.interval)] - ordered_stacks = sorted(self._stack_counts.items(), - key=lambda kv: kv[1], reverse=True) - lines.extend(['{} {}'.format(frame, count) - for frame, count in ordered_stacks]) - return '\n'.join(lines) + '\n' + lines = ["elapsed {}".format(elapsed), "granularity {}".format(self.interval)] + ordered_stacks = sorted( + self._stack_counts.items(), key=lambda kv: kv[1], reverse=True + ) + lines.extend(["{} {}".format(frame, count) for frame, count in ordered_stacks]) + return "\n".join(lines) + "\n" def reset(self): self._started = time.time() @@ -83,10 +81,12 @@ class GreenletTracer(object): seconds. 
""" - def __init__(self, - blocking_sample_period=BLOCKING_SAMPLE_PERIOD, - sampling_interval=GREENLET_SAMPLING_INTERVAL, - logging_interval=LOGGING_INTERVAL): + def __init__( + self, + blocking_sample_period=BLOCKING_SAMPLE_PERIOD, + sampling_interval=GREENLET_SAMPLING_INTERVAL, + logging_interval=LOGGING_INTERVAL, + ): self.blocking_sample_period = blocking_sample_period self.sampling_interval = sampling_interval self.logging_interval = logging_interval @@ -121,27 +121,29 @@ def start(self): def stats(self): total_time = time.time() - self.start_time - idle_fraction = self.time_spent_by_context.get('hub', 0) / total_time + idle_fraction = self.time_spent_by_context.get("hub", 0) / total_time return { - 'times': self.time_spent_by_context, - 'idle_fraction': idle_fraction, - 'total_time': total_time, - 'pending_avgs': self.pending_avgs, - 'cpu_avgs': self.cpu_avgs, - 'total_switches': self.total_switches + "times": self.time_spent_by_context, + "idle_fraction": idle_fraction, + "total_time": total_time, + "pending_avgs": self.pending_avgs, + "cpu_avgs": self.cpu_avgs, + "total_switches": self.total_switches, } def log_stats(self, max_stats=60): total_time = round(time.time() - self.start_time, 2) - greenlets_by_cost = sorted(self.time_spent_by_context.items(), - key=lambda k_v: k_v[1], reverse=True) - formatted_times = {k: round(v, 2) for k, v in - greenlets_by_cost[:max_stats]} - self.log.info('greenlet stats', - times=str(formatted_times), - total_switches=self.total_switches, - total_time=total_time, - pending_avgs=self.pending_avgs) + greenlets_by_cost = sorted( + self.time_spent_by_context.items(), key=lambda k_v: k_v[1], reverse=True + ) + formatted_times = {k: round(v, 2) for k, v in greenlets_by_cost[:max_stats]} + self.log.info( + "greenlet stats", + times=str(formatted_times), + total_switches=self.total_switches, + total_time=total_time, + pending_avgs=self.pending_avgs, + ) self._publish_load_avgs() def _trace(self, event, xxx_todo_changeme): @@ -151,9 +153,9 @@ def _trace(self, event, xxx_todo_changeme): if self._last_switch_time is not None: time_spent = current_time - self._last_switch_time if origin is not self._hub: - context = getattr(origin, 'context', None) + context = getattr(origin, "context", None) else: - context = 'hub' + context = "hub" self.time_spent_by_context[context] += time_spent self._active_greenlet = target self._last_switch_time = current_time @@ -170,11 +172,13 @@ def _notify_greenlet_blocked(self, active_greenlet, current_time): # greenlet.gr_frame doesn't work on another thread -- we have # to get the main thread's frame. frame = sys._current_frames()[self._main_thread_id] - formatted_frame = '\t'.join(traceback.format_stack(frame)) + formatted_frame = "\t".join(traceback.format_stack(frame)) self.log.warning( - 'greenlet blocking', frame=formatted_frame, - context=getattr(active_greenlet, 'context', None), - blocking_greenlet_id=id(active_greenlet)) + "greenlet blocking", + frame=formatted_frame, + context=getattr(active_greenlet, "context", None), + blocking_greenlet_id=id(active_greenlet), + ) def _calculate_pending_avgs(self): # Calculate a "load average" for greenlet scheduling in roughly the @@ -183,26 +187,28 @@ def _calculate_pending_avgs(self): # are waiting to run. pendingcnt = self._hub.loop.pendingcnt for k, v in self.pending_avgs.items(): - exp = math.exp(- self.sampling_interval / (60. * k)) - self.pending_avgs[k] = exp * v + (1. 
- exp) * pendingcnt + exp = math.exp(-self.sampling_interval / (60.0 * k)) + self.pending_avgs[k] = exp * v + (1.0 - exp) * pendingcnt def _calculate_cpu_avgs(self): times = self.process.cpu_times() new_total_time = times.user + times.system delta = new_total_time - self.total_cpu_time for k, v in self.cpu_avgs.items(): - exp = math.exp(- self.sampling_interval / (60. * k)) - self.cpu_avgs[k] = exp * v + (1. - exp) * delta + exp = math.exp(-self.sampling_interval / (60.0 * k)) + self.cpu_avgs[k] = exp * v + (1.0 - exp) * delta self.total_cpu_time = new_total_time def _publish_load_avgs(self): for k, v in self.pending_avgs.items(): - path = 'greenlet_tracer.pending_avg.{}.{}.{:02d}'.format( - self.hostname, self.process_name, k) + path = "greenlet_tracer.pending_avg.{}.{}.{:02d}".format( + self.hostname, self.process_name, k + ) self.statsd_client.gauge(path, v) for k, v in self.cpu_avgs.items(): - path = 'greenlet_tracer.cpu_avg.{}.{}.{:02d}'.format( - self.hostname, self.process_name, k) + path = "greenlet_tracer.cpu_avg.{}.{}.{:02d}".format( + self.hostname, self.process_name, k + ) self.statsd_client.gauge(path, v) def _monitoring_thread(self): @@ -230,18 +236,22 @@ def _run_impl(self): class KillerGreenletTracer(GreenletTracer): - def __init__(self, - blocking_sample_period=BLOCKING_SAMPLE_PERIOD, - sampling_interval=GREENLET_SAMPLING_INTERVAL, - logging_interval=LOGGING_INTERVAL, - max_blocking_time=MAX_BLOCKING_TIME_BEFORE_INTERRUPT): + def __init__( + self, + blocking_sample_period=BLOCKING_SAMPLE_PERIOD, + sampling_interval=GREENLET_SAMPLING_INTERVAL, + logging_interval=LOGGING_INTERVAL, + max_blocking_time=MAX_BLOCKING_TIME_BEFORE_INTERRUPT, + ): self._max_blocking_time = max_blocking_time - super(KillerGreenletTracer, self).__init__(blocking_sample_period, - sampling_interval, - logging_interval) + super(KillerGreenletTracer, self).__init__( + blocking_sample_period, sampling_interval, logging_interval + ) def _notify_greenlet_blocked(self, active_greenlet, current_time): - super(KillerGreenletTracer, self)._notify_greenlet_blocked(active_greenlet, current_time) + super(KillerGreenletTracer, self)._notify_greenlet_blocked( + active_greenlet, current_time + ) if self._last_switch_time is None: return @@ -254,7 +264,8 @@ def _notify_greenlet_blocked(self, active_greenlet, current_time): # throw an exception on this thread saying that we would block forever # (which is true). self.log.warning( - 'interrupting blocked greenlet', - context=getattr(active_greenlet, 'context', None), - blocking_greenlet_id=id(active_greenlet)) + "interrupting blocked greenlet", + context=getattr(active_greenlet, "context", None), + blocking_greenlet_id=id(active_greenlet), + ) thread.interrupt_main() diff --git a/inbox/mailsync/backends/__init__.py b/inbox/mailsync/backends/__init__.py index 93fc87988..c3e168104 100644 --- a/inbox/mailsync/backends/__init__.py +++ b/inbox/mailsync/backends/__init__.py @@ -20,6 +20,8 @@ """ # Allow out-of-tree backend submodules. 
from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) from inbox.util.misc import register_backends + module_registry = register_backends(__name__, __path__) diff --git a/inbox/mailsync/backends/base.py b/inbox/mailsync/backends/base.py index 39912e591..a9e3cb54b 100644 --- a/inbox/mailsync/backends/base.py +++ b/inbox/mailsync/backends/base.py @@ -1,14 +1,15 @@ from gevent import Greenlet, GreenletExit, event from nylas.logging import get_logger + log = get_logger() from inbox.config import config from inbox.util.debug import bind_context from inbox.util.concurrency import retry_with_logging from inbox.models.session import session_scope -THROTTLE_COUNT = config.get('THROTTLE_COUNT', 200) -THROTTLE_WAIT = config.get('THROTTLE_WAIT', 60) +THROTTLE_COUNT = config.get("THROTTLE_COUNT", 200) +THROTTLE_WAIT = config.get("THROTTLE_WAIT", 60) class MailsyncError(Exception): @@ -36,11 +37,11 @@ class BaseMailSyncMonitor(Greenlet): """ def __init__(self, account, heartbeat=1): - bind_context(self, 'mailsyncmonitor', account.id) + bind_context(self, "mailsyncmonitor", account.id) self.shutdown = event.Event() # how often to check inbox, in seconds self.heartbeat = heartbeat - self.log = log.new(component='mail sync', account_id=account.id) + self.log = log.new(component="mail sync", account_id=account.id) self.account_id = account.id self.namespace_id = account.namespace.id self.email_address = account.email_address @@ -50,29 +51,36 @@ def __init__(self, account, heartbeat=1): def _run(self): try: - return retry_with_logging(self._run_impl, - account_id=self.account_id, - provider=self.provider_name, - logger=self.log) + return retry_with_logging( + self._run_impl, + account_id=self.account_id, + provider=self.provider_name, + logger=self.log, + ) except GreenletExit: self._cleanup() raise def _run_impl(self): - self.sync = Greenlet(retry_with_logging, self.sync, - account_id=self.account_id, - provider=self.provider_name, - logger=self.log) + self.sync = Greenlet( + retry_with_logging, + self.sync, + account_id=self.account_id, + provider=self.provider_name, + logger=self.log, + ) self.sync.start() self.sync.join() if self.sync.successful(): return self._cleanup() - self.log.error('mail sync should run forever', - provider=self.provider_name, - account_id=self.account_id, - exc=self.sync.exception) + self.log.error( + "mail sync should run forever", + provider=self.provider_name, + account_id=self.account_id, + exc=self.sync.exception, + ) raise self.sync.exception def sync(self): @@ -81,6 +89,5 @@ def sync(self): def _cleanup(self): self.sync.kill() with session_scope(self.namespace_id) as mailsync_db_session: - map(lambda x: x.set_stopped(mailsync_db_session), - self.folder_monitors) + map(lambda x: x.set_stopped(mailsync_db_session), self.folder_monitors) self.folder_monitors.kill() diff --git a/inbox/mailsync/backends/gmail.py b/inbox/mailsync/backends/gmail.py index 9147b0b28..6d81571cb 100644 --- a/inbox/mailsync/backends/gmail.py +++ b/inbox/mailsync/backends/gmail.py @@ -39,10 +39,11 @@ from inbox.mailsync.backends.imap import common from inbox.mailsync.gc import LabelRenameHandler from inbox.mailsync.backends.base import THROTTLE_COUNT, THROTTLE_WAIT + log = get_logger() -PROVIDER = 'gmail' -SYNC_MONITOR_CLS = 'GmailSyncMonitor' +PROVIDER = "gmail" +SYNC_MONITOR_CLS = "GmailSyncMonitor" MAX_DOWNLOAD_BYTES = 2 ** 20 @@ -51,7 +52,6 @@ class GmailSyncMonitor(ImapSyncMonitor): - def __init__(self, *args, **kwargs): ImapSyncMonitor.__init__(self, *args, **kwargs) 
self.sync_engine_class = GmailFolderSyncEngine @@ -72,35 +72,45 @@ def __init__(self, *args, **kwargs): self.label_rename_semaphore = Semaphore(value=1) def handle_raw_folder_change(self, db_session, account, raw_folder): - folder = db_session.query(Folder).filter( - Folder.account_id == account.id, - Folder.canonical_name == raw_folder.role).first() + folder = ( + db_session.query(Folder) + .filter( + Folder.account_id == account.id, + Folder.canonical_name == raw_folder.role, + ) + .first() + ) if folder: if folder.name != raw_folder.display_name: - log.info('Folder name changed on remote', - account_id=self.account_id, - role=raw_folder.role, - new_name=raw_folder.display_name, - name=folder.name) + log.info( + "Folder name changed on remote", + account_id=self.account_id, + role=raw_folder.role, + new_name=raw_folder.display_name, + name=folder.name, + ) folder.name = raw_folder.display_name if folder.category: - if folder.category.display_name != \ - raw_folder.display_name: + if folder.category.display_name != raw_folder.display_name: folder.category.display_name = raw_folder.display_name # noqa else: - log.info('Creating category for folder', - account_id=self.account_id, - folder_id=folder.id) + log.info( + "Creating category for folder", + account_id=self.account_id, + folder_id=folder.id, + ) folder.category = Category.find_or_create( - db_session, namespace_id=account.namespace.id, + db_session, + namespace_id=account.namespace.id, name=raw_folder.role, display_name=raw_folder.display_name, - type_='folder') + type_="folder", + ) else: - Folder.find_or_create(db_session, account, - raw_folder.display_name, - raw_folder.role) + Folder.find_or_create( + db_session, account, raw_folder.display_name, raw_folder.role + ) def set_sync_should_run_bit(self, account): # Ensure sync_should_run is True for the folders we want to sync (for @@ -145,9 +155,12 @@ def save_folder_names(self, db_session, raw_folders): account = db_session.query(Account).get(self.account_id) current_labels = set() - old_labels = {label for label in db_session.query(Label).filter( - Label.account_id == self.account_id, - Label.deleted_at == None)} # noqa + old_labels = { + label + for label in db_session.query(Label).filter( + Label.account_id == self.account_id, Label.deleted_at == None + ) + } # noqa # Is it the first time we've been syncing folders? # It's important to know this because we don't want to @@ -157,22 +170,23 @@ def save_folder_names(self, db_session, raw_folders): # Create new labels, folders for raw_folder in raw_folders: - if raw_folder.role == 'starred': + if raw_folder.role == "starred": # The starred state of messages is tracked separately # (we set Message.is_starred from the '\\Flagged' flag) continue - if raw_folder.role in ('all', 'spam', 'trash'): + if raw_folder.role in ("all", "spam", "trash"): self.handle_raw_folder_change(db_session, account, raw_folder) - label = Label.find_or_create(db_session, account, - raw_folder.display_name, - raw_folder.role) + label = Label.find_or_create( + db_session, account, raw_folder.display_name, raw_folder.role + ) if label.deleted_at is not None: # This is a label which was previously marked as deleted # but which mysteriously reappeared. Unmark it. 
- log.info('Deleted label recreated on remote', - name=raw_folder.display_name) + log.info( + "Deleted label recreated on remote", name=raw_folder.display_name + ) label.deleted_at = None label.category.deleted_at = EPOCH @@ -191,7 +205,8 @@ def save_folder_names(self, db_session, raw_folders): account_id=self.account_id, namespace_id=self.namespace_id, label_name=label.name, - semaphore=self.label_rename_semaphore) + semaphore=self.label_rename_semaphore, + ) rename_handler.start() @@ -204,15 +219,13 @@ def save_folder_names(self, db_session, raw_folders): class GmailFolderSyncEngine(FolderSyncEngine): - def __init__(self, *args, **kwargs): FolderSyncEngine.__init__(self, *args, **kwargs) self.saved_uids = set() def is_all_mail(self, crispin_client): - if not hasattr(self, '_is_all_mail'): - self._is_all_mail = (self.folder_name in - crispin_client.folder_names()['all']) + if not hasattr(self, "_is_all_mail"): + self._is_all_mail = self.folder_name in crispin_client.folder_names()["all"] return self._is_all_mail def should_idle(self, crispin_client): @@ -226,37 +239,43 @@ def initial_sync_impl(self, crispin_client): remote_uids = sorted(crispin_client.all_uids(), key=int) with self.syncmanager_lock: with session_scope(self.namespace_id) as db_session: - local_uids = common.local_uids(self.account_id, db_session, - self.folder_id) + local_uids = common.local_uids( + self.account_id, db_session, self.folder_id + ) common.remove_deleted_uids( - self.account_id, self.folder_id, - set(local_uids) - set(remote_uids)) + self.account_id, self.folder_id, set(local_uids) - set(remote_uids) + ) unknown_uids = set(remote_uids) - local_uids with session_scope(self.namespace_id) as db_session: self.update_uid_counts( - db_session, remote_uid_count=len(remote_uids), - download_uid_count=len(unknown_uids)) + db_session, + remote_uid_count=len(remote_uids), + download_uid_count=len(unknown_uids), + ) change_poller = gevent.spawn(self.poll_for_changes) - bind_context(change_poller, 'changepoller', self.account_id, - self.folder_id) + bind_context(change_poller, "changepoller", self.account_id, self.folder_id) if self.is_all_mail(crispin_client): # Prioritize UIDs for messages in the inbox folder. if len(remote_uids) < 1e6: inbox_uids = set( - crispin_client.search_uids(['X-GM-LABELS', 'inbox'])) + crispin_client.search_uids(["X-GM-LABELS", "inbox"]) + ) else: # The search above is really slow (times out) on really # large mailboxes, so bound the search to messages within # the past month in order to get anywhere. since = datetime.utcnow() - timedelta(days=30) - inbox_uids = set(crispin_client.search_uids([ - 'X-GM-LABELS', 'inbox', - 'SINCE', since])) + inbox_uids = set( + crispin_client.search_uids( + ["X-GM-LABELS", "inbox", "SINCE", since] + ) + ) - uids_to_download = (sorted(unknown_uids - inbox_uids) + - sorted(unknown_uids & inbox_uids)) + uids_to_download = sorted(unknown_uids - inbox_uids) + sorted( + unknown_uids & inbox_uids + ) else: uids_to_download = sorted(unknown_uids) @@ -266,8 +285,7 @@ def initial_sync_impl(self, crispin_client): # case the g_metadata call above will return nothing. # They may also have been preemptively downloaded by thread # expansion. We can omit such UIDs. 
- uids = [u for u in uids if u in g_metadata and u not in - self.saved_uids] + uids = [u for u in uids if u in g_metadata and u not in self.saved_uids] self.batch_download_uids(crispin_client, uids, g_metadata) finally: if change_poller is not None: @@ -276,49 +294,60 @@ def initial_sync_impl(self, crispin_client): def resync_uids_impl(self): with session_scope(self.namespace_id) as db_session: - imap_folder_info_entry = db_session.query(ImapFolderInfo)\ - .options(load_only('uidvalidity', 'highestmodseq'))\ - .filter_by(account_id=self.account_id, - folder_id=self.folder_id)\ + imap_folder_info_entry = ( + db_session.query(ImapFolderInfo) + .options(load_only("uidvalidity", "highestmodseq")) + .filter_by(account_id=self.account_id, folder_id=self.folder_id) .one() + ) with self.conn_pool.get() as crispin_client: - crispin_client.select_folder(self.folder_name, - lambda *args: True) + crispin_client.select_folder(self.folder_name, lambda *args: True) uidvalidity = crispin_client.selected_uidvalidity if uidvalidity <= imap_folder_info_entry.uidvalidity: # if the remote UIDVALIDITY is less than or equal to - # from my (siro) understanding it should not be less than - # the local UIDVALIDITY log a debug message and exit right # away - log.debug('UIDVALIDITY unchanged') + log.debug("UIDVALIDITY unchanged") return msg_uids = crispin_client.all_uids() - mapping = {g_msgid: msg_uid for msg_uid, g_msgid in - crispin_client.g_msgids(msg_uids).iteritems()} - imap_uid_entries = db_session.query(ImapUid)\ - .options(load_only('msg_uid'), - joinedload('message').load_only('g_msgid'))\ - .filter_by(account_id=self.account_id, - folder_id=self.folder_id) + mapping = { + g_msgid: msg_uid + for msg_uid, g_msgid in crispin_client.g_msgids( + msg_uids + ).iteritems() + } + imap_uid_entries = ( + db_session.query(ImapUid) + .options( + load_only("msg_uid"), joinedload("message").load_only("g_msgid") + ) + .filter_by(account_id=self.account_id, folder_id=self.folder_id) + ) chunk_size = 1000 for entry in imap_uid_entries.yield_per(chunk_size): if entry.message.g_msgid in mapping: - log.debug('X-GM-MSGID {} from UID {} to UID {}'.format( - entry.message.g_msgid, - entry.msg_uid, - mapping[entry.message.g_msgid])) + log.debug( + "X-GM-MSGID {} from UID {} to UID {}".format( + entry.message.g_msgid, + entry.msg_uid, + mapping[entry.message.g_msgid], + ) + ) entry.msg_uid = mapping[entry.message.g_msgid] else: db_session.delete(entry) - log.debug('UIDVALIDITY from {} to {}'.format( - imap_folder_info_entry.uidvalidity, uidvalidity)) + log.debug( + "UIDVALIDITY from {} to {}".format( + imap_folder_info_entry.uidvalidity, uidvalidity + ) + ) imap_folder_info_entry.uidvalidity = uidvalidity imap_folder_info_entry.highestmodseq = None db_session.commit() - def __deduplicate_message_object_creation(self, db_session, raw_messages, - account): + def __deduplicate_message_object_creation(self, db_session, raw_messages, account): """ We deduplicate messages based on g_msgid: if we've previously saved a Message object for this raw message, we don't create a new one. 
But we @@ -332,44 +361,58 @@ def __deduplicate_message_object_creation(self, db_session, raw_messages, """ new_g_msgids = {msg.g_msgid for msg in raw_messages} - existing_g_msgids = g_msgids(self.namespace_id, db_session, - in_=new_g_msgids) - brand_new_messages = [m for m in raw_messages if m.g_msgid not in - existing_g_msgids] - previously_synced_messages = [m for m in raw_messages if m.g_msgid in - existing_g_msgids] + existing_g_msgids = g_msgids(self.namespace_id, db_session, in_=new_g_msgids) + brand_new_messages = [ + m for m in raw_messages if m.g_msgid not in existing_g_msgids + ] + previously_synced_messages = [ + m for m in raw_messages if m.g_msgid in existing_g_msgids + ] if previously_synced_messages: - log.info('saving new uids for existing messages', - count=len(previously_synced_messages)) + log.info( + "saving new uids for existing messages", + count=len(previously_synced_messages), + ) account = Account.get(self.account_id, db_session) folder = Folder.get(self.folder_id, db_session) for raw_message in previously_synced_messages: - message_obj = db_session.query(Message).filter( - Message.namespace_id == self.namespace_id, - Message.g_msgid == raw_message.g_msgid).first() + message_obj = ( + db_session.query(Message) + .filter( + Message.namespace_id == self.namespace_id, + Message.g_msgid == raw_message.g_msgid, + ) + .first() + ) if message_obj is None: log.warning( - 'Message disappeared while saving new uid', + "Message disappeared while saving new uid", g_msgid=raw_message.g_msgid, - uid=raw_message.uid) + uid=raw_message.uid, + ) brand_new_messages.append(raw_message) continue - already_have_uid = ( - (raw_message.uid, self.folder_id) in - {(u.msg_uid, u.folder_id) for u in message_obj.imapuids} - ) + already_have_uid = (raw_message.uid, self.folder_id) in { + (u.msg_uid, u.folder_id) for u in message_obj.imapuids + } if already_have_uid: - log.warning('Skipping existing UID for message', - uid=raw_message.uid, message_id=message_obj.id) + log.warning( + "Skipping existing UID for message", + uid=raw_message.uid, + message_id=message_obj.id, + ) continue - uid = ImapUid(account=account, - folder=folder, - msg_uid=raw_message.uid, - message=message_obj) + uid = ImapUid( + account=account, + folder=folder, + msg_uid=raw_message.uid, + message=message_obj, + ) uid.update_flags(raw_message.flags) uid.update_labels(raw_message.g_labels) common.update_message_metadata( - db_session, account, message_obj, uid.is_draft) + db_session, account, message_obj, uid.is_draft + ) db_session.commit() return brand_new_messages @@ -385,7 +428,8 @@ def add_message_to_thread(self, db_session, message_obj, raw_message): # Disable autoflush so we don't try to flush a message with null # thread_id. 
message_obj.thread = ImapThread.from_gmail_message( - db_session, self.namespace_id, message_obj) + db_session, self.namespace_id, message_obj + ) def download_and_commit_uids(self, crispin_client, uids): start = datetime.utcnow() @@ -398,24 +442,22 @@ def download_and_commit_uids(self, crispin_client, uids): account = Account.get(self.account_id, db_session) folder = Folder.get(self.folder_id, db_session) raw_messages = self.__deduplicate_message_object_creation( - db_session, raw_messages, account) + db_session, raw_messages, account + ) if not raw_messages: return 0 for msg in raw_messages: - uid = self.create_message(db_session, account, folder, - msg) + uid = self.create_message(db_session, account, folder, msg) if uid is not None: db_session.add(uid) db_session.commit() new_uids.add(uid) - log.debug('Committed new UIDs', - new_committed_message_count=len(new_uids)) + log.debug("Committed new UIDs", new_committed_message_count=len(new_uids)) # If we downloaded uids, record message velocity (#uid / latency) if self.state == "initial" and len(new_uids): - self._report_message_velocity(datetime.utcnow() - start, - len(new_uids)) + self._report_message_velocity(datetime.utcnow() - start, len(new_uids)) if self.is_first_message: self._report_first_message() @@ -449,17 +491,22 @@ def expand_uids_to_download(self, crispin_client, uids, metadata): for uid in sorted(uids, reverse=True): yield uid - def batch_download_uids(self, crispin_client, uids, metadata, - max_download_bytes=MAX_DOWNLOAD_BYTES, - max_download_count=MAX_DOWNLOAD_COUNT): + def batch_download_uids( + self, + crispin_client, + uids, + metadata, + max_download_bytes=MAX_DOWNLOAD_BYTES, + max_download_count=MAX_DOWNLOAD_COUNT, + ): expanded_pending_uids = self.expand_uids_to_download( - crispin_client, uids, metadata) + crispin_client, uids, metadata + ) count = 0 while True: dl_size = 0 batch = [] - while (dl_size < max_download_bytes and - len(batch) < max_download_count): + while dl_size < max_download_bytes and len(batch) < max_download_count: try: uid = expanded_pending_uids.next() except StopIteration: @@ -499,12 +546,18 @@ def g_msgids(namespace_id, session, in_): if len(in_) > 1000: # If in_ is really large, passing all the values to MySQL can get # deadly slow. (Approximate threshold empirically determined) - query = session.query(Message.g_msgid).join(Namespace). \ - filter(Message.namespace_id == namespace_id).all() + query = ( + session.query(Message.g_msgid) + .join(Namespace) + .filter(Message.namespace_id == namespace_id) + .all() + ) return sorted(g_msgid for g_msgid, in query if g_msgid in in_) # But in the normal case that in_ only has a few elements, it's way better # to not fetch a bunch of values from MySQL only to return a few of them. - query = session.query(Message.g_msgid). 
\ - filter(Message.namespace_id == namespace_id, - Message.g_msgid.in_(in_)).all() + query = ( + session.query(Message.g_msgid) + .filter(Message.namespace_id == namespace_id, Message.g_msgid.in_(in_)) + .all() + ) return {g_msgid for g_msgid, in query} diff --git a/inbox/mailsync/backends/imap/__init__.py b/inbox/mailsync/backends/imap/__init__.py index a5bff649d..913f5253b 100644 --- a/inbox/mailsync/backends/imap/__init__.py +++ b/inbox/mailsync/backends/imap/__init__.py @@ -1,8 +1,8 @@ from inbox.mailsync.backends.imap import common from inbox.mailsync.backends.imap.monitor import ImapSyncMonitor -__all__ = ['common', 'ImapSyncMonitor'] +__all__ = ["common", "ImapSyncMonitor"] -PROVIDER = 'generic' -SYNC_MONITOR_CLS = 'ImapSyncMonitor' +PROVIDER = "generic" +SYNC_MONITOR_CLS = "ImapSyncMonitor" diff --git a/inbox/mailsync/backends/imap/common.py b/inbox/mailsync/backends/imap/common.py index 0b5fab303..091530898 100644 --- a/inbox/mailsync/backends/imap/common.py +++ b/inbox/mailsync/backends/imap/common.py @@ -31,24 +31,25 @@ def local_uids(account_id, session, folder_id, limit=None): q = bakery(lambda session: session.query(ImapUid.msg_uid)) q += lambda q: q.filter( - ImapUid.account_id == bindparam('account_id'), - ImapUid.folder_id == bindparam('folder_id')) + ImapUid.account_id == bindparam("account_id"), + ImapUid.folder_id == bindparam("folder_id"), + ) if limit: q += lambda q: q.order_by(desc(ImapUid.msg_uid)) - q += lambda q: q.limit(bindparam('limit')) - results = q(session).params(account_id=account_id, - folder_id=folder_id, - limit=limit).all() + q += lambda q: q.limit(bindparam("limit")) + results = ( + q(session).params(account_id=account_id, folder_id=folder_id, limit=limit).all() + ) return {u for u, in results} def lastseenuid(account_id, session, folder_id): q = bakery(lambda session: session.query(func.max(ImapUid.msg_uid))) q += lambda q: q.filter( - ImapUid.account_id == bindparam('account_id'), - ImapUid.folder_id == bindparam('folder_id')) - res = q(session).params(account_id=account_id, - folder_id=folder_id).one()[0] + ImapUid.account_id == bindparam("account_id"), + ImapUid.folder_id == bindparam("folder_id"), + ) + res = q(session).params(account_id=account_id, folder_id=folder_id).one()[0] return res or 0 @@ -65,7 +66,7 @@ def update_message_metadata(session, account, message, is_draft): for i in uids: categories.update(i.categories) - if account.category_type == 'folder': + if account.category_type == "folder": categories = [_select_category(categories)] if categories else [] # Use a consistent time across creating categories, message updated_at @@ -88,8 +89,7 @@ def update_message_metadata(session, account, message, is_draft): # created_at value. Taken from # https://docs.sqlalchemy.org/en/13/orm/extensions/ # associationproxy.html#simplifying-association-objects - MessageCategory(category=category, message=message, - created_at=update_time) + MessageCategory(category=category, message=message, created_at=update_time) # Update the message updated_at field so that it can be used in # the transaction that will be created for category changes. 
@@ -129,11 +129,12 @@ def update_metadata(account_id, folder_id, folder_role, new_flags, session): account = Account.get(account_id, session) change_count = 0 for item in session.query(ImapUid).filter( - ImapUid.account_id == account_id, - ImapUid.msg_uid.in_(new_flags.keys()), - ImapUid.folder_id == folder_id): + ImapUid.account_id == account_id, + ImapUid.msg_uid.in_(new_flags.keys()), + ImapUid.folder_id == folder_id, + ): flags = new_flags[item.msg_uid].flags - labels = getattr(new_flags[item.msg_uid], 'labels', None) + labels = getattr(new_flags[item.msg_uid], "labels", None) # TODO(emfree) refactor so this is only ever relevant for Gmail. changed = item.update_flags(flags) @@ -143,12 +144,12 @@ def update_metadata(account_id, folder_id, folder_role, new_flags, session): if changed: change_count += 1 - is_draft = item.is_draft and (folder_role == 'drafts' or - folder_role == 'all') + is_draft = item.is_draft and ( + folder_role == "drafts" or folder_role == "all" + ) update_message_metadata(session, account, item.message, is_draft) session.commit() - log.info('Updated UID metadata', changed=change_count, - out_of=len(new_flags)) + log.info("Updated UID metadata", changed=change_count, out_of=len(new_flags)) def remove_deleted_uids(account_id, folder_id, uids): @@ -170,10 +171,15 @@ def remove_deleted_uids(account_id, folder_id, uids): # Performance could perhaps be additionally improved by choosing a # sane balance, e.g., operating on 10 or 100 uids or something at once. with session_scope(account_id) as db_session: - imapuid = db_session.query(ImapUid).filter( - ImapUid.account_id == account_id, - ImapUid.folder_id == folder_id, - ImapUid.msg_uid == uid).first() + imapuid = ( + db_session.query(ImapUid) + .filter( + ImapUid.account_id == account_id, + ImapUid.folder_id == folder_id, + ImapUid.msg_uid == uid, + ) + .first() + ) if imapuid is None: continue deleted_uid_count += 1 @@ -192,23 +198,27 @@ def remove_deleted_uids(account_id, folder_id, uids): db_session.delete(thread) else: account = Account.get(account_id, db_session) - update_message_metadata(db_session, account, message, - message.is_draft) + update_message_metadata( + db_session, account, message, message.is_draft + ) if not message.imapuids: # But don't outright delete messages. Just mark them as # 'deleted' and wait for the asynchronous # dangling-message-collector to delete them. message.mark_for_deletion() db_session.commit() - log.info('Deleted expunged UIDs', count=deleted_uid_count) + log.info("Deleted expunged UIDs", count=deleted_uid_count) def get_folder_info(account_id, session, folder_name): try: # using .one() here may catch duplication bugs - return session.query(ImapFolderInfo).join(Folder).filter( - ImapFolderInfo.account_id == account_id, - Folder.name == folder_name).one() + return ( + session.query(ImapFolderInfo) + .join(Folder) + .filter(ImapFolderInfo.account_id == account_id, Folder.name == folder_name) + .one() + ) except NoResultFound: return None @@ -224,13 +234,16 @@ def create_imap_message(db_session, account, folder, msg): relationships. All new objects are uncommitted. 
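
remove_deleted_uids above deliberately deletes one UID per short transaction to keep row locks brief. A rough sketch of the same trade-off, using sqlite3 in place of the real MySQL session (the table name and schema are made up for the example):

    import sqlite3

    def remove_deleted_uids(conn, folder_id, uids):
        # One short transaction per UID keeps locks brief, at the cost of more commits.
        deleted = 0
        for uid in uids:
            with conn:  # each iteration commits (or rolls back) on exit
                cur = conn.execute(
                    "DELETE FROM imapuid WHERE folder_id = ? AND msg_uid = ?",
                    (folder_id, uid),
                )
                deleted += cur.rowcount
        return deleted

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE imapuid (folder_id INTEGER, msg_uid INTEGER)")
    conn.executemany("INSERT INTO imapuid VALUES (?, ?)", [(1, 5), (1, 6), (2, 5)])
    print(remove_deleted_uids(conn, 1, {5, 6}))  # -> 2
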
""" - log.debug('creating message', account_id=account.id, - folder_name=folder.name, - mid=msg.uid) - new_message = Message.create_from_synced(account=account, mid=msg.uid, - folder_name=folder.name, - received_date=msg.internaldate, - body_string=msg.body) + log.debug( + "creating message", account_id=account.id, folder_name=folder.name, mid=msg.uid + ) + new_message = Message.create_from_synced( + account=account, + mid=msg.uid, + folder_name=folder.name, + received_date=msg.internaldate, + body_string=msg.body, + ) # Check to see if this is a copy of a message that was first created # by the Nylas API. If so, don't create a new object; just use the old one. @@ -238,16 +251,18 @@ def create_imap_message(db_session, account, folder, msg): if existing_copy is not None: new_message = existing_copy - imapuid = ImapUid(account=account, folder=folder, msg_uid=msg.uid, - message=new_message) + imapuid = ImapUid( + account=account, folder=folder, msg_uid=msg.uid, message=new_message + ) imapuid.update_flags(msg.flags) if msg.g_labels is not None: imapuid.update_labels(msg.g_labels) # Update the message's metadata with db_session.no_autoflush: - is_draft = imapuid.is_draft and (folder.canonical_name == 'drafts' or - folder.canonical_name == 'all') + is_draft = imapuid.is_draft and ( + folder.canonical_name == "drafts" or folder.canonical_name == "all" + ) update_message_metadata(db_session, account, new_message, is_draft) update_contacts_from_message(db_session, new_message, account.namespace.id) @@ -265,18 +280,25 @@ def _update_categories(db_session, message, synced_categories): # We make the simplifying assumption that only the latest syncback action # matters, since it reflects the current local state. - actionlog_id = db_session.query(func.max(ActionLog.id)).filter( - ActionLog.namespace_id == message.namespace_id, - ActionLog.table_name == 'message', - ActionLog.record_id == message.id, - ActionLog.action.in_(['change_labels', 'move'])).scalar() + actionlog_id = ( + db_session.query(func.max(ActionLog.id)) + .filter( + ActionLog.namespace_id == message.namespace_id, + ActionLog.table_name == "message", + ActionLog.record_id == message.id, + ActionLog.action.in_(["change_labels", "move"]), + ) + .scalar() + ) if actionlog_id is not None: actionlog = db_session.query(ActionLog).get(actionlog_id) # Do /not/ overwrite message.categories in case of a recent local # change - namely, a still 'pending' action or one that completed # recently. 
- if (actionlog.status == 'pending' or - (now - actionlog.updated_at).total_seconds() <= 90): + if ( + actionlog.status == "pending" + or (now - actionlog.updated_at).total_seconds() <= 90 + ): return # We completed the syncback action /long enough ago/ (on average and diff --git a/inbox/mailsync/backends/imap/generic.py b/inbox/mailsync/backends/imap/generic.py index 3bfc98da8..5f7ec087c 100644 --- a/inbox/mailsync/backends/imap/generic.py +++ b/inbox/mailsync/backends/imap/generic.py @@ -80,15 +80,24 @@ from inbox.util.threading import fetch_corresponding_thread, MAX_THREAD_LENGTH from inbox.util.stats import statsd_client from nylas.logging import get_logger + log = get_logger() from inbox.crispin import connection_pool, retry_crispin, FolderMissingError from inbox.models import Folder, Account, Message -from inbox.models.backends.imap import (ImapFolderSyncStatus, ImapThread, - ImapUid, ImapFolderInfo) +from inbox.models.backends.imap import ( + ImapFolderSyncStatus, + ImapThread, + ImapUid, + ImapFolderInfo, +) from inbox.models.session import session_scope from inbox.mailsync.backends.imap import common -from inbox.mailsync.backends.base import (MailsyncDone, MailsyncError, - THROTTLE_COUNT, THROTTLE_WAIT) +from inbox.mailsync.backends.base import ( + MailsyncDone, + MailsyncError, + THROTTLE_COUNT, + THROTTLE_WAIT, +) from inbox.heartbeat.store import HeartbeatStatusProxy from inbox.events.ical import import_attached_events @@ -114,17 +123,27 @@ class FolderSyncEngine(Greenlet): """Base class for a per-folder IMAP sync engine.""" - def __init__(self, account_id, namespace_id, folder_name, - email_address, provider_name, syncmanager_lock): + def __init__( + self, + account_id, + namespace_id, + folder_name, + email_address, + provider_name, + syncmanager_lock, + ): with session_scope(namespace_id) as db_session: try: - folder = db_session.query(Folder). 
\ - filter(Folder.name == folder_name, - Folder.account_id == account_id).one() + folder = ( + db_session.query(Folder) + .filter(Folder.name == folder_name, Folder.account_id == account_id) + .one() + ) except NoResultFound: - raise MailsyncError(u"Missing Folder '{}' on account {}" - .format(folder_name, account_id)) + raise MailsyncError( + u"Missing Folder '{}' on account {}".format(folder_name, account_id) + ) self.folder_id = folder.id self.folder_role = folder.canonical_name @@ -133,13 +152,13 @@ def __init__(self, account_id, namespace_id, folder_name, self.is_first_sync = folder.initial_sync_start is None self.is_first_message = self.is_first_sync - bind_context(self, 'foldersyncengine', account_id, self.folder_id) + bind_context(self, "foldersyncengine", account_id, self.folder_id) self.account_id = account_id self.namespace_id = namespace_id self.folder_name = folder_name self.email_address = email_address - if self.folder_name.lower() == 'inbox': + if self.folder_name.lower() == "inbox": self.poll_frequency = INBOX_POLL_FREQUENCY else: self.poll_frequency = DEFAULT_POLL_FREQUENCY @@ -152,11 +171,11 @@ def __init__(self, account_id, namespace_id, folder_name, self.polling_logged_at = 0 self.state_handlers = { - 'initial': self.initial_sync, - 'initial uidinvalid': self.resync_uids, - 'poll': self.poll, - 'poll uidinvalid': self.resync_uids, - 'finish': lambda: 'finish', + "initial": self.initial_sync, + "initial uidinvalid": self.resync_uids, + "poll": self.poll, + "poll uidinvalid": self.resync_uids, + "finish": lambda: "finish", } self.setup_heartbeats() @@ -170,33 +189,40 @@ def __init__(self, account_id, namespace_id, folder_name, self.uidinvalid_count = 0 def setup_heartbeats(self): - self.heartbeat_status = HeartbeatStatusProxy(self.account_id, - self.folder_id, - self.folder_name, - self.email_address, - self.provider_name) + self.heartbeat_status = HeartbeatStatusProxy( + self.account_id, + self.folder_id, + self.folder_name, + self.email_address, + self.provider_name, + ) def _run(self): # Bind greenlet-local logging context. - self.log = log.new(account_id=self.account_id, folder=self.folder_name, - provider=self.provider_name) + self.log = log.new( + account_id=self.account_id, + folder=self.folder_name, + provider=self.provider_name, + ) # eagerly signal the sync status self.heartbeat_status.publish() def start_sync(saved_folder_status): # Ensure we don't cause an error if the folder was deleted. sync_end_time = ( - saved_folder_status.folder and - saved_folder_status.metrics.get('sync_end_time') + saved_folder_status.folder + and saved_folder_status.metrics.get("sync_end_time") ) if sync_end_time: sync_delay = datetime.utcnow() - sync_end_time if sync_delay > timedelta(days=1): - saved_folder_status.state = 'initial' - log.info('switching to initial sync due to delay', - folder_id=self.folder_id, - account_id=self.account_id, - sync_delay=sync_delay.total_seconds()) + saved_folder_status.state = "initial" + log.info( + "switching to initial sync due to delay", + folder_id=self.folder_id, + account_id=self.account_id, + sync_delay=sync_delay.total_seconds(), + ) saved_folder_status.start_sync() @@ -207,16 +233,23 @@ def start_sync(saved_folder_status): # was no longer valid, ie. the folder for this engine was deleted # while we were starting up. # Exit the sync and let the monitor sort things out. 
- log.info("Folder state loading failed due to IntegrityError", - folder_id=self.folder_id, account_id=self.account_id) + log.info( + "Folder state loading failed due to IntegrityError", + folder_id=self.folder_id, + account_id=self.account_id, + ) raise MailsyncDone() # NOTE: The parent ImapSyncMonitor handler could kill us at any # time if it receives a shutdown command. The shutdown command is # equivalent to ctrl-c. - while self.state != 'finish': - retry_with_logging(self._run_impl, account_id=self.account_id, - provider=self.provider_name, logger=log) + while self.state != "finish": + retry_with_logging( + self._run_impl, + account_id=self.account_id, + provider=self.provider_name, + logger=log, + ) def _run_impl(self): old_state = self.state @@ -224,39 +257,50 @@ def _run_impl(self): self.state = self.state_handlers[old_state]() self.heartbeat_status.publish(state=self.state) except UidInvalid: - self.state = self.state + ' uidinvalid' + self.state = self.state + " uidinvalid" self.uidinvalid_count += 1 self.heartbeat_status.publish(state=self.state) # Check that we're not stuck in an endless uidinvalidity resync loop. if self.uidinvalid_count > MAX_UIDINVALID_RESYNCS: - log.error('Resynced more than MAX_UIDINVALID_RESYNCS in a' - ' row. Stopping sync.', folder_name=self.folder_name) + log.error( + "Resynced more than MAX_UIDINVALID_RESYNCS in a" + " row. Stopping sync.", + folder_name=self.folder_name, + ) # Only stop syncing the entire account if the INBOX folder is # failing. Otherwise simply stop syncing the folder. - if self.folder_name.lower() == 'inbox': + if self.folder_name.lower() == "inbox": with session_scope(self.namespace_id) as db_session: account = db_session.query(Account).get(self.account_id) - account.disable_sync('Detected endless uidvalidity ' - 'resync loop') - account.sync_state = 'stopped' + account.disable_sync( + "Detected endless uidvalidity " "resync loop" + ) + account.sync_state = "stopped" db_session.commit() raise MailsyncDone() else: - self.state = 'finish' + self.state = "finish" self.heartbeat_status.publish(state=self.state) except FolderMissingError: # Folder was deleted by monitor while its sync was running. # TODO: Monitor should handle shutting down the folder engine. - log.info('Folder disappeared. Stopping sync.', - account_id=self.account_id, folder_id=self.folder_id) + log.info( + "Folder disappeared. Stopping sync.", + account_id=self.account_id, + folder_id=self.folder_id, + ) raise MailsyncDone() except ValidationError as exc: - log.error('Error authenticating; stopping sync', exc_info=True, - account_id=self.account_id, folder_id=self.folder_id, - logstash_tag='mark_invalid') + log.error( + "Error authenticating; stopping sync", + exc_info=True, + account_id=self.account_id, + folder_id=self.folder_id, + logstash_tag="mark_invalid", + ) with session_scope(self.namespace_id) as db_session: account = db_session.query(Account).get(self.account_id) account.mark_invalid() @@ -266,11 +310,13 @@ def _run_impl(self): # State handlers are idempotent, so it's okay if we're # killed between the end of the handler and the commit. if self.state != old_state: + def update(status): status.state = self.state + self.update_folder_sync_status(update) - if self.state == old_state and self.state in ['initial', 'poll']: + if self.state == old_state and self.state in ["initial", "poll"]: # We've been through a normal state transition without raising any # error. It's safe to reset the uidvalidity counter. 
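
The state_handlers dict above drives a small per-folder state machine: each handler returns the next state, and a UIDVALIDITY change reroutes the current state to its "uidinvalid" variant, with a cap on consecutive resyncs. A simplified, runnable sketch of that loop (not the engine's actual implementation):

    class UidInvalid(Exception):
        pass

    def run_folder_sync(handlers, state="initial", max_uidinvalid_resyncs=3):
        # Each handler returns the next state; too many uidinvalid reroutes in a
        # row aborts the sync (simplified from the engine above).
        uidinvalid_count = 0
        while state != "finish":
            try:
                state = handlers[state]()
                uidinvalid_count = 0
            except UidInvalid:
                uidinvalid_count += 1
                if uidinvalid_count > max_uidinvalid_resyncs:
                    raise RuntimeError("endless uidvalidity resync loop")
                state = state + " uidinvalid"
        return state

    handlers = {
        "initial": lambda: "poll",
        "initial uidinvalid": lambda: "initial",
        "poll": lambda: "finish",
        "poll uidinvalid": lambda: "initial",
    }
    print(run_folder_sync(handlers))  # -> finish
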
self.uidinvalid_count = 0 @@ -281,13 +327,16 @@ def update_folder_sync_status(self, cb): # they are never out of sync. with session_scope(self.namespace_id) as db_session: try: - saved_folder_status = db_session.query(ImapFolderSyncStatus)\ - .filter_by(account_id=self.account_id, folder_id=self.folder_id)\ + saved_folder_status = ( + db_session.query(ImapFolderSyncStatus) + .filter_by(account_id=self.account_id, folder_id=self.folder_id) .one() + ) except NoResultFound: saved_folder_status = ImapFolderSyncStatus( - account_id=self.account_id, folder_id=self.folder_id) + account_id=self.account_id, folder_id=self.folder_id + ) db_session.add(saved_folder_status) cb(saved_folder_status) @@ -310,8 +359,8 @@ def _report_initial_sync_end(self): @retry_crispin def initial_sync(self): - log.bind(state='initial') - log.info('starting initial sync') + log.bind(state="initial") + log.info("starting initial sync") if self.is_first_sync: self._report_initial_sync_start() @@ -322,15 +371,17 @@ def initial_sync(self): # Ensure we have an ImapFolderInfo row created prior to sync start. with session_scope(self.namespace_id) as db_session: try: - db_session.query(ImapFolderInfo). \ - filter(ImapFolderInfo.account_id == self.account_id, - ImapFolderInfo.folder_id == self.folder_id). \ - one() + db_session.query(ImapFolderInfo).filter( + ImapFolderInfo.account_id == self.account_id, + ImapFolderInfo.folder_id == self.folder_id, + ).one() except NoResultFound: imapfolderinfo = ImapFolderInfo( - account_id=self.account_id, folder_id=self.folder_id, + account_id=self.account_id, + folder_id=self.folder_id, uidvalidity=crispin_client.selected_uidvalidity, - uidnext=crispin_client.selected_uidnext) + uidnext=crispin_client.selected_uidnext, + ) db_session.add(imapfolderinfo) db_session.commit() @@ -340,26 +391,26 @@ def initial_sync(self): self._report_initial_sync_end() self.is_initial_sync = False - return 'poll' + return "poll" @retry_crispin def poll(self): - log.bind(state='poll') + log.bind(state="poll") # Only log every 5 minutes to cut down on the volume of # this log statement timestamp = time.time() if timestamp - self.polling_logged_at > 60 * 5: self.polling_logged_at = timestamp - log.debug('polling') + log.debug("polling") self.poll_impl() - return 'poll' + return "poll" @retry_crispin def resync_uids(self): log.bind(state=self.state) - log.warning('UIDVALIDITY changed; initiating resync') + log.warning("UIDVALIDITY changed; initiating resync") self.resync_uids_impl() - return 'initial' + return "initial" def initial_sync_impl(self, crispin_client): # We wrap the block in a try/finally because the change_poller greenlet @@ -370,11 +421,14 @@ def initial_sync_impl(self, crispin_client): remote_uids = crispin_client.all_uids() with self.syncmanager_lock: with session_scope(self.namespace_id) as db_session: - local_uids = common.local_uids(self.account_id, db_session, - self.folder_id) + local_uids = common.local_uids( + self.account_id, db_session, self.folder_id + ) common.remove_deleted_uids( - self.account_id, self.folder_id, - set(local_uids).difference(remote_uids)) + self.account_id, + self.folder_id, + set(local_uids).difference(remote_uids), + ) new_uids = set(remote_uids).difference(local_uids) with session_scope(self.namespace_id) as db_session: @@ -384,11 +438,11 @@ def initial_sync_impl(self, crispin_client): db_session, remote_uid_count=len(remote_uids), # This is the initial size of our download_queue - download_uid_count=len(new_uids)) + download_uid_count=len(new_uids), + ) 
change_poller = gevent.spawn(self.poll_for_changes) - bind_context(change_poller, 'changepoller', self.account_id, - self.folder_id) + bind_context(change_poller, "changepoller", self.account_id, self.folder_id) uids = sorted(new_uids, reverse=True) count = 0 for uid in uids: @@ -410,10 +464,10 @@ def initial_sync_impl(self, crispin_client): gevent.kill(change_poller) def should_idle(self, crispin_client): - if not hasattr(self, '_should_idle'): + if not hasattr(self, "_should_idle"): self._should_idle = ( - crispin_client.idle_supported() and self.folder_name in - crispin_client.folder_names()['inbox'] + crispin_client.idle_supported() + and self.folder_name in crispin_client.folder_names()["inbox"] ) return self._should_idle @@ -421,18 +475,17 @@ def poll_impl(self): with self.conn_pool.get() as crispin_client: self.check_uid_changes(crispin_client) if self.should_idle(crispin_client): - crispin_client.select_folder(self.folder_name, - self.uidvalidity_cb) + crispin_client.select_folder(self.folder_name, self.uidvalidity_cb) idling = True try: crispin_client.idle(IDLE_WAIT) except Exception as exc: # With some servers we get e.g. # 'Unexpected IDLE response: * FLAGS (...)' - if isinstance(exc, imaplib.IMAP4.error) and \ - exc.message.startswith('Unexpected IDLE response'): - log.info('Error initiating IDLE, not idling', - error=exc) + if isinstance(exc, imaplib.IMAP4.error) and exc.message.startswith( + "Unexpected IDLE response" + ): + log.info("Error initiating IDLE, not idling", error=exc) try: # Still have to take the connection out of IDLE # mode to reuse it though. @@ -456,7 +509,7 @@ def resync_uids_impl(self): remote_uidvalidity = crispin_client.selected_uidvalidity remote_uidnext = crispin_client.selected_uidnext if remote_uidvalidity <= self.uidvalidity: - log.debug('UIDVALIDITY unchanged') + log.debug("UIDVALIDITY unchanged") return # Otherwise, if the UIDVALIDITY really has changed, discard all saved # UIDs for the folder, mark associated messages for garbage-collection, @@ -465,13 +518,13 @@ def resync_uids_impl(self): # uidinvalidity is sufficiently rare that this tradeoff is acceptable. with session_scope(self.namespace_id) as db_session: invalid_uids = { - uid for uid, in db_session.query(ImapUid.msg_uid). - filter_by(account_id=self.account_id, - folder_id=self.folder_id) + uid + for uid, in db_session.query(ImapUid.msg_uid).filter_by( + account_id=self.account_id, folder_id=self.folder_id + ) } with self.syncmanager_lock: - common.remove_deleted_uids(self.account_id, self.folder_id, - invalid_uids) + common.remove_deleted_uids(self.account_id, self.folder_id, invalid_uids) self.uidvalidity = remote_uidvalidity self.highestmodseq = None self.uidnext = remote_uidnext @@ -480,7 +533,7 @@ def resync_uids_impl(self): def poll_for_changes(self): log.new(account_id=self.account_id, folder=self.folder_name) while True: - log.debug('polling for changes') + log.debug("polling for changes") self.poll_impl() def create_message(self, db_session, acct, folder, msg): @@ -488,19 +541,27 @@ def create_message(self, db_session, acct, folder, msg): # Check if we somehow already saved the imapuid (shouldn't happen, but # possible due to race condition). If so, don't commit changes. 
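
poll_impl above falls back to IMAP IDLE when the server and folder support it, and always takes the connection back out of IDLE before reusing it. A sketch of that pattern with the imapclient library; the host and credentials are placeholders:

    from imapclient import IMAPClient

    HOST, USER, PASSWORD = "imap.example.com", "user@example.com", "secret"  # placeholders

    def poll_once_with_idle(folder="INBOX", idle_wait=30):
        conn = IMAPClient(HOST, ssl=True)
        try:
            conn.login(USER, PASSWORD)
            conn.select_folder(folder)
            conn.idle()  # enter IDLE mode
            try:
                # Block until the server pushes an update or the wait expires.
                responses = conn.idle_check(timeout=idle_wait)
            finally:
                conn.idle_done()  # always leave IDLE before reusing the connection
            return responses
        finally:
            conn.logout()
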
- existing_imapuid = db_session.query(ImapUid).filter( - ImapUid.account_id == acct.id, ImapUid.folder_id == folder.id, - ImapUid.msg_uid == msg.uid).first() + existing_imapuid = ( + db_session.query(ImapUid) + .filter( + ImapUid.account_id == acct.id, + ImapUid.folder_id == folder.id, + ImapUid.msg_uid == msg.uid, + ) + .first() + ) if existing_imapuid is not None: - log.error('Expected to create imapuid, but existing row found', - remote_msg_uid=msg.uid, - existing_imapuid=existing_imapuid.id) + log.error( + "Expected to create imapuid, but existing row found", + remote_msg_uid=msg.uid, + existing_imapuid=existing_imapuid.id, + ) return None # Check if the message is valid. # https://sentry.nylas.com/sentry/sync-prod/group/3387/ if msg.body is None: - log.warning('Server returned a message with an empty body.') + log.warning("Server returned a message with an empty body.") return None new_uid = common.create_imap_message(db_session, acct, folder, msg) @@ -518,13 +579,15 @@ def create_message(self, db_session, acct, folder, msg): # If we're in the polling state, then we want to report the metric # for latency when the message was received vs created - if self.state == 'poll': + if self.state == "poll": latency_millis = ( - datetime.utcnow() - new_uid.message.received_date) \ - .total_seconds() * 1000 + datetime.utcnow() - new_uid.message.received_date + ).total_seconds() * 1000 metrics = [ - '.'.join(['mailsync', 'providers', 'overall', 'message_latency']), - '.'.join(['mailsync', 'providers', self.provider_name, 'message_latency']), + ".".join(["mailsync", "providers", "overall", "message_latency"]), + ".".join( + ["mailsync", "providers", self.provider_name, "message_latency"] + ), ] for metric in metrics: statsd_client.timing(metric, latency_millis) @@ -532,8 +595,11 @@ def create_message(self, db_session, acct, folder, msg): return new_uid def _count_thread_messages(self, thread_id, db_session): - count, = db_session.query(func.count(Message.id)). \ - filter(Message.thread_id == thread_id).one() + (count,) = ( + db_session.query(func.count(Message.id)) + .filter(Message.thread_id == thread_id) + .one() + ) return count def add_message_to_thread(self, db_session, message_obj, raw_message): @@ -543,20 +609,23 @@ def add_message_to_thread(self, db_session, message_obj, raw_message): # Disable autoflush so we don't try to flush a message with null # thread_id. parent_thread = fetch_corresponding_thread( - db_session, self.namespace_id, message_obj) + db_session, self.namespace_id, message_obj + ) construct_new_thread = True if parent_thread: # If there's a parent thread that isn't too long already, # add to it. Otherwise create a new thread. 
parent_message_count = self._count_thread_messages( - parent_thread.id, db_session) + parent_thread.id, db_session + ) if parent_message_count < MAX_THREAD_LENGTH: construct_new_thread = False if construct_new_thread: message_obj.thread = ImapThread.from_imap_message( - db_session, self.namespace_id, message_obj) + db_session, self.namespace_id, message_obj + ) else: parent_thread.messages.append(message_obj) @@ -572,19 +641,17 @@ def download_and_commit_uids(self, crispin_client, uids): account = Account.get(self.account_id, db_session) folder = Folder.get(self.folder_id, db_session) for msg in raw_messages: - uid = self.create_message(db_session, account, - folder, msg) + uid = self.create_message(db_session, account, folder, msg) if uid is not None: db_session.add(uid) db_session.flush() new_uids.add(uid) db_session.commit() - log.debug('Committed new UIDs', new_committed_message_count=len(new_uids)) + log.debug("Committed new UIDs", new_committed_message_count=len(new_uids)) # If we downloaded uids, record message velocity (#uid / latency) - if self.state == 'initial' and len(new_uids): - self._report_message_velocity(datetime.utcnow() - start, - len(new_uids)) + if self.state == "initial" and len(new_uids): + self._report_message_velocity(datetime.utcnow() - start, len(new_uids)) if self.is_first_message: self._report_first_message() self.is_first_message = False @@ -595,7 +662,7 @@ def _report_first_message(self): # Only record the "time to first message" in the inbox. Because users # can add more folders at any time, "initial sync"-style metrics for # other folders don't mean much. - if self.folder_role not in ['inbox', 'all']: + if self.folder_role not in ["inbox", "all"]: return now = datetime.utcnow() @@ -606,8 +673,8 @@ def _report_first_message(self): latency = (now - account_created).total_seconds() * 1000 metrics = [ - '.'.join(['mailsync', 'providers', self.provider_name, 'first_message']), - '.'.join(['mailsync', 'providers', 'overall', 'first_message']) + ".".join(["mailsync", "providers", self.provider_name, "first_message"]), + ".".join(["mailsync", "providers", "overall", "first_message"]), ] for metric in metrics: @@ -617,20 +684,25 @@ def _report_message_velocity(self, timedelta, num_uids): latency = (timedelta).total_seconds() * 1000 latency_per_uid = float(latency) / num_uids metrics = [ - '.'.join(['mailsync', 'providers', self.provider_name, - 'message_velocity']), - '.'.join(['mailsync', 'providers', 'overall', 'message_velocity']) + ".".join(["mailsync", "providers", self.provider_name, "message_velocity"]), + ".".join(["mailsync", "providers", "overall", "message_velocity"]), ] for metric in metrics: statsd_client.timing(metric, latency_per_uid) def update_uid_counts(self, db_session, **kwargs): - saved_status = db_session.query(ImapFolderSyncStatus).join(Folder). \ - filter(ImapFolderSyncStatus.account_id == self.account_id, - Folder.name == self.folder_name).one() + saved_status = ( + db_session.query(ImapFolderSyncStatus) + .join(Folder) + .filter( + ImapFolderSyncStatus.account_id == self.account_id, + Folder.name == self.folder_name, + ) + .one() + ) # We're not updating the current_remote_count metric # so don't update uid_checked_timestamp. 
- if kwargs.get('remote_uid_count') is None: + if kwargs.get("remote_uid_count") is None: saved_status.update_metrics(kwargs) else: metrics = dict(uid_checked_timestamp=datetime.utcnow()) @@ -640,31 +712,39 @@ def update_uid_counts(self, db_session, **kwargs): def get_new_uids(self, crispin_client): try: remote_uidnext = crispin_client.conn.folder_status( - self.folder_name, ['UIDNEXT']).get('UIDNEXT') + self.folder_name, ["UIDNEXT"] + ).get("UIDNEXT") except ValueError: # Work around issue where ValueError is raised on parsing STATUS # response. - log.warning('Error getting UIDNEXT', exc_info=True) + log.warning("Error getting UIDNEXT", exc_info=True) remote_uidnext = None except imaplib.IMAP4.error as e: # TODO: match with CrispinClient.select_folder - if '[NONEXISTENT]' in e.message or \ - 'does not exist' in e.message or \ - "doesn't exist" in e.message: + if ( + "[NONEXISTENT]" in e.message + or "does not exist" in e.message + or "doesn't exist" in e.message + ): raise FolderMissingError() else: raise e if remote_uidnext is not None and remote_uidnext == self.uidnext: return - log.debug('UIDNEXT changed, checking for new UIDs', - remote_uidnext=remote_uidnext, saved_uidnext=self.uidnext) + log.debug( + "UIDNEXT changed, checking for new UIDs", + remote_uidnext=remote_uidnext, + saved_uidnext=self.uidnext, + ) crispin_client.select_folder(self.folder_name, self.uidvalidity_cb) with session_scope(self.namespace_id) as db_session: - lastseenuid = common.lastseenuid(self.account_id, db_session, - self.folder_id) - latest_uids = crispin_client.conn.fetch('{}:*'.format(lastseenuid + 1), - ['UID']).keys() + lastseenuid = common.lastseenuid( + self.account_id, db_session, self.folder_id + ) + latest_uids = crispin_client.conn.fetch( + "{}:*".format(lastseenuid + 1), ["UID"] + ).keys() new_uids = set(latest_uids) - {lastseenuid} if new_uids: for uid in sorted(new_uids): @@ -673,7 +753,8 @@ def get_new_uids(self, crispin_client): def condstore_refresh_flags(self, crispin_client): new_highestmodseq = crispin_client.conn.folder_status( - self.folder_name, ['HIGHESTMODSEQ'])['HIGHESTMODSEQ'] + self.folder_name, ["HIGHESTMODSEQ"] + )["HIGHESTMODSEQ"] # Ensure that we have an initial highestmodseq value stored before we # begin polling for changes. if self.highestmodseq is None: @@ -685,18 +766,20 @@ def condstore_refresh_flags(self, crispin_client): return elif new_highestmodseq < self.highestmodseq: # This should really never happen, but if it does, handle it. 
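
get_new_uids above relies on the fact that fetching "<lastseenuid + 1>:*" always returns at least the highest existing UID, even when nothing new has arrived, so the last seen UID has to be subtracted back out. In miniature:

    def new_uids_since(fetched_uids, lastseenuid):
        # The fetch still returns the highest existing UID when nothing is new,
        # so that UID has to be filtered back out.
        return sorted(set(fetched_uids) - {lastseenuid})

    print(new_uids_since([42], lastseenuid=42))      # no new mail -> []
    print(new_uids_since([43, 44], lastseenuid=42))  # -> [43, 44]
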
- log.warning('got server highestmodseq less than saved ' - 'highestmodseq', - new_highestmodseq=new_highestmodseq, - saved_highestmodseq=self.highestmodseq) + log.warning( + "got server highestmodseq less than saved " "highestmodseq", + new_highestmodseq=new_highestmodseq, + saved_highestmodseq=self.highestmodseq, + ) return - log.debug('HIGHESTMODSEQ has changed, getting changed UIDs', - new_highestmodseq=new_highestmodseq, - saved_highestmodseq=self.highestmodseq) + log.debug( + "HIGHESTMODSEQ has changed, getting changed UIDs", + new_highestmodseq=new_highestmodseq, + saved_highestmodseq=self.highestmodseq, + ) crispin_client.select_folder(self.folder_name, self.uidvalidity_cb) - changed_flags = crispin_client.condstore_changed_flags( - self.highestmodseq) + changed_flags = crispin_client.condstore_changed_flags(self.highestmodseq) remote_uids = crispin_client.all_uids() # In order to be able to sync changes to tens of thousands of flags at @@ -709,19 +792,23 @@ def condstore_refresh_flags(self, crispin_client): # we avoid that by batching. flag_batches = chunk( sorted(changed_flags.items(), key=lambda (k, v): v.modseq), - CONDSTORE_FLAGS_REFRESH_BATCH_SIZE) + CONDSTORE_FLAGS_REFRESH_BATCH_SIZE, + ) for flag_batch in flag_batches: with session_scope(self.namespace_id) as db_session: - common.update_metadata(self.account_id, self.folder_id, - self.folder_role, dict(flag_batch), - db_session) + common.update_metadata( + self.account_id, + self.folder_id, + self.folder_role, + dict(flag_batch), + db_session, + ) if len(flag_batch) == CONDSTORE_FLAGS_REFRESH_BATCH_SIZE: interim_highestmodseq = max(v.modseq for k, v in flag_batch) self.highestmodseq = interim_highestmodseq with session_scope(self.namespace_id) as db_session: - local_uids = common.local_uids(self.account_id, db_session, - self.folder_id) + local_uids = common.local_uids(self.account_id, db_session, self.folder_id) expunged_uids = set(local_uids).difference(remote_uids) if expunged_uids: @@ -730,25 +817,27 @@ def condstore_refresh_flags(self, crispin_client): # latest UIDs before expunging anything, in order to properly # capture draft revisions. with session_scope(self.namespace_id) as db_session: - lastseenuid = common.lastseenuid(self.account_id, db_session, - self.folder_id) + lastseenuid = common.lastseenuid( + self.account_id, db_session, self.folder_id + ) if remote_uids and lastseenuid < max(remote_uids): - log.info('Downloading new UIDs before expunging') + log.info("Downloading new UIDs before expunging") self.get_new_uids(crispin_client) with self.syncmanager_lock: - common.remove_deleted_uids(self.account_id, self.folder_id, - expunged_uids) + common.remove_deleted_uids( + self.account_id, self.folder_id, expunged_uids + ) self.highestmodseq = new_highestmodseq def generic_refresh_flags(self, crispin_client): now = datetime.utcnow() slow_refresh_due = ( - self.last_slow_refresh is None or - now > self.last_slow_refresh + SLOW_REFRESH_INTERVAL + self.last_slow_refresh is None + or now > self.last_slow_refresh + SLOW_REFRESH_INTERVAL ) fast_refresh_due = ( - self.last_fast_refresh is None or - now > self.last_fast_refresh + FAST_REFRESH_INTERVAL + self.last_fast_refresh is None + or now > self.last_fast_refresh + FAST_REFRESH_INTERVAL ) if slow_refresh_due: self.refresh_flags_impl(crispin_client, SLOW_FLAGS_REFRESH_LIMIT) @@ -763,24 +852,27 @@ def refresh_flags_impl(self, crispin_client, max_uids): # Check for any deleted messages. 
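
condstore_refresh_flags above applies flag changes in MODSEQ order and only advances the saved HIGHESTMODSEQ after each full batch, so an interrupted refresh can resume roughly where it stopped. A small stand-alone sketch of that batching, where apply_batch is a placeholder for the real metadata update:

    def chunk(items, size):
        for i in range(0, len(items), size):
            yield items[i:i + size]

    def refresh_changed_flags(changed_flags, apply_batch, batch_size=200):
        # changed_flags maps uid -> modseq. Batches are applied in MODSEQ order;
        # the saved HIGHESTMODSEQ advances only after a full batch.
        saved_highestmodseq = None
        ordered = sorted(changed_flags.items(), key=lambda kv: kv[1])
        for batch in chunk(ordered, batch_size):
            apply_batch(dict(batch))
            if len(batch) == batch_size:
                saved_highestmodseq = max(modseq for _, modseq in batch)
        return saved_highestmodseq

    print(refresh_changed_flags({1: 10, 2: 12, 3: 11}, apply_batch=print, batch_size=2))
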
remote_uids = crispin_client.all_uids() with session_scope(self.namespace_id) as db_session: - local_uids = common.local_uids(self.account_id, db_session, - self.folder_id) + local_uids = common.local_uids(self.account_id, db_session, self.folder_id) expunged_uids = set(local_uids).difference(remote_uids) if expunged_uids: with self.syncmanager_lock: - common.remove_deleted_uids(self.account_id, self.folder_id, - expunged_uids) + common.remove_deleted_uids( + self.account_id, self.folder_id, expunged_uids + ) # Get recent UIDs to monitor for flag changes. with session_scope(self.namespace_id) as db_session: - local_uids = common.local_uids(account_id=self.account_id, - session=db_session, - folder_id=self.folder_id, - limit=max_uids) + local_uids = common.local_uids( + account_id=self.account_id, + session=db_session, + folder_id=self.folder_id, + limit=max_uids, + ) flags = crispin_client.flags(local_uids) - if (max_uids in self.flags_fetch_results and - self.flags_fetch_results[max_uids] == (local_uids, flags)): + if max_uids in self.flags_fetch_results and self.flags_fetch_results[ + max_uids + ] == (local_uids, flags): # If the flags fetch response is exactly the same as the last one # we got, then we don't need to persist any changes. @@ -788,16 +880,17 @@ def refresh_flags_impl(self, crispin_client, max_uids): # log.debug('Unchanged flags refresh response, ' # 'not persisting changes', max_uids=max_uids) return - log.debug('Changed flags refresh response, persisting changes', - max_uids=max_uids) + log.debug( + "Changed flags refresh response, persisting changes", max_uids=max_uids + ) expunged_uids = set(local_uids).difference(flags.keys()) with self.syncmanager_lock: - common.remove_deleted_uids(self.account_id, self.folder_id, - expunged_uids) + common.remove_deleted_uids(self.account_id, self.folder_id, expunged_uids) with self.syncmanager_lock: with session_scope(self.namespace_id) as db_session: - common.update_metadata(self.account_id, self.folder_id, - self.folder_role, flags, db_session) + common.update_metadata( + self.account_id, self.folder_id, self.folder_role, flags, db_session + ) self.flags_fetch_results[max_uids] = (local_uids, flags) def check_uid_changes(self, crispin_client): @@ -809,24 +902,24 @@ def check_uid_changes(self, crispin_client): @property def uidvalidity(self): - if not hasattr(self, '_uidvalidity'): + if not hasattr(self, "_uidvalidity"): self._uidvalidity = self._load_imap_folder_info().uidvalidity return self._uidvalidity @uidvalidity.setter def uidvalidity(self, value): - self._update_imap_folder_info('uidvalidity', value) + self._update_imap_folder_info("uidvalidity", value) self._uidvalidity = value @property def uidnext(self): - if not hasattr(self, '_uidnext'): + if not hasattr(self, "_uidnext"): self._uidnext = self._load_imap_folder_info().uidnext return self._uidnext @uidnext.setter def uidnext(self, value): - self._update_imap_folder_info('uidnext', value) + self._update_imap_folder_info("uidnext", value) self._uidnext = value @property @@ -834,83 +927,92 @@ def last_slow_refresh(self): # We persist the last_slow_refresh timestamp so that we don't end up # doing a (potentially expensive) full flags refresh for every account # on every process restart. - if not hasattr(self, '_last_slow_refresh'): - self._last_slow_refresh = self._load_imap_folder_info(). 
\ - last_slow_refresh + if not hasattr(self, "_last_slow_refresh"): + self._last_slow_refresh = self._load_imap_folder_info().last_slow_refresh return self._last_slow_refresh @last_slow_refresh.setter def last_slow_refresh(self, value): - self._update_imap_folder_info('last_slow_refresh', value) + self._update_imap_folder_info("last_slow_refresh", value) self._last_slow_refresh = value @property def highestmodseq(self): - if not hasattr(self, '_highestmodseq'): + if not hasattr(self, "_highestmodseq"): self._highestmodseq = self._load_imap_folder_info().highestmodseq return self._highestmodseq @highestmodseq.setter def highestmodseq(self, value): self._highestmodseq = value - self._update_imap_folder_info('highestmodseq', value) + self._update_imap_folder_info("highestmodseq", value) def _load_imap_folder_info(self): with session_scope(self.namespace_id) as db_session: - imapfolderinfo = db_session.query(ImapFolderInfo). \ - filter(ImapFolderInfo.account_id == self.account_id, - ImapFolderInfo.folder_id == self.folder_id). \ - one() + imapfolderinfo = ( + db_session.query(ImapFolderInfo) + .filter( + ImapFolderInfo.account_id == self.account_id, + ImapFolderInfo.folder_id == self.folder_id, + ) + .one() + ) db_session.expunge(imapfolderinfo) return imapfolderinfo def _update_imap_folder_info(self, attrname, value): with session_scope(self.namespace_id) as db_session: - imapfolderinfo = db_session.query(ImapFolderInfo). \ - filter(ImapFolderInfo.account_id == self.account_id, - ImapFolderInfo.folder_id == self.folder_id). \ - one() + imapfolderinfo = ( + db_session.query(ImapFolderInfo) + .filter( + ImapFolderInfo.account_id == self.account_id, + ImapFolderInfo.folder_id == self.folder_id, + ) + .one() + ) setattr(imapfolderinfo, attrname, value) db_session.commit() def uidvalidity_cb(self, account_id, folder_name, select_info): assert folder_name == self.folder_name assert account_id == self.account_id - selected_uidvalidity = select_info['UIDVALIDITY'] - is_valid = (self.uidvalidity is None or - selected_uidvalidity <= self.uidvalidity) + selected_uidvalidity = select_info["UIDVALIDITY"] + is_valid = self.uidvalidity is None or selected_uidvalidity <= self.uidvalidity if not is_valid: raise UidInvalid( - 'folder: {}, remote uidvalidity: {}, ' - 'cached uidvalidity: {}'.format(folder_name.encode('utf-8'), - selected_uidvalidity, - self.uidvalidity)) + "folder: {}, remote uidvalidity: {}, " + "cached uidvalidity: {}".format( + folder_name.encode("utf-8"), selected_uidvalidity, self.uidvalidity + ) + ) return select_info class UidInvalid(Exception): """Raised when a folder's UIDVALIDITY changes, requiring a resync.""" + pass # This version is elsewhere in the codebase, so keep it for now # TODO(emfree): clean this up. 
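
The uidvalidity_cb check above reduces to a single comparison: a remote UIDVALIDITY greater than the cached value invalidates every stored UID for the folder. A stripped-down sketch of that check; the function name and arguments are illustrative:

    class UidInvalid(Exception):
        """Raised when a folder's UIDVALIDITY changes, requiring a resync."""

    def check_uidvalidity(folder_name, cached_uidvalidity, select_info):
        # A larger remote UIDVALIDITY means every stored UID for this folder may
        # now point at a different message, so the caller has to resync.
        selected = select_info["UIDVALIDITY"]
        if cached_uidvalidity is not None and selected > cached_uidvalidity:
            raise UidInvalid(
                "folder: {}, remote uidvalidity: {}, cached uidvalidity: {}".format(
                    folder_name, selected, cached_uidvalidity
                )
            )
        return select_info

    print(check_uidvalidity("INBOX", 5, {"UIDVALIDITY": 5}))  # unchanged, passes
    # check_uidvalidity("INBOX", 5, {"UIDVALIDITY": 9})       # would raise UidInvalid
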
def uidvalidity_cb(account_id, folder_name, select_info): - assert folder_name is not None and select_info is not None, \ - "must start IMAP session before verifying UIDVALIDITY" + assert ( + folder_name is not None and select_info is not None + ), "must start IMAP session before verifying UIDVALIDITY" with session_scope(account_id) as db_session: - saved_folder_info = common.get_folder_info(account_id, db_session, - folder_name) - saved_uidvalidity = or_none(saved_folder_info, lambda i: - i.uidvalidity) - selected_uidvalidity = select_info['UIDVALIDITY'] + saved_folder_info = common.get_folder_info(account_id, db_session, folder_name) + saved_uidvalidity = or_none(saved_folder_info, lambda i: i.uidvalidity) + selected_uidvalidity = select_info["UIDVALIDITY"] if saved_folder_info: - is_valid = (saved_uidvalidity is None or - selected_uidvalidity <= saved_uidvalidity) + is_valid = ( + saved_uidvalidity is None or selected_uidvalidity <= saved_uidvalidity + ) if not is_valid: raise UidInvalid( - 'folder: {}, remote uidvalidity: {}, ' - 'cached uidvalidity: {}'.format(folder_name.encode('utf-8'), - selected_uidvalidity, - saved_uidvalidity)) + "folder: {}, remote uidvalidity: {}, " + "cached uidvalidity: {}".format( + folder_name.encode("utf-8"), selected_uidvalidity, saved_uidvalidity + ) + ) return select_info diff --git a/inbox/mailsync/backends/imap/monitor.py b/inbox/mailsync/backends/imap/monitor.py index 050baf48e..4860e3676 100644 --- a/inbox/mailsync/backends/imap/monitor.py +++ b/inbox/mailsync/backends/imap/monitor.py @@ -10,6 +10,7 @@ from inbox.mailsync.backends.base import BaseMailSyncMonitor from inbox.mailsync.backends.imap.generic import FolderSyncEngine from inbox.mailsync.gc import DeleteHandler + log = get_logger() @@ -26,8 +27,7 @@ class ImapSyncMonitor(BaseMailSyncMonitor): Seconds to wait between checking for new folders to sync. """ - def __init__(self, account, - heartbeat=1, refresh_frequency=30): + def __init__(self, account, heartbeat=1, refresh_frequency=30): self.refresh_frequency = refresh_frequency self.syncmanager_lock = BoundedSemaphore(1) self.saved_remote_folders = None @@ -76,35 +76,38 @@ def save_folder_names(self, db_session, raw_folders): """ account = db_session.query(Account).get(self.account_id) - remote_folder_names = {sanitize_name(f.display_name) - for f in raw_folders} + remote_folder_names = {sanitize_name(f.display_name) for f in raw_folders} - assert 'inbox' in {f.role for f in raw_folders},\ - 'Account {} has no detected inbox folder'.\ - format(account.email_address) + assert "inbox" in { + f.role for f in raw_folders + }, "Account {} has no detected inbox folder".format(account.email_address) - local_folders = {f.name: f for f in db_session.query(Folder).filter( - Folder.account_id == self.account_id)} + local_folders = { + f.name: f + for f in db_session.query(Folder).filter( + Folder.account_id == self.account_id + ) + } # Delete folders no longer present on the remote. # Note that the folder with canonical_name='inbox' cannot be deleted; # remote_folder_names will always contain an entry corresponding to it. 
discard = set(local_folders) - remote_folder_names for name in discard: - log.info('Folder deleted from remote', account_id=self.account_id, - name=name) + log.info( + "Folder deleted from remote", account_id=self.account_id, name=name + ) if local_folders[name].category_id is not None: - cat = db_session.query(Category).get( - local_folders[name].category_id) + cat = db_session.query(Category).get(local_folders[name].category_id) if cat is not None: db_session.delete(cat) del local_folders[name] # Create new folders for raw_folder in raw_folders: - folder = Folder.find_or_create(db_session, account, - raw_folder.display_name, - raw_folder.role) + folder = Folder.find_or_create( + db_session, account, raw_folder.display_name, raw_folder.role + ) if folder.canonical_name != raw_folder.role: folder.canonical_name = raw_folder.role @@ -117,32 +120,39 @@ def save_folder_names(self, db_session, raw_folders): db_session.commit() def start_new_folder_sync_engines(self): - running_monitors = {monitor.folder_name: monitor for monitor in - self.folder_monitors} + running_monitors = { + monitor.folder_name: monitor for monitor in self.folder_monitors + } for folder_name in self.prepare_sync(): if folder_name in running_monitors: thread = running_monitors[folder_name] else: - log.info('Folder sync engine started', - account_id=self.account_id, - folder_name=folder_name) - thread = self.sync_engine_class(self.account_id, - self.namespace_id, - folder_name, - self.email_address, - self.provider_name, - self.syncmanager_lock) + log.info( + "Folder sync engine started", + account_id=self.account_id, + folder_name=folder_name, + ) + thread = self.sync_engine_class( + self.account_id, + self.namespace_id, + folder_name, + self.email_address, + self.provider_name, + self.syncmanager_lock, + ) self.folder_monitors.start(thread) - while not thread.state == 'poll' and not thread.ready(): + while not thread.state == "poll" and not thread.ready(): sleep(self.heartbeat) if thread.ready(): - log.info('Folder sync engine exited', - account_id=self.account_id, - folder_name=folder_name, - error=thread.exception) + log.info( + "Folder sync engine exited", + account_id=self.account_id, + folder_name=folder_name, + error=thread.exception, + ) def start_delete_handler(self): if self.delete_handler is None: @@ -150,7 +160,8 @@ def start_delete_handler(self): account_id=self.account_id, namespace_id=self.namespace_id, provider_name=self.provider_name, - uid_accessor=lambda m: m.imapuids) + uid_accessor=lambda m: m.imapuids, + ) self.delete_handler.start() def sync(self): @@ -162,8 +173,11 @@ def sync(self): self.start_new_folder_sync_engines() except ValidationError as exc: log.error( - 'Error authenticating; stopping sync', exc_info=True, - account_id=self.account_id, logstash_tag='mark_invalid') + "Error authenticating; stopping sync", + exc_info=True, + account_id=self.account_id, + logstash_tag="mark_invalid", + ) with session_scope(self.namespace_id) as db_session: account = db_session.query(Account).get(self.account_id) account.mark_invalid() diff --git a/inbox/mailsync/frontend.py b/inbox/mailsync/frontend.py index 872fa7b77..218b00a60 100644 --- a/inbox/mailsync/frontend.py +++ b/inbox/mailsync/frontend.py @@ -3,8 +3,7 @@ from pympler import muppy, summary from werkzeug.serving import run_simple, WSGIRequestHandler from flask import Flask, jsonify, request -from inbox.instrumentation import (GreenletTracer, KillerGreenletTracer, - ProfileCollector) +from inbox.instrumentation import GreenletTracer, 
KillerGreenletTracer, ProfileCollector class HTTPFrontend(object): @@ -17,8 +16,9 @@ def start(self): app = self._create_app() # We need to spawn an OS-level thread because we don't want a stuck # greenlet to prevent us to access the web API. - gevent._threading.start_new_thread(run_simple, ('0.0.0.0', self.port, app), - {"request_handler": _QuietHandler}) + gevent._threading.start_new_thread( + run_simple, ("0.0.0.0", self.port, app), {"request_handler": _QuietHandler} + ) def _create_app(self): app = Flask(__name__) @@ -48,29 +48,29 @@ def start(self): super(ProfilingHTTPFrontend, self).start() def _create_app_impl(self, app): - @app.route('/profile') + @app.route("/profile") def profile(): if self.profiler is None: - return 'Profiling disabled\n', 404 + return "Profiling disabled\n", 404 resp = self.profiler.stats() - if request.args.get('reset ') in (1, 'true'): + if request.args.get("reset ") in (1, "true"): self.profiler.reset() return resp - @app.route('/load') + @app.route("/load") def load(): if self.tracer is None: - return 'Load tracing disabled\n', 404 + return "Load tracing disabled\n", 404 resp = jsonify(self.tracer.stats()) - if request.args.get('reset ') in (1, 'true'): + if request.args.get("reset ") in (1, "true"): self.tracer.reset() return resp - @app.route('/mem') + @app.route("/mem") def mem(): objs = muppy.get_objects() summ = summary.summarize(objs) - return '\n'.join(summary.format_(summ)) + '\n' + return "\n".join(summary.format_(summ)) + "\n" class SyncbackHTTPFrontend(ProfilingHTTPFrontend): @@ -89,30 +89,26 @@ def greenlet_tracer_cls(self): def _create_app_impl(self, app): super(SyncHTTPFrontend, self)._create_app_impl(app) - @app.route('/unassign', methods=['POST']) + @app.route("/unassign", methods=["POST"]) def unassign_account(): - account_id = request.json['account_id'] + account_id = request.json["account_id"] ret = self.sync_service.stop_sync(account_id) if ret: - return 'OK' + return "OK" else: - return 'Account not assigned to this process', 409 + return "Account not assigned to this process", 409 - @app.route('/build-metadata', methods=['GET']) + @app.route("/build-metadata", methods=["GET"]) def build_metadata(): - filename = '/usr/share/python/cloud-core/metadata.txt' - with open(filename, 'r') as f: - _, build_id = f.readline().rstrip('\n').split() - build_id = build_id[1:-1] # Remove first and last single quotes. - _, git_commit = f.readline().rstrip('\n').split() - return jsonify({ - 'build_id': build_id, - 'git_commit': git_commit, - }) + filename = "/usr/share/python/cloud-core/metadata.txt" + with open(filename, "r") as f: + _, build_id = f.readline().rstrip("\n").split() + build_id = build_id[1:-1] # Remove first and last single quotes. 
+ _, git_commit = f.readline().rstrip("\n").split() + return jsonify({"build_id": build_id, "git_commit": git_commit,}) class _QuietHandler(WSGIRequestHandler): - def log_request(self, *args, **kwargs): """Suppress request logging so as not to pollute application logs.""" pass diff --git a/inbox/mailsync/gc.py b/inbox/mailsync/gc.py index c2c627cb4..0a7492922 100644 --- a/inbox/mailsync/gc.py +++ b/inbox/mailsync/gc.py @@ -4,6 +4,7 @@ from sqlalchemy import func from sqlalchemy.orm import load_only from nylas.logging import get_logger + log = get_logger() from inbox.models import Message, Thread from inbox.models.category import Category, EPOCH @@ -18,8 +19,8 @@ from inbox.crispin import connection_pool from imapclient.imap_utf7 import encode as utf7_encode -DEFAULT_MESSAGE_TTL = 2 * 60 # 2 minutes -DEFAULT_THREAD_TTL = 60 * 60 * 24 * 7 # 7 days +DEFAULT_MESSAGE_TTL = 2 * 60 # 2 minutes +DEFAULT_THREAD_TTL = 60 * 60 * 24 * 7 # 7 days MAX_FETCH = 1000 @@ -50,9 +51,16 @@ class DeleteHandler(gevent.Greenlet): """ - def __init__(self, account_id, namespace_id, provider_name, uid_accessor, - message_ttl=DEFAULT_MESSAGE_TTL, thread_ttl=DEFAULT_THREAD_TTL): - bind_context(self, 'deletehandler', account_id) + def __init__( + self, + account_id, + namespace_id, + provider_name, + uid_accessor, + message_ttl=DEFAULT_MESSAGE_TTL, + thread_ttl=DEFAULT_THREAD_TTL, + ): + bind_context(self, "deletehandler", account_id) self.account_id = account_id self.namespace_id = namespace_id self.provider_name = provider_name @@ -64,8 +72,9 @@ def __init__(self, account_id, namespace_id, provider_name, uid_accessor, def _run(self): while True: - retry_with_logging(self._run_impl, account_id=self.account_id, - provider=self.provider_name) + retry_with_logging( + self._run_impl, account_id=self.account_id, provider=self.provider_name + ) def _run_impl(self): current_time = datetime.datetime.utcnow() @@ -76,10 +85,14 @@ def _run_impl(self): def check(self, current_time): with session_scope(self.namespace_id) as db_session: - dangling_messages = db_session.query(Message).filter( - Message.namespace_id == self.namespace_id, - Message.deleted_at <= current_time - self.message_ttl - ).limit(MAX_FETCH) + dangling_messages = ( + db_session.query(Message) + .filter( + Message.namespace_id == self.namespace_id, + Message.deleted_at <= current_time - self.message_ttl, + ) + .limit(MAX_FETCH) + ) for message in dangling_messages: # If the message isn't *actually* dangling (i.e., it has # imapuids associated with it), undelete it. @@ -90,9 +103,11 @@ def check(self, current_time): thread = message.thread if not thread or message not in thread.messages: - self.log.warning("Running delete handler check but message" - " is not part of referenced thread: {}", - thread_id=thread.id) + self.log.warning( + "Running delete handler check but message" + " is not part of referenced thread: {}", + thread_id=thread.id, + ) # Nothing to check continue @@ -111,8 +126,7 @@ def check(self, current_time): # TODO(emfree): This is messy. We need better # abstractions for recomputing a thread's attributes # from messages, here and in mail sync. - non_draft_messages = [m for m in thread.messages if not - m.is_draft] + non_draft_messages = [m for m in thread.messages if not m.is_draft] if not non_draft_messages: continue # The value of thread.messages is ordered oldest-to-newest. @@ -135,14 +149,17 @@ def gc_deleted_categories(self): # associated with it. If not, delete it. 
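
The delete handler's check above only garbage-collects a message once it has been marked deleted for longer than the TTL and still has no imapuids; a message that regained a uid in the meantime is resurrected instead. A toy sketch of that decision, with plain dicts standing in for Message rows:

    import datetime

    def sort_dangling(messages, now, message_ttl=datetime.timedelta(minutes=2)):
        # Only messages deleted for longer than the TTL are considered; those that
        # regained an imapuid in the meantime are resurrected rather than collected.
        to_delete, to_undelete = [], []
        for msg in messages:
            if msg["deleted_at"] > now - message_ttl:
                continue  # not old enough to act on yet
            if msg["imapuids"]:
                to_undelete.append(msg["id"])
            else:
                to_delete.append(msg["id"])
        return to_delete, to_undelete

    now = datetime.datetime(2020, 8, 17, 12, 0)
    messages = [
        {"id": 1, "deleted_at": now - datetime.timedelta(minutes=5), "imapuids": []},
        {"id": 2, "deleted_at": now - datetime.timedelta(minutes=5), "imapuids": [7]},
    ]
    print(sort_dangling(messages, now))  # -> ([1], [2])
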
with session_scope(self.namespace_id) as db_session: categories = db_session.query(Category).filter( - Category.namespace_id == self.namespace_id, - Category.deleted_at > EPOCH) + Category.namespace_id == self.namespace_id, Category.deleted_at > EPOCH + ) for category in categories: # Check if no message is associated with the category. If yes, # delete it. - count = db_session.query(func.count(MessageCategory.id)).filter( - MessageCategory.category_id == category.id).scalar() + count = ( + db_session.query(func.count(MessageCategory.id)) + .filter(MessageCategory.category_id == category.id) + .scalar() + ) if count == 0: db_session.delete(category) @@ -150,10 +167,14 @@ def gc_deleted_categories(self): def gc_deleted_threads(self, current_time): with session_scope(self.namespace_id) as db_session: - deleted_threads = db_session.query(Thread).filter( - Thread.namespace_id == self.namespace_id, - Thread.deleted_at <= current_time - self.thread_ttl - ).limit(MAX_FETCH) + deleted_threads = ( + db_session.query(Thread) + .filter( + Thread.namespace_id == self.namespace_id, + Thread.deleted_at <= current_time - self.thread_ttl, + ) + .limit(MAX_FETCH) + ) for thread in deleted_threads: if thread.messages: thread.deleted_at = None @@ -177,7 +198,7 @@ class LabelRenameHandler(gevent.Greenlet): """ def __init__(self, account_id, namespace_id, label_name, semaphore): - bind_context(self, 'renamehandler', account_id) + bind_context(self, "renamehandler", account_id) self.account_id = account_id self.namespace_id = namespace_id self.label_name = label_name @@ -189,8 +210,7 @@ def _run(self): return retry_with_logging(self._run_impl, account_id=self.account_id) def _run_impl(self): - self.log.info('Starting LabelRenameHandler', - label_name=self.label_name) + self.log.info("Starting LabelRenameHandler", label_name=self.label_name) self.semaphore.acquire(blocking=True) @@ -199,7 +219,8 @@ def _run_impl(self): folder_names = [] with session_scope(self.account_id) as db_session: folders = db_session.query(Folder).filter( - Folder.account_id == self.account_id) + Folder.account_id == self.account_id + ) folder_names = [folder.name for folder in folders] db_session.expunge_all() @@ -207,22 +228,35 @@ def _run_impl(self): for folder_name in folder_names: crispin_client.select_folder(folder_name, uidvalidity_cb) - found_uids = crispin_client.search_uids(['X-GM-LABELS', - utf7_encode(self.label_name)]) + found_uids = crispin_client.search_uids( + ["X-GM-LABELS", utf7_encode(self.label_name)] + ) for chnk in chunk(found_uids, 200): flags = crispin_client.flags(chnk) - self.log.info('Running metadata update for folder', - folder_name=folder_name) + self.log.info( + "Running metadata update for folder", + folder_name=folder_name, + ) with session_scope(self.account_id) as db_session: - fld = db_session.query(Folder).options(load_only("id"))\ - .filter(Folder.account_id == self.account_id, - Folder.name == folder_name).one() - - common.update_metadata(self.account_id, fld.id, - fld.canonical_name, flags, - db_session) + fld = ( + db_session.query(Folder) + .options(load_only("id")) + .filter( + Folder.account_id == self.account_id, + Folder.name == folder_name, + ) + .one() + ) + + common.update_metadata( + self.account_id, + fld.id, + fld.canonical_name, + flags, + db_session, + ) db_session.commit() finally: self.semaphore.release() diff --git a/inbox/mailsync/service.py b/inbox/mailsync/service.py index 3bdf65b78..d47964cb5 100644 --- a/inbox/mailsync/service.py +++ b/inbox/mailsync/service.py @@ -22,18 +22,19 
@@ from inbox.mailsync.backends import module_registry -USE_GOOGLE_PUSH_NOTIFICATIONS = \ - 'GOOGLE_PUSH_NOTIFICATIONS' in config.get('FEATURE_FLAGS', []) +USE_GOOGLE_PUSH_NOTIFICATIONS = "GOOGLE_PUSH_NOTIFICATIONS" in config.get( + "FEATURE_FLAGS", [] +) # How much time (in minutes) should all CPUs be over 90% to consider them # overloaded. SYNC_POLL_INTERVAL = 20 PENDING_AVGS_THRESHOLD = 10 -MAX_ACCOUNTS_PER_PROCESS = config.get('MAX_ACCOUNTS_PER_PROCESS', 150) +MAX_ACCOUNTS_PER_PROCESS = config.get("MAX_ACCOUNTS_PER_PROCESS", 150) -SYNC_EVENT_QUEUE_NAME = 'sync:event_queue:{}' -SHARED_SYNC_EVENT_QUEUE_NAME = 'sync:shared_event_queue:{}' +SYNC_EVENT_QUEUE_NAME = "sync:event_queue:{}" +SHARED_SYNC_EVENT_QUEUE_NAME = "sync:shared_event_queue:{}" SHARED_SYNC_EVENT_QUEUE_ZONE_MAP = {} @@ -59,16 +60,23 @@ class SyncService(object): Serves as the max timeout for the redis blocking pop. """ - def __init__(self, process_identifier, process_number, - poll_interval=SYNC_POLL_INTERVAL, - exit_after_min=None, exit_after_max=None): + def __init__( + self, + process_identifier, + process_number, + poll_interval=SYNC_POLL_INTERVAL, + exit_after_min=None, + exit_after_max=None, + ): self.keep_running = True self.host = platform.node() self.process_number = process_number self.process_identifier = process_identifier - self.monitor_cls_for = {mod.PROVIDER: getattr( - mod, mod.SYNC_MONITOR_CLS) for mod in module_registry.values() - if hasattr(mod, 'SYNC_MONITOR_CLS')} + self.monitor_cls_for = { + mod.PROVIDER: getattr(mod, mod.SYNC_MONITOR_CLS) + for mod in module_registry.values() + if hasattr(mod, "SYNC_MONITOR_CLS") + } for p_name, p in providers.iteritems(): if p_name not in self.monitor_cls_for: @@ -76,8 +84,9 @@ def __init__(self, process_identifier, process_number, self.log = get_logger() self.log.bind(process_number=process_number) - self.log.info('starting mail sync process', - supported_providers=module_registry.keys()) + self.log.info( + "starting mail sync process", supported_providers=module_registry.keys() + ) self.syncing_accounts = set() self.email_sync_monitors = {} @@ -86,30 +95,32 @@ def __init__(self, process_identifier, process_number, # Randomize the poll_interval so we maintain at least a little fairness # when using a timeout while blocking on the redis queues. min_poll_interval = 5 - self.poll_interval = int((random.random() * (poll_interval - min_poll_interval)) + min_poll_interval) + self.poll_interval = int( + (random.random() * (poll_interval - min_poll_interval)) + min_poll_interval + ) self.semaphore = BoundedSemaphore(1) - self.zone = config.get('ZONE') + self.zone = config.get("ZONE") # Note that we don't partition by zone for the private queues. # There's not really a reason to since there's one queue per machine # anyways. Also, if you really want to send an Account to a mailsync # machine in another zone you can do so. 
- self.private_queue = EventQueue(SYNC_EVENT_QUEUE_NAME.format(self.process_identifier)) - self.queue_group = EventQueueGroup([ - shared_sync_event_queue_for_zone(self.zone), - self.private_queue, - ]) - - self.stealing_enabled = config.get('SYNC_STEAL_ACCOUNTS', True) + self.private_queue = EventQueue( + SYNC_EVENT_QUEUE_NAME.format(self.process_identifier) + ) + self.queue_group = EventQueueGroup( + [shared_sync_event_queue_for_zone(self.zone), self.private_queue,] + ) + + self.stealing_enabled = config.get("SYNC_STEAL_ACCOUNTS", True) self._pending_avgs_provider = None self.last_unloaded_account = time.time() if exit_after_min and exit_after_max: - exit_after = random.randint(exit_after_min*60, exit_after_max*60) - self.log.info('exit after', seconds=exit_after) + exit_after = random.randint(exit_after_min * 60, exit_after_max * 60) + self.log.info("exit after", seconds=exit_after) gevent.spawn_later(exit_after, self.stop) - def run(self): while self.keep_running: retry_with_logging(self._run_impl, self.log) @@ -120,7 +131,7 @@ def _run_impl(self): """ # When the service first starts we should check the state of the world. - self.poll({'queue_name': 'none'}) + self.poll({"queue_name": "none"}) event = None while self.keep_running and event is None: event = self.queue_group.receive_event(timeout=self.poll_interval) @@ -128,7 +139,10 @@ def _run_impl(self): if not event: return - if shared_sync_event_queue_for_zone(self.zone).queue_name == event['queue_name']: + if ( + shared_sync_event_queue_for_zone(self.zone).queue_name + == event["queue_name"] + ): self.poll_shared_queue(event) return @@ -155,28 +169,39 @@ def poll_shared_queue(self, event): pending_avgs = self._pending_avgs_provider.get_pending_avgs() pending_avgs_over_threshold = pending_avgs[15] >= PENDING_AVGS_THRESHOLD - if self.stealing_enabled and not pending_avgs_over_threshold and \ - len(self.syncing_accounts) < MAX_ACCOUNTS_PER_PROCESS: - account_id = event['id'] + if ( + self.stealing_enabled + and not pending_avgs_over_threshold + and len(self.syncing_accounts) < MAX_ACCOUNTS_PER_PROCESS + ): + account_id = event["id"] if self.start_sync(account_id): - self.log.info('Claimed new unassigned account sync', account_id=account_id) + self.log.info( + "Claimed new unassigned account sync", account_id=account_id + ) return if not self.stealing_enabled: - reason = 'stealing disabled' + reason = "stealing disabled" elif pending_avgs_over_threshold: - reason = 'process pending avgs too high' + reason = "process pending avgs too high" else: - reason = 'reached max accounts for process' - self.log.info('Not claiming new account sync, sending event back to shared queue', reason=reason) + reason = "reached max accounts for process" + self.log.info( + "Not claiming new account sync, sending event back to shared queue", + reason=reason, + ) shared_sync_event_queue_for_zone(self.zone).send_event(event) def poll(self, event): # Determine which accounts to sync start_accounts = self.account_ids_to_sync() statsd_client.gauge( - 'mailsync.account_counts.{}.mailsync-{}.count'.format( - self.host, self.process_number), len(start_accounts)) + "mailsync.account_counts.{}.mailsync-{}.count".format( + self.host, self.process_number + ), + len(start_accounts), + ) # Perform the appropriate action on each account for account_id in start_accounts: @@ -184,36 +209,53 @@ def poll(self, event): try: self.start_sync(account_id) except OperationalError: - self.log.error('Database error starting account sync', - exc_info=True) + self.log.error( + "Database 
error starting account sync", exc_info=True + ) log_uncaught_errors() stop_accounts = self.account_ids_owned() - set(start_accounts) for account_id in stop_accounts: - self.log.info('sync service stopping sync', - account_id=account_id) + self.log.info("sync service stopping sync", account_id=account_id) try: self.stop_sync(account_id) except OperationalError: - self.log.error('Database error stopping account sync', - exc_info=True) + self.log.error("Database error stopping account sync", exc_info=True) log_uncaught_errors() def account_ids_to_sync(self): with global_session_scope() as db_session: - return {r[0] for r in db_session.query(Account.id). - filter(Account.sync_should_run, - or_(and_(Account.desired_sync_host == self.process_identifier, - Account.sync_host == None), # noqa - and_(Account.desired_sync_host == None, # noqa - Account.sync_host == self.process_identifier), - and_(Account.desired_sync_host == self.process_identifier, - Account.sync_host == self.process_identifier))).all()} + return { + r[0] + for r in db_session.query(Account.id) + .filter( + Account.sync_should_run, + or_( + and_( + Account.desired_sync_host == self.process_identifier, + Account.sync_host == None, + ), # noqa + and_( + Account.desired_sync_host == None, # noqa + Account.sync_host == self.process_identifier, + ), + and_( + Account.desired_sync_host == self.process_identifier, + Account.sync_host == self.process_identifier, + ), + ), + ) + .all() + } def account_ids_owned(self): with global_session_scope() as db_session: - return {r[0] for r in db_session.query(Account.id). - filter(Account.sync_host == self.process_identifier).all()} + return { + r[0] + for r in db_session.query(Account.id) + .filter(Account.sync_host == self.process_identifier) + .all() + } def register_pending_avgs_provider(self, pending_avgs_provider): self._pending_avgs_provider = pending_avgs_provider @@ -227,19 +269,23 @@ def start_sync(self, account_id): with self.semaphore, session_scope(account_id) as db_session: acc = db_session.query(Account).with_for_update().get(account_id) if acc is None: - self.log.error('no such account', account_id=account_id) + self.log.error("no such account", account_id=account_id) return False if not acc.sync_should_run: return False - if acc.desired_sync_host is not None and acc.desired_sync_host != self.process_identifier: + if ( + acc.desired_sync_host is not None + and acc.desired_sync_host != self.process_identifier + ): return False if acc.sync_host is not None and acc.sync_host != self.process_identifier: return False - self.log.info('starting sync', account_id=acc.id, - email_address=acc.email_address) + self.log.info( + "starting sync", account_id=acc.id, email_address=acc.email_address + ) if acc.id in self.syncing_accounts: - self.log.info('sync already started', account_id=account_id) + self.log.info("sync already started", account_id=account_id) return False try: @@ -250,26 +296,31 @@ def start_sync(self, account_id): monitor.start() info = acc.provider_info - if info.get('contacts', None) and acc.sync_contacts: - contact_sync = ContactSync(acc.email_address, - acc.verbose_provider, - acc.id, - acc.namespace.id) + if info.get("contacts", None) and acc.sync_contacts: + contact_sync = ContactSync( + acc.email_address, + acc.verbose_provider, + acc.id, + acc.namespace.id, + ) self.contact_sync_monitors[acc.id] = contact_sync contact_sync.start() - if info.get('events', None) and acc.sync_events: - if (USE_GOOGLE_PUSH_NOTIFICATIONS and - acc.provider == 'gmail'): - event_sync = 
GoogleEventSync(acc.email_address, - acc.verbose_provider, - acc.id, - acc.namespace.id) + if info.get("events", None) and acc.sync_events: + if USE_GOOGLE_PUSH_NOTIFICATIONS and acc.provider == "gmail": + event_sync = GoogleEventSync( + acc.email_address, + acc.verbose_provider, + acc.id, + acc.namespace.id, + ) else: - event_sync = EventSync(acc.email_address, - acc.verbose_provider, - acc.id, - acc.namespace.id) + event_sync = EventSync( + acc.email_address, + acc.verbose_provider, + acc.id, + acc.namespace.id, + ) self.event_sync_monitors[acc.id] = event_sync event_sync.start() @@ -278,16 +329,18 @@ def start_sync(self, account_id): # TODO (mark): Uncomment this after we've transitioned to from statsd to brubeck # statsd_client.gauge('mailsync.sync_hosts_counts.{}'.format(acc.id), 1, delta=True) db_session.commit() - self.log.info('Sync started', account_id=account_id, - sync_host=acc.sync_host) + self.log.info( + "Sync started", account_id=account_id, sync_host=acc.sync_host + ) except Exception: - self.log.error('Error starting sync', exc_info=True, - account_id=account_id) + self.log.error( + "Error starting sync", exc_info=True, account_id=account_id + ) return False return True def stop(self, *args): - self.log.info('stopping mail sync process') + self.log.info("stopping mail sync process") for k, v in self.email_sync_monitors.iteritems(): gevent.kill(v) for k, v in self.contact_sync_monitors.iteritems(): @@ -304,7 +357,7 @@ def stop_sync(self, account_id): """ with self.semaphore: - self.log.info('Stopping monitors', account_id=account_id) + self.log.info("Stopping monitors", account_id=account_id) if account_id in self.email_sync_monitors: self.email_sync_monitors[account_id].kill() del self.email_sync_monitors[account_id] @@ -327,7 +380,7 @@ def stop_sync(self, account_id): if not acc.sync_stopped(self.process_identifier): self.syncing_accounts.discard(account_id) return False - self.log.info('sync stopped', account_id=account_id) + self.log.info("sync stopped", account_id=account_id) # TODO (mark): Uncomment this after we've transitioned to from statsd to brubeck # statsd_client.gauge('mailsync.sync_hosts_counts.{}'.format(acc.id), -1, delta=True) db_session.commit() diff --git a/inbox/models/__init__.py b/inbox/models/__init__.py index 1bc8b96f2..8bf7c75ca 100644 --- a/inbox/models/__init__.py +++ b/inbox/models/__init__.py @@ -27,7 +27,9 @@ """ from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) from inbox.models.backends import module_registry as backend_module_registry # noqa from inbox.models.meta import load_models + locals().update({model.__name__: model for model in load_models()}) diff --git a/inbox/models/account.py b/inbox/models/account.py index 7403031db..e87d06357 100644 --- a/inbox/models/account.py +++ b/inbox/models/account.py @@ -2,8 +2,19 @@ import traceback from datetime import datetime -from sqlalchemy import (Column, BigInteger, String, DateTime, Boolean, - ForeignKey, Enum, inspect, bindparam, Index, event) +from sqlalchemy import ( + Column, + BigInteger, + String, + DateTime, + Boolean, + ForeignKey, + Enum, + inspect, + bindparam, + Index, + event, +) from sqlalchemy.orm import relationship from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import false @@ -11,15 +22,21 @@ from inbox.config import config from inbox.sqlalchemy_ext.util import JSON, MutableDict, bakery -from inbox.models.mixins import (HasPublicID, HasEmailAddress, HasRunState, - HasRevisions, UpdatedAtMixin, - DeletedAtMixin) +from 
inbox.models.mixins import ( + HasPublicID, + HasEmailAddress, + HasRunState, + HasRevisions, + UpdatedAtMixin, + DeletedAtMixin, +) from inbox.models.base import MailSyncBase from inbox.models.calendar import Calendar from inbox.scheduling.event_queue import EventQueue from inbox.providers import provider_info from nylas.logging.sentry import log_uncaught_errors from nylas.logging import get_logger + log = get_logger() @@ -27,9 +44,17 @@ # should use objects that inherit from this, such as GenericAccount or # GmailAccount -class Account(MailSyncBase, HasPublicID, HasEmailAddress, HasRunState, - HasRevisions, UpdatedAtMixin, DeletedAtMixin): - API_OBJECT_NAME = 'account' + +class Account( + MailSyncBase, + HasPublicID, + HasEmailAddress, + HasRunState, + HasRevisions, + UpdatedAtMixin, + DeletedAtMixin, +): + API_OBJECT_NAME = "account" @property def provider(self): @@ -63,6 +88,7 @@ def category_type(self): @property def auth_handler(self): from inbox.auth.base import handler_from_provider + return handler_from_provider(self.provider) @property @@ -72,10 +98,11 @@ def provider_info(self): @property def thread_cls(self): from inbox.models.thread import Thread + return Thread # The default phrase used when sending mail from this account. - name = Column(String(256), nullable=False, server_default='') + name = Column(String(256), nullable=False, server_default="") # If True, throttle initial sync to reduce resource load throttled = Column(Boolean, server_default=false()) @@ -91,25 +118,31 @@ def thread_cls(self): # DEPRECATED last_synced_events = Column(DateTime, nullable=True) - emailed_events_calendar_id = Column(BigInteger, - ForeignKey('calendar.id', - ondelete='SET NULL', - use_alter=True, - name='emailed_events_cal'), - nullable=True) + emailed_events_calendar_id = Column( + BigInteger, + ForeignKey( + "calendar.id", + ondelete="SET NULL", + use_alter=True, + name="emailed_events_cal", + ), + nullable=True, + ) _emailed_events_calendar = relationship( - 'Calendar', post_update=True, - foreign_keys=[emailed_events_calendar_id]) + "Calendar", post_update=True, foreign_keys=[emailed_events_calendar_id] + ) def create_emailed_events_calendar(self): if not self._emailed_events_calendar: calname = "Emailed events" - cal = Calendar(namespace=self.namespace, - description=calname, - uid='inbox', - name=calname, - read_only=True) + cal = Calendar( + namespace=self.namespace, + description=calname, + uid="inbox", + name=calname, + read_only=True, + ) self._emailed_events_calendar = cal @property @@ -125,7 +158,7 @@ def emailed_events_calendar(self, cal): desired_sync_host = Column(String(255), nullable=True) # current state of this account - state = Column(Enum('live', 'down', 'invalid'), nullable=True) + state = Column(Enum("live", "down", "invalid"), nullable=True) # Based on account status, should the sync be running? # (Note, this is stored via a mixin.) 
@@ -139,41 +172,44 @@ def emailed_events_calendar(self, cal): def sync_enabled(self): return self.sync_should_run - sync_state = Column(Enum('running', 'stopped', 'killed', - 'invalid', 'connerror'), - nullable=True) + sync_state = Column( + Enum("running", "stopped", "killed", "invalid", "connerror"), nullable=True + ) - _sync_status = Column(MutableDict.as_mutable(JSON), default={}, - nullable=True) + _sync_status = Column(MutableDict.as_mutable(JSON), default={}, nullable=True) @property def sync_status(self): - d = dict(id=self.id, - email=self.email_address, - provider=self.provider, - is_enabled=self.sync_enabled, - state=self.sync_state, - sync_host=self.sync_host, - desired_sync_host=self.desired_sync_host) + d = dict( + id=self.id, + email=self.email_address, + provider=self.provider, + is_enabled=self.sync_enabled, + state=self.sync_state, + sync_host=self.sync_host, + desired_sync_host=self.desired_sync_host, + ) d.update(self._sync_status or {}) return d @property def sync_error(self): - return self._sync_status.get('sync_error') + return self._sync_status.get("sync_error") @property def initial_sync_start(self): - if len(self.folders) == 0 or \ - any([f.initial_sync_start is None for f in self.folders]): + if len(self.folders) == 0 or any( + [f.initial_sync_start is None for f in self.folders] + ): return None return min([f.initial_sync_start for f in self.folders]) @property def initial_sync_end(self): - if len(self.folders) == 0 \ - or any([f.initial_sync_end is None for f in self.folders]): + if len(self.folders) == 0 or any( + [f.initial_sync_end is None for f in self.folders] + ): return None return max([f.initial_sync_end for f in self.folders]) @@ -185,14 +221,17 @@ def initial_sync_duration(self): def update_sync_error(self, error=None): if error is None: - self._sync_status['sync_error'] = None + self._sync_status["sync_error"] = None else: error_obj = { - 'message': str(error.message)[:3000], - 'exception': "".join(traceback.format_exception_only(type(error), error))[:500], - 'traceback': traceback.format_exc(20)[:3000]} + "message": str(error.message)[:3000], + "exception": "".join( + traceback.format_exception_only(type(error), error) + )[:500], + "traceback": traceback.format_exc(20)[:3000], + } - self._sync_status['sync_error'] = error_obj + self._sync_status["sync_error"] = error_obj def sync_started(self): """ @@ -204,18 +243,18 @@ def sync_started(self): # Never run before (vs restarting stopped/killed) if self.sync_state is None and ( - not self._sync_status or - self._sync_status.get('sync_end_time') is None): - self._sync_status['original_start_time'] = current_time + not self._sync_status or self._sync_status.get("sync_end_time") is None + ): + self._sync_status["original_start_time"] = current_time - self._sync_status['sync_start_time'] = current_time - self._sync_status['sync_end_time'] = None - self._sync_status['sync_error'] = None - self._sync_status['sync_disabled_reason'] = None - self._sync_status['sync_disabled_on'] = None - self._sync_status['sync_disabled_by'] = None + self._sync_status["sync_start_time"] = current_time + self._sync_status["sync_end_time"] = None + self._sync_status["sync_error"] = None + self._sync_status["sync_disabled_reason"] = None + self._sync_status["sync_disabled_on"] = None + self._sync_status["sync_disabled_by"] = None - self.sync_state = 'running' + self.sync_state = "running" def enable_sync(self, sync_host=None): """ Tell the monitor that this account should be syncing. 
""" @@ -226,12 +265,11 @@ def enable_sync(self, sync_host=None): def disable_sync(self, reason): """ Tell the monitor that this account should stop syncing. """ self.sync_should_run = False - self._sync_status['sync_disabled_reason'] = reason - self._sync_status['sync_disabled_on'] = datetime.utcnow() - self._sync_status['sync_disabled_by'] = os.environ.get('USER', - 'unknown') + self._sync_status["sync_disabled_reason"] = reason + self._sync_status["sync_disabled_on"] = datetime.utcnow() + self._sync_status["sync_disabled_by"] = os.environ.get("USER", "unknown") - def mark_invalid(self, reason='invalid credentials', scope='mail'): + def mark_invalid(self, reason="invalid credentials", scope="mail"): """ In the event that the credentials for this account are invalid, update the status and sync flag accordingly. Should only be called @@ -239,21 +277,21 @@ def mark_invalid(self, reason='invalid credentials', scope='mail'): """ self.disable_sync(reason) - self.sync_state = 'invalid' + self.sync_state = "invalid" def mark_for_deletion(self): """ Mark account for deletion """ - self.disable_sync('account deleted') - self.sync_state = 'stopped' + self.disable_sync("account deleted") + self.sync_state = "stopped" # Commit this to prevent race conditions inspect(self).session.commit() def unmark_for_deletion(self): self.enable_sync() self._sync_status = {} - self.sync_state = 'running' + self.sync_state = "running" inspect(self).session.commit() def sync_stopped(self, requesting_host): @@ -268,38 +306,41 @@ def sync_stopped(self, requesting_host): # here still owns the account sync (i.e is account.sync_host), # the request can proceed. self.sync_host = None - if self.sync_state == 'running': - self.sync_state = 'stopped' - self._sync_status['sync_end_time'] = datetime.utcnow() + if self.sync_state == "running": + self.sync_state = "stopped" + self._sync_status["sync_end_time"] = datetime.utcnow() return True return False @classmethod def get(cls, id_, session): q = bakery(lambda session: session.query(cls)) - q += lambda q: q.filter(cls.id == bindparam('id_')) + q += lambda q: q.filter(cls.id == bindparam("id_")) return q(session).params(id_=id_).first() @property def is_killed(self): - return self.sync_state == 'killed' + return self.sync_state == "killed" @property def is_running(self): - return self.sync_state == 'running' + return self.sync_state == "running" @property def is_marked_for_deletion(self): - return self.sync_state in ('stopped', 'killed', 'invalid') and \ - self.sync_should_run is False and \ - self._sync_status.get('sync_disabled_reason') == 'account deleted' + return ( + self.sync_state in ("stopped", "killed", "invalid") + and self.sync_should_run is False + and self._sync_status.get("sync_disabled_reason") == "account deleted" + ) @property def should_suppress_transaction_creation(self): # Only version if new or the `sync_state` has changed. obj_state = inspect(self) - return not (obj_state.pending or - inspect(self).attrs.sync_state.history.has_changes()) + return not ( + obj_state.pending or inspect(self).attrs.sync_state.history.has_changes() + ) @property def server_settings(self): @@ -312,9 +353,11 @@ def get_raw_message_contents(self, message): # account class. 
raise NotImplementedError - discriminator = Column('type', String(16)) - __mapper_args__ = {'polymorphic_identity': 'account', - 'polymorphic_on': discriminator} + discriminator = Column("type", String(16)) + __mapper_args__ = { + "polymorphic_identity": "account", + "polymorphic_on": discriminator, + } def should_send_event(obj): @@ -332,39 +375,49 @@ def should_send_event(obj): def already_registered_listener(obj): - return getattr(obj, '_listener_state', None) is not None + return getattr(obj, "_listener_state", None) is not None def update_listener_state(obj): - obj._listener_state['sync_should_run'] = obj.sync_should_run - obj._listener_state['sync_host'] = obj.sync_host - obj._listener_state['desired_sync_host'] = obj.desired_sync_host - obj._listener_state['sent_event'] = False + obj._listener_state["sync_should_run"] = obj.sync_should_run + obj._listener_state["sync_host"] = obj.sync_host + obj._listener_state["desired_sync_host"] = obj.desired_sync_host + obj._listener_state["sent_event"] = False @event.listens_for(Session, "after_flush") def after_flush(session, flush_context): - from inbox.mailsync.service import shared_sync_event_queue_for_zone, SYNC_EVENT_QUEUE_NAME + from inbox.mailsync.service import ( + shared_sync_event_queue_for_zone, + SYNC_EVENT_QUEUE_NAME, + ) def send_migration_events(obj_state): def f(session): - if obj_state['sent_event']: + if obj_state["sent_event"]: return - id = obj_state['id'] - sync_should_run = obj_state['sync_should_run'] - sync_host = obj_state['sync_host'] - desired_sync_host = obj_state['desired_sync_host'] + id = obj_state["id"] + sync_should_run = obj_state["sync_should_run"] + sync_host = obj_state["sync_host"] + desired_sync_host = obj_state["desired_sync_host"] try: if sync_host is not None: # Somebody is actively syncing this Account, so notify them if # they should give up the Account. - if not sync_should_run or (sync_host != desired_sync_host and desired_sync_host is not None): + if not sync_should_run or ( + sync_host != desired_sync_host and desired_sync_host is not None + ): queue_name = SYNC_EVENT_QUEUE_NAME.format(sync_host) - log.info("Sending 'migrate_from' event for Account", - account_id=id, queue_name=queue_name) - EventQueue(queue_name).send_event({'event': 'migrate_from', 'id': id}) + log.info( + "Sending 'migrate_from' event for Account", + account_id=id, + queue_name=queue_name, + ) + EventQueue(queue_name).send_event( + {"event": "migrate_from", "id": id} + ) return if not sync_should_run: @@ -377,21 +430,32 @@ def f(session): # Nobody is actively syncing the Account, and we have somebody # who wants to sync this Account, so notify them. queue_name = SYNC_EVENT_QUEUE_NAME.format(desired_sync_host) - log.info("Sending 'migrate_to' event for Account", - account_id=id, queue_name=queue_name) - EventQueue(queue_name).send_event({'event': 'migrate_to', 'id': id}) + log.info( + "Sending 'migrate_to' event for Account", + account_id=id, + queue_name=queue_name, + ) + EventQueue(queue_name).send_event({"event": "migrate_to", "id": id}) return # Nobody is actively syncing the Account, and nobody in particular # wants to sync the Account so notify the shared queue. 
- shared_queue = shared_sync_event_queue_for_zone(config.get('ZONE')) - log.info("Sending 'migrate' event for Account", - account_id=id, queue_name=shared_queue.queue_name) - shared_queue.send_event({'event': 'migrate', 'id': id}) - obj_state['sent_event'] = True + shared_queue = shared_sync_event_queue_for_zone(config.get("ZONE")) + log.info( + "Sending 'migrate' event for Account", + account_id=id, + queue_name=shared_queue.queue_name, + ) + shared_queue.send_event({"event": "migrate", "id": id}) + obj_state["sent_event"] = True except: - log_uncaught_errors(log, account_id=id, sync_host=sync_host, - desired_sync_host=desired_sync_host) + log_uncaught_errors( + log, + account_id=id, + sync_host=sync_host, + desired_sync_host=desired_sync_host, + ) + return f for obj in session.new: @@ -399,11 +463,11 @@ def f(session): if already_registered_listener(obj): update_listener_state(obj) else: - obj._listener_state = {'id': obj.id} + obj._listener_state = {"id": obj.id} update_listener_state(obj) - event.listen(session, - 'after_commit', - send_migration_events(obj._listener_state)) + event.listen( + session, "after_commit", send_migration_events(obj._listener_state) + ) for obj in session.dirty: if not session.is_modified(obj): @@ -412,12 +476,16 @@ def f(session): if already_registered_listener(obj): update_listener_state(obj) else: - obj._listener_state = {'id': obj.id} + obj._listener_state = {"id": obj.id} update_listener_state(obj) - event.listen(session, - 'after_commit', - send_migration_events(obj._listener_state)) + event.listen( + session, "after_commit", send_migration_events(obj._listener_state) + ) -Index('ix_account_sync_should_run_sync_host', Account.sync_should_run, - Account.sync_host, mysql_length={'sync_host': 191}) +Index( + "ix_account_sync_should_run_sync_host", + Account.sync_should_run, + Account.sync_host, + mysql_length={"sync_host": 191}, +) diff --git a/inbox/models/action_log.py b/inbox/models/action_log.py index c49f5bedb..4dbed5813 100644 --- a/inbox/models/action_log.py +++ b/inbox/models/action_log.py @@ -1,8 +1,18 @@ -from sqlalchemy import (Column, BigInteger, Integer, Text, ForeignKey, Enum, - Index, String, desc) +from sqlalchemy import ( + Column, + BigInteger, + Integer, + Text, + ForeignKey, + Enum, + Index, + String, + desc, +) from sqlalchemy.orm import relationship from nylas.logging import get_logger + log = get_logger() from inbox.sqlalchemy_ext.util import JSON from inbox.models.base import MailSyncBase @@ -17,13 +27,18 @@ def schedule_action(func_name, record, namespace_id, db_session, **kwargs): account = db_session.query(Namespace).get(namespace_id).account # Don't queue action if an existing pending action exists. 
- existing_log_entry = db_session.query(ActionLog).filter( - ActionLog.discriminator == 'actionlog', - ActionLog.status == 'pending', - ActionLog.namespace_id == namespace_id, - ActionLog.action == func_name, - ActionLog.record_id == record.id).\ - order_by(desc(ActionLog.id)).first() + existing_log_entry = ( + db_session.query(ActionLog) + .filter( + ActionLog.discriminator == "actionlog", + ActionLog.status == "pending", + ActionLog.namespace_id == namespace_id, + ActionLog.action == func_name, + ActionLog.record_id == record.id, + ) + .order_by(desc(ActionLog.id)) + .first() + ) if existing_log_entry and existing_log_entry.extra_args == kwargs: return @@ -32,37 +47,52 @@ def schedule_action(func_name, record, namespace_id, db_session, **kwargs): table_name=record.__tablename__, record_id=record.id, namespace_id=namespace_id, - extra_args=kwargs) + extra_args=kwargs, + ) db_session.add(log_entry) class ActionLog(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): - namespace_id = Column(ForeignKey(Namespace.id, ondelete='CASCADE'), - nullable=False, - index=True) - namespace = relationship('Namespace') + namespace_id = Column( + ForeignKey(Namespace.id, ondelete="CASCADE"), nullable=False, index=True + ) + namespace = relationship("Namespace") action = Column(Text(40), nullable=False) record_id = Column(BigInteger, nullable=False) table_name = Column(Text(40), nullable=False) - status = Column(Enum('pending', 'successful', 'failed'), - server_default='pending') - retries = Column(Integer, server_default='0', nullable=False) + status = Column(Enum("pending", "successful", "failed"), server_default="pending") + retries = Column(Integer, server_default="0", nullable=False) extra_args = Column(JSON, nullable=True) @classmethod def create(cls, action, table_name, record_id, namespace_id, extra_args): - return cls(action=action, table_name=table_name, record_id=record_id, - namespace_id=namespace_id, extra_args=extra_args) + return cls( + action=action, + table_name=table_name, + record_id=record_id, + namespace_id=namespace_id, + extra_args=extra_args, + ) - discriminator = Column('type', String(16)) - __mapper_args__ = {'polymorphic_identity': 'actionlog', - 'polymorphic_on': discriminator} + discriminator = Column("type", String(16)) + __mapper_args__ = { + "polymorphic_identity": "actionlog", + "polymorphic_on": discriminator, + } -Index('ix_actionlog_status_namespace_id_record_id', ActionLog.status, - ActionLog.namespace_id, ActionLog.record_id) +Index( + "ix_actionlog_status_namespace_id_record_id", + ActionLog.status, + ActionLog.namespace_id, + ActionLog.record_id, +) -Index('ix_actionlog_namespace_id_status_type', - ActionLog.namespace_id, ActionLog.status, ActionLog.discriminator) +Index( + "ix_actionlog_namespace_id_status_type", + ActionLog.namespace_id, + ActionLog.status, + ActionLog.discriminator, +) diff --git a/inbox/models/backends/__init__.py b/inbox/models/backends/__init__.py index 5dcfddbf9..0d89b9c64 100644 --- a/inbox/models/backends/__init__.py +++ b/inbox/models/backends/__init__.py @@ -3,6 +3,8 @@ """ # Allow out-of-tree table submodules. 
from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) from inbox.util.misc import register_backends + module_registry = register_backends(__name__, __path__) diff --git a/inbox/models/backends/generic.py b/inbox/models/backends/generic.py index f80c1fc11..abe040c4f 100644 --- a/inbox/models/backends/generic.py +++ b/inbox/models/backends/generic.py @@ -4,12 +4,11 @@ from inbox.models.backends.imap import ImapAccount from inbox.models.secret import Secret -PROVIDER = 'generic' +PROVIDER = "generic" class GenericAccount(ImapAccount): - id = Column(ForeignKey(ImapAccount.id, ondelete='CASCADE'), - primary_key=True) + id = Column(ForeignKey(ImapAccount.id, ondelete="CASCADE"), primary_key=True) provider = Column(String(64)) imap_username = Column(String(255), nullable=True) @@ -17,60 +16,65 @@ class GenericAccount(ImapAccount): # The IMAP specs says folder separators always are one character-long # but you never know. - folder_separator = Column(String(16), default='.') - folder_prefix = Column(String(191), default='') + folder_separator = Column(String(16), default=".") + folder_prefix = Column(String(191), default="") supports_condstore = Column(Boolean) # IMAP Secret - imap_password_id = Column(ForeignKey(Secret.id), - nullable=False) - imap_secret = relationship('Secret', cascade='save-update, merge, ' - 'refresh-expire, expunge, ' - 'delete', - single_parent=True, uselist=False, - lazy='joined', - foreign_keys=[imap_password_id]) + imap_password_id = Column(ForeignKey(Secret.id), nullable=False) + imap_secret = relationship( + "Secret", + cascade="save-update, merge, " "refresh-expire, expunge, " "delete", + single_parent=True, + uselist=False, + lazy="joined", + foreign_keys=[imap_password_id], + ) # SMTP Secret - smtp_password_id = Column(ForeignKey(Secret.id), - nullable=False) - smtp_secret = relationship('Secret', cascade='save-update, merge, ' - 'refresh-expire, expunge, ' - 'delete', - single_parent=True, uselist=False, - lazy='joined', - foreign_keys=[smtp_password_id]) + smtp_password_id = Column(ForeignKey(Secret.id), nullable=False) + smtp_secret = relationship( + "Secret", + cascade="save-update, merge, " "refresh-expire, expunge, " "delete", + single_parent=True, + uselist=False, + lazy="joined", + foreign_keys=[smtp_password_id], + ) ssl_required = Column(Boolean, default=True) # Old Secret # TODO[logan]: delete once IMAP and SMTP secret are in production. - password_id = Column(ForeignKey(Secret.id, ondelete='CASCADE'), - nullable=True) - old_secret = relationship('Secret', cascade='all, delete-orphan', - single_parent=True, uselist=False, - lazy='joined', - foreign_keys=[password_id]) - - __mapper_args__ = {'polymorphic_identity': 'genericaccount'} + password_id = Column(ForeignKey(Secret.id, ondelete="CASCADE"), nullable=True) + old_secret = relationship( + "Secret", + cascade="all, delete-orphan", + single_parent=True, + uselist=False, + lazy="joined", + foreign_keys=[password_id], + ) + + __mapper_args__ = {"polymorphic_identity": "genericaccount"} @property def verbose_provider(self): - if self.provider == 'custom': - return 'imap' + if self.provider == "custom": + return "imap" return self.provider def valid_password(self, value): # Must be a valid UTF-8 byte sequence without NULL bytes. 
if isinstance(value, unicode): - value = value.encode('utf-8') + value = value.encode("utf-8") try: - unicode(value, 'utf-8') + unicode(value, "utf-8") except UnicodeDecodeError: - raise ValueError('Invalid password') + raise ValueError("Invalid password") - if b'\x00' in value: - raise ValueError('Invalid password') + if b"\x00" in value: + raise ValueError("Invalid password") return value @@ -84,7 +88,7 @@ def imap_password(self, value): if not self.imap_secret: self.imap_secret = Secret() self.imap_secret.secret = value - self.imap_secret.type = 'password' + self.imap_secret.type = "password" @property def smtp_password(self): @@ -96,7 +100,7 @@ def smtp_password(self, value): if not self.smtp_secret: self.smtp_secret = Secret() self.smtp_secret.secret = value - self.smtp_secret.type = 'password' + self.smtp_secret.type = "password" # The password property is used for legacy reasons. # TODO[logan]: Remove once changeover to IMAP/SMTP auth is complete. @@ -110,31 +114,33 @@ def password(self, value): if not self.old_secret: self.old_secret = Secret() self.old_secret.secret = value - self.old_secret.type = 'password' + self.old_secret.type = "password" @property def category_type(self): - if self.provider == 'gmail': - return 'label' + if self.provider == "gmail": + return "label" else: - return 'folder' + return "folder" @property def thread_cls(self): from inbox.models.backends.imap import ImapThread + return ImapThread @property def actionlog_cls(self): from inbox.models.action_log import ActionLog + return ActionLog @property def server_settings(self): settings = {} - settings['imap_host'], settings['imap_port'] = self.imap_endpoint - settings['smtp_host'], settings['smtp_port'] = self.smtp_endpoint - settings['ssl_required'] = self.ssl_required + settings["imap_host"], settings["imap_port"] = self.imap_endpoint + settings["smtp_host"], settings["smtp_port"] = self.smtp_endpoint + settings["ssl_required"] = self.ssl_required return settings # Override provider_info and auth_handler to make sure we always get @@ -146,10 +152,11 @@ def server_settings(self): @property def provider_info(self): provider_info = super(GenericAccount, self).provider_info - provider_info['auth'] = 'password' + provider_info["auth"] = "password" return provider_info @property def auth_handler(self): from inbox.auth.base import handler_from_provider - return handler_from_provider('custom') + + return handler_from_provider("custom") diff --git a/inbox/models/backends/gmail.py b/inbox/models/backends/gmail.py index fd51a86c1..d4865b813 100644 --- a/inbox/models/backends/gmail.py +++ b/inbox/models/backends/gmail.py @@ -17,21 +17,21 @@ from inbox.models.mixins import UpdatedAtMixin, DeletedAtMixin from nylas.logging import get_logger + log = get_logger() -PROVIDER = 'gmail' +PROVIDER = "gmail" -GOOGLE_CALENDAR_SCOPE = 'https://www.googleapis.com/auth/calendar' -GOOGLE_EMAIL_SCOPE = 'https://mail.google.com/' -GOOGLE_CONTACTS_SCOPE = 'https://www.google.com/m8/feeds' +GOOGLE_CALENDAR_SCOPE = "https://www.googleapis.com/auth/calendar" +GOOGLE_EMAIL_SCOPE = "https://mail.google.com/" +GOOGLE_CONTACTS_SCOPE = "https://www.google.com/m8/feeds" # Google token named tuple - only used in this file. # NOTE: we only keep track of the auth_credentials id because # we need it for contacts sync (which is unfortunate). If that ever # changes, we should remove auth_creds from GToken. 
-GToken = namedtuple('GToken', - 'value expiration scopes client_id auth_creds_id') +GToken = namedtuple("GToken", "value expiration scopes client_id auth_creds_id") class GTokenManager(object): @@ -91,10 +91,10 @@ def get_token_and_auth_creds_id(self, account, scope, force_refresh=False): gtoken = self._get_token(account, scope, force_refresh=force_refresh) return gtoken.value, gtoken.auth_creds_id - def get_token_and_auth_creds_id_for_contacts(self, account, - force_refresh=False): + def get_token_and_auth_creds_id_for_contacts(self, account, force_refresh=False): return self.get_token_and_auth_creds_id( - account, GOOGLE_CONTACTS_SCOPE, force_refresh) + account, GOOGLE_CONTACTS_SCOPE, force_refresh + ) def cache_token(self, account, gtoken): for scope in gtoken.scopes: @@ -103,8 +103,9 @@ def cache_token(self, account, gtoken): def clear_cache(self, account): self._tokens[account.id] = {} - def get_token_for_calendars_restrict_ids(self, account, client_ids, - force_refresh=False): + def get_token_for_calendars_restrict_ids( + self, account, client_ids, force_refresh=False + ): """ For the given account, returns an access token that's associated with a client id from the given list of client_ids. @@ -134,10 +135,9 @@ def get_token_for_calendars_restrict_ids(self, account, client_ids, class GmailAccount(OAuthAccount, ImapAccount): - id = Column(ForeignKey(ImapAccount.id, ondelete='CASCADE'), - primary_key=True) + id = Column(ForeignKey(ImapAccount.id, ondelete="CASCADE"), primary_key=True) - __mapper_args__ = {'polymorphic_identity': 'gmailaccount'} + __mapper_args__ = {"polymorphic_identity": "gmailaccount"} # STOPSHIP(emfree) store these either as secrets or as properties of the # developer app. @@ -167,16 +167,18 @@ def provider(self): @property def category_type(self): - return 'label' + return "label" @property def thread_cls(self): from inbox.models.backends.imap import ImapThread + return ImapThread @property def actionlog_cls(self): from inbox.models.action_log import ActionLog + return ActionLog def new_token(self, scope, client_ids=None): @@ -196,11 +198,10 @@ def new_token(self, scope, client_ids=None): non_oauth_error = None possible_credentials = [ - auth_creds for auth_creds in self.valid_auth_credentials - if scope in auth_creds.scopes and ( - client_ids is None or - auth_creds.client_id in client_ids - ) + auth_creds + for auth_creds in self.valid_auth_credentials + if scope in auth_creds.scopes + and (client_ids is None or auth_creds.client_id in client_ids) ] # If more than one set of credentials is present, we don't want to @@ -212,29 +213,36 @@ def new_token(self, scope, client_ids=None): token, expires_in = self.auth_handler.new_token( auth_creds.refresh_token, auth_creds.client_id, - auth_creds.client_secret) + auth_creds.client_secret, + ) expires_in -= 10 - expiration = (datetime.utcnow() + - timedelta(seconds=expires_in)) + expiration = datetime.utcnow() + timedelta(seconds=expires_in) return GToken( - token, expiration, auth_creds.scopes, - auth_creds.client_id, auth_creds.id) + token, + expiration, + auth_creds.scopes, + auth_creds.client_id, + auth_creds.id, + ) except OAuthError as e: - log.error('Error validating', - account_id=self.id, - auth_creds_id=auth_creds.id, - logstash_tag='mark_invalid') + log.error( + "Error validating", + account_id=self.id, + auth_creds_id=auth_creds.id, + logstash_tag="mark_invalid", + ) auth_creds.is_valid = False except Exception as e: log.error( - 'Error while getting access token: {}'.format(e), + "Error while getting access 
token: {}".format(e), account_id=self.id, auth_creds_id=auth_creds.id, - exc_info=True) + exc_info=True, + ) non_oauth_error = e if non_oauth_error: @@ -262,9 +270,8 @@ def verify_all_credentials(self): def verify_credentials(self, auth_creds): try: self.auth_handler.new_token( - auth_creds.refresh_token, - auth_creds.client_id, - auth_creds.client_secret) + auth_creds.refresh_token, auth_creds.client_id, auth_creds.client_secret + ) # Valid access token might have changed? This might not # be necessary. g_token_manager.clear_cache(self) @@ -277,13 +284,12 @@ def valid_auth_credentials(self): return [creds for creds in self.auth_credentials if creds.is_valid] def verify(self): - token = g_token_manager.get_token(self, GOOGLE_EMAIL_SCOPE, - force_refresh=True) + token = g_token_manager.get_token(self, GOOGLE_EMAIL_SCOPE, force_refresh=True) return self.auth_handler.validate_token(token) def new_calendar_list_watch(self, expiration): # Google gives us back expiration timestamps in milliseconds - expiration = datetime.fromtimestamp(int(expiration) / 1000.) + expiration = datetime.fromtimestamp(int(expiration) / 1000.0) self.gpush_calendar_list_expiration = expiration self.gpush_calendar_list_last_ping = datetime.utcnow() @@ -302,28 +308,33 @@ def should_update_calendars(self, max_time_between_syncs, poll_frequency): now = datetime.utcnow() return ( # Never synced - self.last_calendar_list_sync is None or + self.last_calendar_list_sync is None + or # Too much time has passed to not sync - (now > self.last_calendar_list_sync + max_time_between_syncs) or + (now > self.last_calendar_list_sync + max_time_between_syncs) + or # Push notifications channel is stale (and we didn't just sync it) ( - self.needs_new_calendar_list_watch() and - now > self.last_calendar_list_sync + poll_frequency - ) or + self.needs_new_calendar_list_watch() + and now > self.last_calendar_list_sync + poll_frequency + ) + or # Our info is stale, according to google's push notifications ( - self.gpush_calendar_list_last_ping is not None and - (self.last_calendar_list_sync < - self.gpush_calendar_list_last_ping) + self.gpush_calendar_list_last_ping is not None + and (self.last_calendar_list_sync < self.gpush_calendar_list_last_ping) ) ) def needs_new_calendar_list_watch(self): - return (self.gpush_calendar_list_expiration is None or - self.gpush_calendar_list_expiration < datetime.utcnow()) + return ( + self.gpush_calendar_list_expiration is None + or self.gpush_calendar_list_expiration < datetime.utcnow() + ) def get_raw_message_contents(self, message): from inbox.s3.backends.gmail import get_gmail_raw_contents + return get_gmail_raw_contents(message) @@ -343,14 +354,15 @@ class GmailAuthCredentials(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): [auth_creds.refresh_token for auth_creds in g.auth_credentials] """ - gmailaccount_id = Column(BigInteger, - ForeignKey(GmailAccount.id, ondelete='CASCADE'), - nullable=False) - refresh_token_id = Column(BigInteger, - ForeignKey(Secret.id, ondelete='CASCADE'), - nullable=False) - - _scopes = Column('scopes', String(512), nullable=False) + + gmailaccount_id = Column( + BigInteger, ForeignKey(GmailAccount.id, ondelete="CASCADE"), nullable=False + ) + refresh_token_id = Column( + BigInteger, ForeignKey(Secret.id, ondelete="CASCADE"), nullable=False + ) + + _scopes = Column("scopes", String(512), nullable=False) g_id_token = Column(String(2048), nullable=False) client_id = Column(String(256), nullable=False) client_secret = Column(String(256), nullable=False) @@ -358,23 +370,24 @@ class 
GmailAuthCredentials(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): gmailaccount = relationship( GmailAccount, - backref=backref('auth_credentials', cascade='all, delete-orphan', - lazy='joined'), - lazy='joined', - join_depth=2 + backref=backref( + "auth_credentials", cascade="all, delete-orphan", lazy="joined" + ), + lazy="joined", + join_depth=2, ) refresh_token_secret = relationship( Secret, - cascade='all, delete-orphan', + cascade="all, delete-orphan", single_parent=True, - lazy='joined', - backref=backref('gmail_auth_credentials') + lazy="joined", + backref=backref("gmail_auth_credentials"), ) @hybrid_property def scopes(self): - return self._scopes.split(' ') + return self._scopes.split(" ") @scopes.setter def scopes(self, value): @@ -382,7 +395,7 @@ def scopes(self, value): if isinstance(value, basestring): self._scopes = value else: - self._scopes = ' '.join(value) + self._scopes = " ".join(value) @property def refresh_token(self): @@ -394,18 +407,18 @@ def refresh_token(self): def refresh_token(self, value): # Must be a valid UTF-8 byte sequence without NULL bytes. if isinstance(value, unicode): - value = value.encode('utf-8') + value = value.encode("utf-8") try: - unicode(value, 'utf-8') + unicode(value, "utf-8") except UnicodeDecodeError: - raise ValueError('Invalid refresh_token') + raise ValueError("Invalid refresh_token") - if b'\x00' in value: - raise ValueError('Invalid refresh_token') + if b"\x00" in value: + raise ValueError("Invalid refresh_token") if not self.refresh_token_secret: self.refresh_token_secret = Secret() self.refresh_token_secret.secret = value - self.refresh_token_secret.type = 'token' + self.refresh_token_secret.type = "token" diff --git a/inbox/models/backends/imap.py b/inbox/models/backends/imap.py index 0e2cf158a..087ada0a3 100644 --- a/inbox/models/backends/imap.py +++ b/inbox/models/backends/imap.py @@ -1,16 +1,27 @@ import json from datetime import datetime -from sqlalchemy import (Column, Integer, BigInteger, Boolean, Enum, - ForeignKey, Index, String, DateTime, desc) +from sqlalchemy import ( + Column, + Integer, + BigInteger, + Boolean, + Enum, + ForeignKey, + Index, + String, + DateTime, + desc, +) from sqlalchemy.schema import UniqueConstraint from sqlalchemy.orm import relationship, backref from sqlalchemy.sql.expression import false -from inbox.sqlalchemy_ext.util import (LittleJSON, JSON, MutableDict) +from inbox.sqlalchemy_ext.util import LittleJSON, JSON, MutableDict from sqlalchemy.ext.associationproxy import association_proxy from sqlalchemy.orm import object_session from nylas.logging import get_logger + log = get_logger() from inbox.models.base import MailSyncBase from inbox.models.account import Account @@ -21,21 +32,20 @@ from inbox.models.label import Label from inbox.util.misc import cleanup_subject -PROVIDER = 'imap' +PROVIDER = "imap" # Note, you should never directly create ImapAccount objects. 
Instead you # should use objects that inherit from this, such as GenericAccount or # GmailAccount class ImapAccount(Account): - id = Column(ForeignKey(Account.id, ondelete='CASCADE'), - primary_key=True) + id = Column(ForeignKey(Account.id, ondelete="CASCADE"), primary_key=True) _imap_server_host = Column(String(255), nullable=True) - _imap_server_port = Column(Integer, nullable=False, server_default='993') + _imap_server_port = Column(Integer, nullable=False, server_default="993") _smtp_server_host = Column(String(255), nullable=True) - _smtp_server_port = Column(Integer, nullable=False, server_default='587') + _smtp_server_port = Column(Integer, nullable=False, server_default="587") @property def imap_endpoint(self): @@ -46,7 +56,7 @@ def imap_endpoint(self): # handle ports of type long. Yay. http://bugs.python.org/issue8853. return (self._imap_server_host, int(self._imap_server_port)) else: - return self.provider_info['imap'] + return self.provider_info["imap"] @imap_endpoint.setter def imap_endpoint(self, endpoint): @@ -59,7 +69,7 @@ def smtp_endpoint(self): if self._smtp_server_host is not None: return (self._smtp_server_host, int(self._smtp_server_port)) else: - return self.provider_info['smtp'] + return self.provider_info["smtp"] @smtp_endpoint.setter def smtp_endpoint(self, endpoint): @@ -69,9 +79,10 @@ def smtp_endpoint(self, endpoint): def get_raw_message_contents(self, message): from inbox.s3.backends.imap import get_imap_raw_contents + return get_imap_raw_contents(message) - __mapper_args__ = {'polymorphic_identity': 'imapaccount'} + __mapper_args__ = {"polymorphic_identity": "imapaccount"} class ImapUid(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): @@ -80,25 +91,23 @@ class ImapUid(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): This table is used solely for bookkeeping by the IMAP mail sync backends. """ - account_id = Column(ForeignKey(ImapAccount.id, ondelete='CASCADE'), - nullable=False) + + account_id = Column(ForeignKey(ImapAccount.id, ondelete="CASCADE"), nullable=False) account = relationship(ImapAccount) - message_id = Column(ForeignKey(Message.id, ondelete='CASCADE'), - nullable=False) - message = relationship(Message, backref=backref('imapuids', - passive_deletes=True)) + message_id = Column(ForeignKey(Message.id, ondelete="CASCADE"), nullable=False) + message = relationship(Message, backref=backref("imapuids", passive_deletes=True)) msg_uid = Column(BigInteger, nullable=False, index=True) - folder_id = Column(ForeignKey(Folder.id, ondelete='CASCADE'), - nullable=False) + folder_id = Column(ForeignKey(Folder.id, ondelete="CASCADE"), nullable=False) # We almost always need the folder name too, so eager load by default. - folder = relationship(Folder, lazy='joined', - backref=backref('imapuids', passive_deletes=True)) + folder = relationship( + Folder, lazy="joined", backref=backref("imapuids", passive_deletes=True) + ) labels = association_proxy( - 'labelitems', 'label', - creator=lambda label: LabelItem(label=label)) + "labelitems", "label", creator=lambda label: LabelItem(label=label) + ) # Flags # # Message has not completed composition (marked as a draft). 
@@ -127,11 +136,11 @@ def update_flags(self, new_flags): changed = False new_flags = set(new_flags) col_for_flag = { - u'\\Draft': 'is_draft', - u'\\Seen': 'is_seen', - u'\\Recent': 'is_recent', - u'\\Answered': 'is_answered', - u'\\Flagged': 'is_flagged', + u"\\Draft": "is_draft", + u"\\Seen": "is_seen", + u"\\Recent": "is_recent", + u"\\Answered": "is_answered", + u"\\Flagged": "is_flagged", } for flag, col in col_for_flag.iteritems(): prior_flag_value = getattr(self, col) @@ -159,21 +168,21 @@ def update_labels(self, new_labels): # Gmail IMAP doesn't use the normal IMAP \\Draft flag. Silly Gmail # IMAP. - self.is_draft = '\\Draft' in new_labels - self.is_starred = '\\Starred' in new_labels + self.is_draft = "\\Draft" in new_labels + self.is_starred = "\\Starred" in new_labels category_map = { - '\\Inbox': 'inbox', - '\\Important': 'important', - '\\Sent': 'sent', - '\\Trash': 'trash', - '\\Spam': 'spam', - '\\All': 'all' + "\\Inbox": "inbox", + "\\Important": "important", + "\\Sent": "sent", + "\\Trash": "trash", + "\\Spam": "spam", + "\\All": "all", } remote_labels = set() for label in new_labels: - if label in ('\\Draft', '\\Starred'): + if label in ("\\Draft", "\\Starred"): continue elif label in category_map: remote_labels.add((category_map[label], category_map[label])) @@ -190,9 +199,9 @@ def update_labels(self, new_labels): self.labels.remove(local_labels[key]) for name, canonical_name in add: - label = Label.find_or_create(object_session(self), - self.account, name, - canonical_name) + label = Label.find_or_create( + object_session(self), self.account, name, canonical_name + ) self.labels.add(label) @property @@ -205,10 +214,11 @@ def categories(self): categories.add(self.folder.category) return categories - __table_args__ = (UniqueConstraint('folder_id', 'msg_uid', 'account_id',),) + __table_args__ = (UniqueConstraint("folder_id", "msg_uid", "account_id",),) + # make pulling up all messages in a given folder fast -Index('account_id_folder_id', ImapUid.account_id, ImapUid.folder_id) +Index("account_id_folder_id", ImapUid.account_id, ImapUid.folder_id) class ImapFolderInfo(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): @@ -225,14 +235,13 @@ class ImapFolderInfo(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): HIGHESTMODSEQ. 
""" - account_id = Column(ForeignKey(ImapAccount.id, ondelete='CASCADE'), - nullable=False) + + account_id = Column(ForeignKey(ImapAccount.id, ondelete="CASCADE"), nullable=False) account = relationship(ImapAccount) - folder_id = Column(ForeignKey('folder.id', ondelete='CASCADE'), - nullable=False) - folder = relationship('Folder', backref=backref('imapfolderinfo', - uselist=False, - passive_deletes=True)) + folder_id = Column(ForeignKey("folder.id", ondelete="CASCADE"), nullable=False) + folder = relationship( + "Folder", backref=backref("imapfolderinfo", uselist=False, passive_deletes=True) + ) uidvalidity = Column(BigInteger, nullable=False) # Invariant: the local datastore for this folder has always incorporated # remote changes up to _at least_ this modseq (we can't guarantee that we @@ -245,7 +254,7 @@ class ImapFolderInfo(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): uidnext = Column(BigInteger, nullable=True) last_slow_refresh = Column(DateTime) - __table_args__ = (UniqueConstraint('account_id', 'folder_id'),) + __table_args__ = (UniqueConstraint("account_id", "folder_id"),) def _choose_existing_thread_for_gmail(message, db_session): @@ -263,9 +272,12 @@ def _choose_existing_thread_for_gmail(message, db_session): """ # TODO(emfree): also use the References header, or better yet, change API # semantics so that we don't have to do this at all. - prior_threads = db_session.query(ImapThread).filter_by( - g_thrid=message.g_thrid, namespace_id=message.namespace_id). \ - order_by(desc(ImapThread.recentdate)).all() + prior_threads = ( + db_session.query(ImapThread) + .filter_by(g_thrid=message.g_thrid, namespace_id=message.namespace_id) + .order_by(desc(ImapThread.recentdate)) + .all() + ) if not prior_threads: return None if len(prior_threads) == 1: @@ -274,8 +286,7 @@ def _choose_existing_thread_for_gmail(message, db_session): # If no header, add the new message to the most recent thread. return prior_threads[0] for prior_thread in prior_threads: - prior_message_ids = [m.message_id_header for m in - prior_thread.messages] + prior_message_ids = [m.message_id_header for m in prior_thread.messages] if message.in_reply_to in prior_message_ids: return prior_thread @@ -284,7 +295,8 @@ def _choose_existing_thread_for_gmail(message, db_session): class ImapThread(Thread): """ TODO: split into provider-specific classes. """ - id = Column(ForeignKey(Thread.id, ondelete='CASCADE'), primary_key=True) + + id = Column(ForeignKey(Thread.id, ondelete="CASCADE"), primary_key=True) # Only on messages from Gmail # @@ -311,11 +323,14 @@ def from_gmail_message(cls, session, namespace_id, message): if message.g_thrid is not None: thread = _choose_existing_thread_for_gmail(message, session) if thread is None: - thread = cls(subject=message.subject, g_thrid=message.g_thrid, - recentdate=message.received_date, - namespace_id=namespace_id, - subjectdate=message.received_date, - snippet=message.snippet) + thread = cls( + subject=message.subject, + g_thrid=message.g_thrid, + recentdate=message.received_date, + namespace_id=namespace_id, + subjectdate=message.received_date, + snippet=message.snippet, + ) return thread @classmethod @@ -325,34 +340,40 @@ def from_imap_message(cls, session, namespace_id, message): # create a new one. 
return message.thread clean_subject = cleanup_subject(message.subject) - thread = cls(subject=clean_subject, recentdate=message.received_date, - namespace_id=namespace_id, - subjectdate=message.received_date, - snippet=message.snippet) + thread = cls( + subject=clean_subject, + recentdate=message.received_date, + namespace_id=namespace_id, + subjectdate=message.received_date, + snippet=message.snippet, + ) return thread - __mapper_args__ = {'polymorphic_identity': 'imapthread'} + __mapper_args__ = {"polymorphic_identity": "imapthread"} -class ImapFolderSyncStatus(MailSyncBase, HasRunState, UpdatedAtMixin, - DeletedAtMixin): +class ImapFolderSyncStatus(MailSyncBase, HasRunState, UpdatedAtMixin, DeletedAtMixin): """ Per-folder status state saving for IMAP folders. """ - account_id = Column(ForeignKey(ImapAccount.id, ondelete='CASCADE'), - nullable=False) - account = relationship(ImapAccount, - backref=backref('foldersyncstatuses', - passive_deletes=True)) - - folder_id = Column(ForeignKey('folder.id', ondelete='CASCADE'), - nullable=False) + + account_id = Column(ForeignKey(ImapAccount.id, ondelete="CASCADE"), nullable=False) + account = relationship( + ImapAccount, backref=backref("foldersyncstatuses", passive_deletes=True) + ) + + folder_id = Column(ForeignKey("folder.id", ondelete="CASCADE"), nullable=False) # We almost always need the folder name too, so eager load by default. - folder = relationship('Folder', lazy='joined', backref=backref( - 'imapsyncstatus', uselist=False, passive_deletes=True)) + folder = relationship( + "Folder", + lazy="joined", + backref=backref("imapsyncstatus", uselist=False, passive_deletes=True), + ) # see state machine in mailsync/backends/imap/imap.py - state = Column(Enum('initial', 'initial uidinvalid', - 'poll', 'poll uidinvalid', 'finish'), - server_default='initial', nullable=False) + state = Column( + Enum("initial", "initial uidinvalid", "poll", "poll uidinvalid", "finish"), + server_default="initial", + nullable=False, + ) # stats on messages downloaded etc. _metrics = Column(MutableDict.as_mutable(JSON), default={}, nullable=True) @@ -365,23 +386,27 @@ def metrics(self): return status def start_sync(self): - self._metrics = dict(run_state='running', - sync_start_time=datetime.utcnow()) + self._metrics = dict(run_state="running", sync_start_time=datetime.utcnow()) def stop_sync(self): - self._metrics['run_state'] = 'stopped' - self._metrics['sync_end_time'] = datetime.utcnow() + self._metrics["run_state"] = "stopped" + self._metrics["sync_end_time"] = datetime.utcnow() @property def is_killed(self): - return self._metrics.get('run_state') == 'killed' + return self._metrics.get("run_state") == "killed" def update_metrics(self, metrics): - sync_status_metrics = ['remote_uid_count', 'delete_uid_count', - 'update_uid_count', 'download_uid_count', - 'uid_checked_timestamp', - 'num_downloaded_since_timestamp', - 'queue_checked_at', 'percent'] + sync_status_metrics = [ + "remote_uid_count", + "delete_uid_count", + "update_uid_count", + "download_uid_count", + "uid_checked_timestamp", + "num_downloaded_since_timestamp", + "queue_checked_at", + "percent", + ] assert isinstance(metrics, dict) for k in metrics.iterkeys(): @@ -400,28 +425,29 @@ def sync_enabled(self): # against folder heartbeats.) 
return self.sync_should_run and self.account.sync_should_run - __table_args__ = (UniqueConstraint('account_id', 'folder_id'),) + __table_args__ = (UniqueConstraint("account_id", "folder_id"),) class LabelItem(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): """ Mapping between imapuids and labels. """ - imapuid_id = Column(ForeignKey(ImapUid.id, ondelete='CASCADE'), - nullable=False) + + imapuid_id = Column(ForeignKey(ImapUid.id, ondelete="CASCADE"), nullable=False) imapuid = relationship( - 'ImapUid', - backref=backref('labelitems', - collection_class=set, - cascade='all, delete-orphan')) + "ImapUid", + backref=backref( + "labelitems", collection_class=set, cascade="all, delete-orphan" + ), + ) - label_id = Column(ForeignKey(Label.id, ondelete='CASCADE'), nullable=False) + label_id = Column(ForeignKey(Label.id, ondelete="CASCADE"), nullable=False) label = relationship( Label, - backref=backref('labelitems', - cascade='all, delete-orphan', - lazy='dynamic')) + backref=backref("labelitems", cascade="all, delete-orphan", lazy="dynamic"), + ) @property def namespace(self): return self.label.namespace -Index('imapuid_label_ids', LabelItem.imapuid_id, LabelItem.label_id) + +Index("imapuid_label_ids", LabelItem.imapuid_id, LabelItem.label_id) diff --git a/inbox/models/backends/oauth.py b/inbox/models/backends/oauth.py index bb03e0a4c..04e4ec78a 100644 --- a/inbox/models/backends/oauth.py +++ b/inbox/models/backends/oauth.py @@ -10,11 +10,11 @@ from inbox.models.secret import Secret from nylas.logging import get_logger + log = get_logger() class TokenManager(object): - def __init__(self): self._tokens = {} @@ -46,8 +46,7 @@ def refresh_token_id(cls): @declared_attr def secret(cls): - return relationship('Secret', cascade='all', uselist=False, - lazy='joined') + return relationship("Secret", cascade="all", uselist=False, lazy="joined") @property def refresh_token(self): @@ -59,31 +58,33 @@ def refresh_token(self): def refresh_token(self, value): # Must be a valid UTF-8 byte sequence without NULL bytes. 
if isinstance(value, unicode): - value = value.encode('utf-8') + value = value.encode("utf-8") try: - unicode(value, 'utf-8') + unicode(value, "utf-8") except UnicodeDecodeError: - raise ValueError('Invalid refresh_token') + raise ValueError("Invalid refresh_token") - if b'\x00' in value: - raise ValueError('Invalid refresh_token') + if b"\x00" in value: + raise ValueError("Invalid refresh_token") if not self.secret: self.secret = Secret() self.secret.secret = value - self.secret.type = 'token' + self.secret.type = "token" def new_token(self): try: - return self.auth_handler.new_token(self.refresh_token, - self.client_id, - self.client_secret) + return self.auth_handler.new_token( + self.refresh_token, self.client_id, self.client_secret + ) except Exception as e: - log.error('Error while getting access token: {}'.format(e), - account_id=self.id, - exc_info=True) + log.error( + "Error while getting access token: {}".format(e), + account_id=self.id, + exc_info=True, + ) raise def verify(self): diff --git a/inbox/models/backends/outlook.py b/inbox/models/backends/outlook.py index 9c6e37842..eedd5893e 100644 --- a/inbox/models/backends/outlook.py +++ b/inbox/models/backends/outlook.py @@ -3,14 +3,13 @@ from inbox.models.backends.imap import ImapAccount from inbox.models.backends.oauth import OAuthAccount -PROVIDER = '_outlook' +PROVIDER = "_outlook" class OutlookAccount(ImapAccount, OAuthAccount): - id = Column(ForeignKey(ImapAccount.id, ondelete='CASCADE'), - primary_key=True) + id = Column(ForeignKey(ImapAccount.id, ondelete="CASCADE"), primary_key=True) - __mapper_args__ = {'polymorphic_identity': 'outlookaccount'} + __mapper_args__ = {"polymorphic_identity": "outlookaccount"} # STOPSHIP(emfree) store these either as secrets or as properties of the # developer app. @@ -31,14 +30,16 @@ def provider(self): @property def category_type(self): - return 'folder' + return "folder" @property def thread_cls(self): from inbox.models.backends.imap import ImapThread + return ImapThread @property def actionlog_cls(self): from inbox.models.action_log import ActionLog + return ActionLog diff --git a/inbox/models/base.py b/inbox/models/base.py index 2fe62eace..519d84625 100644 --- a/inbox/models/base.py +++ b/inbox/models/base.py @@ -11,6 +11,7 @@ class MailSyncBase(CreatedAtMixin): Provides automated table name, primary key column, and created_at timestamp. """ + id = Column(BigInteger, primary_key=True, autoincrement=True) @declared_attr @@ -19,13 +20,17 @@ def __tablename__(cls): @declared_attr def __table_args__(cls): - return {'extend_existing': True} + return {"extend_existing": True} def __repr__(self): try: - return "<{} (id: {})>".format(self.__module__ + "." + self.__class__.__name__, self.id) + return "<{} (id: {})>".format( + self.__module__ + "." + self.__class__.__name__, self.id + ) except DetachedInstanceError: # SQLAlchemy has expired all values for this object and is trying # to refresh them from the database, but has no session for the # refresh. - return "<{} (id: detached)>".format(self.__module__ + "." + self.__class__.__name__) + return "<{} (id: detached)>".format( + self.__module__ + "." 
+ self.__class__.__name__ + ) diff --git a/inbox/models/block.py b/inbox/models/block.py index cc8d8a8ff..c90079970 100644 --- a/inbox/models/block.py +++ b/inbox/models/block.py @@ -1,38 +1,45 @@ -from sqlalchemy import (Column, Integer, String, Boolean, Enum, ForeignKey, - event) +from sqlalchemy import Column, Integer, String, Boolean, Enum, ForeignKey, event from sqlalchemy.orm import reconstructor, relationship, backref from sqlalchemy.schema import UniqueConstraint from sqlalchemy.sql.expression import false from inbox.models.roles import Blob -from inbox.models.mixins import (HasPublicID, HasRevisions, UpdatedAtMixin, - DeletedAtMixin) +from inbox.models.mixins import ( + HasPublicID, + HasRevisions, + UpdatedAtMixin, + DeletedAtMixin, +) from inbox.models.base import MailSyncBase from inbox.models.message import Message # These are the top 15 most common Content-Type headers # in my personal mail archive. --mg -COMMON_CONTENT_TYPES = ['text/plain', - 'text/html', - 'multipart/alternative', - 'multipart/mixed', - 'image/jpeg', - 'multipart/related', - 'application/pdf', - 'image/png', - 'image/gif', - 'application/octet-stream', - 'multipart/signed', - 'application/msword', - 'application/pkcs7-signature', - 'message/rfc822', - 'image/jpg'] - - -class Block(Blob, MailSyncBase, HasRevisions, HasPublicID, - UpdatedAtMixin, DeletedAtMixin): +COMMON_CONTENT_TYPES = [ + "text/plain", + "text/html", + "multipart/alternative", + "multipart/mixed", + "image/jpeg", + "multipart/related", + "application/pdf", + "image/png", + "image/gif", + "application/octet-stream", + "multipart/signed", + "application/msword", + "application/pkcs7-signature", + "message/rfc822", + "image/jpg", +] + + +class Block( + Blob, MailSyncBase, HasRevisions, HasPublicID, UpdatedAtMixin, DeletedAtMixin +): """ Metadata for any file that we store """ - API_OBJECT_NAME = 'file' + + API_OBJECT_NAME = "file" @property def should_suppress_transaction_creation(self): @@ -52,13 +59,12 @@ def __init__(self, *args, **kwargs): self.size = 0 MailSyncBase.__init__(self, *args, **kwargs) - namespace_id = Column(ForeignKey(Namespace.id, ondelete='CASCADE'), - nullable=False) - namespace = relationship('Namespace', - backref=backref('blocks', - passive_deletes=True, - cascade='all,delete-orphan'), - load_on_pending=True) + namespace_id = Column(ForeignKey(Namespace.id, ondelete="CASCADE"), nullable=False) + namespace = relationship( + "Namespace", + backref=backref("blocks", passive_deletes=True, cascade="all,delete-orphan"), + load_on_pending=True, + ) @reconstructor def init_on_load(self): @@ -68,7 +74,7 @@ def init_on_load(self): self.content_type = self._content_type_other -@event.listens_for(Block, 'before_insert', propagate=True) +@event.listens_for(Block, "before_insert", propagate=True) def serialize_before_insert(mapper, connection, target): if target.content_type in COMMON_CONTENT_TYPES: target._content_type_common = target.content_type @@ -82,28 +88,33 @@ class Part(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): """ Part is a section of a specific message. This includes message bodies as well as attachments. 
""" - block_id = Column(ForeignKey(Block.id, ondelete='CASCADE')) - block = relationship(Block, - backref=backref("parts", - passive_deletes=True, - cascade="all,delete,delete-orphan"), - load_on_pending=True) - - message_id = Column(ForeignKey(Message.id, ondelete='CASCADE')) - message = relationship('Message', - backref=backref("parts", - passive_deletes=True, - cascade="all,delete,delete-orphan"), - load_on_pending=True) + + block_id = Column(ForeignKey(Block.id, ondelete="CASCADE")) + block = relationship( + Block, + backref=backref( + "parts", passive_deletes=True, cascade="all,delete,delete-orphan" + ), + load_on_pending=True, + ) + + message_id = Column(ForeignKey(Message.id, ondelete="CASCADE")) + message = relationship( + "Message", + backref=backref( + "parts", passive_deletes=True, cascade="all,delete,delete-orphan" + ), + load_on_pending=True, + ) walk_index = Column(Integer) # https://www.ietf.org/rfc/rfc2183.txt - content_disposition = Column(Enum('inline', 'attachment'), nullable=True) + content_disposition = Column(Enum("inline", "attachment"), nullable=True) content_id = Column(String(255)) # For attachments is_inboxapp_attachment = Column(Boolean, server_default=false()) - __table_args__ = (UniqueConstraint('message_id', 'walk_index'),) + __table_args__ = (UniqueConstraint("message_id", "walk_index"),) @property def thread_id(self): @@ -117,5 +128,7 @@ def is_attachment(self): @property def is_embedded(self): - return (self.content_disposition is not None and - self.content_disposition.lower() == 'inline') + return ( + self.content_disposition is not None + and self.content_disposition.lower() == "inline" + ) diff --git a/inbox/models/calendar.py b/inbox/models/calendar.py index 43193dc5e..ac92477fb 100644 --- a/inbox/models/calendar.py +++ b/inbox/models/calendar.py @@ -1,34 +1,43 @@ from datetime import datetime -from sqlalchemy import (Column, String, Text, Boolean, - UniqueConstraint, ForeignKey, DateTime, inspect) +from sqlalchemy import ( + Column, + String, + Text, + Boolean, + UniqueConstraint, + ForeignKey, + DateTime, + inspect, +) from sqlalchemy.orm import relationship, backref, object_session from inbox.models.base import MailSyncBase from inbox.models.namespace import Namespace from inbox.models.constants import MAX_INDEXABLE_LENGTH -from inbox.models.mixins import (HasPublicID, HasRevisions, UpdatedAtMixin, - DeletedAtMixin) +from inbox.models.mixins import ( + HasPublicID, + HasRevisions, + UpdatedAtMixin, + DeletedAtMixin, +) -class Calendar(MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin, - DeletedAtMixin): - API_OBJECT_NAME = 'calendar' - namespace_id = Column(ForeignKey(Namespace.id, ondelete='CASCADE'), - nullable=False) +class Calendar(MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin, DeletedAtMixin): + API_OBJECT_NAME = "calendar" + namespace_id = Column(ForeignKey(Namespace.id, ondelete="CASCADE"), nullable=False) namespace = relationship( - Namespace, - load_on_pending=True, - backref=backref('calendars')) + Namespace, load_on_pending=True, backref=backref("calendars") + ) name = Column(String(MAX_INDEXABLE_LENGTH), nullable=True) - provider_name = Column(String(128), nullable=True, default='DEPRECATED') + provider_name = Column(String(128), nullable=True, default="DEPRECATED") description = Column(Text, nullable=True) # A server-provided unique ID. 
- uid = Column(String(767, collation='ascii_general_ci'), nullable=False) + uid = Column(String(767, collation="ascii_general_ci"), nullable=False) read_only = Column(Boolean, nullable=False, default=False) @@ -37,18 +46,20 @@ class Calendar(MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin, gpush_last_ping = Column(DateTime) gpush_expiration = Column(DateTime) - __table_args__ = (UniqueConstraint('namespace_id', 'provider_name', - 'name', 'uid', name='uuid'),) + __table_args__ = ( + UniqueConstraint("namespace_id", "provider_name", "name", "uid", name="uuid"), + ) @property def should_suppress_transaction_creation(self): - if (self in object_session(self).new or - self in object_session(self).deleted): + if self in object_session(self).new or self in object_session(self).deleted: return False obj_state = inspect(self) - return not (obj_state.attrs.name.history.has_changes() or - obj_state.attrs.description.history.has_changes() or - obj_state.attrs.read_only.history.has_changes()) + return not ( + obj_state.attrs.name.history.has_changes() + or obj_state.attrs.description.history.has_changes() + or obj_state.attrs.read_only.history.has_changes() + ) def update(self, calendar): self.uid = calendar.uid @@ -60,7 +71,7 @@ def new_event_watch(self, expiration): """ Google gives us expiration as a timestamp in milliseconds """ - expiration = datetime.fromtimestamp(int(expiration) / 1000.) + expiration = datetime.fromtimestamp(int(expiration) / 1000.0) self.gpush_expiration = expiration self.gpush_last_ping = datetime.utcnow() @@ -68,7 +79,7 @@ def handle_gpush_notification(self): self.gpush_last_ping = datetime.utcnow() def can_sync(self): - if self.name == 'Emailed events' and self.uid == 'inbox': + if self.name == "Emailed events" and self.uid == "inbox": # This is our own internal calendar return False @@ -76,15 +87,15 @@ def can_sync(self): # If you try to watch Holidays, you get a 404. # If you try to watch Birthdays, you get a cryptic 'Missing Title' # error. Thanks, Google. 
- if 'group.v.calendar.google.com' in self.uid: + if "group.v.calendar.google.com" in self.uid: return False # If you try to watch "Phases of the Moon" or holiday calendars, you # get 400 ("Push notifications are not supported by this resource.") - if self.uid == 'ht3jlfaac5lfd6263ulfh4tql8@group.calendar.google.com': + if self.uid == "ht3jlfaac5lfd6263ulfh4tql8@group.calendar.google.com": return False - if 'holiday.calendar.google.com' in self.uid: + if "holiday.calendar.google.com" in self.uid: return False return True @@ -94,8 +105,7 @@ def needs_new_watch(self): return False return ( - self.gpush_expiration is None or - self.gpush_expiration < datetime.utcnow() + self.gpush_expiration is None or self.gpush_expiration < datetime.utcnow() ) def should_update_events(self, max_time_between_syncs, poll_frequency): @@ -115,17 +125,17 @@ def should_update_events(self, max_time_between_syncs, poll_frequency): return ( # Never synced - self.last_synced is None or + self.last_synced is None + or # Push notifications channel is stale (and we didn't just sync it) - ( - self.needs_new_watch() and - now > self.last_synced + poll_frequency - ) or + (self.needs_new_watch() and now > self.last_synced + poll_frequency) + or # Too much time has passed not to sync - now > self.last_synced + max_time_between_syncs or + now > self.last_synced + max_time_between_syncs + or # Events are stale, according to the push notifications ( - self.gpush_last_ping is not None and - self.gpush_last_ping > self.last_synced + self.gpush_last_ping is not None + and self.gpush_last_ping > self.last_synced ) ) diff --git a/inbox/models/category.py b/inbox/models/category.py index 1734043dd..0b9476a5e 100644 --- a/inbox/models/category.py +++ b/inbox/models/category.py @@ -8,13 +8,18 @@ from inbox.sqlalchemy_ext.util import StringWithTransform from inbox.models.base import MailSyncBase -from inbox.models.mixins import (HasRevisions, HasPublicID, - CaseInsensitiveComparator, DeletedAtMixin, - UpdatedAtMixin) +from inbox.models.mixins import ( + HasRevisions, + HasPublicID, + CaseInsensitiveComparator, + DeletedAtMixin, + UpdatedAtMixin, +) from inbox.models.constants import MAX_INDEXABLE_LENGTH from nylas.logging import get_logger from inbox.util.misc import fs_folder_path from inbox.util.encoding import unicode_safe_truncate + log = get_logger() EPOCH = datetime.utcfromtimestamp(0) @@ -38,14 +43,12 @@ class CategoryNameString(StringWithTransform): """ def __init__(self, *args, **kwargs): - super(CategoryNameString, self)\ - .__init__(sanitize_name, MAX_INDEXABLE_LENGTH, - collation='utf8mb4_bin') - + super(CategoryNameString, self).__init__( + sanitize_name, MAX_INDEXABLE_LENGTH, collation="utf8mb4_bin" + ) -class Category(MailSyncBase, HasRevisions, HasPublicID, UpdatedAtMixin, - DeletedAtMixin): +class Category(MailSyncBase, HasRevisions, HasPublicID, UpdatedAtMixin, DeletedAtMixin): @property def API_OBJECT_NAME(self): return self.type_ @@ -53,41 +56,54 @@ def API_OBJECT_NAME(self): # Override the default `deleted_at` column with one that is NOT NULL -- # Category.deleted_at is needed in a UniqueConstraint. # Set the default Category.deleted_at = EPOCH instead. 
- deleted_at = Column(DateTime, index=True, nullable=False, - default='1970-01-01 00:00:00') + deleted_at = Column( + DateTime, index=True, nullable=False, default="1970-01-01 00:00:00" + ) # Need `use_alter` here to avoid circular dependencies - namespace_id = Column(ForeignKey('namespace.id', use_alter=True, - name='category_fk1', - ondelete='CASCADE'), nullable=False) - namespace = relationship('Namespace', load_on_pending=True) + namespace_id = Column( + ForeignKey( + "namespace.id", use_alter=True, name="category_fk1", ondelete="CASCADE" + ), + nullable=False, + ) + namespace = relationship("Namespace", load_on_pending=True) # STOPSHIP(emfree): need to index properly for API filtering performance. - name = Column(String(MAX_INDEXABLE_LENGTH), nullable=False, default='') + name = Column(String(MAX_INDEXABLE_LENGTH), nullable=False, default="") display_name = Column(CategoryNameString(), nullable=False) - type_ = Column(Enum('folder', 'label'), nullable=False, default='folder') + type_ = Column(Enum("folder", "label"), nullable=False, default="folder") - @validates('display_name') + @validates("display_name") def validate_display_name(self, key, display_name): sanitized_name = sanitize_name(display_name) if sanitized_name != display_name: - log.warning("Truncating category display_name", type_=self.type_, - original=display_name) + log.warning( + "Truncating category display_name", + type_=self.type_, + original=display_name, + ) return sanitized_name @classmethod def find_or_create(cls, session, namespace_id, name, display_name, type_): - name = name or '' + name = name or "" - objects = session.query(cls).filter( - cls.namespace_id == namespace_id, - cls.display_name == display_name).all() + objects = ( + session.query(cls) + .filter(cls.namespace_id == namespace_id, cls.display_name == display_name) + .all() + ) if not objects: - obj = cls(namespace_id=namespace_id, name=name, - display_name=display_name, type_=type_, - deleted_at=EPOCH) + obj = cls( + namespace_id=namespace_id, + name=name, + display_name=display_name, + type_=type_, + deleted_at=EPOCH, + ) session.add(obj) elif len(objects) == 1: obj = objects[0] @@ -100,20 +116,27 @@ def find_or_create(cls, session, namespace_id, name, display_name, type_): # merely want to update its `name`, not create a new one. obj.name = name else: - log.error('Duplicate category rows for namespace_id {}, ' - 'name {}, display_name: {}'. 
- format(namespace_id, name, display_name)) + log.error( + "Duplicate category rows for namespace_id {}, " + "name {}, display_name: {}".format(namespace_id, name, display_name) + ) raise MultipleResultsFound( - 'Duplicate category rows for namespace_id {}, name {}, ' - 'display_name: {}'.format(namespace_id, name, display_name)) + "Duplicate category rows for namespace_id {}, name {}, " + "display_name: {}".format(namespace_id, name, display_name) + ) return obj @classmethod def create(cls, session, namespace_id, name, display_name, type_): - name = name or '' - obj = cls(namespace_id=namespace_id, name=name, - display_name=display_name, type_=type_, deleted_at=EPOCH) + name = name or "" + obj = cls( + namespace_id=namespace_id, + name=name, + display_name=display_name, + type_=type_, + deleted_at=EPOCH, + ) session.add(obj) return obj @@ -135,16 +158,18 @@ def lowercase_name(cls): @property def api_display_name(self): - if self.namespace.account.provider == 'gmail': - if self.display_name.startswith('[Gmail]/'): + if self.namespace.account.provider == "gmail": + if self.display_name.startswith("[Gmail]/"): return self.display_name[8:] - elif self.display_name.startswith('[Google Mail]/'): + elif self.display_name.startswith("[Google Mail]/"): return self.display_name[14:] - if self.namespace.account.provider not in ['gmail', 'eas']: - return fs_folder_path(self.display_name, - separator=self.namespace.account.folder_separator, - prefix=self.namespace.account.folder_prefix) + if self.namespace.account.provider not in ["gmail", "eas"]: + return fs_folder_path( + self.display_name, + separator=self.namespace.account.folder_separator, + prefix=self.namespace.account.folder_prefix, + ) return self.display_name @@ -152,6 +177,7 @@ def api_display_name(self): def is_deleted(self): return self.deleted_at > EPOCH - __table_args__ = (UniqueConstraint('namespace_id', 'name', 'display_name', - 'deleted_at'), - UniqueConstraint('namespace_id', 'public_id')) + __table_args__ = ( + UniqueConstraint("namespace_id", "name", "display_name", "deleted_at"), + UniqueConstraint("namespace_id", "public_id"), + ) diff --git a/inbox/models/contact.py b/inbox/models/contact.py index 3d347c104..f228df408 100644 --- a/inbox/models/contact.py +++ b/inbox/models/contact.py @@ -1,11 +1,24 @@ -from sqlalchemy import Column, Integer, String, Enum, Text, Index, BigInteger, \ - ForeignKey +from sqlalchemy import ( + Column, + Integer, + String, + Enum, + Text, + Index, + BigInteger, + ForeignKey, +) from sqlalchemy.orm import relationship, backref, validates from sqlalchemy.schema import UniqueConstraint from inbox.sqlalchemy_ext.util import MAX_TEXT_CHARS -from inbox.models.mixins import (HasPublicID, HasEmailAddress, HasRevisions, - UpdatedAtMixin, DeletedAtMixin) +from inbox.models.mixins import ( + HasPublicID, + HasEmailAddress, + HasRevisions, + UpdatedAtMixin, + DeletedAtMixin, +) from inbox.models.base import MailSyncBase from inbox.models.event import Event from inbox.models.message import Message @@ -13,20 +26,28 @@ from inbox.util.encoding import unicode_safe_truncate -class Contact(MailSyncBase, HasRevisions, HasPublicID, HasEmailAddress, - UpdatedAtMixin, DeletedAtMixin): +class Contact( + MailSyncBase, + HasRevisions, + HasPublicID, + HasEmailAddress, + UpdatedAtMixin, + DeletedAtMixin, +): """Data for a user's contact.""" - API_OBJECT_NAME = 'contact' + + API_OBJECT_NAME = "contact" namespace_id = Column(BigInteger, nullable=False, index=True) namespace = relationship( Namespace, - 
primaryjoin='foreign(Contact.namespace_id) == remote(Namespace.id)', - load_on_pending=True) + primaryjoin="foreign(Contact.namespace_id) == remote(Namespace.id)", + load_on_pending=True, + ) # A server-provided unique ID. # NB: We specify the collation here so that the test DB gets setup correctly. - uid = Column(String(64, collation='utf8mb4_bin'), nullable=False) + uid = Column(String(64, collation="utf8mb4_bin"), nullable=False) # A constant, unique identifier for the remote backend this contact came # from. E.g., 'google', 'eas', 'inbox' provider_name = Column(String(64)) @@ -44,14 +65,15 @@ class Contact(MailSyncBase, HasRevisions, HasPublicID, HasEmailAddress, # database column.) deleted = False - __table_args__ = (UniqueConstraint('uid', 'namespace_id', - 'provider_name'), - Index('idx_namespace_created', 'namespace_id', - 'created_at'), - Index('ix_contact_ns_uid_provider_name', - 'namespace_id', 'uid', 'provider_name')) + __table_args__ = ( + UniqueConstraint("uid", "namespace_id", "provider_name"), + Index("idx_namespace_created", "namespace_id", "created_at"), + Index( + "ix_contact_ns_uid_provider_name", "namespace_id", "uid", "provider_name" + ), + ) - @validates('raw_data') + @validates("raw_data") def validate_text_column_length(self, key, value): if value is None: return None @@ -59,11 +81,11 @@ def validate_text_column_length(self, key, value): @property def versioned_relationships(self): - return ['phone_numbers'] + return ["phone_numbers"] def merge_from(self, new_contact): # This must be updated when new fields are added to the class. - merge_attrs = ['name', 'email_address', 'raw_data'] + merge_attrs = ["name", "email_address", "raw_data"] for attr in merge_attrs: if getattr(self, attr) != getattr(new_contact, attr): setattr(self, attr, getattr(new_contact, attr)) @@ -75,8 +97,9 @@ class PhoneNumber(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): contact_id = Column(BigInteger, index=True) contact = relationship( Contact, - primaryjoin='foreign(PhoneNumber.contact_id) == remote(Contact.id)', - backref=backref('phone_numbers', cascade='all, delete-orphan')) + primaryjoin="foreign(PhoneNumber.contact_id) == remote(Contact.id)", + backref=backref("phone_numbers", cascade="all, delete-orphan"), + ) type = Column(String(STRING_LENGTH), nullable=True) number = Column(String(STRING_LENGTH), nullable=False) @@ -94,38 +117,39 @@ class MessageContactAssociation(MailSyncBase): [assoc.message for assoc in c.message_associations if assoc.field == ... 'to_addr'] """ + contact_id = Column(BigInteger, primary_key=True, index=True) - message_id = Column(ForeignKey(Message.id, ondelete='CASCADE'), - primary_key=True) - field = Column(Enum('from_addr', 'to_addr', - 'cc_addr', 'bcc_addr', 'reply_to')) + message_id = Column(ForeignKey(Message.id, ondelete="CASCADE"), primary_key=True) + field = Column(Enum("from_addr", "to_addr", "cc_addr", "bcc_addr", "reply_to")) # Note: The `cascade` properties need to be a parameter of the backref # here, and not of the relationship. Otherwise a sqlalchemy error is thrown # when you try to delete a message or a contact. 
contact = relationship( Contact, - primaryjoin='foreign(MessageContactAssociation.contact_id) == ' - 'remote(Contact.id)', - backref=backref('message_associations', cascade='all, delete-orphan')) + primaryjoin="foreign(MessageContactAssociation.contact_id) == " + "remote(Contact.id)", + backref=backref("message_associations", cascade="all, delete-orphan"), + ) message = relationship( - Message, - backref=backref('contacts', cascade='all, delete-orphan')) + Message, backref=backref("contacts", cascade="all, delete-orphan") + ) class EventContactAssociation(MailSyncBase): """Association table between event participants and contacts.""" + contact_id = Column(BigInteger, primary_key=True, index=True) - event_id = Column(ForeignKey(Event.id, ondelete='CASCADE'), - primary_key=True) - field = Column(Enum('participant', 'title', 'description', 'owner')) + event_id = Column(ForeignKey(Event.id, ondelete="CASCADE"), primary_key=True) + field = Column(Enum("participant", "title", "description", "owner")) # Note: The `cascade` properties need to be a parameter of the backref # here, and not of the relationship. Otherwise a sqlalchemy error is thrown # when you try to delete an event or a contact. contact = relationship( Contact, - primaryjoin='foreign(EventContactAssociation.contact_id) == ' - 'remote(Contact.id)', - backref=backref('event_associations', cascade='all, delete-orphan')) + primaryjoin="foreign(EventContactAssociation.contact_id) == " + "remote(Contact.id)", + backref=backref("event_associations", cascade="all, delete-orphan"), + ) event = relationship( - Event, - backref=backref('contacts', cascade='all, delete-orphan')) + Event, backref=backref("contacts", cascade="all, delete-orphan") + ) diff --git a/inbox/models/data_processing.py b/inbox/models/data_processing.py index e124c4108..374684a33 100644 --- a/inbox/models/data_processing.py +++ b/inbox/models/data_processing.py @@ -15,10 +15,10 @@ class DataProcessingCache(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): """Cached data used in data processing """ - namespace_id = Column(ForeignKey(Namespace.id, ondelete='CASCADE'), - nullable=False) - _contact_rankings = Column('contact_rankings', MEDIUMBLOB) - _contact_groups = Column('contact_groups', MEDIUMBLOB) + + namespace_id = Column(ForeignKey(Namespace.id, ondelete="CASCADE"), nullable=False) + _contact_rankings = Column("contact_rankings", MEDIUMBLOB) + _contact_groups = Column("contact_groups", MEDIUMBLOB) contact_rankings_last_updated = Column(DateTime) contact_groups_last_updated = Column(DateTime) @@ -31,8 +31,7 @@ def contact_rankings(self): @contact_rankings.setter def contact_rankings(self, value): - self._contact_rankings = \ - zlib.compress(json.dumps(value).encode('utf-8')) + self._contact_rankings = zlib.compress(json.dumps(value).encode("utf-8")) self.contact_rankings_last_updated = datetime.datetime.now() @property @@ -44,7 +43,7 @@ def contact_groups(self): @contact_groups.setter def contact_groups(self, value): - self._contact_groups = zlib.compress(json.dumps(value).encode('utf-8')) + self._contact_groups = zlib.compress(json.dumps(value).encode("utf-8")) self.contact_groups_last_updated = datetime.datetime.now() - __table_args__ = (UniqueConstraint('namespace_id'),) + __table_args__ = (UniqueConstraint("namespace_id"),) diff --git a/inbox/models/event.py b/inbox/models/event.py index f3bffc92b..5ae03f8c8 100644 --- a/inbox/models/event.py +++ b/inbox/models/event.py @@ -4,16 +4,30 @@ import ast import json -from sqlalchemy import (Column, String, ForeignKey, Text, 
Boolean, Integer, - DateTime, Enum, Index, event) +from sqlalchemy import ( + Column, + String, + ForeignKey, + Text, + Boolean, + Integer, + DateTime, + Enum, + Index, + event, +) from sqlalchemy.orm import relationship, backref, validates, reconstructor from sqlalchemy.types import TypeDecorator from sqlalchemy.dialects.mysql import LONGTEXT from inbox.sqlalchemy_ext.util import MAX_TEXT_CHARS, BigJSON, MutableList from inbox.models.base import MailSyncBase -from inbox.models.mixins import (HasPublicID, HasRevisions, UpdatedAtMixin, - DeletedAtMixin) +from inbox.models.mixins import ( + HasPublicID, + HasRevisions, + UpdatedAtMixin, + DeletedAtMixin, +) from inbox.models.calendar import Calendar from inbox.models.namespace import Namespace from inbox.models.message import Message @@ -23,6 +37,7 @@ from inbox.util.addr import extract_emails_from_text from nylas.logging import get_logger + log = get_logger() EVENT_STATUSES = ["confirmed", "tentative", "cancelled"] @@ -33,17 +48,17 @@ REMINDER_MAX_LEN = 255 OWNER_MAX_LEN = 1024 MAX_LENS = { - 'location': LOCATION_MAX_LEN, - 'owner': OWNER_MAX_LEN, - 'recurrence': MAX_TEXT_CHARS, - 'reminders': REMINDER_MAX_LEN, - 'title': TITLE_MAX_LEN, - 'raw_data': MAX_TEXT_CHARS + "location": LOCATION_MAX_LEN, + "owner": OWNER_MAX_LEN, + "recurrence": MAX_TEXT_CHARS, + "reminders": REMINDER_MAX_LEN, + "title": TITLE_MAX_LEN, + "raw_data": MAX_TEXT_CHARS, } def time_parse(x): - return arrow.get(x).to('utc').naive + return arrow.get(x).to("utc").naive class FlexibleDateTime(TypeDecorator): @@ -53,16 +68,16 @@ class FlexibleDateTime(TypeDecorator): def process_bind_param(self, value, dialect): if isinstance(value, arrow.arrow.Arrow): - value = value.to('utc').naive + value = value.to("utc").naive if isinstance(value, datetime): - value = arrow.get(value).to('utc').naive + value = arrow.get(value).to("utc").naive return value def process_result_value(self, value, dialect): if value is None: return value else: - return arrow.get(value).to('utc') + return arrow.get(value).to("utc") def compare_values(self, x, y): if isinstance(x, datetime) or isinstance(x, int): @@ -73,42 +88,46 @@ def compare_values(self, x, y): return x == y -class Event(MailSyncBase, HasRevisions, HasPublicID, UpdatedAtMixin, - DeletedAtMixin): +class Event(MailSyncBase, HasRevisions, HasPublicID, UpdatedAtMixin, DeletedAtMixin): """Data for events.""" - API_OBJECT_NAME = 'event' - API_MODIFIABLE_FIELDS = ['title', 'description', 'location', - 'when', 'participants', 'busy'] - namespace_id = Column(ForeignKey(Namespace.id, ondelete='CASCADE'), - nullable=False) + API_OBJECT_NAME = "event" + API_MODIFIABLE_FIELDS = [ + "title", + "description", + "location", + "when", + "participants", + "busy", + ] + + namespace_id = Column(ForeignKey(Namespace.id, ondelete="CASCADE"), nullable=False) namespace = relationship(Namespace, load_on_pending=True) - calendar_id = Column(ForeignKey(Calendar.id, ondelete='CASCADE'), - nullable=False) + calendar_id = Column(ForeignKey(Calendar.id, ondelete="CASCADE"), nullable=False) # Note that we configure a delete cascade, rather than # passive_deletes=True, in order to ensure that delete revisions are # created for events if their parent calendar is deleted. - calendar = relationship(Calendar, - backref=backref('events', cascade='delete'), - load_on_pending=True) + calendar = relationship( + Calendar, backref=backref("events", cascade="delete"), load_on_pending=True + ) # A server-provided unique ID. 
- uid = Column(String(767, collation='ascii_general_ci'), nullable=False) + uid = Column(String(767, collation="ascii_general_ci"), nullable=False) # DEPRECATED # TODO(emfree): remove - provider_name = Column(String(64), nullable=False, default='DEPRECATED') - source = Column('source', Enum('local', 'remote'), default='local') + provider_name = Column(String(64), nullable=False, default="DEPRECATED") + source = Column("source", Enum("local", "remote"), default="local") raw_data = Column(Text, nullable=False) title = Column(String(TITLE_MAX_LEN), nullable=True) # The database column is named differently for legacy reasons. - owner = Column('owner2', String(OWNER_MAX_LEN), nullable=True) + owner = Column("owner2", String(OWNER_MAX_LEN), nullable=True) - description = Column('_description', LONGTEXT, nullable=True) + description = Column("_description", LONGTEXT, nullable=True) location = Column(String(LOCATION_MAX_LEN), nullable=True) busy = Column(Boolean, nullable=False, default=True) read_only = Column(Boolean, nullable=False) @@ -119,37 +138,35 @@ class Event(MailSyncBase, HasRevisions, HasPublicID, UpdatedAtMixin, all_day = Column(Boolean, nullable=False) is_owner = Column(Boolean, nullable=False, default=True) last_modified = Column(FlexibleDateTime, nullable=True) - status = Column('status', Enum(*EVENT_STATUSES), - server_default='confirmed') + status = Column("status", Enum(*EVENT_STATUSES), server_default="confirmed") # This column is only used for events that are synced from iCalendar # files. - message_id = Column(ForeignKey(Message.id, ondelete='CASCADE'), - nullable=True) + message_id = Column(ForeignKey(Message.id, ondelete="CASCADE"), nullable=True) - message = relationship(Message, - backref=backref('events', - order_by='Event.last_modified', - cascade='all, delete-orphan')) + message = relationship( + Message, + backref=backref( + "events", order_by="Event.last_modified", cascade="all, delete-orphan" + ), + ) - __table_args__ = (Index('ix_event_ns_uid_calendar_id', - 'namespace_id', 'uid', 'calendar_id'),) + __table_args__ = ( + Index("ix_event_ns_uid_calendar_id", "namespace_id", "uid", "calendar_id"), + ) - participants = Column(MutableList.as_mutable(BigJSON), default=[], - nullable=True) + participants = Column(MutableList.as_mutable(BigJSON), default=[], nullable=True) # This is only used by the iCalendar invite code. The sequence number # stores the version number of the invite. sequence_number = Column(Integer, nullable=True) - visibility = Column(Enum('private', 'public'), nullable=True) + visibility = Column(Enum("private", "public"), nullable=True) - discriminator = Column('type', String(30)) - __mapper_args__ = {'polymorphic_on': discriminator, - 'polymorphic_identity': 'event'} + discriminator = Column("type", String(30)) + __mapper_args__ = {"polymorphic_on": discriminator, "polymorphic_identity": "event"} - @validates('reminders', 'recurrence', 'owner', 'location', 'title', - 'raw_data') + @validates("reminders", "recurrence", "owner", "location", "title", "raw_data") def validate_length(self, key, value): if value is None: return None @@ -159,8 +176,8 @@ def validate_length(self, key, value): def when(self): if self.all_day: # Dates are stored as DateTimes so transform to dates here. 
- start = arrow.get(self.start).to('utc').date() - end = arrow.get(self.end).to('utc').date() + start = arrow.get(self.start).to("utc").date() + end = arrow.get(self.end).to("utc").date() return Date(start) if start == end else DateSpan(start, end) else: start = self.start @@ -169,19 +186,19 @@ def when(self): @when.setter def when(self, when): - if 'time' in when: - self.start = self.end = time_parse(when['time']) + if "time" in when: + self.start = self.end = time_parse(when["time"]) self.all_day = False - elif 'start_time' in when: - self.start = time_parse(when['start_time']) - self.end = time_parse(when['end_time']) + elif "start_time" in when: + self.start = time_parse(when["start_time"]) + self.end = time_parse(when["end_time"]) self.all_day = False - elif 'date' in when: - self.start = self.end = date_parse(when['date']) + elif "date" in when: + self.start = self.end = date_parse(when["date"]) self.all_day = True - elif 'start_date' in when: - self.start = date_parse(when['start_date']) - self.end = date_parse(when['end_date']) + elif "start_date" in when: + self.start = date_parse(when["start_date"]) + self.end = date_parse(when["end_date"]) self.all_day = True def _merge_participant_attributes(self, left, right): @@ -190,9 +207,9 @@ def _merge_participant_attributes(self, left, right): # Special cases: if right[attribute] is None: continue - elif right[attribute] == '': + elif right[attribute] == "": continue - elif right['status'] == 'noreply': + elif right["status"] == "noreply": continue else: left[attribute] = right[attribute] @@ -217,18 +234,18 @@ def _partial_participants_merge(self, event): # hash only if the email is None. self_hash = {} for participant in self.participants: - email = participant.get('email') - name = participant.get('name') + email = participant.get("email") + name = participant.get("name") if email is not None: - participant['email'] = participant['email'].lower() + participant["email"] = participant["email"].lower() self_hash[email] = participant elif name is not None: # We have a name without an email. self_hash[name] = participant for participant in event.participants: - email = participant.get('email') - name = participant.get('name') + email = participant.get("email") + name = participant.get("name") # This is the tricky part --- we only want to store one entry per # participant --- we check if there's an email we already know, if @@ -237,18 +254,18 @@ def _partial_participants_merge(self, event): # always have an email address. 
# - karim if email is not None: - participant['email'] = participant['email'].lower() + participant["email"] = participant["email"].lower() if email in self_hash: - self_hash[email] =\ - self._merge_participant_attributes(self_hash[email], - participant) + self_hash[email] = self._merge_participant_attributes( + self_hash[email], participant + ) else: self_hash[email] = participant elif name is not None: if name in self_hash: - self_hash[name] =\ - self._merge_participant_attributes(self_hash[name], - participant) + self_hash[name] = self._merge_participant_attributes( + self_hash[name], participant + ) else: self_hash[name] = participant @@ -289,15 +306,18 @@ def update(self, event): @property def recurring(self): - if self.recurrence and self.recurrence != '': + if self.recurrence and self.recurrence != "": try: r = ast.literal_eval(self.recurrence) if isinstance(r, str): r = [r] return r except (ValueError, SyntaxError): - log.warn('Invalid RRULE entry for event', event_id=self.id, - raw_rrule=self.recurrence) + log.warn( + "Invalid RRULE entry for event", + event_id=self.id, + raw_rrule=self.recurrence, + ) return [] return [] @@ -310,7 +330,7 @@ def organizer_email(self): if len(parsed_owner) == 0: return None - if parsed_owner[1] == '': + if parsed_owner[1] == "": return None return parsed_owner[1] @@ -322,7 +342,7 @@ def organizer_name(self): if len(parsed_owner) == 0: return None - if parsed_owner[0] == '': + if parsed_owner[0] == "": return None return parsed_owner[0] @@ -337,19 +357,19 @@ def length(self): @property def cancelled(self): - return self.status == 'cancelled' + return self.status == "cancelled" @cancelled.setter def cancelled(self, is_cancelled): if is_cancelled: - self.status = 'cancelled' + self.status = "cancelled" else: - self.status = 'confirmed' + self.status = "confirmed" @property def calendar_event_link(self): try: - return json.loads(self.raw_data)['htmlLink'] + return json.loads(self.raw_data)["htmlLink"] except (ValueError, KeyError): return @@ -372,11 +392,11 @@ def __new__(cls, *args, **kwargs): # Decide whether or not to instantiate a RecurringEvent/Override # based on the kwargs we get. cls_ = cls - recurrence = kwargs.get('recurrence') - master_event_uid = kwargs.get('master_event_uid') + recurrence = kwargs.get("recurrence") + master_event_uid = kwargs.get("master_event_uid") if recurrence and master_event_uid: raise ValueError("Event can't have both recurrence and master UID") - if recurrence and recurrence != '': + if recurrence and recurrence != "": cls_ = RecurringEvent if master_event_uid: cls_ = RecurringEventOverride @@ -389,33 +409,35 @@ def __init__(self, **kwargs): del kwargs[k] super(Event, self).__init__(**kwargs) + # For API querying performance - default sort order is event.start ASC -Index('idx_namespace_id_started', Event.namespace_id, Event.start) +Index("idx_namespace_id_started", Event.namespace_id, Event.start) class RecurringEvent(Event): """ Represents an individual one-off instance of a recurring event, including cancelled events. 
""" - __mapper_args__ = {'polymorphic_identity': 'recurringevent'} + + __mapper_args__ = {"polymorphic_identity": "recurringevent"} __table_args__ = None - id = Column(ForeignKey('event.id', ondelete='CASCADE'), - primary_key=True) + id = Column(ForeignKey("event.id", ondelete="CASCADE"), primary_key=True) rrule = Column(String(RECURRENCE_MAX_LEN)) exdate = Column(Text) # There can be a lot of exception dates until = Column(FlexibleDateTime, nullable=True) start_timezone = Column(String(35)) def __init__(self, **kwargs): - self.start_timezone = kwargs.pop('original_start_tz', None) - kwargs['recurrence'] = repr(kwargs['recurrence']) + self.start_timezone = kwargs.pop("original_start_tz", None) + kwargs["recurrence"] = repr(kwargs["recurrence"]) super(RecurringEvent, self).__init__(**kwargs) try: self.unwrap_rrule() except Exception as e: - log.error("Error parsing RRULE entry", event_id=self.id, - error=e, exc_info=True) + log.error( + "Error parsing RRULE entry", event_id=self.id, error=e, exc_info=True + ) # FIXME @karim: use an overrided property instead of a reconstructor. @reconstructor @@ -423,27 +445,33 @@ def reconstruct(self): try: self.unwrap_rrule() except Exception as e: - log.error("Error parsing stored RRULE entry", event_id=self.id, - error=e, exc_info=True) + log.error( + "Error parsing stored RRULE entry", + event_id=self.id, + error=e, + exc_info=True, + ) def inflate(self, start=None, end=None): # Convert a RecurringEvent into a series of InflatedEvents # by expanding its RRULE into a series of start times. from inbox.events.recurring import get_start_times + occurrences = get_start_times(self, start, end) return [InflatedEvent(self, o) for o in occurrences] def unwrap_rrule(self): from inbox.events.util import parse_rrule_datetime + # Unwraps the RRULE list of strings into RecurringEvent properties. for item in self.recurring: - if item.startswith('RRULE'): + if item.startswith("RRULE"): self.rrule = item - if 'UNTIL' in item: - for p in item.split(';'): - if p.startswith('UNTIL'): + if "UNTIL" in item: + for p in item.split(";"): + if p.startswith("UNTIL"): self.until = parse_rrule_datetime(p[6:]) - elif item.startswith('EXDATE'): + elif item.startswith("EXDATE"): self.exdate = item def all_events(self, start=None, end=None): @@ -461,7 +489,8 @@ def all_events(self, start=None, end=None): # may show up in a query for calendar A. # (https://phab.nylas.com/T3420) overrides = overrides.filter( - RecurringEventOverride.calendar_id == self.calendar_id) + RecurringEventOverride.calendar_id == self.calendar_id + ) events = list(overrides) overridden_starts = [e.original_start_time for e in events] @@ -489,19 +518,22 @@ class RecurringEventOverride(Event): """ Represents an individual one-off instance of a recurring event, including cancelled events. 
""" - id = Column(ForeignKey('event.id', ondelete='CASCADE'), - primary_key=True) - __mapper_args__ = {'polymorphic_identity': 'recurringeventoverride', - 'inherit_condition': (id == Event.id)} + + id = Column(ForeignKey("event.id", ondelete="CASCADE"), primary_key=True) + __mapper_args__ = { + "polymorphic_identity": "recurringeventoverride", + "inherit_condition": (id == Event.id), + } __table_args__ = None - master_event_id = Column(ForeignKey('event.id', ondelete='CASCADE')) - master_event_uid = Column(String(767, collation='ascii_general_ci'), - index=True) + master_event_id = Column(ForeignKey("event.id", ondelete="CASCADE")) + master_event_uid = Column(String(767, collation="ascii_general_ci"), index=True) original_start_time = Column(FlexibleDateTime) - master = relationship(RecurringEvent, foreign_keys=[master_event_id], - backref=backref('overrides', lazy="dynamic", - cascade='all, delete-orphan')) + master = relationship( + RecurringEvent, + foreign_keys=[master_event_id], + backref=backref("overrides", lazy="dynamic", cascade="all, delete-orphan"), + ) def update(self, event): super(RecurringEventOverride, self).update(event) @@ -517,9 +549,10 @@ class InflatedEvent(Event): These are transient objects that should never be committed to the database. """ - __mapper_args__ = {'polymorphic_identity': 'inflatedevent'} - __tablename__ = 'event' - __table_args__ = {'extend_existing': True} + + __mapper_args__ = {"polymorphic_identity": "inflatedevent"} + __tablename__ = "event" + __table_args__ = {"extend_existing": True} def __init__(self, event, instance_start): self.master = event @@ -535,7 +568,7 @@ def __init__(self, event, instance_start): def set_start_end(self, start): # get the length from the master event length = self.master.length - self.start = start.to('utc') + self.start = start.to("utc") self.end = self.start + length def update(self, master): @@ -559,4 +592,5 @@ def insert_warning(mapper, connection, target): log.warn("InflatedEvent {} shouldn't be committed".format(target)) raise Exception("InflatedEvent should not be committed") -event.listen(InflatedEvent, 'before_insert', insert_warning) + +event.listen(InflatedEvent, "before_insert", insert_warning) diff --git a/inbox/models/folder.py b/inbox/models/folder.py index cc2da04ee..4b45a4fb6 100644 --- a/inbox/models/folder.py +++ b/inbox/models/folder.py @@ -9,26 +9,31 @@ from inbox.models.constants import MAX_INDEXABLE_LENGTH from inbox.sqlalchemy_ext.util import bakery from nylas.logging import get_logger + log = get_logger() class Folder(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): """ Folders from the remote account backend (Generic IMAP/ Gmail). """ + # TOFIX this causes an import error due to circular dependencies # from inbox.models.account import Account # `use_alter` required here to avoid circular dependency w/Account - account_id = Column(ForeignKey('account.id', use_alter=True, - name='folder_fk1', - ondelete='CASCADE'), nullable=False) + account_id = Column( + ForeignKey("account.id", use_alter=True, name="folder_fk1", ondelete="CASCADE"), + nullable=False, + ) account = relationship( - 'Account', + "Account", backref=backref( - 'folders', + "folders", # Don't load folders if the account is deleted, # (the folders will be deleted by the foreign key delete casade). - passive_deletes=True), + passive_deletes=True, + ), foreign_keys=[account_id], - load_on_pending=True) + load_on_pending=True, + ) # Set the name column to be case sensitive, which isn't the default for # MySQL. 
This is a requirement since IMAP allows users to create both a @@ -37,8 +42,9 @@ class Folder(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): # folders as per # https://msdn.microsoft.com/en-us/library/ee624913(v=exchg.80).aspx name = Column(CategoryNameString(), nullable=False) - _canonical_name = Column(String(MAX_INDEXABLE_LENGTH), nullable=False, - default='', name="canonical_name") + _canonical_name = Column( + String(MAX_INDEXABLE_LENGTH), nullable=False, default="", name="canonical_name" + ) @property def canonical_name(self): @@ -46,47 +52,59 @@ def canonical_name(self): @canonical_name.setter def canonical_name(self, value): - value = value or '' + value = value or "" self._canonical_name = value if self.category: self.category.name = value - canonical_name = synonym('_canonical_name', descriptor=canonical_name) + canonical_name = synonym("_canonical_name", descriptor=canonical_name) - category_id = Column(ForeignKey(Category.id, ondelete='CASCADE')) + category_id = Column(ForeignKey(Category.id, ondelete="CASCADE")) category = relationship( - Category, - backref=backref('folders', - cascade='all, delete-orphan')) + Category, backref=backref("folders", cascade="all, delete-orphan") + ) initial_sync_start = Column(DateTime, nullable=True) initial_sync_end = Column(DateTime, nullable=True) - @validates('name') + @validates("name") def validate_name(self, key, name): sanitized_name = sanitize_name(name) if sanitized_name != name: - log.warning("Truncating folder name for account", - account_id=self.account_id, name=name) + log.warning( + "Truncating folder name for account", + account_id=self.account_id, + name=name, + ) return sanitized_name @classmethod def find_or_create(cls, session, account, name, role=None): - q = session.query(cls).filter(cls.account_id == account.id)\ + q = ( + session.query(cls) + .filter(cls.account_id == account.id) .filter(cls.name == name) + ) - role = role or '' + role = role or "" try: obj = q.one() except NoResultFound: obj = cls(account=account, name=name, canonical_name=role) obj.category = Category.find_or_create( - session, namespace_id=account.namespace.id, name=role, - display_name=name, type_='folder') + session, + namespace_id=account.namespace.id, + name=role, + display_name=name, + type_="folder", + ) session.add(obj) except MultipleResultsFound: - log.info('Duplicate folder rows for name {}, account_id {}' - .format(name, account.id)) + log.info( + "Duplicate folder rows for name {}, account_id {}".format( + name, account.id + ) + ) raise return obj @@ -94,8 +112,7 @@ def find_or_create(cls, session, account, name, role=None): @classmethod def get(cls, id_, session): q = bakery(lambda session: session.query(cls)) - q += lambda q: q.filter(cls.id == bindparam('id_')) + q += lambda q: q.filter(cls.id == bindparam("id_")) return q(session).params(id_=id_).first() - __table_args__ = \ - (UniqueConstraint('account_id', 'name', 'canonical_name'),) + __table_args__ = (UniqueConstraint("account_id", "name", "canonical_name"),) diff --git a/inbox/models/label.py b/inbox/models/label.py index 161d4d5b4..2b1e91b77 100644 --- a/inbox/models/label.py +++ b/inbox/models/label.py @@ -7,49 +7,55 @@ from inbox.models.mixins import UpdatedAtMixin, DeletedAtMixin from inbox.models.constants import MAX_INDEXABLE_LENGTH from nylas.logging import get_logger + log = get_logger() class Label(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): """ Labels from the remote account backend (Gmail). 
""" + # TOFIX this causes an import error due to circular dependencies # from inbox.models.account import Account # `use_alter` required here to avoid circular dependency w/Account - account_id = Column(ForeignKey('account.id', use_alter=True, - name='label_fk1', - ondelete='CASCADE'), nullable=False) + account_id = Column( + ForeignKey("account.id", use_alter=True, name="label_fk1", ondelete="CASCADE"), + nullable=False, + ) account = relationship( - 'Account', + "Account", backref=backref( - 'labels', + "labels", # Don't load labels if the account is deleted, # (the labels will be deleted by the foreign key delete casade). - passive_deletes=True), - load_on_pending=True) + passive_deletes=True, + ), + load_on_pending=True, + ) name = Column(CategoryNameString(), nullable=False) - canonical_name = Column(String(MAX_INDEXABLE_LENGTH), nullable=False, - default='') + canonical_name = Column(String(MAX_INDEXABLE_LENGTH), nullable=False, default="") - category_id = Column(ForeignKey(Category.id, ondelete='CASCADE')) + category_id = Column(ForeignKey(Category.id, ondelete="CASCADE")) category = relationship( - Category, - backref=backref('labels', - cascade='all, delete-orphan')) + Category, backref=backref("labels", cascade="all, delete-orphan") + ) - @validates('name') + @validates("name") def validate_name(self, key, name): sanitized_name = sanitize_name(name) if sanitized_name != name: - log.warning("Truncating label name for account", - account_id=self.account_id, name=name) + log.warning( + "Truncating label name for account", + account_id=self.account_id, + name=name, + ) return sanitized_name @classmethod def find_or_create(cls, session, account, name, role=None): q = session.query(cls).filter(cls.account_id == account.id) - role = role or '' + role = role or "" if role: q = q.filter(cls.canonical_name == role) else: @@ -59,10 +65,13 @@ def find_or_create(cls, session, account, name, role=None): if obj is None: obj = cls(account=account, name=name, canonical_name=role) obj.category = Category.find_or_create( - session, namespace_id=account.namespace.id, name=role, - display_name=name, type_='label') + session, + namespace_id=account.namespace.id, + name=role, + display_name=name, + type_="label", + ) session.add(obj) return obj - __table_args__ = \ - (UniqueConstraint('account_id', 'name', 'canonical_name'),) + __table_args__ = (UniqueConstraint("account_id", "name", "canonical_name"),) diff --git a/inbox/models/message.py b/inbox/models/message.py index ff9b6ac6f..1567c6d35 100644 --- a/inbox/models/message.py +++ b/inbox/models/message.py @@ -6,15 +6,32 @@ from collections import defaultdict from flanker import mime -from sqlalchemy import (Column, Integer, BigInteger, String, DateTime, - Boolean, Enum, Index, bindparam) +from sqlalchemy import ( + Column, + Integer, + BigInteger, + String, + DateTime, + Boolean, + Enum, + Index, + bindparam, +) from sqlalchemy.dialects.mysql import LONGBLOB, VARCHAR -from sqlalchemy.orm import (relationship, backref, validates, joinedload, - subqueryload, load_only, synonym) +from sqlalchemy.orm import ( + relationship, + backref, + validates, + joinedload, + subqueryload, + load_only, + synonym, +) from sqlalchemy.sql.expression import false from sqlalchemy.ext.associationproxy import association_proxy from nylas.logging import get_logger + log = get_logger() from inbox.config import config from inbox.util.html import plaintext2html, strip_tags @@ -23,8 +40,12 @@ from inbox.util.misc import parse_references, get_internaldate from 
inbox.util.blockstore import save_to_blockstore from inbox.security.blobstorage import encode_blob, decode_blob -from inbox.models.mixins import (HasPublicID, HasRevisions, UpdatedAtMixin, - DeletedAtMixin) +from inbox.models.mixins import ( + HasPublicID, + HasRevisions, + UpdatedAtMixin, + DeletedAtMixin, +) from inbox.models.base import MailSyncBase from inbox.models.category import Category @@ -45,40 +66,40 @@ def _trim_filename(s, namespace_id, max_len=255): # characters, len will return the wrong value (bytes not chars). # Convert it to unicode first. if not isinstance(s, unicode): - s = s.decode('utf-8', 'ignore') + s = s.decode("utf-8", "ignore") if len(s) > max_len: # If we need to truncate the string, keep the extension filename, fileext = os.path.splitext(s) if len(fileext) < max_len - 1: - return filename[:(max_len - len(fileext))] + fileext + return filename[: (max_len - len(fileext))] + fileext else: - return filename[0] + fileext[:(max_len - 1)] + return filename[0] + fileext[: (max_len - 1)] return s -class Message(MailSyncBase, HasRevisions, HasPublicID, UpdatedAtMixin, - DeletedAtMixin): - +class Message(MailSyncBase, HasRevisions, HasPublicID, UpdatedAtMixin, DeletedAtMixin): @property def API_OBJECT_NAME(self): - return 'message' if not self.is_draft else 'draft' + return "message" if not self.is_draft else "draft" namespace_id = Column(BigInteger, index=True, nullable=False) namespace = relationship( - 'Namespace', - primaryjoin='foreign(Message.namespace_id) == remote(Namespace.id)', # noqa - load_on_pending=True) + "Namespace", + primaryjoin="foreign(Message.namespace_id) == remote(Namespace.id)", # noqa + load_on_pending=True, + ) # Do delete messages if their associated thread is deleted. thread_id = Column(BigInteger, index=True, nullable=False) _thread = relationship( - 'Thread', - primaryjoin='foreign(Message.thread_id) == remote(Thread.id)', # noqa - backref=backref('messages', - order_by='Message.received_date', - cascade="all, delete-orphan")) + "Thread", + primaryjoin="foreign(Message.thread_id) == remote(Thread.id)", # noqa + backref=backref( + "messages", order_by="Message.received_date", cascade="all, delete-orphan" + ), + ) @property def thread(self): @@ -90,7 +111,7 @@ def thread(self, value): self._thread.deleted_at = None self._thread = value - thread = synonym('_thread', descriptor=thread) + thread = synonym("_thread", descriptor=thread) from_addr = Column(JSON, nullable=False, default=lambda: []) sender_addr = Column(JSON, nullable=True) @@ -103,11 +124,10 @@ def thread(self, value): # max message_id_header is 998 characters message_id_header = Column(String(998), nullable=True) # There is no hard limit on subject limit in the spec, but 255 is common. 
- subject = Column(String(255), nullable=True, default='') + subject = Column(String(255), nullable=True, default="") received_date = Column(DateTime, nullable=False, index=True) size = Column(Integer, nullable=False) - data_sha256 = Column(VARCHAR(64, charset='ascii'), nullable=True, - index=True) + data_sha256 = Column(VARCHAR(64, charset="ascii"), nullable=True, index=True) is_read = Column(Boolean, server_default=false(), nullable=False) is_starred = Column(Boolean, server_default=false(), nullable=False) @@ -117,8 +137,16 @@ def thread(self, value): is_sent = Column(Boolean, server_default=false(), nullable=False) # REPURPOSED - state = Column(Enum('draft', 'sending', 'sending failed', 'sent', - 'actions_pending', 'actions_committed')) + state = Column( + Enum( + "draft", + "sending", + "sending failed", + "sent", + "actions_pending", + "actions_committed", + ) + ) @property def is_sending(self): @@ -126,34 +154,33 @@ def is_sending(self): def mark_as_sending(self): if self.is_sent: - raise ValueError('Cannot mark a sent message as sending') + raise ValueError("Cannot mark a sent message as sending") self.version = MAX_MYSQL_INTEGER self.is_draft = False self.regenerate_nylas_uid() @property def categories_changes(self): - return self.state == 'actions_pending' + return self.state == "actions_pending" @categories_changes.setter def categories_changes(self, has_changes): if has_changes is True: - self.state = 'actions_pending' + self.state = "actions_pending" else: - self.state = 'actions_committed' + self.state = "actions_committed" _compacted_body = Column(LONGBLOB, nullable=True) snippet = Column(String(191), nullable=False) # this might be a mail-parsing bug, or just a message from a bad client - decode_error = Column(Boolean, server_default=false(), nullable=False, - index=True) + decode_error = Column(Boolean, server_default=false(), nullable=False, index=True) # In accordance with JWZ (http://www.jwz.org/doc/threading.html) references = Column(JSON, nullable=True) # Only used for drafts. - version = Column(Integer, nullable=False, server_default='0') + version = Column(Integer, nullable=False, server_default="0") # only on messages from Gmail (TODO: use different table) # @@ -170,7 +197,7 @@ def categories_changes(self, has_changes): g_thrid = Column(BigInteger, nullable=True, index=True, unique=False) # The uid as set in the X-INBOX-ID header of a sent message we create - nylas_uid = Column(String(64), nullable=True, index=True, name='inbox_uid') + nylas_uid = Column(String(64), nullable=True, index=True, name="inbox_uid") def regenerate_nylas_uid(self): """ @@ -180,12 +207,15 @@ def regenerate_nylas_uid(self): the old draft and add the new one on the remote.""" from inbox.sendmail.message import generate_message_id_header - self.nylas_uid = '{}-{}'.format(self.public_id, self.version) + + self.nylas_uid = "{}-{}".format(self.public_id, self.version) self.message_id_header = generate_message_id_header(self.nylas_uid) categories = association_proxy( - 'messagecategories', 'category', - creator=lambda category: MessageCategory(category=category)) + "messagecategories", + "category", + creator=lambda category: MessageCategory(category=category), + ) # FOR INBOX-CREATED MESSAGES: @@ -198,9 +228,10 @@ def regenerate_nylas_uid(self): # The backref here is unused, but must be configured so that the child's # foreign key gets updated when the parent is deleted. 
reply_to_message = relationship( - 'Message', - primaryjoin='foreign(Message.reply_to_message_id) == remote(Message.id)', # noqa - backref='replies') + "Message", + primaryjoin="foreign(Message.reply_to_message_id) == remote(Message.id)", # noqa + backref="replies", + ) def mark_for_deletion(self): """ @@ -210,7 +241,7 @@ def mark_for_deletion(self): """ self.deleted_at = datetime.datetime.utcnow() - @validates('subject') + @validates("subject") def sanitize_subject(self, key, value): # Trim overlong subjects, and remove null bytes. The latter can result # when, for example, UTF-8 text decoded from an RFC2047-encoded header @@ -218,12 +249,11 @@ def sanitize_subject(self, key, value): if value is None: return value = unicode_safe_truncate(value, 255) - value = value.replace('\0', '') + value = value.replace("\0", "") return value @classmethod - def create_from_synced(cls, account, mid, folder_name, received_date, - body_string): + def create_from_synced(cls, account, mid, folder_name, received_date, body_string): """ Parses message data and writes out db metadata and MIME blocks. @@ -245,8 +275,8 @@ def create_from_synced(cls, account, mid, folder_name, received_date, _rqd = [account, mid, folder_name, body_string] if not all([v is not None for v in _rqd]): raise ValueError( - 'Required keyword arguments: account, mid, folder_name, ' - 'body_string') + "Required keyword arguments: account, mid, folder_name, " "body_string" + ) # stop trickle-down bugs assert account.namespace is not None assert not isinstance(body_string, unicode) @@ -265,82 +295,111 @@ def create_from_synced(cls, account, mid, folder_name, received_date, parsed = mime.from_string(body_string) # Non-persisted instance attribute used by EAS. msg.parsed_body = parsed - msg._parse_metadata(parsed, body_string, received_date, account.id, - folder_name, mid) + msg._parse_metadata( + parsed, body_string, received_date, account.id, folder_name, mid + ) except Exception as e: parsed = None # Non-persisted instance attribute used by EAS. - msg.parsed_body = '' - log.error('Error parsing message metadata', - folder_name=folder_name, account_id=account.id, error=e, - mid=mid) + msg.parsed_body = "" + log.error( + "Error parsing message metadata", + folder_name=folder_name, + account_id=account.id, + error=e, + mid=mid, + ) msg._mark_error() if parsed is not None: plain_parts = [] html_parts = [] - for mimepart in parsed.walk( - with_self=parsed.content_type.is_singlepart()): + for mimepart in parsed.walk(with_self=parsed.content_type.is_singlepart()): try: if mimepart.content_type.is_multipart(): continue # TODO should we store relations? 
- msg._parse_mimepart(mid, mimepart, account.namespace.id, - html_parts, plain_parts) - except (mime.DecodingError, AttributeError, RuntimeError, - TypeError, binascii.Error, UnicodeDecodeError) as e: - log.error('Error parsing message MIME parts', - folder_name=folder_name, account_id=account.id, - error=e, mid=mid) + msg._parse_mimepart( + mid, mimepart, account.namespace.id, html_parts, plain_parts + ) + except ( + mime.DecodingError, + AttributeError, + RuntimeError, + TypeError, + binascii.Error, + UnicodeDecodeError, + ) as e: + log.error( + "Error parsing message MIME parts", + folder_name=folder_name, + account_id=account.id, + error=e, + mid=mid, + ) msg._mark_error() - store_body = config.get('STORE_MESSAGE_BODIES', True) + store_body = config.get("STORE_MESSAGE_BODIES", True) msg.calculate_body(html_parts, plain_parts, store_body=store_body) # Occasionally people try to send messages to way too many # recipients. In such cases, empty the field and treat as a parsing # error so that we don't break the entire sync. - for field in ('to_addr', 'cc_addr', 'bcc_addr', 'references', - 'reply_to'): + for field in ("to_addr", "cc_addr", "bcc_addr", "references", "reply_to"): value = getattr(msg, field) if json_field_too_long(value): - log.error('Recipient field too long', field=field, - account_id=account.id, folder_name=folder_name, - mid=mid) + log.error( + "Recipient field too long", + field=field, + account_id=account.id, + folder_name=folder_name, + mid=mid, + ) setattr(msg, field, []) msg._mark_error() return msg - def _parse_metadata(self, parsed, body_string, received_date, - account_id, folder_name, mid): - mime_version = parsed.headers.get('Mime-Version') + def _parse_metadata( + self, parsed, body_string, received_date, account_id, folder_name, mid + ): + mime_version = parsed.headers.get("Mime-Version") # sometimes MIME-Version is '1.0 (1.0)', hence the .startswith() - if mime_version is not None and not mime_version.startswith('1.0'): - log.warning('Unexpected MIME-Version', - account_id=account_id, folder_name=folder_name, - mid=mid, mime_version=mime_version) + if mime_version is not None and not mime_version.startswith("1.0"): + log.warning( + "Unexpected MIME-Version", + account_id=account_id, + folder_name=folder_name, + mid=mid, + mime_version=mime_version, + ) self.subject = parsed.subject - self.from_addr = parse_mimepart_address_header(parsed, 'From') - self.sender_addr = parse_mimepart_address_header(parsed, 'Sender') - self.reply_to = parse_mimepart_address_header(parsed, 'Reply-To') - self.to_addr = parse_mimepart_address_header(parsed, 'To') - self.cc_addr = parse_mimepart_address_header(parsed, 'Cc') - self.bcc_addr = parse_mimepart_address_header(parsed, 'Bcc') + self.from_addr = parse_mimepart_address_header(parsed, "From") + self.sender_addr = parse_mimepart_address_header(parsed, "Sender") + self.reply_to = parse_mimepart_address_header(parsed, "Reply-To") + self.to_addr = parse_mimepart_address_header(parsed, "To") + self.cc_addr = parse_mimepart_address_header(parsed, "Cc") + self.bcc_addr = parse_mimepart_address_header(parsed, "Bcc") - self.in_reply_to = parsed.headers.get('In-Reply-To') + self.in_reply_to = parsed.headers.get("In-Reply-To") # The RFC mandates that the Message-Id header must be at most 998 # characters. Sadly, not everybody follows specs. 
- self.message_id_header = parsed.headers.get('Message-Id') + self.message_id_header = parsed.headers.get("Message-Id") if self.message_id_header and len(self.message_id_header) > 998: self.message_id_header = self.message_id_header[:998] - log.warning('Message-Id header too long. Truncating', - parsed.headers.get('Message-Id'), - logstash_tag='truncated_message_id') - - self.received_date = received_date if received_date else \ - get_internaldate(parsed.headers.get('Date'), - parsed.headers.get('Received')) + log.warning( + "Message-Id header too long. Truncating", + parsed.headers.get("Message-Id"), + logstash_tag="truncated_message_id", + ) + + self.received_date = ( + received_date + if received_date + else get_internaldate( + parsed.headers.get("Date"), parsed.headers.get("Received") + ) + ) # It seems MySQL rounds up fractional seconds in a weird way, # preventing us from reconciling messages correctly. See: @@ -349,74 +408,97 @@ def _parse_metadata(self, parsed, body_string, received_date, self.received_date = self.received_date.replace(microsecond=0) # Custom Nylas header - self.nylas_uid = parsed.headers.get('X-INBOX-ID') + self.nylas_uid = parsed.headers.get("X-INBOX-ID") # In accordance with JWZ (http://www.jwz.org/doc/threading.html) self.references = parse_references( - parsed.headers.get('References', ''), - parsed.headers.get('In-Reply-To', '')) + parsed.headers.get("References", ""), parsed.headers.get("In-Reply-To", "") + ) self.size = len(body_string) # includes headers text - def _parse_mimepart(self, mid, mimepart, namespace_id, html_parts, - plain_parts): + def _parse_mimepart(self, mid, mimepart, namespace_id, html_parts, plain_parts): disposition, _ = mimepart.content_disposition - content_id = mimepart.headers.get('Content-Id') + content_id = mimepart.headers.get("Content-Id") content_type, params = mimepart.content_type filename = mimepart.detected_file_name - if filename == '': + if filename == "": filename = None data = mimepart.body - is_text = content_type.startswith('text') - if disposition not in (None, 'inline', 'attachment'): - log.error('Unknown Content-Disposition', - message_public_id=self.public_id, - bad_content_disposition=mimepart.content_disposition) + is_text = content_type.startswith("text") + if disposition not in (None, "inline", "attachment"): + log.error( + "Unknown Content-Disposition", + message_public_id=self.public_id, + bad_content_disposition=mimepart.content_disposition, + ) self._mark_error() return - if disposition == 'attachment': - self._save_attachment(data, disposition, content_type, - filename, content_id, namespace_id, mid) + if disposition == "attachment": + self._save_attachment( + data, disposition, content_type, filename, content_id, namespace_id, mid + ) return - if (disposition == 'inline' and - not (is_text and filename is None and content_id is None)): + if disposition == "inline" and not ( + is_text and filename is None and content_id is None + ): # Some clients set Content-Disposition: inline on text MIME parts # that we really want to treat as part of the text body. Don't # treat those as attachments. - self._save_attachment(data, disposition, content_type, - filename, content_id, namespace_id, mid) + self._save_attachment( + data, disposition, content_type, filename, content_id, namespace_id, mid + ) return if is_text: if data is None: return - normalized_data = data.encode('utf-8', 'strict') - normalized_data = normalized_data.replace('\r\n', '\n'). 
\ - replace('\r', '\n') - if content_type == 'text/html': + normalized_data = data.encode("utf-8", "strict") + normalized_data = normalized_data.replace("\r\n", "\n").replace("\r", "\n") + if content_type == "text/html": html_parts.append(normalized_data) - elif content_type == 'text/plain': + elif content_type == "text/plain": plain_parts.append(normalized_data) else: - log.info('Saving other text MIME part as attachment', - content_type=content_type, namespace_id=namespace_id) - self._save_attachment(data, 'attachment', content_type, - filename, content_id, namespace_id, mid) + log.info( + "Saving other text MIME part as attachment", + content_type=content_type, + namespace_id=namespace_id, + ) + self._save_attachment( + data, + "attachment", + content_type, + filename, + content_id, + namespace_id, + mid, + ) return # Finally, if we get a non-text MIME part without Content-Disposition, # treat it as an attachment. - self._save_attachment(data, 'attachment', content_type, - filename, content_id, namespace_id, mid) + self._save_attachment( + data, "attachment", content_type, filename, content_id, namespace_id, mid + ) - def _save_attachment(self, data, content_disposition, content_type, - filename, content_id, namespace_id, mid): + def _save_attachment( + self, + data, + content_disposition, + content_type, + filename, + content_id, + namespace_id, + mid, + ): from inbox.models import Part, Block + block = Block() block.namespace_id = namespace_id block.filename = _trim_filename(filename, namespace_id=namespace_id) @@ -426,9 +508,9 @@ def _save_attachment(self, data, content_disposition, content_type, content_id = content_id[:255] part.content_id = content_id part.content_disposition = content_disposition - data = data or '' + data = data or "" if isinstance(data, unicode): - data = data.encode('utf-8', 'strict') + data = data.encode("utf-8", "strict") block.data = data def _mark_error(self): @@ -451,11 +533,11 @@ def _mark_error(self): if self.body is None: self.body = None if self.snippet is None: - self.snippet = '' + self.snippet = "" def calculate_body(self, html_parts, plain_parts, store_body=True): - html_body = ''.join(html_parts).decode('utf-8').strip() - plain_body = '\n'.join(plain_parts).decode('utf-8').strip() + html_body = "".join(html_parts).decode("utf-8").strip() + plain_body = "\n".join(plain_parts).decode("utf-8").strip() if html_body: self.snippet = self.calculate_html_snippet(html_body) if store_body: @@ -470,27 +552,27 @@ def calculate_body(self, html_parts, plain_parts, store_body=True): self.body = None else: self.body = None - self.snippet = u'' + self.snippet = u"" def calculate_html_snippet(self, text): text = strip_tags(text) return self.calculate_plaintext_snippet(text) def calculate_plaintext_snippet(self, text): - return unicode_safe_truncate(' '.join(text.split()), SNIPPET_LENGTH) + return unicode_safe_truncate(" ".join(text.split()), SNIPPET_LENGTH) @property def body(self): if self._compacted_body is None: return None - return decode_blob(self._compacted_body).decode('utf-8') + return decode_blob(self._compacted_body).decode("utf-8") @body.setter def body(self, value): if value is None: self._compacted_body = None else: - self._compacted_body = encode_blob(value.encode('utf-8')) + self._compacted_body = encode_blob(value.encode("utf-8")) @property def participants(self): @@ -521,7 +603,7 @@ def participants(self): p = [] for address, phrases in deduped_participants.iteritems(): for phrase in phrases: - if phrase != '' or len(phrases) == 1: + if phrase != 
"" or len(phrases) == 1: p.append((phrase, address)) return p @@ -535,34 +617,37 @@ def api_attachment_metadata(self): for part in self.parts: if not part.is_attachment: continue - k = {'content_type': part.block.content_type, - 'size': part.block.size, - 'filename': part.block.filename, - 'id': part.block.public_id} + k = { + "content_type": part.block.content_type, + "size": part.block.size, + "filename": part.block.filename, + "id": part.block.public_id, + } content_id = part.content_id if content_id: - if content_id[0] == '<' and content_id[-1] == '>': + if content_id[0] == "<" and content_id[-1] == ">": content_id = content_id[1:-1] - k['content_id'] = content_id + k["content_id"] = content_id resp.append(k) return resp @property def versioned_relationships(self): - return ['parts', 'messagecategories'] + return ["parts", "messagecategories"] @property def propagated_attributes(self): - return ['is_read', 'is_starred', 'messagecategories'] + return ["is_read", "is_starred", "messagecategories"] @property def has_attached_events(self): - return 'text/calendar' in [p.block.content_type for p in self.parts] + return "text/calendar" in [p.block.content_type for p in self.parts] @property def attached_event_files(self): - return [part for part in self.parts - if part.block.content_type == 'text/calendar'] + return [ + part for part in self.parts if part.block.content_type == "text/calendar" + ] @property def account(self): @@ -570,7 +655,7 @@ def account(self): def get_header(self, header, mid): if self.decode_error: - log.warning('Error getting message header', mid=mid) + log.warning("Error getting message header", mid=mid) return return self.parsed_body.headers.get(header) @@ -578,83 +663,112 @@ def get_header(self, header, mid): def from_public_id(cls, public_id, namespace_id, db_session): q = bakery(lambda s: s.query(cls)) q += lambda q: q.filter( - Message.public_id == bindparam('public_id'), - Message.namespace_id == bindparam('namespace_id')) + Message.public_id == bindparam("public_id"), + Message.namespace_id == bindparam("namespace_id"), + ) q += lambda q: q.options( - joinedload(Message.thread). - load_only('discriminator', 'public_id'), - joinedload(Message.messagecategories). 
- joinedload(MessageCategory.category), - joinedload(Message.parts).joinedload('block'), - joinedload(Message.events)) - return q(db_session).params( - public_id=public_id, namespace_id=namespace_id).one() + joinedload(Message.thread).load_only("discriminator", "public_id"), + joinedload(Message.messagecategories).joinedload(MessageCategory.category), + joinedload(Message.parts).joinedload("block"), + joinedload(Message.events), + ) + return ( + q(db_session).params(public_id=public_id, namespace_id=namespace_id).one() + ) @classmethod def api_loading_options(cls, expand=False): - columns = ['public_id', 'is_draft', 'from_addr', 'to_addr', 'cc_addr', - 'bcc_addr', 'is_read', 'is_starred', 'received_date', - 'is_sent', 'subject', 'snippet', 'version', 'from_addr', - 'to_addr', 'cc_addr', 'bcc_addr', 'reply_to', - '_compacted_body', 'thread_id', 'namespace_id'] + columns = [ + "public_id", + "is_draft", + "from_addr", + "to_addr", + "cc_addr", + "bcc_addr", + "is_read", + "is_starred", + "received_date", + "is_sent", + "subject", + "snippet", + "version", + "from_addr", + "to_addr", + "cc_addr", + "bcc_addr", + "reply_to", + "_compacted_body", + "thread_id", + "namespace_id", + ] if expand: - columns += ['message_id_header', 'in_reply_to', 'references'] + columns += ["message_id_header", "in_reply_to", "references"] return ( load_only(*columns), - subqueryload('parts').joinedload('block'), - subqueryload('thread').load_only('public_id', 'discriminator'), - subqueryload('events').load_only('public_id', 'discriminator'), - subqueryload('messagecategories').joinedload('category') + subqueryload("parts").joinedload("block"), + subqueryload("thread").load_only("public_id", "discriminator"), + subqueryload("events").load_only("public_id", "discriminator"), + subqueryload("messagecategories").joinedload("category"), ) # Need to explicitly specify the index length for table generation with MySQL # 5.6 when columns are too long to be fully indexed with utf8mb4 collation. -Index('ix_message_subject', Message.subject, mysql_length=80) +Index("ix_message_subject", Message.subject, mysql_length=80) # For API querying performance. -Index('ix_message_ns_id_is_draft_received_date', Message.namespace_id, - Message.is_draft, Message.received_date) +Index( + "ix_message_ns_id_is_draft_received_date", + Message.namespace_id, + Message.is_draft, + Message.received_date, +) # For async deletion. -Index('ix_message_namespace_id_deleted_at', Message.namespace_id, - Message.deleted_at) +Index("ix_message_namespace_id_deleted_at", Message.namespace_id, Message.deleted_at) # For statistics about messages sent via Nylas -Index('ix_message_namespace_id_is_created', Message.namespace_id, - Message.is_created) +Index("ix_message_namespace_id_is_created", Message.namespace_id, Message.is_created) # For filtering messages by Message-Id via API (with namespace), and for # debugging purposes (without namespace). 
-Index('ix_message_message_id_header_namespace_id', - Message.message_id_header, Message.namespace_id, - mysql_length={'message_id_header': 80}) +Index( + "ix_message_message_id_header_namespace_id", + Message.message_id_header, + Message.namespace_id, + mysql_length={"message_id_header": 80}, +) # Used by delete-accounts to find next batch to delete -Index('ix_message_namespace_id_received_date', Message.namespace_id, - Message.received_date) +Index( + "ix_message_namespace_id_received_date", Message.namespace_id, Message.received_date +) + class MessageCategory(MailSyncBase): """ Mapping between messages and categories. """ + message_id = Column(BigInteger, nullable=False) message = relationship( - 'Message', - primaryjoin='foreign(MessageCategory.message_id) == remote(Message.id)', # noqa - backref=backref('messagecategories', - collection_class=set, - cascade="all, delete-orphan")) + "Message", + primaryjoin="foreign(MessageCategory.message_id) == remote(Message.id)", # noqa + backref=backref( + "messagecategories", collection_class=set, cascade="all, delete-orphan" + ), + ) category_id = Column(BigInteger, nullable=False, index=True) category = relationship( Category, - primaryjoin='foreign(MessageCategory.category_id) == remote(Category.id)', # noqa - backref=backref('messagecategories', - cascade="all, delete-orphan", - lazy='dynamic')) + primaryjoin="foreign(MessageCategory.category_id) == remote(Category.id)", # noqa + backref=backref( + "messagecategories", cascade="all, delete-orphan", lazy="dynamic" + ), + ) @property def namespace(self): return self.message.namespace -Index('message_category_ids', - MessageCategory.message_id, MessageCategory.category_id) + +Index("message_category_ids", MessageCategory.message_id, MessageCategory.category_id) diff --git a/inbox/models/meta.py b/inbox/models/meta.py index 053d4f597..ac083920b 100644 --- a/inbox/models/meta.py +++ b/inbox/models/meta.py @@ -7,9 +7,12 @@ def load_models(): from inbox.models.base import MailSyncBase from inbox.models.action_log import ActionLog from inbox.models.block import Block, Part - from inbox.models.contact import (EventContactAssociation, - MessageContactAssociation, Contact, - PhoneNumber) + from inbox.models.contact import ( + EventContactAssociation, + MessageContactAssociation, + Contact, + PhoneNumber, + ) from inbox.models.calendar import Calendar from inbox.models.data_processing import DataProcessingCache from inbox.models.event import Event @@ -24,10 +27,36 @@ def load_models(): from inbox.models.label import Label from inbox.models.category import Category from inbox.models.metadata import Metadata - exports = [Account, MailSyncBase, ActionLog, Block, Part, - MessageContactAssociation, Contact, PhoneNumber, Calendar, - DataProcessingCache, Event, EventContactAssociation, Folder, - Message, Namespace, ContactSearchIndexCursor, Secret, - Thread, Transaction, When, Time, TimeSpan, Date, DateSpan, - Label, Category, MessageCategory, Metadata, AccountTransaction] + + exports = [ + Account, + MailSyncBase, + ActionLog, + Block, + Part, + MessageContactAssociation, + Contact, + PhoneNumber, + Calendar, + DataProcessingCache, + Event, + EventContactAssociation, + Folder, + Message, + Namespace, + ContactSearchIndexCursor, + Secret, + Thread, + Transaction, + When, + Time, + TimeSpan, + Date, + DateSpan, + Label, + Category, + MessageCategory, + Metadata, + AccountTransaction, + ] return exports diff --git a/inbox/models/metadata.py b/inbox/models/metadata.py index 49cb90c1d..a70840c49 100644 --- 
a/inbox/models/metadata.py +++ b/inbox/models/metadata.py @@ -1,16 +1,19 @@ from sqlalchemy import Column, Integer, String, ForeignKey, BigInteger, Index -from sqlalchemy.orm import (relationship) +from sqlalchemy.orm import relationship from inbox.sqlalchemy_ext.util import JSON from inbox.models.base import MailSyncBase -from inbox.models.mixins import (HasPublicID, HasRevisions, UpdatedAtMixin, - DeletedAtMixin) +from inbox.models.mixins import ( + HasPublicID, + HasRevisions, + UpdatedAtMixin, + DeletedAtMixin, +) from inbox.sqlalchemy_ext.util import Base36UID from inbox.models.namespace import Namespace -class Metadata(MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin, - DeletedAtMixin): +class Metadata(MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin, DeletedAtMixin): """ Key-value store for applications to store arbitrary data associated with mail. API object public_id's are used as the keys, and values are JSON. @@ -24,7 +27,8 @@ class Metadata(MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin, metadata objects should never be deleted from the table; instead, the row's value should be set to null. """ - API_OBJECT_NAME = 'metadata' + + API_OBJECT_NAME = "metadata" # Application data fields # - app_id: The referenced app's primary key @@ -35,8 +39,7 @@ class Metadata(MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin, app_client_id = Column(Base36UID, nullable=False) app_type = Column(String(20), nullable=False) - namespace_id = Column(ForeignKey(Namespace.id, ondelete='CASCADE'), - nullable=False) + namespace_id = Column(ForeignKey(Namespace.id, ondelete="CASCADE"), nullable=False) namespace = relationship(Namespace) # Reference to the object that this metadata is about. Public ID is the @@ -49,9 +52,10 @@ class Metadata(MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin, queryable_value = Column(BigInteger, nullable=True, index=True) - version = Column(Integer, nullable=True, server_default='0') + version = Column(Integer, nullable=True, server_default="0") + -Index('ix_obj_public_id_app_id', - Metadata.object_public_id, Metadata.app_id, unique=True) -Index('ix_namespace_id_app_id', - Metadata.namespace_id, Metadata.app_id) +Index( + "ix_obj_public_id_app_id", Metadata.object_public_id, Metadata.app_id, unique=True +) +Index("ix_namespace_id_app_id", Metadata.namespace_id, Metadata.app_id) diff --git a/inbox/models/mixins.py b/inbox/models/mixins.py index d69f523a3..e02216e7f 100644 --- a/inbox/models/mixins.py +++ b/inbox/models/mixins.py @@ -12,6 +12,7 @@ class HasRevisions(ABCMixin): """Mixin for tables that should be versioned in the transaction log.""" + @property def versioned_relationships(self): """ @@ -77,12 +78,12 @@ def has_versioned_changes(self): class HasPublicID(object): - public_id = Column(Base36UID, nullable=False, - index=True, default=generate_public_id) + public_id = Column( + Base36UID, nullable=False, index=True, default=generate_public_id + ) class AddressComparator(Comparator): - def __eq__(self, other): return self.__clause_element__() == canonicalize_address(other) @@ -94,7 +95,6 @@ def in_(self, addresses): class CaseInsensitiveComparator(Comparator): - def __eq__(self, other): return func.lower(self.__clause_element__()) == func.lower(other) @@ -112,10 +112,11 @@ class HasEmailAddress(object): equivalent. 
""" - _raw_address = Column(String(MAX_INDEXABLE_LENGTH), - nullable=True, index=True) - _canonicalized_address = Column(String(MAX_INDEXABLE_LENGTH), - nullable=True, index=True) + + _raw_address = Column(String(MAX_INDEXABLE_LENGTH), nullable=True, index=True) + _canonicalized_address = Column( + String(MAX_INDEXABLE_LENGTH), nullable=True, index=True + ) @hybrid_property def email_address(self): @@ -137,13 +138,17 @@ def email_address(self, value): class CreatedAtMixin(object): - created_at = Column(DateTime, server_default=func.now(), - nullable=False, index=True) + created_at = Column(DateTime, server_default=func.now(), nullable=False, index=True) class UpdatedAtMixin(object): - updated_at = Column(DateTime, default=datetime.utcnow, - onupdate=datetime.utcnow, nullable=False, index=True) + updated_at = Column( + DateTime, + default=datetime.utcnow, + onupdate=datetime.utcnow, + nullable=False, + index=True, + ) class DeletedAtMixin(object): @@ -163,5 +168,6 @@ class HasRunState(ABCMixin): sync_enabled = abc.abstractproperty() # Database-level tracking of whether the sync should be running. - sync_should_run = Column(Boolean, default=True, nullable=False, - server_default=sql.expression.true()) + sync_should_run = Column( + Boolean, default=True, nullable=False, server_default=sql.expression.true() + ) diff --git a/inbox/models/namespace.py b/inbox/models/namespace.py index ac106c151..9a24974ae 100644 --- a/inbox/models/namespace.py +++ b/inbox/models/namespace.py @@ -7,22 +7,27 @@ class Namespace(MailSyncBase, HasPublicID, UpdatedAtMixin, DeletedAtMixin): - account_id = Column(BigInteger, - ForeignKey('account.id', ondelete='CASCADE'), - nullable=True) - account = relationship('Account', - lazy='joined', - single_parent=True, - backref=backref('namespace', - uselist=False, - lazy='joined', - passive_deletes=True, - cascade='all,delete-orphan'), - uselist=False) + account_id = Column( + BigInteger, ForeignKey("account.id", ondelete="CASCADE"), nullable=True + ) + account = relationship( + "Account", + lazy="joined", + single_parent=True, + backref=backref( + "namespace", + uselist=False, + lazy="joined", + passive_deletes=True, + cascade="all,delete-orphan", + ), + uselist=False, + ) def __str__(self): - return "{} <{}>".format(self.public_id, self.account.email_address if - self.account else '') + return "{} <{}>".format( + self.public_id, self.account.email_address if self.account else "" + ) @property def email_address(self): @@ -32,12 +37,11 @@ def email_address(self): @classmethod def get(cls, id_, session): q = bakery(lambda session: session.query(cls)) - q += lambda q: q.filter(cls.id == bindparam('id_')) + q += lambda q: q.filter(cls.id == bindparam("id_")) return q(session).params(id_=id_).first() @classmethod def from_public_id(cls, public_id, db_session): q = bakery(lambda session: session.query(Namespace)) - q += lambda q: q.filter( - Namespace.public_id == bindparam('public_id')) + q += lambda q: q.filter(Namespace.public_id == bindparam("public_id")) return q(db_session).params(public_id=public_id).one() diff --git a/inbox/models/roles.py b/inbox/models/roles.py index 05b367220..4eb3f38ee 100644 --- a/inbox/models/roles.py +++ b/inbox/models/roles.py @@ -3,6 +3,7 @@ from sqlalchemy import Column, Integer, String from nylas.logging import get_logger + log = get_logger() from inbox.config import config from inbox.util import blockstore @@ -10,31 +11,32 @@ from inbox.util.stats import statsd_client # TODO: store AWS credentials in a better way. 
-STORE_MSG_ON_S3 = config.get('STORE_MESSAGES_ON_S3', None) -STORE_MESSAGE_ATTACHMENTS = config.get('STORE_MESSAGE_ATTACHMENTS', True) +STORE_MSG_ON_S3 = config.get("STORE_MESSAGES_ON_S3", None) +STORE_MESSAGE_ATTACHMENTS = config.get("STORE_MESSAGE_ATTACHMENTS", True) class Blob(object): """ A blob of data that can be saved to local or remote (S3) disk. """ + size = Column(Integer, default=0) data_sha256 = Column(String(64)) @property def data(self): if self.size == 0: - log.warning('Block size is 0') - return '' - elif hasattr(self, '_data'): + log.warning("Block size is 0") + return "" + elif hasattr(self, "_data"): # On initial download we temporarily store data in memory value = self._data else: value = blockstore.get_from_blockstore(self.data_sha256) if value is None: - log.warning("Couldn't find data on S3 for block", - sha_hash=self.data_sha256) + log.warning("Couldn't find data on S3 for block", sha_hash=self.data_sha256) from inbox.models.block import Block + if isinstance(self, Block): if self.parts: # This block is an attachment of a message that was @@ -44,20 +46,23 @@ def data(self): message = self.parts[0].message # only grab one account = message.namespace.account - statsd_string = 'api.direct_fetching.{}.{}'.format( - account.provider, account.id) + statsd_string = "api.direct_fetching.{}.{}".format( + account.provider, account.id + ) # Try to fetch the message from S3 first. - with statsd_client.timer('{}.blockstore_latency'.format( - statsd_string)): + with statsd_client.timer( + "{}.blockstore_latency".format(statsd_string) + ): raw_mime = blockstore.get_from_blockstore(message.data_sha256) # If it's not there, get it from the provider. if raw_mime is None: - statsd_client.incr('{}.cache_misses'.format(statsd_string)) + statsd_client.incr("{}.cache_misses".format(statsd_string)) - with statsd_client.timer('{}.provider_latency'.format( - statsd_string)): + with statsd_client.timer( + "{}.provider_latency".format(statsd_string) + ): raw_mime = get_raw_from_provider(message) msg_sha256 = sha256(raw_mime).hexdigest() @@ -65,50 +70,65 @@ def data(self): # Cache the raw message in the blockstore so that # we don't have to fetch it over and over. - with statsd_client.timer('{}.blockstore_save_latency'.format( - statsd_string)): + with statsd_client.timer( + "{}.blockstore_save_latency".format(statsd_string) + ): blockstore.save_to_blockstore(msg_sha256, raw_mime) else: # We found it in the blockstore --- report this. - statsd_client.incr('{}.cache_hits'.format(statsd_string)) + statsd_client.incr("{}.cache_hits".format(statsd_string)) # If we couldn't find it there, give up. if raw_mime is None: - log.error("Don't have raw message for hash {}" - .format(message.data_sha256)) + log.error( + "Don't have raw message for hash {}".format( + message.data_sha256 + ) + ) return None parsed = mime.from_string(raw_mime) if parsed is not None: for mimepart in parsed.walk( - with_self=parsed.content_type.is_singlepart()): + with_self=parsed.content_type.is_singlepart() + ): if mimepart.content_type.is_multipart(): continue # TODO should we store relations? data = mimepart.body if isinstance(data, unicode): - data = data.encode('utf-8', 'strict') + data = data.encode("utf-8", "strict") if data is None: continue # Found it! 
if sha256(data).hexdigest() == self.data_sha256: - log.info('Found subpart with hash {}'.format( - self.data_sha256)) - - with statsd_client.timer('{}.blockstore_save_latency'.format( - statsd_string)): - blockstore.save_to_blockstore(self.data_sha256, data) + log.info( + "Found subpart with hash {}".format( + self.data_sha256 + ) + ) + + with statsd_client.timer( + "{}.blockstore_save_latency".format(statsd_string) + ): + blockstore.save_to_blockstore( + self.data_sha256, data + ) return data - log.error("Couldn't find the attachment in the raw message", message_id=message.id) + log.error( + "Couldn't find the attachment in the raw message", + message_id=message.id, + ) - log.error('No data returned!') + log.error("No data returned!") return value - assert self.data_sha256 == sha256(value).hexdigest(), \ - "Returned data doesn't match stored hash!" + assert ( + self.data_sha256 == sha256(value).hexdigest() + ), "Returned data doesn't match stored hash!" return value @data.setter @@ -124,7 +144,7 @@ def data(self, value): assert self.data_sha256 if len(value) == 0: - log.warning('Not saving 0-length data blob') + log.warning("Not saving 0-length data blob") return if STORE_MESSAGE_ATTACHMENTS: diff --git a/inbox/models/search.py b/inbox/models/search.py index ab7708f98..7b90d03c2 100644 --- a/inbox/models/search.py +++ b/inbox/models/search.py @@ -5,12 +5,11 @@ from inbox.models.transaction import Transaction -class ContactSearchIndexCursor(MailSyncBase, UpdatedAtMixin, - DeletedAtMixin): +class ContactSearchIndexCursor(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): """ Store the id of the last Transaction indexed into CloudSearch. Is namespace-agnostic. """ - transaction_id = Column(ForeignKey(Transaction.id), nullable=True, - index=True) + + transaction_id = Column(ForeignKey(Transaction.id), nullable=True, index=True) diff --git a/inbox/models/secret.py b/inbox/models/secret.py index f96f7d0b9..7df3217b7 100644 --- a/inbox/models/secret.py +++ b/inbox/models/secret.py @@ -9,20 +9,21 @@ class Secret(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): """Simple local secrets table.""" + _secret = Column(BLOB, nullable=False) # Type of secret - type = Column(Enum('password', 'token'), nullable=False) + type = Column(Enum("password", "token"), nullable=False) # Scheme used - encryption_scheme = Column(Integer, server_default='0', nullable=False) + encryption_scheme = Column(Integer, server_default="0", nullable=False) @property def secret(self): - with get_decryption_oracle('SECRET_ENCRYPTION_KEY') as d_oracle: + with get_decryption_oracle("SECRET_ENCRYPTION_KEY") as d_oracle: return d_oracle.decrypt( - self._secret, - encryption_scheme=self.encryption_scheme) + self._secret, encryption_scheme=self.encryption_scheme + ) @secret.setter def secret(self, plaintext): @@ -32,14 +33,14 @@ def secret(self, plaintext): """ if not isinstance(plaintext, bytes): - raise TypeError('Invalid secret') + raise TypeError("Invalid secret") - with get_encryption_oracle('SECRET_ENCRYPTION_KEY') as e_oracle: + with get_encryption_oracle("SECRET_ENCRYPTION_KEY") as e_oracle: self._secret, self.encryption_scheme = e_oracle.encrypt(plaintext) - @validates('type') + @validates("type") def validate_type(self, k, type): - if type != 'password' and type != 'token': - raise TypeError('Invalid secret type: must be password or token') + if type != "password" and type != "token": + raise TypeError("Invalid secret type: must be password or token") return type diff --git a/inbox/models/session.py b/inbox/models/session.py index 
fbbae7e7f..517056fd6 100644 --- a/inbox/models/session.py +++ b/inbox/models/session.py @@ -11,6 +11,7 @@ from inbox.ignition import engine_manager from inbox.util.stats import statsd_client from nylas.logging import get_logger, find_first_app_frame_and_name + log = get_logger() @@ -28,8 +29,7 @@ def two_phase_session(engine_map, versioned=True): versioned: bool """ - session = Session(binds=engine_map, twophase=True, autoflush=True, - autocommit=False) + session = Session(binds=engine_map, twophase=True, autoflush=True, autocommit=False) if versioned: session = configure_versioning(session) # TODO[k]: Metrics for transaction latencies! @@ -46,14 +46,18 @@ def new_session(engine, versioned=True): # Make statsd calls for transaction times transaction_start_map = {} frame, modname = find_first_app_frame_and_name( - ignores=['sqlalchemy', 'inbox.models.session', 'nylas.logging', - 'contextlib']) + ignores=[ + "sqlalchemy", + "inbox.models.session", + "nylas.logging", + "contextlib", + ] + ) funcname = frame.f_code.co_name modname = modname.replace(".", "-") - metric_name = 'db.{}.{}.{}'.format(engine.url.database, modname, - funcname) + metric_name = "db.{}.{}.{}".format(engine.url.database, modname, funcname) - @event.listens_for(session, 'after_begin') + @event.listens_for(session, "after_begin") def after_begin(session, transaction, connection): # It's okay to key on the session object here, because each session # binds to only one engine/connection. If this changes in the @@ -61,8 +65,8 @@ def after_begin(session, transaction, connection): # we'll have to get more sophisticated. transaction_start_map[session] = time.time() - @event.listens_for(session, 'after_commit') - @event.listens_for(session, 'after_rollback') + @event.listens_for(session, "after_commit") + @event.listens_for(session, "after_rollback") def end(session): start_time = transaction_start_map.get(session) if not start_time: @@ -72,28 +76,34 @@ def end(session): t = time.time() latency = int((t - start_time) * 1000) - if config.get('ENABLE_DB_TXN_METRICS', False): + if config.get("ENABLE_DB_TXN_METRICS", False): statsd_client.timing(metric_name, latency) statsd_client.incr(metric_name) if latency > MAX_SANE_TRX_TIME_MS: - log.warning('Long transaction', latency=latency, - modname=modname, funcname=funcname) + log.warning( + "Long transaction", + latency=latency, + modname=modname, + funcname=funcname, + ) return session def configure_versioning(session): from inbox.models.transaction import ( - create_revisions, propagate_changes, increment_versions, - bump_redis_txn_id + create_revisions, + propagate_changes, + increment_versions, + bump_redis_txn_id, ) - @event.listens_for(session, 'before_flush') + @event.listens_for(session, "before_flush") def before_flush(session, flush_context, instances): propagate_changes(session) increment_versions(session) - @event.listens_for(session, 'after_flush') + @event.listens_for(session, "after_flush") def after_flush(session, flush_context): """ Hook to log revision snapshots. 
Must be post-flush in order to @@ -108,7 +118,7 @@ def after_flush(session, flush_context): try: bump_redis_txn_id(session) except Exception: - log.exception('bump_redis_txn_id exception') + log.exception("bump_redis_txn_id exception") pass create_revisions(session) @@ -146,15 +156,16 @@ def session_scope(id_, versioned=True): session = new_session(engine, versioned) try: - if config.get('LOG_DB_SESSIONS'): + if config.get("LOG_DB_SESSIONS"): start_time = time.time() calling_frame = sys._getframe().f_back.f_back - call_loc = '{}:{}'.format(calling_frame.f_globals.get('__name__'), - calling_frame.f_lineno) - logger = log.bind(engine_id=id(engine), - session_id=id(session), call_loc=call_loc) - logger.info('creating db_session', - sessions_used=engine.pool.checkedout()) + call_loc = "{}:{}".format( + calling_frame.f_globals.get("__name__"), calling_frame.f_lineno + ) + logger = log.bind( + engine_id=id(engine), session_id=id(session), call_loc=call_loc + ) + logger.info("creating db_session", sessions_used=engine.pool.checkedout()) yield session session.commit() except BaseException as exc: @@ -162,14 +173,18 @@ def session_scope(id_, versioned=True): session.rollback() raise except OperationalError: - log.warn('Encountered OperationalError on rollback', - original_exception=type(exc)) + log.warn( + "Encountered OperationalError on rollback", original_exception=type(exc) + ) raise exc finally: - if config.get('LOG_DB_SESSIONS'): + if config.get("LOG_DB_SESSIONS"): lifetime = time.time() - start_time - logger.info('closing db_session', lifetime=lifetime, - sessions_used=engine.pool.checkedout()) + logger.info( + "closing db_session", + lifetime=lifetime, + sessions_used=engine.pool.checkedout(), + ) session.close() @@ -207,7 +222,8 @@ def global_session_scope(): shard_chooser=shard_chooser, id_chooser=id_chooser, query_chooser=query_chooser, - shards=shards) + shards=shards, + ) # STOPSHIP(emfree): need instrumentation and proper exception handling # here. try: diff --git a/inbox/models/thread.py b/inbox/models/thread.py index 590583a77..88a083079 100644 --- a/inbox/models/thread.py +++ b/inbox/models/thread.py @@ -3,20 +3,29 @@ from collections import defaultdict from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Index -from sqlalchemy.orm import (relationship, backref, validates, object_session, - subqueryload) +from sqlalchemy.orm import ( + relationship, + backref, + validates, + object_session, + subqueryload, +) from nylas.logging import get_logger + log = get_logger() -from inbox.models.mixins import (HasPublicID, HasRevisions, UpdatedAtMixin, - DeletedAtMixin) +from inbox.models.mixins import ( + HasPublicID, + HasRevisions, + UpdatedAtMixin, + DeletedAtMixin, +) from inbox.models.base import MailSyncBase from inbox.models.namespace import Namespace from inbox.util.misc import cleanup_subject -class Thread(MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin, - DeletedAtMixin): +class Thread(MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin, DeletedAtMixin): """ Threads are a first-class object in Nylas. This thread aggregates the relevant thread metadata from elsewhere so that clients can only @@ -28,13 +37,15 @@ class Thread(MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin, don't query based on folder! 
""" - API_OBJECT_NAME = 'thread' - namespace_id = Column(ForeignKey(Namespace.id, ondelete='CASCADE'), - nullable=False) - namespace = relationship('Namespace', - backref=backref('threads', passive_deletes=True), - load_on_pending=True) + API_OBJECT_NAME = "thread" + + namespace_id = Column(ForeignKey(Namespace.id, ondelete="CASCADE"), nullable=False) + namespace = relationship( + "Namespace", + backref=backref("threads", passive_deletes=True), + load_on_pending=True, + ) subject = Column(String(255), nullable=True) # a column with the cleaned up version of a subject string, to speed up @@ -42,15 +53,15 @@ class Thread(MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin, _cleaned_subject = Column(String(255), nullable=True) subjectdate = Column(DateTime, nullable=False, index=True) recentdate = Column(DateTime, nullable=False, index=True) - snippet = Column(String(191), nullable=True, default='') - version = Column(Integer, nullable=True, server_default='0') + snippet = Column(String(191), nullable=True, default="") + version = Column(Integer, nullable=True, server_default="0") - @validates('subject') + @validates("subject") def compute_cleaned_up_subject(self, key, value): self._cleaned_subject = cleanup_subject(value) return value - @validates('messages') + @validates("messages") def update_from_message(self, k, message): with object_session(self).no_autoflush: if message.is_draft: @@ -72,18 +83,24 @@ def update_from_message(self, k, message): def most_recent_received_date(self): received_recent_date = None for m in self.messages: - if all(category.name != "sent" for category in m.categories if category is not None) and \ - not m.is_draft and not m.is_sent: - if not received_recent_date or \ - m.received_date > received_recent_date: + if ( + all( + category.name != "sent" + for category in m.categories + if category is not None + ) + and not m.is_draft + and not m.is_sent + ): + if not received_recent_date or m.received_date > received_recent_date: received_recent_date = m.received_date if not received_recent_date: - sorted_messages = sorted(self.messages, - key=lambda m: m.received_date) + sorted_messages = sorted(self.messages, key=lambda m: m.received_date) if not sorted_messages: - log.warning('Thread does not have associated messages', - thread_id=self.id) + log.warning( + "Thread does not have associated messages", thread_id=self.id + ) return None received_recent_date = sorted_messages[-1].received_date @@ -96,11 +113,11 @@ def most_recent_sent_date(self): not. Clients can use this to properly sort the Sent view. """ sent_recent_date = None - sorted_messages = sorted(self.messages, - key=lambda m: m.received_date, reverse=True) + sorted_messages = sorted( + self.messages, key=lambda m: m.received_date, reverse=True + ) for m in sorted_messages: - if "sent" in [c.name for c in m.categories] or \ - (m.is_draft and m.is_sent): + if "sent" in [c.name for c in m.categories] or (m.is_draft and m.is_sent): sent_recent_date = m.received_date return sent_recent_date @@ -118,7 +135,7 @@ def has_attachments(self): @property def versioned_relationships(self): - return ['messages'] + return ["messages"] @property def participants(self): @@ -134,13 +151,14 @@ def participants(self): if m.is_draft: # Don't use drafts to compute participants. 
                continue
-            for phrase, address in itertools.chain(m.from_addr, m.to_addr,
-                                                   m.cc_addr, m.bcc_addr):
+            for phrase, address in itertools.chain(
+                m.from_addr, m.to_addr, m.cc_addr, m.bcc_addr
+            ):
                 deduped_participants[address].add(phrase.strip())
         p = []
         for address, phrases in deduped_participants.iteritems():
             for phrase in phrases:
-                if phrase != '' or len(phrases) == 1:
+                if phrase != "" or len(phrases) == 1:
                     p.append((phrase, address))
         return p
 
@@ -169,20 +187,35 @@ def categories(self):
 
     @classmethod
     def api_loading_options(cls, expand=False):
-        message_columns = ['public_id', 'is_draft', 'from_addr', 'to_addr',
-                           'cc_addr', 'bcc_addr', 'is_read', 'is_starred',
-                           'received_date', 'is_sent']
+        message_columns = [
+            "public_id",
+            "is_draft",
+            "from_addr",
+            "to_addr",
+            "cc_addr",
+            "bcc_addr",
+            "is_read",
+            "is_starred",
+            "received_date",
+            "is_sent",
+        ]
         if expand:
-            message_columns += ['subject', 'snippet', 'version', 'from_addr',
-                                'to_addr', 'cc_addr', 'bcc_addr', 'reply_to']
+            message_columns += [
+                "subject",
+                "snippet",
+                "version",
+                "from_addr",
+                "to_addr",
+                "cc_addr",
+                "bcc_addr",
+                "reply_to",
+            ]
         return (
-            subqueryload(Thread.messages).
-            load_only(*message_columns)
-            .joinedload('messagecategories')
-            .joinedload('category'),
             subqueryload(Thread.messages)
-            .joinedload('parts')
-            .joinedload('block')
+            .load_only(*message_columns)
+            .joinedload("messagecategories")
+            .joinedload("category"),
+            subqueryload(Thread.messages).joinedload("parts").joinedload("block"),
         )
 
     def mark_for_deletion(self):
@@ -193,18 +226,21 @@ def mark_for_deletion(self):
         """
         self.deleted_at = datetime.datetime.utcnow()
 
-    discriminator = Column('type', String(16))
-    __mapper_args__ = {'polymorphic_on': discriminator}
+    discriminator = Column("type", String(16))
+    __mapper_args__ = {"polymorphic_on": discriminator}
+
 
 # Need to explicitly specify the index length for MySQL 5.6, because the
 # subject column is too long to be fully indexed with utf8mb4 collation.
-Index('ix_thread_subject', Thread.subject, mysql_length=80)
+Index("ix_thread_subject", Thread.subject, mysql_length=80)
 
 # For async deletion.
-Index('ix_thread_namespace_id_deleted_at', Thread.namespace_id,
-      Thread.deleted_at)
+Index("ix_thread_namespace_id_deleted_at", Thread.namespace_id, Thread.deleted_at)
 
 # For fetch_corresponding_thread.
-Index('ix_namespace_id__cleaned_subject',
-      Thread.namespace_id, Thread._cleaned_subject,
-      mysql_length={'_cleaned_subject': 80})
+Index(
+    "ix_namespace_id__cleaned_subject",
+    Thread.namespace_id,
+    Thread._cleaned_subject,
+    mysql_length={"_cleaned_subject": 80},
+)
diff --git a/inbox/models/transaction.py b/inbox/models/transaction.py
index 2e4dfc6e9..c4e859ca9 100644
--- a/inbox/models/transaction.py
+++ b/inbox/models/transaction.py
@@ -1,6 +1,5 @@
 import redis
-from sqlalchemy import (Column, BigInteger, String, Index, Enum,
-                        inspect, func)
+from sqlalchemy import Column, BigInteger, String, Index, Enum, inspect, func
 from sqlalchemy.orm import relationship
 
 from inbox.config import config
@@ -10,63 +9,79 @@
 from inbox.models.mixins import HasPublicID, HasRevisions
 from inbox.models.namespace import Namespace
 
-TXN_REDIS_KEY = 'latest-txn-by-namespace'
+TXN_REDIS_KEY = "latest-txn-by-namespace"
 
 
 class Transaction(MailSyncBase, HasPublicID):
     """ Transactional log to enable client syncing. """
+
     # Do delete transactions if their associated namespace is deleted.
namespace_id = Column(BigInteger, nullable=False) namespace = relationship( Namespace, - primaryjoin='foreign(Transaction.namespace_id) == remote(Namespace.id)') + primaryjoin="foreign(Transaction.namespace_id) == remote(Namespace.id)", + ) object_type = Column(String(20), nullable=False) record_id = Column(BigInteger, nullable=False, index=True) object_public_id = Column(String(191), nullable=False, index=True) - command = Column(Enum('insert', 'update', 'delete'), nullable=False) + command = Column(Enum("insert", "update", "delete"), nullable=False) + -Index('object_type_record_id', Transaction.object_type, Transaction.record_id) -Index('namespace_id_created_at', Transaction.namespace_id, - Transaction.created_at) -Index('ix_transaction_namespace_id_object_type_id', Transaction.namespace_id, - Transaction.object_type, Transaction.id) +Index("object_type_record_id", Transaction.object_type, Transaction.record_id) +Index("namespace_id_created_at", Transaction.namespace_id, Transaction.created_at) +Index( + "ix_transaction_namespace_id_object_type_id", + Transaction.namespace_id, + Transaction.object_type, + Transaction.id, +) class AccountTransaction(MailSyncBase, HasPublicID): namespace_id = Column(BigInteger, index=True, nullable=False) namespace = relationship( Namespace, - primaryjoin='foreign(AccountTransaction.namespace_id) == remote(Namespace.id)') + primaryjoin="foreign(AccountTransaction.namespace_id) == remote(Namespace.id)", + ) object_type = Column(String(20), nullable=False) record_id = Column(BigInteger, nullable=False, index=True) object_public_id = Column(String(191), nullable=False, index=True) - command = Column(Enum('insert', 'update', 'delete'), nullable=False) + command = Column(Enum("insert", "update", "delete"), nullable=False) + -Index('ix_accounttransaction_table_name', Transaction.object_type) -Index('ix_accounttransaction_command', Transaction.command) -Index('ix_accounttransaction_object_type_record_id', - AccountTransaction.object_type, AccountTransaction.record_id) -Index('ix_accounttransaction_namespace_id_created_at', - AccountTransaction.namespace_id, AccountTransaction.created_at) +Index("ix_accounttransaction_table_name", Transaction.object_type) +Index("ix_accounttransaction_command", Transaction.command) +Index( + "ix_accounttransaction_object_type_record_id", + AccountTransaction.object_type, + AccountTransaction.record_id, +) +Index( + "ix_accounttransaction_namespace_id_created_at", + AccountTransaction.namespace_id, + AccountTransaction.created_at, +) def is_dirty(session, obj): if obj in session.dirty and obj.has_versioned_changes(): return True - if hasattr(obj, 'dirty') and getattr(obj, 'dirty'): + if hasattr(obj, "dirty") and getattr(obj, "dirty"): return True return False def create_revisions(session): for obj in session: - if (not isinstance(obj, HasRevisions) or - obj.should_suppress_transaction_creation): + if ( + not isinstance(obj, HasRevisions) + or obj.should_suppress_transaction_creation + ): continue if obj in session.new: - create_revision(obj, session, 'insert') + create_revision(obj, session, "insert") elif is_dirty(session, obj): # Need to unmark the object as 'dirty' to prevent an infinite loop # (the pre-flush hook may be called again before a commit @@ -74,48 +89,54 @@ def create_revisions(session): # in that they are no longer present in the set during the next # invocation of the pre-flush hook. 
obj.dirty = False - create_revision(obj, session, 'update') + create_revision(obj, session, "update") elif obj in session.deleted: - create_revision(obj, session, 'delete') + create_revision(obj, session, "delete") def create_revision(obj, session, revision_type): - assert revision_type in ('insert', 'update', 'delete') + assert revision_type in ("insert", "update", "delete") # If available use object dates for the transaction timestamp # otherwise use DB time. This is needed because CURRENT_TIMESTAMP # changes during a transaction which can lead to inconsistencies # between object timestamps and the transaction timestamps. - if revision_type == 'delete': - created_at = getattr(obj, 'deleted_at', None) + if revision_type == "delete": + created_at = getattr(obj, "deleted_at", None) # Sometimes categories are deleted explicitly which leaves # their deleted_at default value, EPOCH, when the # transaction is created. if created_at == EPOCH: created_at = func.now() else: - created_at = getattr(obj, 'updated_at', None) + created_at = getattr(obj, "updated_at", None) if created_at is None: created_at = func.now() # Always create a Transaction record -- this maintains a total ordering over # all events for an account. - revision = Transaction(command=revision_type, record_id=obj.id, - object_type=obj.API_OBJECT_NAME, - object_public_id=obj.public_id, - namespace_id=obj.namespace.id, - created_at=created_at) + revision = Transaction( + command=revision_type, + record_id=obj.id, + object_type=obj.API_OBJECT_NAME, + object_public_id=obj.public_id, + namespace_id=obj.namespace.id, + created_at=created_at, + ) session.add(revision) # Additionally, record account-level events in the AccountTransaction -- # this is an optimization needed so these sparse events can be still be # retrieved efficiently for webhooks etc. - if obj.API_OBJECT_NAME == 'account': - revision = AccountTransaction(command=revision_type, record_id=obj.id, - object_type=obj.API_OBJECT_NAME, - object_public_id=obj.public_id, - namespace_id=obj.namespace.id) + if obj.API_OBJECT_NAME == "account": + revision = AccountTransaction( + command=revision_type, + record_id=obj.id, + object_type=obj.API_OBJECT_NAME, + object_public_id=obj.public_id, + namespace_id=obj.namespace.id, + ) session.add(revision) @@ -128,6 +149,7 @@ def propagate_changes(session): changes, the message.thread is marked as dirty. """ from inbox.models.message import Message + for obj in session.dirty: if isinstance(obj, Message): obj_state = inspect(obj) @@ -140,6 +162,7 @@ def propagate_changes(session): def increment_versions(session): from inbox.models.thread import Thread from inbox.models.metadata import Metadata + for obj in session: if isinstance(obj, Thread) and is_dirty(session, obj): # This issues SQL for an atomic increment. @@ -153,6 +176,7 @@ def bump_redis_txn_id(session): """ Called from post-flush hook to bump the latest id stored in redis """ + def get_namespace_public_id(namespace_id): # the namespace was just used to create the transaction, so it should # still be in the session. If not, a sql statement will be emitted. 
@@ -163,11 +187,7 @@ def get_namespace_public_id(namespace_id): mappings = { get_namespace_public_id(obj.namespace_id): obj.id for obj in session - if ( - obj in session.new - and isinstance(obj, Transaction) - and obj.id - ) + if (obj in session.new and isinstance(obj, Transaction) and obj.id) } if mappings: redis_txn.zadd(TXN_REDIS_KEY, **mappings) diff --git a/inbox/models/util.py b/inbox/models/util.py index 247c8abfe..04afcdb32 100644 --- a/inbox/models/util.py +++ b/inbox/models/util.py @@ -29,7 +29,7 @@ # Use a single throttle instance for rate limiting. Limits will be applied # across all db shards (same approach as the original check_throttle()). -bulk_throttle = limitlion.throttle_wait('bulk', rps=.75, window=5) +bulk_throttle = limitlion.throttle_wait("bulk", rps=0.75, window=5) def reconcile_message(new_message, session): @@ -46,15 +46,21 @@ def reconcile_message(new_message, session): # try to reconcile using other means q = session.query(Message).filter( Message.namespace_id == new_message.namespace_id, - Message.data_sha256 == new_message.data_sha256) + Message.data_sha256 == new_message.data_sha256, + ) return q.first() - if '-' not in new_message.nylas_uid: + if "-" not in new_message.nylas_uid: # Old X-Inbox-Id format; use the old reconciliation strategy. - existing_message = session.query(Message).filter( - Message.namespace_id == new_message.namespace_id, - Message.nylas_uid == new_message.nylas_uid, - Message.is_created).first() + existing_message = ( + session.query(Message) + .filter( + Message.namespace_id == new_message.namespace_id, + Message.nylas_uid == new_message.nylas_uid, + Message.is_created, + ) + .first() + ) version = None else: # new_message has the new X-Inbox-Id format - @@ -62,11 +68,16 @@ def reconcile_message(new_message, session): # * not commit a new, separate Message object for it # * not update the current draft with the old header values in the code # below. - expected_public_id, version = new_message.nylas_uid.split('-') - existing_message = session.query(Message).filter( - Message.namespace_id == new_message.namespace_id, - Message.public_id == expected_public_id, - Message.is_created).first() + expected_public_id, version = new_message.nylas_uid.split("-") + existing_message = ( + session.query(Message) + .filter( + Message.namespace_id == new_message.namespace_id, + Message.public_id == expected_public_id, + Message.is_created, + ) + .first() + ) if existing_message is None: return None @@ -87,29 +98,40 @@ def transaction_objects(): models that implement the HasRevisions mixin). 
""" - from inbox.models import (Calendar, Contact, Message, Event, Block, - Category, Thread, Metadata) + from inbox.models import ( + Calendar, + Contact, + Message, + Event, + Block, + Category, + Thread, + Metadata, + ) return { - 'calendar': Calendar, - 'contact': Contact, - 'draft': Message, - 'event': Event, - 'file': Block, - 'message': Message, - 'thread': Thread, - 'label': Category, - 'folder': Category, - 'account': Account, - 'metadata': Metadata + "calendar": Calendar, + "contact": Contact, + "draft": Message, + "event": Event, + "file": Block, + "message": Message, + "thread": Thread, + "label": Category, + "folder": Category, + "account": Account, + "metadata": Metadata, } def get_accounts_to_delete(shard_id): ids_to_delete = [] with session_scope_by_shard_id(shard_id) as db_session: - ids_to_delete = [(acc.id, acc.namespace.id) for acc - in db_session.query(Account) if acc.is_marked_for_deletion] + ids_to_delete = [ + (acc.id, acc.namespace.id) + for acc in db_session.query(Account) + if acc.is_marked_for_deletion + ] return ids_to_delete @@ -117,8 +139,7 @@ class AccountDeletionErrror(Exception): pass -def batch_delete_namespaces(ids_to_delete, throttle=False, - dry_run=False): +def batch_delete_namespaces(ids_to_delete, throttle=False, dry_run=False): start = time.time() @@ -126,20 +147,21 @@ def batch_delete_namespaces(ids_to_delete, throttle=False, for account_id, namespace_id in ids_to_delete: # try: try: - delete_namespace(namespace_id, - throttle=throttle, - dry_run=dry_run) + delete_namespace(namespace_id, throttle=throttle, dry_run=dry_run) except AccountDeletionErrror as e: - log.critical('AccountDeletionErrror', error_message=e.message) + log.critical("AccountDeletionErrror", error_message=e.message) except Exception: log_uncaught_errors(log, account_id=account_id) deleted_count += 1 end = time.time() - log.info('All data deleted successfully for ids', - ids_to_delete=ids_to_delete, - time=end - start, count=deleted_count) + log.info( + "All data deleted successfully for ids", + ids_to_delete=ids_to_delete, + time=end - start, + count=deleted_count, + ) def delete_namespace(namespace_id, throttle=False, dry_run=False): @@ -155,19 +177,24 @@ def delete_namespace(namespace_id, throttle=False, dry_run=False): with session_scope(namespace_id) as db_session: try: - account = db_session.query(Account).join(Namespace).filter(Namespace.id == namespace_id).one() + account = ( + db_session.query(Account) + .join(Namespace) + .filter(Namespace.id == namespace_id) + .one() + ) except NoResultFound: - raise AccountDeletionErrror( - 'Could not find account in database') + raise AccountDeletionErrror("Could not find account in database") if not account.is_marked_for_deletion: raise AccountDeletionErrror( - 'Account is_marked_for_deletion is False. ' - 'Change this to proceed with deletion.') + "Account is_marked_for_deletion is False. " + "Change this to proceed with deletion." + ) account_id = account.id account_discriminator = account.discriminator - log.info('Deleting account', account_id=account_id) + log.info("Deleting account", account_id=account_id) start_time = time.time() # These folders are used to configure batch deletion in chunks for @@ -177,27 +204,37 @@ def delete_namespace(namespace_id, throttle=False, dry_run=False): # we include here for simplicity anyway. 
filters = OrderedDict() - for table in ['message', 'block', 'thread', 'transaction', 'actionlog', - 'event', 'contact', 'dataprocessingcache']: - filters[table] = ('namespace_id', namespace_id) - - if account_discriminator == 'easaccount': - filters['easuid'] = ('easaccount_id', account_id) - filters['easfoldersyncstatus'] = ('account_id', account_id) + for table in [ + "message", + "block", + "thread", + "transaction", + "actionlog", + "event", + "contact", + "dataprocessingcache", + ]: + filters[table] = ("namespace_id", namespace_id) + + if account_discriminator == "easaccount": + filters["easuid"] = ("easaccount_id", account_id) + filters["easfoldersyncstatus"] = ("account_id", account_id) else: - filters['imapuid'] = ('account_id', account_id) - filters['imapfoldersyncstatus'] = ('account_id', account_id) - filters['imapfolderinfo'] = ('account_id', account_id) + filters["imapuid"] = ("account_id", account_id) + filters["imapfoldersyncstatus"] = ("account_id", account_id) + filters["imapfolderinfo"] = ("account_id", account_id) from inbox.ignition import engine_manager + # Bypass the ORM for performant bulk deletion; # we do /not/ want Transaction records created for these deletions, # so this is okay. engine = engine_manager.get_for_id(namespace_id) for cls in filters: - _batch_delete(engine, cls, filters[cls], account_id, throttle=throttle, - dry_run=dry_run) + _batch_delete( + engine, cls, filters[cls], account_id, throttle=throttle, dry_run=dry_run + ) # Use a single delete for the other tables. Rows from tables which contain # cascade-deleted foreign keys to other tables deleted here (or above) @@ -206,17 +243,17 @@ def delete_namespace(namespace_id, throttle=False, dry_run=False): # # NOTE: Namespace, Account are deleted at the end too. - query = 'DELETE FROM {} WHERE {}={};' + query = "DELETE FROM {} WHERE {}={};" filters = OrderedDict() - for table in ('category', 'calendar'): - filters[table] = ('namespace_id', namespace_id) - for table in ('folder', 'label'): - filters[table] = ('account_id', account_id) - filters['namespace'] = ('id', namespace_id) + for table in ("category", "calendar"): + filters[table] = ("namespace_id", namespace_id) + for table in ("folder", "label"): + filters[table] = ("account_id", account_id) + filters["namespace"] = ("id", namespace_id) for table, (column, id_) in filters.iteritems(): - log.info('Performing bulk deletion', table=table) + log.info("Performing bulk deletion", table=table) start = time.time() if throttle: @@ -228,7 +265,7 @@ def delete_namespace(namespace_id, throttle=False, dry_run=False): log.debug(query.format(table, column, id_)) end = time.time() - log.info('Completed bulk deletion', table=table, time=end - start) + log.info("Completed bulk deletion", table=table, time=end - start) # Delete the account object manually to get rid of the various objects # associated with it (e.g: secrets, tokens, etc.) 
@@ -239,46 +276,51 @@ def delete_namespace(namespace_id, throttle=False, dry_run=False): db_session.commit() # Delete liveness data ( heartbeats) - log.debug('Deleting liveness data', account_id=account_id) + log.debug("Deleting liveness data", account_id=account_id) clear_heartbeat_status(account_id) - statsd_client.timing('mailsync.account_deletion.queue.deleted', - time.time() - start_time) + statsd_client.timing( + "mailsync.account_deletion.queue.deleted", time.time() - start_time + ) -def _batch_delete(engine, table, column_id_filters, account_id, throttle=False, - dry_run=False): +def _batch_delete( + engine, table, column_id_filters, account_id, throttle=False, dry_run=False +): (column, id_) = column_id_filters count = engine.execute( - 'SELECT COUNT(*) FROM {} WHERE {}={};'.format(table, column, id_)).\ - scalar() + "SELECT COUNT(*) FROM {} WHERE {}={};".format(table, column, id_) + ).scalar() if count == 0: - log.info('Completed batch deletion', table=table) + log.info("Completed batch deletion", table=table) return batches = int(math.ceil(float(count) / CHUNK_SIZE)) - log.info('Starting batch deletion', table=table, count=count, - batches=batches) + log.info("Starting batch deletion", table=table, count=count, batches=batches) start = time.time() - if table in ('message', 'block'): - query = '' + if table in ("message", "block"): + query = "" else: - query = 'DELETE FROM {} WHERE {}={} LIMIT {};'.format(table, column, id_, CHUNK_SIZE) + query = "DELETE FROM {} WHERE {}={} LIMIT {};".format( + table, column, id_, CHUNK_SIZE + ) - log.info('deleting', account_id=account_id, table=table) + log.info("deleting", account_id=account_id, table=table) for i in range(0, batches): if throttle: bulk_throttle() - if table == 'block': + if table == "block": with session_scope(account_id) as db_session: - blocks = list(db_session.query(Block.id, Block.data_sha256) - .filter(Block.namespace_id == id_) - .limit(CHUNK_SIZE)) + blocks = list( + db_session.query(Block.id, Block.data_sha256) + .filter(Block.namespace_id == id_) + .limit(CHUNK_SIZE) + ) blocks = list(blocks) block_ids = [b[0] for b in blocks] block_hashes = [b[1] for b in blocks] @@ -292,25 +334,31 @@ def _batch_delete(engine, table, column_id_filters, account_id, throttle=False, if dry_run is False: query.delete(synchronize_session=False) - elif table == 'message': + elif table == "message": with session_scope(account_id) as db_session: # messages must be order by the foreign key `received_date` # otherwise MySQL will raise an error when deleting # from the message table - messages = list(db_session.query(Message.id, Message.data_sha256) - .filter(Message.namespace_id == id_) - .order_by(desc(Message.received_date)) - .limit(CHUNK_SIZE) - .with_hint(Message, 'use index (ix_message_namespace_id_received_date)')) + messages = list( + db_session.query(Message.id, Message.data_sha256) + .filter(Message.namespace_id == id_) + .order_by(desc(Message.received_date)) + .limit(CHUNK_SIZE) + .with_hint( + Message, "use index (ix_message_namespace_id_received_date)" + ) + ) message_ids = [m[0] for m in messages] message_hashes = [m[1] for m in messages] with session_scope(account_id) as db_session: - existing_hashes = list(db_session.query(Message.data_sha256) - .filter(Message.data_sha256.in_(message_hashes)) - .filter(Message.namespace_id != id_) - .distinct()) + existing_hashes = list( + db_session.query(Message.data_sha256) + .filter(Message.data_sha256.in_(message_hashes)) + .filter(Message.namespace_id != id_) + .distinct() + ) 
existing_hashes = [h[0] for h in existing_hashes] remove_hashes = set(message_hashes) - set(existing_hashes) @@ -329,11 +377,11 @@ def _batch_delete(engine, table, column_id_filters, account_id, throttle=False, log.debug(query) end = time.time() - log.info('Completed batch deletion', time=end - start, table=table) + log.info("Completed batch deletion", time=end - start, table=table) count = engine.execute( - 'SELECT COUNT(*) FROM {} WHERE {}={};'.format(table, column, id_)).\ - scalar() + "SELECT COUNT(*) FROM {} WHERE {}={};".format(table, column, id_) + ).scalar() if dry_run is False: assert count == 0 @@ -350,22 +398,26 @@ def check_throttle(): return True -def purge_transactions(shard_id, days_ago=60, limit=1000, throttle=False, - dry_run=False, now=None): - start = 'now()' +def purge_transactions( + shard_id, days_ago=60, limit=1000, throttle=False, dry_run=False, now=None +): + start = "now()" if now is not None: - start = "'{}'".format(now.strftime('%Y-%m-%d %H:%M:%S')) + start = "'{}'".format(now.strftime("%Y-%m-%d %H:%M:%S")) # Delete all items from the transaction table that are older than # `days_ago` days. if dry_run: offset = 0 - query = ("SELECT id FROM transaction where created_at < " - "DATE_SUB({}, INTERVAL {} day) LIMIT {}". - format(start, days_ago, limit)) + query = ( + "SELECT id FROM transaction where created_at < " + "DATE_SUB({}, INTERVAL {} day) LIMIT {}".format(start, days_ago, limit) + ) else: - query = ("DELETE FROM transaction where created_at < DATE_SUB({}," - " INTERVAL {} day) LIMIT {}".format(start, days_ago, limit)) + query = ( + "DELETE FROM transaction where created_at < DATE_SUB({}," + " INTERVAL {} day) LIMIT {}".format(start, days_ago, limit) + ) try: # delete from rows until there are no more rows affected rowcount = 1 @@ -373,18 +425,24 @@ def purge_transactions(shard_id, days_ago=60, limit=1000, throttle=False, if throttle: bulk_throttle() - with session_scope_by_shard_id(shard_id, versioned=False) as \ - db_session: + with session_scope_by_shard_id(shard_id, versioned=False) as db_session: if dry_run: rowcount = db_session.execute( - "{} OFFSET {}".format(query, offset)).rowcount + "{} OFFSET {}".format(query, offset) + ).rowcount offset += rowcount else: rowcount = db_session.execute(query).rowcount - log.info("Deleted batch from transaction table", batch_size=limit, - rowcount=rowcount) - log.info("Finished purging transaction table for shard", - shard_id=shard_id, date_delta=days_ago) + log.info( + "Deleted batch from transaction table", + batch_size=limit, + rowcount=rowcount, + ) + log.info( + "Finished purging transaction table for shard", + shard_id=shard_id, + date_delta=days_ago, + ) except Exception as e: log.critical("Exception encountered during deletion", exception=e) @@ -394,7 +452,7 @@ def purge_transactions(shard_id, days_ago=60, limit=1000, throttle=False, return try: with session_scope_by_shard_id(shard_id, versioned=False) as db_session: - min_txn_id, = db_session.query(func.min(Transaction.id)).one() + (min_txn_id,) = db_session.query(func.min(Transaction.id)).one() redis_txn.zremrangebyscore( TXN_REDIS_KEY, "-inf", @@ -402,7 +460,8 @@ def purge_transactions(shard_id, days_ago=60, limit=1000, throttle=False, ) log.info( "Finished purging transaction entries from redis", - min_id=min_txn_id, date_delta=days_ago + min_id=min_txn_id, + date_delta=days_ago, ) except Exception as e: log.critical("Exception encountered during deletion", exception=e) diff --git a/inbox/models/when.py b/inbox/models/when.py index cfd8e170f..785f3fc06 
100644 --- a/inbox/models/when.py +++ b/inbox/models/when.py @@ -14,7 +14,7 @@ def parse_as_when(raw): """ when_classes = [TimeSpan, Time, DateSpan, Date] keys_for_type = {tuple(sorted(cls_.json_keys)): cls_ for cls_ in when_classes} - given_keys = tuple(sorted(set(raw.keys()) - set('object'))) + given_keys = tuple(sorted(set(raw.keys()) - set("object"))) when_type = keys_for_type.get(given_keys) if when_type is None: raise ValueError("When object had invalid keys.") @@ -23,7 +23,7 @@ def parse_as_when(raw): def parse_utc(datetime): # Arrow can handle epoch timestamps as well as most ISO-8601 strings - return arrow.get(datetime).to('utc') + return arrow.get(datetime).to("utc") class When(object): @@ -37,6 +37,7 @@ class When(object): end (datetime, optional): End time. If missing, start will be used. """ + __metaclass__ = abc.ABCMeta # Needed? json_keys = abc.abstractproperty() all_day = False @@ -63,7 +64,7 @@ def __init__(self, start, end=None): self.end = end or start def __repr__(self): - return '{} ({} - {})'.format(type(self), self.start, self.end) + return "{} ({} - {})".format(type(self), self.start, self.end) @property def is_time(self): @@ -104,18 +105,18 @@ def parse(cls, raw): class Time(When): - json_keys = ['time'] + json_keys = ["time"] class TimeSpan(Time, SpanningWhen): - json_keys = ['start_time', 'end_time'] + json_keys = ["start_time", "end_time"] singular_cls = Time class Date(AllDayWhen): - json_keys = ['date'] + json_keys = ["date"] class DateSpan(Date, AllDayWhen, SpanningWhen): - json_keys = ['start_date', 'end_date'] + json_keys = ["start_date", "end_date"] singular_cls = Date diff --git a/inbox/providers.py b/inbox/providers.py index f82a0566d..b15a38e89 100644 --- a/inbox/providers.py +++ b/inbox/providers.py @@ -2,7 +2,7 @@ from inbox.basicauth import NotSupportedError -__all__ = ['provider_info', 'providers'] +__all__ = ["provider_info", "providers"] def provider_info(provider_name): @@ -13,319 +13,529 @@ def provider_info(provider_name): """ if provider_name not in providers: - raise NotSupportedError('Provider: {} not supported.'.format( - provider_name)) + raise NotSupportedError("Provider: {} not supported.".format(provider_name)) return providers[provider_name] -providers = dict([ - ("aol", { - "type": "generic", - "imap": ("imap.aol.com", 993), - "smtp": ("smtp.aol.com", 587), - "auth": "password", - # .endswith() string match - "domains": ["aol.com"], - # regex match with dots interpreted literally and glob * as .*, - # pinned to start and end - "mx_servers": ["mailin-0[1-4].mx.aol.com"], - }), - ("bluehost", { - "type": "generic", - "auth": "password", - "domains": ["autobizbrokers.com"], - }), - ("eas", { - "auth": "password", - "domains": [ - "onmicrosoft.com", - "exchange.mit.edu", - "savills-studley.com", - "clearpoolgroup.com", - "stsci.edu", - "kms-technology.com", - "cigital.com", - "iontrading.com", - "adaptiveinsights.com", - "icims.com", - ], - "mx_servers": [ - # Office365 - "*.mail.protection.outlook.com", "*.mail.eo.outlook.com", - ], - }), - ("outlook", { - "auth": "password", - "domains": [ - "outlook.com", "outlook.com.ar", - "outlook.com.au", "outlook.at", "outlook.be", - "outlook.com.br", "outlook.cl", "outlook.cz", "outlook.dk", - "outlook.fr", "outlook.de", "outlook.com.gr", - "outlook.co.il", "outlook.in", "outlook.co.id", - "outlook.ie", "outlook.it", "outlook.hu", "outlook.jp", - "outlook.kr", "outlook.lv", "outlook.my", "outlook.co.nz", - "outlook.com.pe", "outlook.ph", "outlook.pt", "outlook.sa", - "outlook.sg", "outlook.sk", 
"outlook.es", "outlook.co.th", - "outlook.com.tr", "outlook.com.vn", "live.com", "live.com.ar" - "live.com.au", "live.at", "live.be", "live.cl", "live.cz", - "live.dk", "live.fr", "live.de", "live.com.gr", "live.co.il", - "live.in", "live.ie", "live.it", "live.hu", "live.jp", "live.lv", - "live.co.nz", "live.com.pe", "live.ph", "live.pt", "live.sa", - "live.sg", "live.sk", "live.es", "live.co.th", "live.com.tr", - "live.com.vn", "live.ca", "hotmail.ca", - "hotmail.com", "hotmail.com.ar", "hotmail.com.au", - "hotmail.at", "hotmail.be", "hotmail.com.br", "hotmail.cl", - "hotmail.cz", "hotmail.dk", "hotmail.fr", "hotmail.de", - "hotmail.co.il", "hotmail.in", "hotmail.ie", "hotmail.it", - "hotmail.hu", "hotmail.jp", "hotmail.kr", "hotmail.com.pe", - "hotmail.pt", "hotmail.sa", "hotmail.es", "hotmail.co.th", - "hotmail.com.tr", - ], - "mx_servers": [ - "*.pamx1.hotmail.com", "mx.*.hotmail.com", - ] - }), - ("_outlook", { - # IMAP-based Outlook. Legacy-only. - "type": "generic", - "imap": ("imap-mail.outlook.com", 993), - "smtp": ("smtp.live.com", 587), - "auth": "oauth2", - "events": False, - }), - ("fastmail", { - "type": "generic", - "condstore": True, - "imap": ("imap.fastmail.com", 993), - "smtp": ("smtp.fastmail.com", 465), - "auth": "password", - "folder_map": {"INBOX.Archive": "archive", - "INBOX.Drafts": "drafts", "INBOX.Junk Mail": "spam", - "INBOX.Sent": "sent", "INBOX.Sent Items": "sent", - "INBOX.Trash": "trash"}, - "domains": ["fastmail.fm", "fastmail.com"], - "mx_servers": ["in[12]-smtp.messagingengine.com"], - # exact string matches - "ns_servers": ["ns1.messagingengine.com.", - "ns2.messagingengine.com."], - }), - ("gandi", { - "type": "generic", - "condstore": True, - "imap": ("mail.gandi.net", 993), - "smtp": ("mail.gandi.net", 587), - "auth": "password", - "domains": ["debuggers.co"], - "mx_servers": ["(spool|fb).mail.gandi.net", "mail[45].gandi.net"], - }), - ("gmx", { - "type": "generic", - "imap": ("imap.gmx.com", 993), - "smtp": ("smtp.gmx.com", 587), - "auth": "password", - "domains": ["gmx.us", "gmx.com"], - }), - ("hover", { - "type": "generic", - "imap": ("mail.hover.com", 993), - "smtp": ("mail.hover.com", 587), - "auth": "password", - "mx_servers": ["mx.hover.com.cust.hostedemail.com"], - }), - ("icloud", { - "type": "generic", - "imap": ("imap.mail.me.com", 993), - "smtp": ("smtp.mail.me.com", 587), - "auth": "password", - "events": False, - "contacts": True, - "folder_map": {"Sent Messages": "sent", - "Deleted Messages": "trash"}, - "domains": ["icloud.com"], - "mx_servers": ["mx[1-6].mail.icloud.com"] - }), - ("soverin", { - "type": "generic", - "imap": ("imap.soverin.net", 993), - "smtp": ("smtp.soverin.net", 587), - "auth": "password", - "domains": ["soverin.net"], - "mx_servers": ["mx.soverin.net"] - }), - ("mail.ru", { - "type": "generic", - "imap": ("imap.mail.ru", 993), - "smtp": ("smtp.mail.ru", 587), - "auth": "password", - "domains": ["mail.ru"], - "mx_servers": ["mxs.mail.ru"] - }), - ("namecheap", { - "type": "generic", - "imap": ("mail.privateemail.com", 993), - "smtp": ("mail.privateemail.com", 465), - "auth": "password", - "mx_servers": ["mx[12].privateemail.com"] - }), - ("tiliq", { - "type": "generic", - "imap": ("imap.us-west-2.tiliq.com", 993), - "smtp": ("smtp.tiliq.com", 587), - "auth": "password", - "mx_servers": ["mx[12].(us-west-2.)?tiliq.com"] - }), - ("yahoo", { - "type": "generic", - "imap": ("imap.mail.yahoo.com", 993), - "smtp": ("smtp.mail.yahoo.com", 587), - "auth": "password", - "folder_map": {"Bulk Mail": "spam"}, - "domains": 
["yahoo.com.ar", "yahoo.com.au", "yahoo.at", "yahoo.be", - "yahoo.fr", "yahoo.be", "yahoo.nl", "yahoo.com.br", - "yahoo.ca", "yahoo.en", "yahoo.ca", "yahoo.fr", - "yahoo.com.cn", "yahoo.cn", "yahoo.com.co", "yahoo.cz", - "yahoo.dk", "yahoo.fi", "yahoo.fr", "yahoo.de", "yahoo.gr", - "yahoo.com.hk", "yahoo.hu", "yahoo.co.in", "yahoo.in", - "yahoo.ie", "yahoo.co.il", "yahoo.it", "yahoo.co.jp", - "yahoo.com.my", "yahoo.com.mx", "yahoo.ae", "yahoo.nl", - "yahoo.co.nz", "yahoo.no", "yahoo.com.ph", "yahoo.pl", - "yahoo.pt", "yahoo.ro", "yahoo.ru", "yahoo.com.sg", - "yahoo.co.za", "yahoo.es", "yahoo.se", "yahoo.ch", - "yahoo.fr", "yahoo.ch", "yahoo.de", "yahoo.com.tw", - "yahoo.co.th", "yahoo.com.tr", "yahoo.co.uk", "yahoo.com", - "yahoo.com.vn", "ymail.com", "rocketmail.com"], - "mx_servers": ["mx-biz.mail.am0.yahoodns.net", - "mx[15].biz.mail.yahoo.com", - "mxvm2.mail.yahoo.com", "mx-van.mail.am0.yahoodns.net"], - }), - ("yandex", { - "type": "generic", - "imap": ("imap.yandex.com", 993), - "smtp": ("smtp.yandex.com", 587), - "auth": "password", - "mx_servers": ["mx.yandex.ru"], - }), - ("zimbra", { - "type": "generic", - "imap": ("mail.you-got-mail.com", 993), - "smtp": ("smtp.you-got-mail.com", 587), - "auth": "password", - "domains": ["mrmail.com"], - "mx_servers": ["mx.mrmail.com"] - }), - ("godaddy", { - "type": "generic", - "imap": ("imap.secureserver.net", 993), - "smtp": ("smtpout.secureserver.net", 465), - "auth": "password", - "mx_servers": ["smtp.secureserver.net", - "mailstore1.(asia.|europe.)?secureserver.net"] - }), - ("163", { - "type": "generic", - "imap": ("imap.163.com", 993), - "smtp": ("smtp.163.com", 465), - "auth": "password", - "domains": ["163.com"], - "mx_servers": ["163mx0[0-3].mxmail.netease.com"] - }), - ("163_ym", { - "type": "generic", - "imap": ("imap.ym.163.com", 993), - "smtp": ("smtp.ym.163.com", 994), - "auth": "password", - "mx_servers": ["mx.ym.163.com"] - }), - ("163_qiye", { - "type": "generic", - "imap": ("imap.qiye.163.com", 993), - "smtp": ("smtp.qiye.163.com", 994), - "auth": "password", - "mx_servers": ["qiye163mx0[12].mxmail.netease.com"] - }), - ("123_reg", { - "type": "generic", - "imap": ("imap.123-reg.co.uk", 993), - "smtp": ("smtp.123-reg.co.uk", 465), - "auth": "password", - "mx_servers": ["mx[01].123-reg.co.uk"] - }), - ("126", { - "type": "generic", - "imap": ("imap.126.com", 993), - "smtp": ("smtp.126.com", 465), - "auth": "password", - "domains": ["126.com"], - "mx_servers": ["126mx0[0-2].mxmail.netease.com"] - }), - ("yeah.net", { - "type": "generic", - "imap": ("imap.yeah.net", 993), - "smtp": ("smtp.yeah.net", 465), - "auth": "password", - "domains": ["yeah.net"], - "mx_servers": ["yeahmx0[01].mxmail.netease.com"] - }), - ("qq", { - "type": "generic", - "imap": ("imap.qq.com", 993), - "smtp": ("smtp.qq.com", 465), - "auth": "password", - "domains": ["qq.com", "vip.qq.com"], - "mx_servers": ["mx[1-3].qq.com"] - }), - ("foxmail", { - "type": "generic", - "imap": ("imap.exmail.qq.com", 993), - "smtp": ("smtp.exmail.qq.com", 465), - "auth": "password", - "domains": ["foxmail.com"], - "mx_servers": ["mx[1-3].qq.com"] - }), - ("qq_enterprise", { - "type": "generic", - "imap": ("imap.exmail.qq.com", 993), - "smtp": ("smtp.exmail.qq.com", 465), - "auth": "password", - "mx_servers": ["mxbiz[12].qq.com"] - }), - ("aliyun", { - "type": "generic", - "imap": ("imap.aliyun.com", 993), - "smtp": ("smtp.aliyun.com", 465), - "auth": "password", - "domains": ["aliyun"], - "mx_servers": ["mx2.mail.aliyun.com"] - }), - ("139", { - "type": "generic", - 
"imap": ("imap.139.com", 993), - "smtp": ("smtp.139.com", 465), - "auth": "password", - "domains": ["139.com"], - "mx_servers": ["mx[1-3].mail.139.com"] - }), - ("gmail", { - "imap": ("imap.gmail.com", 993), - "smtp": ("smtp.gmail.com", 587), - "auth": "oauth2", - "events": True, - "contacts": True, - "mx_servers": ["aspmx.l.google.com", - "aspmx[2-6].googlemail.com", - "(alt|aspmx)[1-4].aspmx.l.google.com", - "gmail-smtp-in.l.google.com", - "alt[1-4].gmail-smtp-in.l.google.com", - # Postini - "*.psmtp.com"], - }), - ("custom", { - "type": "generic", - "auth": "password", - "folder_map": {"INBOX.Archive": "archive", - "INBOX.Drafts": "drafts", "INBOX.Junk Mail": "spam", - "INBOX.Trash": "trash", "INBOX.Sent Items": "sent", - "INBOX.Sent": "sent"}, - }) -]) +providers = dict( + [ + ( + "aol", + { + "type": "generic", + "imap": ("imap.aol.com", 993), + "smtp": ("smtp.aol.com", 587), + "auth": "password", + # .endswith() string match + "domains": ["aol.com"], + # regex match with dots interpreted literally and glob * as .*, + # pinned to start and end + "mx_servers": ["mailin-0[1-4].mx.aol.com"], + }, + ), + ( + "bluehost", + {"type": "generic", "auth": "password", "domains": ["autobizbrokers.com"],}, + ), + ( + "eas", + { + "auth": "password", + "domains": [ + "onmicrosoft.com", + "exchange.mit.edu", + "savills-studley.com", + "clearpoolgroup.com", + "stsci.edu", + "kms-technology.com", + "cigital.com", + "iontrading.com", + "adaptiveinsights.com", + "icims.com", + ], + "mx_servers": [ + # Office365 + "*.mail.protection.outlook.com", + "*.mail.eo.outlook.com", + ], + }, + ), + ( + "outlook", + { + "auth": "password", + "domains": [ + "outlook.com", + "outlook.com.ar", + "outlook.com.au", + "outlook.at", + "outlook.be", + "outlook.com.br", + "outlook.cl", + "outlook.cz", + "outlook.dk", + "outlook.fr", + "outlook.de", + "outlook.com.gr", + "outlook.co.il", + "outlook.in", + "outlook.co.id", + "outlook.ie", + "outlook.it", + "outlook.hu", + "outlook.jp", + "outlook.kr", + "outlook.lv", + "outlook.my", + "outlook.co.nz", + "outlook.com.pe", + "outlook.ph", + "outlook.pt", + "outlook.sa", + "outlook.sg", + "outlook.sk", + "outlook.es", + "outlook.co.th", + "outlook.com.tr", + "outlook.com.vn", + "live.com", + "live.com.ar" "live.com.au", + "live.at", + "live.be", + "live.cl", + "live.cz", + "live.dk", + "live.fr", + "live.de", + "live.com.gr", + "live.co.il", + "live.in", + "live.ie", + "live.it", + "live.hu", + "live.jp", + "live.lv", + "live.co.nz", + "live.com.pe", + "live.ph", + "live.pt", + "live.sa", + "live.sg", + "live.sk", + "live.es", + "live.co.th", + "live.com.tr", + "live.com.vn", + "live.ca", + "hotmail.ca", + "hotmail.com", + "hotmail.com.ar", + "hotmail.com.au", + "hotmail.at", + "hotmail.be", + "hotmail.com.br", + "hotmail.cl", + "hotmail.cz", + "hotmail.dk", + "hotmail.fr", + "hotmail.de", + "hotmail.co.il", + "hotmail.in", + "hotmail.ie", + "hotmail.it", + "hotmail.hu", + "hotmail.jp", + "hotmail.kr", + "hotmail.com.pe", + "hotmail.pt", + "hotmail.sa", + "hotmail.es", + "hotmail.co.th", + "hotmail.com.tr", + ], + "mx_servers": ["*.pamx1.hotmail.com", "mx.*.hotmail.com",], + }, + ), + ( + "_outlook", + { + # IMAP-based Outlook. Legacy-only. 
+ "type": "generic", + "imap": ("imap-mail.outlook.com", 993), + "smtp": ("smtp.live.com", 587), + "auth": "oauth2", + "events": False, + }, + ), + ( + "fastmail", + { + "type": "generic", + "condstore": True, + "imap": ("imap.fastmail.com", 993), + "smtp": ("smtp.fastmail.com", 465), + "auth": "password", + "folder_map": { + "INBOX.Archive": "archive", + "INBOX.Drafts": "drafts", + "INBOX.Junk Mail": "spam", + "INBOX.Sent": "sent", + "INBOX.Sent Items": "sent", + "INBOX.Trash": "trash", + }, + "domains": ["fastmail.fm", "fastmail.com"], + "mx_servers": ["in[12]-smtp.messagingengine.com"], + # exact string matches + "ns_servers": ["ns1.messagingengine.com.", "ns2.messagingengine.com."], + }, + ), + ( + "gandi", + { + "type": "generic", + "condstore": True, + "imap": ("mail.gandi.net", 993), + "smtp": ("mail.gandi.net", 587), + "auth": "password", + "domains": ["debuggers.co"], + "mx_servers": ["(spool|fb).mail.gandi.net", "mail[45].gandi.net"], + }, + ), + ( + "gmx", + { + "type": "generic", + "imap": ("imap.gmx.com", 993), + "smtp": ("smtp.gmx.com", 587), + "auth": "password", + "domains": ["gmx.us", "gmx.com"], + }, + ), + ( + "hover", + { + "type": "generic", + "imap": ("mail.hover.com", 993), + "smtp": ("mail.hover.com", 587), + "auth": "password", + "mx_servers": ["mx.hover.com.cust.hostedemail.com"], + }, + ), + ( + "icloud", + { + "type": "generic", + "imap": ("imap.mail.me.com", 993), + "smtp": ("smtp.mail.me.com", 587), + "auth": "password", + "events": False, + "contacts": True, + "folder_map": {"Sent Messages": "sent", "Deleted Messages": "trash"}, + "domains": ["icloud.com"], + "mx_servers": ["mx[1-6].mail.icloud.com"], + }, + ), + ( + "soverin", + { + "type": "generic", + "imap": ("imap.soverin.net", 993), + "smtp": ("smtp.soverin.net", 587), + "auth": "password", + "domains": ["soverin.net"], + "mx_servers": ["mx.soverin.net"], + }, + ), + ( + "mail.ru", + { + "type": "generic", + "imap": ("imap.mail.ru", 993), + "smtp": ("smtp.mail.ru", 587), + "auth": "password", + "domains": ["mail.ru"], + "mx_servers": ["mxs.mail.ru"], + }, + ), + ( + "namecheap", + { + "type": "generic", + "imap": ("mail.privateemail.com", 993), + "smtp": ("mail.privateemail.com", 465), + "auth": "password", + "mx_servers": ["mx[12].privateemail.com"], + }, + ), + ( + "tiliq", + { + "type": "generic", + "imap": ("imap.us-west-2.tiliq.com", 993), + "smtp": ("smtp.tiliq.com", 587), + "auth": "password", + "mx_servers": ["mx[12].(us-west-2.)?tiliq.com"], + }, + ), + ( + "yahoo", + { + "type": "generic", + "imap": ("imap.mail.yahoo.com", 993), + "smtp": ("smtp.mail.yahoo.com", 587), + "auth": "password", + "folder_map": {"Bulk Mail": "spam"}, + "domains": [ + "yahoo.com.ar", + "yahoo.com.au", + "yahoo.at", + "yahoo.be", + "yahoo.fr", + "yahoo.be", + "yahoo.nl", + "yahoo.com.br", + "yahoo.ca", + "yahoo.en", + "yahoo.ca", + "yahoo.fr", + "yahoo.com.cn", + "yahoo.cn", + "yahoo.com.co", + "yahoo.cz", + "yahoo.dk", + "yahoo.fi", + "yahoo.fr", + "yahoo.de", + "yahoo.gr", + "yahoo.com.hk", + "yahoo.hu", + "yahoo.co.in", + "yahoo.in", + "yahoo.ie", + "yahoo.co.il", + "yahoo.it", + "yahoo.co.jp", + "yahoo.com.my", + "yahoo.com.mx", + "yahoo.ae", + "yahoo.nl", + "yahoo.co.nz", + "yahoo.no", + "yahoo.com.ph", + "yahoo.pl", + "yahoo.pt", + "yahoo.ro", + "yahoo.ru", + "yahoo.com.sg", + "yahoo.co.za", + "yahoo.es", + "yahoo.se", + "yahoo.ch", + "yahoo.fr", + "yahoo.ch", + "yahoo.de", + "yahoo.com.tw", + "yahoo.co.th", + "yahoo.com.tr", + "yahoo.co.uk", + "yahoo.com", + "yahoo.com.vn", + "ymail.com", + "rocketmail.com", + 
], + "mx_servers": [ + "mx-biz.mail.am0.yahoodns.net", + "mx[15].biz.mail.yahoo.com", + "mxvm2.mail.yahoo.com", + "mx-van.mail.am0.yahoodns.net", + ], + }, + ), + ( + "yandex", + { + "type": "generic", + "imap": ("imap.yandex.com", 993), + "smtp": ("smtp.yandex.com", 587), + "auth": "password", + "mx_servers": ["mx.yandex.ru"], + }, + ), + ( + "zimbra", + { + "type": "generic", + "imap": ("mail.you-got-mail.com", 993), + "smtp": ("smtp.you-got-mail.com", 587), + "auth": "password", + "domains": ["mrmail.com"], + "mx_servers": ["mx.mrmail.com"], + }, + ), + ( + "godaddy", + { + "type": "generic", + "imap": ("imap.secureserver.net", 993), + "smtp": ("smtpout.secureserver.net", 465), + "auth": "password", + "mx_servers": [ + "smtp.secureserver.net", + "mailstore1.(asia.|europe.)?secureserver.net", + ], + }, + ), + ( + "163", + { + "type": "generic", + "imap": ("imap.163.com", 993), + "smtp": ("smtp.163.com", 465), + "auth": "password", + "domains": ["163.com"], + "mx_servers": ["163mx0[0-3].mxmail.netease.com"], + }, + ), + ( + "163_ym", + { + "type": "generic", + "imap": ("imap.ym.163.com", 993), + "smtp": ("smtp.ym.163.com", 994), + "auth": "password", + "mx_servers": ["mx.ym.163.com"], + }, + ), + ( + "163_qiye", + { + "type": "generic", + "imap": ("imap.qiye.163.com", 993), + "smtp": ("smtp.qiye.163.com", 994), + "auth": "password", + "mx_servers": ["qiye163mx0[12].mxmail.netease.com"], + }, + ), + ( + "123_reg", + { + "type": "generic", + "imap": ("imap.123-reg.co.uk", 993), + "smtp": ("smtp.123-reg.co.uk", 465), + "auth": "password", + "mx_servers": ["mx[01].123-reg.co.uk"], + }, + ), + ( + "126", + { + "type": "generic", + "imap": ("imap.126.com", 993), + "smtp": ("smtp.126.com", 465), + "auth": "password", + "domains": ["126.com"], + "mx_servers": ["126mx0[0-2].mxmail.netease.com"], + }, + ), + ( + "yeah.net", + { + "type": "generic", + "imap": ("imap.yeah.net", 993), + "smtp": ("smtp.yeah.net", 465), + "auth": "password", + "domains": ["yeah.net"], + "mx_servers": ["yeahmx0[01].mxmail.netease.com"], + }, + ), + ( + "qq", + { + "type": "generic", + "imap": ("imap.qq.com", 993), + "smtp": ("smtp.qq.com", 465), + "auth": "password", + "domains": ["qq.com", "vip.qq.com"], + "mx_servers": ["mx[1-3].qq.com"], + }, + ), + ( + "foxmail", + { + "type": "generic", + "imap": ("imap.exmail.qq.com", 993), + "smtp": ("smtp.exmail.qq.com", 465), + "auth": "password", + "domains": ["foxmail.com"], + "mx_servers": ["mx[1-3].qq.com"], + }, + ), + ( + "qq_enterprise", + { + "type": "generic", + "imap": ("imap.exmail.qq.com", 993), + "smtp": ("smtp.exmail.qq.com", 465), + "auth": "password", + "mx_servers": ["mxbiz[12].qq.com"], + }, + ), + ( + "aliyun", + { + "type": "generic", + "imap": ("imap.aliyun.com", 993), + "smtp": ("smtp.aliyun.com", 465), + "auth": "password", + "domains": ["aliyun"], + "mx_servers": ["mx2.mail.aliyun.com"], + }, + ), + ( + "139", + { + "type": "generic", + "imap": ("imap.139.com", 993), + "smtp": ("smtp.139.com", 465), + "auth": "password", + "domains": ["139.com"], + "mx_servers": ["mx[1-3].mail.139.com"], + }, + ), + ( + "gmail", + { + "imap": ("imap.gmail.com", 993), + "smtp": ("smtp.gmail.com", 587), + "auth": "oauth2", + "events": True, + "contacts": True, + "mx_servers": [ + "aspmx.l.google.com", + "aspmx[2-6].googlemail.com", + "(alt|aspmx)[1-4].aspmx.l.google.com", + "gmail-smtp-in.l.google.com", + "alt[1-4].gmail-smtp-in.l.google.com", + # Postini + "*.psmtp.com", + ], + }, + ), + ( + "custom", + { + "type": "generic", + "auth": "password", + "folder_map": { + 
"INBOX.Archive": "archive", + "INBOX.Drafts": "drafts", + "INBOX.Junk Mail": "spam", + "INBOX.Trash": "trash", + "INBOX.Sent Items": "sent", + "INBOX.Sent": "sent", + }, + }, + ), + ] +) diff --git a/inbox/s3/__init__.py b/inbox/s3/__init__.py index b88765afd..1e6dbf001 100644 --- a/inbox/s3/__init__.py +++ b/inbox/s3/__init__.py @@ -1,3 +1,4 @@ # Allow out-of-tree backend submodules. from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) diff --git a/inbox/s3/backends/__init__.py b/inbox/s3/backends/__init__.py index b88765afd..1e6dbf001 100644 --- a/inbox/s3/backends/__init__.py +++ b/inbox/s3/backends/__init__.py @@ -1,3 +1,4 @@ # Allow out-of-tree backend submodules. from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) diff --git a/inbox/s3/backends/gmail.py b/inbox/s3/backends/gmail.py index be934a7c1..6cb32b07d 100644 --- a/inbox/s3/backends/gmail.py +++ b/inbox/s3/backends/gmail.py @@ -4,6 +4,7 @@ from inbox.auth.oauth import OAuthRequestsWrapper from inbox.models.backends.gmail import g_token_manager from nylas.logging import get_logger + log = get_logger() @@ -18,25 +19,35 @@ def get_gmail_raw_contents(message): g_msgid = message.g_msgid if g_msgid is None: - raise EmailDeletedException("Couldn't find message on backend server. This is a permanent error.") + raise EmailDeletedException( + "Couldn't find message on backend server. This is a permanent error." + ) if isinstance(g_msgid, basestring): g_msgid = int(g_msgid) - hex_id = format(g_msgid, 'x') - url = 'https://www.googleapis.com/gmail/v1/users/me/messages/{}?format=raw'.format(hex_id, 'x') + hex_id = format(g_msgid, "x") + url = "https://www.googleapis.com/gmail/v1/users/me/messages/{}?format=raw".format( + hex_id, "x" + ) r = requests.get(url, auth=OAuthRequestsWrapper(auth_token)) if r.status_code != 200: - log.error('Got an error when fetching raw email', r.status_code, r.text) + log.error("Got an error when fetching raw email", r.status_code, r.text) if r.status_code in [403, 429]: - raise TemporaryEmailFetchException("Temporary usage limit hit. Please try again.") + raise TemporaryEmailFetchException( + "Temporary usage limit hit. Please try again." + ) if r.status_code == 404: - raise EmailDeletedException("Couldn't find message on backend server. This is a permanent error.") + raise EmailDeletedException( + "Couldn't find message on backend server. This is a permanent error." + ) elif r.status_code >= 500 and r.status_code <= 599: - raise TemporaryEmailFetchException("Backend server error. Please try again in a few minutes.") + raise TemporaryEmailFetchException( + "Backend server error. Please try again in a few minutes." + ) data = r.json() - raw = str(data['raw']) - return base64.urlsafe_b64decode(raw + '=' * (4 - len(raw) % 4)) + raw = str(data["raw"]) + return base64.urlsafe_b64decode(raw + "=" * (4 - len(raw) % 4)) diff --git a/inbox/s3/backends/imap.py b/inbox/s3/backends/imap.py index 677fa5c5b..281124928 100644 --- a/inbox/s3/backends/imap.py +++ b/inbox/s3/backends/imap.py @@ -4,6 +4,7 @@ from inbox.mailsync.backends.imap.generic import uidvalidity_cb from nylas.logging import get_logger + log = get_logger() @@ -22,11 +23,18 @@ def get_imap_raw_contents(message): try: uids = crispin_client.uids([uid.msg_uid]) if len(uids) == 0: - raise EmailDeletedException("Message was deleted on the backend server.") + raise EmailDeletedException( + "Message was deleted on the backend server." 
+ ) return uids[0].body except imapclient.IMAPClient.Error: - log.error("Error while fetching raw contents", exc_info=True, - logstash_tag='fetching_error') - raise EmailFetchException("Couldn't get message from server. " - "Please try again in a few minutes.") + log.error( + "Error while fetching raw contents", + exc_info=True, + logstash_tag="fetching_error", + ) + raise EmailFetchException( + "Couldn't get message from server. " + "Please try again in a few minutes." + ) diff --git a/inbox/s3/exc.py b/inbox/s3/exc.py index 10f76fc5f..e2393773c 100644 --- a/inbox/s3/exc.py +++ b/inbox/s3/exc.py @@ -8,10 +8,12 @@ class EmailFetchException(S3Exception): class EmailDeletedException(EmailFetchException): """Raises an error when the message is deleted on the remote.""" + pass class TemporaryEmailFetchException(EmailFetchException): """A class for temporary errors when trying to fetch emails. Exchange notably seems to need warming up before fetching data.""" + pass diff --git a/inbox/scheduling/deferred_migration.py b/inbox/scheduling/deferred_migration.py index 83dc52513..3e23ffbb7 100644 --- a/inbox/scheduling/deferred_migration.py +++ b/inbox/scheduling/deferred_migration.py @@ -9,18 +9,19 @@ from inbox.util.stats import statsd_client from nylas.logging import get_logger + log = get_logger() -DEFERRED_ACCOUNT_MIGRATION_COUNTER = 'sync:deferred_account_migration_counter' -DEFERRED_ACCOUNT_MIGRATION_PQUEUE = 'sync:deferred_account_migration_pqueue' -DEFERRED_ACCOUNT_MIGRATION_EVENT_QUEUE = 'sync:deferred_account_migration_event_queue' -DEFERRED_ACCOUNT_MIGRATION_OBJ = 'sync:deferred_account_migration_objs:{}' -DEFERRED_ACCOUNT_MIGRATION_OBJ_TTL = 60 * 60 * 24 * 7 # 1 week +DEFERRED_ACCOUNT_MIGRATION_COUNTER = "sync:deferred_account_migration_counter" +DEFERRED_ACCOUNT_MIGRATION_PQUEUE = "sync:deferred_account_migration_pqueue" +DEFERRED_ACCOUNT_MIGRATION_EVENT_QUEUE = "sync:deferred_account_migration_event_queue" +DEFERRED_ACCOUNT_MIGRATION_OBJ = "sync:deferred_account_migration_objs:{}" +DEFERRED_ACCOUNT_MIGRATION_OBJ_TTL = 60 * 60 * 24 * 7 # 1 week class DeferredAccountMigration(object): - _redis_fields = ['deadline', 'account_id', 'desired_host', 'id'] + _redis_fields = ["deadline", "account_id", "desired_host", "id"] def __init__(self, deadline, account_id, desired_host, id=None): self.deadline = float(deadline) @@ -32,7 +33,10 @@ def execute(self, client): with session_scope(self.account_id) as db_session: account = db_session.query(Account).get(self.account_id) if account is None: - log.warning('Account not found when trying to execute DeferredAccountMigration', account_id=self.account_id) + log.warning( + "Account not found when trying to execute DeferredAccountMigration", + account_id=self.account_id, + ) return account.desired_sync_host = self.desired_host db_session.commit() @@ -43,15 +47,22 @@ def save(self, client): self.id = client.incr(DEFERRED_ACCOUNT_MIGRATION_COUNTER) p = client.pipeline() key = DEFERRED_ACCOUNT_MIGRATION_OBJ.format(self.id) - p.hmset(key, dict((field, getattr(self, field)) for field in self.__class__._redis_fields)) + p.hmset( + key, + dict( + (field, getattr(self, field)) for field in self.__class__._redis_fields + ), + ) p.expire(key, DEFERRED_ACCOUNT_MIGRATION_OBJ_TTL) p.zadd(DEFERRED_ACCOUNT_MIGRATION_PQUEUE, self.deadline, self.id) - p.rpush(DEFERRED_ACCOUNT_MIGRATION_EVENT_QUEUE, json.dumps({'id': self.id})) + p.rpush(DEFERRED_ACCOUNT_MIGRATION_EVENT_QUEUE, json.dumps({"id": self.id})) p.execute() @classmethod def try_load(cls, client, id): - values = 
client.hmget(DEFERRED_ACCOUNT_MIGRATION_OBJ.format(id), cls._redis_fields) + values = client.hmget( + DEFERRED_ACCOUNT_MIGRATION_OBJ.format(id), cls._redis_fields + ) if values is None: return None return DeferredAccountMigration(*values) @@ -59,7 +70,9 @@ def try_load(cls, client, id): class DeferredAccountMigrationExecutor(gevent.Greenlet): def __init__(self): - self.event_queue = event_queue.EventQueue(DEFERRED_ACCOUNT_MIGRATION_EVENT_QUEUE) + self.event_queue = event_queue.EventQueue( + DEFERRED_ACCOUNT_MIGRATION_EVENT_QUEUE + ) self.redis = self.event_queue.redis gevent.Greenlet.__init__(self) @@ -69,23 +82,29 @@ def _run(self): def _run_impl(self): current_time = time.time() - timeout = event_queue.SOCKET_TIMEOUT - 2 # Minus 2 to give us some leeway. + timeout = event_queue.SOCKET_TIMEOUT - 2 # Minus 2 to give us some leeway. next_deferral = self._try_get_next_deferral() while next_deferral is not None: if next_deferral.deadline >= current_time: - timeout = int(min(max(next_deferral.deadline - current_time, 1), timeout)) - log.info('Next deferral deadline is in the future, sleeping', - deferral_id=next_deferral.id, - deadline=next_deferral.deadline, - desired_host=next_deferral.desired_host, - account_id=next_deferral.account_id, - timeout=timeout) + timeout = int( + min(max(next_deferral.deadline - current_time, 1), timeout) + ) + log.info( + "Next deferral deadline is in the future, sleeping", + deferral_id=next_deferral.id, + deadline=next_deferral.deadline, + desired_host=next_deferral.desired_host, + account_id=next_deferral.account_id, + timeout=timeout, + ) break - log.info('Executing deferral', - deferral_id=next_deferral.id, - deadline=next_deferral.deadline, - desired_host=next_deferral.desired_host, - account_id=next_deferral.account_id) + log.info( + "Executing deferral", + deferral_id=next_deferral.id, + deadline=next_deferral.deadline, + desired_host=next_deferral.desired_host, + account_id=next_deferral.account_id, + ) next_deferral.execute(self.redis) self.redis.zrem(DEFERRED_ACCOUNT_MIGRATION_PQUEUE, next_deferral.id) next_deferral = self._try_get_next_deferral() diff --git a/inbox/scheduling/event_queue.py b/inbox/scheduling/event_queue.py index 1262087ed..38c4dde27 100644 --- a/inbox/scheduling/event_queue.py +++ b/inbox/scheduling/event_queue.py @@ -3,6 +3,7 @@ from inbox.config import config from nylas.logging import get_logger + log = get_logger() SOCKET_CONNECT_TIMEOUT = 5 @@ -10,22 +11,25 @@ def _get_redis_client(host=None, port=6379, db=1): - return StrictRedis(host=host, - port=port, - db=db, - socket_connect_timeout=SOCKET_CONNECT_TIMEOUT, - socket_timeout=SOCKET_TIMEOUT) + return StrictRedis( + host=host, + port=port, + db=db, + socket_connect_timeout=SOCKET_CONNECT_TIMEOUT, + socket_timeout=SOCKET_TIMEOUT, + ) class EventQueue(object): """Simple queue that clients can listen to and wait to be notified of some event that they're interested in. 
""" + def __init__(self, queue_name, redis=None): self.redis = redis if self.redis is None: - redis_host = config['EVENT_QUEUE_REDIS_HOSTNAME'] - redis_db = config['EVENT_QUEUE_REDIS_DB'] + redis_host = config["EVENT_QUEUE_REDIS_HOSTNAME"] + redis_db = config["EVENT_QUEUE_REDIS_DB"] self.redis = _get_redis_client(host=redis_host, db=redis_db) self.queue_name = queue_name @@ -38,22 +42,27 @@ def receive_event(self, timeout=0): if result is None: return None - queue_name, event_data = (self.queue_name, result) if timeout is None else result + queue_name, event_data = ( + (self.queue_name, result) if timeout is None else result + ) try: event = json.loads(event_data) - event['queue_name'] = queue_name + event["queue_name"] = queue_name return event except Exception as e: - log.error('Failed to load event data from queue', error=e, event_data=event_data) + log.error( + "Failed to load event data from queue", error=e, event_data=event_data + ) return None def send_event(self, event_data): - event_data.pop('queue_name', None) + event_data.pop("queue_name", None) self.redis.rpush(self.queue_name, json.dumps(event_data)) class EventQueueGroup(object): """Group of queues that can all be simultaneously watched for new events.""" + def __init__(self, queues): self.queues = queues self.redis = None @@ -67,8 +76,10 @@ def receive_event(self, timeout=0): queue_name, event_data = result try: event = json.loads(event_data) - event['queue_name'] = queue_name + event["queue_name"] = queue_name return event except Exception as e: - log.error('Failed to load event data from queue', error=e, event_data=event_data) + log.error( + "Failed to load event data from queue", error=e, event_data=event_data + ) return None diff --git a/inbox/scheduling/queue.py b/inbox/scheduling/queue.py index 6c53186a2..90554d18e 100644 --- a/inbox/scheduling/queue.py +++ b/inbox/scheduling/queue.py @@ -29,6 +29,7 @@ from inbox.util.stats import statsd_client from nylas.logging import get_logger from redis import StrictRedis + log = get_logger() SOCKET_CONNECT_TIMEOUT = 5 @@ -41,30 +42,33 @@ class QueueClient(object): """ # Lua scripts for atomic assignment and conflict-free unassignment. 
- ASSIGN = ''' + ASSIGN = """ local k = redis.call('RPOP', KEYS[1]) if k then local s = redis.call('HSETNX', KEYS[2], k, ARGV[1]) if s then return k end - end''' + end""" - UNASSIGN = ''' + UNASSIGN = """ if redis.call('HGET', KEYS[1], KEYS[2]) == ARGV[1] then return redis.call('HDEL', KEYS[1], KEYS[2]) else return 0 end - ''' + """ def __init__(self, zone): self.zone = zone - redis_host = config['ACCOUNT_QUEUE_REDIS_HOSTNAME'] - redis_db = config['ACCOUNT_QUEUE_REDIS_DB'] - self.redis = StrictRedis(host=redis_host, db=redis_db, - socket_connect_timeout=SOCKET_CONNECT_TIMEOUT, - socket_timeout=SOCKET_TIMEOUT) + redis_host = config["ACCOUNT_QUEUE_REDIS_HOSTNAME"] + redis_db = config["ACCOUNT_QUEUE_REDIS_DB"] + self.redis = StrictRedis( + host=redis_host, + db=redis_db, + socket_connect_timeout=SOCKET_CONNECT_TIMEOUT, + socket_timeout=SOCKET_TIMEOUT, + ) def all(self): """ @@ -115,11 +119,11 @@ def qsize(self): @property def _queue(self): - return 'unassigned_{}'.format(self.zone) + return "unassigned_{}".format(self.zone) @property def _hash(self): - return 'assigned_{}'.format(self.zone) + return "assigned_{}".format(self.zone) class QueuePopulator(object): @@ -133,24 +137,27 @@ def __init__(self, zone, poll_interval=1): self.poll_interval = poll_interval self.queue_client = QueueClient(zone) self.shards = [] - for database in config['DATABASE_HOSTS']: - if database.get('ZONE') == self.zone: - shard_ids = [shard['ID'] for shard in database['SHARDS']] - self.shards.extend(shard_id for shard_id in shard_ids - if shard_id in engine_manager.engines) + for database in config["DATABASE_HOSTS"]: + if database.get("ZONE") == self.zone: + shard_ids = [shard["ID"] for shard in database["SHARDS"]] + self.shards.extend( + shard_id + for shard_id in shard_ids + if shard_id in engine_manager.engines + ) def run(self): - log.info('Queueing accounts', zone=self.zone, shards=self.shards) + log.info("Queueing accounts", zone=self.zone, shards=self.shards) while True: retry_with_logging(self._run_impl) def _run_impl(self): self.enqueue_new_accounts() self.unassign_disabled_accounts() - statsd_client.gauge('syncqueue.queue.{}.length'.format(self.zone), - self.queue_client.qsize()) - statsd_client.incr('syncqueue.service.{}.heartbeat'. 
- format(self.zone)) + statsd_client.gauge( + "syncqueue.queue.{}.length".format(self.zone), self.queue_client.qsize() + ) + statsd_client.incr("syncqueue.service.{}.heartbeat".format(self.zone)) gevent.sleep(self.poll_interval) def enqueue_new_accounts(self): @@ -162,17 +169,18 @@ def enqueue_new_accounts(self): """ new_accounts = self.runnable_accounts() - self.queue_client.all() for account_id in new_accounts: - log.info('Enqueuing new account', account_id=account_id) + log.info("Enqueuing new account", account_id=account_id) self.queue_client.enqueue(account_id) def unassign_disabled_accounts(self): runnable_accounts = self.runnable_accounts() disabled_accounts = { - k: v for k, v in self.queue_client.assigned().items() + k: v + for k, v in self.queue_client.assigned().items() if k not in runnable_accounts } for account_id, sync_host in disabled_accounts.items(): - log.info('Removing disabled account', account_id=account_id) + log.info("Removing disabled account", account_id=account_id) self.queue_client.unassign(account_id, sync_host) def runnable_accounts(self): @@ -180,6 +188,9 @@ def runnable_accounts(self): for key in self.shards: with session_scope_by_shard_id(key) as db_session: accounts.update( - id_ for id_, in db_session.query(Account.id).filter( - Account.sync_should_run)) + id_ + for id_, in db_session.query(Account.id).filter( + Account.sync_should_run + ) + ) return accounts diff --git a/inbox/search/backends/__init__.py b/inbox/search/backends/__init__.py index 737940769..da3f6d994 100644 --- a/inbox/search/backends/__init__.py +++ b/inbox/search/backends/__init__.py @@ -1,5 +1,7 @@ # Allow out-of-tree backend submodules. from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) from inbox.util.misc import register_backends + module_registry = register_backends(__name__, __path__) diff --git a/inbox/search/backends/generic.py b/inbox/search/backends/generic.py index 18479ce18..c01cfaacf 100644 --- a/inbox/search/backends/generic.py +++ b/inbox/search/backends/generic.py @@ -1,6 +1,6 @@ from inbox.search.backends.imap import IMAPSearchClient -__all__ = ['IMAPSearchClient'] +__all__ = ["IMAPSearchClient"] -PROVIDER = 'generic' -SEARCH_CLS = 'IMAPSearchClient' +PROVIDER = "generic" +SEARCH_CLS = "IMAPSearchClient" diff --git a/inbox/search/backends/gmail.py b/inbox/search/backends/gmail.py index b358ae4ef..baa27dfa3 100644 --- a/inbox/search/backends/gmail.py +++ b/inbox/search/backends/gmail.py @@ -11,12 +11,11 @@ log = get_logger() -PROVIDER = 'gmail' -SEARCH_CLS = 'GmailSearchClient' +PROVIDER = "gmail" +SEARCH_CLS = "GmailSearchClient" class GmailSearchClient(object): - def __init__(self, account): self.account_id = int(account.id) try: @@ -28,7 +27,9 @@ def __init__(self, account): raise SearchBackendException( "This search can't be performed because the account's " "credentials are out of date. Please reauthenticate and try " - "again.", 403) + "again.", + 403, + ) def search_messages(self, db_session, search_query, offset=0, limit=40): # We need to get the next limit + offset terms if we want to @@ -36,10 +37,14 @@ def search_messages(self, db_session, search_query, offset=0, limit=40): g_msgids = self._search(search_query, limit=limit + offset) if not g_msgids: return [] - query = db_session.query(Message). \ - filter(Message.namespace_id == self.account.namespace.id, - Message.g_msgid.in_(g_msgids)). 
\ - order_by(desc(Message.received_date)) + query = ( + db_session.query(Message) + .filter( + Message.namespace_id == self.account.namespace.id, + Message.g_msgid.in_(g_msgids), + ) + .order_by(desc(Message.received_date)) + ) if offset: query = query.offset(offset) @@ -56,7 +61,9 @@ def g(): encoder = APIEncoder() with session_scope(self.account_id) as db_session: - yield encoder.cereal(self.search_messages(db_session, search_query)) + '\n' + yield encoder.cereal( + self.search_messages(db_session, search_query) + ) + "\n" return g @@ -66,13 +73,17 @@ def search_threads(self, db_session, search_query, offset=0, limit=40): g_msgids = self._search(search_query, limit=limit + offset) if not g_msgids: return [] - query = db_session.query(Thread). \ - join(Message, Message.thread_id == Thread.id). \ - filter(Thread.namespace_id == self.account.namespace.id, - Thread.deleted_at == None, - Message.namespace_id == self.account.namespace.id, - Message.g_msgid.in_(g_msgids)). \ - order_by(desc(Message.received_date)) + query = ( + db_session.query(Thread) + .join(Message, Message.thread_id == Thread.id) + .filter( + Thread.namespace_id == self.account.namespace.id, + Thread.deleted_at == None, + Message.namespace_id == self.account.namespace.id, + Message.g_msgid.in_(g_msgids), + ) + .order_by(desc(Message.received_date)) + ) if offset: query = query.offset(offset) @@ -87,7 +98,9 @@ def g(): encoder = APIEncoder() with session_scope(self.account_id) as db_session: - yield encoder.cereal(self.search_threads(db_session, search_query)) + '\n' + yield encoder.cereal( + self.search_threads(db_session, search_query) + ) + "\n" return g @@ -99,46 +112,51 @@ def _search(self, search_query, limit): # Could have used while True: but I don't like infinite loops. for i in range(1, 10): ret = requests.get( - u'https://www.googleapis.com/gmail/v1/users/me/messages', + u"https://www.googleapis.com/gmail/v1/users/me/messages", params=params, - auth=OAuthRequestsWrapper(self.auth_token)) + auth=OAuthRequestsWrapper(self.auth_token), + ) - log.info('Gmail API search request completed', - elapsed=ret.elapsed.total_seconds()) + log.info( + "Gmail API search request completed", + elapsed=ret.elapsed.total_seconds(), + ) if ret.status_code != 200: - log.critical('HTTP error making search request', - account_id=self.account.id, - url=ret.url, - response=ret.content) + log.critical( + "HTTP error making search request", + account_id=self.account.id, + url=ret.url, + response=ret.content, + ) raise SearchBackendException( - "Error issuing search request", 503, - server_error=ret.content) + "Error issuing search request", 503, server_error=ret.content + ) data = ret.json() - if 'messages' not in data: + if "messages" not in data: return results # Note that the Gmail API returns g_msgids in hex format. So for # example the IMAP X-GM-MSGID 1438297078380071706 corresponds to # 13f5db9286538b1a in the API response we have here. - results = results + [int(m['id'], 16) for m in data['messages']] + results = results + [int(m["id"], 16) for m in data["messages"]] if len(results) >= limit: return results[:limit] - if 'nextPageToken' not in data: + if "nextPageToken" not in data: return results else: # We don't have results and there's more to fetch --- # get them! 
- params['pageToken'] = data['nextPageToken'] - log.info('Getting next page of search results') + params["pageToken"] = data["nextPageToken"] + log.info("Getting next page of search results") continue # If we've been through the loop 10 times, it means we got a request # a crazy-high offset --- raise an error. - log.error('Too many search results', query=search_query, limit=limit) + log.error("Too many search results", query=search_query, limit=limit) raise SearchBackendException("Too many results", 400) diff --git a/inbox/search/backends/imap.py b/inbox/search/backends/imap.py index 8fa01e9bf..2a2a5e54c 100644 --- a/inbox/search/backends/imap.py +++ b/inbox/search/backends/imap.py @@ -16,43 +16,52 @@ import socket from imapclient import IMAPClient -PROVIDER = 'imap' +PROVIDER = "imap" class IMAPSearchClient(object): - def __init__(self, account): self.account = account self.account_id = account.id - self.log = get_logger().new(account_id=account.id, - component='search') + self.log = get_logger().new(account_id=account.id, component="search") def _open_crispin_connection(self, db_session): account = db_session.query(Account).get(self.account_id) try: conn = account.auth_handler.connect_account(account) except (IMAPClient.Error, socket.error, IMAP4.error): - raise SearchBackendException(('Unable to connect to the IMAP ' - 'server. Please retry in a ' - 'couple minutes.'), 503) + raise SearchBackendException( + ( + "Unable to connect to the IMAP " + "server. Please retry in a " + "couple minutes." + ), + 503, + ) except ValidationError: - raise SearchBackendException(("This search can't be performed " - "because the account's credentials " - "are out of date. Please " - "reauthenticate and try again."), 403) + raise SearchBackendException( + ( + "This search can't be performed " + "because the account's credentials " + "are out of date. Please " + "reauthenticate and try again." 
+ ), + 403, + ) try: acct_provider_info = provider_info(account.provider) except NotSupportedError: - self.log.warn('Account provider not supported', - provider=account.provider) + self.log.warn("Account provider not supported", provider=account.provider) raise - self.crispin_client = CrispinClient(self.account_id, - acct_provider_info, - account.email_address, - conn, - readonly=True) + self.crispin_client = CrispinClient( + self.account_id, + acct_provider_info, + account.email_address, + conn, + readonly=True, + ) def _close_crispin_connection(self): self.crispin_client.logout() @@ -62,12 +71,14 @@ def search_messages(self, db_session, search_query, offset=0, limit=40): for uids in self._search(db_session, search_query): imap_uids.extend(uids) - query = db_session.query(Message) \ - .join(ImapUid) \ - .filter(ImapUid.account_id == self.account_id, - ImapUid.msg_uid.in_(imap_uids))\ - .order_by(desc(Message.received_date))\ - + query = ( + db_session.query(Message) + .join(ImapUid) + .filter( + ImapUid.account_id == self.account_id, ImapUid.msg_uid.in_(imap_uids) + ) + .order_by(desc(Message.received_date)) + ) if offset: query = query.offset(offset) @@ -82,13 +93,16 @@ def g(): with session_scope(self.account_id) as db_session: for imap_uids in self._search(db_session, search_query): - query = db_session.query(Message) \ - .join(ImapUid) \ - .filter(ImapUid.account_id == self.account_id, - ImapUid.msg_uid.in_(imap_uids))\ - .order_by(desc(Message.received_date))\ - - yield encoder.cereal(query.all()) + '\n' + query = ( + db_session.query(Message) + .join(ImapUid) + .filter( + ImapUid.account_id == self.account_id, + ImapUid.msg_uid.in_(imap_uids), + ) + .order_by(desc(Message.received_date)) + ) + yield encoder.cereal(query.all()) + "\n" return g @@ -97,14 +111,18 @@ def search_threads(self, db_session, search_query, offset=0, limit=40): for uids in self._search(db_session, search_query): imap_uids.extend(uids) - query = db_session.query(Thread) \ - .join(Message, Message.thread_id == Thread.id) \ - .join(ImapUid) \ - .filter(ImapUid.account_id == self.account_id, - ImapUid.msg_uid.in_(imap_uids), - Thread.deleted_at == None, - Thread.id == Message.thread_id)\ + query = ( + db_session.query(Thread) + .join(Message, Message.thread_id == Thread.id) + .join(ImapUid) + .filter( + ImapUid.account_id == self.account_id, + ImapUid.msg_uid.in_(imap_uids), + Thread.deleted_at == None, + Thread.id == Message.thread_id, + ) .order_by(desc(Message.received_date)) + ) if offset: query = query.offset(offset) @@ -119,15 +137,19 @@ def g(): with session_scope(self.account_id) as db_session: for imap_uids in self._search(db_session, search_query): - query = db_session.query(Thread) \ - .join(Message, Message.thread_id == Thread.id) \ - .join(ImapUid) \ - .filter(ImapUid.account_id == self.account_id, - ImapUid.msg_uid.in_(imap_uids), - Thread.id == Message.thread_id)\ + query = ( + db_session.query(Thread) + .join(Message, Message.thread_id == Thread.id) + .join(ImapUid) + .filter( + ImapUid.account_id == self.account_id, + ImapUid.msg_uid.in_(imap_uids), + Thread.id == Message.thread_id, + ) .order_by(desc(Message.received_date)) + ) - yield encoder.cereal(query.all()) + '\n' + yield encoder.cereal(query.all()) + "\n" return g @@ -135,30 +157,34 @@ def _search(self, db_session, search_query): self._open_crispin_connection(db_session) try: - criteria = ['TEXT', search_query.encode('ascii')] + criteria = ["TEXT", search_query.encode("ascii")] charset = None except UnicodeEncodeError: - criteria = 
[u'TEXT', search_query] - charset = 'UTF-8' + criteria = [u"TEXT", search_query] + charset = "UTF-8" folders = [] account_folders = db_session.query(Folder).filter( - Folder.account_id == self.account_id) + Folder.account_id == self.account_id + ) # We want to start the search with the 'inbox', 'sent' # and 'archive' folders, if they exist. - for cname in ['inbox', 'sent', 'archive']: - special_folder = db_session.query(Folder).filter( - Folder.account_id == self.account_id, - Folder.canonical_name == cname).one_or_none() + for cname in ["inbox", "sent", "archive"]: + special_folder = ( + db_session.query(Folder) + .filter( + Folder.account_id == self.account_id, Folder.canonical_name == cname + ) + .one_or_none() + ) if special_folder is not None: folders.append(special_folder) # Don't search the folder twice. - account_folders = account_folders.filter( - Folder.id != special_folder.id) + account_folders = account_folders.filter(Folder.id != special_folder.id) folders = folders + account_folders.all() @@ -174,17 +200,20 @@ def _search_folder(self, folder, criteria, charset): self.log.warn("Won't search missing IMAP folder", exc_info=True) return [] except UidInvalid: - self.log.error(("Got Uidvalidity error when searching. " - "Skipping."), exc_info=True) + self.log.error( + ("Got Uidvalidity error when searching. " "Skipping."), exc_info=True + ) return [] try: uids = self.crispin_client.conn.search(criteria, charset=charset) except IMAP4.error: - self.log.warn('Search error', exc_info=True) - raise SearchBackendException(('Unknown IMAP error when ' - 'performing search.'), 503) - - self.log.debug('Search found messages for folder', - folder_name=folder.id, uids=len(uids)) + self.log.warn("Search error", exc_info=True) + raise SearchBackendException( + ("Unknown IMAP error when " "performing search."), 503 + ) + + self.log.debug( + "Search found messages for folder", folder_name=folder.id, uids=len(uids) + ) return uids diff --git a/inbox/search/base.py b/inbox/search/base.py index 40661d29f..b3afc620b 100644 --- a/inbox/search/base.py +++ b/inbox/search/base.py @@ -15,14 +15,14 @@ def __init__(self, message, http_code, server_error=None): self.message = message self.http_code = http_code self.server_error = server_error - super(SearchBackendException, self).__init__( - message, http_code, server_error) + super(SearchBackendException, self).__init__(message, http_code, server_error) class SearchStoreException(Exception): """Raised if there's an error proxying the search request to the provider. 
This is a special EAS case where the Status code for the Store element has an error""" + def __init__(self, err_code): self.err_code = err_code super(SearchStoreException, self).__init__(err_code) diff --git a/inbox/security/blobstorage.py b/inbox/security/blobstorage.py index 38c1bea22..4ddff5181 100644 --- a/inbox/security/blobstorage.py +++ b/inbox/security/blobstorage.py @@ -22,19 +22,19 @@ def _pack_header(scheme): - return struct.pack('\r\n' - 'User-Agent: NylasMailer/{1}\r\n').format(our_uid, VERSION) + new_headers = ( + "X-INBOX-ID: {0}-0\r\n" + "Message-Id: <{0}-0@mailer.nylas.com>\r\n" + "User-Agent: NylasMailer/{1}\r\n" + ).format(our_uid, VERSION) new_body = new_headers + raw_mime with db_session.no_autoflush: - msg = Message.create_from_synced(account, '', '', - datetime.utcnow(), new_body) + msg = Message.create_from_synced(account, "", "", datetime.utcnow(), new_body) if msg.from_addr and len(msg.from_addr) > 1: raise InputError("from_addr field can have at most one item") if msg.reply_to and len(msg.reply_to) > 1: raise InputError("reply_to field can have at most one item") - if msg.subject is not None and not \ - isinstance(msg.subject, basestring): + if msg.subject is not None and not isinstance(msg.subject, basestring): raise InputError('"subject" should be a string') if not isinstance(msg.body, basestring): @@ -79,7 +84,8 @@ def create_draft_from_mime(account, raw_mime, db_session): subject=msg.subject, recentdate=msg.received_date, namespace=account.namespace, - subjectdate=msg.received_date) + subjectdate=msg.received_date, + ) msg.is_created = True msg.is_sent = True @@ -91,7 +97,7 @@ def create_draft_from_mime(account, raw_mime, db_session): def block_to_part(block, message, namespace): - inline_image_uri = r'cid:{}'.format(block.public_id) + inline_image_uri = r"cid:{}".format(block.public_id) is_inline = re.search(inline_image_uri, message.body) is not None # Create a new Part object to associate to the message object. # (You can't just set block.message, because if block is an @@ -100,7 +106,7 @@ def block_to_part(block, message, namespace): part = Part(block=block) part.content_id = block.public_id if is_inline else None part.namespace_id = namespace.id - part.content_disposition = 'inline' if is_inline else 'attachment' + part.content_disposition = "inline" if is_inline else "attachment" part.is_inboxapp_attachment = True return part @@ -112,33 +118,35 @@ def create_message_from_json(data, namespace, db_session, is_draft): # Validate the input and get referenced objects (thread, attachments) # as necessary. 
- to_addr = get_recipients(data.get('to'), 'to') - cc_addr = get_recipients(data.get('cc'), 'cc') - bcc_addr = get_recipients(data.get('bcc'), 'bcc') - from_addr = get_recipients(data.get('from'), 'from') - reply_to = get_recipients(data.get('reply_to'), 'reply_to') + to_addr = get_recipients(data.get("to"), "to") + cc_addr = get_recipients(data.get("cc"), "cc") + bcc_addr = get_recipients(data.get("bcc"), "bcc") + from_addr = get_recipients(data.get("from"), "from") + reply_to = get_recipients(data.get("reply_to"), "reply_to") if from_addr and len(from_addr) > 1: raise InputError("from_addr field can have at most one item") if reply_to and len(reply_to) > 1: raise InputError("reply_to field can have at most one item") - subject = data.get('subject') + subject = data.get("subject") if subject is not None and not isinstance(subject, basestring): raise InputError('"subject" should be a string') - body = data.get('body', '') + body = data.get("body", "") if not isinstance(body, basestring): raise InputError('"body" should be a string') - blocks = get_attachments(data.get('file_ids'), namespace.id, db_session) - reply_to_thread = get_thread(data.get('thread_id'), namespace.id, - db_session) - reply_to_message = get_message(data.get('reply_to_message_id'), - namespace.id, db_session) + blocks = get_attachments(data.get("file_ids"), namespace.id, db_session) + reply_to_thread = get_thread(data.get("thread_id"), namespace.id, db_session) + reply_to_message = get_message( + data.get("reply_to_message_id"), namespace.id, db_session + ) if reply_to_message is not None and reply_to_thread is not None: if reply_to_message not in reply_to_thread.messages: - raise InputError('Message {} is not in thread {}'. - format(reply_to_message.public_id, - reply_to_thread.public_id)) + raise InputError( + "Message {} is not in thread {}".format( + reply_to_message.public_id, reply_to_thread.public_id + ) + ) with db_session.no_autoflush: account = namespace.account @@ -157,14 +165,15 @@ def create_message_from_json(data, namespace, db_session, is_draft): subject = reply_to_message.subject elif reply_to_thread is not None: subject = reply_to_thread.subject - subject = subject or '' + subject = subject or "" message = Message() message.namespace = namespace message.is_created = True message.is_draft = is_draft - message.from_addr = from_addr if from_addr else \ - [(account.name, account.email_address)] + message.from_addr = ( + from_addr if from_addr else [(account.name, account.email_address)] + ) # TODO(emfree): we should maybe make received_date nullable, so its # value doesn't change in the case of a drafted-and-later-reconciled # message. 
@@ -219,21 +228,33 @@ def create_message_from_json(data, namespace, db_session, is_draft): subject=message.subject, recentdate=message.received_date, namespace=namespace, - subjectdate=message.received_date) + subjectdate=message.received_date, + ) message.thread = thread db_session.add(message) if is_draft: - schedule_action('save_draft', message, namespace.id, db_session, - version=message.version) + schedule_action( + "save_draft", message, namespace.id, db_session, version=message.version + ) db_session.flush() return message -def update_draft(db_session, account, draft, to_addr=None, - subject=None, body=None, blocks=None, cc_addr=None, - bcc_addr=None, from_addr=None, reply_to=None): +def update_draft( + db_session, + account, + draft, + to_addr=None, + subject=None, + body=None, + blocks=None, + cc_addr=None, + bcc_addr=None, + from_addr=None, + reply_to=None, +): """ Update draft with new attributes. """ @@ -242,25 +263,23 @@ def update(attr, value=None): if value is not None: setattr(draft, attr, value) - if attr == 'body': + if attr == "body": # Update size, snippet too draft.size = len(value) - draft.snippet = draft.calculate_html_snippet( - value) - - update('to_addr', to_addr) - update('cc_addr', cc_addr) - update('bcc_addr', bcc_addr) - update('reply_to', reply_to) - update('from_addr', from_addr) - update('subject', subject if subject else None) - update('body', body if body else None) - update('received_date', datetime.utcnow()) + draft.snippet = draft.calculate_html_snippet(value) + + update("to_addr", to_addr) + update("cc_addr", cc_addr) + update("bcc_addr", bcc_addr) + update("reply_to", reply_to) + update("from_addr", from_addr) + update("subject", subject if subject else None) + update("body", body if body else None) + update("received_date", datetime.utcnow()) # Remove any attachments that aren't specified new_block_ids = [b.id for b in blocks] - for part in filter(lambda x: x.block_id not in new_block_ids, - draft.parts): + for part in filter(lambda x: x.block_id not in new_block_ids, draft.parts): draft.parts.remove(part) db_session.delete(part) @@ -298,9 +317,14 @@ def update(attr, value=None): draft.regenerate_nylas_uid() # Sync to remote - schedule_action('update_draft', draft, draft.namespace.id, db_session, - version=draft.version, - old_message_id_header=old_message_id_header) + schedule_action( + "update_draft", + draft, + draft.namespace.id, + db_session, + version=draft.version, + old_message_id_header=old_message_id_header, + ) db_session.commit() return draft @@ -311,9 +335,14 @@ def delete_draft(db_session, account, draft): assert draft.is_draft # Delete remotely. 
- schedule_action('delete_draft', draft, draft.namespace.id, db_session, - nylas_uid=draft.nylas_uid, - message_id_header=draft.message_id_header) + schedule_action( + "delete_draft", + draft, + draft.namespace.id, + db_session, + nylas_uid=draft.nylas_uid, + message_id_header=draft.message_id_header, + ) db_session.delete(draft) @@ -327,19 +356,21 @@ def delete_draft(db_session, account, draft): def generate_attachments(message, blocks): attachment_dicts = [] for block in blocks: - content_disposition = 'attachment' + content_disposition = "attachment" for part in block.parts: - if part.message_id == message.id and part.content_disposition == 'inline': - content_disposition = 'inline' + if part.message_id == message.id and part.content_disposition == "inline": + content_disposition = "inline" break - attachment_dicts.append({ - 'block_id': block.public_id, - 'filename': block.filename, - 'data': block.data, - 'content_type': block.content_type, - 'content_disposition': content_disposition, - }) + attachment_dicts.append( + { + "block_id": block.public_id, + "filename": block.filename, + "data": block.data, + "content_type": block.content_type, + "content_disposition": content_disposition, + } + ) return attachment_dicts @@ -349,7 +380,8 @@ def _set_reply_headers(new_message, previous_message): if previous_message.message_id_header: new_message.in_reply_to = previous_message.message_id_header if previous_message.references: - new_message.references = (previous_message.references + - [previous_message.message_id_header]) + new_message.references = previous_message.references + [ + previous_message.message_id_header + ] else: new_message.references = [previous_message.message_id_header] diff --git a/inbox/sendmail/generic.py b/inbox/sendmail/generic.py index b8917396d..3e3727a5f 100644 --- a/inbox/sendmail/generic.py +++ b/inbox/sendmail/generic.py @@ -1,6 +1,6 @@ from inbox.sendmail.smtp.postel import SMTPClient -__all__ = ['SMTPClient'] +__all__ = ["SMTPClient"] -PROVIDER = 'generic' -SENDMAIL_CLS = 'SMTPClient' +PROVIDER = "generic" +SENDMAIL_CLS = "SMTPClient" diff --git a/inbox/sendmail/gmail.py b/inbox/sendmail/gmail.py index 3e495c588..53a2c07fb 100644 --- a/inbox/sendmail/gmail.py +++ b/inbox/sendmail/gmail.py @@ -1,6 +1,6 @@ from inbox.sendmail.smtp.postel import SMTPClient -__all__ = ['SMTPClient'] +__all__ = ["SMTPClient"] -PROVIDER = 'gmail' -SENDMAIL_CLS = 'SMTPClient' +PROVIDER = "gmail" +SENDMAIL_CLS = "SMTPClient" diff --git a/inbox/sendmail/message.py b/inbox/sendmail/message.py index 1732cc99c..a1613dc80 100644 --- a/inbox/sendmail/message.py +++ b/inbox/sendmail/message.py @@ -23,9 +23,9 @@ from flanker.addresslib.address import MAX_ADDRESS_LENGTH from html2text import html2text -VERSION = pkg_resources.get_distribution('inbox-sync').version +VERSION = pkg_resources.get_distribution("inbox-sync").version -REPLYSTR = 'Re: ' +REPLYSTR = "Re: " # Patch flanker to use base64 rather than quoted-printable encoding for @@ -36,33 +36,34 @@ # '=\r\n'. Their expectation seems to be technically correct, per RFC1521 # section 5.1. However, we opt to simply avoid this mess entirely. 
def fallback_to_base64(charset, preferred_encoding, body): - if charset in ('ascii', 'iso8859=1', 'us-ascii'): + if charset in ("ascii", "iso8859=1", "us-ascii"): if mime.message.part.has_long_lines(body): # In the original implementation, this was # return stronger_encoding(preferred_encoding, 'quoted-printable') - return mime.message.part.stronger_encoding(preferred_encoding, - 'base64') + return mime.message.part.stronger_encoding(preferred_encoding, "base64") else: return preferred_encoding else: - return mime.message.part.stronger_encoding(preferred_encoding, - 'base64') + return mime.message.part.stronger_encoding(preferred_encoding, "base64") + mime.message.part.choose_text_encoding = fallback_to_base64 -def create_email(from_name, - from_email, - reply_to, - nylas_uid, - to_addr, - cc_addr, - bcc_addr, - subject, - html, - in_reply_to, - references, - attachments): +def create_email( + from_name, + from_email, + reply_to, + nylas_uid, + to_addr, + cc_addr, + bcc_addr, + subject, + html, + in_reply_to, + references, + attachments, +): """ Creates a MIME email message (both body and sets the needed headers). @@ -90,19 +91,17 @@ def create_email(from_name, attachments: list of dicts, optional a list of dicts(filename, data, content_type, content_disposition) """ - html = html if html else '' + html = html if html else "" plaintext = html2text(html) # Create a multipart/alternative message - msg = mime.create.multipart('alternative') - msg.append( - mime.create.text('plain', plaintext), - mime.create.text('html', html)) + msg = mime.create.multipart("alternative") + msg.append(mime.create.text("plain", plaintext), mime.create.text("html", html)) # Create an outer multipart/mixed message if attachments: text_msg = msg - msg = mime.create.multipart('mixed') + msg = mime.create.multipart("mixed") # The first part is the multipart/alternative text part msg.append(text_msg) @@ -111,15 +110,16 @@ def create_email(from_name, for a in attachments: # Disposition should be inline if we add Content-ID attachment = mime.create.attachment( - a['content_type'], - a['data'], - filename=a['filename'], - disposition=a['content_disposition']) - if a['content_disposition'] == 'inline': - attachment.headers['Content-Id'] = '<{}>'.format(a['block_id']) + a["content_type"], + a["data"], + filename=a["filename"], + disposition=a["content_disposition"], + ) + if a["content_disposition"] == "inline": + attachment.headers["Content-Id"] = "<{}>".format(a["block_id"]) msg.append(attachment) - msg.headers['Subject'] = subject if subject else '' + msg.headers["Subject"] = subject if subject else "" # Gmail sets the From: header to the default sending account. We can # however set our own custom phrase i.e. the name that appears next to the @@ -127,33 +127,37 @@ def create_email(from_name, # specify which to send as), see: http://lee-phillips.org/gmailRewriting/ # For other providers, we simply use name = '' from_addr = address.EmailAddress(from_name, from_email) - msg.headers['From'] = from_addr.full_spec() + msg.headers["From"] = from_addr.full_spec() # Need to set these headers so recipients know we sent the email to them # TODO(emfree): should these really be unicode? 
if to_addr: - full_to_specs = [_get_full_spec_without_validation(name, spec) - for name, spec in to_addr] - msg.headers['To'] = u', '.join(full_to_specs) + full_to_specs = [ + _get_full_spec_without_validation(name, spec) for name, spec in to_addr + ] + msg.headers["To"] = u", ".join(full_to_specs) if cc_addr: - full_cc_specs = [_get_full_spec_without_validation(name, spec) - for name, spec in cc_addr] - msg.headers['Cc'] = u', '.join(full_cc_specs) + full_cc_specs = [ + _get_full_spec_without_validation(name, spec) for name, spec in cc_addr + ] + msg.headers["Cc"] = u", ".join(full_cc_specs) if bcc_addr: - full_bcc_specs = [_get_full_spec_without_validation(name, spec) - for name, spec in bcc_addr] - msg.headers['Bcc'] = u', '.join(full_bcc_specs) + full_bcc_specs = [ + _get_full_spec_without_validation(name, spec) for name, spec in bcc_addr + ] + msg.headers["Bcc"] = u", ".join(full_bcc_specs) if reply_to: # reply_to is only ever a list with one element - msg.headers['Reply-To'] = _get_full_spec_without_validation( - reply_to[0][0], reply_to[0][1]) + msg.headers["Reply-To"] = _get_full_spec_without_validation( + reply_to[0][0], reply_to[0][1] + ) add_nylas_headers(msg, nylas_uid) if in_reply_to: - msg.headers['In-Reply-To'] = in_reply_to + msg.headers["In-Reply-To"] = in_reply_to if references: - msg.headers['References'] = '\t'.join(references) + msg.headers["References"] = "\t".join(references) # Most ISPs set date automatically, but we need to set it here for those # which do not. The Date header is required and omitting it causes issues @@ -161,10 +165,10 @@ def create_email(from_name, # Set dates in UTC since we don't know the timezone of the sending user # +0000 means UTC, whereas -0000 means unsure of timezone utc_datetime = datetime.utcnow() - day = utc_datetime.strftime('%a') - date = utc_datetime.strftime('%d %b %Y %X') - date_header = '{day}, {date} +0000\r\n'.format(day=day, date=date) - msg.headers['Date'] = date_header + day = utc_datetime.strftime("%a") + date = utc_datetime.strftime("%d %b %Y %X") + date_header = "{day}, {date} +0000\r\n".format(day=day, date=date) + msg.headers["Date"] = date_header rfcmsg = _rfc_transform(msg) @@ -179,10 +183,11 @@ def _get_full_spec_without_validation(name, email): an invalid email address. 
""" if name: - encoded_name = smart_quote(encode_string( - None, name, maxlinelen=MAX_ADDRESS_LENGTH)) - return '{0} <{1}>'.format(encoded_name, email) - return u'{0}'.format(email) + encoded_name = smart_quote( + encode_string(None, name, maxlinelen=MAX_ADDRESS_LENGTH) + ) + return "{0} <{1}>".format(encoded_name, email) + return u"{0}".format(email) def add_nylas_headers(msg, nylas_uid): @@ -202,14 +207,14 @@ def add_nylas_headers(msg, nylas_uid): """ # Set our own custom header for tracking in `Sent Mail` folder - msg.headers['X-INBOX-ID'] = nylas_uid - msg.headers['Message-Id'] = generate_message_id_header(nylas_uid) + msg.headers["X-INBOX-ID"] = nylas_uid + msg.headers["Message-Id"] = generate_message_id_header(nylas_uid) # Potentially also use `X-Mailer` - msg.headers['User-Agent'] = 'NylasMailer/{0}'.format(VERSION) + msg.headers["User-Agent"] = "NylasMailer/{0}".format(VERSION) def generate_message_id_header(uid): - return '<{}@mailer.nylas.com>'.format(uid) + return "<{}@mailer.nylas.com>".format(uid) def _rfc_transform(msg): @@ -222,17 +227,16 @@ def _rfc_transform(msg): """ msgstring = msg.to_string() - start = msgstring.find('References: ') + start = msgstring.find("References: ") if start == -1: return msgstring - end = msgstring.find('\r\n', start + len('References: ')) + end = msgstring.find("\r\n", start + len("References: ")) substring = msgstring[start:end] - separator = '\n\t' - rfcmsg = msgstring[:start] + substring.replace('\t', separator) +\ - msgstring[end:] + separator = "\n\t" + rfcmsg = msgstring[:start] + substring.replace("\t", separator) + msgstring[end:] return rfcmsg diff --git a/inbox/sendmail/smtp/postel.py b/inbox/sendmail/smtp/postel.py index 4e272effd..203e24e0e 100644 --- a/inbox/sendmail/smtp/postel.py +++ b/inbox/sendmail/smtp/postel.py @@ -7,6 +7,7 @@ import smtplib from nylas.logging import get_logger + log = get_logger() from inbox.models.session import session_scope from inbox.models.backends.imap import ImapAccount @@ -21,8 +22,7 @@ from util import SMTP_ERRORS # TODO[k]: Other types (LOGIN, XOAUTH, PLAIN-CLIENTTOKEN, CRAM-MD5) -AUTH_EXTNS = {'oauth2': 'XOAUTH2', - 'password': 'PLAIN'} +AUTH_EXTNS = {"oauth2": "XOAUTH2", "password": "PLAIN"} SMTP_MAX_RETRIES = 1 # Timeout in seconds for blocking operations. 
If no timeout is specified, @@ -40,14 +40,13 @@ class _TokenManagerWrapper: - def get_token(self, account, force_refresh=False): - if account.provider == 'gmail': + if account.provider == "gmail": return g_token_manager.get_token_for_email( - account, force_refresh=force_refresh) + account, force_refresh=force_refresh + ) else: - return default_token_manager.get_token( - account, force_refresh=force_refresh) + return default_token_manager.get_token(account, force_refresh=force_refresh) token_manager = _TokenManagerWrapper() @@ -76,7 +75,7 @@ def rset(self): try: smtplib.SMTP_SSL.rset(self) except smtplib.SMTPServerDisconnected: - log.warning('Server disconnect during SMTP rset', exc_info=True) + log.warning("Server disconnect during SMTP rset", exc_info=True) class SMTP(smtplib.SMTP): @@ -102,7 +101,7 @@ def rset(self): try: smtplib.SMTP.rset(self) except smtplib.SMTPServerDisconnected: - log.warning('Server disconnect during SMTP rset', exc_info=True) + log.warning("Server disconnect during SMTP rset", exc_info=True) def _transform_ssl_error(strerror): @@ -112,9 +111,9 @@ def _transform_ssl_error(strerror): """ if strerror is None: - return 'Unknown connection error' - elif strerror.endswith('certificate verify failed'): - return 'SMTP server SSL certificate verify failed' + return "Unknown connection error" + elif strerror.endswith("certificate verify failed"): + return "SMTP server SSL certificate verify failed" else: return strerror @@ -123,15 +122,22 @@ def _substitute_bcc(raw_message): """ Substitute BCC in raw message. """ - bcc_regexp = re.compile(r'^Bcc: [^\r\n]*\r\n', - re.IGNORECASE | re.MULTILINE) - return bcc_regexp.sub('', raw_message) + bcc_regexp = re.compile(r"^Bcc: [^\r\n]*\r\n", re.IGNORECASE | re.MULTILINE) + return bcc_regexp.sub("", raw_message) class SMTPConnection(object): - - def __init__(self, account_id, email_address, smtp_username, - auth_type, auth_token, smtp_endpoint, ssl_required, log): + def __init__( + self, + account_id, + email_address, + smtp_username, + auth_type, + auth_token, + smtp_endpoint, + ssl_required, + log, + ): self.account_id = account_id self.email_address = email_address self.smtp_username = smtp_username @@ -141,8 +147,10 @@ def __init__(self, account_id, email_address, smtp_username, self.ssl_required = ssl_required self.log = log self.log.bind(account_id=self.account_id) - self.auth_handlers = {'oauth2': self.smtp_oauth2, - 'password': self.smtp_password} + self.auth_handlers = { + "oauth2": self.smtp_oauth2, + "password": self.smtp_password, + } self.setup() def __enter__(self): @@ -160,8 +168,7 @@ def _connect(self, host, port): self.connection.connect(host, port) except socket.error as e: # 'Connection refused', SSL errors for non-TLS connections, etc. - log.error('SMTP connection error', exc_info=True, - server_error=e.strerror) + log.error("SMTP connection error", exc_info=True, server_error=e.strerror) msg = _transform_ssl_error(e.strerror) raise SendMailException(msg, 503) @@ -192,50 +199,56 @@ def _upgrade_connection(self): # to determine whether to fail or continue with plaintext # authentication. 
self.connection.ehlo() - if self.connection.has_extn('starttls'): + if self.connection.has_extn("starttls"): try: self.connection.starttls() except ssl.SSLError as e: if not self.ssl_required: - log.warning('STARTTLS supported but failed for SSL NOT ' - 'required authentication', exc_info=True) + log.warning( + "STARTTLS supported but failed for SSL NOT " + "required authentication", + exc_info=True, + ) else: msg = _transform_ssl_error(e.strerror) raise SendMailException(msg, 503) elif self.ssl_required: - raise SendMailException('Required SMTP STARTTLS not supported.', - 403) + raise SendMailException("Required SMTP STARTTLS not supported.", 403) # OAuth2 authentication def _smtp_oauth2_try_refresh(self): with session_scope(self.account_id) as db_session: account = db_session.query(ImapAccount).get(self.account_id) - self.auth_token = token_manager.get_token( - account, force_refresh=True) + self.auth_token = token_manager.get_token(account, force_refresh=True) def _try_xoauth2(self): - auth_string = 'user={0}\1auth=Bearer {1}\1\1'.\ - format(self.email_address, self.auth_token) - code, resp = self.connection.docmd('AUTH', 'XOAUTH2 {0}'.format( - base64.b64encode(auth_string))) + auth_string = "user={0}\1auth=Bearer {1}\1\1".format( + self.email_address, self.auth_token + ) + code, resp = self.connection.docmd( + "AUTH", "XOAUTH2 {0}".format(base64.b64encode(auth_string)) + ) if code == SMTP_AUTH_CHALLENGE: - log.error('Challenge in SMTP XOAUTH2 authentication', - response_code=code, response_line=resp) + log.error( + "Challenge in SMTP XOAUTH2 authentication", + response_code=code, + response_line=resp, + ) # Handle server challenge so that we can properly retry with the # connection. code, resp = self.connection.noop() if code != SMTP_AUTH_SUCCESS: - log.error('SMTP XOAUTH2 error response', - response_code=code, response_line=resp) + log.error( + "SMTP XOAUTH2 error response", response_code=code, response_line=resp + ) return code, resp def smtp_oauth2(self): code, resp = self._try_xoauth2() - if code in SMTP_TEMP_AUTH_FAIL_CODES and resp.startswith('4.7.0'): + if code in SMTP_TEMP_AUTH_FAIL_CODES and resp.startswith("4.7.0"): # If we're getting 'too many login attempt errors', tell the client # they are being rate-limited. - raise SendMailException('Temporary provider send throttling', - 429) + raise SendMailException("Temporary provider send throttling", 429) if code != SMTP_AUTH_SUCCESS: # If auth failed for any other reason, try to refresh the access @@ -244,8 +257,9 @@ def smtp_oauth2(self): code, resp = self._try_xoauth2() if code != SMTP_AUTH_SUCCESS: raise SendMailException( - 'Could not authenticate with the SMTP server.', 403) - self.log.info('SMTP Auth(OAuth2) success', account_id=self.account_id) + "Could not authenticate with the SMTP server.", 403 + ) + self.log.info("SMTP Auth(OAuth2) success", account_id=self.account_id) # Password authentication def smtp_password(self): @@ -254,29 +268,28 @@ def smtp_password(self): try: c.login(self.smtp_username, self.auth_token) except smtplib.SMTPAuthenticationError as e: - self.log.error('SMTP login refused', exc=e) - raise SendMailException( - 'Could not authenticate with the SMTP server.', 403) + self.log.error("SMTP login refused", exc=e) + raise SendMailException("Could not authenticate with the SMTP server.", 403) except smtplib.SMTPException as e: # Raised by smtplib if the server doesn't support the AUTH # extension or doesn't support any of the implemented mechanisms. # Shouldn't really happen normally. 
- self.log.error('SMTP auth failed due to unsupported mechanism', - exc=e) + self.log.error("SMTP auth failed due to unsupported mechanism", exc=e) raise SendMailException(str(e), 403) - self.log.info('SMTP Auth(Password) success') + self.log.info("SMTP Auth(Password) success") def sendmail(self, recipients, msg): try: - return self.connection.sendmail( - self.email_address, recipients, msg) + return self.connection.sendmail(self.email_address, recipients, msg) except UnicodeEncodeError: - self.log.error('Unicode error when trying to decode email', - logstash_tag='sendmail_encode_error', - account_id=self.account_id, recipients=recipients) - raise SendMailException( - 'Invalid character in recipient address', 402) + self.log.error( + "Unicode error when trying to decode email", + logstash_tag="sendmail_encode_error", + account_id=self.account_id, + recipients=recipients, + ) + raise SendMailException("Invalid character in recipient address", 402) class SMTPClient(object): @@ -297,16 +310,17 @@ def __init__(self, account): self.provider_name = account.provider self.sender_name = account.name self.smtp_endpoint = account.smtp_endpoint - self.auth_type = provider_info(self.provider_name)['auth'] + self.auth_type = provider_info(self.provider_name)["auth"] - if self.auth_type == 'oauth2': + if self.auth_type == "oauth2": try: self.auth_token = token_manager.get_token(account) except OAuthError: raise SendMailException( - 'Could not authenticate with the SMTP server.', 403) + "Could not authenticate with the SMTP server.", 403 + ) else: - assert self.auth_type == 'password' + assert self.auth_type == "password" if isinstance(account, GenericAccount): self.auth_token = account.smtp_password else: @@ -341,30 +355,31 @@ def _send(self, recipients, msg): # but at least one recipient got it. Don't retry; raise # exception so that we fail to client. raise SendMailException( - 'Sending to at least one recipent failed', + "Sending to at least one recipent failed", http_code=200, - failures=failures) + failures=failures, + ) except smtplib.SMTPException as err: - self.log.error('Error sending', error=err, exc_info=True) + self.log.error("Error sending", error=err, exc_info=True) - self.log.error('Max retries reached; failing to client', - error=err) + self.log.error("Max retries reached; failing to client", error=err) self._handle_sending_exception(err) def _handle_sending_exception(self, err): if isinstance(err, smtplib.SMTPServerDisconnected): raise SendMailException( - 'The server unexpectedly closed the connection', 503) + "The server unexpectedly closed the connection", 503 + ) elif isinstance(err, smtplib.SMTPRecipientsRefused): - raise SendMailException('Sending to all recipients failed', 402) + raise SendMailException("Sending to all recipients failed", 402) elif isinstance(err, smtplib.SMTPResponseException): # Distinguish between permanent failures due to message # content or recipients, and temporary failures for other reasons. 
# In particular, see https://support.google.com/a/answer/3726730 - message = 'Sending failed' + message = "Sending failed" http_code = 503 if err.smtp_code in SMTP_ERRORS: @@ -375,16 +390,22 @@ def _handle_sending_exception(self, err): message = res[1] break - server_error = '{} : {}'.format(err.smtp_code, err.smtp_error) + server_error = "{} : {}".format(err.smtp_code, err.smtp_error) - self.log.error('Sending failed', message=message, - http_code=http_code, server_error=server_error) + self.log.error( + "Sending failed", + message=message, + http_code=http_code, + server_error=server_error, + ) - raise SendMailException(message, http_code=http_code, - server_error=server_error) + raise SendMailException( + message, http_code=http_code, server_error=server_error + ) else: - raise SendMailException('Sending failed', http_code=503, - server_error=str(err)) + raise SendMailException( + "Sending failed", http_code=503, server_error=str(err) + ) def send_generated_email(self, recipients, raw_message): # A tiny wrapper over _send because the API differs @@ -408,25 +429,27 @@ def send_custom(self, draft, body, recipients): blocks = [p.block for p in draft.attachments] attachments = generate_attachments(draft, blocks) from_addr = draft.from_addr[0] - msg = create_email(from_name=from_addr[0], - from_email=from_addr[1], - reply_to=draft.reply_to, - nylas_uid=draft.nylas_uid, - to_addr=draft.to_addr, - cc_addr=draft.cc_addr, - bcc_addr=None, - subject=draft.subject, - html=body, - in_reply_to=draft.in_reply_to, - references=draft.references, - attachments=attachments) + msg = create_email( + from_name=from_addr[0], + from_email=from_addr[1], + reply_to=draft.reply_to, + nylas_uid=draft.nylas_uid, + to_addr=draft.to_addr, + cc_addr=draft.cc_addr, + bcc_addr=None, + subject=draft.subject, + html=body, + in_reply_to=draft.in_reply_to, + references=draft.references, + attachments=attachments, + ) recipient_emails = [email for name, email in recipients] self._send(recipient_emails, msg) # Sent successfully - self.log.info('Sending successful', draft_id=draft.id) + self.log.info("Sending successful", draft_id=draft.id) def send(self, draft): """ @@ -458,29 +481,37 @@ def send(self, draft): # from_addr is only ever a list with one element from_addr = draft.from_addr[0] - msg = create_email(from_name=from_addr[0], - from_email=from_addr[1], - reply_to=draft.reply_to, - nylas_uid=draft.nylas_uid, - to_addr=draft.to_addr, - cc_addr=draft.cc_addr, - bcc_addr=None, - subject=draft.subject, - html=draft.body, - in_reply_to=draft.in_reply_to, - references=draft.references, - attachments=attachments) - - recipient_emails = [email for name, email in itertools.chain( - draft.to_addr, draft.cc_addr, draft.bcc_addr)] + msg = create_email( + from_name=from_addr[0], + from_email=from_addr[1], + reply_to=draft.reply_to, + nylas_uid=draft.nylas_uid, + to_addr=draft.to_addr, + cc_addr=draft.cc_addr, + bcc_addr=None, + subject=draft.subject, + html=draft.body, + in_reply_to=draft.in_reply_to, + references=draft.references, + attachments=attachments, + ) + + recipient_emails = [ + email + for name, email in itertools.chain( + draft.to_addr, draft.cc_addr, draft.bcc_addr + ) + ] self._send(recipient_emails, msg) # Sent to all successfully - self.log.info('Sending successful', draft_id=draft.id) + self.log.info("Sending successful", draft_id=draft.id) def send_raw(self, msg): - recipient_emails = [email for name, email in itertools.chain( - msg.bcc_addr, msg.cc_addr, msg.to_addr)] + recipient_emails = [ + email + for name, 
email in itertools.chain(msg.bcc_addr, msg.cc_addr, msg.to_addr) + ] raw_message = get_from_blockstore(msg.data_sha256) mime_body = _substitute_bcc(raw_message) @@ -488,16 +519,19 @@ def send_raw(self, msg): # Sent to all successfully sender_email = msg.from_addr[0][1] - self.log.info('Sending successful', sender=sender_email, - recipients=recipient_emails) + self.log.info( + "Sending successful", sender=sender_email, recipients=recipient_emails + ) def _get_connection(self): - smtp_connection = SMTPConnection(account_id=self.account_id, - email_address=self.email_address, - smtp_username=self.smtp_username, - auth_type=self.auth_type, - auth_token=self.auth_token, - smtp_endpoint=self.smtp_endpoint, - ssl_required=self.ssl_required, - log=self.log) + smtp_connection = SMTPConnection( + account_id=self.account_id, + email_address=self.email_address, + smtp_username=self.smtp_username, + auth_type=self.auth_type, + auth_token=self.auth_token, + smtp_endpoint=self.smtp_endpoint, + ssl_required=self.ssl_required, + log=self.log, + ) return smtp_connection diff --git a/inbox/sendmail/smtp/util.py b/inbox/sendmail/smtp/util.py index 5da210818..a49ff5d98 100644 --- a/inbox/sendmail/smtp/util.py +++ b/inbox/sendmail/smtp/util.py @@ -1,16 +1,22 @@ SMTP_ERRORS = { 421: { "4.4.5": (503, "Server busy, try again later."), - "4.7.0": (429, "Our system has detected an unusual rate of unsolicited " - "mail originating from your IP address. To protect our " - "users from spam, mail sent from your IP address has " - "been temporarily blocked."), + "4.7.0": ( + 429, + "Our system has detected an unusual rate of unsolicited " + "mail originating from your IP address. To protect our " + "users from spam, mail sent from your IP address has " + "been temporarily blocked.", + ), "4.7.2": (429, "Server busy, try again later."), }, 450: { - "4.2.1": (429, "The user you are trying to contact is receiving mail " - "at a rate that prevents additional messages from being" - " delivered. Please resend your message at a later time."), + "4.2.1": ( + 429, + "The user you are trying to contact is receiving mail " + "at a rate that prevents additional messages from being" + " delivered. Please resend your message at a later time.", + ), "4.3.0": (429, "Mail server temporarily rejected message."), "4.7.1": (429, "Mail server temporarily rejected message."), }, @@ -18,41 +24,46 @@ "4.3.5": (429, "Mail server temporarily rejected message."), "4.7.1": (429, "Mail server temporarily rejected message."), }, - 452: { - "4.5.3": (402, "Your message has too many recipients"), - }, + 452: {"4.5.3": (402, "Your message has too many recipients"),}, 454: { - "4.7.0": (429, "Cannot authenticate due to temporary system problem. " - "Try again later.") - }, - 522: { - "5.7.1": (402, "Recipient address rejected."), - }, - 530: { - "5.7.0": (402, "Recipient address rejected"), + "4.7.0": ( + 429, + "Cannot authenticate due to temporary system problem. " "Try again later.", + ) }, + 522: {"5.7.1": (402, "Recipient address rejected."),}, + 530: {"5.7.0": (402, "Recipient address rejected"),}, 535: { - "5.7.1": (429, "Please log in to Gmail with your web browser and " - "try again."), + "5.7.1": ( + 429, + "Please log in to Gmail with your web browser and " "try again.", + ), }, 550: { - "5.1.1": (402, "The email account that you tried to reach does not " - "exist. 
Please try double-checking the recipient's " - "email address for typos or unnecessary spaces."), + "5.1.1": ( + 402, + "The email account that you tried to reach does not " + "exist. Please try double-checking the recipient's " + "email address for typos or unnecessary spaces.", + ), "5.2.1": (402, "The email account that you tried to reach is disabled."), "5.3.2": (429, "Server busy, try again later."), "5.4.5": (429, "Daily sending quota exceeded"), "5.4.6": (429, "Mail server temporarily rejected message."), "5.7.0": (402, "Mail relay denied."), "5.7.1": (429, "Daily sending quota exceeded"), - "This message was classified as SPAM and may not be delivered.": ( - 402, "Message blocked due to spam content in the message."), + 402, + "Message blocked due to spam content in the message.", + ), "exceeded recipient rate limit": ( - 429, "Daily email quota for this address exceeded."), + 429, + "Daily email quota for this address exceeded.", + ), "has exceeded its 24-hour sending limit.": ( - 429, "Daily email quota for this address exceeded.") - + 429, + "Daily email quota for this address exceeded.", + ), }, 552: { "5.2.3": (402, "Message too large"), @@ -61,14 +72,17 @@ "5.7.1": (402, "Message content rejected for security reasons"), }, 553: { - "5.1.2": (402, "Unable to find recipient domain. Please check for any " - "spelling errors, and make sure you didn't enter any " - "spaces, periods, or other punctuation after the " - "recipient's email address."), + "5.1.2": ( + 402, + "Unable to find recipient domain. Please check for any " + "spelling errors, and make sure you didn't enter any " + "spaces, periods, or other punctuation after the " + "recipient's email address.", + ), "5.7.1": (402, "Sender address rejected"), }, 554: { "5.6.0": (402, "Mail message is malformed. Not accepted."), "5.7.1": (402, "Message blocked due to spam content in the message."), - } + }, } diff --git a/inbox/sqlalchemy_ext/util.py b/inbox/sqlalchemy_ext/util.py index bbf82af5f..247e1bf89 100644 --- a/inbox/sqlalchemy_ext/util.py +++ b/inbox/sqlalchemy_ext/util.py @@ -5,6 +5,7 @@ import weakref from bson import json_util, EPOCH_NAIVE + # Monkeypatch to not include tz_info in decoded JSON. # Kind of a ridiculous solution, but works. 
json_util.EPOCH_AWARE = EPOCH_NAIVE @@ -21,6 +22,7 @@ from inbox.util.encoding import base36encode, base36decode from nylas.logging import get_logger + log = get_logger() @@ -50,27 +52,28 @@ def disabled_dubiously_many_queries_warning(): @event.listens_for(Engine, "before_cursor_execute") -def before_cursor_execute(conn, cursor, statement, - parameters, context, executemany): +def before_cursor_execute(conn, cursor, statement, parameters, context, executemany): if conn not in query_counts: query_counts[conn] = 1 else: query_counts[conn] += 1 -@event.listens_for(Engine, 'commit') +@event.listens_for(Engine, "commit") def before_commit(conn): if not should_log_dubiously_many_queries: return if query_counts.get(conn, 0) > MAX_SANE_QUERIES_PER_SESSION: - log.warning('Dubiously many queries per session!', - query_count=query_counts.get(conn)) + log.warning( + "Dubiously many queries per session!", query_count=query_counts.get(conn) + ) class SQLAlchemyCompatibleAbstractMetaClass(DeclarativeMeta, abc.ABCMeta): """Declarative model classes that *also* inherit from an abstract base class need a metaclass like this one, in order to prevent metaclass conflict errors.""" + pass @@ -78,6 +81,7 @@ class ABCMixin(object): """Use this if you want a mixin class which is actually an abstract base class, for example in order to enforce that concrete subclasses define particular methods or properties.""" + __metaclass__ = SQLAlchemyCompatibleAbstractMetaClass __abstract__ = True @@ -98,14 +102,15 @@ class StringWithTransform(TypeDecorator): the transform applied, you must manually apply it using a custom property setter or a @validates decorator """ + impl = String def __init__(self, string_transform, *args, **kwargs): super(StringWithTransform, self).__init__(*args, **kwargs) if string_transform is None: - raise ValueError('Must provide a string_transform') - if not hasattr(string_transform, '__call__'): - raise TypeError('`string_transform` must be callable') + raise ValueError("Must provide a string_transform") + if not hasattr(string_transform, "__call__"): + raise TypeError("`string_transform` must be callable") self._string_transform = string_transform def process_bind_param(self, value, dialect): @@ -138,7 +143,7 @@ def process_result_value(self, value, dialect): try: return json_util.loads(value) except ValueError: - log.error('ValueError on decoding JSON', value=value) + log.error("ValueError on decoding JSON", value=value) def json_field_too_long(value): @@ -171,7 +176,6 @@ def process_result_value(self, value, dialect): # dumps() return standard Python dicts like the json.* equivalents # (because these are simply called under the hood) class MutableDict(Mutable, dict): - @classmethod def coerce(cls, key, value): """ Convert plain dictionaries to MutableDict. 
""" @@ -207,7 +211,6 @@ def __setstate__(self, state): class MutableList(Mutable, list): - @classmethod def coerce(cls, key, value): """Convert plain list to MutableList""" @@ -265,7 +268,7 @@ def int128_to_b36(int128): if not int128: return None assert len(int128) == 16, "should be 16 bytes (128 bits)" - a, b = struct.unpack('>QQ', int128) # uuid() is big-endian + a, b = struct.unpack(">QQ", int128) # uuid() is big-endian pub_id = (a << 64) | b return base36encode(pub_id).lower() @@ -276,10 +279,7 @@ def b36_to_bin(b36_string): """ int128 = base36decode(b36_string) MAX_INT64 = 0xFFFFFFFFFFFFFFFF - return struct.pack( - '>QQ', - (int128 >> 64) & MAX_INT64, - int128 & MAX_INT64) + return struct.pack(">QQ", (int128 >> 64) & MAX_INT64, int128 & MAX_INT64) def generate_public_id(): @@ -299,12 +299,13 @@ def generate_public_id(): # Without this, MySQL will silently insert invalid values in the database if # not running with sql-mode=traditional. class ForceStrictMode(PoolListener): - def connect(self, dbapi_con, connection_record): cur = dbapi_con.cursor() - cur.execute("SET SESSION sql_mode='STRICT_TRANS_TABLES,STRICT_ALL_TABLES," - "NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO," - "NO_ENGINE_SUBSTITUTION'") + cur.execute( + "SET SESSION sql_mode='STRICT_TRANS_TABLES,STRICT_ALL_TABLES," + "NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO," + "NO_ENGINE_SUBSTITUTION'" + ) cur = None @@ -331,8 +332,7 @@ def safer_yield_per(query, id_field, start_id, count): """ cur_id = start_id while True: - results = query.filter(id_field >= cur_id).order_by(id_field).\ - limit(count).all() + results = query.filter(id_field >= cur_id).order_by(id_field).limit(count).all() if not results: return for result in results: diff --git a/inbox/sync/base_sync.py b/inbox/sync/base_sync.py index d20e07908..3676347f8 100644 --- a/inbox/sync/base_sync.py +++ b/inbox/sync/base_sync.py @@ -1,6 +1,7 @@ from gevent import event, Greenlet, sleep from nylas.logging import get_logger + logger = get_logger() from inbox.util.concurrency import retry_with_logging @@ -24,9 +25,17 @@ class BaseSyncMonitor(Greenlet): """ - def __init__(self, account_id, namespace_id, email_address, folder_id, - folder_name, provider_name, poll_frequency=1, - scope=None): + def __init__( + self, + account_id, + namespace_id, + email_address, + folder_id, + folder_name, + provider_name, + poll_frequency=1, + scope=None, + ): self.account_id = account_id self.namespace_id = namespace_id @@ -37,11 +46,9 @@ def __init__(self, account_id, namespace_id, email_address, folder_id, self.log = logger.new(account_id=account_id) self.shutdown = event.Event() - self.heartbeat_status = HeartbeatStatusProxy(self.account_id, - folder_id, - folder_name, - email_address, - provider_name) + self.heartbeat_status = HeartbeatStatusProxy( + self.account_id, folder_id, folder_name, email_address, provider_name + ) Greenlet.__init__(self) def _run(self): @@ -49,13 +56,20 @@ def _run(self): self.log = self.log.new(account_id=self.account_id) try: while True: - retry_with_logging(self._run_impl, account_id=self.account_id, - fail_classes=[ValidationError], - provider=self.provider_name, logger=self.log) + retry_with_logging( + self._run_impl, + account_id=self.account_id, + fail_classes=[ValidationError], + provider=self.provider_name, + logger=self.log, + ) except ValidationError: # Bad account credentials; exit. 
- self.log.error('Credential validation error; exiting', - exc_info=True, logstash_tag='mark_invalid') + self.log.error( + "Credential validation error; exiting", + exc_info=True, + logstash_tag="mark_invalid", + ) with session_scope(self.namespace_id) as db_session: account = db_session.query(Account).get(self.account_id) account.mark_invalid(scope=self.scope) @@ -63,12 +77,12 @@ def _run(self): def _run_impl(self): try: self.sync() - self.heartbeat_status.publish(state='poll') + self.heartbeat_status.publish(state="poll") # If we get a connection or API permissions error, then sleep # 2x poll frequency. except ConnectionError: - self.log.error('Error while polling', exc_info=True) + self.log.error("Error while polling", exc_info=True) sleep(self.poll_frequency) sleep(self.poll_frequency) diff --git a/inbox/test/__init__.py b/inbox/test/__init__.py index f5d7f9802..5fd0c57dd 100644 --- a/inbox/test/__init__.py +++ b/inbox/test/__init__.py @@ -1,3 +1,4 @@ # Allow out-of-tree submodules. from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) diff --git a/inbox/test/api/base.py b/inbox/test/api/base.py index 4ebf56b71..900b8ed63 100644 --- a/inbox/test/api/base.py +++ b/inbox/test/api/base.py @@ -5,7 +5,8 @@ def new_api_client(db, namespace): from inbox.api.srv import app - app.config['TESTING'] = True + + app.config["TESTING"] = True with app.test_client() as c: return TestAPIClient(c, namespace.public_id) @@ -13,7 +14,8 @@ def new_api_client(db, namespace): @yield_fixture def api_client(db, default_namespace): from inbox.api.srv import app - app.config['TESTING'] = True + + app.config["TESTING"] = True with app.test_client() as c: yield TestAPIClient(c, default_namespace.public_id) @@ -29,9 +31,8 @@ class TestAPIClient(object): def __init__(self, test_client, default_namespace_public_id): self.client = test_client - credential = '{}:'.format(default_namespace_public_id) - self.auth_header = {'Authorization': 'Basic {}' - .format(b64encode(credential))} + credential = "{}:".format(default_namespace_public_id) + self.auth_header = {"Authorization": "Basic {}".format(b64encode(credential))} def get_raw(self, path, headers={}): headers.update(self.auth_header) @@ -51,10 +52,8 @@ def post_raw(self, path, data, headers={}): def put_data(self, path, data, headers={}): headers.update(self.auth_header) - return self.client.put(path, headers=headers, - data=json.dumps(data)) + return self.client.put(path, headers=headers, data=json.dumps(data)) def delete(self, path, data=None, headers={}): headers.update(self.auth_header) - return self.client.delete(path, headers=headers, - data=json.dumps(data)) + return self.client.delete(path, headers=headers, data=json.dumps(data)) diff --git a/inbox/test/api/test_account.py b/inbox/test/api/test_account.py index af6794253..4eafada91 100644 --- a/inbox/test/api/test_account.py +++ b/inbox/test/api/test_account.py @@ -1,70 +1,80 @@ # flake8: noqa: F811 -from inbox.test.util.base import (generic_account, gmail_account, db, - add_fake_yahoo_account) +from inbox.test.util.base import ( + generic_account, + gmail_account, + db, + add_fake_yahoo_account, +) from inbox.test.api.base import api_client, new_api_client -__all__ = ['db', 'api_client', 'generic_account', 'gmail_account'] +__all__ = ["db", "api_client", "generic_account", "gmail_account"] def test_account(db, api_client, generic_account, gmail_account): # Because we're using the generic_account namespace api_client = new_api_client(db, generic_account.namespace) - resp_data = 
api_client.get_data('/account') + resp_data = api_client.get_data("/account") - assert resp_data['id'] == generic_account.namespace.public_id - assert resp_data['object'] == 'account' - assert resp_data['account_id'] == generic_account.namespace.public_id - assert resp_data['email_address'] == generic_account.email_address - assert resp_data['name'] == generic_account.name - assert resp_data['organization_unit'] == 'folder' - assert 'sync_state' in resp_data - assert 'server_settings' not in resp_data + assert resp_data["id"] == generic_account.namespace.public_id + assert resp_data["object"] == "account" + assert resp_data["account_id"] == generic_account.namespace.public_id + assert resp_data["email_address"] == generic_account.email_address + assert resp_data["name"] == generic_account.name + assert resp_data["organization_unit"] == "folder" + assert "sync_state" in resp_data + assert "server_settings" not in resp_data # Because we're using the gmail account namespace api_client = new_api_client(db, gmail_account.namespace) - resp_data = api_client.get_data('/account') + resp_data = api_client.get_data("/account") - assert resp_data['id'] == gmail_account.namespace.public_id - assert resp_data['provider'] == 'gmail' - assert resp_data['organization_unit'] == 'label' - assert 'sync_state' in resp_data - assert 'server_settings' not in resp_data + assert resp_data["id"] == gmail_account.namespace.public_id + assert resp_data["provider"] == "gmail" + assert resp_data["organization_unit"] == "label" + assert "sync_state" in resp_data + assert "server_settings" not in resp_data def test_account_expanded(db, api_client, generic_account, gmail_account): # Generic accounts expose a `server_settings` attribute # Custom IMAP api_client = new_api_client(db, generic_account.namespace) - resp_data = api_client.get_data('/account/?view=expanded') - assert resp_data['provider'] == 'custom' - assert 'server_settings' in resp_data - assert set(resp_data['server_settings']) == set({ - 'imap_host': 'imap.custom.com', - 'smtp_host': 'smtp.custom.com', - 'imap_port': 993, - 'smtp_port': 587, - 'ssl_required': True}) + resp_data = api_client.get_data("/account/?view=expanded") + assert resp_data["provider"] == "custom" + assert "server_settings" in resp_data + assert set(resp_data["server_settings"]) == set( + { + "imap_host": "imap.custom.com", + "smtp_host": "smtp.custom.com", + "imap_port": 993, + "smtp_port": 587, + "ssl_required": True, + } + ) # Yahoo yahoo_account = add_fake_yahoo_account(db.session) api_client = new_api_client(db, yahoo_account.namespace) - resp_data = api_client.get_data('/account/?view=expanded') - assert resp_data['provider'] == 'yahoo' - assert 'server_settings' in resp_data - assert set(resp_data['server_settings']) == set({ - 'imap_host': 'imap.mail.yahoo.com', - 'smtp_host': 'smtp.mail.yahoo.com', - 'imap_port': 993, - 'smtp_port': 587, - 'ssl_required': True}) + resp_data = api_client.get_data("/account/?view=expanded") + assert resp_data["provider"] == "yahoo" + assert "server_settings" in resp_data + assert set(resp_data["server_settings"]) == set( + { + "imap_host": "imap.mail.yahoo.com", + "smtp_host": "smtp.mail.yahoo.com", + "imap_port": 993, + "smtp_port": 587, + "ssl_required": True, + } + ) # Gmail accounts don't expose a `server_settings` attribute api_client = new_api_client(db, gmail_account.namespace) - resp_data = api_client.get_data('/account/?view=expanded') - assert resp_data['provider'] == 'gmail' - assert 'server_settings' not in resp_data + resp_data = 
api_client.get_data("/account/?view=expanded") + assert resp_data["provider"] == "gmail" + assert "server_settings" not in resp_data def test_account_repr_for_new_account(db): @@ -76,15 +86,15 @@ def test_account_repr_for_new_account(db): # However the API-returned account object has `sync_state=running` # so API clients can do the right thing. api_client = new_api_client(db, account.namespace) - resp_data = api_client.get_data('/account') - assert resp_data['id'] == account.namespace.public_id - assert resp_data['sync_state'] == 'running' + resp_data = api_client.get_data("/account") + assert resp_data["id"] == account.namespace.public_id + assert resp_data["sync_state"] == "running" # Verify other sync_states are not masked. - account.sync_state = 'invalid' + account.sync_state = "invalid" db.session.commit() api_client = new_api_client(db, account.namespace) - resp_data = api_client.get_data('/account') - assert resp_data['id'] == account.namespace.public_id - assert resp_data['sync_state'] == 'invalid' + resp_data = api_client.get_data("/account") + assert resp_data["id"] == account.namespace.public_id + assert resp_data["sync_state"] == "invalid" diff --git a/inbox/test/api/test_auth.py b/inbox/test/api/test_auth.py index fa7ecea4f..8866ae3db 100644 --- a/inbox/test/api/test_auth.py +++ b/inbox/test/api/test_auth.py @@ -12,49 +12,49 @@ def test_no_auth(db, generic_account): # noqa api_client = new_api_client(db, generic_account.namespace) api_client.auth_header = {} - response = api_client.get_raw('/account') + response = api_client.get_raw("/account") assert response.status_code == 401 def test_basic_auth(db, generic_account): # noqa api_client = new_api_client(db, generic_account.namespace) - response = api_client.get_raw('/account') + response = api_client.get_raw("/account") assert response.status_code == 200 resp_data = json.loads(response.data) - assert resp_data['id'] == generic_account.namespace.public_id + assert resp_data["id"] == generic_account.namespace.public_id def test_bearer_token_auth(db, generic_account): # noqa api_client = new_api_client(db, generic_account.namespace) api_client.auth_header = { - 'Authorization': 'Bearer {}' - .format(generic_account.namespace.public_id)} + "Authorization": "Bearer {}".format(generic_account.namespace.public_id) + } - response = api_client.get_raw('/account') + response = api_client.get_raw("/account") assert response.status_code == 200 resp_data = json.loads(response.data) - assert resp_data['id'] == generic_account.namespace.public_id + assert resp_data["id"] == generic_account.namespace.public_id -BAD_TOKEN = '1234567890abcdefg' +BAD_TOKEN = "1234567890abcdefg" def test_invalid_basic_auth(db, generic_account): # noqa api_client = new_api_client(db, generic_account.namespace) - api_client.auth_header = {'Authorization': 'Basic {}' - .format(b64encode(BAD_TOKEN + ':'))} + api_client.auth_header = { + "Authorization": "Basic {}".format(b64encode(BAD_TOKEN + ":")) + } - response = api_client.get_raw('/account') + response = api_client.get_raw("/account") assert response.status_code == 401 def test_invalid_bearer_token_auth(db, generic_account): # noqa api_client = new_api_client(db, generic_account.namespace) - api_client.auth_header = { - 'Authorization': 'Bearer {}'.format(BAD_TOKEN)} + api_client.auth_header = {"Authorization": "Bearer {}".format(BAD_TOKEN)} - response = api_client.get_raw('/account') + response = api_client.get_raw("/account") assert response.status_code == 401 diff --git a/inbox/test/api/test_calendars.py 
b/inbox/test/api/test_calendars.py index 5ae33a1eb..fc8b8e418 100644 --- a/inbox/test/api/test_calendars.py +++ b/inbox/test/api/test_calendars.py @@ -4,91 +4,104 @@ from inbox.test.util.base import db, default_namespace -__all__ = ['api_client', 'db', 'default_namespace'] +__all__ = ["api_client", "db", "default_namespace"] def test_get_calendar(db, default_namespace, api_client): cal = Calendar( namespace_id=default_namespace.id, - uid='uid', - provider_name='WTF', - name='Holidays') + uid="uid", + provider_name="WTF", + name="Holidays", + ) db.session.add(cal) db.session.commit() cal_id = cal.public_id - calendar_item = api_client.get_data('/calendars/{}'.format(cal_id)) + calendar_item = api_client.get_data("/calendars/{}".format(cal_id)) - assert calendar_item['account_id'] == default_namespace.public_id - assert calendar_item['name'] == 'Holidays' - assert calendar_item['description'] is None - assert calendar_item['read_only'] is False - assert calendar_item['object'] == 'calendar' + assert calendar_item["account_id"] == default_namespace.public_id + assert calendar_item["name"] == "Holidays" + assert calendar_item["description"] is None + assert calendar_item["read_only"] is False + assert calendar_item["object"] == "calendar" def test_handle_not_found_calendar(api_client): - resp_data = api_client.get_raw('/calendars/foo') + resp_data = api_client.get_raw("/calendars/foo") assert resp_data.status_code == 404 def test_add_to_specific_calendar(db, default_namespace, api_client): cal = Calendar( - namespace_id=default_namespace.id, - uid='uid', - provider_name='WTF', - name='Custom') + namespace_id=default_namespace.id, uid="uid", provider_name="WTF", name="Custom" + ) db.session.add(cal) db.session.commit() cal_id = cal.public_id - e_data = {'calendar_id': cal_id, - 'title': 'subj', 'description': 'body1', - 'when': {'time': 1}, 'location': 'NylasHQ'} - r = api_client.post_data('/events', e_data) + e_data = { + "calendar_id": cal_id, + "title": "subj", + "description": "body1", + "when": {"time": 1}, + "location": "NylasHQ", + } + r = api_client.post_data("/events", e_data) assert r.status_code == 200 - events = api_client.get_data('/events?calendar_id={}'.format(cal_id)) + events = api_client.get_data("/events?calendar_id={}".format(cal_id)) assert len(events) == 1 def test_add_to_read_only_calendar(db, api_client): - cal_list = api_client.get_data('/calendars') + cal_list = api_client.get_data("/calendars") ro_cal = None for c in cal_list: - if c['read_only']: + if c["read_only"]: ro_cal = c assert ro_cal - e_data = {'calendar_id': ro_cal['id'], - 'title': 'subj', 'description': 'body1', - 'when': {'time': 1}, 'location': 'NylasHQ'} - resp = api_client.post_data('/events', e_data) + e_data = { + "calendar_id": ro_cal["id"], + "title": "subj", + "description": "body1", + "when": {"time": 1}, + "location": "NylasHQ", + } + resp = api_client.post_data("/events", e_data) assert resp.status_code == 400 def test_delete_from_readonly_calendar(db, default_namespace, api_client): - add_fake_event(db.session, default_namespace.id, - calendar=db.session.query(Calendar).filter( - Calendar.namespace_id == default_namespace.id, - Calendar.read_only == True).first(), # noqa - read_only=True) - calendar_list = api_client.get_data('/calendars') + add_fake_event( + db.session, + default_namespace.id, + calendar=db.session.query(Calendar) + .filter( + Calendar.namespace_id == default_namespace.id, Calendar.read_only == True + ) + .first(), # noqa + read_only=True, + ) + calendar_list = 
api_client.get_data("/calendars") read_only_calendar = None for c in calendar_list: - if c['read_only']: + if c["read_only"]: read_only_calendar = c break - events = api_client.get_data('/events?calendar_id={}'.format( - read_only_calendar['id'])) + events = api_client.get_data( + "/events?calendar_id={}".format(read_only_calendar["id"]) + ) for event in events: - if event['read_only']: + if event["read_only"]: read_only_event = event break assert read_only_calendar assert read_only_event - e_id = read_only_event['id'] - resp = api_client.delete('/events/{}'.format(e_id)) + e_id = read_only_event["id"] + resp = api_client.delete("/events/{}".format(e_id)) assert resp.status_code == 400 diff --git a/inbox/test/api/test_contacts.py b/inbox/test/api/test_contacts.py index dba8f75c9..657b77746 100644 --- a/inbox/test/api/test_contacts.py +++ b/inbox/test/api/test_contacts.py @@ -2,56 +2,54 @@ from inbox.test.util.base import contact_sync, contacts_provider from inbox.test.api.base import api_client -__all__ = ['contacts_provider', 'contact_sync', 'api_client'] +__all__ = ["contacts_provider", "contact_sync", "api_client"] -def test_api_list(contacts_provider, contact_sync, db, api_client, - default_namespace): - contacts_provider.supply_contact('Contact One', - 'contact.one@email.address') - contacts_provider.supply_contact('Contact Two', - 'contact.two@email.address') +def test_api_list(contacts_provider, contact_sync, db, api_client, default_namespace): + contacts_provider.supply_contact("Contact One", "contact.one@email.address") + contacts_provider.supply_contact("Contact Two", "contact.two@email.address") contact_sync.provider = contacts_provider contact_sync.sync() - contact_list = api_client.get_data('/contacts') - contact_names = [contact['name'] for contact in contact_list] - assert 'Contact One' in contact_names - assert 'Contact Two' in contact_names + contact_list = api_client.get_data("/contacts") + contact_names = [contact["name"] for contact in contact_list] + assert "Contact One" in contact_names + assert "Contact Two" in contact_names - contact_emails = [contact['email'] for contact in contact_list] - assert 'contact.one@email.address' in contact_emails - assert 'contact.two@email.address' in contact_emails + contact_emails = [contact["email"] for contact in contact_list] + assert "contact.one@email.address" in contact_emails + assert "contact.two@email.address" in contact_emails - contact_count = api_client.get_data('/contacts?view=count') - assert contact_count['count'] == db.session.query(Contact). 
\ - filter(Contact.namespace_id == default_namespace.id).count() + contact_count = api_client.get_data("/contacts?view=count") + assert ( + contact_count["count"] + == db.session.query(Contact) + .filter(Contact.namespace_id == default_namespace.id) + .count() + ) -def test_api_get(contacts_provider, contact_sync, db, api_client, - default_namespace): - contacts_provider.supply_contact('Contact One', - 'contact.one@email.address') - contacts_provider.supply_contact('Contact Two', - 'contact.two@email.address') +def test_api_get(contacts_provider, contact_sync, db, api_client, default_namespace): + contacts_provider.supply_contact("Contact One", "contact.one@email.address") + contacts_provider.supply_contact("Contact Two", "contact.two@email.address") contact_sync.provider = contacts_provider contact_sync.sync() - contact_list = api_client.get_data('/contacts') + contact_list = api_client.get_data("/contacts") - contact_ids = [contact['id'] for contact in contact_list] + contact_ids = [contact["id"] for contact in contact_list] c1found = False c2found = False for c_id in contact_ids: - contact = api_client.get_data('/contacts/' + c_id) + contact = api_client.get_data("/contacts/" + c_id) - if contact['name'] == 'Contact One': + if contact["name"] == "Contact One": c1found = True - if contact['name'] == 'Contact Two': + if contact["name"] == "Contact Two": c2found = True assert c1found diff --git a/inbox/test/api/test_data_processing.py b/inbox/test/api/test_data_processing.py index 8a1259de6..6975a524c 100644 --- a/inbox/test/api/test_data_processing.py +++ b/inbox/test/api/test_data_processing.py @@ -1,21 +1,22 @@ import json from inbox.models import DataProcessingCache from sqlalchemy.orm.exc import NoResultFound -from inbox.test.util.base import (add_fake_thread, - add_fake_message, default_namespace) +from inbox.test.util.base import add_fake_thread, add_fake_message, default_namespace from inbox.test.api.base import api_client -__all__ = ['api_client', 'default_namespace'] +__all__ = ["api_client", "default_namespace"] def test_contact_rankings(db, api_client, default_namespace): # Clear cached data (if it exists) namespace_id = default_namespace.id try: - cached_data = db.session.query(DataProcessingCache) \ - .filter(DataProcessingCache.namespace_id == - namespace_id).one() + cached_data = ( + db.session.query(DataProcessingCache) + .filter(DataProcessingCache.namespace_id == namespace_id) + .one() + ) cached_data.contact_rankings_last_updated = None db.session.add(cached_data) db.session.commit() @@ -25,29 +26,37 @@ def test_contact_rankings(db, api_client, default_namespace): # Send some emails namespace_email = default_namespace.email_address - me = ('me', namespace_email) - recipients = ([[('first', 'number1@nylas.com')]] * 8 + - [[('second', 'number2@nylas.com')]] * 4 + - [[('third', 'number3@nylas.com')]] + - [[('third', 'number3@nylas.com'), - ('fourth', 'number4@nylas.com')]]) + me = ("me", namespace_email) + recipients = ( + [[("first", "number1@nylas.com")]] * 8 + + [[("second", "number2@nylas.com")]] * 4 + + [[("third", "number3@nylas.com")]] + + [[("third", "number3@nylas.com"), ("fourth", "number4@nylas.com")]] + ) for recipients_list in recipients: fake_thread = add_fake_thread(db.session, namespace_id) - add_fake_message(db.session, namespace_id, fake_thread, - subject='Froop', - from_addr=[me], - to_addr=recipients_list, - add_sent_category=True) + add_fake_message( + db.session, + namespace_id, + fake_thread, + subject="Froop", + from_addr=[me], + 
to_addr=recipients_list, + add_sent_category=True, + ) # Check contact rankings - resp = api_client.get_raw( - '/contacts/rankings?force_recalculate=true') + resp = api_client.get_raw("/contacts/rankings?force_recalculate=true") assert resp.status_code == 200 emails_scores = {e: s for (e, s) in json.loads(resp.data)} - emails = ['number1@nylas.com', 'number2@nylas.com', - 'number3@nylas.com', 'number4@nylas.com'] + emails = [ + "number1@nylas.com", + "number2@nylas.com", + "number3@nylas.com", + "number4@nylas.com", + ] for email in emails: assert email in emails_scores @@ -55,12 +64,16 @@ def test_contact_rankings(db, api_client, default_namespace): assert emails_scores[e1] > emails_scores[e2] # make sure it works if we call it again! - resp = api_client.get_raw('/contacts/rankings') + resp = api_client.get_raw("/contacts/rankings") assert resp.status_code == 200 emails_scores = {e: s for (e, s) in json.loads(resp.data)} - emails = ['number1@nylas.com', 'number2@nylas.com', - 'number3@nylas.com', 'number4@nylas.com'] + emails = [ + "number1@nylas.com", + "number2@nylas.com", + "number3@nylas.com", + "number4@nylas.com", + ] for email in emails: assert email in emails_scores @@ -68,9 +81,11 @@ def test_contact_rankings(db, api_client, default_namespace): assert emails_scores[e1] > emails_scores[e2] try: - cached_data = db.session.query(DataProcessingCache) \ - .filter(DataProcessingCache.namespace_id == - namespace_id).one() + cached_data = ( + db.session.query(DataProcessingCache) + .filter(DataProcessingCache.namespace_id == namespace_id) + .one() + ) assert cached_data.contact_rankings_last_updated is not None except (NoResultFound, AssertionError): assert False, "Contact rankings not cached" @@ -80,9 +95,11 @@ def test_contact_groups(db, api_client, default_namespace): # Clear cached data (if it exists) namespace_id = default_namespace.id try: - cached_data = db.session.query(DataProcessingCache) \ - .filter(DataProcessingCache.namespace_id == - namespace_id).one() + cached_data = ( + db.session.query(DataProcessingCache) + .filter(DataProcessingCache.namespace_id == namespace_id) + .one() + ) cached_data.contact_groups_last_updated = None db.session.add(cached_data) db.session.commit() @@ -91,48 +108,52 @@ def test_contact_groups(db, api_client, default_namespace): # Send some emails namespace_email = default_namespace.email_address - me = ('me', namespace_email) - recipients = ([[('a', 'a@nylas.com'), - ('b', 'b@nylas.com'), - ('c', 'c@nylas.com')]] * 8 + - [[('b', 'b@nylas.com'), - ('c', 'c@nylas.com'), - ('d', 'd@nylas.com')]] * 8 + - [[('d', 'd@nylas.com'), - ('e', 'e@nylas.com'), - ('f', 'f@nylas.com')]] * 8 + - [[('g', 'g@nylas.com'), - ('h', 'h@nylas.com'), - ('i', 'i@nylas.com'), - ('j', 'j@nylas.com')]] * 5 + - [[('g', 'g@nylas.com'), - ('h', 'h@nylas.com'), - ('i', 'i@nylas.com')]] * 2 + - [[('k', 'k@nylas.com'), - ('l', 'l@nylas.com')]] * 3) + me = ("me", namespace_email) + recipients = ( + [[("a", "a@nylas.com"), ("b", "b@nylas.com"), ("c", "c@nylas.com")]] * 8 + + [[("b", "b@nylas.com"), ("c", "c@nylas.com"), ("d", "d@nylas.com")]] * 8 + + [[("d", "d@nylas.com"), ("e", "e@nylas.com"), ("f", "f@nylas.com")]] * 8 + + [ + [ + ("g", "g@nylas.com"), + ("h", "h@nylas.com"), + ("i", "i@nylas.com"), + ("j", "j@nylas.com"), + ] + ] + * 5 + + [[("g", "g@nylas.com"), ("h", "h@nylas.com"), ("i", "i@nylas.com")]] * 2 + + [[("k", "k@nylas.com"), ("l", "l@nylas.com")]] * 3 + ) for recipients_list in recipients: fake_thread = add_fake_thread(db.session, namespace_id) - 
add_fake_message(db.session, namespace_id, fake_thread, - subject='Froop', - from_addr=[me], - to_addr=recipients_list, - add_sent_category=True) + add_fake_message( + db.session, + namespace_id, + fake_thread, + subject="Froop", + from_addr=[me], + to_addr=recipients_list, + add_sent_category=True, + ) # Check contact groups - resp = api_client.get_raw('/groups/intrinsic?force_recalculate=true') + resp = api_client.get_raw("/groups/intrinsic?force_recalculate=true") assert resp.status_code == 200 groups_scores = {g: s for (g, s) in json.loads(resp.data)} - groups = ['a@nylas.com, b@nylas.com, c@nylas.com, d@nylas.com', - 'd@nylas.com, e@nylas.com, f@nylas.com', - 'g@nylas.com, h@nylas.com, i@nylas.com, j@nylas.com', - 'k@nylas.com, l@nylas.com'] + groups = [ + "a@nylas.com, b@nylas.com, c@nylas.com, d@nylas.com", + "d@nylas.com, e@nylas.com, f@nylas.com", + "g@nylas.com, h@nylas.com, i@nylas.com, j@nylas.com", + "k@nylas.com, l@nylas.com", + ] for g in groups: assert g in groups_scores # make sure it works when we do it again - resp = api_client.get_raw('/groups/intrinsic') + resp = api_client.get_raw("/groups/intrinsic") assert resp.status_code == 200 groups_scores = {g: s for (g, s) in json.loads(resp.data)} @@ -140,9 +161,11 @@ def test_contact_groups(db, api_client, default_namespace): assert g in groups_scores try: - cached_data = db.session.query(DataProcessingCache) \ - .filter(DataProcessingCache.namespace_id == - namespace_id).one() + cached_data = ( + db.session.query(DataProcessingCache) + .filter(DataProcessingCache.namespace_id == namespace_id) + .one() + ) assert cached_data.contact_groups_last_updated is not None except (NoResultFound, AssertionError): assert False, "Contact groups not cached" diff --git a/inbox/test/api/test_drafts.py b/inbox/test/api/test_drafts.py index 82241889e..019fd132c 100644 --- a/inbox/test/api/test_drafts.py +++ b/inbox/test/api/test_drafts.py @@ -11,50 +11,50 @@ from inbox.test.util.base import add_fake_message, add_fake_thread from inbox.test.api.base import api_client -__all__ = ['api_client'] +__all__ = ["api_client"] @pytest.fixture def example_draft(db, default_account): return { - 'subject': 'Draft test at {}'.format(datetime.utcnow()), - 'body': '
<html><body><h2>Sea, birds and sand.</h2></body></html>
', - 'to': [{'name': 'The red-haired mermaid', - 'email': default_account.email_address}] + "subject": "Draft test at {}".format(datetime.utcnow()), + "body": "
<html><body><h2>Sea, birds and sand.</h2></body></html>
", + "to": [ + {"name": "The red-haired mermaid", "email": default_account.email_address} + ], } @pytest.fixture def example_bad_recipient_drafts(): bad_email = { - 'subject': 'Draft test at {}'.format(datetime.utcnow()), - 'body': '
<html><body><h2>Sea, birds and sand.</h2></body></html>
', - 'to': [{'name': 'The red-haired mermaid', - 'email': 'froop'}] + "subject": "Draft test at {}".format(datetime.utcnow()), + "body": "
<html><body><h2>Sea, birds and sand.</h2></body></html>
", + "to": [{"name": "The red-haired mermaid", "email": "froop"}], } empty_email = { - 'subject': 'Draft test at {}'.format(datetime.utcnow()), - 'body': '
<html><body><h2>Sea, birds and sand.</h2></body></html>
', - 'to': [{'name': 'The red-haired mermaid', - 'email': ''}] + "subject": "Draft test at {}".format(datetime.utcnow()), + "body": "
<html><body><h2>Sea, birds and sand.</h2></body></html>
", + "to": [{"name": "The red-haired mermaid", "email": ""}], } return [empty_email, bad_email] -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def attachments(db): - filenames = ['muir.jpg', 'LetMeSendYouEmail.wav', 'piece-jointe.jpg'] + filenames = ["muir.jpg", "LetMeSendYouEmail.wav", "piece-jointe.jpg"] data = [] for filename in filenames: - path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', - 'data', filename).encode('utf-8') + path = os.path.join( + os.path.dirname(os.path.abspath(__file__)), "..", "data", filename + ).encode("utf-8") # Mac and linux fight over filesystem encodings if we store this # filename on the fs. Work around by changing the filename we upload # instead. - if filename == 'piece-jointe.jpg': - filename = u'pièce-jointe.jpg' + if filename == "piece-jointe.jpg": + filename = u"pièce-jointe.jpg" data.append((filename, path)) return data @@ -68,39 +68,39 @@ def mock_save_draft(account_id, message_id, args): saved_drafts.append(message_id) # Patch both, just in case - monkeypatch.setattr('inbox.actions.base.save_draft', mock_save_draft) + monkeypatch.setattr("inbox.actions.base.save_draft", mock_save_draft) return saved_drafts -def test_save_update_bad_recipient_draft(db, patch_remote_save_draft, - default_account, - example_bad_recipient_drafts): +def test_save_update_bad_recipient_draft( + db, patch_remote_save_draft, default_account, example_bad_recipient_drafts +): # You should be able to save a draft, even if # the recipient's email is invalid. from inbox.sendmail.base import create_message_from_json from inbox.actions.base import save_draft for example_draft in example_bad_recipient_drafts: - draft = create_message_from_json(example_draft, - default_account.namespace, db.session, - is_draft=True) + draft = create_message_from_json( + example_draft, default_account.namespace, db.session, is_draft=True + ) - save_draft(default_account.id, draft.id, {'version': draft.version}) + save_draft(default_account.id, draft.id, {"version": draft.version}) assert len(patch_remote_save_draft) == 2 def test_create_and_get_draft(api_client, example_draft): - r = api_client.post_data('/drafts', example_draft) + r = api_client.post_data("/drafts", example_draft) assert r.status_code == 200 - public_id = json.loads(r.data)['id'] - version = json.loads(r.data)['version'] + public_id = json.loads(r.data)["id"] + version = json.loads(r.data)["version"] assert version == 0 - r = api_client.get_data('/drafts') - matching_saved_drafts = [draft for draft in r if draft['id'] == public_id] + r = api_client.get_data("/drafts") + matching_saved_drafts = [draft for draft in r if draft["id"] == public_id] assert len(matching_saved_drafts) == 1 saved_draft = matching_saved_drafts[0] @@ -108,353 +108,327 @@ def test_create_and_get_draft(api_client, example_draft): def test_create_draft_replying_to_thread(api_client, thread, message): - thread = api_client.get_data('/threads')[0] - thread_id = thread['id'] - latest_message_id = thread['message_ids'][-1] + thread = api_client.get_data("/threads")[0] + thread_id = thread["id"] + latest_message_id = thread["message_ids"][-1] reply_draft = { - 'subject': 'test reply', - 'body': 'test reply', - 'thread_id': thread_id + "subject": "test reply", + "body": "test reply", + "thread_id": thread_id, } - r = api_client.post_data('/drafts', reply_draft) - draft_id = json.loads(r.data)['id'] + r = api_client.post_data("/drafts", reply_draft) + draft_id = json.loads(r.data)["id"] - drafts = api_client.get_data('/drafts') 
+ drafts = api_client.get_data("/drafts") assert len(drafts) == 1 - assert thread_id == drafts[0]['thread_id'] - assert drafts[0]['reply_to_message_id'] == latest_message_id + assert thread_id == drafts[0]["thread_id"] + assert drafts[0]["reply_to_message_id"] == latest_message_id - thread_data = api_client.get_data('/threads/{}'.format(thread_id)) - assert draft_id in thread_data['draft_ids'] + thread_data = api_client.get_data("/threads/{}".format(thread_id)) + assert draft_id in thread_data["draft_ids"] def test_create_draft_replying_to_message(api_client, message): - message = api_client.get_data('/messages')[0] + message = api_client.get_data("/messages")[0] reply_draft = { - 'subject': 'test reply', - 'body': 'test reply', - 'reply_to_message_id': message['id'] + "subject": "test reply", + "body": "test reply", + "reply_to_message_id": message["id"], } - r = api_client.post_data('/drafts', reply_draft) + r = api_client.post_data("/drafts", reply_draft) data = json.loads(r.data) - assert data['reply_to_message_id'] == message['id'] - assert data['thread_id'] == message['thread_id'] + assert data["reply_to_message_id"] == message["id"] + assert data["thread_id"] == message["thread_id"] def test_reject_incompatible_reply_thread_and_message( - db, api_client, message, thread, default_namespace): + db, api_client, message, thread, default_namespace +): alt_thread = add_fake_thread(db.session, default_namespace.id) add_fake_message(db.session, default_namespace.id, alt_thread) - thread = api_client.get_data('/threads')[0] - alt_message_id = api_client.get_data('/threads')[1]['message_ids'][0] - alt_message = api_client.get_data('/messages/{}'.format(alt_message_id)) - assert thread['id'] != alt_message['thread_id'] + thread = api_client.get_data("/threads")[0] + alt_message_id = api_client.get_data("/threads")[1]["message_ids"][0] + alt_message = api_client.get_data("/messages/{}".format(alt_message_id)) + assert thread["id"] != alt_message["thread_id"] reply_draft = { - 'subject': 'test reply', - 'reply_to_message_id': alt_message['id'], - 'thread_id': thread['id'] + "subject": "test reply", + "reply_to_message_id": alt_message["id"], + "thread_id": thread["id"], } - r = api_client.post_data('/drafts', reply_draft) + r = api_client.post_data("/drafts", reply_draft) assert r.status_code == 400 def test_drafts_filter(api_client, example_draft): - r = api_client.post_data('/drafts', example_draft) - thread_id = json.loads(r.data)['thread_id'] + r = api_client.post_data("/drafts", example_draft) + thread_id = json.loads(r.data)["thread_id"] reply_draft = { - 'subject': 'test reply', - 'body': 'test reply', - 'thread_id': thread_id + "subject": "test reply", + "body": "test reply", + "thread_id": thread_id, } - r = api_client.post_data('/drafts', reply_draft) + r = api_client.post_data("/drafts", reply_draft) - _filter = '?thread_id=0000000000000000000000000' - results = api_client.get_data('/drafts' + _filter) + _filter = "?thread_id=0000000000000000000000000" + results = api_client.get_data("/drafts" + _filter) assert len(results) == 0 - results = api_client.get_data('/drafts?thread_id={}' - .format(thread_id)) + results = api_client.get_data("/drafts?thread_id={}".format(thread_id)) assert len(results) == 2 - results = api_client.get_data('/drafts?offset={}&thread_id={}' - .format(1, thread_id)) + results = api_client.get_data("/drafts?offset={}&thread_id={}".format(1, thread_id)) assert len(results) == 1 def test_create_draft_with_attachments(api_client, attachments, example_draft): 
attachment_ids = [] - upload_path = '/files' + upload_path = "/files" for filename, path in attachments: - data = {'file': (open(path, 'rb'), filename)} + data = {"file": (open(path, "rb"), filename)} r = api_client.post_raw(upload_path, data=data) assert r.status_code == 200 - attachment_id = json.loads(r.data)[0]['id'] + attachment_id = json.loads(r.data)[0]["id"] attachment_ids.append(attachment_id) first_attachment = attachment_ids.pop() - example_draft['file_ids'] = [first_attachment] - r = api_client.post_data('/drafts', example_draft) + example_draft["file_ids"] = [first_attachment] + r = api_client.post_data("/drafts", example_draft) assert r.status_code == 200 returned_draft = json.loads(r.data) - draft_public_id = returned_draft['id'] - assert returned_draft['version'] == 0 - example_draft['version'] = returned_draft['version'] - assert len(returned_draft['files']) == 1 + draft_public_id = returned_draft["id"] + assert returned_draft["version"] == 0 + example_draft["version"] = returned_draft["version"] + assert len(returned_draft["files"]) == 1 attachment_ids.append(first_attachment) - example_draft['file_ids'] = attachment_ids - r = api_client.put_data('/drafts/{}'.format(draft_public_id), - example_draft) + example_draft["file_ids"] = attachment_ids + r = api_client.put_data("/drafts/{}".format(draft_public_id), example_draft) assert r.status_code == 200 returned_draft = json.loads(r.data) - assert len(returned_draft['files']) == 3 - assert returned_draft['version'] == 1 - example_draft['version'] = returned_draft['version'] + assert len(returned_draft["files"]) == 3 + assert returned_draft["version"] == 1 + example_draft["version"] = returned_draft["version"] # Make sure we can't delete the files now for file_id in attachment_ids: - r = api_client.delete('/files/{}'.format(file_id)) + r = api_client.delete("/files/{}".format(file_id)) assert r.status_code == 400 # Now remove the attachment - example_draft['file_ids'] = [first_attachment] - r = api_client.put_data('/drafts/{}'.format(draft_public_id), - example_draft) - - draft_data = api_client.get_data('/drafts/{}'.format(draft_public_id)) - assert len(draft_data['files']) == 1 - assert draft_data['version'] == 2 - example_draft['version'] = draft_data['version'] - - example_draft['file_ids'] = [] - r = api_client.put_data('/drafts/{}'.format(draft_public_id), - example_draft) - draft_data = api_client.get_data('/drafts/{}'.format(draft_public_id)) + example_draft["file_ids"] = [first_attachment] + r = api_client.put_data("/drafts/{}".format(draft_public_id), example_draft) + + draft_data = api_client.get_data("/drafts/{}".format(draft_public_id)) + assert len(draft_data["files"]) == 1 + assert draft_data["version"] == 2 + example_draft["version"] = draft_data["version"] + + example_draft["file_ids"] = [] + r = api_client.put_data("/drafts/{}".format(draft_public_id), example_draft) + draft_data = api_client.get_data("/drafts/{}".format(draft_public_id)) assert r.status_code == 200 - assert len(draft_data['files']) == 0 - assert draft_data['version'] == 3 + assert len(draft_data["files"]) == 0 + assert draft_data["version"] == 3 # now that they're not attached, we should be able to delete them for file_id in attachment_ids: - r = api_client.delete('/files/{}'.format(file_id)) + r = api_client.delete("/files/{}".format(file_id)) assert r.status_code == 200 def test_get_all_drafts(api_client, example_draft): - r = api_client.post_data('/drafts', example_draft) - first_public_id = json.loads(r.data)['id'] + r = 
api_client.post_data("/drafts", example_draft) + first_public_id = json.loads(r.data)["id"] - r = api_client.post_data('/drafts', example_draft) - second_public_id = json.loads(r.data)['id'] + r = api_client.post_data("/drafts", example_draft) + second_public_id = json.loads(r.data)["id"] - drafts = api_client.get_data('/drafts') + drafts = api_client.get_data("/drafts") assert len(drafts) == 2 assert first_public_id != second_public_id - assert {first_public_id, second_public_id} == {draft['id'] for draft in - drafts} - assert all(item['object'] == 'draft' for item in drafts) + assert {first_public_id, second_public_id} == {draft["id"] for draft in drafts} + assert all(item["object"] == "draft" for item in drafts) def test_update_draft(api_client): with freeze_time(datetime.now()) as freezer: - original_draft = { - 'subject': 'original draft', - 'body': 'parent draft' - } - r = api_client.post_data('/drafts', original_draft) - draft_public_id = json.loads(r.data)['id'] - version = json.loads(r.data)['version'] + original_draft = {"subject": "original draft", "body": "parent draft"} + r = api_client.post_data("/drafts", original_draft) + draft_public_id = json.loads(r.data)["id"] + version = json.loads(r.data)["version"] assert version == 0 freezer.tick() updated_draft = { - 'subject': 'updated draft', - 'body': 'updated draft', - 'version': version + "subject": "updated draft", + "body": "updated draft", + "version": version, } - r = api_client.put_data('/drafts/{}'.format(draft_public_id), - updated_draft) - updated_public_id = json.loads(r.data)['id'] - updated_version = json.loads(r.data)['version'] + r = api_client.put_data("/drafts/{}".format(draft_public_id), updated_draft) + updated_public_id = json.loads(r.data)["id"] + updated_version = json.loads(r.data)["version"] assert updated_public_id == draft_public_id assert updated_version > 0 - drafts = api_client.get_data('/drafts') + drafts = api_client.get_data("/drafts") assert len(drafts) == 1 - assert drafts[0]['id'] == updated_public_id + assert drafts[0]["id"] == updated_public_id # Check that the thread is updated too. 
- thread = api_client.get_data('/threads/{}'.format(drafts[0]['thread_id'])) - assert thread['subject'] == 'updated draft' - assert thread['first_message_timestamp'] == drafts[0]['date'] - assert thread['last_message_timestamp'] == drafts[0]['date'] + thread = api_client.get_data("/threads/{}".format(drafts[0]["thread_id"])) + assert thread["subject"] == "updated draft" + assert thread["first_message_timestamp"] == drafts[0]["date"] + assert thread["last_message_timestamp"] == drafts[0]["date"] def test_delete_draft(api_client, thread, message): - original_draft = { - 'subject': 'parent draft', - 'body': 'parent draft' - } - r = api_client.post_data('/drafts', original_draft) - draft_public_id = json.loads(r.data)['id'] - version = json.loads(r.data)['version'] + original_draft = {"subject": "parent draft", "body": "parent draft"} + r = api_client.post_data("/drafts", original_draft) + draft_public_id = json.loads(r.data)["id"] + version = json.loads(r.data)["version"] updated_draft = { - 'subject': 'updated draft', - 'body': 'updated draft', - 'version': version + "subject": "updated draft", + "body": "updated draft", + "version": version, } - r = api_client.put_data('/drafts/{}'.format(draft_public_id), - updated_draft) - updated_public_id = json.loads(r.data)['id'] - updated_version = json.loads(r.data)['version'] + r = api_client.put_data("/drafts/{}".format(draft_public_id), updated_draft) + updated_public_id = json.loads(r.data)["id"] + updated_version = json.loads(r.data)["version"] - r = api_client.delete('/drafts/{}'.format(updated_public_id), - {'version': updated_version}) + r = api_client.delete( + "/drafts/{}".format(updated_public_id), {"version": updated_version} + ) # Check that drafts were deleted - drafts = api_client.get_data('/drafts') + drafts = api_client.get_data("/drafts") assert not drafts # Check that no orphaned threads are around - threads = api_client.get_data('/threads?subject=parent%20draft') + threads = api_client.get_data("/threads?subject=parent%20draft") assert not threads - threads = api_client.get_data('/threads?subject=updated%20draft') + threads = api_client.get_data("/threads?subject=updated%20draft") assert not threads # And check that threads aren't deleted if they still have messages. 
- thread_public_id = api_client.get_data('/threads')[0]['id'] + thread_public_id = api_client.get_data("/threads")[0]["id"] reply_draft = { - 'subject': 'test reply', - 'body': 'test reply', - 'thread_id': thread_public_id + "subject": "test reply", + "body": "test reply", + "thread_id": thread_public_id, } - r = api_client.post_data('/drafts', reply_draft) - public_id = json.loads(r.data)['id'] - version = json.loads(r.data)['version'] - thread = api_client.get_data('/threads/{}'.format(thread_public_id)) - assert len(thread['draft_ids']) > 0 - api_client.delete('/drafts/{}'.format(public_id), - {'version': version}) - thread = api_client.get_data('/threads/{}'.format(thread_public_id)) + r = api_client.post_data("/drafts", reply_draft) + public_id = json.loads(r.data)["id"] + version = json.loads(r.data)["version"] + thread = api_client.get_data("/threads/{}".format(thread_public_id)) + assert len(thread["draft_ids"]) > 0 + api_client.delete("/drafts/{}".format(public_id), {"version": version}) + thread = api_client.get_data("/threads/{}".format(thread_public_id)) assert thread - assert len(thread['draft_ids']) == 0 + assert len(thread["draft_ids"]) == 0 def test_delete_remote_draft(db, api_client, message): message.is_draft = True db.session.commit() - drafts = api_client.get_data('/drafts') + drafts = api_client.get_data("/drafts") assert len(drafts) == 1 - public_id = drafts[0]['id'] - version = drafts[0]['version'] + public_id = drafts[0]["id"] + version = drafts[0]["version"] assert public_id == message.public_id and version == message.version - api_client.delete('/drafts/{}'.format(public_id), - {'version': version}) + api_client.delete("/drafts/{}".format(public_id), {"version": version}) # Check that drafts were deleted - drafts = api_client.get_data('/drafts') + drafts = api_client.get_data("/drafts") assert not drafts def test_conflicting_updates(api_client): - original_draft = { - 'subject': 'parent draft', - 'body': 'parent draft' - } - r = api_client.post_data('/drafts', original_draft) - original_public_id = json.loads(r.data)['id'] - version = json.loads(r.data)['version'] + original_draft = {"subject": "parent draft", "body": "parent draft"} + r = api_client.post_data("/drafts", original_draft) + original_public_id = json.loads(r.data)["id"] + version = json.loads(r.data)["version"] updated_draft = { - 'subject': 'updated draft', - 'body': 'updated draft', - 'version': version + "subject": "updated draft", + "body": "updated draft", + "version": version, } - r = api_client.put_data('/drafts/{}'.format(original_public_id), - updated_draft) + r = api_client.put_data("/drafts/{}".format(original_public_id), updated_draft) assert r.status_code == 200 - updated_public_id = json.loads(r.data)['id'] - updated_version = json.loads(r.data)['version'] + updated_public_id = json.loads(r.data)["id"] + updated_version = json.loads(r.data)["version"] assert updated_version != version conflicting_draft = { - 'subject': 'conflicting draft', - 'body': 'conflicting draft', - 'version': version + "subject": "conflicting draft", + "body": "conflicting draft", + "version": version, } - r = api_client.put_data('/drafts/{}'.format(original_public_id), - conflicting_draft) + r = api_client.put_data("/drafts/{}".format(original_public_id), conflicting_draft) assert r.status_code == 409 - drafts = api_client.get_data('/drafts') + drafts = api_client.get_data("/drafts") assert len(drafts) == 1 - assert drafts[0]['id'] == updated_public_id + assert drafts[0]["id"] == updated_public_id def 
test_update_to_nonexistent_draft(api_client): - updated_draft = { - 'subject': 'updated draft', - 'body': 'updated draft', - 'version': 22 - } + updated_draft = {"subject": "updated draft", "body": "updated draft", "version": 22} - r = api_client.put_data('/drafts/{}'.format('notarealid'), updated_draft) + r = api_client.put_data("/drafts/{}".format("notarealid"), updated_draft) assert r.status_code == 404 - drafts = api_client.get_data('/drafts') + drafts = api_client.get_data("/drafts") assert len(drafts) == 0 def test_contacts_updated(api_client): """Tests that draft-contact associations are properly created and updated.""" - draft = { - 'to': [{'email': 'alice@example.com'}, {'email': 'bob@example.com'}] - } + draft = {"to": [{"email": "alice@example.com"}, {"email": "bob@example.com"}]} - r = api_client.post_data('/drafts', draft) + r = api_client.post_data("/drafts", draft) assert r.status_code == 200 - draft_id = json.loads(r.data)['id'] - draft_version = json.loads(r.data)['version'] + draft_id = json.loads(r.data)["id"] + draft_version = json.loads(r.data)["version"] - r = api_client.get_data('/threads?to=alice@example.com') + r = api_client.get_data("/threads?to=alice@example.com") assert len(r) == 1 updated_draft = { - 'to': [{'email': 'alice@example.com'}, {'email': 'joe@example.com'}], - 'version': draft_version + "to": [{"email": "alice@example.com"}, {"email": "joe@example.com"}], + "version": draft_version, } - r = api_client.put_data('/drafts/{}'.format(draft_id), updated_draft) + r = api_client.put_data("/drafts/{}".format(draft_id), updated_draft) assert r.status_code == 200 - r = api_client.get_data('/threads?to=alice@example.com') + r = api_client.get_data("/threads?to=alice@example.com") assert len(r) == 1 - r = api_client.get_data('/threads?to=bob@example.com') + r = api_client.get_data("/threads?to=bob@example.com") assert len(r) == 0 - r = api_client.get_data('/threads?to=joe@example.com') + r = api_client.get_data("/threads?to=joe@example.com") assert len(r) == 1 # Check that contacts aren't created for garbage recipients. - r = api_client.post_data('/drafts', - {'to': [{'name': 'who', 'email': 'nope'}]}) + r = api_client.post_data("/drafts", {"to": [{"name": "who", "email": "nope"}]}) assert r.status_code == 200 - r = api_client.get_data('/threads?to=nope') + r = api_client.get_data("/threads?to=nope") assert len(r) == 0 - r = api_client.get_data('/contacts?filter=nope') + r = api_client.get_data("/contacts?filter=nope") assert len(r) == 0 diff --git a/inbox/test/api/test_event_participants.py b/inbox/test/api/test_event_participants.py index 55cdf195e..b4ba302c0 100644 --- a/inbox/test/api/test_event_participants.py +++ b/inbox/test/api/test_event_participants.py @@ -5,184 +5,164 @@ from inbox.test.api.base import api_client -__all__ = ['calendar', 'api_client'] +__all__ = ["calendar", "api_client"] # TODO(emfree) WTF is all this crap anyways? + def test_api_create(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': calendar.public_id, - 'participants': [{ - 'name': 'alyssa p. hacker', - 'email': 'alyssa@example.com' - }] + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "participants": [{"name": "alyssa p. 
hacker", "email": "alyssa@example.com"}], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 1 - participant = e_resp_data['participants'][0] - assert participant['name'] == e_data['participants'][0]['name'] - assert participant['email'] == e_data['participants'][0]['email'] - assert participant['status'] == 'noreply' + assert len(e_resp_data["participants"]) == 1 + participant = e_resp_data["participants"][0] + assert participant["name"] == e_data["participants"][0]["name"] + assert participant["email"] == e_data["participants"][0]["email"] + assert participant["status"] == "noreply" - e_resp_data = api_client.get_data('/events/' + e_resp_data['id']) + e_resp_data = api_client.get_data("/events/" + e_resp_data["id"]) - assert len(e_resp_data['participants']) == 1 - participant = e_resp_data['participants'][0] - assert participant['name'] == e_data['participants'][0]['name'] - assert participant['email'] == e_data['participants'][0]['email'] - assert participant['status'] == 'noreply' + assert len(e_resp_data["participants"]) == 1 + participant = e_resp_data["participants"][0] + assert participant["name"] == e_data["participants"][0]["name"] + assert participant["email"] == e_data["participants"][0]["email"] + assert participant["status"] == "noreply" def test_api_create_status_yes(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': calendar.public_id, - 'participants': [{ - 'email': 'alyssa@example.com', - 'status': 'yes' - }] + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "participants": [{"email": "alyssa@example.com", "status": "yes"}], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 1 - participant = e_resp_data['participants'][0] - assert participant['name'] is None - assert participant['email'] == e_data['participants'][0]['email'] - assert participant['status'] == 'yes' + assert len(e_resp_data["participants"]) == 1 + participant = e_resp_data["participants"][0] + assert participant["name"] is None + assert participant["email"] == e_data["participants"][0]["email"] + assert participant["status"] == "yes" def test_api_create_multiple(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': calendar.public_id, - 'participants': [{ - 'email': 'alyssa@example.com', - }, { - 'email': 'ben.bitdiddle@example.com', - }] + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "participants": [ + {"email": "alyssa@example.com",}, + {"email": "ben.bitdiddle@example.com",}, + ], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 2 - for participant in e_resp_data['participants']: - res = [e for e in e_data['participants'] - if e['email'] == participant['email']] + assert len(e_resp_data["participants"]) == 2 + for participant in e_resp_data["participants"]: + res = [e for e in e_data["participants"] if e["email"] == participant["email"]] assert len(res) == 1 - participant0 = e_resp_data['participants'][0] - participant1 = e_resp_data['participants'][1] - 
assert participant0['name'] is None - assert participant0['status'] == 'noreply' - assert participant1['name'] is None - assert participant1['status'] == 'noreply' + participant0 = e_resp_data["participants"][0] + participant1 = e_resp_data["participants"][1] + assert participant0["name"] is None + assert participant0["status"] == "noreply" + assert participant1["name"] is None + assert participant1["status"] == "noreply" def test_api_create_status_no(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': calendar.public_id, - 'participants': [{ - 'email': 'alyssa@example.com', - 'status': 'no' - }] + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "participants": [{"email": "alyssa@example.com", "status": "no"}], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 1 - participant = e_resp_data['participants'][0] - assert participant['name'] is None - assert participant['email'] == e_data['participants'][0]['email'] - assert participant['status'] == e_data['participants'][0]['status'] + assert len(e_resp_data["participants"]) == 1 + participant = e_resp_data["participants"][0] + assert participant["name"] is None + assert participant["email"] == e_data["participants"][0]["email"] + assert participant["status"] == e_data["participants"][0]["status"] def test_api_create_status_maybe(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': calendar.public_id, - 'participants': [{ - 'email': 'alyssa@example.com', - 'status': 'maybe' - }] + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "participants": [{"email": "alyssa@example.com", "status": "maybe"}], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 1 - participant = e_resp_data['participants'][0] - assert participant['name'] is None - assert participant['email'] == e_data['participants'][0]['email'] - assert participant['status'] == e_data['participants'][0]['status'] + assert len(e_resp_data["participants"]) == 1 + participant = e_resp_data["participants"][0] + assert participant["name"] is None + assert participant["email"] == e_data["participants"][0]["email"] + assert participant["status"] == e_data["participants"][0]["status"] def test_api_create_status_noreply(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': calendar.public_id, - 'participants': [{ - 'email': 'alyssa@example.com', - 'status': 'noreply' - }] + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "participants": [{"email": "alyssa@example.com", "status": "noreply"}], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 1 - participant = e_resp_data['participants'][0] - assert participant['name'] is None - assert participant['email'] == e_data['participants'][0]['email'] - assert participant['status'] == e_data['participants'][0]['status'] + assert len(e_resp_data["participants"]) == 1 + participant = 
e_resp_data["participants"][0] + assert participant["name"] is None + assert participant["email"] == e_data["participants"][0]["email"] + assert participant["status"] == e_data["participants"][0]["status"] def test_api_create_no_name(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': calendar.public_id, - 'participants': [{ - 'email': 'alyssa@example.com' - }] + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "participants": [{"email": "alyssa@example.com"}], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 1 - participant = e_resp_data['participants'][0] - assert participant['name'] is None - assert participant['email'] == e_data['participants'][0]['email'] - assert participant['status'] == 'noreply' + assert len(e_resp_data["participants"]) == 1 + participant = e_resp_data["participants"][0] + assert participant["name"] is None + assert participant["email"] == e_data["participants"][0]["email"] + assert participant["status"] == "noreply" def test_api_create_no_email(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': calendar.public_id, - 'participants': [{ - 'name': 'alyssa p. hacker', - }] + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "participants": [{"name": "alyssa p. hacker",}], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) assert e_resp_data["type"] == "invalid_request_error" @@ -190,17 +170,15 @@ def test_api_create_no_email(db, api_client, calendar): def test_api_create_bad_status(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': calendar.public_id, - 'participants': [{ - 'name': 'alyssa p. hacker', - 'email': 'alyssa@example.com', - 'status': 'bad' - }] + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "participants": [ + {"name": "alyssa p. 
hacker", "email": "alyssa@example.com", "status": "bad"} + ], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) assert e_resp_data["type"] == "invalid_request_error" @@ -208,230 +186,243 @@ def test_api_create_bad_status(db, api_client, calendar): def test_api_add_participant(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': calendar.public_id, - 'participants': [{'email': 'alyssa@example.com'}, - {'email': 'ben.bitdiddle@example.com'}, - {'email': 'pei.mihn@example.com'}, - {'email': 'bill.ling@example.com'}, - {'email': 'john.q@example.com'}] + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "participants": [ + {"email": "alyssa@example.com"}, + {"email": "ben.bitdiddle@example.com"}, + {"email": "pei.mihn@example.com"}, + {"email": "bill.ling@example.com"}, + {"email": "john.q@example.com"}, + ], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 5 - for i, p in enumerate(e_resp_data['participants']): - res = [e for e in e_resp_data['participants'] - if e['email'] == p['email']] + assert len(e_resp_data["participants"]) == 5 + for i, p in enumerate(e_resp_data["participants"]): + res = [e for e in e_resp_data["participants"] if e["email"] == p["email"]] assert len(res) == 1 - assert res[0]['name'] is None + assert res[0]["name"] is None - event_id = e_resp_data['id'] - e_data['participants'].append({'email': 'filet.minyon@example.com'}) - e_resp = api_client.put_data('/events/' + event_id, e_data) + event_id = e_resp_data["id"] + e_data["participants"].append({"email": "filet.minyon@example.com"}) + e_resp = api_client.put_data("/events/" + event_id, e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 6 - for i, p in enumerate(e_resp_data['participants']): - res = [e for e in e_resp_data['participants'] - if e['email'] == p['email']] + assert len(e_resp_data["participants"]) == 6 + for i, p in enumerate(e_resp_data["participants"]): + res = [e for e in e_resp_data["participants"] if e["email"] == p["email"]] assert len(res) == 1 - assert res[0]['name'] is None + assert res[0]["name"] is None def test_api_remove_participant(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': calendar.public_id, - 'participants': [{'email': 'alyssa@example.com'}, - {'email': 'ben.bitdiddle@example.com'}, - {'email': 'pei.mihn@example.com'}, - {'email': 'bill.ling@example.com'}, - {'email': 'john.q@example.com'}] + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "participants": [ + {"email": "alyssa@example.com"}, + {"email": "ben.bitdiddle@example.com"}, + {"email": "pei.mihn@example.com"}, + {"email": "bill.ling@example.com"}, + {"email": "john.q@example.com"}, + ], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 5 - for i, p in enumerate(e_resp_data['participants']): - res = [e for e in e_resp_data['participants'] - if e['email'] == p['email']] + assert len(e_resp_data["participants"]) == 5 + for i, p in enumerate(e_resp_data["participants"]): + res = [e for e 
in e_resp_data["participants"] if e["email"] == p["email"]] assert len(res) == 1 - assert res[0]['name'] is None + assert res[0]["name"] is None - event_id = e_resp_data['id'] - e_data['participants'].pop() - e_resp = api_client.put_data('/events/' + event_id, e_data) + event_id = e_resp_data["id"] + e_data["participants"].pop() + e_resp = api_client.put_data("/events/" + event_id, e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 4 - for i, p in enumerate(e_resp_data['participants']): - res = [e for e in e_resp_data['participants'] - if e['email'] == p['email']] + assert len(e_resp_data["participants"]) == 4 + for i, p in enumerate(e_resp_data["participants"]): + res = [e for e in e_resp_data["participants"] if e["email"] == p["email"]] assert len(res) == 1 - assert p['name'] is None + assert p["name"] is None def test_api_update_participant_status(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': calendar.public_id, - 'participants': [{'email': 'alyssa@example.com'}, - {'email': 'ben.bitdiddle@example.com'}, - {'email': 'pei.mihn@example.com'}, - {'email': 'bill.ling@example.com'}, - {'email': 'john.q@example.com'}] + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "participants": [ + {"email": "alyssa@example.com"}, + {"email": "ben.bitdiddle@example.com"}, + {"email": "pei.mihn@example.com"}, + {"email": "bill.ling@example.com"}, + {"email": "john.q@example.com"}, + ], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 5 - for i, p in enumerate(e_resp_data['participants']): - res = [e for e in e_data['participants'] if e['email'] == p['email']] + assert len(e_resp_data["participants"]) == 5 + for i, p in enumerate(e_resp_data["participants"]): + res = [e for e in e_data["participants"] if e["email"] == p["email"]] assert len(res) == 1 - assert p['name'] is None + assert p["name"] is None - event_id = e_resp_data['id'] + event_id = e_resp_data["id"] update_data = { - 'calendar_id': calendar.public_id, - 'participants': [{'email': 'alyssa@example.com', - 'status': 'yes'}, - {'email': 'ben.bitdiddle@example.com', - 'status': 'no'}, - {'email': 'pei.mihn@example.com', - 'status': 'maybe'}, - {'email': 'bill.ling@example.com'}, - {'email': 'john.q@example.com'}] + "calendar_id": calendar.public_id, + "participants": [ + {"email": "alyssa@example.com", "status": "yes"}, + {"email": "ben.bitdiddle@example.com", "status": "no"}, + {"email": "pei.mihn@example.com", "status": "maybe"}, + {"email": "bill.ling@example.com"}, + {"email": "john.q@example.com"}, + ], } - e_resp = api_client.put_data('/events/' + event_id, update_data) + e_resp = api_client.put_data("/events/" + event_id, update_data) e_resp_data = json.loads(e_resp.data) # Make sure that nothing changed that we didn't specify - assert e_resp_data['title'] == 'Friday Office Party' - assert e_resp_data['when']['time'] == 1407542195 + assert e_resp_data["title"] == "Friday Office Party" + assert e_resp_data["when"]["time"] == 1407542195 - assert len(e_resp_data['participants']) == 5 - for i, p in enumerate(e_resp_data['participants']): - res = [e for e in e_data['participants'] if e['email'] == p['email']] + assert len(e_resp_data["participants"]) == 5 + for i, p in enumerate(e_resp_data["participants"]): + res = [e for e in 
e_data["participants"] if e["email"] == p["email"]] assert len(res) == 1 - assert p['name'] is None + assert p["name"] is None -@pytest.mark.parametrize('rsvp', ['yes', 'no', 'maybe']) +@pytest.mark.parametrize("rsvp", ["yes", "no", "maybe"]) def test_api_participant_reply(db, api_client, rsvp, calendar): e_data = { - 'title': 'Friday Office Party', - 'calendar_id': calendar.public_id, - 'when': {'time': 1407542195}, - 'participants': [{'email': 'alyssa@example.com'}, - {'email': 'ben.bitdiddle@example.com'}, - {'email': 'pei.mihn@example.com'}, - {'email': 'bill.ling@example.com'}, - {'email': 'john.q@example.com'}] + "title": "Friday Office Party", + "calendar_id": calendar.public_id, + "when": {"time": 1407542195}, + "participants": [ + {"email": "alyssa@example.com"}, + {"email": "ben.bitdiddle@example.com"}, + {"email": "pei.mihn@example.com"}, + {"email": "bill.ling@example.com"}, + {"email": "john.q@example.com"}, + ], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 5 + assert len(e_resp_data["participants"]) == 5 - assert e_resp_data['id'] - assert e_resp_data['participants'] + assert e_resp_data["id"] + assert e_resp_data["participants"] def test_api_participant_reply_invalid_rsvp(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'calendar_id': calendar.public_id, - 'when': {'time': 1407542195}, - 'participants': [{'email': 'alyssa@example.com'}, - {'email': 'ben.bitdiddle@example.com'}, - {'email': 'pei.mihn@example.com'}, - {'email': 'bill.ling@example.com'}, - {'email': 'john.q@example.com'}] + "title": "Friday Office Party", + "calendar_id": calendar.public_id, + "when": {"time": 1407542195}, + "participants": [ + {"email": "alyssa@example.com"}, + {"email": "ben.bitdiddle@example.com"}, + {"email": "pei.mihn@example.com"}, + {"email": "bill.ling@example.com"}, + {"email": "john.q@example.com"}, + ], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 5 + assert len(e_resp_data["participants"]) == 5 - assert e_resp_data['id'] - assert e_resp_data['participants'] + assert e_resp_data["id"] + assert e_resp_data["participants"] def test_api_participant_reply_invalid_participant(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': calendar.public_id, - 'participants': [{'email': 'alyssa@example.com'}, - {'email': 'ben.bitdiddle@example.com'}, - {'email': 'pei.mihn@example.com'}, - {'email': 'bill.ling@example.com'}, - {'email': 'john.q@example.com'}] + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "participants": [ + {"email": "alyssa@example.com"}, + {"email": "ben.bitdiddle@example.com"}, + {"email": "pei.mihn@example.com"}, + {"email": "bill.ling@example.com"}, + {"email": "john.q@example.com"}, + ], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 5 + assert len(e_resp_data["participants"]) == 5 - assert e_resp_data['id'] + assert e_resp_data["id"] def test_api_participant_reply_invalid_event(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': 
calendar.public_id, - 'participants': [{'email': 'alyssa@example.com'}, - {'email': 'ben.bitdiddle@example.com'}, - {'email': 'pei.mihn@example.com'}, - {'email': 'bill.ling@example.com'}, - {'email': 'john.q@example.com'}] + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "participants": [ + {"email": "alyssa@example.com"}, + {"email": "ben.bitdiddle@example.com"}, + {"email": "pei.mihn@example.com"}, + {"email": "bill.ling@example.com"}, + {"email": "john.q@example.com"}, + ], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 5 + assert len(e_resp_data["participants"]) == 5 - assert e_resp_data['participants'] + assert e_resp_data["participants"] def test_api_participant_reply_invalid_event2(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': calendar.public_id, - 'participants': [{'email': 'alyssa@example.com'}, - {'email': 'ben.bitdiddle@example.com'}, - {'email': 'pei.mihn@example.com'}, - {'email': 'bill.ling@example.com'}, - {'email': 'john.q@example.com'}] + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "participants": [ + {"email": "alyssa@example.com"}, + {"email": "ben.bitdiddle@example.com"}, + {"email": "pei.mihn@example.com"}, + {"email": "bill.ling@example.com"}, + {"email": "john.q@example.com"}, + ], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 5 + assert len(e_resp_data["participants"]) == 5 def test_api_participant_reply_invalid_action(db, api_client, calendar): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': calendar.public_id, - 'participants': [{'email': 'alyssa@example.com'}, - {'email': 'ben.bitdiddle@example.com'}, - {'email': 'pei.mihn@example.com'}, - {'email': 'bill.ling@example.com'}, - {'email': 'john.q@example.com'}] + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "participants": [ + {"email": "alyssa@example.com"}, + {"email": "ben.bitdiddle@example.com"}, + {"email": "pei.mihn@example.com"}, + {"email": "bill.ling@example.com"}, + {"email": "john.q@example.com"}, + ], } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert len(e_resp_data['participants']) == 5 - assert e_resp_data['id'] + assert len(e_resp_data["participants"]) == 5 + assert e_resp_data["id"] diff --git a/inbox/test/api/test_event_when.py b/inbox/test/api/test_event_when.py index aa82e758d..3172fc926 100644 --- a/inbox/test/api/test_event_when.py +++ b/inbox/test/api/test_event_when.py @@ -5,7 +5,7 @@ from inbox.test.api.base import api_client -__all__ = ['api_client'] +__all__ = ["api_client"] class CreateError(Exception): @@ -13,255 +13,252 @@ class CreateError(Exception): def _verify_create(ns_id, api_client, e_data): - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) if e_resp.status_code != 200: raise CreateError("Expected status 200, got %d" % e_resp.status_code) e_resp_data = json.loads(e_resp.data) - assert e_resp_data['object'] == 'event' - assert e_resp_data['account_id'] == ns_id - 
assert e_resp_data['title'] == e_data['title'] - assert e_resp_data['location'] == e_data['location'] - for k, v in e_data['when'].iteritems(): - assert arrow.get(e_resp_data['when'][k]) == arrow.get(v) - assert 'id' in e_resp_data - e_id = e_resp_data['id'] - e_get_resp = api_client.get_data('/events/' + e_id) - - assert e_get_resp['object'] == 'event' - assert e_get_resp['account_id'] == ns_id - assert e_get_resp['id'] == e_id - assert e_get_resp['title'] == e_data['title'] - for k, v in e_data['when'].iteritems(): - assert arrow.get(e_get_resp['when'][k]) == arrow.get(v) + assert e_resp_data["object"] == "event" + assert e_resp_data["account_id"] == ns_id + assert e_resp_data["title"] == e_data["title"] + assert e_resp_data["location"] == e_data["location"] + for k, v in e_data["when"].iteritems(): + assert arrow.get(e_resp_data["when"][k]) == arrow.get(v) + assert "id" in e_resp_data + e_id = e_resp_data["id"] + e_get_resp = api_client.get_data("/events/" + e_id) + + assert e_get_resp["object"] == "event" + assert e_get_resp["account_id"] == ns_id + assert e_get_resp["id"] == e_id + assert e_get_resp["title"] == e_data["title"] + for k, v in e_data["when"].iteritems(): + assert arrow.get(e_get_resp["when"][k]) == arrow.get(v) return e_resp_data def test_api_when_as_str(db, api_client, calendar, default_namespace): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': '1407542195'}, - 'calendar_id': calendar.public_id, - 'location': 'Nylas HQ', + "title": "Friday Office Party", + "when": {"time": "1407542195"}, + "calendar_id": calendar.public_id, + "location": "Nylas HQ", } - e_resp_data = _verify_create(default_namespace.public_id, api_client, - e_data) - assert e_resp_data['when']['object'] == 'time' + e_resp_data = _verify_create(default_namespace.public_id, api_client, e_data) + assert e_resp_data["when"]["object"] == "time" def test_api_time(db, api_client, calendar, default_namespace): e_data = { - 'title': 'Friday Office Party', - 'when': {'time': 1407542195}, - 'calendar_id': calendar.public_id, - 'location': 'Nylas HQ', + "title": "Friday Office Party", + "when": {"time": 1407542195}, + "calendar_id": calendar.public_id, + "location": "Nylas HQ", } - e_resp_data = _verify_create(default_namespace.public_id, api_client, - e_data) - assert e_resp_data['when']['object'] == 'time' + e_resp_data = _verify_create(default_namespace.public_id, api_client, e_data) + assert e_resp_data["when"]["object"] == "time" def test_api_timespan(db, api_client, calendar, default_namespace): e_data = { - 'title': 'Friday Office Party', - 'calendar_id': calendar.public_id, - 'when': {'start_time': 1407542195, 'end_time': 1407548195}, - 'location': 'Nylas HQ', + "title": "Friday Office Party", + "calendar_id": calendar.public_id, + "when": {"start_time": 1407542195, "end_time": 1407548195}, + "location": "Nylas HQ", } - e_resp_data = _verify_create(default_namespace.public_id, api_client, - e_data) - assert e_resp_data['when']['object'] == 'timespan' + e_resp_data = _verify_create(default_namespace.public_id, api_client, e_data) + assert e_resp_data["when"]["object"] == "timespan" def test_api_date(db, api_client, calendar, default_namespace): e_data = { - 'title': 'Friday Office Party', - 'calendar_id': calendar.public_id, - 'when': {'date': '2014-08-27'}, - 'location': 'Nylas HQ', + "title": "Friday Office Party", + "calendar_id": calendar.public_id, + "when": {"date": "2014-08-27"}, + "location": "Nylas HQ", } - e_resp_data = _verify_create(default_namespace.public_id, api_client, - 
e_data) - assert e_resp_data['when']['object'] == 'date' + e_resp_data = _verify_create(default_namespace.public_id, api_client, e_data) + assert e_resp_data["when"]["object"] == "date" def test_api_datespan(db, api_client, calendar, default_namespace): e_data = { - 'title': 'Friday Office Party', - 'calendar_id': calendar.public_id, - 'when': {'start_date': '2014-08-27', 'end_date': '2014-08-28'}, - 'location': 'Nylas HQ', + "title": "Friday Office Party", + "calendar_id": calendar.public_id, + "when": {"start_date": "2014-08-27", "end_date": "2014-08-28"}, + "location": "Nylas HQ", } - e_resp_data = _verify_create(default_namespace.public_id, api_client, - e_data) - assert e_resp_data['when']['object'] == 'datespan' + e_resp_data = _verify_create(default_namespace.public_id, api_client, e_data) + assert e_resp_data["when"]["object"] == "datespan" # Invalid -def test_api_invalid_event_no_when(db, api_client, calendar, - default_namespace): - e_data = { - 'title': 'Friday Office Party', - 'calendar_id': calendar.public_id - } +def test_api_invalid_event_no_when(db, api_client, calendar, default_namespace): + e_data = {"title": "Friday Office Party", "calendar_id": calendar.public_id} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) -def test_api_invalid_event_when_no_params(db, api_client, calendar, - default_namespace): +def test_api_invalid_event_when_no_params(db, api_client, calendar, default_namespace): e_data = { - 'title': 'Friday Office Party', - 'when': {}, - 'calendar_id': calendar.public_id, + "title": "Friday Office Party", + "when": {}, + "calendar_id": calendar.public_id, } with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) -def test_api_invalid_event_when_bad_params(db, api_client, calendar, - default_namespace): +def test_api_invalid_event_when_bad_params(db, api_client, calendar, default_namespace): e_data = { - 'title': 'Friday Office Party', - 'calendar_id': calendar.public_id, - 'when': {'start': 0}} + "title": "Friday Office Party", + "calendar_id": calendar.public_id, + "when": {"start": 0}, + } with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) -def test_api_invalid_event_when_timespan_bad_params(db, api_client, calendar, - default_namespace): - e_data = {'title': 'Friday Office Party'} +def test_api_invalid_event_when_timespan_bad_params( + db, api_client, calendar, default_namespace +): + e_data = {"title": "Friday Office Party"} - e_data['when'] = {'object': 'time', 'start': 0} + e_data["when"] = {"object": "time", "start": 0} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) - e_data['when'] = {'object': 'time', 'start_time': 0} + e_data["when"] = {"object": "time", "start_time": 0} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) - e_data['when'] = {'start_time': 0} + e_data["when"] = {"start_time": 0} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) - e_data['when'] = {'start_time': 'a', 'end_time': 0} + e_data["when"] = {"start_time": "a", "end_time": 0} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) - e_data['when'] = {'start_time': 0, 'end_time': 'a'} + e_data["when"] = {"start_time": 0, "end_time": "a"} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) - e_data['when'] = 
{'start_time': 2, 'end_time': 1} + e_data["when"] = {"start_time": 2, "end_time": 1} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) - e_data['when'] = {'start_time': 0, 'end_time': 1, 'time': 2} + e_data["when"] = {"start_time": 0, "end_time": 1, "time": 2} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) -def test_api_invalid_event_when_datespan_bad_params(db, api_client, calendar, - default_namespace): +def test_api_invalid_event_when_datespan_bad_params( + db, api_client, calendar, default_namespace +): e_data = { - 'title': 'Friday Office Party', - 'calendar_id': calendar.public_id, + "title": "Friday Office Party", + "calendar_id": calendar.public_id, } - e_data['when'] = {'object': 'date', 'start': 0} + e_data["when"] = {"object": "date", "start": 0} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) - e_data['when'] = {'object': 'date', 'start_date': 0} + e_data["when"] = {"object": "date", "start_date": 0} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) - e_data['when'] = {'start_date': 0} + e_data["when"] = {"start_date": 0} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) - e_data['when'] = {'start_date': 'a', 'end_date': 0} + e_data["when"] = {"start_date": "a", "end_date": 0} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) - e_data['when'] = {'start_date': 0, 'end_date': 'a'} + e_data["when"] = {"start_date": 0, "end_date": "a"} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) - e_data['when'] = {'start_date': '2014-08-27', - 'end_date': '2014-08-28', - 'date': '2014-08-27'} + e_data["when"] = { + "start_date": "2014-08-27", + "end_date": "2014-08-28", + "date": "2014-08-27", + } with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) - e_data['when'] = {'start_date': '2014-08-29', - 'end_date': '2014-08-28', - 'date': '2014-08-27'} + e_data["when"] = { + "start_date": "2014-08-29", + "end_date": "2014-08-28", + "date": "2014-08-27", + } with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) -def test_api_invalid_event_when_time_bad_params(db, api_client, calendar, - default_namespace): +def test_api_invalid_event_when_time_bad_params( + db, api_client, calendar, default_namespace +): e_data = { - 'title': 'Friday Office Party', - 'calendar_id': calendar.public_id, + "title": "Friday Office Party", + "calendar_id": calendar.public_id, } - e_data['when'] = {'object': 'date', 'time': 0} + e_data["when"] = {"object": "date", "time": 0} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) - e_data['when'] = {'time': 'a'} + e_data["when"] = {"time": "a"} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) - e_data['when'] = {'time': 0, 'date': '2014-08-23'} + e_data["when"] = {"time": 0, "date": "2014-08-23"} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) -def test_api_invalid_event_when_date_bad_params(db, api_client, calendar, - default_namespace): +def test_api_invalid_event_when_date_bad_params( + db, api_client, calendar, default_namespace +): e_data = { - 'title': 'Friday Office Party', - 'calendar_id': calendar.public_id, + 
"title": "Friday Office Party", + "calendar_id": calendar.public_id, } - e_data['when'] = {'object': 'time', 'date': 0} + e_data["when"] = {"object": "time", "date": 0} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) - e_data['when'] = {'date': 'j'} + e_data["when"] = {"date": "j"} with pytest.raises(CreateError): _verify_create(default_namespace.public_id, api_client, e_data) def test_api_event_when_update(db, api_client, calendar, default_namespace): e_data = { - 'title': 'Friday Office Party', - 'location': 'home', - 'calendar_id': calendar.public_id, + "title": "Friday Office Party", + "location": "home", + "calendar_id": calendar.public_id, } - e_data['when'] = {'time': 0} - e_resp_data = _verify_create(default_namespace.public_id, api_client, - e_data) - e_id = e_resp_data['id'] + e_data["when"] = {"time": 0} + e_resp_data = _verify_create(default_namespace.public_id, api_client, e_data) + e_id = e_resp_data["id"] - e_update_data = {'when': {'time': 1}} - e_put_resp = api_client.put_data('/events/' + e_id, e_update_data) + e_update_data = {"when": {"time": 1}} + e_put_resp = api_client.put_data("/events/" + e_id, e_update_data) e_put_data = json.loads(e_put_resp.data) - assert e_put_data['when']['object'] == 'time' - assert e_put_data['when']['time'] == e_update_data['when']['time'] + assert e_put_data["when"]["object"] == "time" + assert e_put_data["when"]["time"] == e_update_data["when"]["time"] diff --git a/inbox/test/api/test_events.py b/inbox/test/api/test_events.py index e9917ba4a..623af063b 100644 --- a/inbox/test/api/test_events.py +++ b/inbox/test/api/test_events.py @@ -6,41 +6,57 @@ from inbox.test.util.base import db, calendar, add_fake_event from inbox.test.api.base import api_client -__all__ = ['api_client', 'calendar', 'db'] +__all__ = ["api_client", "calendar", "db"] def test_create_event(db, api_client, calendar): - e_data = {'title': 'subj', 'description': 'body1', - 'calendar_id': calendar.public_id, - 'when': {'time': 1}, 'location': 'NylasHQ'} - e_data2 = {'title': 'subj2', 'description': 'body2', - 'calendar_id': calendar.public_id, - 'when': {'time': 1}, 'location': 'NylasHQ'} - api_client.post_data('/events', e_data) - api_client.post_data('/events', e_data2) + e_data = { + "title": "subj", + "description": "body1", + "calendar_id": calendar.public_id, + "when": {"time": 1}, + "location": "NylasHQ", + } + e_data2 = { + "title": "subj2", + "description": "body2", + "calendar_id": calendar.public_id, + "when": {"time": 1}, + "location": "NylasHQ", + } + api_client.post_data("/events", e_data) + api_client.post_data("/events", e_data2) db.session.commit() def test_api_list(db, api_client, calendar): - e_data = {'title': 'subj', 'description': 'body1', - 'calendar_id': calendar.public_id, - 'when': {'time': 1}, 'location': 'NylasHQ'} - e_data2 = {'title': 'subj2', 'description': 'body2', - 'calendar_id': calendar.public_id, - 'when': {'time': 1}, 'location': 'NylasHQ'} - api_client.post_data('/events', e_data) - api_client.post_data('/events', e_data2) - - event_list = api_client.get_data('/events') - event_titles = [event['title'] for event in event_list] - assert 'subj' in event_titles - assert 'subj2' in event_titles - - event_descriptions = [event['description'] for event in event_list] - assert 'body1' in event_descriptions - assert 'body2' in event_descriptions - - event_ids = [event['id'] for event in event_list] + e_data = { + "title": "subj", + "description": "body1", + "calendar_id": calendar.public_id, + 
"when": {"time": 1}, + "location": "NylasHQ", + } + e_data2 = { + "title": "subj2", + "description": "body2", + "calendar_id": calendar.public_id, + "when": {"time": 1}, + "location": "NylasHQ", + } + api_client.post_data("/events", e_data) + api_client.post_data("/events", e_data2) + + event_list = api_client.get_data("/events") + event_titles = [event["title"] for event in event_list] + assert "subj" in event_titles + assert "subj2" in event_titles + + event_descriptions = [event["description"] for event in event_list] + assert "body1" in event_descriptions + assert "body2" in event_descriptions + + event_ids = [event["id"] for event in event_list] for e_id in event_ids: ev = db.session.query(Event).filter_by(public_id=e_id).one() @@ -49,26 +65,34 @@ def test_api_list(db, api_client, calendar): def test_api_get(db, api_client, calendar): - e_data = {'title': 'subj', 'when': {'time': 1}, - 'calendar_id': calendar.public_id, 'location': 'NylasHQ'} - e_data2 = {'title': 'subj2', 'when': {'time': 1}, - 'calendar_id': calendar.public_id, 'location': 'NylasHQ'} - api_client.post_data('/events', e_data) - api_client.post_data('/events', e_data2) + e_data = { + "title": "subj", + "when": {"time": 1}, + "calendar_id": calendar.public_id, + "location": "NylasHQ", + } + e_data2 = { + "title": "subj2", + "when": {"time": 1}, + "calendar_id": calendar.public_id, + "location": "NylasHQ", + } + api_client.post_data("/events", e_data) + api_client.post_data("/events", e_data2) - event_list = api_client.get_data('/events') + event_list = api_client.get_data("/events") - event_ids = [event['id'] for event in event_list] + event_ids = [event["id"] for event in event_list] c1found = False c2found = False for c_id in event_ids: - event = api_client.get_data('/events/' + c_id) + event = api_client.get_data("/events/" + c_id) - if event['title'] == 'subj': + if event["title"] == "subj": c1found = True - if event['title'] == 'subj2': + if event["title"] == "subj2": c2found = True assert c1found @@ -77,307 +101,318 @@ def test_api_get(db, api_client, calendar): def test_api_create(db, api_client, calendar, default_account): e_data = { - 'title': 'Friday Office Party', - 'calendar_id': calendar.public_id, - 'when': {'time': 1407542195}, - 'location': 'Nylas HQ', + "title": "Friday Office Party", + "calendar_id": calendar.public_id, + "when": {"time": 1407542195}, + "location": "Nylas HQ", } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert e_resp_data['object'] == 'event' - assert e_resp_data['account_id'] == default_account.namespace.public_id - assert e_resp_data['title'] == e_data['title'] - assert e_resp_data['location'] == e_data['location'] - assert e_resp_data['when']['time'] == e_data['when']['time'] - assert 'id' in e_resp_data - e_id = e_resp_data['id'] - e_get_resp = api_client.get_data('/events/' + e_id) - - assert e_get_resp['object'] == 'event' - assert e_get_resp['account_id'] == default_account.namespace.public_id - assert e_get_resp['id'] == e_id - assert e_get_resp['title'] == e_data['title'] - assert e_get_resp['when']['time'] == e_data['when']['time'] + assert e_resp_data["object"] == "event" + assert e_resp_data["account_id"] == default_account.namespace.public_id + assert e_resp_data["title"] == e_data["title"] + assert e_resp_data["location"] == e_data["location"] + assert e_resp_data["when"]["time"] == e_data["when"]["time"] + assert "id" in e_resp_data + e_id = e_resp_data["id"] + e_get_resp 
= api_client.get_data("/events/" + e_id) + + assert e_get_resp["object"] == "event" + assert e_get_resp["account_id"] == default_account.namespace.public_id + assert e_get_resp["id"] == e_id + assert e_get_resp["title"] == e_data["title"] + assert e_get_resp["when"]["time"] == e_data["when"]["time"] def test_api_create_no_title(db, api_client, calendar, default_account): e_data = { - 'title': '', - 'calendar_id': calendar.public_id, - 'when': {'time': 1407542195}, + "title": "", + "calendar_id": calendar.public_id, + "when": {"time": 1407542195}, } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert e_resp_data['object'] == 'event' - assert e_resp_data['account_id'] == default_account.namespace.public_id - assert e_resp_data['title'] == e_data['title'] - assert e_resp_data['when']['time'] == e_data['when']['time'] - assert 'id' in e_resp_data - e_id = e_resp_data['id'] - e_get_resp = api_client.get_data('/events/' + e_id) + assert e_resp_data["object"] == "event" + assert e_resp_data["account_id"] == default_account.namespace.public_id + assert e_resp_data["title"] == e_data["title"] + assert e_resp_data["when"]["time"] == e_data["when"]["time"] + assert "id" in e_resp_data + e_id = e_resp_data["id"] + e_get_resp = api_client.get_data("/events/" + e_id) - assert e_get_resp['object'] == 'event' - assert e_get_resp['account_id'] == default_account.namespace.public_id - assert e_get_resp['id'] == e_id - assert e_get_resp['title'] == e_data['title'] - assert e_get_resp['when']['time'] == e_data['when']['time'] + assert e_get_resp["object"] == "event" + assert e_get_resp["account_id"] == default_account.namespace.public_id + assert e_get_resp["id"] == e_id + assert e_get_resp["title"] == e_data["title"] + assert e_get_resp["when"]["time"] == e_data["when"]["time"] def test_api_update_title(db, api_client, calendar, default_account): e_data = { - 'title': '', - 'calendar_id': calendar.public_id, - 'when': {'time': 1407542195}, + "title": "", + "calendar_id": calendar.public_id, + "when": {"time": 1407542195}, } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert e_resp_data['object'] == 'event' - assert e_resp_data['account_id'] == default_account.namespace.public_id - assert e_resp_data['title'] == e_data['title'] - assert e_resp_data['when']['time'] == e_data['when']['time'] - assert 'id' in e_resp_data - e_id = e_resp_data['id'] - - e_update_data = {'title': 'new title'} - e_put_resp = api_client.put_data('/events/' + e_id, e_update_data) + assert e_resp_data["object"] == "event" + assert e_resp_data["account_id"] == default_account.namespace.public_id + assert e_resp_data["title"] == e_data["title"] + assert e_resp_data["when"]["time"] == e_data["when"]["time"] + assert "id" in e_resp_data + e_id = e_resp_data["id"] + + e_update_data = {"title": "new title"} + e_put_resp = api_client.put_data("/events/" + e_id, e_update_data) e_put_data = json.loads(e_put_resp.data) - assert e_put_data['object'] == 'event' - assert e_put_data['account_id'] == default_account.namespace.public_id - assert e_put_data['id'] == e_id - assert e_put_data['title'] == 'new title' - assert e_put_data['when']['object'] == 'time' - assert e_put_data['when']['time'] == e_data['when']['time'] + assert e_put_data["object"] == "event" + assert e_put_data["account_id"] == default_account.namespace.public_id + assert e_put_data["id"] 
== e_id + assert e_put_data["title"] == "new title" + assert e_put_data["when"]["object"] == "time" + assert e_put_data["when"]["time"] == e_data["when"]["time"] import pytest + + @pytest.mark.only def test_api_pessimistic_update(db, api_client, calendar, default_account): e_data = { - 'title': '', - 'calendar_id': calendar.public_id, - 'when': {'time': 1407542195}, + "title": "", + "calendar_id": calendar.public_id, + "when": {"time": 1407542195}, } - e_resp = api_client.post_data('/events', e_data, - headers={ "Api-Version": API_VERSIONS[1] }) + e_resp = api_client.post_data( + "/events", e_data, headers={"Api-Version": API_VERSIONS[1]} + ) e_resp_data = json.loads(e_resp.data) - assert e_resp_data['object'] == 'event' - assert e_resp_data['account_id'] == default_account.namespace.public_id - assert e_resp_data['title'] == e_data['title'] - assert e_resp_data['when']['time'] == e_data['when']['time'] - assert 'id' in e_resp_data - e_id = e_resp_data['id'] - - e_update_data = {'title': 'new title'} - e_put_resp = api_client.put_data('/events/' + e_id, e_update_data, - headers={ "Api-Version": API_VERSIONS[1] }) + assert e_resp_data["object"] == "event" + assert e_resp_data["account_id"] == default_account.namespace.public_id + assert e_resp_data["title"] == e_data["title"] + assert e_resp_data["when"]["time"] == e_data["when"]["time"] + assert "id" in e_resp_data + e_id = e_resp_data["id"] + + e_update_data = {"title": "new title"} + e_put_resp = api_client.put_data( + "/events/" + e_id, e_update_data, headers={"Api-Version": API_VERSIONS[1]} + ) e_put_data = json.loads(e_put_resp.data) - assert e_put_data['object'] == 'event' - assert e_put_data['account_id'] == default_account.namespace.public_id - assert e_put_data['id'] == e_id - assert e_put_data['title'] == '' - assert e_put_data['when']['object'] == 'time' - assert e_put_data['when']['time'] == e_data['when']['time'] + assert e_put_data["object"] == "event" + assert e_put_data["account_id"] == default_account.namespace.public_id + assert e_put_data["id"] == e_id + assert e_put_data["title"] == "" + assert e_put_data["when"]["object"] == "time" + assert e_put_data["when"]["time"] == e_data["when"]["time"] def test_api_update_invalid(db, api_client, calendar): - e_update_data = {'title': 'new title'} + e_update_data = {"title": "new title"} e_id = generate_public_id() - e_put_resp = api_client.put_data('/events/' + e_id, e_update_data) + e_put_resp = api_client.put_data("/events/" + e_id, e_update_data) assert e_put_resp.status_code != 200 def test_api_delete(db, api_client, calendar, default_account): e_data = { - 'title': '', - 'calendar_id': calendar.public_id, - 'when': {'time': 1407542195}, + "title": "", + "calendar_id": calendar.public_id, + "when": {"time": 1407542195}, } - e_resp = api_client.post_data('/events', e_data) + e_resp = api_client.post_data("/events", e_data) e_resp_data = json.loads(e_resp.data) - assert e_resp_data['object'] == 'event' - assert e_resp_data['title'] == e_data['title'] - assert e_resp_data['when']['time'] == e_data['when']['time'] - assert 'id' in e_resp_data - e_id = e_resp_data['id'] + assert e_resp_data["object"] == "event" + assert e_resp_data["title"] == e_data["title"] + assert e_resp_data["when"]["time"] == e_data["when"]["time"] + assert "id" in e_resp_data + e_id = e_resp_data["id"] - e_delete_resp = api_client.delete('/events/' + e_id) + e_delete_resp = api_client.delete("/events/" + e_id) assert e_delete_resp.status_code == 200 - e_resp = api_client.get_data('/events/' + e_id) - 
assert e_resp['status'] == 'cancelled' + e_resp = api_client.get_data("/events/" + e_id) + assert e_resp["status"] == "cancelled" def test_api_pessimistic_delete(db, api_client, calendar, default_account): e_data = { - 'title': '', - 'calendar_id': calendar.public_id, - 'when': {'time': 1407542195}, + "title": "", + "calendar_id": calendar.public_id, + "when": {"time": 1407542195}, } - e_resp = api_client.post_data('/events', e_data, - headers={ "Api-Version": API_VERSIONS[1] }) + e_resp = api_client.post_data( + "/events", e_data, headers={"Api-Version": API_VERSIONS[1]} + ) e_resp_data = json.loads(e_resp.data) - assert e_resp_data['object'] == 'event' - assert e_resp_data['title'] == e_data['title'] - assert e_resp_data['when']['time'] == e_data['when']['time'] - assert 'id' in e_resp_data - e_id = e_resp_data['id'] - - e_delete_resp = api_client.delete('/events/' + e_id, - headers={ "Api-Version": API_VERSIONS[1] }) + assert e_resp_data["object"] == "event" + assert e_resp_data["title"] == e_data["title"] + assert e_resp_data["when"]["time"] == e_data["when"]["time"] + assert "id" in e_resp_data + e_id = e_resp_data["id"] + + e_delete_resp = api_client.delete( + "/events/" + e_id, headers={"Api-Version": API_VERSIONS[1]} + ) assert e_delete_resp.status_code == 200 - e_resp = api_client.get_data('/events/' + e_id) - assert e_resp['status'] == 'confirmed' + e_resp = api_client.get_data("/events/" + e_id) + assert e_resp["status"] == "confirmed" def test_api_delete_invalid(db, api_client, calendar): - e_id = 'asdf' - resp = api_client.delete('/events/' + e_id) + e_id = "asdf" + resp = api_client.delete("/events/" + e_id) assert resp.status_code != 200 e_id = generate_public_id() - resp = api_client.delete('/events/' + e_id) + resp = api_client.delete("/events/" + e_id) assert resp.status_code != 200 def test_api_update_read_only(db, api_client, calendar, default_namespace): - add_fake_event(db.session, default_namespace.id, - calendar=calendar, - read_only=True) - event_list = api_client.get_data('/events') + add_fake_event(db.session, default_namespace.id, calendar=calendar, read_only=True) + event_list = api_client.get_data("/events") read_only_event = None for e in event_list: - if e['read_only']: + if e["read_only"]: read_only_event = e break assert read_only_event - e_id = read_only_event['id'] - e_update_data = {'title': 'new title'} - e_put_resp = api_client.put_data('/events/' + e_id, e_update_data) + e_id = read_only_event["id"] + e_update_data = {"title": "new title"} + e_put_resp = api_client.put_data("/events/" + e_id, e_update_data) assert e_put_resp.status_code != 200 def test_api_filter(db, api_client, calendar, default_namespace): - cal = Calendar(namespace_id=default_namespace.id, - uid='uid', - provider_name='Nylas', - name='Climbing Schedule') + cal = Calendar( + namespace_id=default_namespace.id, + uid="uid", + provider_name="Nylas", + name="Climbing Schedule", + ) db.session.add(cal) db.session.commit() cal_id = cal.public_id - e1_data = {'calendar_id': cal_id, - 'title': 'Normal Party', - 'description': 'Everyone Eats Cake', - 'when': {'time': 1}, - 'location': 'Normal Town'} - post_1 = api_client.post_data('/events', e1_data) + e1_data = { + "calendar_id": cal_id, + "title": "Normal Party", + "description": "Everyone Eats Cake", + "when": {"time": 1}, + "location": "Normal Town", + } + post_1 = api_client.post_data("/events", e1_data) assert post_1.status_code == 200 - e2_data = {'calendar_id': cal_id, - 'title': 'Hipster Party', - 'description': 'Everyone Eats 
Kale', - 'when': {'time': 3}, - 'location': 'Hipster Town'} - post_2 = api_client.post_data('/events', e2_data) + e2_data = { + "calendar_id": cal_id, + "title": "Hipster Party", + "description": "Everyone Eats Kale", + "when": {"time": 3}, + "location": "Hipster Town", + } + post_2 = api_client.post_data("/events", e2_data) assert post_2.status_code == 200 # This event exists to test for unicode handling. - e3_data = {'calendar_id': cal_id, - 'title': u'Unicode Party \U0001F389', - 'description': u'Everyone Eats Unicode Tests \u2713', - 'when': {'start_time': 2678401, - 'end_time': 5097601}, - 'location': u'Unicode Castle \U0001F3F0'} - event_3 = api_client.post_data('/events', e3_data) + e3_data = { + "calendar_id": cal_id, + "title": u"Unicode Party \U0001F389", + "description": u"Everyone Eats Unicode Tests \u2713", + "when": {"start_time": 2678401, "end_time": 5097601}, + "location": u"Unicode Castle \U0001F3F0", + } + event_3 = api_client.post_data("/events", e3_data) assert event_3.status_code == 200 - e3_id = json.loads(event_3.data)['id'] + e3_id = json.loads(event_3.data)["id"] - events = api_client.get_data('/events?offset=%s' % '1') + events = api_client.get_data("/events?offset=%s" % "1") assert len(events) == 2 - events = api_client.get_data('/events?limit=%s' % '1') + events = api_client.get_data("/events?limit=%s" % "1") assert len(events) == 1 # Test description queries: all, some, unicode, none - events = api_client.get_data('/events?description=%s' % 'Everyone Eats') + events = api_client.get_data("/events?description=%s" % "Everyone Eats") assert len(events) == 3 - events = api_client.get_data('/events?description=%s' % 'Cake') + events = api_client.get_data("/events?description=%s" % "Cake") assert len(events) == 1 - events = api_client.get_data('/events?description=%s' % u'\u2713') + events = api_client.get_data("/events?description=%s" % u"\u2713") assert len(events) == 1 - events = api_client.get_data('/events?description=%s' % 'bad') + events = api_client.get_data("/events?description=%s" % "bad") assert len(events) == 0 # Test title queries: all, some, unicode, none - events = api_client.get_data('/events?title=%s' % 'Party') + events = api_client.get_data("/events?title=%s" % "Party") assert len(events) == 3 - events = api_client.get_data('/events?title=%s' % 'Hipster') + events = api_client.get_data("/events?title=%s" % "Hipster") assert len(events) == 1 - events = api_client.get_data('/events?title=%s' % u'\U0001F389') + events = api_client.get_data("/events?title=%s" % u"\U0001F389") assert len(events) == 1 - events = api_client.get_data('/events?title=%s' % 'bad') + events = api_client.get_data("/events?title=%s" % "bad") assert len(events) == 0 # Test location queries: all, some, unicode, none - events = api_client.get_data('/events?location=%s' % 'o') + events = api_client.get_data("/events?location=%s" % "o") assert len(events) == 3 - events = api_client.get_data('/events?location=%s' % 'Town') + events = api_client.get_data("/events?location=%s" % "Town") assert len(events) == 2 - events = api_client.get_data('/events?location=%s' % u'\U0001F3F0') + events = api_client.get_data("/events?location=%s" % u"\U0001F3F0") assert len(events) == 1 - events = api_client.get_data('/events?location=%s' % 'bad') + events = api_client.get_data("/events?location=%s" % "bad") assert len(events) == 0 # Test ID queries - _filter = 'event_id={}'.format(e3_id) - events = api_client.get_data('/events?' 
+ _filter) + _filter = "event_id={}".format(e3_id) + events = api_client.get_data("/events?" + _filter) assert len(events) == 1 # Test time queries - _filter = 'starts_before=2' - events = api_client.get_data('/events?' + _filter) + _filter = "starts_before=2" + events = api_client.get_data("/events?" + _filter) assert len(events) == 1 - _filter = 'starts_after=2' - events = api_client.get_data('/events?' + _filter) + _filter = "starts_after=2" + events = api_client.get_data("/events?" + _filter) assert len(events) == 2 - _filter = 'ends_before=2700000' - events = api_client.get_data('/events?' + _filter) + _filter = "ends_before=2700000" + events = api_client.get_data("/events?" + _filter) assert len(events) == 2 - _filter = 'ends_after=2700000' - events = api_client.get_data('/events?' + _filter) + _filter = "ends_after=2700000" + events = api_client.get_data("/events?" + _filter) assert len(events) == 1 # Test calendar queries - _filter = 'calendar_id={}'.format(cal_id) - events = api_client.get_data('/events?' + _filter) + _filter = "calendar_id={}".format(cal_id) + events = api_client.get_data("/events?" + _filter) assert len(events) == 3 - _filter = 'calendar_id=0000000000000000000000000' - events = api_client.get_data('/events?' + _filter) + _filter = "calendar_id=0000000000000000000000000" + events = api_client.get_data("/events?" + _filter) assert len(events) == 0 diff --git a/inbox/test/api/test_events_recurring.py b/inbox/test/api/test_events_recurring.py index a74f77b0a..b5ecf128a 100644 --- a/inbox/test/api/test_events_recurring.py +++ b/inbox/test/api/test_events_recurring.py @@ -6,38 +6,44 @@ from inbox.test.api.base import api_client from inbox.test.util.base import message -__all__ = ['api_client'] +__all__ = ["api_client"] @pytest.fixture(params=[{"all_day": True}, {"all_day": False}]) def recurring_event(db, default_namespace, request): params = request.param - all_day = params.get('all_day', False) + all_day = params.get("all_day", False) rrule = ["RRULE:FREQ=WEEKLY", "EXDATE:20150324T013000,20150331T013000Z"] - cal = db.session.query(Calendar).filter_by( - namespace_id=default_namespace.id).order_by('id').first() - ev = Event(namespace_id=default_namespace.id, - calendar=cal, - title='recurring-weekly', - description='', - uid='recurapitest', - location='', - busy=False, - read_only=False, - reminders='', - recurrence=rrule, - start=arrow.get(2015, 3, 17, 1, 30, 00), - end=arrow.get(2015, 3, 17, 1, 45, 00), - all_day=all_day, - is_owner=True, - participants=[], - provider_name='inbox', - raw_data='', - original_start_tz='America/Los_Angeles', - original_start_time=None, - master_event_uid=None, - source='local') + cal = ( + db.session.query(Calendar) + .filter_by(namespace_id=default_namespace.id) + .order_by("id") + .first() + ) + ev = Event( + namespace_id=default_namespace.id, + calendar=cal, + title="recurring-weekly", + description="", + uid="recurapitest", + location="", + busy=False, + read_only=False, + reminders="", + recurrence=rrule, + start=arrow.get(2015, 3, 17, 1, 30, 00), + end=arrow.get(2015, 3, 17, 1, 45, 00), + all_day=all_day, + is_owner=True, + participants=[], + provider_name="inbox", + raw_data="", + original_start_tz="America/Los_Angeles", + original_start_time=None, + master_event_uid=None, + source="local", + ) db.session.add(ev) db.session.commit() return ev @@ -46,56 +52,57 @@ def recurring_event(db, default_namespace, request): def test_api_expand_recurring(db, api_client, recurring_event): event = recurring_event - events = 
api_client.get_data('/events?expand_recurring=false') + events = api_client.get_data("/events?expand_recurring=false") assert len(events) == 1 # Make sure the recurrence info is on the recurring event for e in events: - if e['title'] == 'recurring-weekly': - assert e.get('recurrence') is not None + if e["title"] == "recurring-weekly": + assert e.get("recurrence") is not None thirty_weeks = event.start.replace(weeks=+30).isoformat() starts_after = event.start.replace(days=-1).isoformat() - recur = 'expand_recurring=true&starts_after={}&ends_before={}'.format( - urllib.quote_plus(starts_after), urllib.quote_plus(thirty_weeks)) - all_events = api_client.get_data('/events?' + recur) + recur = "expand_recurring=true&starts_after={}&ends_before={}".format( + urllib.quote_plus(starts_after), urllib.quote_plus(thirty_weeks) + ) + all_events = api_client.get_data("/events?" + recur) if not event.all_day: assert len(all_events) == 28 # the ordering should be correct - prev = all_events[0]['when']['start_time'] + prev = all_events[0]["when"]["start_time"] for e in all_events[1:]: - assert e['when']['start_time'] > prev - prev = e['when']['start_time'] + assert e["when"]["start_time"] > prev + prev = e["when"]["start_time"] # Check that the parent event recurring id is included # too. - assert e['calendar_id'] == recurring_event.calendar.public_id + assert e["calendar_id"] == recurring_event.calendar.public_id - events = api_client.get_data('/events?' + recur + '&view=count') - assert events.get('count') == 28 + events = api_client.get_data("/events?" + recur + "&view=count") + assert events.get("count") == 28 else: # Since an all-day event starts at 00:00 we're returning one # more event. assert len(all_events) == 29 # the ordering should be correct - prev = all_events[0]['when']['date'] + prev = all_events[0]["when"]["date"] for e in all_events[1:]: - assert e['when']['date'] > prev - prev = e['when']['date'] + assert e["when"]["date"] > prev + prev = e["when"]["date"] # Check that the parent event recurring id is included # too. - assert e['calendar_id'] == recurring_event.calendar.public_id + assert e["calendar_id"] == recurring_event.calendar.public_id - events = api_client.get_data('/events?' + recur + '&view=count') - assert events.get('count') == 29 + events = api_client.get_data("/events?" + recur + "&view=count") + assert events.get("count") == 29 - events = api_client.get_data('/events?' + recur + '&limit=5') + events = api_client.get_data("/events?" + recur + "&limit=5") assert len(events) == 5 - events = api_client.get_data('/events?' + recur + '&offset=5') - assert events[0]['id'] == all_events[5]['id'] + events = api_client.get_data("/events?" + recur + "&offset=5") + assert events[0]["id"] == all_events[5]["id"] def urlsafe(dt): @@ -107,35 +114,40 @@ def test_api_expand_recurring_before_after(db, api_client, recurring_event): starts_after = event.start.replace(weeks=+15) ends_before = starts_after.replace(days=+1) - recur = 'expand_recurring=true&starts_after={}&ends_before={}'.format( - urlsafe(starts_after), urlsafe(ends_before)) - all_events = api_client.get_data('/events?' + recur) + recur = "expand_recurring=true&starts_after={}&ends_before={}".format( + urlsafe(starts_after), urlsafe(ends_before) + ) + all_events = api_client.get_data("/events?" + recur) assert len(all_events) == 1 - recur = 'expand_recurring=true&starts_after={}&starts_before={}'.format( - urlsafe(starts_after), urlsafe(ends_before)) - all_events = api_client.get_data('/events?' 
+ recur) + recur = "expand_recurring=true&starts_after={}&starts_before={}".format( + urlsafe(starts_after), urlsafe(ends_before) + ) + all_events = api_client.get_data("/events?" + recur) assert len(all_events) == 1 - recur = 'expand_recurring=true&ends_after={}&starts_before={}'.format( - urlsafe(starts_after), urlsafe(ends_before)) - all_events = api_client.get_data('/events?' + recur) + recur = "expand_recurring=true&ends_after={}&starts_before={}".format( + urlsafe(starts_after), urlsafe(ends_before) + ) + all_events = api_client.get_data("/events?" + recur) assert len(all_events) == 1 - recur = 'expand_recurring=true&ends_after={}&ends_before={}'.format( - urlsafe(starts_after), urlsafe(ends_before)) - all_events = api_client.get_data('/events?' + recur) + recur = "expand_recurring=true&ends_after={}&ends_before={}".format( + urlsafe(starts_after), urlsafe(ends_before) + ) + all_events = api_client.get_data("/events?" + recur) assert len(all_events) == 1 -def test_api_override_serialization(db, api_client, default_namespace, - recurring_event): +def test_api_override_serialization(db, api_client, default_namespace, recurring_event): event = recurring_event - override = Event(original_start_time=event.start, - master_event_uid=event.uid, - namespace_id=default_namespace.id, - calendar_id=event.calendar_id) + override = Event( + original_start_time=event.start, + master_event_uid=event.uid, + namespace_id=default_namespace.id, + calendar_id=event.calendar_id, + ) override.update(event) override.uid = event.uid + "_" + event.start.strftime("%Y%m%dT%H%M%SZ") override.master = event @@ -144,43 +156,42 @@ def test_api_override_serialization(db, api_client, default_namespace, db.session.add(override) db.session.commit() - filter = 'starts_after={}&ends_before={}'.format( - urlsafe(event.start.replace(hours=-1)), - urlsafe(event.start.replace(weeks=+1))) - events = api_client.get_data('/events?' + filter) + filter = "starts_after={}&ends_before={}".format( + urlsafe(event.start.replace(hours=-1)), urlsafe(event.start.replace(weeks=+1)) + ) + events = api_client.get_data("/events?" + filter) # We should have the base event and the override back, but no extras; # this allows clients to do their own expansion, should they ever desire # to experience the joy that is RFC 2445 section 4.8.5.4. assert len(events) == 2 - assert events[0].get('object') == 'event' - assert events[0].get('recurrence') is not None - assert events[1].get('object') == 'event' - assert events[1].get('status') == 'cancelled' + assert events[0].get("object") == "event" + assert events[0].get("recurrence") is not None + assert events[1].get("object") == "event" + assert events[1].get("status") == "cancelled" -def test_api_expand_recurring_message(db, api_client, message, - recurring_event): +def test_api_expand_recurring_message(db, api_client, message, recurring_event): # This is a regression test for https://phab.nylas.com/T3556 # ("InflatedEvent should not be committed" exception in API"). 
event = recurring_event event.message = message db.session.commit() - events = api_client.get_data('/events?expand_recurring=false') + events = api_client.get_data("/events?expand_recurring=false") assert len(events) == 1 # Make sure the recurrence info is on the recurring event for e in events: - if e['title'] == 'recurring-weekly': - assert e.get('recurrence') is not None - assert e.get('message_id') is not None + if e["title"] == "recurring-weekly": + assert e.get("recurrence") is not None + assert e.get("message_id") is not None - r = api_client.get_raw('/events?expand_recurring=true') + r = api_client.get_raw("/events?expand_recurring=true") assert r.status_code == 200 - all_events = api_client.get_data('/events?expand_recurring=true') + all_events = api_client.get_data("/events?expand_recurring=true") assert len(all_events) != 0 for event in all_events: - assert event['master_event_id'] is not None - assert 'message_id' not in event + assert event["master_event_id"] is not None + assert "message_id" not in event diff --git a/inbox/test/api/test_files.py b/inbox/test/api/test_files.py index 99c670a93..eef288575 100644 --- a/inbox/test/api/test_files.py +++ b/inbox/test/api/test_files.py @@ -11,121 +11,119 @@ from inbox.test.api.base import api_client from inbox.util.testutils import FILENAMES -__all__ = ['api_client'] +__all__ = ["api_client"] @pytest.fixture def draft(db, default_account): return { - 'subject': 'Draft test at {}'.format(datetime.utcnow()), - 'body': '

Sea, birds and sand.

', - 'to': [{'name': 'The red-haired mermaid', - 'email': default_account.email_address}] + "subject": "Draft test at {}".format(datetime.utcnow()), + "body": "

Sea, birds and sand.

", + "to": [ + {"name": "The red-haired mermaid", "email": default_account.email_address} + ], } def test_file_filtering(api_client, uploaded_file_ids, draft): # Attach the files to a draft and search there - draft['file_ids'] = uploaded_file_ids - r = api_client.post_data('/drafts', draft) + draft["file_ids"] = uploaded_file_ids + r = api_client.post_data("/drafts", draft) assert r.status_code == 200 draft_resp = json.loads(r.data) - assert len(draft_resp['files']) == len(uploaded_file_ids) - d_id = draft_resp['id'] + assert len(draft_resp["files"]) == len(uploaded_file_ids) + d_id = draft_resp["id"] - results = api_client.get_data('/files') + results = api_client.get_data("/files") assert len(results) == len(uploaded_file_ids) - results = api_client.get_data('/files?message_id={}'.format(d_id)) + results = api_client.get_data("/files?message_id={}".format(d_id)) - assert all([d_id in f['message_ids'] for f in results]) + assert all([d_id in f["message_ids"] for f in results]) assert len(results) == len(uploaded_file_ids) - results = api_client.get_data('/files?message_id={}&limit=1' - .format(d_id)) + results = api_client.get_data("/files?message_id={}&limit=1".format(d_id)) assert len(results) == 1 - results = api_client.get_data('/files?message_id={}&offset=2' - .format(d_id)) + results = api_client.get_data("/files?message_id={}&offset=2".format(d_id)) assert len(results) == 3 - results = api_client.get_data('/files?filename=LetMeSendYouEmail.wav') + results = api_client.get_data("/files?filename=LetMeSendYouEmail.wav") assert len(results) == 1 - results = api_client.get_data('/files?content_type=audio%2Fx-wav') + results = api_client.get_data("/files?content_type=audio%2Fx-wav") assert len(results) == 1 - results = api_client.get_data('/files?content_type=image%2Fjpeg') + results = api_client.get_data("/files?content_type=image%2Fjpeg") assert len(results) == 2 - results = api_client.get_data( - '/files?content_type=image%2Fjpeg&view=count') + results = api_client.get_data("/files?content_type=image%2Fjpeg&view=count") assert results["count"] == 2 - results = api_client.get_data('/files?content_type=image%2Fjpeg&view=ids') + results = api_client.get_data("/files?content_type=image%2Fjpeg&view=ids") assert len(results) == 2 def test_attachment_has_same_id(api_client, uploaded_file_ids, draft): attachment_id = uploaded_file_ids.pop() - draft['file_ids'] = [attachment_id] - r = api_client.post_data('/drafts', draft) + draft["file_ids"] = [attachment_id] + r = api_client.post_data("/drafts", draft) assert r.status_code == 200 draft_resp = json.loads(r.data) - assert attachment_id in [x['id'] for x in draft_resp['files']] + assert attachment_id in [x["id"] for x in draft_resp["files"]] def test_delete(api_client, uploaded_file_ids, draft): non_attachment_id = uploaded_file_ids.pop() attachment_id = uploaded_file_ids.pop() - draft['file_ids'] = [attachment_id] - r = api_client.post_data('/drafts', draft) + draft["file_ids"] = [attachment_id] + r = api_client.post_data("/drafts", draft) assert r.status_code == 200 # Test that we can delete a non-attachment - r = api_client.delete('/files/{}'.format(non_attachment_id)) + r = api_client.delete("/files/{}".format(non_attachment_id)) assert r.status_code == 200 - data = api_client.get_data('/files/{}'.format(non_attachment_id)) - assert data['message'].startswith("Couldn't find file") + data = api_client.get_data("/files/{}".format(non_attachment_id)) + assert data["message"].startswith("Couldn't find file") # Make sure that we cannot delete 
attachments - r = api_client.delete('/files/{}'.format(attachment_id)) + r = api_client.delete("/files/{}".format(attachment_id)) assert r.status_code == 400 - data = api_client.get_data('/files/{}'.format(attachment_id)) - assert data['id'] == attachment_id + data = api_client.get_data("/files/{}".format(attachment_id)) + assert data["id"] == attachment_id @pytest.mark.parametrize("filename", FILENAMES) def test_get_with_id(api_client, uploaded_file_ids, filename): # See comment in uploaded_file_ids() - if filename == 'piece-jointe.jpg': - filename = u'pièce-jointe.jpg' - elif filename == 'andra-moi-ennepe.txt': - filename = u'ἄνδρα μοι ἔννεπε' - elif filename == 'long-non-ascii-filename.txt': - filename = 100 * u'μ' - in_file = api_client.get_data(u'/files?filename={}'.format(filename))[0] - data = api_client.get_data('/files/{}'.format(in_file['id'])) - assert data['filename'] == filename + if filename == "piece-jointe.jpg": + filename = u"pièce-jointe.jpg" + elif filename == "andra-moi-ennepe.txt": + filename = u"ἄνδρα μοι ἔννεπε" + elif filename == "long-non-ascii-filename.txt": + filename = 100 * u"μ" + in_file = api_client.get_data(u"/files?filename={}".format(filename))[0] + data = api_client.get_data("/files/{}".format(in_file["id"])) + assert data["filename"] == filename def test_get_invalid(api_client, uploaded_file_ids): - data = api_client.get_data('/files/0000000000000000000000000') - assert data['message'].startswith("Couldn't find file") - data = api_client.get_data('/files/!') - assert data['message'].startswith("Invalid id") + data = api_client.get_data("/files/0000000000000000000000000") + assert data["message"].startswith("Couldn't find file") + data = api_client.get_data("/files/!") + assert data["message"].startswith("Invalid id") - data = api_client.get_data('/files/0000000000000000000000000/download') - assert data['message'].startswith("Couldn't find file") - data = api_client.get_data('/files/!/download') - assert data['message'].startswith("Invalid id") + data = api_client.get_data("/files/0000000000000000000000000/download") + assert data["message"].startswith("Couldn't find file") + data = api_client.get_data("/files/!/download") + assert data["message"].startswith("Invalid id") - r = api_client.delete('/files/0000000000000000000000000') + r = api_client.delete("/files/0000000000000000000000000") assert r.status_code == 404 - r = api_client.delete('/files/!') + r = api_client.delete("/files/!") assert r.status_code == 400 @@ -133,68 +131,76 @@ def test_get_invalid(api_client, uploaded_file_ids): def test_download(api_client, uploaded_file_ids, filename): # See comment in uploaded_file_ids() original_filename = filename - if filename == 'piece-jointe.jpg': - filename = u'pièce-jointe.jpg' - elif filename == 'andra-moi-ennepe.txt': - filename = u'ἄνδρα μοι ἔννεπε' - elif filename == 'long-non-ascii-filename.txt': - filename = 100 * u'μ' - - in_file = api_client.get_data(u'/files?filename={}'.format(filename))[0] - data = api_client.get_raw('/files/{}/download'.format(in_file['id'])).data - - path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', - 'data', original_filename.encode('utf-8')) - local_data = open(path, 'rb').read() + if filename == "piece-jointe.jpg": + filename = u"pièce-jointe.jpg" + elif filename == "andra-moi-ennepe.txt": + filename = u"ἄνδρα μοι ἔννεπε" + elif filename == "long-non-ascii-filename.txt": + filename = 100 * u"μ" + + in_file = api_client.get_data(u"/files?filename={}".format(filename))[0] + data = 
api_client.get_raw("/files/{}/download".format(in_file["id"])).data + + path = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "..", + "data", + original_filename.encode("utf-8"), + ) + local_data = open(path, "rb").read() local_md5 = md5.new(local_data).digest() dl_md5 = md5.new(data).digest() assert local_md5 == dl_md5 -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def fake_attachment(db, default_account, message): block = Block() namespace_id = default_account.namespace.id block.namespace_id = namespace_id - block.filename = 'zambla.txt' - block.content_type = 'text/plain' + block.filename = "zambla.txt" + block.content_type = "text/plain" block.size = 32 # This is sha256 of an attachment in our test email. - block.data_sha256 = '27dc8e801f962098fd4a741ccbd6ca24d42805df0b8035cfb881ad6e5a1bf4b2' + block.data_sha256 = ( + "27dc8e801f962098fd4a741ccbd6ca24d42805df0b8035cfb881ad6e5a1bf4b2" + ) p = Part(block=block, message=message) db.session.add(p) db.session.commit() return p -def test_direct_fetching(api_client, db, message, - fake_attachment, monkeypatch): +def test_direct_fetching(api_client, db, message, fake_attachment, monkeypatch): # Mark a file as missing and check that we try to # fetch it from the remote provider. get_mock = mock.Mock(return_value=None) - monkeypatch.setattr('inbox.util.blockstore.get_from_blockstore', - get_mock) + monkeypatch.setattr("inbox.util.blockstore.get_from_blockstore", get_mock) save_mock = mock.Mock() - monkeypatch.setattr('inbox.util.blockstore.save_to_blockstore', - save_mock) + monkeypatch.setattr("inbox.util.blockstore.save_to_blockstore", save_mock) # Mock the request to return the contents of an actual email. - path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', - 'data', 'raw_message_with_filename_attachment.txt') + path = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "..", + "data", + "raw_message_with_filename_attachment.txt", + ) data = "" with open(path) as fd: data = fd.read() raw_mock = mock.Mock(return_value=data) - monkeypatch.setattr('inbox.s3.backends.gmail.get_gmail_raw_contents', - raw_mock) + monkeypatch.setattr("inbox.s3.backends.gmail.get_gmail_raw_contents", raw_mock) - resp = api_client.get_raw('/files/{}/download'.format(fake_attachment.block.public_id)) + resp = api_client.get_raw( + "/files/{}/download".format(fake_attachment.block.public_id) + ) for m in [get_mock, save_mock, raw_mock]: assert m.called # Check that we got back the right data, with the right headers. 
- assert resp.headers['Content-Disposition'] == 'attachment; filename=zambla.txt' - assert resp.data.decode("utf8") == u'Chuis pas rassur\xe9' + assert resp.headers["Content-Disposition"] == "attachment; filename=zambla.txt" + assert resp.data.decode("utf8") == u"Chuis pas rassur\xe9" diff --git a/inbox/test/api/test_filtering.py b/inbox/test/api/test_filtering.py index 12f9e43cb..a338b9545 100644 --- a/inbox/test/api/test_filtering.py +++ b/inbox/test/api/test_filtering.py @@ -4,22 +4,30 @@ from sqlalchemy import desc from inbox.models import Message, Thread, Namespace, Block, Category from inbox.util.misc import dt_to_timestamp -from inbox.test.util.base import (test_client, add_fake_message, - add_fake_thread) +from inbox.test.util.base import test_client, add_fake_message, add_fake_thread from inbox.test.api.base import api_client -__all__ = ['api_client', 'test_client'] +__all__ = ["api_client", "test_client"] def test_filtering(db, api_client, default_namespace): thread = add_fake_thread(db.session, default_namespace.id) - message = add_fake_message(db.session, default_namespace.id, thread, - to_addr=[('Bob', 'bob@foocorp.com')], - from_addr=[('Alice', 'alice@foocorp.com')], - subject='some subject') + message = add_fake_message( + db.session, + default_namespace.id, + thread, + to_addr=[("Bob", "bob@foocorp.com")], + from_addr=[("Alice", "alice@foocorp.com")], + subject="some subject", + ) message.categories.add( - Category(namespace_id=message.namespace_id, - name='inbox', display_name='Inbox', type_='label')) + Category( + namespace_id=message.namespace_id, + name="inbox", + display_name="Inbox", + type_="label", + ) + ) thread.subject = message.subject db.session.commit() @@ -32,104 +40,103 @@ def test_filtering(db, api_client, default_namespace): unread = not message.is_read starred = message.is_starred - results = api_client.get_data('/threads?thread_id={}' - .format(thread.public_id)) + results = api_client.get_data("/threads?thread_id={}".format(thread.public_id)) assert len(results) == 1 - results = api_client.get_data('/messages?thread_id={}' - .format(thread.public_id)) + results = api_client.get_data("/messages?thread_id={}".format(thread.public_id)) assert len(results) == 1 - results = api_client.get_data('/threads?cc={}' - .format(message.cc_addr)) + results = api_client.get_data("/threads?cc={}".format(message.cc_addr)) assert len(results) == 0 - results = api_client.get_data('/messages?cc={}' - .format(message.cc_addr)) + results = api_client.get_data("/messages?cc={}".format(message.cc_addr)) assert len(results) == 0 - results = api_client.get_data('/threads?bcc={}' - .format(message.bcc_addr)) + results = api_client.get_data("/threads?bcc={}".format(message.bcc_addr)) assert len(results) == 0 - results = api_client.get_data('/messages?bcc={}' - .format(message.bcc_addr)) + results = api_client.get_data("/messages?bcc={}".format(message.bcc_addr)) assert len(results) == 0 - results = api_client.get_data('/threads?filename=test') + results = api_client.get_data("/threads?filename=test") assert len(results) == 0 - results = api_client.get_data('/messages?filename=test') + results = api_client.get_data("/messages?filename=test") assert len(results) == 0 - results = api_client.get_data('/threads?started_after={}' - .format(t_start - 1)) + results = api_client.get_data("/threads?started_after={}".format(t_start - 1)) assert len(results) == 1 - results = api_client.get_data('/messages?started_after={}' - .format(t_start - 1)) + results = 
api_client.get_data("/messages?started_after={}".format(t_start - 1)) assert len(results) == 1 - results = api_client.get_data('/messages?last_message_before={}&limit=1' - .format(t_lastmsg + 1)) + results = api_client.get_data( + "/messages?last_message_before={}&limit=1".format(t_lastmsg + 1) + ) assert len(results) == 1 - results = api_client.get_data('/threads?last_message_before={}&limit=1' - .format(t_lastmsg + 1)) + results = api_client.get_data( + "/threads?last_message_before={}&limit=1".format(t_lastmsg + 1) + ) assert len(results) == 1 - results = api_client.get_data('/threads?in=inbox&limit=1') + results = api_client.get_data("/threads?in=inbox&limit=1") assert len(results) == 1 - results = api_client.get_data('/messages?in=inbox&limit=1') + results = api_client.get_data("/messages?in=inbox&limit=1") assert len(results) == 1 - results = api_client.get_data('/messages?in=banana%20rama') + results = api_client.get_data("/messages?in=banana%20rama") assert len(results) == 0 - results = api_client.get_data('/threads?subject={}'.format(subject)) + results = api_client.get_data("/threads?subject={}".format(subject)) assert len(results) == 1 - results = api_client.get_data('/messages?subject={}'.format(subject)) + results = api_client.get_data("/messages?subject={}".format(subject)) assert len(results) == 1 - results = api_client.get_data('/threads?unread={}'.format(unread)) + results = api_client.get_data("/threads?unread={}".format(unread)) assert len(results) == 1 - results = api_client.get_data('/messages?unread={}'.format((not unread))) + results = api_client.get_data("/messages?unread={}".format((not unread))) assert len(results) == 0 - results = api_client.get_data('/threads?starred={}'.format((not starred))) + results = api_client.get_data("/threads?starred={}".format((not starred))) assert len(results) == 0 - results = api_client.get_data('/messages?starred={}'.format(starred)) + results = api_client.get_data("/messages?starred={}".format(starred)) assert len(results) == 1 for _ in range(3): - add_fake_message(db.session, default_namespace.id, - to_addr=[('', 'inboxapptest@gmail.com')], - thread=add_fake_thread(db.session, - default_namespace.id)) - - results = api_client.get_data('/messages?any_email={}'. - format('inboxapptest@gmail.com')) + add_fake_message( + db.session, + default_namespace.id, + to_addr=[("", "inboxapptest@gmail.com")], + thread=add_fake_thread(db.session, default_namespace.id), + ) + + results = api_client.get_data( + "/messages?any_email={}".format("inboxapptest@gmail.com") + ) assert len(results) > 1 # Test multiple any_email params - multiple_results = api_client.get_data('/messages?any_email={},{},{}'. - format('inboxapptest@gmail.com', - 'bob@foocorp.com', - 'unused@gmail.com')) + multiple_results = api_client.get_data( + "/messages?any_email={},{},{}".format( + "inboxapptest@gmail.com", "bob@foocorp.com", "unused@gmail.com" + ) + ) assert len(multiple_results) > len(results) # Check that we canonicalize when searching. - alternate_results = api_client.get_data('/threads?any_email={}'. 
- format('inboxapp.test@gmail.com')) + alternate_results = api_client.get_data( + "/threads?any_email={}".format("inboxapp.test@gmail.com") + ) assert len(alternate_results) == len(results) - results = api_client.get_data('/messages?from={}'.format(from_addr)) + results = api_client.get_data("/messages?from={}".format(from_addr)) assert len(results) == 1 - results = api_client.get_data('/threads?from={}'.format(from_addr)) + results = api_client.get_data("/threads?from={}".format(from_addr)) assert len(results) == 1 early_time = received_date - datetime.timedelta(seconds=1) @@ -137,116 +144,140 @@ def test_filtering(db, api_client, default_namespace): early_ts = calendar.timegm(early_time.utctimetuple()) late_ts = calendar.timegm(late_time.utctimetuple()) - results = api_client.get_data('/messages?subject={}&started_before={}'. - format(subject, early_ts)) + results = api_client.get_data( + "/messages?subject={}&started_before={}".format(subject, early_ts) + ) assert len(results) == 0 - results = api_client.get_data('/threads?subject={}&started_before={}'. - format(subject, early_ts)) + results = api_client.get_data( + "/threads?subject={}&started_before={}".format(subject, early_ts) + ) assert len(results) == 0 - results = api_client.get_data('/messages?subject={}&started_before={}'. - format(subject, late_ts)) + results = api_client.get_data( + "/messages?subject={}&started_before={}".format(subject, late_ts) + ) assert len(results) == 1 - results = api_client.get_data('/threads?subject={}&started_before={}'. - format(subject, late_ts)) + results = api_client.get_data( + "/threads?subject={}&started_before={}".format(subject, late_ts) + ) assert len(results) == 1 - results = api_client.get_data('/messages?subject={}&last_message_after={}'. - format(subject, early_ts)) + results = api_client.get_data( + "/messages?subject={}&last_message_after={}".format(subject, early_ts) + ) assert len(results) == 1 - results = api_client.get_data('/threads?subject={}&last_message_after={}'. - format(subject, early_ts)) + results = api_client.get_data( + "/threads?subject={}&last_message_after={}".format(subject, early_ts) + ) assert len(results) == 1 - results = api_client.get_data('/messages?subject={}&last_message_after={}'. - format(subject, late_ts)) + results = api_client.get_data( + "/messages?subject={}&last_message_after={}".format(subject, late_ts) + ) assert len(results) == 0 - results = api_client.get_data('/threads?subject={}&last_message_after={}'. - format(subject, late_ts)) + results = api_client.get_data( + "/threads?subject={}&last_message_after={}".format(subject, late_ts) + ) assert len(results) == 0 - results = api_client.get_data('/messages?subject={}&started_before={}'. - format(subject, early_ts)) + results = api_client.get_data( + "/messages?subject={}&started_before={}".format(subject, early_ts) + ) assert len(results) == 0 - results = api_client.get_data('/threads?subject={}&started_before={}'. - format(subject, early_ts)) + results = api_client.get_data( + "/threads?subject={}&started_before={}".format(subject, early_ts) + ) assert len(results) == 0 - results = api_client.get_data('/messages?subject={}&started_before={}'. - format(subject, late_ts)) + results = api_client.get_data( + "/messages?subject={}&started_before={}".format(subject, late_ts) + ) assert len(results) == 1 - results = api_client.get_data('/threads?subject={}&started_before={}'. 
- format(subject, late_ts)) + results = api_client.get_data( + "/threads?subject={}&started_before={}".format(subject, late_ts) + ) assert len(results) == 1 - results = api_client.get_data('/messages?from={}&to={}'. - format(from_addr, to_addr)) + results = api_client.get_data("/messages?from={}&to={}".format(from_addr, to_addr)) assert len(results) == 1 - results = api_client.get_data('/threads?from={}&to={}'. - format(from_addr, to_addr)) + results = api_client.get_data("/threads?from={}&to={}".format(from_addr, to_addr)) assert len(results) == 1 - results = api_client.get_data('/messages?to={}&limit={}&offset={}'. - format('inboxapptest@gmail.com', 2, 1)) + results = api_client.get_data( + "/messages?to={}&limit={}&offset={}".format("inboxapptest@gmail.com", 2, 1) + ) assert len(results) == 2 - results = api_client.get_data('/threads?to={}&limit={}'. - format('inboxapptest@gmail.com', 3)) + results = api_client.get_data( + "/threads?to={}&limit={}".format("inboxapptest@gmail.com", 3) + ) assert len(results) == 3 - results = api_client.get_data('/threads?view=count') + results = api_client.get_data("/threads?view=count") - assert results['count'] == 4 + assert results["count"] == 4 - results = api_client.get_data('/threads?view=ids&to={}&limit=3'. - format('inboxapptest@gmail.com', 3)) + results = api_client.get_data( + "/threads?view=ids&to={}&limit=3".format("inboxapptest@gmail.com", 3) + ) assert len(results) == 3 - assert all(isinstance(r, basestring) - for r in results), "Returns a list of string" + assert all(isinstance(r, basestring) for r in results), "Returns a list of string" def test_query_target(db, api_client, thread, default_namespace): - cat = Category(namespace_id=default_namespace.id, - name='inbox', display_name='Inbox', type_='label') + cat = Category( + namespace_id=default_namespace.id, + name="inbox", + display_name="Inbox", + type_="label", + ) for _ in range(3): - message = add_fake_message(db.session, default_namespace.id, thread, - to_addr=[('Bob', 'bob@foocorp.com')], - from_addr=[('Alice', 'alice@foocorp.com')], - subject='some subject') + message = add_fake_message( + db.session, + default_namespace.id, + thread, + to_addr=[("Bob", "bob@foocorp.com")], + from_addr=[("Alice", "alice@foocorp.com")], + subject="some subject", + ) message.categories.add(cat) db.session.commit() - results = api_client.get_data('/messages?in=inbox') + results = api_client.get_data("/messages?in=inbox") assert len(results) == 3 - count = api_client.get_data('/messages?in=inbox&view=count') - assert count['count'] == 3 + count = api_client.get_data("/messages?in=inbox&view=count") + assert count["count"] == 3 def test_ordering(api_client, db, default_namespace): for i in range(3): thr = add_fake_thread(db.session, default_namespace.id) - received_date = (datetime.datetime.utcnow() + - datetime.timedelta(seconds=22 * (i + 1))) - add_fake_message(db.session, default_namespace.id, - thr, received_date=received_date) - ordered_results = api_client.get_data('/messages') - ordered_dates = [result['date'] for result in ordered_results] + received_date = datetime.datetime.utcnow() + datetime.timedelta( + seconds=22 * (i + 1) + ) + add_fake_message( + db.session, default_namespace.id, thr, received_date=received_date + ) + ordered_results = api_client.get_data("/messages") + ordered_dates = [result["date"] for result in ordered_results] assert ordered_dates == sorted(ordered_dates, reverse=True) - ordered_results = api_client.get_data('/messages?limit=3') + ordered_results = 
api_client.get_data("/messages?limit=3") expected_public_ids = [ - public_id for public_id, in - db.session.query(Message.public_id). - filter(Message.namespace_id == default_namespace.id). - order_by(desc(Message.received_date)).limit(3)] - assert expected_public_ids == [r['id'] for r in ordered_results] + public_id + for public_id, in db.session.query(Message.public_id) + .filter(Message.namespace_id == default_namespace.id) + .order_by(desc(Message.received_date)) + .limit(3) + ] + assert expected_public_ids == [r["id"] for r in ordered_results] def test_strict_argument_parsing(api_client): - r = api_client.get_raw('/threads?foo=bar') + r = api_client.get_raw("/threads?foo=bar") assert r.status_code == 400 @@ -255,77 +286,95 @@ def test_distinct_results(api_client, db, default_namespace): multiple matching messages per thread.""" # Create a thread with multiple messages on it. first_thread = add_fake_thread(db.session, default_namespace.id) - add_fake_message(db.session, default_namespace.id, first_thread, - from_addr=[('', 'hello@example.com')], - received_date=datetime.datetime.utcnow(), - add_sent_category=True) - add_fake_message(db.session, default_namespace.id, first_thread, - from_addr=[('', 'hello@example.com')], - received_date=datetime.datetime.utcnow(), - add_sent_category=True) + add_fake_message( + db.session, + default_namespace.id, + first_thread, + from_addr=[("", "hello@example.com")], + received_date=datetime.datetime.utcnow(), + add_sent_category=True, + ) + add_fake_message( + db.session, + default_namespace.id, + first_thread, + from_addr=[("", "hello@example.com")], + received_date=datetime.datetime.utcnow(), + add_sent_category=True, + ) # Now create another thread with the same participants older_date = datetime.datetime.utcnow() - datetime.timedelta(hours=1) second_thread = add_fake_thread(db.session, default_namespace.id) - add_fake_message(db.session, default_namespace.id, second_thread, - from_addr=[('', 'hello@example.com')], - received_date=older_date, - add_sent_category=True) - add_fake_message(db.session, default_namespace.id, second_thread, - from_addr=[('', 'hello@example.com')], - received_date=older_date, - add_sent_category=True) + add_fake_message( + db.session, + default_namespace.id, + second_thread, + from_addr=[("", "hello@example.com")], + received_date=older_date, + add_sent_category=True, + ) + add_fake_message( + db.session, + default_namespace.id, + second_thread, + from_addr=[("", "hello@example.com")], + received_date=older_date, + add_sent_category=True, + ) second_thread.recentdate = older_date db.session.commit() - filtered_results = api_client.get_data('/threads?from=hello@example.com' - '&limit=1&offset=0') + filtered_results = api_client.get_data( + "/threads?from=hello@example.com" "&limit=1&offset=0" + ) assert len(filtered_results) == 1 - assert filtered_results[0]['id'] == first_thread.public_id + assert filtered_results[0]["id"] == first_thread.public_id - filtered_results = api_client.get_data('/threads?from=hello@example.com' - '&limit=1&offset=1') + filtered_results = api_client.get_data( + "/threads?from=hello@example.com" "&limit=1&offset=1" + ) assert len(filtered_results) == 1 - assert filtered_results[0]['id'] == second_thread.public_id + assert filtered_results[0]["id"] == second_thread.public_id - filtered_results = api_client.get_data('/threads?from=hello@example.com' - '&limit=2&offset=0') + filtered_results = api_client.get_data( + "/threads?from=hello@example.com" "&limit=2&offset=0" + ) assert 
len(filtered_results) == 2 - filtered_results = api_client.get_data('/threads?from=hello@example.com' - '&limit=2&offset=1') + filtered_results = api_client.get_data( + "/threads?from=hello@example.com" "&limit=2&offset=1" + ) assert len(filtered_results) == 1 # Ensure that it works when using the _in filter - filtered_results = api_client.get_data('/threads?in=sent' - '&limit=2&offset=0') + filtered_results = api_client.get_data("/threads?in=sent" "&limit=2&offset=0") assert len(filtered_results) == 2 - filtered_results = api_client.get_data('/threads?in=sent' - '&limit=1&offset=0') + filtered_results = api_client.get_data("/threads?in=sent" "&limit=1&offset=0") assert len(filtered_results) == 1 def test_filtering_accounts(db, test_client, default_namespace): - all_accounts = json.loads(test_client.get('/accounts/?limit=100').data) - email = all_accounts[0]['email_address'] + all_accounts = json.loads(test_client.get("/accounts/?limit=100").data) + email = all_accounts[0]["email_address"] - some_accounts = json.loads(test_client.get('/accounts/?offset=1&limit=99').data) + some_accounts = json.loads(test_client.get("/accounts/?offset=1&limit=99").data) assert len(some_accounts) == len(all_accounts) - 1 - no_all_accounts = json.loads(test_client.get('/accounts/?limit=0').data) + no_all_accounts = json.loads(test_client.get("/accounts/?limit=0").data) assert no_all_accounts == [] - all_accounts = json.loads(test_client.get('/accounts/?limit=1').data) + all_accounts = json.loads(test_client.get("/accounts/?limit=1").data) assert len(all_accounts) == 1 - filter_ = '?email_address={}'.format(email) - all_accounts = json.loads(test_client.get('/accounts/' + filter_).data) - assert all_accounts[0]['email_address'] == email + filter_ = "?email_address={}".format(email) + all_accounts = json.loads(test_client.get("/accounts/" + filter_).data) + assert all_accounts[0]["email_address"] == email - filter_ = '?email_address=unknown@email.com' - accounts = json.loads(test_client.get('/accounts/' + filter_).data) + filter_ = "?email_address=unknown@email.com" + accounts = json.loads(test_client.get("/accounts/" + filter_).data) assert len(accounts) == 0 @@ -335,19 +384,17 @@ def test_namespace_limiting(db, api_client, default_namespaces): namespaces = db.session.query(Namespace).all() assert len(namespaces) > 1 for ns in namespaces: - thread = Thread(namespace=ns, subjectdate=dt, recentdate=dt, - subject=subject) - add_fake_message(db.session, ns.id, thread, received_date=dt, - subject=subject) + thread = Thread(namespace=ns, subjectdate=dt, recentdate=dt, subject=subject) + add_fake_message(db.session, ns.id, thread, received_date=dt, subject=subject) db.session.add(Block(namespace=ns, filename=subject)) db.session.commit() for ns in namespaces: - r = api_client.get_data('/threads?subject={}'.format(subject)) + r = api_client.get_data("/threads?subject={}".format(subject)) assert len(r) == 1 - r = api_client.get_data('/messages?subject={}'.format(subject)) + r = api_client.get_data("/messages?subject={}".format(subject)) assert len(r) == 1 - r = api_client.get_data('/files?filename={}'.format(subject)) + r = api_client.get_data("/files?filename={}".format(subject)) assert len(r) == 1 diff --git a/inbox/test/api/test_folders.py b/inbox/test/api/test_folders.py index 61e3cab07..1d0d5edb3 100644 --- a/inbox/test/api/test_folders.py +++ b/inbox/test/api/test_folders.py @@ -11,51 +11,53 @@ # to A/B. 
def test_folder_stripping(db, generic_account, imap_api_client): # Check that regular IMAP paths get converted to unix-style paths - generic_account.folder_separator = '.' + generic_account.folder_separator = "." folder = add_fake_folder(db.session, generic_account) - add_fake_category(db.session, generic_account.namespace.id, - 'INBOX.Red.Carpet') + add_fake_category(db.session, generic_account.namespace.id, "INBOX.Red.Carpet") - r = imap_api_client.get_data('/folders') + r = imap_api_client.get_data("/folders") for folder in r: - if 'Carpet' in folder['display_name']: - assert folder['display_name'] == 'INBOX/Red/Carpet' + if "Carpet" in folder["display_name"]: + assert folder["display_name"] == "INBOX/Red/Carpet" # Check that if we define an account-level prefix, it gets stripped # from the API response. - generic_account.folder_prefix = 'INBOX.' + generic_account.folder_prefix = "INBOX." db.session.commit() - r = imap_api_client.get_data('/folders') + r = imap_api_client.get_data("/folders") for folder in r: - if 'Carpet' in folder['display_name']: - assert folder['display_name'] == 'Red/Carpet' + if "Carpet" in folder["display_name"]: + assert folder["display_name"] == "Red/Carpet" # Test again with a prefix without integrated separator: - generic_account.folder_prefix = 'INBOX' + generic_account.folder_prefix = "INBOX" db.session.commit() - r = imap_api_client.get_data('/folders') + r = imap_api_client.get_data("/folders") for folder in r: - if 'Carpet' in folder['display_name']: - assert folder['display_name'] == 'Red/Carpet' + if "Carpet" in folder["display_name"]: + assert folder["display_name"] == "Red/Carpet" # This test is kind of complicated --- basically we mock # the output of the IMAP NAMESPACE command to check that # we are correctly translating Unix-style paths to IMAP # paths. 
-def test_folder_name_translation(empty_db, generic_account, imap_api_client, - mock_imapclient, monkeypatch): +def test_folder_name_translation( + empty_db, generic_account, imap_api_client, mock_imapclient, monkeypatch +): from inbox.transactions.actions import SyncbackService - syncback = SyncbackService(syncback_id=0, process_number=0, - total_processes=1, num_workers=2) - imap_namespaces = (((u'INBOX.', u'.'),),) + syncback = SyncbackService( + syncback_id=0, process_number=0, total_processes=1, num_workers=2 + ) + + imap_namespaces = (((u"INBOX.", u"."),),) mock_imapclient.create_folder = mock.Mock() mock_imapclient.namespace = mock.Mock(return_value=imap_namespaces) - folder_list = [(('\\HasChildren',), '.', u'INBOX')] + folder_list = [(("\\HasChildren",), ".", u"INBOX")] mock_imapclient.list_folders = mock.Mock(return_value=folder_list) mock_imapclient.has_capability = mock.Mock(return_value=True) @@ -64,11 +66,11 @@ def test_folder_name_translation(empty_db, generic_account, imap_api_client, generic_account.folder_separator = folder_separator empty_db.session.commit() - folder_json = {'display_name': 'Taxes/Accounting'} - imap_api_client.post_data('/folders', folder_json) + folder_json = {"display_name": "Taxes/Accounting"} + imap_api_client.post_data("/folders", folder_json) syncback._process_log() syncback._restart_workers() while not syncback.task_queue.empty(): gevent.sleep(0.1) - mock_imapclient.create_folder.assert_called_with('INBOX.Taxes.Accounting') + mock_imapclient.create_folder.assert_called_with("INBOX.Taxes.Accounting") diff --git a/inbox/test/api/test_folders_labels.py b/inbox/test/api/test_folders_labels.py index 6c62c7a27..239e51f4e 100644 --- a/inbox/test/api/test_folders_labels.py +++ b/inbox/test/api/test_folders_labels.py @@ -8,20 +8,24 @@ from inbox.api.ns_api import API_VERSIONS from inbox.models.category import Category, EPOCH -from inbox.test.util.base import (add_fake_message, thread, add_fake_thread, - generic_account, gmail_account) +from inbox.test.util.base import ( + add_fake_message, + thread, + add_fake_thread, + generic_account, + gmail_account, +) from inbox.test.api.base import api_client, new_api_client -__all__ = ['api_client', 'thread', 'generic_account', 'gmail_account'] +__all__ = ["api_client", "thread", "generic_account", "gmail_account"] @pytest.fixture def folder_client(db, generic_account): api_client = new_api_client(db, generic_account.namespace) - api_client.post_data('/folders/', - {"display_name": "Test_Folder"}) + api_client.post_data("/folders/", {"display_name": "Test_Folder"}) return api_client @@ -35,149 +39,155 @@ def label_client(db, gmail_account): # conflicts if a label is "created" more than once. Since # labels can't be deleted and then re-created, this fixture only # makes a new label if there are no existing labels. 
- g_data = api_client.get_raw('/labels/') + g_data = api_client.get_raw("/labels/") if not json.loads(g_data.data): - api_client.post_data('/labels/', - {"display_name": "Test_Label"}) + api_client.post_data("/labels/", {"display_name": "Test_Label"}) return api_client def test_folder_post(db, generic_account): api_client = new_api_client(db, generic_account.namespace) - po_data = api_client.post_data('/folders/', - {"display_name": "Test_Folder"}) + po_data = api_client.post_data("/folders/", {"display_name": "Test_Folder"}) assert po_data.status_code == 200 - category_id = json.loads(po_data.data)['id'] - category = db.session.query(Category).filter( - Category.public_id == category_id).one() - assert category.display_name == 'Test_Folder' - assert category.name == '' - assert category.type == 'folder' + category_id = json.loads(po_data.data)["id"] + category = ( + db.session.query(Category).filter(Category.public_id == category_id).one() + ) + assert category.display_name == "Test_Folder" + assert category.name == "" + assert category.type == "folder" assert category.deleted_at == EPOCH assert category.is_deleted is False def test_label_post(db, gmail_account): api_client = new_api_client(db, gmail_account.namespace) - po_data = api_client.post_data('/labels/', - {"display_name": "Test_Label"}) + po_data = api_client.post_data("/labels/", {"display_name": "Test_Label"}) assert po_data.status_code == 200 - category_id = json.loads(po_data.data)['id'] - category = db.session.query(Category).filter( - Category.public_id == category_id).one() - assert category.display_name == 'Test_Label' - assert category.name == '' - assert category.type == 'label' + category_id = json.loads(po_data.data)["id"] + category = ( + db.session.query(Category).filter(Category.public_id == category_id).one() + ) + assert category.display_name == "Test_Label" + assert category.name == "" + assert category.type == "label" assert category.deleted_at == EPOCH assert category.is_deleted is False def test_folder_get(folder_client): - g_data = folder_client.get_raw('/folders/') + g_data = folder_client.get_raw("/folders/") assert g_data.status_code == 200 gen_folder = json.loads(g_data.data)[0] - gid_data = folder_client.get_raw('/folders/{}'.format(gen_folder['id'])) + gid_data = folder_client.get_raw("/folders/{}".format(gen_folder["id"])) assert gid_data.status_code == 200 def test_label_get(label_client): - g_data = label_client.get_raw('/labels/') + g_data = label_client.get_raw("/labels/") assert g_data.status_code == 200 gmail_label = json.loads(g_data.data)[0] - gid_data = label_client.get_raw('/labels/{}'.format(gmail_label['id'])) + gid_data = label_client.get_raw("/labels/{}".format(gmail_label["id"])) assert gid_data.status_code == 200 @pytest.mark.parametrize("api_version", API_VERSIONS) def test_folder_put(db, folder_client, api_version): headers = dict() - headers['Api-Version'] = api_version + headers["Api-Version"] = api_version # GET request for the folder ID - g_data = folder_client.get_raw('/folders/') + g_data = folder_client.get_raw("/folders/") gen_folder = json.loads(g_data.data)[0] - pu_data = folder_client.put_data('/folders/{}'.format(gen_folder['id']), - {"display_name": "Test_Folder_Renamed"}, - headers=headers) + pu_data = folder_client.put_data( + "/folders/{}".format(gen_folder["id"]), + {"display_name": "Test_Folder_Renamed"}, + headers=headers, + ) assert pu_data.status_code == 200 if api_version == API_VERSIONS[0]: - assert json.loads(pu_data.data)['display_name'] == 
'Test_Folder_Renamed' - - category_id = gen_folder['id'] - category = db.session.query(Category).filter( - Category.public_id == category_id).one() - assert category.display_name == 'Test_Folder_Renamed' - assert category.name == '' + assert json.loads(pu_data.data)["display_name"] == "Test_Folder_Renamed" + + category_id = gen_folder["id"] + category = ( + db.session.query(Category).filter(Category.public_id == category_id).one() + ) + assert category.display_name == "Test_Folder_Renamed" + assert category.name == "" else: - assert json.loads(pu_data.data)['display_name'] == gen_folder['display_name'] + assert json.loads(pu_data.data)["display_name"] == gen_folder["display_name"] @pytest.mark.parametrize("api_version", API_VERSIONS) def test_label_put(db, label_client, api_version): headers = dict() - headers['Api-Version'] = api_version + headers["Api-Version"] = api_version # GET request for the label ID - g_data = label_client.get_raw('/labels/') + g_data = label_client.get_raw("/labels/") gmail_label = json.loads(g_data.data)[0] new_name = "Test_Label_Renamed {}".format(api_version) - pu_data = label_client.put_data('/labels/{}'.format(gmail_label['id']), - {"display_name": new_name}, headers=headers) + pu_data = label_client.put_data( + "/labels/{}".format(gmail_label["id"]), + {"display_name": new_name}, + headers=headers, + ) assert pu_data.status_code == 200 if api_version == API_VERSIONS[0]: - assert json.loads(pu_data.data)['display_name'] == new_name + assert json.loads(pu_data.data)["display_name"] == new_name - category_id = gmail_label['id'] - category = db.session.query(Category).filter( - Category.public_id == category_id).one() + category_id = gmail_label["id"] + category = ( + db.session.query(Category).filter(Category.public_id == category_id).one() + ) assert category.display_name == new_name - assert category.name == '' + assert category.name == "" else: # non-optimistic update - assert json.loads(pu_data.data)['display_name'] == gmail_label['display_name'] + assert json.loads(pu_data.data)["display_name"] == gmail_label["display_name"] @pytest.mark.parametrize("api_version", API_VERSIONS) def test_folder_delete(db, generic_account, folder_client, api_version): headers = dict() - headers['Api-Version'] = api_version + headers["Api-Version"] = api_version # Make a new message generic_thread = add_fake_thread(db.session, generic_account.namespace.id) - gen_message = add_fake_message(db.session, - generic_account.namespace.id, - generic_thread) - g_data = folder_client.get_raw('/folders/') + gen_message = add_fake_message( + db.session, generic_account.namespace.id, generic_thread + ) + g_data = folder_client.get_raw("/folders/") # Add message to folder generic_folder = json.loads(g_data.data)[0] - data = {"folder_id": generic_folder['id']} - folder_client.put_data('/messages/{}'.format(gen_message.public_id), data) + data = {"folder_id": generic_folder["id"]} + folder_client.put_data("/messages/{}".format(gen_message.public_id), data) # Test that DELETE requests 403 on folders with items in them - d_data = folder_client.delete('/folders/{}'.format(generic_folder['id'])) + d_data = folder_client.delete("/folders/{}".format(generic_folder["id"])) assert d_data.status_code == 400 # Make an empty folder - resp = folder_client.post_data('/folders/', - {"display_name": "Empty_Folder"}) + resp = folder_client.post_data("/folders/", {"display_name": "Empty_Folder"}) empty_folder = json.loads(resp.data) # Test that DELETE requests delete empty folders - d_data = 
folder_client.delete('/folders/{}'.format(empty_folder['id'])) + d_data = folder_client.delete("/folders/{}".format(empty_folder["id"])) assert d_data.status_code == 200 if api_version == API_VERSIONS[0]: # Did we update things optimistically? - category_id = empty_folder['id'] - category = db.session.query(Category).filter( - Category.public_id == category_id).one() + category_id = empty_folder["id"] + category = ( + db.session.query(Category).filter(Category.public_id == category_id).one() + ) assert category.deleted_at != EPOCH assert category.is_deleted is True @@ -187,76 +197,83 @@ def test_folder_delete(db, generic_account, folder_client, api_version): @pytest.mark.parametrize("api_version", API_VERSIONS) def test_label_delete(db, gmail_account, label_client, api_version): headers = dict() - headers['Api-Version'] = api_version + headers["Api-Version"] = api_version # Make a new message gmail_thread = add_fake_thread(db.session, gmail_account.namespace.id) - gmail_message = add_fake_message(db.session, - gmail_account.namespace.id, gmail_thread) - g_data = label_client.get_raw('/labels/', headers=headers) + gmail_message = add_fake_message( + db.session, gmail_account.namespace.id, gmail_thread + ) + g_data = label_client.get_raw("/labels/", headers=headers) # Add label to message gmail_label = json.loads(g_data.data)[0] - data = {"labels": [gmail_label['id']]} - label_client.put_data('/messages/{}'.format(gmail_message.public_id), data, - headers=headers) + data = {"labels": [gmail_label["id"]]} + label_client.put_data( + "/messages/{}".format(gmail_message.public_id), data, headers=headers + ) # DELETE requests should work on labels whether or not messages have them - d_data = label_client.delete('/labels/{}'.format(gmail_label['id']), - headers=headers) + d_data = label_client.delete( + "/labels/{}".format(gmail_label["id"]), headers=headers + ) assert d_data.status_code == 200 if api_version == API_VERSIONS[0]: # Optimistic update. 
- category_id = gmail_label['id'] - category = db.session.query(Category).filter( - Category.public_id == category_id).one() + category_id = gmail_label["id"] + category = ( + db.session.query(Category).filter(Category.public_id == category_id).one() + ) assert category.deleted_at != EPOCH assert category.is_deleted is True def test_folder_exclusivity(folder_client): - g_data = folder_client.get_raw('/folders/') + g_data = folder_client.get_raw("/folders/") generic_folder = json.loads(g_data.data)[0] # These requests to /labels/ should all 404, since the account uses folders - po_data = folder_client.post_data('/labels/', - {"display_name": "Test_E_Label"}) + po_data = folder_client.post_data("/labels/", {"display_name": "Test_E_Label"}) assert po_data.status_code == 404 - pu_data = folder_client.put_data('/labels/{}'.format(generic_folder['id']), - {"display_name": "Test_E_Folder_Renamed"}) + pu_data = folder_client.put_data( + "/labels/{}".format(generic_folder["id"]), + {"display_name": "Test_E_Folder_Renamed"}, + ) assert pu_data.status_code == 404 - g_data = folder_client.get_raw('/labels/') + g_data = folder_client.get_raw("/labels/") assert g_data.status_code == 404 - gid_data = folder_client.get_raw('/labels/{}'.format(generic_folder['id'])) + gid_data = folder_client.get_raw("/labels/{}".format(generic_folder["id"])) assert gid_data.status_code == 404 - d_data = folder_client.delete('/labels/{}'.format(generic_folder['id'])) + d_data = folder_client.delete("/labels/{}".format(generic_folder["id"])) assert d_data.status_code == 404 def test_label_exclusivity(label_client): - g_data = label_client.get_raw('/labels/') + g_data = label_client.get_raw("/labels/") gmail_label = json.loads(g_data.data)[0] # These requests to /folders/ should all 404, since the account uses labels - po_data = label_client.post_data('/folders/', - {"display_name": "Test_E_Folder"}) + po_data = label_client.post_data("/folders/", {"display_name": "Test_E_Folder"}) assert po_data.status_code == 404 - pu_data = label_client.put_data('/folders/{}'.format(gmail_label['id']), - {"display_name": "Test_E _Label_Renamed"}) + pu_data = label_client.put_data( + "/folders/{}".format(gmail_label["id"]), + {"display_name": "Test_E _Label_Renamed"}, + ) assert pu_data.status_code == 404 - g_data = label_client.get_raw('/folders/') + g_data = label_client.get_raw("/folders/") assert g_data.status_code == 404 - gid_data = label_client.get_raw('/folders/{}'.format(gmail_label['id'])) + gid_data = label_client.get_raw("/folders/{}".format(gmail_label["id"])) assert gid_data.status_code == 404 - d_data = label_client.delete('/folders/{}'.format(gmail_label['id'])) + d_data = label_client.delete("/folders/{}".format(gmail_label["id"])) assert d_data.status_code == 404 def test_duplicate_folder_create(folder_client): # Creating a folder with an existing, non-deleted folder's name # returns an HTTP 400. - data = folder_client.get_raw('/folders/') + data = folder_client.get_raw("/folders/") folder = json.loads(data.data)[0] - data = folder_client.post_data('/folders/', - {"display_name": folder['display_name']}) + data = folder_client.post_data( + "/folders/", {"display_name": folder["display_name"]} + ) assert data.status_code == 400 # Deleting the folder and re-creating (with the same name) succeeds. @@ -264,25 +281,25 @@ def test_duplicate_folder_create(folder_client): # spaced >= 1 second apart (MySQL rounds up microseconds). 
initial_ts = datetime.utcnow() with freeze_time(initial_ts) as frozen_ts: - data = folder_client.delete('/folders/{}'.format(folder['id'])) + data = folder_client.delete("/folders/{}".format(folder["id"])) assert data.status_code == 200 - data = folder_client.post_data('/folders/', - {"display_name": folder['display_name']}) + data = folder_client.post_data( + "/folders/", {"display_name": folder["display_name"]} + ) assert data.status_code == 200 new_folder = json.loads(data.data) - assert new_folder['display_name'] == folder['display_name'] - assert new_folder['id'] != folder['id'] + assert new_folder["display_name"] == folder["display_name"] + assert new_folder["id"] != folder["id"] folder = new_folder frozen_ts.tick() def test_duplicate_label_create(label_client): - data = label_client.get_raw('/labels/') + data = label_client.get_raw("/labels/") label = json.loads(data.data)[0] - data = label_client.post_data('/labels/', - {"display_name": label['display_name']}) + data = label_client.post_data("/labels/", {"display_name": label["display_name"]}) assert data.status_code == 400 # Deleting the label and re-creating (with the same name) succeeds. @@ -290,15 +307,16 @@ def test_duplicate_label_create(label_client): # spaced >= 1 second apart (MySQL rounds up microseconds). initial_ts = datetime.utcnow() with freeze_time(initial_ts) as frozen_ts: - data = label_client.delete('/labels/{}'.format(label['id'])) + data = label_client.delete("/labels/{}".format(label["id"])) assert data.status_code == 200 - data = label_client.post_data('/labels/', - {"display_name": label['display_name']}) + data = label_client.post_data( + "/labels/", {"display_name": label["display_name"]} + ) assert data.status_code == 200 new_label = json.loads(data.data) - assert new_label['display_name'] == label['display_name'] - assert new_label['id'] != label['id'] + assert new_label["display_name"] == label["display_name"] + assert new_label["id"] != label["id"] label = new_label frozen_ts.tick() diff --git a/inbox/test/api/test_invalid_account.py b/inbox/test/api/test_invalid_account.py index 25dcf662e..19d596cee 100644 --- a/inbox/test/api/test_invalid_account.py +++ b/inbox/test/api/test_invalid_account.py @@ -8,14 +8,15 @@ from inbox.test.util.base import db from inbox.test.api.base import api_client -__all__ = ['api_client', 'db'] +__all__ = ["api_client", "db"] @pytest.fixture def token_manager(monkeypatch): monkeypatch.setattr( - 'inbox.models.backends.gmail.g_token_manager.get_token_for_email', - lambda *args, **kwargs: 'token') + "inbox.models.backends.gmail.g_token_manager.get_token_for_email", + lambda *args, **kwargs: "token", + ) @pytest.fixture @@ -23,95 +24,93 @@ def search_response(): resp = requests.Response() resp.status_code = 200 resp.elapsed = datetime.timedelta(seconds=22) - resp._content = json.dumps({ - 'messages': [{'id': '1'}, {'id': '2'}, {'id': '3'}] - }) + resp._content = json.dumps({"messages": [{"id": "1"}, {"id": "2"}, {"id": "3"}]}) requests.get = mock.Mock(return_value=resp) @pytest.fixture def setup_account(message, thread, label, contact, event): return { - 'message': message.public_id, - 'thread': thread.public_id, - 'label': label.category.public_id, - 'contact': contact.public_id, - 'event': event.public_id + "message": message.public_id, + "thread": thread.public_id, + "label": label.category.public_id, + "contact": contact.public_id, + "event": event.public_id, } def test_read_endpoints(db, setup_account, api_client, default_account): # Read operations succeed. 
for resource, public_id in setup_account.items(): - endpoint = '/{}s'.format(resource) + endpoint = "/{}s".format(resource) r = api_client.get_raw(endpoint) assert r.status_code == 200 - read_endpoint = '{}/{}'.format(endpoint, public_id) + read_endpoint = "{}/{}".format(endpoint, public_id) r = api_client.get_raw(read_endpoint) assert r.status_code == 200 - default_account.sync_state = 'invalid' + default_account.sync_state = "invalid" db.session.commit() # Read operations on an invalid account also succeed. for resource, public_id in setup_account.items(): - endpoint = '/{}s'.format(resource) + endpoint = "/{}s".format(resource) r = api_client.get_raw(endpoint) assert r.status_code == 200 - read_endpoint = '{}/{}'.format(endpoint, public_id) + read_endpoint = "{}/{}".format(endpoint, public_id) r = api_client.get_raw(read_endpoint) assert r.status_code == 200 -@pytest.mark.skipif(True, reason='Need to investigate') -def test_search_endpoints(db, api_client, token_manager, search_response, - default_account): +@pytest.mark.skipif(True, reason="Need to investigate") +def test_search_endpoints( + db, api_client, token_manager, search_response, default_account +): # Message, thread search succeeds. - for endpoint in ('messages', 'threads'): - r = api_client.get_raw('/{}/search?q=queryme'.format(endpoint)) + for endpoint in ("messages", "threads"): + r = api_client.get_raw("/{}/search?q=queryme".format(endpoint)) assert r.status_code == 200 - default_account.sync_state = 'invalid' + default_account.sync_state = "invalid" db.session.commit() # Message, thread search on an invalid account fails with an HTTP 403. - for endpoint in ('messages', 'threads'): - r = api_client.get_raw('/{}/search?q=queryme'.format(endpoint)) + for endpoint in ("messages", "threads"): + r = api_client.get_raw("/{}/search?q=queryme".format(endpoint)) assert r.status_code == 403 -@pytest.mark.skipif(True, reason='Need to investigate') +@pytest.mark.skipif(True, reason="Need to investigate") def test_write_endpoints(db, setup_account, api_client, default_account): # Write operations (create, update, delete) succeed. r = api_client.post_data( - '/drafts', - data={ - 'body': '

Sea, birds and sand.

' - }) + "/drafts", + data={"body": "

Sea, birds and sand.

"}, + ) assert r.status_code == 200 - draft_id = json.loads(r.data)['id'] + draft_id = json.loads(r.data)["id"] - endpoint = '/messages/{}'.format(setup_account['message']) + endpoint = "/messages/{}".format(setup_account["message"]) r = api_client.put_data(endpoint, data={"starred": True}) assert r.status_code == 200 - endpoint = '/events/{}'.format(setup_account['event']) + endpoint = "/events/{}".format(setup_account["event"]) r = api_client.delete(endpoint) assert r.status_code == 200 - default_account.sync_state = 'invalid' + default_account.sync_state = "invalid" db.session.commit() # Write operations fail with an HTTP 403. - r = api_client.post_data('/labels', data={"display_name": "Neu!"}) + r = api_client.post_data("/labels", data={"display_name": "Neu!"}) assert r.status_code == 403 - endpoint = '/threads/{}'.format(setup_account['thread']) + endpoint = "/threads/{}".format(setup_account["thread"]) r = api_client.put_data(endpoint, data={"starred": True}) assert r.status_code == 403 - endpoint = '/drafts/{}'.format(draft_id) + endpoint = "/drafts/{}".format(draft_id) r = api_client.delete(endpoint) assert r.status_code == 403 diff --git a/inbox/test/api/test_messages.py b/inbox/test/api/test_messages.py index 3b6b7199b..8f539e1ca 100644 --- a/inbox/test/api/test_messages.py +++ b/inbox/test/api/test_messages.py @@ -6,14 +6,28 @@ from inbox.api.ns_api import API_VERSIONS from inbox.util.blockstore import get_from_blockstore -from inbox.test.util.base import (add_fake_message, default_namespace, - new_message_from_synced, mime_message, thread, - add_fake_thread, generic_account, gmail_account) +from inbox.test.util.base import ( + add_fake_message, + default_namespace, + new_message_from_synced, + mime_message, + thread, + add_fake_thread, + generic_account, + gmail_account, +) from inbox.test.api.base import api_client, new_api_client -__all__ = ['api_client', 'default_namespace', 'new_message_from_synced', - 'mime_message', 'thread', 'generic_account', 'gmail_account'] +__all__ = [ + "api_client", + "default_namespace", + "new_message_from_synced", + "mime_message", + "thread", + "generic_account", + "gmail_account", +] @pytest.fixture @@ -27,11 +41,15 @@ def stub_message_from_raw(db, thread, new_message_from_synced): @pytest.fixture def stub_message(db, new_message_from_synced, default_namespace, thread): - message = add_fake_message(db.session, default_namespace.id, thread, - subject="Golden Gate Park next Sat", - from_addr=[('alice', 'alice@example.com')], - to_addr=[('bob', 'bob@example.com')]) - message.snippet = 'Banh mi paleo pickled, sriracha' + message = add_fake_message( + db.session, + default_namespace.id, + thread, + subject="Golden Gate Park next Sat", + from_addr=[("alice", "alice@example.com")], + to_addr=[("bob", "bob@example.com")], + ) + message.snippet = "Banh mi paleo pickled, sriracha" message.body = """ Banh mi paleo pickled, sriracha biodiesel chambray seitan mumblecore mustache. Raw denim gastropub 8-bit, butcher @@ -39,23 +57,31 @@ def stub_message(db, new_message_from_synced, default_namespace, thread): cliche bitters aesthetic. Ugh. 
""" - message = add_fake_message(db.session, default_namespace.id, thread, - subject="Re:Golden Gate Park next Sat", - from_addr=[('bob', 'bob@example.com')], - to_addr=[('alice', 'alice@example.com')], - cc_addr=[('Cheryl', 'cheryl@gmail.com')]) - message.snippet = 'Bushwick meggings ethical keffiyeh' + message = add_fake_message( + db.session, + default_namespace.id, + thread, + subject="Re:Golden Gate Park next Sat", + from_addr=[("bob", "bob@example.com")], + to_addr=[("alice", "alice@example.com")], + cc_addr=[("Cheryl", "cheryl@gmail.com")], + ) + message.snippet = "Bushwick meggings ethical keffiyeh" message.body = """ Bushwick meggings ethical keffiyeh. Chambray lumbersexual wayfarers, irony Banksy cred bicycle rights scenester artisan tote bag YOLO gastropub. """ - draft = add_fake_message(db.session, default_namespace.id, thread, - subject="Re:Golden Gate Park next Sat", - from_addr=[('alice', 'alice@example.com')], - to_addr=[('bob', 'bob@example.com')], - cc_addr=[('Cheryl', 'cheryl@gmail.com')]) - draft.snippet = 'Hey there friend writing a draft' + draft = add_fake_message( + db.session, + default_namespace.id, + thread, + subject="Re:Golden Gate Park next Sat", + from_addr=[("alice", "alice@example.com")], + to_addr=[("bob", "bob@example.com")], + cc_addr=[("Cheryl", "cheryl@gmail.com")], + ) + draft.snippet = "Hey there friend writing a draft" draft.body = """ DIY tousled Tumblr, VHS meditation 3 wolf moon listicle fingerstache viral bicycle rights. Thundercats kale chips church-key American Apparel. @@ -70,136 +96,157 @@ def stub_message(db, new_message_from_synced, default_namespace, thread): # TODO(emfree) clean up fixture dependencies def test_rfc822_format(stub_message_from_raw, api_client, mime_message): """ Test the API response to retreive raw message contents """ - full_path = '/messages/{}'.format(stub_message_from_raw.public_id) + full_path = "/messages/{}".format(stub_message_from_raw.public_id) - resp = api_client.get_raw(full_path, - headers={'Accept': 'message/rfc822'}) + resp = api_client.get_raw(full_path, headers={"Accept": "message/rfc822"}) assert resp.data == get_from_blockstore(stub_message_from_raw.data_sha256) -def test_direct_fetching(stub_message_from_raw, api_client, mime_message, - monkeypatch): +def test_direct_fetching(stub_message_from_raw, api_client, mime_message, monkeypatch): # Mark a message as missing and check that we try to # fetch it from the remote provider. 
get_mock = mock.Mock(return_value=None) - monkeypatch.setattr('inbox.util.blockstore.get_from_blockstore', - get_mock) + monkeypatch.setattr("inbox.util.blockstore.get_from_blockstore", get_mock) save_mock = mock.Mock() - monkeypatch.setattr('inbox.util.blockstore.save_to_blockstore', - save_mock) + monkeypatch.setattr("inbox.util.blockstore.save_to_blockstore", save_mock) - raw_mock = mock.Mock(return_value='Return contents') - monkeypatch.setattr('inbox.s3.backends.gmail.get_gmail_raw_contents', - raw_mock) + raw_mock = mock.Mock(return_value="Return contents") + monkeypatch.setattr("inbox.s3.backends.gmail.get_gmail_raw_contents", raw_mock) - full_path = '/messages/{}'.format(stub_message_from_raw.public_id) + full_path = "/messages/{}".format(stub_message_from_raw.public_id) - resp = api_client.get_raw(full_path, - headers={'Accept': 'message/rfc822'}) + resp = api_client.get_raw(full_path, headers={"Accept": "message/rfc822"}) for m in [get_mock, save_mock, raw_mock]: assert m.called - assert resp.data == 'Return contents' + assert resp.data == "Return contents" @pytest.mark.parametrize("api_version", API_VERSIONS) def test_sender_and_participants(stub_message, api_client, api_version): headers = dict() - headers['Api-Version'] = api_version + headers["Api-Version"] = api_version - resp = api_client.get_raw('/threads/{}' - .format(stub_message.thread.public_id), - headers=headers) + resp = api_client.get_raw( + "/threads/{}".format(stub_message.thread.public_id), headers=headers + ) assert resp.status_code == 200 resp_dict = json.loads(resp.data) - participants = resp_dict['participants'] + participants = resp_dict["participants"] assert len(participants) == 3 # Not expanded, should only return IDs - assert 'message' not in resp_dict - assert 'drafts' not in resp_dict + assert "message" not in resp_dict + assert "drafts" not in resp_dict @pytest.mark.parametrize("api_version", API_VERSIONS) def test_expanded_threads(stub_message, api_client, api_version): def _check_json_thread(resp_dict): - assert 'message_ids' not in resp_dict - assert 'messages' in resp_dict - assert 'drafts' in resp_dict - assert len(resp_dict['participants']) == 3 - assert len(resp_dict['messages']) == 2 - assert len(resp_dict['drafts']) == 1 - - for msg_dict in resp_dict['messages']: - assert 'body' not in msg_dict - assert msg_dict['object'] == 'message' - assert msg_dict['thread_id'] == stub_message.thread.public_id - valid_keys = ['account_id', 'to', 'from', 'files', 'unread', - 'unread', 'date', 'snippet'] + assert "message_ids" not in resp_dict + assert "messages" in resp_dict + assert "drafts" in resp_dict + assert len(resp_dict["participants"]) == 3 + assert len(resp_dict["messages"]) == 2 + assert len(resp_dict["drafts"]) == 1 + + for msg_dict in resp_dict["messages"]: + assert "body" not in msg_dict + assert msg_dict["object"] == "message" + assert msg_dict["thread_id"] == stub_message.thread.public_id + valid_keys = [ + "account_id", + "to", + "from", + "files", + "unread", + "unread", + "date", + "snippet", + ] assert all(x in msg_dict for x in valid_keys) - for draft in resp_dict['drafts']: - assert 'body' not in draft - assert draft['object'] == 'draft' - assert draft['thread_id'] == stub_message.thread.public_id - valid_keys = ['account_id', 'to', 'from', 'files', 'unread', - 'snippet', 'date', 'version', 'reply_to_message_id'] + for draft in resp_dict["drafts"]: + assert "body" not in draft + assert draft["object"] == "draft" + assert draft["thread_id"] == stub_message.thread.public_id + 
valid_keys = [ + "account_id", + "to", + "from", + "files", + "unread", + "snippet", + "date", + "version", + "reply_to_message_id", + ] assert all(x in draft for x in valid_keys) headers = dict() - headers['Api-Version'] = api_version + headers["Api-Version"] = api_version # /threads/ resp = api_client.get_raw( - '/threads/{}?view=expanded'.format(stub_message.thread.public_id), - headers=headers) + "/threads/{}?view=expanded".format(stub_message.thread.public_id), + headers=headers, + ) assert resp.status_code == 200 resp_dict = json.loads(resp.data) _check_json_thread(resp_dict) # /threads/ resp = api_client.get_raw( - '/threads/?view=expanded'.format(stub_message.thread.public_id), - headers=headers) + "/threads/?view=expanded".format(stub_message.thread.public_id), headers=headers + ) assert resp.status_code == 200 resp_dict = json.loads(resp.data) for thread_json in resp_dict: - if thread_json['id'] == stub_message.thread.public_id: + if thread_json["id"] == stub_message.thread.public_id: _check_json_thread(thread_json) def test_expanded_message(stub_message, api_client): def _check_json_message(msg_dict): - assert 'body' in msg_dict - assert msg_dict['object'] == 'message' - assert msg_dict['thread_id'] == stub_message.thread.public_id - - assert isinstance(msg_dict['headers'], dict) - assert 'In-Reply-To' in msg_dict['headers'] - assert 'References' in msg_dict['headers'] - assert 'Message-Id' in msg_dict['headers'] - - valid_keys = ['account_id', 'to', 'from', 'files', 'unread', - 'unread', 'date', 'snippet'] + assert "body" in msg_dict + assert msg_dict["object"] == "message" + assert msg_dict["thread_id"] == stub_message.thread.public_id + + assert isinstance(msg_dict["headers"], dict) + assert "In-Reply-To" in msg_dict["headers"] + assert "References" in msg_dict["headers"] + assert "Message-Id" in msg_dict["headers"] + + valid_keys = [ + "account_id", + "to", + "from", + "files", + "unread", + "unread", + "date", + "snippet", + ] assert all(x in msg_dict for x in valid_keys) # /message/ resp = api_client.get_raw( - '/messages/{}?view=expanded'.format(stub_message.public_id)) + "/messages/{}?view=expanded".format(stub_message.public_id) + ) assert resp.status_code == 200 resp_dict = json.loads(resp.data) _check_json_message(resp_dict) # /messages/ - resp = api_client.get_raw('/messages/?view=expanded') + resp = api_client.get_raw("/messages/?view=expanded") assert resp.status_code == 200 resp_dict = json.loads(resp.data) for message_json in resp_dict: - if message_json['id'] == stub_message.public_id: + if message_json["id"] == stub_message.public_id: _check_json_message(message_json) @@ -209,23 +256,21 @@ def test_message_folders(db, generic_account): # Generic IMAP threads, messages have a 'folders' field generic_thread = add_fake_thread(db.session, generic_account.namespace.id) - generic_message = add_fake_message(db.session, - generic_account.namespace.id, - generic_thread) + generic_message = add_fake_message( + db.session, generic_account.namespace.id, generic_thread + ) - resp_data = api_client.get_data( - '/threads/{}'.format(generic_thread.public_id)) + resp_data = api_client.get_data("/threads/{}".format(generic_thread.public_id)) - assert resp_data['id'] == generic_thread.public_id - assert resp_data['object'] == 'thread' - assert 'folders' in resp_data and 'labels' not in resp_data + assert resp_data["id"] == generic_thread.public_id + assert resp_data["object"] == "thread" + assert "folders" in resp_data and "labels" not in resp_data - resp_data = 
api_client.get_data( - '/messages/{}'.format(generic_message.public_id)) + resp_data = api_client.get_data("/messages/{}".format(generic_message.public_id)) - assert resp_data['id'] == generic_message.public_id - assert resp_data['object'] == 'message' - assert 'folder' in resp_data and 'labels' not in resp_data + assert resp_data["id"] == generic_message.public_id + assert resp_data["object"] == "message" + assert "folder" in resp_data and "labels" not in resp_data def test_message_labels(db, gmail_account): @@ -234,55 +279,57 @@ def test_message_labels(db, gmail_account): # Gmail threads, messages have a 'labels' field gmail_thread = add_fake_thread(db.session, gmail_account.namespace.id) - gmail_message = add_fake_message(db.session, - gmail_account.namespace.id, gmail_thread) + gmail_message = add_fake_message( + db.session, gmail_account.namespace.id, gmail_thread + ) - resp_data = api_client.get_data( - '/threads/{}'.format(gmail_thread.public_id)) + resp_data = api_client.get_data("/threads/{}".format(gmail_thread.public_id)) - assert resp_data['id'] == gmail_thread.public_id - assert resp_data['object'] == 'thread' - assert 'labels' in resp_data and 'folders' not in resp_data + assert resp_data["id"] == gmail_thread.public_id + assert resp_data["object"] == "thread" + assert "labels" in resp_data and "folders" not in resp_data - resp_data = api_client.get_data( - '/messages/{}'.format(gmail_message.public_id)) + resp_data = api_client.get_data("/messages/{}".format(gmail_message.public_id)) - assert resp_data['id'] == gmail_message.public_id - assert resp_data['object'] == 'message' - assert 'labels' in resp_data and 'folders' not in resp_data + assert resp_data["id"] == gmail_message.public_id + assert resp_data["object"] == "message" + assert "labels" in resp_data and "folders" not in resp_data -@pytest.mark.skipif(True, reason='Need to investigate') +@pytest.mark.skipif(True, reason="Need to investigate") @pytest.mark.parametrize("api_version", API_VERSIONS) -def test_message_label_updates(db, api_client, default_account, api_version, - custom_label): +def test_message_label_updates( + db, api_client, default_account, api_version, custom_label +): """Check that you can update a message (optimistically or not), and that the update is queued in the ActionLog.""" headers = dict() - headers['Api-Version'] = api_version + headers["Api-Version"] = api_version # Gmail threads, messages have a 'labels' field gmail_thread = add_fake_thread(db.session, default_account.namespace.id) - gmail_message = add_fake_message(db.session, - default_account.namespace.id, gmail_thread) + gmail_message = add_fake_message( + db.session, default_account.namespace.id, gmail_thread + ) resp_data = api_client.get_data( - '/messages/{}'.format(gmail_message.public_id), headers=headers) + "/messages/{}".format(gmail_message.public_id), headers=headers + ) - assert resp_data['labels'] == [] + assert resp_data["labels"] == [] category = custom_label.category update = dict(labels=[category.public_id]) resp = api_client.put_data( - '/messages/{}'.format(gmail_message.public_id), update, - headers=headers) + "/messages/{}".format(gmail_message.public_id), update, headers=headers + ) resp_data = json.loads(resp.data) if api_version == API_VERSIONS[0]: - assert len(resp_data['labels']) == 1 - assert resp_data['labels'][0]['id'] == category.public_id + assert len(resp_data["labels"]) == 1 + assert resp_data["labels"][0]["id"] == category.public_id else: - assert resp_data['labels'] == [] + assert resp_data["labels"] 
== [] diff --git a/inbox/test/api/test_searching.py b/inbox/test/api/test_searching.py index 78a7a784b..231f27aba 100644 --- a/inbox/test/api/test_searching.py +++ b/inbox/test/api/test_searching.py @@ -10,11 +10,15 @@ from inbox.search.base import get_search_client from inbox.search.backends.gmail import GmailSearchClient from inbox.search.backends.imap import IMAPSearchClient -from inbox.test.util.base import (add_fake_message, add_fake_thread, - add_fake_imapuid, add_fake_folder) +from inbox.test.util.base import ( + add_fake_message, + add_fake_thread, + add_fake_imapuid, + add_fake_folder, +) from inbox.test.api.base import api_client, imap_api_client -__all__ = ['api_client'] +__all__ = ["api_client"] @fixture @@ -24,8 +28,9 @@ def test_gmail_thread(db, default_account): @fixture def imap_folder(db, generic_account): - f = Folder.find_or_create(db.session, generic_account, - u'Boîte de réception', 'inbox') + f = Folder.find_or_create( + db.session, generic_account, u"Boîte de réception", "inbox" + ) db.session.add(f) db.session.commit() return f @@ -33,8 +38,7 @@ def imap_folder(db, generic_account): @fixture def different_imap_folder(db, generic_account): - f = Folder.find_or_create(db.session, generic_account, - 'Archive', 'archive') + f = Folder.find_or_create(db.session, generic_account, "Archive", "archive") db.session.add(f) db.session.commit() return f @@ -52,47 +56,44 @@ def sorted_gmail_threads(db, default_account): @fixture def sorted_gmail_messages(db, default_account, sorted_gmail_threads, folder): thread1, thread2, thread3 = sorted_gmail_threads - message1 = add_fake_message(db.session, default_account.namespace.id, - thread=thread1, - g_msgid=1, - from_addr=[{'name': 'Ben Bitdiddle', - 'email': 'ben@bitdiddle.com'}], - to_addr=[{'name': 'Barrack Obama', - 'email': 'barrack@obama.com'}], - received_date=datetime. - datetime(2015, 7, 9, 23, 50, 7), - subject='YOO!') - - add_fake_imapuid(db.session, default_account.id, message1, - folder, 3000) - - message2 = add_fake_message(db.session, default_account.namespace.id, - thread=thread2, - g_msgid=2, - from_addr=[{'name': 'Ben Bitdiddle', - 'email': 'ben@bitdiddle.com'}], - to_addr=[{'name': 'Barrack Obama', - 'email': 'barrack@obama.com'}], - received_date=datetime. - datetime(2014, 7, 9, 23, 50, 7), - subject='Hey!') - - add_fake_imapuid(db.session, default_account.id, message2, - folder, 3001) - - message3 = add_fake_message(db.session, default_account.namespace.id, - thread=thread3, - g_msgid=3, - from_addr=[{'name': 'Ben Bitdiddle', - 'email': 'ben@bitdiddle.com'}], - to_addr=[{'name': 'Barrack Obama', - 'email': 'barrack@obama.com'}], - received_date=datetime. 
- datetime(2013, 7, 9, 23, 50, 7), - subject='Sup?') - - add_fake_imapuid(db.session, default_account.id, message3, - folder, 3002) + message1 = add_fake_message( + db.session, + default_account.namespace.id, + thread=thread1, + g_msgid=1, + from_addr=[{"name": "Ben Bitdiddle", "email": "ben@bitdiddle.com"}], + to_addr=[{"name": "Barrack Obama", "email": "barrack@obama.com"}], + received_date=datetime.datetime(2015, 7, 9, 23, 50, 7), + subject="YOO!", + ) + + add_fake_imapuid(db.session, default_account.id, message1, folder, 3000) + + message2 = add_fake_message( + db.session, + default_account.namespace.id, + thread=thread2, + g_msgid=2, + from_addr=[{"name": "Ben Bitdiddle", "email": "ben@bitdiddle.com"}], + to_addr=[{"name": "Barrack Obama", "email": "barrack@obama.com"}], + received_date=datetime.datetime(2014, 7, 9, 23, 50, 7), + subject="Hey!", + ) + + add_fake_imapuid(db.session, default_account.id, message2, folder, 3001) + + message3 = add_fake_message( + db.session, + default_account.namespace.id, + thread=thread3, + g_msgid=3, + from_addr=[{"name": "Ben Bitdiddle", "email": "ben@bitdiddle.com"}], + to_addr=[{"name": "Barrack Obama", "email": "barrack@obama.com"}], + received_date=datetime.datetime(2013, 7, 9, 23, 50, 7), + subject="Sup?", + ) + + add_fake_imapuid(db.session, default_account.id, message3, folder, 3002) return [message1, message2, message3] @@ -109,47 +110,41 @@ def sorted_imap_threads(db, generic_account): @fixture def sorted_imap_messages(db, generic_account, sorted_imap_threads, imap_folder): thread1, thread2, thread3 = sorted_imap_threads - message1 = add_fake_message(db.session, generic_account.namespace.id, - thread=thread1, - from_addr=[{'name': '', - 'email': - 'inboxapptest@example.com'}], - to_addr=[{'name': 'Ben Bitdiddle', - 'email': 'ben@bitdiddle.com'}], - received_date=datetime. - datetime(2015, 7, 9, 23, 50, 7), - subject='YOO!') - - add_fake_imapuid(db.session, generic_account.id, message1, - imap_folder, 2000) - - message2 = add_fake_message(db.session, generic_account.namespace.id, - thread=thread2, - from_addr=[{'name': '', - 'email': - 'inboxapptest@example.com'}], - to_addr=[{'name': 'Ben Bitdiddle', - 'email': 'ben@bitdiddle.com'}], - received_date=datetime. - datetime(2014, 7, 9, 23, 50, 7), - subject='Hey!') - - add_fake_imapuid(db.session, generic_account.id, message2, - imap_folder, 2001) - - message3 = add_fake_message(db.session, generic_account.namespace.id, - thread=thread3, - from_addr=[{'name': '', - 'email': - 'inboxapptest@example.com'}], - to_addr=[{'name': 'Ben Bitdiddle', - 'email': 'ben@bitdiddle.com'}], - received_date=datetime. 
- datetime(2013, 7, 9, 23, 50, 7), - subject='Sup?') - - add_fake_imapuid(db.session, generic_account.id, message3, - imap_folder, 2002) + message1 = add_fake_message( + db.session, + generic_account.namespace.id, + thread=thread1, + from_addr=[{"name": "", "email": "inboxapptest@example.com"}], + to_addr=[{"name": "Ben Bitdiddle", "email": "ben@bitdiddle.com"}], + received_date=datetime.datetime(2015, 7, 9, 23, 50, 7), + subject="YOO!", + ) + + add_fake_imapuid(db.session, generic_account.id, message1, imap_folder, 2000) + + message2 = add_fake_message( + db.session, + generic_account.namespace.id, + thread=thread2, + from_addr=[{"name": "", "email": "inboxapptest@example.com"}], + to_addr=[{"name": "Ben Bitdiddle", "email": "ben@bitdiddle.com"}], + received_date=datetime.datetime(2014, 7, 9, 23, 50, 7), + subject="Hey!", + ) + + add_fake_imapuid(db.session, generic_account.id, message2, imap_folder, 2001) + + message3 = add_fake_message( + db.session, + generic_account.namespace.id, + thread=thread3, + from_addr=[{"name": "", "email": "inboxapptest@example.com"}], + to_addr=[{"name": "Ben Bitdiddle", "email": "ben@bitdiddle.com"}], + received_date=datetime.datetime(2013, 7, 9, 23, 50, 7), + subject="Sup?", + ) + + add_fake_imapuid(db.session, generic_account.id, message3, imap_folder, 2002) return [message1, message2, message3] @@ -164,61 +159,61 @@ def different_sorted_imap_threads(db, generic_account): @fixture -def different_imap_messages(db, generic_account, different_sorted_imap_threads, - different_imap_folder): +def different_imap_messages( + db, generic_account, different_sorted_imap_threads, different_imap_folder +): thread1, thread2, thread3 = different_sorted_imap_threads - message1 = add_fake_message(db.session, generic_account.namespace.id, - thread=thread1, - from_addr=[{'name': '', - 'email': - 'inboxapptest@example.com'}], - to_addr=[{'name': 'Ben Bitdiddle', - 'email': 'ben@bitdiddle.com'}], - received_date=datetime. - datetime(2015, 7, 9, 23, 50, 7), - subject='LOL') - - add_fake_imapuid(db.session, generic_account.id, message1, - different_imap_folder, 5000) - - message2 = add_fake_message(db.session, generic_account.namespace.id, - thread=thread2, - from_addr=[{'name': '', - 'email': - 'inboxapptest@example.com'}], - to_addr=[{'name': 'Ben Bitdiddle', - 'email': 'ben@bitdiddle.com'}], - received_date=datetime. - datetime(2014, 7, 9, 23, 50, 7), - subject='ROTFLMO') - - add_fake_imapuid(db.session, generic_account.id, message2, - different_imap_folder, 5001) - - message3 = add_fake_message(db.session, generic_account.namespace.id, - thread=thread3, - from_addr=[{'name': '', - 'email': - 'inboxapptest@example.com'}], - to_addr=[{'name': 'Ben Bitdiddle', - 'email': 'ben@bitdiddle.com'}], - received_date=datetime. 
- datetime(2013, 7, 9, 23, 50, 7), - subject='ROFLCOPTER') - - add_fake_imapuid(db.session, generic_account.id, message3, - different_imap_folder, 5002) + message1 = add_fake_message( + db.session, + generic_account.namespace.id, + thread=thread1, + from_addr=[{"name": "", "email": "inboxapptest@example.com"}], + to_addr=[{"name": "Ben Bitdiddle", "email": "ben@bitdiddle.com"}], + received_date=datetime.datetime(2015, 7, 9, 23, 50, 7), + subject="LOL", + ) + + add_fake_imapuid( + db.session, generic_account.id, message1, different_imap_folder, 5000 + ) + + message2 = add_fake_message( + db.session, + generic_account.namespace.id, + thread=thread2, + from_addr=[{"name": "", "email": "inboxapptest@example.com"}], + to_addr=[{"name": "Ben Bitdiddle", "email": "ben@bitdiddle.com"}], + received_date=datetime.datetime(2014, 7, 9, 23, 50, 7), + subject="ROTFLMO", + ) + + add_fake_imapuid( + db.session, generic_account.id, message2, different_imap_folder, 5001 + ) + + message3 = add_fake_message( + db.session, + generic_account.namespace.id, + thread=thread3, + from_addr=[{"name": "", "email": "inboxapptest@example.com"}], + to_addr=[{"name": "Ben Bitdiddle", "email": "ben@bitdiddle.com"}], + received_date=datetime.datetime(2013, 7, 9, 23, 50, 7), + subject="ROFLCOPTER", + ) + + add_fake_imapuid( + db.session, generic_account.id, message3, different_imap_folder, 5002 + ) return [message1, message2, message3] class MockImapConnection(object): - def __init__(self): self.search_args = None def select_folder(self, name, **_): - return {'UIDVALIDITY': 123} + return {"UIDVALIDITY": 123} def logout(self): pass @@ -235,8 +230,8 @@ def assert_search(self, criteria, charset=None): def imap_connection(monkeypatch): conn = MockImapConnection() monkeypatch.setattr( - 'inbox.auth.generic.GenericAuthHandler.connect_account', - lambda *_, **__: conn) + "inbox.auth.generic.GenericAuthHandler.connect_account", lambda *_, **__: conn + ) return conn @@ -246,18 +241,20 @@ def invalid_imap_connection(monkeypatch): def raise_401(*args): raise ValidationError() + conn = MockImapConnection() monkeypatch.setattr( - 'inbox.auth.generic.GenericAuthHandler.connect_account', - raise_401) + "inbox.auth.generic.GenericAuthHandler.connect_account", raise_401 + ) return conn @fixture def patch_token_manager(monkeypatch): monkeypatch.setattr( - 'inbox.models.backends.gmail.g_token_manager.get_token_for_email', - lambda *args, **kwargs: 'token') + "inbox.models.backends.gmail.g_token_manager.get_token_for_email", + lambda *args, **kwargs: "token", + ) @fixture @@ -265,9 +262,7 @@ def patch_gmail_search_response(): resp = requests.Response() resp.status_code = 200 resp.elapsed = datetime.timedelta(seconds=22) - resp._content = json.dumps({ - 'messages': [{'id': '1'}, {'id': '2'}, {'id': '3'}] - }) + resp._content = json.dumps({"messages": [{"id": "1"}, {"id": "2"}, {"id": "3"}]}) requests.get = mock.Mock(return_value=resp) @@ -277,174 +272,225 @@ def invalid_gmail_token(monkeypatch): def raise_401(*args): raise OAuthError() - monkeypatch.setattr( - 'inbox.models.backends.gmail.g_token_manager.get_token_for_email', - raise_401) - -@pytest.mark.parametrize('is_streaming', [True, False]) -def test_gmail_message_search(api_client, default_account, - patch_token_manager, - patch_gmail_search_response, - sorted_gmail_messages, is_streaming): + monkeypatch.setattr( + "inbox.models.backends.gmail.g_token_manager.get_token_for_email", raise_401 + ) + + +@pytest.mark.parametrize("is_streaming", [True, False]) +def test_gmail_message_search( + 
api_client, + default_account, + patch_token_manager, + patch_gmail_search_response, + sorted_gmail_messages, + is_streaming, +): search_client = get_search_client(default_account) assert isinstance(search_client, GmailSearchClient) if is_streaming: - messages = api_client.get_data('/messages/search/streaming?q=blah%20blah%20blah') + messages = api_client.get_data( + "/messages/search/streaming?q=blah%20blah%20blah" + ) else: - messages = api_client.get_data('/messages/search?q=blah%20blah%20blah') + messages = api_client.get_data("/messages/search?q=blah%20blah%20blah") assert_search_result(sorted_gmail_messages, messages) -@pytest.mark.parametrize('is_streaming', [True, False]) -def test_gmail_thread_search(api_client, test_gmail_thread, - default_account, - patch_token_manager, - patch_gmail_search_response, - sorted_gmail_messages, - sorted_gmail_threads, is_streaming): +@pytest.mark.parametrize("is_streaming", [True, False]) +def test_gmail_thread_search( + api_client, + test_gmail_thread, + default_account, + patch_token_manager, + patch_gmail_search_response, + sorted_gmail_messages, + sorted_gmail_threads, + is_streaming, +): search_client = get_search_client(default_account) assert isinstance(search_client, GmailSearchClient) if is_streaming: - threads = api_client.get_data('/threads/search/streaming?q=blah%20blah%20blah') + threads = api_client.get_data("/threads/search/streaming?q=blah%20blah%20blah") else: - threads = api_client.get_data('/threads/search?q=blah%20blah%20blah') + threads = api_client.get_data("/threads/search?q=blah%20blah%20blah") assert_search_result(sorted_gmail_threads, threads) -@pytest.mark.parametrize('is_streaming', [True, False]) -def test_gmail_search_unicode(db, api_client, test_gmail_thread, - patch_token_manager, - patch_gmail_search_response, - default_account, - sorted_gmail_messages, - sorted_gmail_threads, is_streaming): +@pytest.mark.parametrize("is_streaming", [True, False]) +def test_gmail_search_unicode( + db, + api_client, + test_gmail_thread, + patch_token_manager, + patch_gmail_search_response, + default_account, + sorted_gmail_messages, + sorted_gmail_threads, + is_streaming, +): search_client = get_search_client(default_account) assert isinstance(search_client, GmailSearchClient) if is_streaming: - threads = api_client.get_data('/threads/search/streaming?q=存档') + threads = api_client.get_data("/threads/search/streaming?q=存档") else: - threads = api_client.get_data('/threads/search?q=存档') + threads = api_client.get_data("/threads/search?q=存档") assert_search_result(sorted_gmail_threads, threads) -@pytest.mark.parametrize('is_streaming', [True, False]) -def test_invalid_gmail_account_search(db, api_client, default_account, - invalid_gmail_token, - patch_gmail_search_response, - sorted_gmail_messages, is_streaming): +@pytest.mark.parametrize("is_streaming", [True, False]) +def test_invalid_gmail_account_search( + db, + api_client, + default_account, + invalid_gmail_token, + patch_gmail_search_response, + sorted_gmail_messages, + is_streaming, +): if is_streaming: - response = api_client.get_raw('/messages/search/streaming?' - 'q=blah%20blah%20blah') + response = api_client.get_raw( + "/messages/search/streaming?" "q=blah%20blah%20blah" + ) else: - response = api_client.get_raw('/messages/search?' - 'q=blah%20blah%20blah') + response = api_client.get_raw("/messages/search?" "q=blah%20blah%20blah") assert response.status_code == 403 - assert "This search can\'t be performed because the account\'s "\ - "credentials are out of date." 
in json.loads(response.data)['message'] - - -@pytest.mark.parametrize('is_streaming', [True, False]) -def test_imap_message_search(imap_api_client, generic_account, - imap_folder, imap_connection, - sorted_imap_messages, is_streaming): + assert ( + "This search can't be performed because the account's " + "credentials are out of date." in json.loads(response.data)["message"] + ) + + +@pytest.mark.parametrize("is_streaming", [True, False]) +def test_imap_message_search( + imap_api_client, + generic_account, + imap_folder, + imap_connection, + sorted_imap_messages, + is_streaming, +): search_client = get_search_client(generic_account) assert isinstance(search_client, IMAPSearchClient) if is_streaming: - messages = imap_api_client.get_data('/messages/search/streaming?' - 'q=blah%20blah%20blah') + messages = imap_api_client.get_data( + "/messages/search/streaming?" "q=blah%20blah%20blah" + ) else: - messages = imap_api_client.get_data('/messages/search?' - 'q=blah%20blah%20blah') + messages = imap_api_client.get_data("/messages/search?" "q=blah%20blah%20blah") imap_connection.assert_search(["TEXT", "blah blah blah"]) assert_search_result(sorted_imap_messages, messages) -@pytest.mark.parametrize('is_streaming', [True, False]) -def test_imap_thread_search(imap_api_client, generic_account, - imap_folder, imap_connection, - sorted_imap_messages, - sorted_imap_threads, is_streaming): +@pytest.mark.parametrize("is_streaming", [True, False]) +def test_imap_thread_search( + imap_api_client, + generic_account, + imap_folder, + imap_connection, + sorted_imap_messages, + sorted_imap_threads, + is_streaming, +): search_client = get_search_client(generic_account) assert isinstance(search_client, IMAPSearchClient) if is_streaming: - threads = imap_api_client.get_data('/threads/search/streaming?q=blah%20blah%20blah') + threads = imap_api_client.get_data( + "/threads/search/streaming?q=blah%20blah%20blah" + ) else: - threads = imap_api_client.get_data('/threads/search?q=blah%20blah%20blah') + threads = imap_api_client.get_data("/threads/search?q=blah%20blah%20blah") imap_connection.assert_search(["TEXT", "blah blah blah"]) assert_search_result(sorted_imap_threads, threads) -@pytest.mark.parametrize('is_streaming', [True, False]) -def test_imap_thread_search_unicode(db, imap_api_client, generic_account, - imap_folder, imap_connection, - sorted_imap_messages, - sorted_imap_threads, is_streaming): +@pytest.mark.parametrize("is_streaming", [True, False]) +def test_imap_thread_search_unicode( + db, + imap_api_client, + generic_account, + imap_folder, + imap_connection, + sorted_imap_messages, + sorted_imap_threads, + is_streaming, +): search_client = get_search_client(generic_account) assert isinstance(search_client, IMAPSearchClient) if is_streaming: - threads = imap_api_client.get_data('/threads/search/streaming?q=存档') + threads = imap_api_client.get_data("/threads/search/streaming?q=存档") else: - threads = imap_api_client.get_data('/threads/search?q=存档') + threads = imap_api_client.get_data("/threads/search?q=存档") imap_connection.assert_search([u"TEXT", u"\u5b58\u6863"], "UTF-8") assert_search_result(sorted_imap_threads, threads) -@pytest.mark.parametrize('is_streaming', [True, False]) -def test_invalid_imap_account_search(db, imap_api_client, generic_account, - invalid_imap_connection, - imap_folder, - sorted_imap_messages, is_streaming): +@pytest.mark.parametrize("is_streaming", [True, False]) +def test_invalid_imap_account_search( + db, + imap_api_client, + generic_account, + invalid_imap_connection, + 
imap_folder, + sorted_imap_messages, + is_streaming, +): if is_streaming: # Because of the way streaming search work, it will return a # 200 response even though we can't access the account. - response = imap_api_client.get_raw('/messages/search/streaming?' - 'q=blah%20blah%20blah') + response = imap_api_client.get_raw( + "/messages/search/streaming?" "q=blah%20blah%20blah" + ) assert response.status_code == 200 else: - response = imap_api_client.get_raw('/messages/search?' - 'q=blah%20blah%20blah') + response = imap_api_client.get_raw("/messages/search?" "q=blah%20blah%20blah") assert response.status_code == 403 - assert "This search can\'t be performed because the account\'s "\ - "credentials are out of date." in json.loads(response.data)['message'] + assert ( + "This search can't be performed because the account's " + "credentials are out of date." in json.loads(response.data)["message"] + ) def assert_search_result(expected, actual): assert len(expected) == len(actual) for expected_item, actual_item in zip(expected, actual): - assert expected_item.public_id == actual_item['id'] - - -@pytest.mark.parametrize('endpoint', ['messages', 'threads']) -def test_streaming_search_results(db, imap_api_client, generic_account, - imap_folder, monkeypatch, sorted_imap_messages, - different_imap_messages, endpoint): + assert expected_item.public_id == actual_item["id"] + + +@pytest.mark.parametrize("endpoint", ["messages", "threads"]) +def test_streaming_search_results( + db, + imap_api_client, + generic_account, + imap_folder, + monkeypatch, + sorted_imap_messages, + different_imap_messages, + endpoint, +): # Check that the streaming search returns results from different # folders. class MultiFolderMockImapConnection(MockImapConnection): - def __init__(self): - self._responses = list(reversed([ - [2000, 2001, 2002], - [5000, 5001], - ])) + self._responses = list(reversed([[2000, 2001, 2002], [5000, 5001],])) def search(self, criteria, charset=None): self.search_args = (criteria, charset) @@ -452,18 +498,18 @@ def search(self, criteria, charset=None): conn = MultiFolderMockImapConnection() monkeypatch.setattr( - 'inbox.auth.generic.GenericAuthHandler.connect_account', - lambda *_, **__: conn) + "inbox.auth.generic.GenericAuthHandler.connect_account", lambda *_, **__: conn + ) search_client = get_search_client(generic_account) assert isinstance(search_client, IMAPSearchClient) - url = '/{}/search/streaming?q=fantastic'.format(endpoint) + url = "/{}/search/streaming?q=fantastic".format(endpoint) raw_data = imap_api_client.get_raw(url).data assert len(conn._responses) == 0, "Search should go through both folders" # The API returns JSON lists separated by '\n' - responses = raw_data.split('\n') - assert len(responses) == 3 and responses[2] == '' + responses = raw_data.split("\n") + assert len(responses) == 3 and responses[2] == "" assert len(json.loads(responses[0])) == 3 assert len(json.loads(responses[1])) == 2 diff --git a/inbox/test/api/test_sending.py b/inbox/test/api/test_sending.py index 6737d9988..8ae360139 100644 --- a/inbox/test/api/test_sending.py +++ b/inbox/test/api/test_sending.py @@ -17,46 +17,50 @@ from inbox.sendmail.smtp.postel import _substitute_bcc -__all__ = ['thread', 'message', 'api_client', 'imported_event'] +__all__ = ["thread", "message", "api_client", "imported_event"] class MockTokenManager(object): - def __init__(self, allow_auth=True): self.allow_auth = allow_auth def get_token(self, account, force_refresh=True): if self.allow_auth: # return a fake token. 
- return 'foo' + return "foo" raise OAuthError() class MockGoogleTokenManager(object): - def __init__(self, allow_auth=True): self.allow_auth = allow_auth def get_token_for_email(self, account, force_refresh=False): if self.allow_auth: - return 'foo' + return "foo" raise OAuthError() @pytest.fixture def patch_token_manager(monkeypatch): - monkeypatch.setattr('inbox.sendmail.smtp.postel.default_token_manager', - MockTokenManager()) - monkeypatch.setattr('inbox.sendmail.smtp.postel.g_token_manager', - MockGoogleTokenManager()) + monkeypatch.setattr( + "inbox.sendmail.smtp.postel.default_token_manager", MockTokenManager() + ) + monkeypatch.setattr( + "inbox.sendmail.smtp.postel.g_token_manager", MockGoogleTokenManager() + ) @pytest.fixture def disallow_auth(monkeypatch): - monkeypatch.setattr('inbox.sendmail.smtp.postel.default_token_manager', - MockTokenManager(allow_auth=False)) - monkeypatch.setattr('inbox.sendmail.smtp.postel.g_token_manager', - MockGoogleTokenManager(allow_auth=False)) + monkeypatch.setattr( + "inbox.sendmail.smtp.postel.default_token_manager", + MockTokenManager(allow_auth=False), + ) + monkeypatch.setattr( + "inbox.sendmail.smtp.postel.g_token_manager", + MockGoogleTokenManager(allow_auth=False), + ) @pytest.fixture @@ -64,7 +68,6 @@ def patch_smtp(patch_token_manager, monkeypatch): submitted_messages = [] class MockSMTPConnection(object): - def __init__(self, *args, **kwargs): pass @@ -77,14 +80,12 @@ def __exit__(self, exc_type, value, traceback): def sendmail(self, recipients, msg): submitted_messages.append((recipients, msg)) - monkeypatch.setattr('inbox.sendmail.smtp.postel.SMTPConnection', - MockSMTPConnection) + monkeypatch.setattr("inbox.sendmail.smtp.postel.SMTPConnection", MockSMTPConnection) return submitted_messages def erring_smtp_connection(exc_type, *args): class ErringSMTPConnection(object): - def __init__(self, *args, **kwargs): pass @@ -102,547 +103,602 @@ def sendmail(self, recipients, msg): # Different providers use slightly different errors, so parametrize this test # fixture to imitate them. 
-@pytest.fixture(params=[ - "5.4.5 Daily sending quota exceeded", - "5.7.1 You have exceeded your daily sending limits"]) +@pytest.fixture( + params=[ + "5.4.5 Daily sending quota exceeded", + "5.7.1 You have exceeded your daily sending limits", + ] +) @pytest.fixture def quota_exceeded(patch_token_manager, monkeypatch, request): - monkeypatch.setattr('inbox.sendmail.smtp.postel.SMTPConnection', - erring_smtp_connection( - smtplib.SMTPDataError, 550, - request.param)) + monkeypatch.setattr( + "inbox.sendmail.smtp.postel.SMTPConnection", + erring_smtp_connection(smtplib.SMTPDataError, 550, request.param), + ) @pytest.fixture def connection_closed(patch_token_manager, monkeypatch): - monkeypatch.setattr('inbox.sendmail.smtp.postel.SMTPConnection', - erring_smtp_connection(smtplib.SMTPServerDisconnected)) - - -@pytest.fixture(params=[ - "User unknown", - "5.1.1 : Recipient address rejected: " - "User unknown in virtual mailbox table" -]) + monkeypatch.setattr( + "inbox.sendmail.smtp.postel.SMTPConnection", + erring_smtp_connection(smtplib.SMTPServerDisconnected), + ) + + +@pytest.fixture( + params=[ + "User unknown", + "5.1.1 : Recipient address rejected: " + "User unknown in virtual mailbox table", + ] +) def recipients_refused(patch_token_manager, monkeypatch, request): - monkeypatch.setattr('inbox.sendmail.smtp.postel.SMTPConnection', - erring_smtp_connection(smtplib.SMTPRecipientsRefused, - {'foo@foocorp.com': - (550, request.param)})) + monkeypatch.setattr( + "inbox.sendmail.smtp.postel.SMTPConnection", + erring_smtp_connection( + smtplib.SMTPRecipientsRefused, {"foo@foocorp.com": (550, request.param)} + ), + ) # Different providers use slightly different errors, so parametrize this test # fixture to imitate them. -@pytest.fixture(params=[ - "5.2.3 Your message exceeded Google's message size limits", - "5.3.4 Message size exceeds fixed maximum message size"]) +@pytest.fixture( + params=[ + "5.2.3 Your message exceeded Google's message size limits", + "5.3.4 Message size exceeds fixed maximum message size", + ] +) def message_too_large(patch_token_manager, monkeypatch, request): monkeypatch.setattr( - 'inbox.sendmail.smtp.postel.SMTPConnection', - erring_smtp_connection( - smtplib.SMTPSenderRefused, 552, - request.param, None)) + "inbox.sendmail.smtp.postel.SMTPConnection", + erring_smtp_connection(smtplib.SMTPSenderRefused, 552, request.param, None), + ) @pytest.fixture def insecure_content(patch_token_manager, monkeypatch): monkeypatch.setattr( - 'inbox.sendmail.smtp.postel.SMTPConnection', + "inbox.sendmail.smtp.postel.SMTPConnection", erring_smtp_connection( - smtplib.SMTPDataError, 552, - '5.7.0 This message was blocked because its content presents a ' - 'potential\\n5.7.0 security issue.')) + smtplib.SMTPDataError, + 552, + "5.7.0 This message was blocked because its content presents a " + "potential\\n5.7.0 security issue.", + ), + ) @pytest.fixture def example_draft(db, default_account): return { - 'subject': 'Draft test', - 'body': '
<html><body><h2>Sea, birds and sand.</h2></body></html>
', - 'to': [{'name': 'The red-haired mermaid', - 'email': default_account.email_address}] + "subject": "Draft test", + "body": "
<html><body><h2>Sea, birds and sand.</h2></body></html>
", + "to": [ + {"name": "The red-haired mermaid", "email": default_account.email_address} + ], } @pytest.fixture def example_rsvp(imported_event): return { - 'event_id': imported_event.public_id, - 'comment': 'I will come.', - 'status': 'yes', + "event_id": imported_event.public_id, + "comment": "I will come.", + "status": "yes", } @pytest.fixture def example_draft_bad_subject(db, default_account): return { - 'subject': ['draft', 'test'], - 'body': '
<html><body><h2>Sea, birds and sand.</h2></body></html>
', - 'to': [{'name': 'The red-haired mermaid', - 'email': default_account.email_address}] + "subject": ["draft", "test"], + "body": "
<html><body><h2>Sea, birds and sand.</h2></body></html>
", + "to": [ + {"name": "The red-haired mermaid", "email": default_account.email_address} + ], } @pytest.fixture def example_draft_bad_body(db, default_account): return { - 'subject': 'Draft test', - 'body': {'foo': 'bar'}, - 'to': [{'name': 'The red-haired mermaid', - 'email': default_account.email_address}] + "subject": "Draft test", + "body": {"foo": "bar"}, + "to": [ + {"name": "The red-haired mermaid", "email": default_account.email_address} + ], } @pytest.fixture def example_event(db, api_client): from inbox.models.calendar import Calendar + cal = db.session.query(Calendar).get(1) event = { - 'title': 'Invite test', - 'when': { - "end_time": 1436210662, - "start_time": 1436207062 - }, - 'participants': [ - {'email': 'helena@nylas.com'} - ], - 'calendar_id': cal.public_id, + "title": "Invite test", + "when": {"end_time": 1436210662, "start_time": 1436207062}, + "participants": [{"email": "helena@nylas.com"}], + "calendar_id": cal.public_id, } - r = api_client.post_data('/events', event) - event_public_id = json.loads(r.data)['id'] + r = api_client.post_data("/events", event) + event_public_id = json.loads(r.data)["id"] return event_public_id def test_send_existing_draft(patch_smtp, api_client, example_draft): - r = api_client.post_data('/drafts', example_draft) - draft_public_id = json.loads(r.data)['id'] - version = json.loads(r.data)['version'] + r = api_client.post_data("/drafts", example_draft) + draft_public_id = json.loads(r.data)["id"] + version = json.loads(r.data)["version"] - r = api_client.post_data('/send', - {'draft_id': draft_public_id, - 'version': version}) + r = api_client.post_data("/send", {"draft_id": draft_public_id, "version": version}) assert r.status_code == 200 # Test that the sent draft can't be sent again. - r = api_client.post_data('/send', - {'draft_id': draft_public_id, - 'version': version}) + r = api_client.post_data("/send", {"draft_id": draft_public_id, "version": version}) assert r.status_code == 400 - drafts = api_client.get_data('/drafts') + drafts = api_client.get_data("/drafts") assert not drafts - message = api_client.get_data('/messages/{}'.format(draft_public_id)) - assert message['object'] == 'message' + message = api_client.get_data("/messages/{}".format(draft_public_id)) + assert message["object"] == "message" def test_send_rejected_without_version(api_client, example_draft): - r = api_client.post_data('/drafts', example_draft) - draft_public_id = json.loads(r.data)['id'] - r = api_client.post_data('/send', {'draft_id': draft_public_id}) + r = api_client.post_data("/drafts", example_draft) + draft_public_id = json.loads(r.data)["id"] + r = api_client.post_data("/send", {"draft_id": draft_public_id}) assert r.status_code == 400 def test_send_rejected_with_wrong_version(api_client, example_draft): - r = api_client.post_data('/drafts', example_draft) - draft_public_id = json.loads(r.data)['id'] - r = api_client.post_data('/send', {'draft_id': draft_public_id, - 'version': 222}) + r = api_client.post_data("/drafts", example_draft) + draft_public_id = json.loads(r.data)["id"] + r = api_client.post_data("/send", {"draft_id": draft_public_id, "version": 222}) assert r.status_code == 409 def test_send_rejected_without_recipients(api_client): - r = api_client.post_data('/drafts', {'subject': 'Hello there'}) - draft_public_id = json.loads(r.data)['id'] - version = json.loads(r.data)['version'] + r = api_client.post_data("/drafts", {"subject": "Hello there"}) + draft_public_id = json.loads(r.data)["id"] + version = json.loads(r.data)["version"] - r = 
api_client.post_data('/send', - {'draft_id': draft_public_id, - 'version': version}) + r = api_client.post_data("/send", {"draft_id": draft_public_id, "version": version}) assert r.status_code == 400 -def test_send_new_draft(patch_smtp, api_client, default_account, - example_draft): - r = api_client.post_data('/send', example_draft) +def test_send_new_draft(patch_smtp, api_client, default_account, example_draft): + r = api_client.post_data("/send", example_draft) assert r.status_code == 200 def test_malformed_body_rejected(api_client, example_draft_bad_body): - r = api_client.post_data('/send', example_draft_bad_body) + r = api_client.post_data("/send", example_draft_bad_body) assert r.status_code == 400 decoded = json.loads(r.get_data()) - assert decoded['type'] == 'invalid_request_error' - assert decoded['message'] == '"body" should be a string' + assert decoded["type"] == "invalid_request_error" + assert decoded["message"] == '"body" should be a string' def test_malformed_subject_rejected(api_client, example_draft_bad_subject): - r = api_client.post_data('/send', example_draft_bad_subject) + r = api_client.post_data("/send", example_draft_bad_subject) assert r.status_code == 400 decoded = json.loads(r.get_data()) - assert decoded['type'] == 'invalid_request_error' - assert decoded['message'] == '"subject" should be a string' + assert decoded["type"] == "invalid_request_error" + assert decoded["message"] == '"subject" should be a string' def test_malformed_request_rejected(api_client): - r = api_client.post_data('/send', {}) + r = api_client.post_data("/send", {}) assert r.status_code == 400 def test_recipient_validation(patch_smtp, api_client): - r = api_client.post_data('/drafts', {'to': [{'email': 'foo@example.com'}]}) + r = api_client.post_data("/drafts", {"to": [{"email": "foo@example.com"}]}) assert r.status_code == 200 - r = api_client.post_data('/drafts', {'to': {'email': 'foo@example.com'}}) + r = api_client.post_data("/drafts", {"to": {"email": "foo@example.com"}}) assert r.status_code == 400 - r = api_client.post_data('/drafts', {'to': 'foo@example.com'}) + r = api_client.post_data("/drafts", {"to": "foo@example.com"}) assert r.status_code == 400 - r = api_client.post_data('/drafts', {'to': [{'name': 'foo'}]}) + r = api_client.post_data("/drafts", {"to": [{"name": "foo"}]}) assert r.status_code == 400 - r = api_client.post_data('/send', {'to': [{'email': 'foo'}]}) + r = api_client.post_data("/send", {"to": [{"email": "foo"}]}) assert r.status_code == 400 - r = api_client.post_data('/send', {'to': [{'email': 'föö'}]}) + r = api_client.post_data("/send", {"to": [{"email": "föö"}]}) assert r.status_code == 400 - r = api_client.post_data('/drafts', {'to': [{'email': ['foo']}]}) + r = api_client.post_data("/drafts", {"to": [{"email": ["foo"]}]}) assert r.status_code == 400 - r = api_client.post_data('/drafts', {'to': [{'name': ['Mr. Foo'], - 'email': 'foo@example.com'}]}) + r = api_client.post_data( + "/drafts", {"to": [{"name": ["Mr. Foo"], "email": "foo@example.com"}]} + ) assert r.status_code == 400 - r = api_client.post_data('/drafts', - {'to': [{'name': 'Good Recipient', - 'email': 'goodrecipient@example.com'}, - 'badrecipient@example.com']}) + r = api_client.post_data( + "/drafts", + { + "to": [ + {"name": "Good Recipient", "email": "goodrecipient@example.com"}, + "badrecipient@example.com", + ] + }, + ) assert r.status_code == 400 # Test that sending a draft with invalid recipients fails. 
- for field in ('to', 'cc', 'bcc'): - r = api_client.post_data('/drafts', {field: [{'email': 'foo'}]}) - draft_id = json.loads(r.data)['id'] - draft_version = json.loads(r.data)['version'] - r = api_client.post_data('/send', {'draft_id': draft_id, - 'draft_version': draft_version}) + for field in ("to", "cc", "bcc"): + r = api_client.post_data("/drafts", {field: [{"email": "foo"}]}) + draft_id = json.loads(r.data)["id"] + draft_version = json.loads(r.data)["version"] + r = api_client.post_data( + "/send", {"draft_id": draft_id, "draft_version": draft_version} + ) assert r.status_code == 400 def test_handle_invalid_credentials(disallow_auth, api_client, example_draft): - r = api_client.post_data('/send', example_draft) + r = api_client.post_data("/send", example_draft) assert r.status_code == 403 - assert json.loads(r.data)['message'] == 'Could not authenticate with ' \ - 'the SMTP server.' + assert ( + json.loads(r.data)["message"] == "Could not authenticate with " + "the SMTP server." + ) def test_handle_quota_exceeded(quota_exceeded, api_client, example_draft): - r = api_client.post_data('/send', example_draft) + r = api_client.post_data("/send", example_draft) assert r.status_code == 429 - assert json.loads(r.data)['message'] == 'Daily sending quota exceeded' + assert json.loads(r.data)["message"] == "Daily sending quota exceeded" -def test_handle_server_disconnected(connection_closed, api_client, - example_draft): - r = api_client.post_data('/send', example_draft) +def test_handle_server_disconnected(connection_closed, api_client, example_draft): + r = api_client.post_data("/send", example_draft) assert r.status_code == 503 - assert json.loads(r.data)['message'] == 'The server unexpectedly closed ' \ - 'the connection' + assert ( + json.loads(r.data)["message"] == "The server unexpectedly closed " + "the connection" + ) -def test_handle_recipients_rejected(recipients_refused, api_client, - example_draft): - r = api_client.post_data('/send', example_draft) +def test_handle_recipients_rejected(recipients_refused, api_client, example_draft): + r = api_client.post_data("/send", example_draft) assert r.status_code == 402 - assert json.loads(r.data)['message'] == 'Sending to all recipients failed' + assert json.loads(r.data)["message"] == "Sending to all recipients failed" -def test_handle_message_too_large(message_too_large, api_client, - example_draft): - r = api_client.post_data('/send', example_draft) +def test_handle_message_too_large(message_too_large, api_client, example_draft): + r = api_client.post_data("/send", example_draft) assert r.status_code == 402 - assert json.loads(r.data)['message'] == 'Message too large' + assert json.loads(r.data)["message"] == "Message too large" -def test_message_rejected_for_security(insecure_content, api_client, - example_draft): - r = api_client.post_data('/send', example_draft) +def test_message_rejected_for_security(insecure_content, api_client, example_draft): + r = api_client.post_data("/send", example_draft) assert r.status_code == 402 - assert json.loads(r.data)['message'] == \ - 'Message content rejected for security reasons' + assert ( + json.loads(r.data)["message"] == "Message content rejected for security reasons" + ) def test_bcc_in_recipients_but_stripped_from_headers(patch_smtp, api_client): r = api_client.post_data( - '/send', + "/send", { - 'to': [{'email': 'bob@foocorp.com'}], - 'cc': [{'email': 'jane@foocorp.com'}], - 'bcc': [{'email': 'spies@nsa.gov'}], - 'subject': 'Banalities' - }) + "to": [{"email": "bob@foocorp.com"}], + "cc": 
[{"email": "jane@foocorp.com"}], + "bcc": [{"email": "spies@nsa.gov"}], + "subject": "Banalities", + }, + ) assert r.status_code == 200 recipients, msg = patch_smtp[0] - assert set(recipients) == {'bob@foocorp.com', 'jane@foocorp.com', - 'spies@nsa.gov'} + assert set(recipients) == {"bob@foocorp.com", "jane@foocorp.com", "spies@nsa.gov"} parsed = mime.from_string(msg) - assert 'Bcc' not in parsed.headers - assert parsed.headers.get('To') == 'bob@foocorp.com' - assert parsed.headers.get('Cc') == 'jane@foocorp.com' + assert "Bcc" not in parsed.headers + assert parsed.headers.get("To") == "bob@foocorp.com" + assert parsed.headers.get("Cc") == "jane@foocorp.com" -def test_reply_headers_set(db, patch_smtp, api_client, example_draft, thread, - message): - message.message_id_header = '' +def test_reply_headers_set(db, patch_smtp, api_client, example_draft, thread, message): + message.message_id_header = "" db.session.commit() - thread_id = api_client.get_data('/threads')[0]['id'] + thread_id = api_client.get_data("/threads")[0]["id"] - api_client.post_data('/send', {'to': [{'email': 'bob@foocorp.com'}], - 'thread_id': thread_id}) + api_client.post_data( + "/send", {"to": [{"email": "bob@foocorp.com"}], "thread_id": thread_id} + ) _, msg = patch_smtp[-1] parsed = mime.from_string(msg) - assert 'In-Reply-To' in parsed.headers - assert 'References' in parsed.headers + assert "In-Reply-To" in parsed.headers + assert "References" in parsed.headers def test_body_construction(patch_smtp, api_client): - api_client.post_data('/send', - {'to': [{'email': 'bob@foocorp.com'}], - 'subject': 'Banalities', - 'body': 'Hello there'}) + api_client.post_data( + "/send", + { + "to": [{"email": "bob@foocorp.com"}], + "subject": "Banalities", + "body": "Hello there", + }, + ) _, msg = patch_smtp[-1] parsed = mime.from_string(msg) assert len(parsed.parts) == 2 plain_part_found = False html_part_found = False for part in parsed.parts: - if part.content_type.value == 'text/plain': + if part.content_type.value == "text/plain": plain_part_found = True - assert part.body.strip() == 'Hello there' - elif part.content_type.value == 'text/html': + assert part.body.strip() == "Hello there" + elif part.content_type.value == "text/html": html_part_found = True - assert part.body.strip() == 'Hello there' + assert part.body.strip() == "Hello there" assert plain_part_found and html_part_found -def test_quoted_printable_encoding_avoided_for_compatibility( - patch_smtp, api_client): +def test_quoted_printable_encoding_avoided_for_compatibility(patch_smtp, api_client): # Test that messages with long lines don't get quoted-printable encoded, # for maximum server compatibility. api_client.post_data( - '/send', - {'to': [{'email': 'bob@foocorp.com'}], - 'subject': 'In Catilinam', - 'body': 'Etenim quid est, Catilina, quod iam amplius exspectes, si ' - 'neque nox tenebris obscurare coeptus nefarios neque privata domus ' - 'parietibus continere voces conjurationis tuae potest? Si ' - 'illustrantur, si erumpunt omnia? Muta iam istam mentem, mihi crede! ' - 'obliviscere caedis atque incendiorum. Teneris undique: luce sunt ' - 'clariora nobis tua consilia omnia; quae iam mecum licet recognoscas.' - ' Meministine me ante diem duodecimum Kalendas Novembres dicere in ' - 'senatu, fore in armis certo die, qui dies futurus esset ante diem ' - 'sextum Kalendas Novembres, C. Manlium, audaciae satellitem atque ' - 'administrum tuae? 
Num me fefellit, Catilina, non modo res tanta, tam' - ' atrox, tamque incredibilis, verum id quod multo magis admirandum, ' - 'dies? '}) + "/send", + { + "to": [{"email": "bob@foocorp.com"}], + "subject": "In Catilinam", + "body": "Etenim quid est, Catilina, quod iam amplius exspectes, si " + "neque nox tenebris obscurare coeptus nefarios neque privata domus " + "parietibus continere voces conjurationis tuae potest? Si " + "illustrantur, si erumpunt omnia? Muta iam istam mentem, mihi crede! " + "obliviscere caedis atque incendiorum. Teneris undique: luce sunt " + "clariora nobis tua consilia omnia; quae iam mecum licet recognoscas." + " Meministine me ante diem duodecimum Kalendas Novembres dicere in " + "senatu, fore in armis certo die, qui dies futurus esset ante diem " + "sextum Kalendas Novembres, C. Manlium, audaciae satellitem atque " + "administrum tuae? Num me fefellit, Catilina, non modo res tanta, tam" + " atrox, tamque incredibilis, verum id quod multo magis admirandum, " + "dies? ", + }, + ) _, msg = patch_smtp[-1] parsed = mime.from_string(msg) assert len(parsed.parts) == 2 for part in parsed.parts: - if part.content_type.value == 'text/html': - assert part.content_encoding[0] == 'base64' - elif part.content_type.value == 'text/plain': - assert part.content_encoding[0] in ('7bit', 'base64') + if part.content_type.value == "text/html": + assert part.content_encoding[0] == "base64" + elif part.content_type.value == "text/plain": + assert part.content_encoding[0] in ("7bit", "base64") -def test_draft_not_persisted_if_sending_fails(recipients_refused, api_client, - db): - api_client.post_data('/send', {'to': [{'email': 'bob@foocorp.com'}], - 'subject': 'some unique subject'}) - assert db.session.query(Message).filter_by( - subject='some unique subject').first() is None +def test_draft_not_persisted_if_sending_fails(recipients_refused, api_client, db): + api_client.post_data( + "/send", + {"to": [{"email": "bob@foocorp.com"}], "subject": "some unique subject"}, + ) + assert ( + db.session.query(Message).filter_by(subject="some unique subject").first() + is None + ) def test_setting_reply_to_headers(patch_smtp, api_client): - api_client.post_data('/send', - {'to': [{'email': 'bob@foocorp.com'}], - 'reply_to': [{'name': 'admin', - 'email': 'prez@whitehouse.gov'}], - 'subject': 'Banalities', - 'body': 'Hello there'}) + api_client.post_data( + "/send", + { + "to": [{"email": "bob@foocorp.com"}], + "reply_to": [{"name": "admin", "email": "prez@whitehouse.gov"}], + "subject": "Banalities", + "body": "Hello there", + }, + ) _, msg = patch_smtp[-1] parsed = mime.from_string(msg) - assert 'Reply-To' in parsed.headers - assert parsed.headers['Reply-To'] == 'admin ' + assert "Reply-To" in parsed.headers + assert parsed.headers["Reply-To"] == "admin " def test_sending_from_email_alias(patch_smtp, api_client): - api_client.post_data('/send', - {'to': [{'email': 'bob@foocorp.com'}], - 'from': [{'name': 'admin', - 'email': 'prez@whitehouse.gov'}], - 'subject': 'Banalities', - 'body': 'Hello there'}) + api_client.post_data( + "/send", + { + "to": [{"email": "bob@foocorp.com"}], + "from": [{"name": "admin", "email": "prez@whitehouse.gov"}], + "subject": "Banalities", + "body": "Hello there", + }, + ) _, msg = patch_smtp[-1] parsed = mime.from_string(msg) - assert 'From' in parsed.headers - assert parsed.headers['From'] == 'admin ' + assert "From" in parsed.headers + assert parsed.headers["From"] == "admin " def test_sending_raw_mime(patch_smtp, api_client): - api_client.post_raw('/send', ('From: 
bob@foocorp.com\r\n' - 'To: golang-nuts ' - '\r\n' - 'Cc: prez@whitehouse.gov\r\n' - 'Bcc: Some Guy \r\n' - 'Subject: ' - '[go-nuts] Runtime Panic On Method Call' - '\r\n' - 'Mime-Version: 1.0\r\n' - 'In-Reply-To: ' - '<78pgxboai332pi9p2smo4db73-0' - '@mailer.nylas.com>\r\n' - 'References: ' - '<78pgxboai332pi9p2smo4db73-0' - '@mailer.nylas.com>\r\n' - 'Content-Type: text/plain; charset=UTF-8' - '\r\n' - 'Content-Transfer-Encoding: 7bit\r\n' - 'X-My-Custom-Header: Random\r\n\r\n' - 'Yo.'), - headers={'Content-Type': 'message/rfc822'}) + api_client.post_raw( + "/send", + ( + "From: bob@foocorp.com\r\n" + "To: golang-nuts " + "\r\n" + "Cc: prez@whitehouse.gov\r\n" + "Bcc: Some Guy \r\n" + "Subject: " + "[go-nuts] Runtime Panic On Method Call" + "\r\n" + "Mime-Version: 1.0\r\n" + "In-Reply-To: " + "<78pgxboai332pi9p2smo4db73-0" + "@mailer.nylas.com>\r\n" + "References: " + "<78pgxboai332pi9p2smo4db73-0" + "@mailer.nylas.com>\r\n" + "Content-Type: text/plain; charset=UTF-8" + "\r\n" + "Content-Transfer-Encoding: 7bit\r\n" + "X-My-Custom-Header: Random\r\n\r\n" + "Yo." + ), + headers={"Content-Type": "message/rfc822"}, + ) _, msg = patch_smtp[-1] parsed = mime.from_string(msg) - assert parsed.body == 'Yo.' - assert parsed.headers['From'] == 'bob@foocorp.com' - assert parsed.headers['Subject'] == \ - '[go-nuts] Runtime Panic On Method Call' - assert parsed.headers['Cc'] == 'prez@whitehouse.gov' - assert parsed.headers['To'] == 'golang-nuts ' - assert parsed.headers['In-Reply-To'] == \ - '<78pgxboai332pi9p2smo4db73-0@mailer.nylas.com>' - assert parsed.headers['References'] == \ - '<78pgxboai332pi9p2smo4db73-0@mailer.nylas.com>' - assert parsed.headers['X-My-Custom-Header'] == 'Random' - assert 'Bcc' not in parsed.headers - assert 'X-INBOX-ID' in parsed.headers - assert 'Message-Id' in parsed.headers - assert 'User-Agent' in parsed.headers + assert parsed.body == "Yo." + assert parsed.headers["From"] == "bob@foocorp.com" + assert parsed.headers["Subject"] == "[go-nuts] Runtime Panic On Method Call" + assert parsed.headers["Cc"] == "prez@whitehouse.gov" + assert parsed.headers["To"] == "golang-nuts " + assert ( + parsed.headers["In-Reply-To"] + == "<78pgxboai332pi9p2smo4db73-0@mailer.nylas.com>" + ) + assert ( + parsed.headers["References"] == "<78pgxboai332pi9p2smo4db73-0@mailer.nylas.com>" + ) + assert parsed.headers["X-My-Custom-Header"] == "Random" + assert "Bcc" not in parsed.headers + assert "X-INBOX-ID" in parsed.headers + assert "Message-Id" in parsed.headers + assert "User-Agent" in parsed.headers def test_sending_bad_raw_mime(patch_smtp, api_client): - res = api_client.post_raw('/send', ('From: bob@foocorp.com\r\n' - 'To: \r\n' - 'Subject: ' - '[go-nuts] Runtime Panic On Method' - 'Call \r\n' - 'Mime-Version: 1.0\r\n' - 'Content-Type: ' - 'text/plain; charset=UTF-8\r\n' - 'Content-Transfer-Encoding: 7bit\r\n' - 'X-My-Custom-Header: Random' - '\r\n\r\n' - 'Yo.'), headers={'Content-Type': - 'message/rfc822'}) + res = api_client.post_raw( + "/send", + ( + "From: bob@foocorp.com\r\n" + "To: \r\n" + "Subject: " + "[go-nuts] Runtime Panic On Method" + "Call \r\n" + "Mime-Version: 1.0\r\n" + "Content-Type: " + "text/plain; charset=UTF-8\r\n" + "Content-Transfer-Encoding: 7bit\r\n" + "X-My-Custom-Header: Random" + "\r\n\r\n" + "Yo." 
+ ), + headers={"Content-Type": "message/rfc822"}, + ) assert res.status_code == 400 -def test_sending_from_email_multiple_aliases(patch_smtp, patch_token_manager, - api_client): - res = api_client.post_data('/send', - {'to': [{'email': 'bob@foocorp.com'}], - 'from': [{'name': 'admin', - 'email': 'prez@whitehouse.gov'}, - {'name': 'the rock', - 'email': 'd.johnson@gmail.com'}], - 'subject': 'Banalities', - 'body': 'Hello there'}) +def test_sending_from_email_multiple_aliases( + patch_smtp, patch_token_manager, api_client +): + res = api_client.post_data( + "/send", + { + "to": [{"email": "bob@foocorp.com"}], + "from": [ + {"name": "admin", "email": "prez@whitehouse.gov"}, + {"name": "the rock", "email": "d.johnson@gmail.com"}, + ], + "subject": "Banalities", + "body": "Hello there", + }, + ) assert res.status_code == 400 - res = api_client.post_data('/send', - {'to': [{'email': 'bob@foocorp.com'}], - 'reply_to': [{'name': 'admin', - 'email': 'prez@whitehouse.gov'}, - {'name': 'the rock', - 'email': 'd.johnson@gmail.com'}], - 'subject': 'Banalities', - 'body': 'Hello there'}) + res = api_client.post_data( + "/send", + { + "to": [{"email": "bob@foocorp.com"}], + "reply_to": [ + {"name": "admin", "email": "prez@whitehouse.gov"}, + {"name": "the rock", "email": "d.johnson@gmail.com"}, + ], + "subject": "Banalities", + "body": "Hello there", + }, + ) assert res.status_code == 400 def test_rsvp_invalid_credentials(disallow_auth, api_client, example_rsvp): - r = api_client.post_data('/send-rsvp', example_rsvp) + r = api_client.post_data("/send-rsvp", example_rsvp) assert r.status_code == 403 - assert json.loads(r.data)['message'] == 'Could not authenticate with ' \ - 'the SMTP server.' + assert ( + json.loads(r.data)["message"] == "Could not authenticate with " + "the SMTP server." 
+ ) def test_rsvp_quota_exceeded(quota_exceeded, api_client, example_rsvp): - r = api_client.post_data('/send-rsvp', example_rsvp) + r = api_client.post_data("/send-rsvp", example_rsvp) assert r.status_code == 429 - assert json.loads(r.data)['message'] == 'Daily sending quota exceeded' + assert json.loads(r.data)["message"] == "Daily sending quota exceeded" def test_rsvp_server_disconnected(connection_closed, api_client, example_rsvp): - r = api_client.post_data('/send-rsvp', example_rsvp) + r = api_client.post_data("/send-rsvp", example_rsvp) assert r.status_code == 503 - assert json.loads(r.data)['message'] == 'The server unexpectedly closed ' \ - 'the connection' + assert ( + json.loads(r.data)["message"] == "The server unexpectedly closed " + "the connection" + ) -def test_rsvp_recipients_rejected(recipients_refused, api_client, - example_rsvp): - r = api_client.post_data('/send-rsvp', example_rsvp) +def test_rsvp_recipients_rejected(recipients_refused, api_client, example_rsvp): + r = api_client.post_data("/send-rsvp", example_rsvp) assert r.status_code == 402 - assert json.loads(r.data)['message'] == 'Sending to all recipients failed' + assert json.loads(r.data)["message"] == "Sending to all recipients failed" def test_rsvp_message_too_large(message_too_large, api_client, example_rsvp): - r = api_client.post_data('/send-rsvp', example_rsvp) + r = api_client.post_data("/send-rsvp", example_rsvp) assert r.status_code == 402 - assert json.loads(r.data)['message'] == 'Message too large' + assert json.loads(r.data)["message"] == "Message too large" -def test_rsvp_message_rejected_for_security(insecure_content, api_client, - example_rsvp): - r = api_client.post_data('/send-rsvp', example_rsvp) +def test_rsvp_message_rejected_for_security(insecure_content, api_client, example_rsvp): + r = api_client.post_data("/send-rsvp", example_rsvp) assert r.status_code == 402 - assert json.loads(r.data)['message'] == \ - 'Message content rejected for security reasons' + assert ( + json.loads(r.data)["message"] == "Message content rejected for security reasons" + ) -def test_rsvp_updates_status(patch_smtp, api_client, example_rsvp, - imported_event): +def test_rsvp_updates_status(patch_smtp, api_client, example_rsvp, imported_event): assert len(imported_event.participants) == 1 - assert imported_event.participants[0]['email'] == 'inboxapptest@gmail.com' - assert imported_event.participants[0]['status'] == 'noreply' + assert imported_event.participants[0]["email"] == "inboxapptest@gmail.com" + assert imported_event.participants[0]["status"] == "noreply" - r = api_client.post_data('/send-rsvp', example_rsvp) + r = api_client.post_data("/send-rsvp", example_rsvp) assert r.status_code == 200 dct = json.loads(r.data) # check that the event's status got updated - assert len(dct['participants']) == 1 - assert dct['participants'][0]['email'] == 'inboxapptest@gmail.com' - assert dct['participants'][0]['status'] == 'yes' - assert dct['participants'][0]['comment'] == 'I will come.' 
- - -@pytest.mark.parametrize('status,comment', [ - ('yes', ''), - ('no', ''), - ('yes', None), - ('maybe', None), - ('yes', 'I will come'), - ('no', "I won't come"), - ('yes', u"Нэ дуо рэгяонэ фабулаз аккоммодары."), -]) -def test_rsvp_idempotent(db, patch_smtp, api_client, example_rsvp, - imported_event, status, comment): + assert len(dct["participants"]) == 1 + assert dct["participants"][0]["email"] == "inboxapptest@gmail.com" + assert dct["participants"][0]["status"] == "yes" + assert dct["participants"][0]["comment"] == "I will come." + + +@pytest.mark.parametrize( + "status,comment", + [ + ("yes", ""), + ("no", ""), + ("yes", None), + ("maybe", None), + ("yes", "I will come"), + ("no", "I won't come"), + ("yes", u"Нэ дуо рэгяонэ фабулаз аккоммодары."), + ], +) +def test_rsvp_idempotent( + db, patch_smtp, api_client, example_rsvp, imported_event, status, comment +): part = imported_event.participants[0] - part['status'] = status - part['comment'] = comment + part["status"] = status + part["comment"] = comment # MutableList shenanigans -- it won't update # what's stored in the db otherwise. @@ -655,18 +711,17 @@ def test_rsvp_idempotent(db, patch_smtp, api_client, example_rsvp, old_update_date = imported_event.updated_at db.session.expunge(imported_event) - rsvp = {'event_id': imported_event.public_id, - 'status': status, 'comment': comment} - r = api_client.post_data('/send-rsvp', rsvp) + rsvp = {"event_id": imported_event.public_id, "status": status, "comment": comment} + r = api_client.post_data("/send-rsvp", rsvp) assert r.status_code == 200 dct = json.loads(r.data) # check that the event's status is the same. - assert len(dct['participants']) == 1 - assert dct['participants'][0]['email'] == 'inboxapptest@gmail.com' - assert dct['participants'][0]['status'] == status + assert len(dct["participants"]) == 1 + assert dct["participants"][0]["email"] == "inboxapptest@gmail.com" + assert dct["participants"][0]["status"] == status - assert dct['participants'][0]['comment'] == comment + assert dct["participants"][0]["comment"] == comment # Check that the event hasn't been updated. 
refreshed_event = db.session.query(Event).get(imported_event.id) @@ -675,88 +730,84 @@ def test_rsvp_idempotent(db, patch_smtp, api_client, example_rsvp, def test_sent_messages_shown_in_delta(patch_smtp, api_client, example_draft): ts = int(time.time()) - r = api_client.post_data('/delta/generate_cursor', {'start': ts}) - cursor = json.loads(r.data)['cursor'] - r = api_client.post_data('/send', example_draft) - message_id = json.loads(r.data)['id'] - deltas = api_client.get_data('/delta?cursor={}'.format(cursor))['deltas'] - message_delta = next((d for d in deltas if d['id'] == message_id), None) + r = api_client.post_data("/delta/generate_cursor", {"start": ts}) + cursor = json.loads(r.data)["cursor"] + r = api_client.post_data("/send", example_draft) + message_id = json.loads(r.data)["id"] + deltas = api_client.get_data("/delta?cursor={}".format(cursor))["deltas"] + message_delta = next((d for d in deltas if d["id"] == message_id), None) assert message_delta is not None - assert message_delta['object'] == 'message' - assert message_delta['event'] == 'create' + assert message_delta["object"] == "message" + assert message_delta["event"] == "create" # MULTI-SEND # + def test_multisend_init_new_draft(patch_smtp, api_client, example_draft): - r = api_client.post_data('/send-multiple', - example_draft) + r = api_client.post_data("/send-multiple", example_draft) assert r.status_code == 200 - draft_public_id = json.loads(r.data)['id'] + draft_public_id = json.loads(r.data)["id"] # Test that the sent draft can't be sent normally now - r = api_client.post_data('/send', - {'draft_id': draft_public_id, - 'version': 0}) + r = api_client.post_data("/send", {"draft_id": draft_public_id, "version": 0}) assert r.status_code == 400 # It's not a draft anymore - drafts = api_client.get_data('/drafts') + drafts = api_client.get_data("/drafts") assert not drafts # We can retrieve it as a message, but it's not "sent" yet - message = api_client.get_data('/messages/{}'.format(draft_public_id)) - assert message['object'] == 'message' + message = api_client.get_data("/messages/{}".format(draft_public_id)) + assert message["object"] == "message" -def test_multisend_init_rejected_with_existing_draft(api_client, - example_draft): - r = api_client.post_data('/drafts', example_draft) - draft_public_id = json.loads(r.data)['id'] - version = json.loads(r.data)['version'] +def test_multisend_init_rejected_with_existing_draft(api_client, example_draft): + r = api_client.post_data("/drafts", example_draft) + draft_public_id = json.loads(r.data)["id"] + version = json.loads(r.data)["version"] - r = api_client.post_data('/send-multiple', - {'draft_id': draft_public_id, - 'version': version}) + r = api_client.post_data( + "/send-multiple", {"draft_id": draft_public_id, "version": version} + ) assert r.status_code == 400 def test_multisend_init_rejected_without_recipients(api_client): - r = api_client.post_data('/send-multiple', - {'subject': 'Hello there'}) + r = api_client.post_data("/send-multiple", {"subject": "Hello there"}) assert r.status_code == 400 -def test_multisend_init_malformed_body_rejected(api_client, - example_draft_bad_body): - r = api_client.post_data('/send-multiple', example_draft_bad_body) +def test_multisend_init_malformed_body_rejected(api_client, example_draft_bad_body): + r = api_client.post_data("/send-multiple", example_draft_bad_body) assert r.status_code == 400 decoded = json.loads(r.get_data()) - assert decoded['type'] == 'invalid_request_error' - assert decoded['message'] == '"body" should be a 
string' + assert decoded["type"] == "invalid_request_error" + assert decoded["message"] == '"body" should be a string' -def test_multisend_init_malformed_subject_rejected(api_client, - example_draft_bad_subject): - r = api_client.post_data('/send-multiple', example_draft_bad_subject) +def test_multisend_init_malformed_subject_rejected( + api_client, example_draft_bad_subject +): + r = api_client.post_data("/send-multiple", example_draft_bad_subject) assert r.status_code == 400 decoded = json.loads(r.get_data()) - assert decoded['type'] == 'invalid_request_error' - assert decoded['message'] == '"subject" should be a string' + assert decoded["type"] == "invalid_request_error" + assert decoded["message"] == '"subject" should be a string' def test_multisend_init_malformed_request_rejected(api_client): - r = api_client.post_data('/send-multiple', {}) + r = api_client.post_data("/send-multiple", {}) assert r.status_code == 400 @pytest.fixture def multisend_draft(api_client, example_draft): - example_draft['to'].append({'email': 'bob@foocorp.com'}) - r = api_client.post_data('/send-multiple', example_draft) + example_draft["to"].append({"email": "bob@foocorp.com"}) + r = api_client.post_data("/send-multiple", example_draft) assert r.status_code == 200 return json.loads(r.get_data()) @@ -764,20 +815,18 @@ def multisend_draft(api_client, example_draft): @pytest.fixture def multisend(multisend_draft): return { - 'id': multisend_draft['id'], - 'send_req': {'body': "email body", - 'send_to': multisend_draft['to'][0]}, - 'draft': multisend_draft + "id": multisend_draft["id"], + "send_req": {"body": "email body", "send_to": multisend_draft["to"][0]}, + "draft": multisend_draft, } @pytest.fixture def multisend2(multisend_draft): return { - 'id': multisend_draft['id'], - 'send_req': {'body': "email body 2", - 'send_to': multisend_draft['to'][1]}, - 'draft': multisend_draft + "id": multisend_draft["id"], + "send_req": {"body": "email body 2", "send_to": multisend_draft["to"][1]}, + "draft": multisend_draft, } @@ -791,8 +840,9 @@ def patch_crispin_del_sent(monkeypatch): # sent messages. They usually don't appear in API code, so this makes sure # their usage is correct. 
- def fake_remote_delete_sent(crispin_client, account_id, message_id_header, - delete_multiple=False): + def fake_remote_delete_sent( + crispin_client, account_id, message_id_header, delete_multiple=False + ): return True class FakeConnWrapper(object): @@ -805,19 +855,16 @@ def get(self): class MockCrispinClient(object): def folder_names(self): - return ['sent'] + return ["sent"] def delete_sent_message(message_id_header, delete_multiple=False): pass - def fake_conn_pool(acct_id): return FakeConnWrapper() - monkeypatch.setattr('inbox.api.ns_api.remote_delete_sent', - fake_remote_delete_sent) - monkeypatch.setattr('inbox.api.ns_api.writable_connection_pool', - fake_conn_pool) + monkeypatch.setattr("inbox.api.ns_api.remote_delete_sent", fake_remote_delete_sent) + monkeypatch.setattr("inbox.api.ns_api.writable_connection_pool", fake_conn_pool) @pytest.fixture @@ -828,158 +875,179 @@ def patch_sentry_to_raise(monkeypatch): def make_sentry_raise(): traceback.print_exc() raise - monkeypatch.setattr(nylas.logging.sentry, 'sentry_alert', - make_sentry_raise) + monkeypatch.setattr(nylas.logging.sentry, "sentry_alert", make_sentry_raise) -def test_multisend_session(api_client, multisend, multisend2, patch_smtp, - patch_crispin_del_sent, patch_sentry_to_raise): - r = api_client.post_data('/send-multiple/' + multisend['id'], - multisend['send_req']) +def test_multisend_session( + api_client, + multisend, + multisend2, + patch_smtp, + patch_crispin_del_sent, + patch_sentry_to_raise, +): + + r = api_client.post_data("/send-multiple/" + multisend["id"], multisend["send_req"]) assert r.status_code == 200 - assert json.loads(r.data)['body'] == multisend['send_req']['body'] + assert json.loads(r.data)["body"] == multisend["send_req"]["body"] - r = api_client.post_data('/send-multiple/' + multisend2['id'], - multisend2['send_req']) + r = api_client.post_data( + "/send-multiple/" + multisend2["id"], multisend2["send_req"] + ) assert r.status_code == 200 - assert json.loads(r.data)['body'] == multisend2['send_req']['body'] + assert json.loads(r.data)["body"] == multisend2["send_req"]["body"] # Make sure we can't send to people not in the message recipients - req_body = {'send_req': {'body': "you're not even a recipient!", - 'send_to': {'name': 'not in message', - 'email': 'not@in.msg'}}} - r = api_client.post_data('/send-multiple/' + multisend['id'], req_body) + req_body = { + "send_req": { + "body": "you're not even a recipient!", + "send_to": {"name": "not in message", "email": "not@in.msg"}, + } + } + r = api_client.post_data("/send-multiple/" + multisend["id"], req_body) assert r.status_code == 400 - r = api_client.delete('/send-multiple/' + multisend['id']) + r = api_client.delete("/send-multiple/" + multisend["id"]) assert r.status_code == 200 - assert json.loads(r.data)['body'] == multisend['draft']['body'] + assert json.loads(r.data)["body"] == multisend["draft"]["body"] -def test_multisend_handle_invalid_credentials(disallow_auth, api_client, - multisend, - patch_crispin_del_sent): - r = api_client.post_data('/send-multiple/' + multisend['id'], - multisend['send_req']) +def test_multisend_handle_invalid_credentials( + disallow_auth, api_client, multisend, patch_crispin_del_sent +): + r = api_client.post_data("/send-multiple/" + multisend["id"], multisend["send_req"]) assert r.status_code == 403 - assert json.loads(r.data)['message'] == 'Could not authenticate with ' \ - 'the SMTP server.' + assert ( + json.loads(r.data)["message"] == "Could not authenticate with " + "the SMTP server." 
+ ) -def test_multisend_handle_quota_exceeded(quota_exceeded, api_client, - multisend, patch_crispin_del_sent): - r = api_client.post_data('/send-multiple/' + multisend['id'], - multisend['send_req']) +def test_multisend_handle_quota_exceeded( + quota_exceeded, api_client, multisend, patch_crispin_del_sent +): + r = api_client.post_data("/send-multiple/" + multisend["id"], multisend["send_req"]) assert r.status_code == 429 - assert json.loads(r.data)['message'] == 'Daily sending quota exceeded' + assert json.loads(r.data)["message"] == "Daily sending quota exceeded" -def test_multisend_handle_server_disconnected(connection_closed, api_client, - multisend, - patch_crispin_del_sent): - r = api_client.post_data('/send-multiple/' + multisend['id'], - multisend['send_req']) +def test_multisend_handle_server_disconnected( + connection_closed, api_client, multisend, patch_crispin_del_sent +): + r = api_client.post_data("/send-multiple/" + multisend["id"], multisend["send_req"]) assert r.status_code == 503 - assert json.loads(r.data)['message'] == 'The server unexpectedly closed ' \ - 'the connection' + assert ( + json.loads(r.data)["message"] == "The server unexpectedly closed " + "the connection" + ) -def test_multisend_handle_recipients_rejected(recipients_refused, api_client, - multisend, - patch_crispin_del_sent): - r = api_client.post_data('/send-multiple/' + multisend['id'], - multisend['send_req']) +def test_multisend_handle_recipients_rejected( + recipients_refused, api_client, multisend, patch_crispin_del_sent +): + r = api_client.post_data("/send-multiple/" + multisend["id"], multisend["send_req"]) assert r.status_code == 402 - assert json.loads(r.data)['message'] == 'Sending to all recipients failed' + assert json.loads(r.data)["message"] == "Sending to all recipients failed" -def test_multisend_handle_message_too_large(message_too_large, api_client, - multisend, patch_crispin_del_sent): - r = api_client.post_data('/send-multiple/' + multisend['id'], - multisend['send_req']) +def test_multisend_handle_message_too_large( + message_too_large, api_client, multisend, patch_crispin_del_sent +): + r = api_client.post_data("/send-multiple/" + multisend["id"], multisend["send_req"]) assert r.status_code == 402 - assert json.loads(r.data)['message'] == 'Message too large' + assert json.loads(r.data)["message"] == "Message too large" -def test_multisend_message_rejected_for_security(insecure_content, api_client, - multisend, - patch_crispin_del_sent): - r = api_client.post_data('/send-multiple/' + multisend['id'], - multisend['send_req']) +def test_multisend_message_rejected_for_security( + insecure_content, api_client, multisend, patch_crispin_del_sent +): + r = api_client.post_data("/send-multiple/" + multisend["id"], multisend["send_req"]) assert r.status_code == 402 - assert json.loads(r.data)['message'] == 'Message content rejected ' \ - 'for security reasons' + assert ( + json.loads(r.data)["message"] == "Message content rejected " + "for security reasons" + ) def test_raw_bcc_replacements(patch_smtp, api_client): # Check that we're replacing "Bcc:" correctly from messages. 
- res = _substitute_bcc('From: bob@foocorp.com\r\n' - 'To: \r\n' - 'Bcc: karim@nylas.com\r\n' - 'Subject: ' - '[go-nuts] Runtime Panic On Method' - 'Call \r\n' - 'Mime-Version: 1.0\r\n' - 'Content-Type: ' - 'text/plain; charset=UTF-8\r\n' - 'Content-Transfer-Encoding: 7bit\r\n' - 'X-My-Custom-Header: Random' - '\r\n\r\n') - - assert 'karim@nylas.com' not in res - - res = _substitute_bcc('From: bob@foocorp.com\r\n' - 'To: \r\n' - 'BCC: karim@nylas.com\r\n' - 'Subject: ' - '[go-nuts] Runtime BCC: On Method' - 'Call \r\n' - 'Mime-Version: 1.0\r\n' - 'Content-Type: ' - 'text/plain; charset=UTF-8\r\n' - 'Content-Transfer-Encoding: 7bit\r\n' - 'X-My-Custom-Header: Random' - '\r\n\r\n') - - assert 'karim@nylas.com' not in res - assert 'Runtime BCC:' in res + res = _substitute_bcc( + "From: bob@foocorp.com\r\n" + "To: \r\n" + "Bcc: karim@nylas.com\r\n" + "Subject: " + "[go-nuts] Runtime Panic On Method" + "Call \r\n" + "Mime-Version: 1.0\r\n" + "Content-Type: " + "text/plain; charset=UTF-8\r\n" + "Content-Transfer-Encoding: 7bit\r\n" + "X-My-Custom-Header: Random" + "\r\n\r\n" + ) + + assert "karim@nylas.com" not in res + + res = _substitute_bcc( + "From: bob@foocorp.com\r\n" + "To: \r\n" + "BCC: karim@nylas.com\r\n" + "Subject: " + "[go-nuts] Runtime BCC: On Method" + "Call \r\n" + "Mime-Version: 1.0\r\n" + "Content-Type: " + "text/plain; charset=UTF-8\r\n" + "Content-Transfer-Encoding: 7bit\r\n" + "X-My-Custom-Header: Random" + "\r\n\r\n" + ) + + assert "karim@nylas.com" not in res + assert "Runtime BCC:" in res def test_inline_image_send(patch_smtp, api_client, uploaded_file_ids): file_id = uploaded_file_ids[0] - r = api_client.post_data('/send', { - 'subject': 'Inline image test', - 'body': 'Before image\r\n[cid:{}]\r\nAfter image'.format(file_id), - 'file_ids': [file_id], - 'to': [{'name': 'Foo Bar', - 'email': 'foobar@nylas.com'}] - }) + r = api_client.post_data( + "/send", + { + "subject": "Inline image test", + "body": "Before image\r\n[cid:{}]\r\nAfter image".format(file_id), + "file_ids": [file_id], + "to": [{"name": "Foo Bar", "email": "foobar@nylas.com"}], + }, + ) assert r.status_code == 200 _, msg = patch_smtp[-1] parsed = mime.from_string(msg) for mimepart in parsed.walk(): - if mimepart.headers['Content-Type'] == 'image/jpeg': - assert mimepart.headers['Content-Id'] == '<{}>'.format(file_id) - assert mimepart.headers['Content-Disposition'][0] == 'inline' + if mimepart.headers["Content-Type"] == "image/jpeg": + assert mimepart.headers["Content-Id"] == "<{}>".format(file_id) + assert mimepart.headers["Content-Disposition"][0] == "inline" def test_inline_html_image_send(patch_smtp, api_client, uploaded_file_ids): file_id = uploaded_file_ids[0] - r = api_client.post_data('/send', { - 'subject': 'Inline image test', - 'body': 'Before image\r\n[cid:{}]\r\nAfter image'.format(file_id), - 'body': '
'.format(file_id), - 'file_ids': [file_id], - 'to': [{'name': 'Foo Bar', - 'email': 'foobar@nylas.com'}] - }) + r = api_client.post_data( + "/send", + { + "subject": "Inline image test", + "body": "Before image\r\n[cid:{}]\r\nAfter image".format(file_id), + "body": '
'.format( + file_id + ), + "file_ids": [file_id], + "to": [{"name": "Foo Bar", "email": "foobar@nylas.com"}], + }, + ) assert r.status_code == 200 _, msg = patch_smtp[-1] parsed = mime.from_string(msg) for mimepart in parsed.walk(): - if mimepart.headers['Content-Type'] == 'image/jpeg': - assert mimepart.headers['Content-Id'] == '<{}>'.format(file_id) - assert mimepart.headers['Content-Disposition'][0] == 'inline' + if mimepart.headers["Content-Type"] == "image/jpeg": + assert mimepart.headers["Content-Id"] == "<{}>".format(file_id) + assert mimepart.headers["Content-Disposition"][0] == "inline" diff --git a/inbox/test/api/test_streaming.py b/inbox/test/api/test_streaming.py index c41d5875e..34b55486b 100644 --- a/inbox/test/api/test_streaming.py +++ b/inbox/test/api/test_streaming.py @@ -7,154 +7,151 @@ from inbox.util.url import url_concat from inbox.test.api.base import api_client -GEVENT_EPSILON = .5 # Greenlet switching time. VMs on Macs suck :() +GEVENT_EPSILON = 0.5 # Greenlet switching time. VMs on Macs suck :() LONGPOLL_EPSILON = 2 + GEVENT_EPSILON # API implementation polls every second -__all__ = ['api_client'] +__all__ = ["api_client"] @pytest.yield_fixture def streaming_test_client(db): from inbox.api.srv import app - app.config['TESTING'] = True + + app.config["TESTING"] = True with app.test_client() as c: yield c def get_cursor(api_client, timestamp, namespace): cursor_response = api_client.post_data( - '/delta/generate_cursor', - data={'start': timestamp}) - return json.loads(cursor_response.data)['cursor'] + "/delta/generate_cursor", data={"start": timestamp} + ) + return json.loads(cursor_response.data)["cursor"] def validate_response_format(response_string): response = json.loads(response_string) - assert 'cursor' in response - assert 'attributes' in response - assert 'object' in response - assert 'id' in response - assert 'event' in response + assert "cursor" in response + assert "attributes" in response + assert "object" in response + assert "id" in response + assert "event" in response -def test_response_when_old_cursor_given(db, api_client, - default_namespace): - url = url_concat('/delta/streaming', {'timeout': .1, - 'cursor': '0'}) +def test_response_when_old_cursor_given(db, api_client, default_namespace): + url = url_concat("/delta/streaming", {"timeout": 0.1, "cursor": "0"}) r = api_client.get_raw(url) assert r.status_code == 200 - responses = r.data.split('\n') + responses = r.data.split("\n") for response_string in responses: if response_string: validate_response_format(response_string) -def test_empty_response_when_latest_cursor_given(db, - api_client, - default_namespace): - cursor = get_cursor(api_client, int(time.time() + 22), - default_namespace) - url = url_concat('/delta/streaming', {'timeout': .1, - 'cursor': cursor}) +def test_empty_response_when_latest_cursor_given(db, api_client, default_namespace): + cursor = get_cursor(api_client, int(time.time() + 22), default_namespace) + url = url_concat("/delta/streaming", {"timeout": 0.1, "cursor": cursor}) r = api_client.get_raw(url) assert r.status_code == 200 - assert r.data.strip() == '' + assert r.data.strip() == "" -def test_exclude_and_include_object_types(db, - api_client, thread, - default_namespace): +def test_exclude_and_include_object_types(db, api_client, thread, default_namespace): - add_fake_message(db.session, default_namespace.id, thread, - from_addr=[('Bob', 'bob@foocorp.com')]) + add_fake_message( + db.session, default_namespace.id, thread, from_addr=[("Bob", "bob@foocorp.com")] + ) # Check 
that we do get message and contact changes by default. - url = url_concat('/delta/streaming', {'timeout': .1, - 'cursor': '0'}) + url = url_concat("/delta/streaming", {"timeout": 0.1, "cursor": "0"}) r = api_client.get_raw(url) assert r.status_code == 200 - responses = r.data.split('\n') - parsed_responses = [json.loads(resp) for resp in responses if resp != ''] - assert any(resp['object'] == 'message' for resp in parsed_responses) - assert any(resp['object'] == 'contact' for resp in parsed_responses) + responses = r.data.split("\n") + parsed_responses = [json.loads(resp) for resp in responses if resp != ""] + assert any(resp["object"] == "message" for resp in parsed_responses) + assert any(resp["object"] == "contact" for resp in parsed_responses) # And check that we don't get message/contact changes if we exclude them. - url = url_concat('/delta/streaming', {'timeout': .1, - 'cursor': '0', - 'exclude_types': 'message,contact'}) + url = url_concat( + "/delta/streaming", + {"timeout": 0.1, "cursor": "0", "exclude_types": "message,contact"}, + ) r = api_client.get_raw(url) assert r.status_code == 200 - responses = r.data.split('\n') - parsed_responses = [json.loads(resp) for resp in responses if resp != ''] - assert not any(resp['object'] == 'message' for resp in parsed_responses) - assert not any(resp['object'] == 'contact' for resp in parsed_responses) + responses = r.data.split("\n") + parsed_responses = [json.loads(resp) for resp in responses if resp != ""] + assert not any(resp["object"] == "message" for resp in parsed_responses) + assert not any(resp["object"] == "contact" for resp in parsed_responses) # And check we only get message objects if we use include_types - url = url_concat('/delta/streaming', {'timeout': .1, - 'cursor': '0', - 'include_types': 'message'}) + url = url_concat( + "/delta/streaming", {"timeout": 0.1, "cursor": "0", "include_types": "message"} + ) r = api_client.get_raw(url) assert r.status_code == 200 - responses = r.data.split('\n') - parsed_responses = [json.loads(resp) for resp in responses if resp != ''] - assert all(resp['object'] == 'message' for resp in parsed_responses) + responses = r.data.split("\n") + parsed_responses = [json.loads(resp) for resp in responses if resp != ""] + assert all(resp["object"] == "message" for resp in parsed_responses) def test_expanded_view(db, api_client, thread, message, default_namespace): - url = url_concat('/delta/streaming', {'timeout': .1, 'cursor': '0', - 'include_types': 'message,thread', - 'view': 'expanded'}) + url = url_concat( + "/delta/streaming", + { + "timeout": 0.1, + "cursor": "0", + "include_types": "message,thread", + "view": "expanded", + }, + ) r = api_client.get_raw(url) assert r.status_code == 200 - responses = r.data.split('\n') - parsed_responses = [json.loads(resp) for resp in responses if resp != ''] + responses = r.data.split("\n") + parsed_responses = [json.loads(resp) for resp in responses if resp != ""] for delta in parsed_responses: - if delta['object'] == 'message': - assert 'headers' in delta['attributes'] - elif delta['object'] == 'thread': - assert 'messages' in delta['attributes'] + if delta["object"] == "message": + assert "headers" in delta["attributes"] + elif delta["object"] == "thread": + assert "messages" in delta["attributes"] def test_invalid_timestamp(api_client, default_namespace): # Valid UNIX timestamp response = api_client.post_data( - '/delta/generate_cursor', - data={'start': int(time.time())}) + "/delta/generate_cursor", data={"start": int(time.time())} + ) assert 
response.status_code == 200 # Invalid timestamp response = api_client.post_data( - '/delta/generate_cursor', - data={'start': 1434591487647}) + "/delta/generate_cursor", data={"start": 1434591487647} + ) assert response.status_code == 400 -def test_longpoll_delta_newitem(db, api_client, - default_namespace, thread): - cursor = get_cursor(api_client, int(time.time() + 22), - default_namespace) - url = url_concat('/delta/longpoll', {'cursor': cursor}) +def test_longpoll_delta_newitem(db, api_client, default_namespace, thread): + cursor = get_cursor(api_client, int(time.time() + 22), default_namespace) + url = url_concat("/delta/longpoll", {"cursor": cursor}) start_time = time.time() # Spawn the request in background greenlet longpoll_greenlet = Greenlet.spawn(api_client.get_raw, url) # This should make it return immediately - add_fake_message(db.session, default_namespace.id, thread, - from_addr=[('Bob', 'bob@foocorp.com')]) + add_fake_message( + db.session, default_namespace.id, thread, from_addr=[("Bob", "bob@foocorp.com")] + ) longpoll_greenlet.join() # now block and wait end_time = time.time() assert end_time - start_time < LONGPOLL_EPSILON parsed_responses = json.loads(longpoll_greenlet.value.data) - assert len(parsed_responses['deltas']) == 3 - assert set(k['object'] for k in parsed_responses['deltas']) == \ - set([u'message', u'contact', u'thread']) + assert len(parsed_responses["deltas"]) == 3 + assert set(k["object"] for k in parsed_responses["deltas"]) == set( + [u"message", u"contact", u"thread"] + ) -def test_longpoll_delta_timeout(db, api_client, - default_namespace): +def test_longpoll_delta_timeout(db, api_client, default_namespace): test_timeout = 2 - cursor = get_cursor(api_client, int(time.time() + 22), - default_namespace) - url = url_concat('/delta/longpoll', {'timeout': test_timeout, - 'cursor': cursor}) + cursor = get_cursor(api_client, int(time.time() + 22), default_namespace) + url = url_concat("/delta/longpoll", {"timeout": test_timeout, "cursor": cursor}) start_time = time.time() resp = api_client.get_raw(url) end_time = time.time() @@ -162,7 +159,7 @@ def test_longpoll_delta_timeout(db, api_client, assert end_time - start_time - test_timeout < GEVENT_EPSILON parsed_responses = json.loads(resp.data) - assert len(parsed_responses['deltas']) == 0 - assert type(parsed_responses['deltas']) == list - assert parsed_responses['cursor_start'] == cursor - assert parsed_responses['cursor_end'] == cursor + assert len(parsed_responses["deltas"]) == 0 + assert type(parsed_responses["deltas"]) == list + assert parsed_responses["cursor_start"] == cursor + assert parsed_responses["cursor_end"] == cursor diff --git a/inbox/test/api/test_threads.py b/inbox/test/api/test_threads.py index ae44d73a4..21102beb0 100644 --- a/inbox/test/api/test_threads.py +++ b/inbox/test/api/test_threads.py @@ -2,11 +2,10 @@ import datetime import pytest from inbox.api.ns_api import API_VERSIONS -from inbox.test.util.base import (add_fake_message, default_account, - add_fake_thread, db) +from inbox.test.util.base import add_fake_message, default_account, add_fake_thread, db from inbox.test.api.base import api_client -__all__ = ['db', 'api_client', 'default_account'] +__all__ = ["db", "api_client", "default_account"] def test_thread_received_recent_date(db, api_client, default_account): @@ -17,29 +16,44 @@ def test_thread_received_recent_date(db, api_client, default_account): date_dict = dict() - add_fake_message(db.session, default_account.namespace.id, thread1, - subject="Test Thread 1", 
received_date=date1, - add_sent_category=True) - add_fake_message(db.session, default_account.namespace.id, thread1, - subject="Test Thread 1", received_date=date2) + add_fake_message( + db.session, + default_account.namespace.id, + thread1, + subject="Test Thread 1", + received_date=date1, + add_sent_category=True, + ) + add_fake_message( + db.session, + default_account.namespace.id, + thread1, + subject="Test Thread 1", + received_date=date2, + ) date_dict["Test Thread 1"] = date2 thread2 = add_fake_thread(db.session, default_account.namespace.id) - add_fake_message(db.session, default_account.namespace.id, thread2, - subject="Test Thread 2", received_date=date1, - add_sent_category=True) + add_fake_message( + db.session, + default_account.namespace.id, + thread2, + subject="Test Thread 2", + received_date=date1, + add_sent_category=True, + ) date_dict["Test Thread 2"] = date1 - resp = api_client.get_raw('/threads/') + resp = api_client.get_raw("/threads/") assert resp.status_code == 200 threads = json.loads(resp.data) for thread in threads: - assert date_dict[thread['subject']] == \ - datetime.datetime.fromtimestamp( - thread['last_message_received_timestamp']) + assert date_dict[thread["subject"]] == datetime.datetime.fromtimestamp( + thread["last_message_received_timestamp"] + ) def test_thread_sent_recent_date(db, api_client, default_account): @@ -53,26 +67,53 @@ def test_thread_sent_recent_date(db, api_client, default_account): test_subject = "test_thread_sent_recent_date" - add_fake_message(db.session, default_account.namespace.id, thread1, - subject=test_subject, received_date=date1) - add_fake_message(db.session, default_account.namespace.id, thread1, - subject=test_subject, received_date=date2, - add_sent_category=True) - add_fake_message(db.session, default_account.namespace.id, thread1, - subject=test_subject, received_date=date3) - add_fake_message(db.session, default_account.namespace.id, thread1, - subject=test_subject, received_date=date4, - add_sent_category=True) - add_fake_message(db.session, default_account.namespace.id, thread1, - subject=test_subject, received_date=date5) - - resp = api_client.get_raw('/threads/') + add_fake_message( + db.session, + default_account.namespace.id, + thread1, + subject=test_subject, + received_date=date1, + ) + add_fake_message( + db.session, + default_account.namespace.id, + thread1, + subject=test_subject, + received_date=date2, + add_sent_category=True, + ) + add_fake_message( + db.session, + default_account.namespace.id, + thread1, + subject=test_subject, + received_date=date3, + ) + add_fake_message( + db.session, + default_account.namespace.id, + thread1, + subject=test_subject, + received_date=date4, + add_sent_category=True, + ) + add_fake_message( + db.session, + default_account.namespace.id, + thread1, + subject=test_subject, + received_date=date5, + ) + + resp = api_client.get_raw("/threads/") assert resp.status_code == 200 threads = json.loads(resp.data) for thread in threads: # should only be one - assert datetime.datetime.fromtimestamp( - thread['last_message_sent_timestamp']) == date2 + assert ( + datetime.datetime.fromtimestamp(thread["last_message_sent_timestamp"]) + == date2 + ) def test_thread_count(db, api_client, default_account): @@ -88,63 +129,92 @@ def test_thread_count(db, api_client, default_account): test_subject = "test_thread_view_count_with_category" for thread in [thread1, thread2]: - add_fake_message(db.session, default_account.namespace.id, thread, - subject=test_subject, received_date=date1) - 
add_fake_message(db.session, default_account.namespace.id, thread, - subject=test_subject, received_date=date2, - add_sent_category=True) - add_fake_message(db.session, default_account.namespace.id, thread, - subject=test_subject, received_date=date3) - add_fake_message(db.session, default_account.namespace.id, thread, - subject=test_subject, received_date=date4, - add_sent_category=True) - add_fake_message(db.session, default_account.namespace.id, thread, - subject=test_subject, received_date=date5) - - resp = api_client.get_raw('/threads/?view=count&in=sent') + add_fake_message( + db.session, + default_account.namespace.id, + thread, + subject=test_subject, + received_date=date1, + ) + add_fake_message( + db.session, + default_account.namespace.id, + thread, + subject=test_subject, + received_date=date2, + add_sent_category=True, + ) + add_fake_message( + db.session, + default_account.namespace.id, + thread, + subject=test_subject, + received_date=date3, + ) + add_fake_message( + db.session, + default_account.namespace.id, + thread, + subject=test_subject, + received_date=date4, + add_sent_category=True, + ) + add_fake_message( + db.session, + default_account.namespace.id, + thread, + subject=test_subject, + received_date=date5, + ) + + resp = api_client.get_raw("/threads/?view=count&in=sent") assert resp.status_code == 200 threads = json.loads(resp.data) - assert threads['count'] == 2 + assert threads["count"] == 2 -@pytest.mark.skipif(True, reason='Need to investigate') +@pytest.mark.skipif(True, reason="Need to investigate") @pytest.mark.parametrize("api_version", API_VERSIONS) -def test_thread_label_updates(db, api_client, default_account, api_version, - custom_label): +def test_thread_label_updates( + db, api_client, default_account, api_version, custom_label +): """Check that you can update a message (optimistically or not), and that the update is queued in the ActionLog.""" headers = dict() - headers['Api-Version'] = api_version + headers["Api-Version"] = api_version # Gmail threads, messages have a 'labels' field gmail_thread = add_fake_thread(db.session, default_account.namespace.id) - gmail_message = add_fake_message(db.session, - default_account.namespace.id, gmail_thread) + gmail_message = add_fake_message( + db.session, default_account.namespace.id, gmail_thread + ) resp_data = api_client.get_data( - '/threads/{}'.format(gmail_thread.public_id), headers=headers) + "/threads/{}".format(gmail_thread.public_id), headers=headers + ) - assert resp_data['labels'] == [] + assert resp_data["labels"] == [] category = custom_label.category update = dict(labels=[category.public_id]) resp = api_client.put_data( - '/threads/{}'.format(gmail_thread.public_id), update, - headers=headers) + "/threads/{}".format(gmail_thread.public_id), update, headers=headers + ) resp_data = json.loads(resp.data) if api_version == API_VERSIONS[0]: - assert len(resp_data['labels']) == 1 - assert resp_data['labels'][0]['id'] == category.public_id + assert len(resp_data["labels"]) == 1 + assert resp_data["labels"][0]["id"] == category.public_id # Also check that the label got added to the message. 
resp_data = api_client.get_data( - '/messages/{}'.format(gmail_message.public_id), headers=headers) + "/messages/{}".format(gmail_message.public_id), headers=headers + ) - assert len(resp_data['labels']) == 1 - assert resp_data['labels'][0]['id'] == category.public_id + assert len(resp_data["labels"]) == 1 + assert resp_data["labels"][0]["id"] == category.public_id else: - assert resp_data['labels'] == [] + assert resp_data["labels"] == [] diff --git a/inbox/test/api/test_validation.py b/inbox/test/api/test_validation.py index 8600b2efc..c547776ff 100644 --- a/inbox/test/api/test_validation.py +++ b/inbox/test/api/test_validation.py @@ -8,86 +8,98 @@ from inbox.test.util.base import db, calendar, add_fake_event from inbox.test.api.base import api_client -__all__ = ['api_client', 'db', 'calendar'] +__all__ = ["api_client", "db", "calendar"] # TODO(emfree): Add more comprehensive parameter-validation tests. -@pytest.mark.skipif(True, reason='Need to investigate') +@pytest.mark.skipif(True, reason="Need to investigate") def test_account_validation(api_client, db, default_namespace): - draft = { - 'body': '

Sea, birds and sand.' - } + draft = {"body": "Sea, birds and sand.
"} - r = api_client.post_data('/drafts', draft) + r = api_client.post_data("/drafts", draft) assert r.status_code == 200 - namespace_id = json.loads(r.data)['account_id'] - account = db.session.query(Namespace).filter( - Namespace.public_id == namespace_id).first().account + namespace_id = json.loads(r.data)["account_id"] + account = ( + db.session.query(Namespace) + .filter(Namespace.public_id == namespace_id) + .first() + .account + ) - account.sync_state = 'invalid' + account.sync_state = "invalid" db.session.commit() - r = api_client.post_data('/drafts', draft) + r = api_client.post_data("/drafts", draft) assert r.status_code == 403 def test_noop_event_update(db, default_namespace, calendar): - event = add_fake_event(db.session, default_namespace.id, - calendar=calendar, - read_only=True) + event = add_fake_event( + db.session, default_namespace.id, calendar=calendar, read_only=True + ) - event.title = 'Test event' - event.participants = [{'email': 'helena@nylas.com'}, - {'email': 'benb@nylas.com'}] + event.title = "Test event" + event.participants = [{"email": "helena@nylas.com"}, {"email": "benb@nylas.com"}] assert noop_event_update(event, {}) is True - update = {'title': 'Test event'} + update = {"title": "Test event"} assert noop_event_update(event, update) is True - update = {'title': 'Different'} + update = {"title": "Different"} assert noop_event_update(event, update) is False - update = {'location': 'Different'} + update = {"location": "Different"} assert noop_event_update(event, update) is False - update = {'description': 'Different'} + update = {"description": "Different"} assert noop_event_update(event, update) is False - update = {'when': {'start_time': 123453453, 'end_time': 1231231}} + update = {"when": {"start_time": 123453453, "end_time": 1231231}} assert noop_event_update(event, update) is False - event.when = {'start_time': 123453453, 'end_time': 1231231} - update = {'when': {'start_time': 123453453, 'end_time': 1231231}} + event.when = {"start_time": 123453453, "end_time": 1231231} + update = {"when": {"start_time": 123453453, "end_time": 1231231}} assert noop_event_update(event, update) is True - update = {'participants': [{'email': 'benb@nylas.com'}, - {'email': 'helena@nylas.com'}]} + update = { + "participants": [{"email": "benb@nylas.com"}, {"email": "helena@nylas.com"}] + } assert noop_event_update(event, update) is True - update = {'participants': [{'email': 'benb@nylas.com', 'status': 'yes'}, - {'email': 'helena@nylas.com'}]} + update = { + "participants": [ + {"email": "benb@nylas.com", "status": "yes"}, + {"email": "helena@nylas.com"}, + ] + } assert noop_event_update(event, update) is False - event.participants = [{'email': 'benb@nylas.com', 'status': 'yes'}, - {'email': 'helena@nylas.com'}] - update = {'participants': [{'email': 'benb@nylas.com', 'status': 'yes'}, - {'email': 'helena@nylas.com'}]} + event.participants = [ + {"email": "benb@nylas.com", "status": "yes"}, + {"email": "helena@nylas.com"}, + ] + update = { + "participants": [ + {"email": "benb@nylas.com", "status": "yes"}, + {"email": "helena@nylas.com"}, + ] + } assert noop_event_update(event, update) is True -@pytest.mark.skipif(True, reason='Need to investigate') +@pytest.mark.skipif(True, reason="Need to investigate") def test_valid_email(): - assert valid_email('karim@nylas.com') is True - assert valid_email('karim nylas.com') is False + assert valid_email("karim@nylas.com") is True + assert valid_email("karim nylas.com") is False # We want email addresses, not full addresses - assert 
valid_email('Helena Handbasket ') is False - assert valid_email('le roi de la montagne') is False - assert valid_email('le roi de la montagne@example.com') is False - assert valid_email('le-roi-de-la-montagne@example.com') is True - assert valid_email('le_roi_de_la_montagne@example.com') is True - assert valid_email('spaces with@example.com') is False + assert valid_email("Helena Handbasket ") is False + assert valid_email("le roi de la montagne") is False + assert valid_email("le roi de la montagne@example.com") is False + assert valid_email("le-roi-de-la-montagne@example.com") is True + assert valid_email("le_roi_de_la_montagne@example.com") is True + assert valid_email("spaces with@example.com") is False diff --git a/inbox/test/api/test_views.py b/inbox/test/api/test_views.py index 09e527f38..64f5a3582 100644 --- a/inbox/test/api/test_views.py +++ b/inbox/test/api/test_views.py @@ -3,29 +3,48 @@ from inbox.test.api.base import api_client, new_api_client from inbox.test.util.base import generic_account -__all__ = ['api_client', 'generic_account'] +__all__ = ["api_client", "generic_account"] # Label views should only work for Gmail accounts. folders 404 -@pytest.mark.parametrize('resource_name', - ['messages', 'drafts', 'files', 'events', - 'folders', 'labels', 'calendars', 'contacts']) -def test_resource_views(resource_name, db, api_client, generic_account, - message, thread, event, label, contact, folder): +@pytest.mark.parametrize( + "resource_name", + [ + "messages", + "drafts", + "files", + "events", + "folders", + "labels", + "calendars", + "contacts", + ], +) +def test_resource_views( + resource_name, + db, + api_client, + generic_account, + message, + thread, + event, + label, + contact, + folder, +): """Exercises various tests for views, mostly related to filtering. Note: this only tests views, it assumes the resources are working as expected.""" # Folders don't work with GMail accounts, need generic IMAP - if resource_name == 'folders': + if resource_name == "folders": api_client = new_api_client(db, generic_account.namespace) - elements = api_client.get_data('/{}'.format(resource_name)) - count = api_client.get_data('/{}?view=count'.format(resource_name)) + elements = api_client.get_data("/{}".format(resource_name)) + count = api_client.get_data("/{}?view=count".format(resource_name)) assert count["count"] == len(elements) - ids = api_client.get_data('/{}?view=ids'.format(resource_name)) + ids = api_client.get_data("/{}?view=ids".format(resource_name)) for i, elem in enumerate(elements): - assert isinstance(ids[i], basestring), \ - "&views=ids should return string" + assert isinstance(ids[i], basestring), "&views=ids should return string" assert elem["id"] == ids[i], "view=ids should preserve order" diff --git a/inbox/test/auth/__init__.py b/inbox/test/auth/__init__.py index f5d7f9802..5fd0c57dd 100644 --- a/inbox/test/auth/__init__.py +++ b/inbox/test/auth/__init__.py @@ -1,3 +1,4 @@ # Allow out-of-tree submodules. from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) diff --git a/inbox/test/auth/providers/__init__.py b/inbox/test/auth/providers/__init__.py index f5d7f9802..5fd0c57dd 100644 --- a/inbox/test/auth/providers/__init__.py +++ b/inbox/test/auth/providers/__init__.py @@ -1,3 +1,4 @@ # Allow out-of-tree submodules. 
from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) diff --git a/inbox/test/auth/providers/mock_gmail.py b/inbox/test/auth/providers/mock_gmail.py index 80eeebecc..3e24f748a 100644 --- a/inbox/test/auth/providers/mock_gmail.py +++ b/inbox/test/auth/providers/mock_gmail.py @@ -14,14 +14,19 @@ from inbox.models.backends.gmail import GmailAccount from inbox.auth.gmail import GmailAuthHandler -from inbox.basicauth import (OAuthError, UserRecoverableConfigError, - GmailSettingError, ImapSupportDisabledError) +from inbox.basicauth import ( + OAuthError, + UserRecoverableConfigError, + GmailSettingError, + ImapSupportDisabledError, +) from nylas.logging import get_logger + log = get_logger() -PROVIDER = 'gmail' # Uses the default gmail provider info from providers.py -AUTH_HANDLER_CLS = 'MockGmailAuthHandler' +PROVIDER = "gmail" # Uses the default gmail provider info from providers.py +AUTH_HANDLER_CLS = "MockGmailAuthHandler" def raise_setting_error(folder): @@ -37,15 +42,14 @@ def raise_oauth_error(e): fake_responses = { - 'no_all_mail': raise_setting_error, - 'no_trash': raise_setting_error, - 'oauth_fail': raise_oauth_error, - 'imap_disabled': raise_imap_disabled_error + "no_all_mail": raise_setting_error, + "no_trash": raise_setting_error, + "oauth_fail": raise_oauth_error, + "imap_disabled": raise_imap_disabled_error, } class MockGmailAuthHandler(GmailAuthHandler): - def create_account(self, email_address, response): # Override create_account to persist the 'login hint' email_address # rather than the canonical email that is contained in response. diff --git a/inbox/test/auth/test_generic_auth.py b/inbox/test/auth/test_generic_auth.py index 50f44664a..52a7b583d 100644 --- a/inbox/test/auth/test_generic_auth.py +++ b/inbox/test/auth/test_generic_auth.py @@ -11,33 +11,33 @@ settings = { - 'provider': 'custom', - 'settings': { - 'name': 'MyAOL', - 'email': 'benbitdit@aol.com', - 'imap_server_host': 'imap.aol.com', - 'imap_server_port': 143, - 'imap_username': 'benbitdit@aol.com', - 'imap_password': 'IHate2Gmail', - 'smtp_server_host': 'smtp.aol.com', - 'smtp_server_port': 587, - 'smtp_username': 'benbitdit@aol.com', - 'smtp_password': 'IHate2Gmail' - } + "provider": "custom", + "settings": { + "name": "MyAOL", + "email": "benbitdit@aol.com", + "imap_server_host": "imap.aol.com", + "imap_server_port": 143, + "imap_username": "benbitdit@aol.com", + "imap_password": "IHate2Gmail", + "smtp_server_host": "smtp.aol.com", + "smtp_server_port": 587, + "smtp_username": "benbitdit@aol.com", + "smtp_password": "IHate2Gmail", + }, } def test_create_account(db): - email = settings['settings']['email'] - imap_host = settings['settings']['imap_server_host'] - imap_port = settings['settings']['imap_server_port'] - smtp_host = settings['settings']['smtp_server_host'] - smtp_port = settings['settings']['smtp_server_port'] + email = settings["settings"]["email"] + imap_host = settings["settings"]["imap_server_host"] + imap_port = settings["settings"]["imap_server_port"] + smtp_host = settings["settings"]["smtp_server_host"] + smtp_port = settings["settings"]["smtp_server_port"] - handler = GenericAuthHandler(settings['provider']) + handler = GenericAuthHandler(settings["provider"]) # Create an authenticated account - account = handler.create_account(email, settings['settings']) + account = handler.create_account(email, settings["settings"]) db.session.add(account) db.session.commit() # Verify its settings @@ -47,39 +47,39 @@ def test_create_account(db): assert account.smtp_endpoint == 
(smtp_host, smtp_port) # Ensure that the emailed events calendar was created assert account._emailed_events_calendar is not None - assert account._emailed_events_calendar.name == 'Emailed events' + assert account._emailed_events_calendar.name == "Emailed events" -@pytest.mark.skipif(True, reason='Need to investigate') +@pytest.mark.skipif(True, reason="Need to investigate") def test_update_account(db): - email = settings['settings']['email'] - imap_host = settings['settings']['imap_server_host'] - imap_port = settings['settings']['imap_server_port'] - smtp_host = settings['settings']['smtp_server_host'] - smtp_port = settings['settings']['smtp_server_port'] + email = settings["settings"]["email"] + imap_host = settings["settings"]["imap_server_host"] + imap_port = settings["settings"]["imap_server_port"] + smtp_host = settings["settings"]["smtp_server_host"] + smtp_port = settings["settings"]["smtp_server_port"] - handler = GenericAuthHandler(settings['provider']) + handler = GenericAuthHandler(settings["provider"]) # Create an authenticated account - account = handler.create_account(email, settings['settings']) + account = handler.create_account(email, settings["settings"]) db.session.add(account) db.session.commit() id_ = account.id # A valid update updated_settings = copy.deepcopy(settings) - updated_settings['settings']['name'] = 'Neu!' - account = handler.update_account(account, updated_settings['settings']) + updated_settings["settings"]["name"] = "Neu!" + account = handler.update_account(account, updated_settings["settings"]) db.session.add(account) db.session.commit() account = db.session.query(Account).get(id_) - assert account.name == 'Neu!' + assert account.name == "Neu!" # Invalid updates for (attr, value, updated_settings) in generate_endpoint_updates(settings): - assert value in updated_settings['settings'].values() + assert value in updated_settings["settings"].values() with pytest.raises(SettingUpdateError): - account = handler.update_account(account, updated_settings['settings']) + account = handler.update_account(account, updated_settings["settings"]) db.session.add(account) db.session.commit() @@ -98,14 +98,14 @@ def test_update_account_with_different_subdomain(db, monkeypatch): # To test this we use Microsoft's Office365 setup, which # has mail.office365.com and outlook.office365.com point to # the same address. 
- email = settings['settings']['email'] - settings['settings']['imap_server_host'] = 'outlook.office365.com' - settings['settings']['smtp_server_host'] = 'outlook.office365.com' + email = settings["settings"]["email"] + settings["settings"]["imap_server_host"] = "outlook.office365.com" + settings["settings"]["smtp_server_host"] = "outlook.office365.com" - handler = GenericAuthHandler(settings['provider']) + handler = GenericAuthHandler(settings["provider"]) # Create an authenticated account - account = handler.create_account(email, settings['settings']) + account = handler.create_account(email, settings["settings"]) db.session.add(account) db.session.commit() id_ = account.id @@ -113,32 +113,32 @@ def test_update_account_with_different_subdomain(db, monkeypatch): def gethostbyname_patch(x): return "127.0.0.1" - monkeypatch.setattr(socket, 'gethostbyname', gethostbyname_patch) + monkeypatch.setattr(socket, "gethostbyname", gethostbyname_patch) # A valid update updated_settings = copy.deepcopy(settings) - updated_settings['settings']['imap_server_host'] = 'mail.office365.com' - updated_settings['settings']['smtp_server_host'] = 'mail.office365.com' - updated_settings['settings']['name'] = 'Neu!' - account = handler.update_account(account, updated_settings['settings']) + updated_settings["settings"]["imap_server_host"] = "mail.office365.com" + updated_settings["settings"]["smtp_server_host"] = "mail.office365.com" + updated_settings["settings"]["name"] = "Neu!" + account = handler.update_account(account, updated_settings["settings"]) db.session.add(account) db.session.commit() account = db.session.query(Account).get(id_) - assert account.name == 'Neu!' - assert account._imap_server_host == 'mail.office365.com' - assert account._smtp_server_host == 'mail.office365.com' + assert account.name == "Neu!" + assert account._imap_server_host == "mail.office365.com" + assert account._smtp_server_host == "mail.office365.com" def test_update_account_when_no_server_provided(db): - email = settings['settings']['email'] - imap_host = settings['settings']['imap_server_host'] - imap_port = settings['settings']['imap_server_port'] - smtp_host = settings['settings']['smtp_server_host'] - smtp_port = settings['settings']['smtp_server_port'] + email = settings["settings"]["email"] + imap_host = settings["settings"]["imap_server_host"] + imap_port = settings["settings"]["imap_server_port"] + smtp_host = settings["settings"]["smtp_server_host"] + smtp_port = settings["settings"]["smtp_server_port"] - handler = GenericAuthHandler(settings['provider']) + handler = GenericAuthHandler(settings["provider"]) - account = handler.create_account(email, settings['settings']) + account = handler.create_account(email, settings["settings"]) # On successful auth, the account's imap_server is stored. 
db.session.add(account) db.session.commit() @@ -151,10 +151,10 @@ def test_update_account_when_no_server_provided(db): account = db.session.query(Account).get(id_) updated_settings = copy.deepcopy(settings) - del updated_settings['settings']['imap_server_host'] - del updated_settings['settings']['smtp_server_host'] + del updated_settings["settings"]["imap_server_host"] + del updated_settings["settings"]["smtp_server_host"] - account = handler.update_account(account, updated_settings['settings']) + account = handler.update_account(account, updated_settings["settings"]) db.session.add(account) db.session.commit() account = db.session.query(Account).get(id_) @@ -173,9 +173,9 @@ def test_update_account_when_no_server_provided(db): # if not provided. db.session.expire(account) account = db.session.query(Account).get(id_) - updated_settings['settings']['imap_server_host'] = u'' - updated_settings['settings']['smtp_server_host'] = u'' - account = handler.update_account(account, updated_settings['settings']) + updated_settings["settings"]["imap_server_host"] = u"" + updated_settings["settings"]["smtp_server_host"] = u"" + account = handler.update_account(account, updated_settings["settings"]) db.session.add(account) db.session.commit() account = db.session.query(Account).get(id_) @@ -188,26 +188,27 @@ def test_update_account_when_no_server_provided(db): assert acc_smtp_port == smtp_port -@pytest.mark.usefixtures('mock_smtp_get_connection') +@pytest.mark.usefixtures("mock_smtp_get_connection") def test_double_auth(db, mock_imapclient): settings = { - 'provider': 'yahoo', - 'settings': { - 'name': 'Y.Y!', - 'locale': 'fr', - 'email': 'benbitdiddle1861@yahoo.com', - 'password': 'EverybodyLovesIMAPv4'} + "provider": "yahoo", + "settings": { + "name": "Y.Y!", + "locale": "fr", + "email": "benbitdiddle1861@yahoo.com", + "password": "EverybodyLovesIMAPv4", + }, } - email = settings['settings']['email'] - password = settings['settings']['password'] + email = settings["settings"]["email"] + password = settings["settings"]["password"] mock_imapclient._add_login(email, password) - handler = GenericAuthHandler(settings['provider']) + handler = GenericAuthHandler(settings["provider"]) # First authentication, using a valid password, succeeds. valid_settings = copy.deepcopy(settings) - account = handler.create_account(email, valid_settings['settings']) + account = handler.create_account(email, valid_settings["settings"]) assert handler.verify_account(account) is True db.session.add(account) @@ -223,9 +224,9 @@ def test_double_auth(db, mock_imapclient): # Second auth using an invalid password should fail. 
invalid_settings = copy.deepcopy(settings) - invalid_settings['settings']['password'] = 'invalid_password' + invalid_settings["settings"]["password"] = "invalid_password" with pytest.raises(ValidationError): - account = handler.update_account(account, invalid_settings['settings']) + account = handler.update_account(account, invalid_settings["settings"]) handler.verify_account(account) db.session.expire(account) @@ -241,33 +242,34 @@ def test_double_auth(db, mock_imapclient): def test_parent_domain(): - assert parent_domain('x.a.com') == 'a.com' - assert parent_domain('a.com') == 'a.com' - assert parent_domain('.com') == '' - assert parent_domain('test.google.com') == 'google.com' + assert parent_domain("x.a.com") == "a.com" + assert parent_domain("a.com") == "a.com" + assert parent_domain(".com") == "" + assert parent_domain("test.google.com") == "google.com" - assert parent_domain('smtp.example.a.com') == parent_domain('imap.example.a.com') - assert parent_domain('smtp.example.a.com') == parent_domain('imap.a.com') + assert parent_domain("smtp.example.a.com") == parent_domain("imap.example.a.com") + assert parent_domain("smtp.example.a.com") == parent_domain("imap.a.com") - assert parent_domain('company.co.uk') != parent_domain('evilcompany.co.uk') + assert parent_domain("company.co.uk") != parent_domain("evilcompany.co.uk") -@pytest.mark.usefixtures('mock_smtp_get_connection') +@pytest.mark.usefixtures("mock_smtp_get_connection") def test_successful_reauth_resets_sync_state(db, mock_imapclient): settings = { - 'provider': 'yahoo', - 'settings': { - 'name': 'Y.Y!', - 'locale': 'fr', - 'email': 'benbitdiddle1861@yahoo.com', - 'password': 'EverybodyLovesIMAPv4'} + "provider": "yahoo", + "settings": { + "name": "Y.Y!", + "locale": "fr", + "email": "benbitdiddle1861@yahoo.com", + "password": "EverybodyLovesIMAPv4", + }, } - email = settings['settings']['email'] - password = settings['settings']['password'] + email = settings["settings"]["email"] + password = settings["settings"]["password"] mock_imapclient._add_login(email, password) - handler = GenericAuthHandler(settings['provider']) + handler = GenericAuthHandler(settings["provider"]) - account = handler.create_account(email, settings['settings']) + account = handler.create_account(email, settings["settings"]) assert handler.verify_account(account) is True # Brand new accounts have `sync_state`=None. assert account.sync_state is None @@ -278,20 +280,20 @@ def test_successful_reauth_resets_sync_state(db, mock_imapclient): # causing the account to be in `sync_state`='invalid'. account.mark_invalid() db.session.commit() - assert account.sync_state == 'invalid' + assert account.sync_state == "invalid" # Verify the `sync_state` is reset to 'running' on a successful "re-auth". 
- account = handler.update_account(account, settings['settings']) + account = handler.update_account(account, settings["settings"]) assert handler.verify_account(account) is True - assert account.sync_state == 'running' + assert account.sync_state == "running" db.session.add(account) db.session.commit() def generate_endpoint_updates(settings): - for key in ('imap_server_host', 'smtp_server_host'): - attr = '_{}'.format(key) - value = 'I.am.Malicious.{}'.format(key) + for key in ("imap_server_host", "smtp_server_host"): + attr = "_{}".format(key) + value = "I.am.Malicious.{}".format(key) updated_settings = copy.deepcopy(settings) - updated_settings['settings'][key] = value + updated_settings["settings"][key] = value yield (attr, value, updated_settings) diff --git a/inbox/test/auth/test_gmail_auth.py b/inbox/test/auth/test_gmail_auth.py index 8406d67e2..1c9abd46a 100644 --- a/inbox/test/auth/test_gmail_auth.py +++ b/inbox/test/auth/test_gmail_auth.py @@ -7,14 +7,16 @@ from inbox.auth.gmail import GmailAuthHandler from inbox.basicauth import ImapSupportDisabledError -settings = {'email': 't.est@gmail.com', - 'name': 'T.Est', - 'refresh_token': 'MyRefreshToken', - 'scope': '', - 'id_token': '', - 'sync_email': True, - 'contacts': False, - 'events': True} +settings = { + "email": "t.est@gmail.com", + "name": "T.Est", + "refresh_token": "MyRefreshToken", + "scope": "", + "id_token": "", + "sync_email": True, + "contacts": False, + "events": True, +} @pytest.fixture @@ -22,55 +24,54 @@ def patched_gmail_client(monkeypatch): def raise_exc(*args, **kwargs): raise ImapSupportDisabledError() - monkeypatch.setattr('inbox.crispin.GmailCrispinClient.__init__', - raise_exc) + monkeypatch.setattr("inbox.crispin.GmailCrispinClient.__init__", raise_exc) def test_create_account(db): - handler = GmailAuthHandler('gmail') + handler = GmailAuthHandler("gmail") # Create an account - account = handler.create_account(settings['email'], settings) + account = handler.create_account(settings["email"], settings) db.session.add(account) db.session.commit() # Verify its settings id_ = account.id account = db.session.query(Account).get(id_) - assert account.email_address == settings['email'] - assert account.name == settings['name'] - assert account.sync_email == settings['sync_email'] - assert account.sync_contacts == settings['contacts'] - assert account.sync_events == settings['events'] + assert account.email_address == settings["email"] + assert account.name == settings["name"] + assert account.sync_email == settings["sync_email"] + assert account.sync_contacts == settings["contacts"] + assert account.sync_events == settings["events"] # Ensure that the emailed events calendar was created assert account._emailed_events_calendar is not None - assert account._emailed_events_calendar.name == 'Emailed events' + assert account._emailed_events_calendar.name == "Emailed events" def test_update_account(db): - handler = GmailAuthHandler('gmail') + handler = GmailAuthHandler("gmail") # Create an account - account = handler.create_account(settings['email'], settings) + account = handler.create_account(settings["email"], settings) db.session.add(account) db.session.commit() id_ = account.id # Verify it is updated correctly. updated_settings = copy.deepcopy(settings) - updated_settings['name'] = 'Neu!' + updated_settings["name"] = "Neu!" account = handler.update_account(account, updated_settings) db.session.add(account) db.session.commit() account = db.session.query(Account).get(id_) - assert account.name == 'Neu!' 
+ assert account.name == "Neu!" def test_verify_account(db, patched_gmail_client): - handler = GmailAuthHandler('gmail') + handler = GmailAuthHandler("gmail") handler.connect_account = lambda account: None # Create an account with sync_email=True - account = handler.create_account(settings['email'], settings) + account = handler.create_account(settings["email"], settings) db.session.add(account) db.session.commit() assert account.sync_email is True @@ -80,9 +81,9 @@ def test_verify_account(db, patched_gmail_client): # Create an account with sync_email=True updated_settings = copy.deepcopy(settings) - updated_settings['email'] = 'another@gmail.com' - updated_settings['sync_email'] = False - account = handler.create_account(updated_settings['email'], updated_settings) + updated_settings["email"] = "another@gmail.com" + updated_settings["sync_email"] = False + account = handler.create_account(updated_settings["email"], updated_settings) db.session.add(account) db.session.commit() assert account.sync_email is False @@ -91,11 +92,11 @@ def test_verify_account(db, patched_gmail_client): def test_successful_reauth_resets_sync_state(monkeypatch, db): - monkeypatch.setattr('inbox.auth.gmail.GmailCrispinClient', mock.Mock()) - handler = GmailAuthHandler('gmail') + monkeypatch.setattr("inbox.auth.gmail.GmailCrispinClient", mock.Mock()) + handler = GmailAuthHandler("gmail") handler.connect_account = lambda account: mock.Mock() - account = handler.create_account(settings['email'], settings) + account = handler.create_account(settings["email"], settings) assert handler.verify_account(account) is True # Brand new accounts have `sync_state`=None. assert account.sync_state is None @@ -106,11 +107,11 @@ def test_successful_reauth_resets_sync_state(monkeypatch, db): # causing the account to be in `sync_state`='invalid'. account.mark_invalid() db.session.commit() - assert account.sync_state == 'invalid' + assert account.sync_state == "invalid" # Verify the `sync_state` is reset to 'running' on a successful "re-auth". 
account = handler.update_account(account, settings) assert handler.verify_account(account) is True - assert account.sync_state == 'running' + assert account.sync_state == "running" db.session.add(account) db.session.commit() diff --git a/inbox/test/auth/test_gmail_auth_credentials.py b/inbox/test/auth/test_gmail_auth_credentials.py index 3119c7bd0..dcd2bb25e 100644 --- a/inbox/test/auth/test_gmail_auth_credentials.py +++ b/inbox/test/auth/test_gmail_auth_credentials.py @@ -5,57 +5,62 @@ from inbox.auth.gmail import GmailAuthHandler from inbox.models.session import session_scope from inbox.models.account import Account -from inbox.models.backends.gmail import (GOOGLE_CALENDAR_SCOPE, - GOOGLE_CONTACTS_SCOPE, - GOOGLE_EMAIL_SCOPE, - GmailAccount) +from inbox.models.backends.gmail import ( + GOOGLE_CALENDAR_SCOPE, + GOOGLE_CONTACTS_SCOPE, + GOOGLE_EMAIL_SCOPE, + GmailAccount, +) from inbox.auth.gmail import g_token_manager from inbox.basicauth import OAuthError, ConnectionError SHARD_ID = 0 -ACCESS_TOKEN = 'this_is_an_access_token' +ACCESS_TOKEN = "this_is_an_access_token" @pytest.fixture def account_with_multiple_auth_creds(db): - email = 'test_account@localhost.com' - resp = {'access_token': '', - 'expires_in': 3600, - 'email': email, - 'family_name': '', - 'given_name': '', - 'name': '', - 'gender': '', - 'id': 0, - 'user_id': '', - 'id_token': '', - 'link': 'http://example.com', - 'locale': '', - 'picture': '', - 'hd': ''} - - all_scopes = ' '.join( - [GOOGLE_CALENDAR_SCOPE, GOOGLE_CONTACTS_SCOPE, GOOGLE_EMAIL_SCOPE]) + email = "test_account@localhost.com" + resp = { + "access_token": "", + "expires_in": 3600, + "email": email, + "family_name": "", + "given_name": "", + "name": "", + "gender": "", + "id": 0, + "user_id": "", + "id_token": "", + "link": "http://example.com", + "locale": "", + "picture": "", + "hd": "", + } + + all_scopes = " ".join( + [GOOGLE_CALENDAR_SCOPE, GOOGLE_CONTACTS_SCOPE, GOOGLE_EMAIL_SCOPE] + ) first_auth_args = { - 'refresh_token': 'refresh_token_1', - 'client_id': 'client_id_1', - 'client_secret': 'client_secret_1', - 'scope': all_scopes, - 'sync_contacts': True, - 'events': True + "refresh_token": "refresh_token_1", + "client_id": "client_id_1", + "client_secret": "client_secret_1", + "scope": all_scopes, + "sync_contacts": True, + "events": True, } second_auth_args = { - 'refresh_token': 'refresh_token_2', - 'client_id': 'client_id_2', - 'client_secret': 'client_secret_2', - 'scope': GOOGLE_EMAIL_SCOPE, - 'sync_contacts': False, - 'events': True + "refresh_token": "refresh_token_2", + "client_id": "client_id_2", + "client_secret": "client_secret_2", + "scope": GOOGLE_EMAIL_SCOPE, + "sync_contacts": False, + "events": True, } - g = GmailAuthHandler('gmail') + g = GmailAuthHandler("gmail") g.verify_config = lambda x: True resp.update(first_auth_args) @@ -73,30 +78,31 @@ def account_with_multiple_auth_creds(db): @pytest.fixture def account_with_single_auth_creds(db): - email = 'test_account2@localhost.com' - resp = {'access_token': '', - 'expires_in': 3600, - 'email': email, - 'family_name': '', - 'given_name': '', - 'name': '', - 'gender': '', - 'id': 0, - 'user_id': '', - 'id_token': '', - 'link': 'http://example.com', - 'locale': '', - 'picture': '', - 'hd': '', - 'refresh_token': 'refresh_token_3', - 'client_id': 'client_id_1', - 'client_secret': 'client_secret_1', - 'scope': ' '.join([GOOGLE_CALENDAR_SCOPE, GOOGLE_EMAIL_SCOPE]), - 'sync_contacts': False, - 'sync_events': True - } - - g = GmailAuthHandler('gmail') + email = "test_account2@localhost.com" + 
resp = { + "access_token": "", + "expires_in": 3600, + "email": email, + "family_name": "", + "given_name": "", + "name": "", + "gender": "", + "id": 0, + "user_id": "", + "id_token": "", + "link": "http://example.com", + "locale": "", + "picture": "", + "hd": "", + "refresh_token": "refresh_token_3", + "client_id": "client_id_1", + "client_secret": "client_secret_1", + "scope": " ".join([GOOGLE_CALENDAR_SCOPE, GOOGLE_EMAIL_SCOPE]), + "sync_contacts": False, + "sync_events": True, + } + + g = GmailAuthHandler("gmail") g.verify_config = lambda x: True account = g.get_account(SHARD_ID, email, resp) @@ -109,7 +115,6 @@ def account_with_single_auth_creds(db): @pytest.fixture def patch_access_token_getter(monkeypatch): class TokenGenerator: - def __init__(self): self.revoked_refresh_tokens = [] self.connection_error_tokens = [] @@ -129,13 +134,13 @@ def force_connection_errors(self, refresh_token): self.connection_error_tokens.append(refresh_token) token_generator = TokenGenerator() - monkeypatch.setattr('inbox.auth.oauth.OAuthAuthHandler.new_token', - token_generator.new_token) + monkeypatch.setattr( + "inbox.auth.oauth.OAuthAuthHandler.new_token", token_generator.new_token + ) return token_generator -def test_auth_revoke( - db, account_with_multiple_auth_creds, patch_access_token_getter): +def test_auth_revoke(db, account_with_multiple_auth_creds, patch_access_token_getter): account = account_with_multiple_auth_creds refresh_token1 = account.auth_credentials[0].refresh_token refresh_token2 = account.auth_credentials[1].refresh_token @@ -144,7 +149,7 @@ def test_auth_revoke( assert len(account.valid_auth_credentials) == 2 assert account.sync_contacts is True assert account.sync_events is True - assert account.sync_state != 'invalid' + assert account.sync_state != "invalid" assert account.sync_should_run is True patch_access_token_getter.revoke_refresh_token(refresh_token1) @@ -159,7 +164,7 @@ def test_auth_revoke( assert len(account.valid_auth_credentials) == 1 assert account.sync_contacts is False assert account.sync_events is False - assert account.sync_state != 'invalid' + assert account.sync_state != "invalid" assert account.sync_should_run is True patch_access_token_getter.revoke_refresh_token(refresh_token2) @@ -173,12 +178,13 @@ def test_auth_revoke( account.verify_all_credentials() assert len(account.auth_credentials) == 2 assert len(account.valid_auth_credentials) == 0 - assert account.sync_state == 'invalid' + assert account.sync_state == "invalid" assert account.sync_should_run is False def test_auth_revoke_different_order( - db, account_with_multiple_auth_creds, patch_access_token_getter): + db, account_with_multiple_auth_creds, patch_access_token_getter +): account = account_with_multiple_auth_creds refresh_token1 = account.auth_credentials[0].refresh_token refresh_token2 = account.auth_credentials[1].refresh_token @@ -187,7 +193,7 @@ def test_auth_revoke_different_order( assert len(account.valid_auth_credentials) == 2 assert account.sync_contacts is True assert account.sync_events is True - assert account.sync_state != 'invalid' + assert account.sync_state != "invalid" assert account.sync_should_run is True patch_access_token_getter.revoke_refresh_token(refresh_token2) @@ -199,7 +205,7 @@ def test_auth_revoke_different_order( assert len(account.auth_credentials) == 2 assert account.sync_contacts is True assert account.sync_events is True - assert account.sync_state != 'invalid' + assert account.sync_state != "invalid" assert account.sync_should_run is True assert 
len(account.valid_auth_credentials) == 1 @@ -216,41 +222,43 @@ def test_auth_revoke_different_order( assert len(account.valid_auth_credentials) == 0 assert account.sync_contacts is False assert account.sync_events is False - assert account.sync_state == 'invalid' + assert account.sync_state == "invalid" assert account.sync_should_run is False def test_create_account(db): - email = 'vault.test@localhost.com' - resp = {'access_token': '', - 'expires_in': 3600, - 'email': email, - 'family_name': '', - 'given_name': '', - 'name': '', - 'gender': '', - 'id': 0, - 'user_id': '', - 'id_token': '', - 'link': 'http://example.com', - 'locale': '', - 'picture': '', - 'hd': ''} - - g = GmailAuthHandler('gmail') + email = "vault.test@localhost.com" + resp = { + "access_token": "", + "expires_in": 3600, + "email": email, + "family_name": "", + "given_name": "", + "name": "", + "gender": "", + "id": 0, + "user_id": "", + "id_token": "", + "link": "http://example.com", + "locale": "", + "picture": "", + "hd": "", + } + + g = GmailAuthHandler("gmail") g.verify_config = lambda x: True # Auth me once... - token_1 = 'the_first_token' - client_id_1 = 'first client id' - client_secret_1 = 'first client secret' - scopes_1 = 'scope scop sco sc s' - scopes_1_list = scopes_1.split(' ') + token_1 = "the_first_token" + client_id_1 = "first client id" + client_secret_1 = "first client secret" + scopes_1 = "scope scop sco sc s" + scopes_1_list = scopes_1.split(" ") first_auth_args = { - 'refresh_token': token_1, - 'scope': scopes_1, - 'client_id': client_id_1, - 'client_secret': client_secret_1 + "refresh_token": token_1, + "scope": scopes_1, + "client_id": client_id_1, + "client_secret": client_secret_1, } resp.update(first_auth_args) @@ -260,8 +268,7 @@ def test_create_account(db): account_id = account.id with session_scope(account_id) as db_session: - account = db_session.query(Account).filter( - Account.email_address == email).one() + account = db_session.query(Account).filter(Account.email_address == email).one() assert account.id == account_id assert isinstance(account, GmailAccount) @@ -275,36 +282,38 @@ def test_create_account(db): def test_get_account(db): - email = 'vault.test@localhost.com' - resp = {'access_token': '', - 'expires_in': 3600, - 'email': email, - 'family_name': '', - 'given_name': '', - 'name': '', - 'gender': '', - 'id': 0, - 'user_id': '', - 'id_token': '', - 'link': 'http://example.com', - 'locale': '', - 'picture': '', - 'hd': ''} - - g = GmailAuthHandler('gmail') + email = "vault.test@localhost.com" + resp = { + "access_token": "", + "expires_in": 3600, + "email": email, + "family_name": "", + "given_name": "", + "name": "", + "gender": "", + "id": 0, + "user_id": "", + "id_token": "", + "link": "http://example.com", + "locale": "", + "picture": "", + "hd": "", + } + + g = GmailAuthHandler("gmail") g.verify_config = lambda x: True # Auth me once... 
- token_1 = 'the_first_token' - client_id_1 = 'first client id' - client_secret_1 = 'first client secret' - scopes_1 = 'scope scop sco sc s' - scopes_1_list = scopes_1.split(' ') + token_1 = "the_first_token" + client_id_1 = "first client id" + client_secret_1 = "first client secret" + scopes_1 = "scope scop sco sc s" + scopes_1_list = scopes_1.split(" ") first_auth_args = { - 'refresh_token': token_1, - 'scope': scopes_1, - 'client_id': client_id_1, - 'client_secret': client_secret_1 + "refresh_token": token_1, + "scope": scopes_1, + "client_id": client_id_1, + "client_secret": client_secret_1, } resp.update(first_auth_args) @@ -321,16 +330,16 @@ def test_get_account(db): assert auth_creds.refresh_token == token_1 # Auth me twice... - token_2 = 'second_token_!' - client_id_2 = 'second client id' - client_secret_2 = 'second client secret' - scopes_2 = 'scope scop sco sc s' - scopes_2_list = scopes_2.split(' ') + token_2 = "second_token_!" + client_id_2 = "second client id" + client_secret_2 = "second client secret" + scopes_2 = "scope scop sco sc s" + scopes_2_list = scopes_2.split(" ") second_auth_args = { - 'refresh_token': token_2, - 'scope': scopes_2, - 'client_id': client_id_2, - 'client_secret': client_secret_2 + "refresh_token": token_2, + "scope": scopes_2, + "client_id": client_id_2, + "client_secret": client_secret_2, } resp.update(second_auth_args) @@ -339,8 +348,10 @@ def test_get_account(db): db.session.commit() assert len(account.auth_credentials) == 2 - auth_creds = next((creds for creds in account.auth_credentials - if creds.refresh_token == token_2), False) + auth_creds = next( + (creds for creds in account.auth_credentials if creds.refresh_token == token_2), + False, + ) assert auth_creds assert auth_creds.client_id == client_id_2 assert auth_creds.client_secret == client_secret_2 @@ -349,7 +360,7 @@ def test_get_account(db): # Don't add duplicate row in GmailAuthCredentials for the same # client_id/client_secret pair. 
resp.update(first_auth_args) - resp['refresh_token'] = 'a new refresh token' + resp["refresh_token"] = "a new refresh token" account = g.get_account(SHARD_ID, email, resp) db.session.merge(account) db.session.commit() @@ -357,7 +368,7 @@ def test_get_account(db): assert len(account.auth_credentials) == 2 # Should still work okay if we don't get a refresh token back - del resp['refresh_token'] + del resp["refresh_token"] account = g.get_account(SHARD_ID, email, resp) db.session.merge(account) db.session.commit() @@ -366,21 +377,20 @@ def test_get_account(db): def test_g_token_manager( - db, patch_access_token_getter, - account_with_multiple_auth_creds, - account_with_single_auth_creds): + db, + patch_access_token_getter, + account_with_multiple_auth_creds, + account_with_single_auth_creds, +): account = account_with_multiple_auth_creds refresh_token1 = account.auth_credentials[0].refresh_token refresh_token2 = account.auth_credentials[1].refresh_token g_token_manager.clear_cache(account) # existing account w/ multiple credentials, all valid - assert (g_token_manager.get_token(account, GOOGLE_EMAIL_SCOPE) == - ACCESS_TOKEN) - assert (g_token_manager.get_token(account, GOOGLE_CONTACTS_SCOPE) == - ACCESS_TOKEN) - assert (g_token_manager.get_token(account, GOOGLE_CALENDAR_SCOPE) == - ACCESS_TOKEN) + assert g_token_manager.get_token(account, GOOGLE_EMAIL_SCOPE) == ACCESS_TOKEN + assert g_token_manager.get_token(account, GOOGLE_CONTACTS_SCOPE) == ACCESS_TOKEN + assert g_token_manager.get_token(account, GOOGLE_CALENDAR_SCOPE) == ACCESS_TOKEN for auth_creds in account.auth_credentials: assert auth_creds.is_valid @@ -391,8 +401,7 @@ def test_g_token_manager( with pytest.raises(OAuthError): g_token_manager.get_token(account, GOOGLE_CONTACTS_SCOPE) - assert (g_token_manager.get_token(account, GOOGLE_EMAIL_SCOPE) == - ACCESS_TOKEN) + assert g_token_manager.get_token(account, GOOGLE_EMAIL_SCOPE) == ACCESS_TOKEN with pytest.raises(OAuthError): g_token_manager.get_token(account, GOOGLE_CALENDAR_SCOPE) @@ -415,16 +424,15 @@ def test_g_token_manager( account = account_with_single_auth_creds g_token_manager.clear_cache(account) - assert (g_token_manager.get_token(account, GOOGLE_EMAIL_SCOPE) == - ACCESS_TOKEN) - assert (g_token_manager.get_token(account, GOOGLE_CALENDAR_SCOPE) == - ACCESS_TOKEN) + assert g_token_manager.get_token(account, GOOGLE_EMAIL_SCOPE) == ACCESS_TOKEN + assert g_token_manager.get_token(account, GOOGLE_CALENDAR_SCOPE) == ACCESS_TOKEN with pytest.raises(OAuthError): g_token_manager.get_token(account, GOOGLE_CONTACTS_SCOPE) def test_new_token_with_non_oauth_error( - db, patch_access_token_getter, account_with_multiple_auth_creds): + db, patch_access_token_getter, account_with_multiple_auth_creds +): account = account_with_multiple_auth_creds refresh_token1 = account.auth_credentials[0].refresh_token refresh_token2 = account.auth_credentials[1].refresh_token @@ -441,12 +449,14 @@ def test_new_token_with_non_oauth_error( assert len(account.valid_auth_credentials) == 1 -def test_invalid_token_during_connect(db, patch_access_token_getter, - account_with_single_auth_creds): +def test_invalid_token_during_connect( + db, patch_access_token_getter, account_with_single_auth_creds +): account_id = account_with_single_auth_creds.id patch_access_token_getter.revoke_refresh_token( - account_with_single_auth_creds.auth_credentials[0].refresh_token) + account_with_single_auth_creds.auth_credentials[0].refresh_token + ) account_with_single_auth_creds.verify_all_credentials() assert 
len(account_with_single_auth_creds.valid_auth_credentials) == 0 g_token_manager.clear_cache(account_with_single_auth_creds) @@ -454,16 +464,19 @@ def test_invalid_token_during_connect(db, patch_access_token_getter, # connect_account() takes an /expunged/ account object # that has the necessary relationships eager-loaded object_session(account_with_single_auth_creds).expunge( - account_with_single_auth_creds) + account_with_single_auth_creds + ) assert not object_session(account_with_single_auth_creds) - account = db.session.query(GmailAccount).options( - joinedload(GmailAccount.auth_credentials)).get( - account_id) + account = ( + db.session.query(GmailAccount) + .options(joinedload(GmailAccount.auth_credentials)) + .get(account_id) + ) db.session.expunge(account) assert not object_session(account) - g = GmailAuthHandler('gmail') + g = GmailAuthHandler("gmail") with pytest.raises(OAuthError): g.connect_account(account) diff --git a/inbox/test/auth/test_imap_smtp_auth.py b/inbox/test/auth/test_imap_smtp_auth.py index ff1c83f10..7cd3ed496 100644 --- a/inbox/test/auth/test_imap_smtp_auth.py +++ b/inbox/test/auth/test_imap_smtp_auth.py @@ -5,82 +5,84 @@ creds = [ { - 'provider': 'yahoo', - 'settings': { - 'name': 'Y.Y!', - 'locale': 'fr', - 'email': 'cypresstest@yahoo.com', - 'password': 'IHate2Gmail'} + "provider": "yahoo", + "settings": { + "name": "Y.Y!", + "locale": "fr", + "email": "cypresstest@yahoo.com", + "password": "IHate2Gmail", + }, }, { - 'provider': 'custom', - 'settings': { - 'name': 'MyAOL', - 'email': 'benbitdit@aol.com', - 'imap_server_host': 'imap.aol.com', - 'imap_server_port': 993, - 'imap_username': 'benbitdit@aol.com', - 'imap_password': 'IHate2Gmail', - 'smtp_server_host': 'smtp.aol.com', - 'smtp_server_port': 587, - 'smtp_username': 'benbitdit@aol.com', - 'smtp_password': 'IHate2Gmail' - } + "provider": "custom", + "settings": { + "name": "MyAOL", + "email": "benbitdit@aol.com", + "imap_server_host": "imap.aol.com", + "imap_server_port": 993, + "imap_username": "benbitdit@aol.com", + "imap_password": "IHate2Gmail", + "smtp_server_host": "smtp.aol.com", + "smtp_server_port": 587, + "smtp_username": "benbitdit@aol.com", + "smtp_password": "IHate2Gmail", + }, }, { - 'provider': 'custom', - 'settings': { - 'name': 'Nylas', - 'email': 'nylastest@runbox.com', - 'imap_server_host': 'mail.runbox.com', - 'imap_server_port': 993, - 'imap_username': 'nylastest', - 'imap_password': 'IHate2Gmail!', - 'smtp_server_host': 'mail.runbox.com', - 'smtp_server_port': 587, - 'smtp_username': 'nylastest', - 'smtp_password': 'IHate2Gmail!' 
- } - } + "provider": "custom", + "settings": { + "name": "Nylas", + "email": "nylastest@runbox.com", + "imap_server_host": "mail.runbox.com", + "imap_server_port": 993, + "imap_username": "nylastest", + "imap_password": "IHate2Gmail!", + "smtp_server_host": "mail.runbox.com", + "smtp_server_port": 587, + "smtp_username": "nylastest", + "smtp_password": "IHate2Gmail!", + }, + }, ] -@pytest.mark.parametrize('creds', creds) -@pytest.mark.usefixtures('mock_smtp_get_connection') +@pytest.mark.parametrize("creds", creds) +@pytest.mark.usefixtures("mock_smtp_get_connection") def test_auth(creds, mock_imapclient): - imap_username = creds['settings'].get('imap_username') + imap_username = creds["settings"].get("imap_username") if imap_username is None: - imap_username = creds['settings']['email'] - imap_password = creds['settings'].get('imap_password') + imap_username = creds["settings"]["email"] + imap_password = creds["settings"].get("imap_password") if imap_password is None: - imap_password = creds['settings']['password'] + imap_password = creds["settings"]["password"] mock_imapclient._add_login(imap_username, imap_password) - handler = GenericAuthHandler(creds['provider']) - email = creds['settings']['email'] - account = handler.create_account(email, creds['settings']) + handler = GenericAuthHandler(creds["provider"]) + email = creds["settings"]["email"] + account = handler.create_account(email, creds["settings"]) # Test that the account was successfully created by the handler. assert account.imap_password == imap_password - if 'smtp_password' in creds['settings']: - assert account.smtp_password == creds['settings']['smtp_password'] + if "smtp_password" in creds["settings"]: + assert account.smtp_password == creds["settings"]["smtp_password"] else: - assert account.imap_password == creds['settings']['password'] - assert account.smtp_password == creds['settings']['password'] + assert account.imap_password == creds["settings"]["password"] + assert account.smtp_password == creds["settings"]["password"] # Test that the account is valid. assert handler.verify_account(account) is True # Test that the password can be updated... - bad_creds = {'email': creds['settings']['email'], - 'imap_password': 'bad_password', - 'imap_server_host': creds['settings'].get('imap_server_host'), - 'imap_server_port': 993, - 'smtp_server_host': creds['settings'].get('smtp_server_host'), - 'smtp_server_port': 587 - } + bad_creds = { + "email": creds["settings"]["email"], + "imap_password": "bad_password", + "imap_server_host": creds["settings"].get("imap_server_host"), + "imap_server_port": 993, + "smtp_server_host": creds["settings"].get("smtp_server_host"), + "smtp_server_port": 587, + } handler.update_account(account, bad_creds) - assert account.imap_password == 'bad_password' + assert account.imap_password == "bad_password" # ...but logging in again won't work. 
with pytest.raises(ValidationError): handler.verify_account(account) diff --git a/inbox/test/auth/test_ssl_auth.py b/inbox/test/auth/test_ssl_auth.py index e559c406a..abb7b5ee2 100644 --- a/inbox/test/auth/test_ssl_auth.py +++ b/inbox/test/auth/test_ssl_auth.py @@ -11,46 +11,46 @@ settings = [ { - 'provider': 'custom', - 'settings': { - 'name': 'MyAOL', - 'email': 'benbitdit@aol.com', - 'imap_server_host': 'imap.aol.com', - 'imap_server_port': 143, - 'imap_username': 'benbitdit@aol.com', - 'imap_password': 'IHate2Gmail', - 'smtp_server_host': 'smtp.aol.com', - 'smtp_server_port': 587, - 'smtp_username': 'benbitdit@aol.com', - 'smtp_password': 'IHate2Gmail', - 'ssl_required': True - } + "provider": "custom", + "settings": { + "name": "MyAOL", + "email": "benbitdit@aol.com", + "imap_server_host": "imap.aol.com", + "imap_server_port": 143, + "imap_username": "benbitdit@aol.com", + "imap_password": "IHate2Gmail", + "smtp_server_host": "smtp.aol.com", + "smtp_server_port": 587, + "smtp_username": "benbitdit@aol.com", + "smtp_password": "IHate2Gmail", + "ssl_required": True, + }, }, { - 'provider': 'custom', - 'settings': { - 'name': 'Test', - 'email': 'test@tivertical.com', - 'imap_server_host': 'tivertical.com', - 'imap_server_port': 143, - 'imap_username': 'test@tivertical.com', - 'imap_password': 'testpwd', - 'smtp_server_host': 'tivertical.com', - 'smtp_server_port': 587, - 'smtp_username': 'test@tivertical.com', - 'smtp_password': 'testpwd', - 'ssl_required': False - } - } + "provider": "custom", + "settings": { + "name": "Test", + "email": "test@tivertical.com", + "imap_server_host": "tivertical.com", + "imap_server_port": 143, + "imap_username": "test@tivertical.com", + "imap_password": "testpwd", + "smtp_server_host": "tivertical.com", + "smtp_server_port": 587, + "smtp_username": "test@tivertical.com", + "smtp_password": "testpwd", + "ssl_required": False, + }, + }, ] def _create_account(settings, ssl): - email = settings['settings']['email'] - handler = GenericAuthHandler(settings['provider']) + email = settings["settings"]["email"] + handler = GenericAuthHandler(settings["provider"]) credentials = copy.deepcopy(settings) - credentials['settings']['ssl_required'] = ssl - account = handler.create_account(email, credentials['settings']) + credentials["settings"]["ssl_required"] = ssl + account = handler.create_account(email, credentials["settings"]) return account @@ -60,32 +60,34 @@ def test_account_ssl_required(): assert account.ssl_required == ssl -@pytest.mark.skipif(True, reason='Need to investigate') -@pytest.mark.parametrize('settings', settings) +@pytest.mark.skipif(True, reason="Need to investigate") +@pytest.mark.parametrize("settings", settings) @pytest.mark.networkrequired def test_imap_connection(settings): - host = settings['settings']['imap_server_host'] - port = settings['settings']['imap_server_port'] + host = settings["settings"]["imap_server_host"] + port = settings["settings"]["imap_server_port"] conn = IMAPClient(host, port=port, use_uid=True, ssl=False, timeout=120) - if conn.has_capability('STARTTLS'): + if conn.has_capability("STARTTLS"): conn = create_imap_connection(host, port, ssl_required=True) - conn.login(settings['settings']['imap_username'], - settings['settings']['imap_password']) + conn.login( + settings["settings"]["imap_username"], settings["settings"]["imap_password"] + ) else: with pytest.raises(SSLNotSupportedError): create_imap_connection(host, port, ssl_required=True) conn = create_imap_connection(host, port, ssl_required=False) - 
conn.login(settings['settings']['imap_username'], - settings['settings']['imap_password']) + conn.login( + settings["settings"]["imap_username"], settings["settings"]["imap_password"] + ) -@pytest.mark.skipif(True, reason='Need to investigate') -@pytest.mark.parametrize('settings', settings) +@pytest.mark.skipif(True, reason="Need to investigate") +@pytest.mark.parametrize("settings", settings) @pytest.mark.networkrequired def test_smtp_connection(settings): - has_starttls = ('aol' in settings['settings']['smtp_server_host']) + has_starttls = "aol" in settings["settings"]["smtp_server_host"] if has_starttls: account = _create_account(settings, ssl=True) @@ -104,13 +106,13 @@ def test_smtp_connection(settings): pass -@pytest.mark.skipif(True, reason='Need to investigate') -@pytest.mark.parametrize('settings', settings) +@pytest.mark.skipif(True, reason="Need to investigate") +@pytest.mark.parametrize("settings", settings) @pytest.mark.networkrequired def test_auth(settings): - handler = GenericAuthHandler(settings['provider']) + handler = GenericAuthHandler(settings["provider"]) - has_starttls = ('aol' in settings['settings']['imap_server_host']) + has_starttls = "aol" in settings["settings"]["imap_server_host"] if has_starttls: account = _create_account(settings, ssl=True) handler.verify_account(account) diff --git a/inbox/test/conftest.py b/inbox/test/conftest.py index fc1c7e40f..326f6d350 100644 --- a/inbox/test/conftest.py +++ b/inbox/test/conftest.py @@ -2,15 +2,19 @@ # Monkeypatch first, to prevent "AttributeError: 'module' object has no # attribute 'poll'" errors when tests import socket, then monkeypatch. from gevent import monkey + monkey.patch_all(aggressive=False) import gevent_openssl + gevent_openssl.monkey_patch() -from inbox.test.util.base import * # noqa -from inbox.util.testutils import (mock_imapclient, # noqa - mock_smtp_get_connection, # noqa - mock_dns_resolver, # noqa - dump_dns_queries, # noqa - files, # noqa - uploaded_file_ids) # noqa +from inbox.test.util.base import * # noqa +from inbox.util.testutils import ( + mock_imapclient, # noqa + mock_smtp_get_connection, # noqa + mock_dns_resolver, # noqa + dump_dns_queries, # noqa + files, # noqa + uploaded_file_ids, +) # noqa diff --git a/inbox/test/contacts/test_process_mail.py b/inbox/test/contacts/test_process_mail.py index fecc76783..c7552e1c9 100644 --- a/inbox/test/contacts/test_process_mail.py +++ b/inbox/test/contacts/test_process_mail.py @@ -7,35 +7,56 @@ def test_update_contacts_from_message(db, default_namespace, thread): # Check that only one Contact is created for repeatedly-referenced # addresses. - add_fake_message(db.session, default_namespace.id, thread, - from_addr=[('', 'alpha@example.com')], - cc_addr=[('', 'alpha@example.com')]) + add_fake_message( + db.session, + default_namespace.id, + thread, + from_addr=[("", "alpha@example.com")], + cc_addr=[("", "alpha@example.com")], + ) - assert db.session.query(Contact).filter_by( - email_address='alpha@example.com').count() == 1 + assert ( + db.session.query(Contact).filter_by(email_address="alpha@example.com").count() + == 1 + ) # Check that existing Contacts are used when we process a new message # referencing them. 
- add_fake_message(db.session, default_namespace.id, thread, - from_addr=[('', 'alpha@example.com')], - cc_addr=[('', 'alpha@example.com')], - to_addr=[('', 'beta@example.com'), - ('', 'gamma@example.com')]) + add_fake_message( + db.session, + default_namespace.id, + thread, + from_addr=[("", "alpha@example.com")], + cc_addr=[("", "alpha@example.com")], + to_addr=[("", "beta@example.com"), ("", "gamma@example.com")], + ) - assert db.session.query(Contact).filter( - Contact.email_address.like('%@example.com'), - Contact.namespace_id == default_namespace.id).count() == 3 - alpha = db.session.query(Contact).filter_by( - email_address='alpha@example.com', - namespace_id=default_namespace.id).one() + assert ( + db.session.query(Contact) + .filter( + Contact.email_address.like("%@example.com"), + Contact.namespace_id == default_namespace.id, + ) + .count() + == 3 + ) + alpha = ( + db.session.query(Contact) + .filter_by(email_address="alpha@example.com", namespace_id=default_namespace.id) + .one() + ) assert len(alpha.message_associations) == 4 def test_addresses_canonicalized(db, default_namespace, thread): - msg = add_fake_message(db.session, default_namespace.id, thread, - from_addr=[('', 'alpha.beta@gmail.com')], - cc_addr=[('', 'alphabeta@gmail.com')], - bcc_addr=[('', 'ALPHABETA@GMAIL.COM')]) + msg = add_fake_message( + db.session, + default_namespace.id, + thread, + from_addr=[("", "alpha.beta@gmail.com")], + cc_addr=[("", "alphabeta@gmail.com")], + bcc_addr=[("", "ALPHABETA@GMAIL.COM")], + ) # Because Gmail addresses with and without periods are the same, check that # there are three MessageContactAssociation instances attached to the @@ -47,25 +68,47 @@ def test_addresses_canonicalized(db, default_namespace, thread): def test_handle_noreply_addresses(db, default_namespace, thread): add_fake_message( - db.session, default_namespace.id, thread, - from_addr=[('Alice', 'drive-shares-noreply@google.com')]) + db.session, + default_namespace.id, + thread, + from_addr=[("Alice", "drive-shares-noreply@google.com")], + ) add_fake_message( - db.session, default_namespace.id, thread, - from_addr=[('Bob', 'drive-shares-noreply@google.com')]) + db.session, + default_namespace.id, + thread, + from_addr=[("Bob", "drive-shares-noreply@google.com")], + ) - noreply_contact = db.session.query(Contact).filter( - Contact.namespace == default_namespace, - Contact.email_address == 'drive-shares-noreply@google.com').one() + noreply_contact = ( + db.session.query(Contact) + .filter( + Contact.namespace == default_namespace, + Contact.email_address == "drive-shares-noreply@google.com", + ) + .one() + ) assert noreply_contact.name is None add_fake_message( - db.session, default_namespace.id, thread, - from_addr=[('Alice', 'alice@example.com')]) + db.session, + default_namespace.id, + thread, + from_addr=[("Alice", "alice@example.com")], + ) add_fake_message( - db.session, default_namespace.id, thread, - from_addr=[('Alice Lastname', 'alice@example.com')]) + db.session, + default_namespace.id, + thread, + from_addr=[("Alice Lastname", "alice@example.com")], + ) - contact = db.session.query(Contact).filter( - Contact.namespace == default_namespace, - Contact.email_address == 'alice@example.com').first() + contact = ( + db.session.query(Contact) + .filter( + Contact.namespace == default_namespace, + Contact.email_address == "alice@example.com", + ) + .first() + ) assert contact.name is not None diff --git a/inbox/test/contacts/test_remote_sync.py b/inbox/test/contacts/test_remote_sync.py index 510b8f133..d8661a62c 
100644 --- a/inbox/test/contacts/test_remote_sync.py +++ b/inbox/test/contacts/test_remote_sync.py @@ -1,75 +1,76 @@ import pytest -from inbox.test.util.base import (contact_sync, contacts_provider, - ContactsProviderStub) +from inbox.test.util.base import contact_sync, contacts_provider, ContactsProviderStub from inbox.models import Contact -__all__ = ['contact_sync', 'contacts_provider'] +__all__ = ["contact_sync", "contacts_provider"] -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def alternate_contacts_provider(): - return ContactsProviderStub('alternate_provider') + return ContactsProviderStub("alternate_provider") -def test_add_contacts_case_insensitive(contacts_provider, contact_sync, db, default_namespace): +def test_add_contacts_case_insensitive( + contacts_provider, contact_sync, db, default_namespace +): """Tests that syncing two contacts with uids that differ only in case sensitivity doesn't cause an error.""" - num_original_contacts = db.session.query(Contact). \ - filter_by(namespace_id=default_namespace.id).count() - contacts_provider._next_uid = 'foo' - contacts_provider._get_next_uid = lambda current: 'FOO' - contacts_provider.supply_contact('Contact One', - 'contact.one@email.address') - contacts_provider.supply_contact('Contact Two', - 'contact.two@email.address') + num_original_contacts = ( + db.session.query(Contact).filter_by(namespace_id=default_namespace.id).count() + ) + contacts_provider._next_uid = "foo" + contacts_provider._get_next_uid = lambda current: "FOO" + contacts_provider.supply_contact("Contact One", "contact.one@email.address") + contacts_provider.supply_contact("Contact Two", "contact.two@email.address") contact_sync.provider = contacts_provider contact_sync.sync() - num_current_contacts = db.session.query(Contact). \ - filter_by(namespace_id=default_namespace.id).count() + num_current_contacts = ( + db.session.query(Contact).filter_by(namespace_id=default_namespace.id).count() + ) assert num_current_contacts - num_original_contacts == 2 def test_add_contacts(contacts_provider, contact_sync, db, default_namespace): """Test that added contacts get stored.""" - num_original_contacts = db.session.query(Contact). \ - filter_by(namespace_id=default_namespace.id).count() - contacts_provider.supply_contact('Contact One', - 'contact.one@email.address') - contacts_provider.supply_contact('Contact Two', - 'contact.two@email.address') + num_original_contacts = ( + db.session.query(Contact).filter_by(namespace_id=default_namespace.id).count() + ) + contacts_provider.supply_contact("Contact One", "contact.one@email.address") + contacts_provider.supply_contact("Contact Two", "contact.two@email.address") contact_sync.provider = contacts_provider contact_sync.sync() - num_current_contacts = db.session.query(Contact). 
\ - filter_by(namespace_id=default_namespace.id).count() + num_current_contacts = ( + db.session.query(Contact).filter_by(namespace_id=default_namespace.id).count() + ) assert num_current_contacts - num_original_contacts == 2 def test_update_contact(contacts_provider, contact_sync, db): """Test that subsequent contact updates get stored.""" - contacts_provider.supply_contact('Old Name', 'old@email.address') + contacts_provider.supply_contact("Old Name", "old@email.address") contact_sync.provider = contacts_provider contact_sync.sync() results = db.session.query(Contact).all() email_addresses = [r.email_address for r in results] - assert 'old@email.address' in email_addresses + assert "old@email.address" in email_addresses contacts_provider.__init__() - contacts_provider.supply_contact('New Name', 'new@email.address') + contacts_provider.supply_contact("New Name", "new@email.address") contact_sync.sync() db.session.commit() results = db.session.query(Contact).all() names = [r.name for r in results] - assert 'New Name' in names + assert "New Name" in names email_addresses = [r.email_address for r in results] - assert 'new@email.address' in email_addresses + assert "new@email.address" in email_addresses def test_deletes(contacts_provider, contact_sync, db): num_original_contacts = db.session.query(Contact).count() - contacts_provider.supply_contact('Name', 'name@email.address') + contacts_provider.supply_contact("Name", "name@email.address") contact_sync.provider = contacts_provider contact_sync.sync() num_current_contacts = db.session.query(Contact).count() @@ -87,9 +88,9 @@ def test_auth_error_handling(contact_sync, default_account, db): """Test that the contact sync greenlet stops if account credentials are invalid.""" # Give the default test account patently invalid OAuth credentials. 
- default_account.refresh_token = 'foo' + default_account.refresh_token = "foo" for auth_creds in default_account.auth_credentials: - auth_creds.refresh_token = 'foo' + auth_creds.refresh_token = "foo" db.session.commit() contact_sync.start() diff --git a/inbox/test/events/test_datetime.py b/inbox/test/events/test_datetime.py index 1b636b3a3..3f6657b09 100644 --- a/inbox/test/events/test_datetime.py +++ b/inbox/test/events/test_datetime.py @@ -1,20 +1,18 @@ import arrow from datetime import timedelta -from inbox.models.when import (Time, TimeSpan, Date, DateSpan, parse_as_when, - parse_utc) +from inbox.models.when import Time, TimeSpan, Date, DateSpan, parse_as_when, parse_utc from inbox.models.event import time_parse -from inbox.events.util import (google_to_event_time, parse_google_time, - parse_datetime) +from inbox.events.util import google_to_event_time, parse_google_time, parse_datetime def test_when_time(): - start_time = arrow.get('2014-09-30T15:34:00.000-07:00') - time = {'time': start_time.timestamp} + start_time = arrow.get("2014-09-30T15:34:00.000-07:00") + time = {"time": start_time.timestamp} ts = parse_as_when(time) assert isinstance(ts, Time) - assert ts.start == start_time.to('utc') - assert ts.end == start_time.to('utc') + assert ts.start == start_time.to("utc") + assert ts.end == start_time.to("utc") assert not ts.spanning assert not ts.all_day assert ts.is_time @@ -23,14 +21,13 @@ def test_when_time(): def test_when_timespan(): - start_time = arrow.get('2014-09-30T15:34:00.000-07:00') - end_time = arrow.get('2014-09-30T16:34:00.000-07:00') - timespan = {'start_time': start_time.timestamp, - 'end_time': end_time.timestamp} + start_time = arrow.get("2014-09-30T15:34:00.000-07:00") + end_time = arrow.get("2014-09-30T16:34:00.000-07:00") + timespan = {"start_time": start_time.timestamp, "end_time": end_time.timestamp} ts = parse_as_when(timespan) assert isinstance(ts, TimeSpan) - assert ts.start == start_time.to('utc') - assert ts.end == end_time.to('utc') + assert ts.start == start_time.to("utc") + assert ts.end == end_time.to("utc") assert ts.spanning assert not ts.all_day assert ts.is_time @@ -39,8 +36,8 @@ def test_when_timespan(): def test_when_date(): - start_date = arrow.get('2014-09-30') - date = {'date': start_date.format('YYYY-MM-DD')} + start_date = arrow.get("2014-09-30") + date = {"date": start_date.format("YYYY-MM-DD")} ts = parse_as_when(date) assert isinstance(ts, Date) assert ts.start == start_date @@ -53,10 +50,12 @@ def test_when_date(): def test_when_datespan(): - start_date = arrow.get('2014-09-30') - end_date = arrow.get('2014-10-01') - datespan = {'start_date': start_date.format('YYYY-MM-DD'), - 'end_date': end_date.format('YYYY-MM-DD')} + start_date = arrow.get("2014-09-30") + end_date = arrow.get("2014-10-01") + datespan = { + "start_date": start_date.format("YYYY-MM-DD"), + "end_date": end_date.format("YYYY-MM-DD"), + } ts = parse_as_when(datespan) assert isinstance(ts, DateSpan) assert ts.start == start_date @@ -70,27 +69,28 @@ def test_when_datespan(): def test_when_spans_arent_spans(): # If start and end are the same, don't create a Span object - start_date = arrow.get('2014-09-30') - end_date = arrow.get('2014-09-30') - datespan = {'start_date': start_date.format('YYYY-MM-DD'), - 'end_date': end_date.format('YYYY-MM-DD')} + start_date = arrow.get("2014-09-30") + end_date = arrow.get("2014-09-30") + datespan = { + "start_date": start_date.format("YYYY-MM-DD"), + "end_date": end_date.format("YYYY-MM-DD"), + } ts = parse_as_when(datespan) assert 
isinstance(ts, Date) - start_time = arrow.get('2014-09-30T15:34:00.000-07:00') - end_time = arrow.get('2014-09-30T15:34:00.000-07:00') - timespan = {'start_time': start_time.timestamp, - 'end_time': end_time.timestamp} + start_time = arrow.get("2014-09-30T15:34:00.000-07:00") + end_time = arrow.get("2014-09-30T15:34:00.000-07:00") + timespan = {"start_time": start_time.timestamp, "end_time": end_time.timestamp} ts = parse_as_when(timespan) assert isinstance(ts, Time) def test_parse_datetime(): - t = '20140104T102030Z' + t = "20140104T102030Z" dt = parse_datetime(t) assert dt == arrow.get(2014, 01, 04, 10, 20, 30) - t = '2014-01-15T17:00:00-05:00' + t = "2014-01-15T17:00:00-05:00" dt = parse_datetime(t) assert dt == arrow.get(2014, 01, 15, 22, 00, 00) @@ -118,32 +118,29 @@ def test_time_parse(): def test_parse_google_time(): - t = {'dateTime': '2012-10-15T17:00:00-07:00', - 'timeZone': 'America/Los_Angeles'} + t = {"dateTime": "2012-10-15T17:00:00-07:00", "timeZone": "America/Los_Angeles"} gt = parse_google_time(t) - assert gt.to('utc') == arrow.get(2012, 10, 16, 00, 00, 00) + assert gt.to("utc") == arrow.get(2012, 10, 16, 00, 00, 00) - t = {'dateTime': '2012-10-15T13:00:00+01:00'} + t = {"dateTime": "2012-10-15T13:00:00+01:00"} gt = parse_google_time(t) - assert gt.to('utc') == arrow.get(2012, 10, 15, 12, 00, 00) + assert gt.to("utc") == arrow.get(2012, 10, 15, 12, 00, 00) - t = {'date': '2012-10-15'} + t = {"date": "2012-10-15"} gt = parse_google_time(t) assert gt == arrow.get(2012, 10, 15) def test_google_to_event_time(): - start = {'dateTime': '2012-10-15T17:00:00-07:00', - 'timeZone': 'America/Los_Angeles'} - end = {'dateTime': '2012-10-15T17:25:00-07:00', - 'timeZone': 'America/Los_Angeles'} + start = {"dateTime": "2012-10-15T17:00:00-07:00", "timeZone": "America/Los_Angeles"} + end = {"dateTime": "2012-10-15T17:25:00-07:00", "timeZone": "America/Los_Angeles"} event_time = google_to_event_time(start, end) assert event_time.start == arrow.get(2012, 10, 16, 00, 00, 00) assert event_time.end == arrow.get(2012, 10, 16, 00, 25, 00) assert event_time.all_day is False - start = {'date': '2012-10-15'} - end = {'date': '2012-10-16'} + start = {"date": "2012-10-15"} + end = {"date": "2012-10-16"} event_time = google_to_event_time(start, end) assert event_time.start == arrow.get(2012, 10, 15) assert event_time.end == arrow.get(2012, 10, 15) @@ -151,17 +148,15 @@ def test_google_to_event_time(): def test_google_to_event_time_reverse(): - end = {'dateTime': '2012-10-15T17:00:00-07:00', - 'timeZone': 'America/Los_Angeles'} - start = {'dateTime': '2012-10-15T17:25:00-07:00', - 'timeZone': 'America/Los_Angeles'} + end = {"dateTime": "2012-10-15T17:00:00-07:00", "timeZone": "America/Los_Angeles"} + start = {"dateTime": "2012-10-15T17:25:00-07:00", "timeZone": "America/Los_Angeles"} event_time = google_to_event_time(start, end) assert event_time.start == arrow.get(2012, 10, 16, 00, 00, 00) assert event_time.end == arrow.get(2012, 10, 16, 00, 25, 00) assert event_time.all_day is False - start = {'date': '2012-10-15'} - end = {'date': '2012-10-16'} + start = {"date": "2012-10-15"} + end = {"date": "2012-10-16"} event_time = google_to_event_time(start, end) assert event_time.start == arrow.get(2012, 10, 15) assert event_time.end == arrow.get(2012, 10, 15) diff --git a/inbox/test/events/test_events_util.py b/inbox/test/events/test_events_util.py index b7c87ef5e..a07f2f971 100644 --- a/inbox/test/events/test_events_util.py +++ b/inbox/test/events/test_events_util.py @@ -7,6 +7,7 @@ def 
test_base36_validation(): from inbox.events.util import valid_base36 + assert valid_base36("1234zerzerzedsfsd") is True assert valid_base36("zerzerzedsfsd") is True assert valid_base36("é(§è!è§('") is False @@ -15,23 +16,25 @@ def test_base36_validation(): def test_event_organizer_parsing(): from inbox.models.event import Event + e = Event() - e.owner = 'Jean Lecanuet ' - assert e.organizer_email == 'jean.lecanuet@orange.fr' + e.owner = "Jean Lecanuet " + assert e.organizer_email == "jean.lecanuet@orange.fr" - e.owner = u'Pierre Mendès-France ' - assert e.organizer_email == 'pierre-mendes.france@orange.fr' + e.owner = u"Pierre Mendès-France " + assert e.organizer_email == "pierre-mendes.france@orange.fr" - e.owner = u'Pierre Messmer < pierre.messmer@orange.fr >' - assert e.organizer_email == 'pierre.messmer@orange.fr' + e.owner = u"Pierre Messmer < pierre.messmer@orange.fr >" + assert e.organizer_email == "pierre.messmer@orange.fr" def test_removed_participants(): from inbox.events.util import removed_participants - helena = {'email': 'helena@nylas.com', 'name': 'Helena Handbasket'} - ben = {'email': 'ben@nylas.com', 'name': 'Ben Handbasket'} - paul = {'email': 'paul@nylas.com', 'name': 'Paul Hochon'} - helena_case_change = {'email': 'HELENA@nylas.com', 'name': 'Helena Handbasket'} + + helena = {"email": "helena@nylas.com", "name": "Helena Handbasket"} + ben = {"email": "ben@nylas.com", "name": "Ben Handbasket"} + paul = {"email": "paul@nylas.com", "name": "Paul Hochon"} + helena_case_change = {"email": "HELENA@nylas.com", "name": "Helena Handbasket"} assert removed_participants([], []) == [] assert removed_participants([helena], [ben]) == [helena] @@ -49,18 +52,20 @@ def test_unicode_event_truncation(db, default_account): emoji_str = u"".join([u"😁" for i in range(300)]) title = "".join(["a" for i in range(2000)]) - e = Event(raw_data='', - busy=True, - all_day=False, - read_only=False, - uid='31418', - start=datetime(2015, 2, 22, 11, 11), - end=datetime(2015, 2, 22, 22, 22), - is_owner=True, - calendar=default_account.emailed_events_calendar, - title=title, - location=emoji_str, - participants=[]) + e = Event( + raw_data="", + busy=True, + all_day=False, + read_only=False, + uid="31418", + start=datetime(2015, 2, 22, 11, 11), + end=datetime(2015, 2, 22, 22, 22), + is_owner=True, + calendar=default_account.emailed_events_calendar, + title=title, + location=emoji_str, + participants=[], + ) e.namespace = default_account.namespace db.session.add(e) db.session.commit() @@ -74,18 +79,19 @@ def test_unicode_event_truncation(db, default_account): def test_event_emails(): from inbox.models.event import Event + e = Event() - e.description = 'Email: test@example.com.' - assert e.emails_from_description == ['test@example.com'] + e.description = "Email: test@example.com." + assert e.emails_from_description == ["test@example.com"] e.description = 'other@example.com' - assert e.emails_from_description == ['test@example.com', 'other@example.com'] + assert e.emails_from_description == ["test@example.com", "other@example.com"] - e.title = 'Email: title@example.com' - assert e.emails_from_title == ['title@example.com'] + e.title = "Email: title@example.com" + assert e.emails_from_title == ["title@example.com"] # We're not currently able to extract HTML-encoded email addresses from an # HTML event. - #e.description = '
Email: test@example.com' - #assert e.emails_from_description == ['test@example.com'] + # e.description = 'Email: test@example.com
' + # assert e.emails_from_description == ['test@example.com'] diff --git a/inbox/test/events/test_google_events.py b/inbox/test/events/test_google_events.py index 6ae05142a..a3ee477b1 100644 --- a/inbox/test/events/test_google_events.py +++ b/inbox/test/events/test_google_events.py @@ -11,81 +11,108 @@ def cmp_cal_attrs(calendar1, calendar2): - return all(getattr(calendar1, attr) == getattr(calendar2, attr) for attr in - ('name', 'uid', 'description', 'read_only')) + return all( + getattr(calendar1, attr) == getattr(calendar2, attr) + for attr in ("name", "uid", "description", "read_only") + ) def cmp_event_attrs(event1, event2): - for attr in ('title', 'description', 'location', 'start', 'end', 'all_day', - 'owner', 'read_only', 'participants', 'recurrence'): + for attr in ( + "title", + "description", + "location", + "start", + "end", + "all_day", + "owner", + "read_only", + "participants", + "recurrence", + ): if getattr(event1, attr) != getattr(event2, attr): print attr, getattr(event1, attr), getattr(event2, attr) - return all(getattr(event1, attr) == getattr(event2, attr) for attr in - ('title', 'description', 'location', 'start', 'end', 'all_day', - 'owner', 'read_only', 'participants', 'recurrence')) + return all( + getattr(event1, attr) == getattr(event2, attr) + for attr in ( + "title", + "description", + "location", + "start", + "end", + "all_day", + "owner", + "read_only", + "participants", + "recurrence", + ) + ) def test_calendar_parsing(): raw_response = [ { - 'accessRole': 'owner', - 'backgroundColor': '#9a9cff', - 'colorId': '17', - 'defaultReminders': [{'method': 'popup', 'minutes': 30}], - 'etag': '"1425508164135000"', - 'foregroundColor': '#000000', - 'id': 'ben.bitdiddle2222@gmail.com', - 'kind': 'calendar#calendarListEntry', - 'notificationSettings': { - 'notifications': [ - {'method': 'email', 'type': 'eventCreation'}, - {'method': 'email', 'type': 'eventChange'}, - {'method': 'email', 'type': 'eventCancellation'}, - {'method': 'email', 'type': 'eventResponse'} + "accessRole": "owner", + "backgroundColor": "#9a9cff", + "colorId": "17", + "defaultReminders": [{"method": "popup", "minutes": 30}], + "etag": '"1425508164135000"', + "foregroundColor": "#000000", + "id": "ben.bitdiddle2222@gmail.com", + "kind": "calendar#calendarListEntry", + "notificationSettings": { + "notifications": [ + {"method": "email", "type": "eventCreation"}, + {"method": "email", "type": "eventChange"}, + {"method": "email", "type": "eventCancellation"}, + {"method": "email", "type": "eventResponse"}, ] }, - 'primary': True, - 'selected': True, - 'summary': 'ben.bitdiddle2222@gmail.com', - 'timeZone': 'America/Los_Angeles' + "primary": True, + "selected": True, + "summary": "ben.bitdiddle2222@gmail.com", + "timeZone": "America/Los_Angeles", }, { - 'accessRole': 'reader', - 'backgroundColor': '#f83a22', - 'colorId': '3', - 'defaultReminders': [], - 'description': 'Holidays and Observances in United States', - 'etag': '"1399416119263000"', - 'foregroundColor': '#000000', - 'id': 'en.usa#holiday@group.v.calendar.google.com', - 'kind': 'calendar#calendarListEntry', - 'selected': True, - 'summary': 'Holidays in United States', - 'timeZone': 'America/Los_Angeles' + "accessRole": "reader", + "backgroundColor": "#f83a22", + "colorId": "3", + "defaultReminders": [], + "description": "Holidays and Observances in United States", + "etag": '"1399416119263000"', + "foregroundColor": "#000000", + "id": "en.usa#holiday@group.v.calendar.google.com", + "kind": "calendar#calendarListEntry", + "selected": True, + 
"summary": "Holidays in United States", + "timeZone": "America/Los_Angeles", }, { - 'defaultReminders': [], - 'deleted': True, - 'etag': '"1425952878772000"', - 'id': 'fg0s7qel95q86log75ilhhf12g@group.calendar.google.com', - 'kind': 'calendar#calendarListEntry' - } + "defaultReminders": [], + "deleted": True, + "etag": '"1425952878772000"', + "id": "fg0s7qel95q86log75ilhhf12g@group.calendar.google.com", + "kind": "calendar#calendarListEntry", + }, ] - expected_deletes = ['fg0s7qel95q86log75ilhhf12g@group.calendar.google.com'] + expected_deletes = ["fg0s7qel95q86log75ilhhf12g@group.calendar.google.com"] expected_updates = [ - Calendar(uid='ben.bitdiddle2222@gmail.com', - name='ben.bitdiddle2222@gmail.com', - description=None, - read_only=False), - Calendar(uid='en.usa#holiday@group.v.calendar.google.com', - name='Holidays in United States', - description='Holidays and Observances in United States', - read_only=True) + Calendar( + uid="ben.bitdiddle2222@gmail.com", + name="ben.bitdiddle2222@gmail.com", + description=None, + read_only=False, + ), + Calendar( + uid="en.usa#holiday@group.v.calendar.google.com", + name="Holidays in United States", + description="Holidays and Observances in United States", + read_only=True, + ), ] provider = GoogleEventsProvider(1, 1) - provider._get_raw_calendars = mock.MagicMock( - return_value=raw_response) + provider._get_raw_calendars = mock.MagicMock(return_value=raw_response) deletes, updates = provider.sync_calendars() assert deletes == expected_deletes for obtained, expected in zip(updates, expected_updates): @@ -95,136 +122,148 @@ def test_calendar_parsing(): def test_event_parsing(): raw_response = [ { - 'created': '2012-10-09T22:35:50.000Z', - 'creator': { - 'displayName': 'Eben Freeman', - 'email': 'freemaneben@gmail.com', - 'self': True + "created": "2012-10-09T22:35:50.000Z", + "creator": { + "displayName": "Eben Freeman", + "email": "freemaneben@gmail.com", + "self": True, }, - 'end': {'dateTime': '2012-10-15T18:00:00-07:00'}, - 'etag': '"2806773858144000"', - 'htmlLink': 'https://www.google.com/calendar/event?eid=FOO', - 'iCalUID': 'tn7krk4cekt8ag3pk6gapqqbro@google.com', - 'id': 'tn7krk4cekt8ag3pk6gapqqbro', - 'kind': 'calendar#event', - 'organizer': { - 'displayName': 'Eben Freeman', - 'email': 'freemaneben@gmail.com', - 'self': True + "end": {"dateTime": "2012-10-15T18:00:00-07:00"}, + "etag": '"2806773858144000"', + "htmlLink": "https://www.google.com/calendar/event?eid=FOO", + "iCalUID": "tn7krk4cekt8ag3pk6gapqqbro@google.com", + "id": "tn7krk4cekt8ag3pk6gapqqbro", + "kind": "calendar#event", + "organizer": { + "displayName": "Eben Freeman", + "email": "freemaneben@gmail.com", + "self": True, }, - 'attendees': [ - {'displayName': 'MITOC BOD', - 'email': 'mitoc-bod@mit.edu', - 'responseStatus': 'accepted'}, - {'displayName': 'Eben Freeman', - 'email': 'freemaneben@gmail.com', - 'responseStatus': 'accepted'} + "attendees": [ + { + "displayName": "MITOC BOD", + "email": "mitoc-bod@mit.edu", + "responseStatus": "accepted", + }, + { + "displayName": "Eben Freeman", + "email": "freemaneben@gmail.com", + "responseStatus": "accepted", + }, ], - 'reminders': {'useDefault': True}, - 'recurrence': ['RRULE:FREQ=WEEKLY;UNTIL=20150209T075959Z;BYDAY=MO'], - 'sequence': 0, - 'start': {'dateTime': '2012-10-15T17:00:00-07:00'}, - 'status': 'confirmed', - 'summary': 'BOD Meeting', - 'updated': '2014-06-21T21:42:09.072Z' + "reminders": {"useDefault": True}, + "recurrence": ["RRULE:FREQ=WEEKLY;UNTIL=20150209T075959Z;BYDAY=MO"], + "sequence": 0, + "start": 
{"dateTime": "2012-10-15T17:00:00-07:00"}, + "status": "confirmed", + "summary": "BOD Meeting", + "updated": "2014-06-21T21:42:09.072Z", }, { - 'created': '2014-01-09T03:33:02.000Z', - 'creator': { - 'displayName': 'Holidays in United States', - 'email': 'en.usa#holiday@group.v.calendar.google.com', - 'self': True + "created": "2014-01-09T03:33:02.000Z", + "creator": { + "displayName": "Holidays in United States", + "email": "en.usa#holiday@group.v.calendar.google.com", + "self": True, }, - 'end': {u'date': '2014-06-16'}, - 'etag': '"2778476764000000"', - 'htmlLink': 'https://www.google.com/calendar/event?eid=BAR', - 'iCalUID': '20140615_60o30dr564o30c1g60o30dr4ck@google.com', - 'id': '20140615_60o30dr564o30c1g60o30dr4ck', - 'kind': 'calendar#event', - 'organizer': { - 'displayName': 'Holidays in United States', - 'email': 'en.usa#holiday@group.v.calendar.google.com', - 'self': True + "end": {u"date": "2014-06-16"}, + "etag": '"2778476764000000"', + "htmlLink": "https://www.google.com/calendar/event?eid=BAR", + "iCalUID": "20140615_60o30dr564o30c1g60o30dr4ck@google.com", + "id": "20140615_60o30dr564o30c1g60o30dr4ck", + "kind": "calendar#event", + "organizer": { + "displayName": "Holidays in United States", + "email": "en.usa#holiday@group.v.calendar.google.com", + "self": True, }, - 'sequence': 0, - 'start': {'date': '2014-06-15'}, - 'status': 'confirmed', - 'summary': "Fathers' Day", - 'transparency': 'transparent', - 'updated': '2014-01-09T03:33:02.000Z', - 'visibility': 'public' + "sequence": 0, + "start": {"date": "2014-06-15"}, + "status": "confirmed", + "summary": "Fathers' Day", + "transparency": "transparent", + "updated": "2014-01-09T03:33:02.000Z", + "visibility": "public", }, { - 'created': '2015-03-10T01:19:59.000Z', - 'creator': { - 'displayName': 'Ben Bitdiddle', - 'email': 'ben.bitdiddle2222@gmail.com', - 'self': True + "created": "2015-03-10T01:19:59.000Z", + "creator": { + "displayName": "Ben Bitdiddle", + "email": "ben.bitdiddle2222@gmail.com", + "self": True, }, - 'end': {u'date': '2015-03-11'}, - 'etag': '"2851906839480000"', - 'htmlLink': 'https://www.google.com/calendar/event?eid=BAZ', - 'iCalUID': '3uisajkmdjqo43tfc3ig1l5hek@google.com', - 'id': '3uisajkmdjqo43tfc3ig1l5hek', - 'kind': 'calendar#event', - 'organizer': { - 'displayName': 'Ben Bitdiddle', - 'email': 'ben.bitdiddle2222@gmail.com', - 'self': True}, - 'reminders': {u'useDefault': False}, - 'sequence': 1, - 'start': {u'date': '2015-03-10'}, - 'status': 'cancelled', - 'summary': 'TUESDAY', - 'transparency': 'transparent', - 'updated': '2015-03-10T02:10:19.740Z' - } + "end": {u"date": "2015-03-11"}, + "etag": '"2851906839480000"', + "htmlLink": "https://www.google.com/calendar/event?eid=BAZ", + "iCalUID": "3uisajkmdjqo43tfc3ig1l5hek@google.com", + "id": "3uisajkmdjqo43tfc3ig1l5hek", + "kind": "calendar#event", + "organizer": { + "displayName": "Ben Bitdiddle", + "email": "ben.bitdiddle2222@gmail.com", + "self": True, + }, + "reminders": {u"useDefault": False}, + "sequence": 1, + "start": {u"date": "2015-03-10"}, + "status": "cancelled", + "summary": "TUESDAY", + "transparency": "transparent", + "updated": "2015-03-10T02:10:19.740Z", + }, ] - expected_deletes = ['3uisajkmdjqo43tfc3ig1l5hek'] + expected_deletes = ["3uisajkmdjqo43tfc3ig1l5hek"] expected_updates = [ - RecurringEvent(uid='tn7krk4cekt8ag3pk6gapqqbro', - title='BOD Meeting', - description=None, - read_only=False, - start=arrow.get(2012, 10, 16, 0, 0, 0), - end=arrow.get(2012, 10, 16, 1, 0, 0), - all_day=False, - busy=True, - owner='Eben Freeman ', - 
recurrence=['RRULE:FREQ=WEEKLY;UNTIL=20150209T075959Z;BYDAY=MO'], - participants=[ - {'email': 'mitoc-bod@mit.edu', - 'name': 'MITOC BOD', - 'status': 'yes', - 'notes': None}, - {'email': 'freemaneben@gmail.com', - 'name': 'Eben Freeman', - 'status': 'yes', - 'notes': None} - ]), - Event(uid='20140615_60o30dr564o30c1g60o30dr4ck', - title="Fathers' Day", - description=None, - read_only=False, - busy=False, - start=arrow.get(2014, 06, 15), - end=arrow.get(2014, 06, 15), - all_day=True, - owner='Holidays in United States ', - participants=[]) + RecurringEvent( + uid="tn7krk4cekt8ag3pk6gapqqbro", + title="BOD Meeting", + description=None, + read_only=False, + start=arrow.get(2012, 10, 16, 0, 0, 0), + end=arrow.get(2012, 10, 16, 1, 0, 0), + all_day=False, + busy=True, + owner="Eben Freeman ", + recurrence=["RRULE:FREQ=WEEKLY;UNTIL=20150209T075959Z;BYDAY=MO"], + participants=[ + { + "email": "mitoc-bod@mit.edu", + "name": "MITOC BOD", + "status": "yes", + "notes": None, + }, + { + "email": "freemaneben@gmail.com", + "name": "Eben Freeman", + "status": "yes", + "notes": None, + }, + ], + ), + Event( + uid="20140615_60o30dr564o30c1g60o30dr4ck", + title="Fathers' Day", + description=None, + read_only=False, + busy=False, + start=arrow.get(2014, 06, 15), + end=arrow.get(2014, 06, 15), + all_day=True, + owner="Holidays in United States ", + participants=[], + ), ] provider = GoogleEventsProvider(1, 1) - provider.calendars_table = {'uid': False} - provider._get_raw_events = mock.MagicMock( - return_value=raw_response) - updates = provider.sync_events('uid', 1) + provider.calendars_table = {"uid": False} + provider._get_raw_events = mock.MagicMock(return_value=raw_response) + updates = provider.sync_events("uid", 1) # deleted events are actually only marked as # cancelled. Look for them in the updates stream. 
found_cancelled_event = False for event in updates: - if event.uid in expected_deletes and event.status == 'cancelled': + if event.uid in expected_deletes and event.status == "cancelled": found_cancelled_event = True break @@ -237,115 +276,116 @@ def test_event_parsing(): # Test read-only support raw_response = [ { - 'created': '2014-01-09T03:33:02.000Z', - 'creator': { - 'displayName': 'Holidays in United States', - 'email': 'en.usa#holiday@group.v.calendar.google.com', - 'self': True + "created": "2014-01-09T03:33:02.000Z", + "creator": { + "displayName": "Holidays in United States", + "email": "en.usa#holiday@group.v.calendar.google.com", + "self": True, }, - 'end': {u'date': '2014-06-16'}, - 'etag': '"2778476764000000"', - 'htmlLink': 'https://www.google.com/calendar/event?eid=BAR', - 'iCalUID': '20140615_60o30dr564o30c1g60o30dr4ck@google.com', - 'id': '20140615_60o30dr564o30c1g60o30dr4ck', - 'kind': 'calendar#event', - 'organizer': { - 'displayName': 'Holidays in United States', - 'email': 'en.usa#holiday@group.v.calendar.google.com', - 'self': True + "end": {u"date": "2014-06-16"}, + "etag": '"2778476764000000"', + "htmlLink": "https://www.google.com/calendar/event?eid=BAR", + "iCalUID": "20140615_60o30dr564o30c1g60o30dr4ck@google.com", + "id": "20140615_60o30dr564o30c1g60o30dr4ck", + "kind": "calendar#event", + "organizer": { + "displayName": "Holidays in United States", + "email": "en.usa#holiday@group.v.calendar.google.com", + "self": True, }, - 'sequence': 0, - 'start': {'date': '2014-06-15'}, - 'status': 'confirmed', - 'summary': "Fathers' Day", - 'transparency': 'transparent', - 'updated': '2014-01-09T03:33:02.000Z', - 'visibility': 'public', - 'guestCanModify': True}] + "sequence": 0, + "start": {"date": "2014-06-15"}, + "status": "confirmed", + "summary": "Fathers' Day", + "transparency": "transparent", + "updated": "2014-01-09T03:33:02.000Z", + "visibility": "public", + "guestCanModify": True, + } + ] provider = GoogleEventsProvider(1, 1) # This is a read-only calendar - provider.calendars_table = {'uid': True} - provider._get_raw_events = mock.MagicMock( - return_value=raw_response) - updates = provider.sync_events('uid', 1) + provider.calendars_table = {"uid": True} + provider._get_raw_events = mock.MagicMock(return_value=raw_response) + updates = provider.sync_events("uid", 1) assert len(updates) == 1 assert updates[0].read_only is True def test_handle_offset_all_day_events(): raw_event = { - 'created': '2014-01-09T03:33:02.000Z', - 'creator': { - 'displayName': 'Ben Bitdiddle', - 'email': 'ben.bitdiddle2222@gmail.com', - 'self': True + "created": "2014-01-09T03:33:02.000Z", + "creator": { + "displayName": "Ben Bitdiddle", + "email": "ben.bitdiddle2222@gmail.com", + "self": True, }, - 'etag': '"2778476764000000"', - 'htmlLink': 'https://www.google.com/calendar/event?eid=BAR', - 'iCalUID': '20140615_60o30dr564o30c1g60o30dr4ck@google.com', - 'id': '20140615_60o30dr564o30c1g60o30dr4ck', - 'kind': 'calendar#event', - 'organizer': { - 'displayName': 'Ben Bitdiddle', - 'email': 'ben.bitdiddle2222@gmail.com', - 'self': True + "etag": '"2778476764000000"', + "htmlLink": "https://www.google.com/calendar/event?eid=BAR", + "iCalUID": "20140615_60o30dr564o30c1g60o30dr4ck@google.com", + "id": "20140615_60o30dr564o30c1g60o30dr4ck", + "kind": "calendar#event", + "organizer": { + "displayName": "Ben Bitdiddle", + "email": "ben.bitdiddle2222@gmail.com", + "self": True, }, - 'sequence': 0, - 'start': {'date': '2014-03-15'}, - 'end': {u'date': '2014-03-15'}, - 'status': 'confirmed', - 
'summary': 'Ides of March', - 'transparency': 'transparent', - 'updated': '2014-01-09T03:33:02.000Z', - 'visibility': 'public' + "sequence": 0, + "start": {"date": "2014-03-15"}, + "end": {u"date": "2014-03-15"}, + "status": "confirmed", + "summary": "Ides of March", + "transparency": "transparent", + "updated": "2014-01-09T03:33:02.000Z", + "visibility": "public", } - expected = Event(uid='20140615_60o30dr564o30c1g60o30dr4ck', - title='Ides of March', - description=None, - read_only=False, - busy=False, - start=arrow.get(2014, 03, 15), - end=arrow.get(2014, 03, 15), - all_day=True, - owner='Ben Bitdiddle ', - participants=[]) + expected = Event( + uid="20140615_60o30dr564o30c1g60o30dr4ck", + title="Ides of March", + description=None, + read_only=False, + busy=False, + start=arrow.get(2014, 03, 15), + end=arrow.get(2014, 03, 15), + all_day=True, + owner="Ben Bitdiddle ", + participants=[], + ) assert cmp_event_attrs(expected, parse_event_response(raw_event, False)) def test_handle_unparseable_dates(): - raw_response = [{ - 'id': '20140615_60o30dr564o30c1g60o30dr4ck', - 'start': {'date': '0000-01-01'}, - 'end': {'date': '0000-01-02'}, - 'summary': 'test' - }] + raw_response = [ + { + "id": "20140615_60o30dr564o30c1g60o30dr4ck", + "start": {"date": "0000-01-01"}, + "end": {"date": "0000-01-02"}, + "summary": "test", + } + ] provider = GoogleEventsProvider(1, 1) - provider._get_raw_events = mock.MagicMock( - return_value=raw_response) - updates = provider.sync_events('uid', 1) + provider._get_raw_events = mock.MagicMock(return_value=raw_response) + updates = provider.sync_events("uid", 1) assert len(updates) == 0 def test_pagination(): first_response = requests.Response() first_response.status_code = 200 - first_response._content = json.dumps({ - 'items': ['A', 'B', 'C'], - 'nextPageToken': 'CjkKKzlhb2tkZjNpZTMwNjhtZThllU' - }) + first_response._content = json.dumps( + {"items": ["A", "B", "C"], "nextPageToken": "CjkKKzlhb2tkZjNpZTMwNjhtZThllU"} + ) second_response = requests.Response() second_response.status_code = 200 - second_response._content = json.dumps({ - 'items': ['D', 'E'] - }) + second_response._content = json.dumps({"items": ["D", "E"]}) requests.get = mock.Mock(side_effect=[first_response, second_response]) provider = GoogleEventsProvider(1, 1) - provider._get_access_token = mock.Mock(return_value='token') - items = provider._get_resource_list('https://googleapis.com/testurl') - assert items == ['A', 'B', 'C', 'D', 'E'] + provider._get_access_token = mock.Mock(return_value="token") + items = provider._get_resource_list("https://googleapis.com/testurl") + assert items == ["A", "B", "C", "D", "E"] def test_handle_http_401(): @@ -354,90 +394,93 @@ def test_handle_http_401(): second_response = requests.Response() second_response.status_code = 200 - second_response._content = json.dumps({ - 'items': ['A', 'B', 'C'] - }) + second_response._content = json.dumps({"items": ["A", "B", "C"]}) requests.get = mock.Mock(side_effect=[first_response, second_response]) provider = GoogleEventsProvider(1, 1) - provider._get_access_token = mock.Mock(return_value='token') - items = provider._get_resource_list('https://googleapis.com/testurl') - assert items == ['A', 'B', 'C'] + provider._get_access_token = mock.Mock(return_value="token") + items = provider._get_resource_list("https://googleapis.com/testurl") + assert items == ["A", "B", "C"] # Check that we actually refreshed the access token assert len(provider._get_access_token.mock_calls) == 2 -@pytest.mark.usefixtures('mock_gevent_sleep') 
+@pytest.mark.usefixtures("mock_gevent_sleep") def test_handle_quota_exceeded(): first_response = requests.Response() first_response.status_code = 403 - first_response._content = json.dumps({ - 'error': { - 'errors': [ - {'domain': 'usageLimits', - 'reason': 'userRateLimitExceeded', - 'message': 'User Rate Limit Exceeded'} - ], - 'code': 403, - 'message': 'User Rate Limit Exceeded' + first_response._content = json.dumps( + { + "error": { + "errors": [ + { + "domain": "usageLimits", + "reason": "userRateLimitExceeded", + "message": "User Rate Limit Exceeded", + } + ], + "code": 403, + "message": "User Rate Limit Exceeded", + } } - }) + ) second_response = requests.Response() second_response.status_code = 200 - second_response._content = json.dumps({ - 'items': ['A', 'B', 'C'] - }) + second_response._content = json.dumps({"items": ["A", "B", "C"]}) requests.get = mock.Mock(side_effect=[first_response, second_response]) provider = GoogleEventsProvider(1, 1) - provider._get_access_token = mock.Mock(return_value='token') - items = provider._get_resource_list('https://googleapis.com/testurl') + provider._get_access_token = mock.Mock(return_value="token") + items = provider._get_resource_list("https://googleapis.com/testurl") # Check that we slept, then retried. assert gevent.sleep.called - assert items == ['A', 'B', 'C'] + assert items == ["A", "B", "C"] -@pytest.mark.usefixtures('mock_gevent_sleep') +@pytest.mark.usefixtures("mock_gevent_sleep") def test_handle_internal_server_error(): first_response = requests.Response() first_response.status_code = 503 second_response = requests.Response() second_response.status_code = 200 - second_response._content = json.dumps({ - 'items': ['A', 'B', 'C'] - }) + second_response._content = json.dumps({"items": ["A", "B", "C"]}) requests.get = mock.Mock(side_effect=[first_response, second_response]) provider = GoogleEventsProvider(1, 1) - provider._get_access_token = mock.Mock(return_value='token') - items = provider._get_resource_list('https://googleapis.com/testurl') + provider._get_access_token = mock.Mock(return_value="token") + items = provider._get_resource_list("https://googleapis.com/testurl") # Check that we slept, then retried. 
assert gevent.sleep.called - assert items == ['A', 'B', 'C'] + assert items == ["A", "B", "C"] def test_handle_api_not_enabled(): response = requests.Response() response.status_code = 403 - response._content = json.dumps({ - 'error': { - 'code': 403, - 'message': 'Access Not Configured.', - 'errors': [ - {'domain': 'usageLimits', 'message': 'Access Not Configured', - 'reason': 'accessNotConfigured', - 'extendedHelp': 'https://console.developers.google.com'} - ] + response._content = json.dumps( + { + "error": { + "code": 403, + "message": "Access Not Configured.", + "errors": [ + { + "domain": "usageLimits", + "message": "Access Not Configured", + "reason": "accessNotConfigured", + "extendedHelp": "https://console.developers.google.com", + } + ], + } } - }) + ) requests.get = mock.Mock(return_value=response) provider = GoogleEventsProvider(1, 1) - provider._get_access_token = mock.Mock(return_value='token') + provider._get_access_token = mock.Mock(return_value="token") with pytest.raises(AccessNotEnabledError): - provider._get_resource_list('https://googleapis.com/testurl') + provider._get_resource_list("https://googleapis.com/testurl") def test_handle_other_errors(): @@ -446,107 +489,121 @@ def test_handle_other_errors(): response._content = "This is not the JSON you're looking for" requests.get = mock.Mock(return_value=response) provider = GoogleEventsProvider(1, 1) - provider._get_access_token = mock.Mock(return_value='token') + provider._get_access_token = mock.Mock(return_value="token") with pytest.raises(requests.exceptions.HTTPError): - provider._get_resource_list('https://googleapis.com/testurl') + provider._get_resource_list("https://googleapis.com/testurl") response = requests.Response() response.status_code = 404 requests.get = mock.Mock(return_value=response) provider = GoogleEventsProvider(1, 1) - provider._get_access_token = mock.Mock(return_value='token') + provider._get_access_token = mock.Mock(return_value="token") with pytest.raises(requests.exceptions.HTTPError): - provider._get_resource_list('https://googleapis.com/testurl') + provider._get_resource_list("https://googleapis.com/testurl") def test_recurrence_creation(): event = { - 'created': '2012-10-09T22:35:50.000Z', - 'creator': { - 'displayName': 'Eben Freeman', - 'email': 'freemaneben@gmail.com', - 'self': True + "created": "2012-10-09T22:35:50.000Z", + "creator": { + "displayName": "Eben Freeman", + "email": "freemaneben@gmail.com", + "self": True, }, - 'end': {'dateTime': '2012-10-15T18:00:00-07:00'}, - 'etag': '"2806773858144000"', - 'htmlLink': 'https://www.google.com/calendar/event?eid=FOO', - 'iCalUID': 'tn7krk4cekt8ag3pk6gapqqbro@google.com', - 'id': 'tn7krk4cekt8ag3pk6gapqqbro', - 'kind': 'calendar#event', - 'organizer': { - 'displayName': 'Eben Freeman', - 'email': 'freemaneben@gmail.com', - 'self': True + "end": {"dateTime": "2012-10-15T18:00:00-07:00"}, + "etag": '"2806773858144000"', + "htmlLink": "https://www.google.com/calendar/event?eid=FOO", + "iCalUID": "tn7krk4cekt8ag3pk6gapqqbro@google.com", + "id": "tn7krk4cekt8ag3pk6gapqqbro", + "kind": "calendar#event", + "organizer": { + "displayName": "Eben Freeman", + "email": "freemaneben@gmail.com", + "self": True, }, - 'attendees': [ - {'displayName': 'MITOC BOD', - 'email': 'mitoc-bod@mit.edu', - 'responseStatus': 'accepted'}, - {'displayName': 'Eben Freeman', - 'email': 'freemaneben@gmail.com', - 'responseStatus': 'accepted'} + "attendees": [ + { + "displayName": "MITOC BOD", + "email": "mitoc-bod@mit.edu", + "responseStatus": "accepted", + }, + { + 
"displayName": "Eben Freeman", + "email": "freemaneben@gmail.com", + "responseStatus": "accepted", + }, + ], + "reminders": {"useDefault": True}, + "recurrence": [ + "RRULE:FREQ=WEEKLY;UNTIL=20150209T075959Z;BYDAY=MO", + "EXDATE;TZID=America/Los_Angeles:20150208T010000", ], - 'reminders': {'useDefault': True}, - 'recurrence': ['RRULE:FREQ=WEEKLY;UNTIL=20150209T075959Z;BYDAY=MO', - 'EXDATE;TZID=America/Los_Angeles:20150208T010000'], - 'sequence': 0, - 'start': {'dateTime': '2012-10-15T17:00:00-07:00', - 'timeZone': 'America/Los_Angeles'}, - 'status': 'confirmed', - 'summary': 'BOD Meeting', - 'updated': '2014-06-21T21:42:09.072Z' + "sequence": 0, + "start": { + "dateTime": "2012-10-15T17:00:00-07:00", + "timeZone": "America/Los_Angeles", + }, + "status": "confirmed", + "summary": "BOD Meeting", + "updated": "2014-06-21T21:42:09.072Z", } event = parse_event_response(event, False) assert isinstance(event, RecurringEvent) - assert event.rrule == 'RRULE:FREQ=WEEKLY;UNTIL=20150209T075959Z;BYDAY=MO' - assert event.exdate == 'EXDATE;TZID=America/Los_Angeles:20150208T010000' + assert event.rrule == "RRULE:FREQ=WEEKLY;UNTIL=20150209T075959Z;BYDAY=MO" + assert event.exdate == "EXDATE;TZID=America/Los_Angeles:20150208T010000" assert event.until == arrow.get(2015, 02, 9, 7, 59, 59) - assert event.start_timezone == 'America/Los_Angeles' + assert event.start_timezone == "America/Los_Angeles" def test_override_creation(): event = { - 'created': '2012-10-09T22:35:50.000Z', - 'creator': { - 'displayName': 'Eben Freeman', - 'email': 'freemaneben@gmail.com', - 'self': True + "created": "2012-10-09T22:35:50.000Z", + "creator": { + "displayName": "Eben Freeman", + "email": "freemaneben@gmail.com", + "self": True, }, - 'end': {'dateTime': '2012-10-22T19:00:00-07:00'}, - 'etag': '"2806773858144000"', - 'htmlLink': 'https://www.google.com/calendar/event?eid=FOO', - 'iCalUID': 'tn7krk4cekt8ag3pk6gapqqbro@google.com', - 'id': 'tn7krk4cekt8ag3pk6gapqqbro_20121022T170000Z', - 'kind': 'calendar#event', - 'organizer': { - 'displayName': 'Eben Freeman', - 'email': 'freemaneben@gmail.com', - 'self': True + "end": {"dateTime": "2012-10-22T19:00:00-07:00"}, + "etag": '"2806773858144000"', + "htmlLink": "https://www.google.com/calendar/event?eid=FOO", + "iCalUID": "tn7krk4cekt8ag3pk6gapqqbro@google.com", + "id": "tn7krk4cekt8ag3pk6gapqqbro_20121022T170000Z", + "kind": "calendar#event", + "organizer": { + "displayName": "Eben Freeman", + "email": "freemaneben@gmail.com", + "self": True, }, - 'attendees': [ - {'displayName': 'MITOC BOD', - 'email': 'mitoc-bod@mit.edu', - 'responseStatus': 'accepted'}, - {'displayName': 'Eben Freeman', - 'email': 'freemaneben@gmail.com', - 'responseStatus': 'accepted'} + "attendees": [ + { + "displayName": "MITOC BOD", + "email": "mitoc-bod@mit.edu", + "responseStatus": "accepted", + }, + { + "displayName": "Eben Freeman", + "email": "freemaneben@gmail.com", + "responseStatus": "accepted", + }, ], - 'originalStartTime': { - 'dateTime': '2012-10-22T17:00:00-07:00', - 'timeZone': 'America/Los_Angeles' + "originalStartTime": { + "dateTime": "2012-10-22T17:00:00-07:00", + "timeZone": "America/Los_Angeles", }, - 'recurringEventId': 'tn7krk4cekt8ag3pk6gapqqbro', - 'reminders': {'useDefault': True}, - 'sequence': 0, - 'start': {'dateTime': '2012-10-22T18:00:00-07:00', - 'timeZone': 'America/Los_Angeles'}, - 'status': 'confirmed', - 'summary': 'BOD Meeting', - 'updated': '2014-06-21T21:42:09.072Z' + "recurringEventId": "tn7krk4cekt8ag3pk6gapqqbro", + "reminders": {"useDefault": True}, + "sequence": 0, 
+ "start": { + "dateTime": "2012-10-22T18:00:00-07:00", + "timeZone": "America/Los_Angeles", + }, + "status": "confirmed", + "summary": "BOD Meeting", + "updated": "2014-06-21T21:42:09.072Z", } event = parse_event_response(event, False) assert isinstance(event, RecurringEventOverride) - assert event.master_event_uid == 'tn7krk4cekt8ag3pk6gapqqbro' + assert event.master_event_uid == "tn7krk4cekt8ag3pk6gapqqbro" assert event.original_start_time == arrow.get(2012, 10, 23, 00, 00, 00) @@ -555,47 +612,54 @@ def test_cancelled_override_creation(): # of recurring events) as full event objects, with status = 'cancelled'. # Test that we save this as a RecurringEventOverride rather than trying # to delete the UID. - raw_response = [{ - 'created': '2012-10-09T22:35:50.000Z', - 'creator': { - 'displayName': 'Eben Freeman', - 'email': 'freemaneben@gmail.com', - 'self': True - }, - 'end': {'dateTime': '2012-10-22T19:00:00-07:00'}, - 'etag': '"2806773858144000"', - 'htmlLink': 'https://www.google.com/calendar/event?eid=FOO', - 'iCalUID': 'tn7krk4cekt8ag3pk6gapqqbro@google.com', - 'id': 'tn7krk4cekt8ag3pk6gapqqbro_20121022T170000Z', - 'kind': 'calendar#event', - 'organizer': { - 'displayName': 'Eben Freeman', - 'email': 'freemaneben@gmail.com', - 'self': True - }, - 'attendees': [ - {'displayName': 'MITOC BOD', - 'email': 'mitoc-bod@mit.edu', - 'responseStatus': 'accepted'}, - {'displayName': 'Eben Freeman', - 'email': 'freemaneben@gmail.com', - 'responseStatus': 'accepted'} - ], - 'originalStartTime': { - 'dateTime': '2012-10-22T17:00:00-07:00', - 'timeZone': 'America/Los_Angeles' - }, - 'recurringEventId': 'tn7krk4cekt8ag3pk6gapqqbro', - 'reminders': {'useDefault': True}, - 'sequence': 0, - 'start': {'dateTime': '2012-10-22T18:00:00-07:00', - 'timeZone': 'America/Los_Angeles'}, - 'status': 'cancelled', - 'summary': 'BOD Meeting', - }] + raw_response = [ + { + "created": "2012-10-09T22:35:50.000Z", + "creator": { + "displayName": "Eben Freeman", + "email": "freemaneben@gmail.com", + "self": True, + }, + "end": {"dateTime": "2012-10-22T19:00:00-07:00"}, + "etag": '"2806773858144000"', + "htmlLink": "https://www.google.com/calendar/event?eid=FOO", + "iCalUID": "tn7krk4cekt8ag3pk6gapqqbro@google.com", + "id": "tn7krk4cekt8ag3pk6gapqqbro_20121022T170000Z", + "kind": "calendar#event", + "organizer": { + "displayName": "Eben Freeman", + "email": "freemaneben@gmail.com", + "self": True, + }, + "attendees": [ + { + "displayName": "MITOC BOD", + "email": "mitoc-bod@mit.edu", + "responseStatus": "accepted", + }, + { + "displayName": "Eben Freeman", + "email": "freemaneben@gmail.com", + "responseStatus": "accepted", + }, + ], + "originalStartTime": { + "dateTime": "2012-10-22T17:00:00-07:00", + "timeZone": "America/Los_Angeles", + }, + "recurringEventId": "tn7krk4cekt8ag3pk6gapqqbro", + "reminders": {"useDefault": True}, + "sequence": 0, + "start": { + "dateTime": "2012-10-22T18:00:00-07:00", + "timeZone": "America/Los_Angeles", + }, + "status": "cancelled", + "summary": "BOD Meeting", + } + ] provider = GoogleEventsProvider(1, 1) - provider._get_raw_events = mock.MagicMock( - return_value=raw_response) - updates = provider.sync_events('uid', 1) + provider._get_raw_events = mock.MagicMock(return_value=raw_response) + updates = provider.sync_events("uid", 1) assert updates[0].cancelled is True diff --git a/inbox/test/events/test_ics_parsing.py b/inbox/test/events/test_ics_parsing.py index adc39de86..3c6f506cd 100644 --- a/inbox/test/events/test_ics_parsing.py +++ b/inbox/test/events/test_ics_parsing.py @@ -5,26 
+5,32 @@ from inbox.models.event import Event, RecurringEvent from inbox.events.util import MalformedEventError from inbox.events.ical import events_from_ics, import_attached_events -from inbox.test.util.base import (absolute_path, add_fake_calendar, - generic_account, add_fake_msg_with_calendar_part) +from inbox.test.util.base import ( + absolute_path, + add_fake_calendar, + generic_account, + add_fake_msg_with_calendar_part, +) -FIXTURES = './events/fixtures/' +FIXTURES = "./events/fixtures/" def test_invalid_ical(db, default_account): with pytest.raises(MalformedEventError): - events_from_ics(default_account.namespace, - default_account.emailed_events_calendar, "asdf") + events_from_ics( + default_account.namespace, default_account.emailed_events_calendar, "asdf" + ) def test_windows_tz_ical(db, default_account): data = None - with open(absolute_path(FIXTURES + 'windows_event.ics')) as fd: + with open(absolute_path(FIXTURES + "windows_event.ics")) as fd: data = fd.read() - events = events_from_ics(default_account.namespace, - default_account.emailed_events_calendar, data) - events = events['invites'] + events = events_from_ics( + default_account.namespace, default_account.emailed_events_calendar, data + ) + events = events["invites"] assert len(events) == 1, "There should be only one event in the test file" ev = events[0] @@ -32,17 +38,18 @@ def test_windows_tz_ical(db, default_account): assert ev.end == arrow.get(2015, 2, 20, 9, 0) assert ev.title == "Pommes" assert len(ev.participants) == 1 - assert ev.participants[0]['email'] == 'karim@nilas.com' + assert ev.participants[0]["email"] == "karim@nilas.com" def test_icloud_allday_event(db, default_account): data = None - with open(absolute_path(FIXTURES + 'icloud_oneday_event.ics')) as fd: + with open(absolute_path(FIXTURES + "icloud_oneday_event.ics")) as fd: data = fd.read() - events = events_from_ics(default_account.namespace, - default_account.emailed_events_calendar, data) - events = events['invites'] + events = events_from_ics( + default_account.namespace, default_account.emailed_events_calendar, data + ) + events = events["invites"] assert len(events) == 1, "There should be only one event in the test file" ev = events[0] @@ -51,17 +58,18 @@ def test_icloud_allday_event(db, default_account): assert ev.end == arrow.get(2015, 3, 17, 0, 0) assert len(ev.participants) == 2 - assert ev.participants[0]['email'] == 'karim@nilas.com' + assert ev.participants[0]["email"] == "karim@nilas.com" def test_iphone_through_exchange(db, default_account): data = None - with open(absolute_path(FIXTURES + 'iphone_through_exchange.ics')) as fd: + with open(absolute_path(FIXTURES + "iphone_through_exchange.ics")) as fd: data = fd.read() - events = events_from_ics(default_account.namespace, - default_account.emailed_events_calendar, data) - events = events['invites'] + events = events_from_ics( + default_account.namespace, default_account.emailed_events_calendar, data + ) + events = events["invites"] assert len(events) == 1, "There should be only one event in the test file" ev = events[0] @@ -70,38 +78,43 @@ def test_iphone_through_exchange(db, default_account): def test_event_update(db, default_account, message): - add_fake_calendar(db.session, default_account.namespace.id, - name="Emailed events", read_only=True) + add_fake_calendar( + db.session, default_account.namespace.id, name="Emailed events", read_only=True + ) - with open(absolute_path(FIXTURES + 'gcal_v1.ics')) as fd: + with open(absolute_path(FIXTURES + "gcal_v1.ics")) as fd: ics_data = 
fd.read() - msg = add_fake_msg_with_calendar_part(db.session, default_account, - ics_data) + msg = add_fake_msg_with_calendar_part(db.session, default_account, ics_data) import_attached_events(db.session, default_account, msg) db.session.commit() - ev = db.session.query(Event).filter( - Event.uid == "jvbroggos139aumnj4p5og9rd0@google.com").one() + ev = ( + db.session.query(Event) + .filter(Event.uid == "jvbroggos139aumnj4p5og9rd0@google.com") + .one() + ) - assert ev.location == ("Olympia Hall, 28 Boulevard des Capucines, " - "75009 Paris, France") + assert ev.location == ( + "Olympia Hall, 28 Boulevard des Capucines, " "75009 Paris, France" + ) - with open(absolute_path(FIXTURES + 'gcal_v2.ics')) as fd: + with open(absolute_path(FIXTURES + "gcal_v2.ics")) as fd: ics_data = fd.read() - msg = add_fake_msg_with_calendar_part( - db.session, default_account, ics_data) + msg = add_fake_msg_with_calendar_part(db.session, default_account, ics_data) import_attached_events(db.session, default_account, msg) db.session.commit() - ev = db.session.query(Event).filter( - Event.uid == "jvbroggos139aumnj4p5og9rd0@google.com").one() + ev = ( + db.session.query(Event) + .filter(Event.uid == "jvbroggos139aumnj4p5og9rd0@google.com") + .one() + ) - assert ev.location == (u"Le Zenith, 211 Avenue Jean Jaures, " - "75019 Paris, France") + assert ev.location == (u"Le Zenith, 211 Avenue Jean Jaures, " "75019 Paris, France") # This test checks that: @@ -111,30 +124,30 @@ def test_event_update(db, default_account, message): def test_self_sent_update(db, default_account, message): # Create the calendars - add_fake_calendar(db.session, default_account.namespace.id, - name="Emailed events", read_only=True) + add_fake_calendar( + db.session, default_account.namespace.id, name="Emailed events", read_only=True + ) - default_calendar = add_fake_calendar(db.session, - default_account.namespace.id, - name="Calendar", read_only=False) + default_calendar = add_fake_calendar( + db.session, default_account.namespace.id, name="Calendar", read_only=False + ) # Import the self-sent event. - with open(absolute_path(FIXTURES + 'self_sent_v1.ics')) as fd: + with open(absolute_path(FIXTURES + "self_sent_v1.ics")) as fd: ics_data = fd.read() - msg = add_fake_msg_with_calendar_part(db.session, default_account, - ics_data) + msg = add_fake_msg_with_calendar_part(db.session, default_account, ics_data) msg.from_addr = [(default_account.name, default_account.email_address)] import_attached_events(db.session, default_account, msg) db.session.commit() - evs = db.session.query(Event).filter( - Event.uid == "burgos@google.com").all() + evs = db.session.query(Event).filter(Event.uid == "burgos@google.com").all() assert len(evs) == 1 ev = evs[0] - assert ev.location == ("Olympia Hall, 28 Boulevard des Capucines, " - "75009 Paris, France") + assert ev.location == ( + "Olympia Hall, 28 Boulevard des Capucines, " "75009 Paris, France" + ) # Create a copy of the event, and store it in the default calendar. 
event_copy = Event() @@ -143,70 +156,76 @@ def test_self_sent_update(db, default_account, message): db.session.add(event_copy) db.session.commit() - with open(absolute_path(FIXTURES + 'self_sent_v2.ics')) as fd: + with open(absolute_path(FIXTURES + "self_sent_v2.ics")) as fd: ics_data = fd.read() - msg = add_fake_msg_with_calendar_part( - db.session, default_account, ics_data) + msg = add_fake_msg_with_calendar_part(db.session, default_account, ics_data) import_attached_events(db.session, default_account, msg) db.session.commit() - evs = db.session.query(Event).filter( - Event.uid == "burgos@google.com").all() + evs = db.session.query(Event).filter(Event.uid == "burgos@google.com").all() # Check that the event in the default calendar didn't get updated. assert len(evs) == 2 for ev in evs: db.session.refresh(ev) if ev.calendar_id == default_calendar.id: - assert ev.location == ("Olympia Hall, 28 Boulevard des Capucines, " - "75009 Paris, France") + assert ev.location == ( + "Olympia Hall, 28 Boulevard des Capucines, " "75009 Paris, France" + ) else: - assert ev.location == (u"Le Zenith, 211 Avenue Jean Jaures, " - "75019 Paris, France") + assert ev.location == ( + u"Le Zenith, 211 Avenue Jean Jaures, " "75019 Paris, France" + ) def test_recurring_ical(db, default_account): - with open(absolute_path(FIXTURES + 'gcal_recur.ics')) as fd: + with open(absolute_path(FIXTURES + "gcal_recur.ics")) as fd: ics_data = fd.read() - msg = add_fake_msg_with_calendar_part( - db.session, default_account, ics_data) + msg = add_fake_msg_with_calendar_part(db.session, default_account, ics_data) import_attached_events(db.session, default_account, msg) db.session.commit() - ev = db.session.query(Event).filter( - Event.uid == "flg2h6nam1cb1uqetgfkslrfrc@google.com").one() + ev = ( + db.session.query(Event) + .filter(Event.uid == "flg2h6nam1cb1uqetgfkslrfrc@google.com") + .one() + ) assert isinstance(ev, RecurringEvent) assert isinstance(ev.recurring, list) - assert ev.start_timezone == 'America/Los_Angeles' + assert ev.start_timezone == "America/Los_Angeles" def test_event_no_end_time(db, default_account): # With no end time, import should fail - with open(absolute_path(FIXTURES + 'meetup_infinite.ics')) as fd: + with open(absolute_path(FIXTURES + "meetup_infinite.ics")) as fd: ics_data = fd.read() add_fake_msg_with_calendar_part(db.session, default_account, ics_data) # doesn't raise an exception (to not derail message parsing, but also # doesn't create an event) - ev = db.session.query(Event).filter( - Event.uid == "nih2h78am1cb1uqetgfkslrfrc@meetup.com").first() + ev = ( + db.session.query(Event) + .filter(Event.uid == "nih2h78am1cb1uqetgfkslrfrc@meetup.com") + .first() + ) assert not ev def test_event_no_participants(db, default_account): data = None - with open(absolute_path(FIXTURES + 'event_with_no_participants.ics')) as fd: + with open(absolute_path(FIXTURES + "event_with_no_participants.ics")) as fd: data = fd.read() - events = events_from_ics(default_account.namespace, - default_account.emailed_events_calendar, data) - events = events['invites'] + events = events_from_ics( + default_account.namespace, default_account.emailed_events_calendar, data + ) + events = events["invites"] assert len(events) == 1, "There should be only one event in the test file" ev = events[0] assert len(ev.participants) == 0 @@ -214,12 +233,13 @@ def test_event_no_participants(db, default_account): def test_multiple_events(db, default_account): data = None - with open(absolute_path(FIXTURES + 'multiple_events.ics')) as fd: + with 
open(absolute_path(FIXTURES + "multiple_events.ics")) as fd: data = fd.read() - events = events_from_ics(default_account.namespace, - default_account.emailed_events_calendar, data) - events = events['invites'] + events = events_from_ics( + default_account.namespace, default_account.emailed_events_calendar, data + ) + events = events["invites"] assert len(events) == 2 ev0 = events[0] ev1 = events[1] @@ -230,25 +250,33 @@ def test_multiple_events(db, default_account): def test_icalendar_import(db, generic_account, message): - add_fake_calendar(db.session, generic_account.namespace.id, - name="Emailed events", read_only=True) + add_fake_calendar( + db.session, generic_account.namespace.id, name="Emailed events", read_only=True + ) - with open(absolute_path(FIXTURES + 'invite_w_rsvps1.ics')) as fd: + with open(absolute_path(FIXTURES + "invite_w_rsvps1.ics")) as fd: ics_data = fd.read() - msg = add_fake_msg_with_calendar_part( - db.session, generic_account, ics_data) + msg = add_fake_msg_with_calendar_part(db.session, generic_account, ics_data) import_attached_events(db.session, generic_account, msg) - ev = db.session.query(Event).filter( - Event.uid == ("040000008200E00074C5B7101A82E00800000000" - "F9125A30B06BD001000000000000000010000000" - "9D791C7548BFD144BFA54F14213CAD25")).one() + ev = ( + db.session.query(Event) + .filter( + Event.uid + == ( + "040000008200E00074C5B7101A82E00800000000" + "F9125A30B06BD001000000000000000010000000" + "9D791C7548BFD144BFA54F14213CAD25" + ) + ) + .one() + ) assert len(ev.participants) == 2 for participant in ev.participants: - assert participant['status'] == 'noreply' + assert participant["status"] == "noreply" def test_rsvp_merging(db, generic_account, message): @@ -257,189 +285,226 @@ def test_rsvp_merging(db, generic_account, message): # autoimported invites end up in the "emailed events" calendar. # However, we're simulating invite sending, which supposes using # an event from another calendar. 
- add_fake_calendar(db.session, generic_account.namespace.id, - name="Emailed events", read_only=True) - cal2 = add_fake_calendar(db.session, generic_account.namespace.id, - name="Random calendar", read_only=True) - - with open(absolute_path(FIXTURES + 'invite_w_rsvps1.ics')) as fd: + add_fake_calendar( + db.session, generic_account.namespace.id, name="Emailed events", read_only=True + ) + cal2 = add_fake_calendar( + db.session, generic_account.namespace.id, name="Random calendar", read_only=True + ) + + with open(absolute_path(FIXTURES + "invite_w_rsvps1.ics")) as fd: ics_data = fd.read() - msg = add_fake_msg_with_calendar_part( - db.session, generic_account, ics_data) + msg = add_fake_msg_with_calendar_part(db.session, generic_account, ics_data) import_attached_events(db.session, generic_account, msg) - ev = db.session.query(Event).filter( - Event.uid == ("040000008200E00074C5B7101A82E00800000000" - "F9125A30B06BD001000000000000000010000000" - "9D791C7548BFD144BFA54F14213CAD25")).one() + ev = ( + db.session.query(Event) + .filter( + Event.uid + == ( + "040000008200E00074C5B7101A82E00800000000" + "F9125A30B06BD001000000000000000010000000" + "9D791C7548BFD144BFA54F14213CAD25" + ) + ) + .one() + ) assert len(ev.participants) == 2 for participant in ev.participants: - assert participant['status'] == 'noreply' + assert participant["status"] == "noreply" ev.public_id = "cccc" ev.calendar = cal2 - with open(absolute_path(FIXTURES + 'invite_w_rsvps2.ics')) as fd: + with open(absolute_path(FIXTURES + "invite_w_rsvps2.ics")) as fd: ics_data = fd.read() - msg2 = add_fake_msg_with_calendar_part( - db.session, generic_account, ics_data) + msg2 = add_fake_msg_with_calendar_part(db.session, generic_account, ics_data) import_attached_events(db.session, generic_account, msg2) - ev = db.session.query(Event).filter( - Event.uid == ("040000008200E00074C5B7101A82E00800000000" - "F9125A30B06BD001000000000000000010000000" - "9D791C7548BFD144BFA54F14213CAD25")).one() + ev = ( + db.session.query(Event) + .filter( + Event.uid + == ( + "040000008200E00074C5B7101A82E00800000000" + "F9125A30B06BD001000000000000000010000000" + "9D791C7548BFD144BFA54F14213CAD25" + ) + ) + .one() + ) assert len(ev.participants) == 2 for participant in ev.participants: - if participant['email'] == 'test1@example.com': - assert participant['status'] == 'maybe' - assert participant['name'] == 'Inbox Apptest' - elif participant['email'] == 'karim@example.com': - assert participant['status'] == 'noreply' + if participant["email"] == "test1@example.com": + assert participant["status"] == "maybe" + assert participant["name"] == "Inbox Apptest" + elif participant["email"] == "karim@example.com": + assert participant["status"] == "noreply" - with open(absolute_path(FIXTURES + 'invite_w_rsvps3.ics')) as fd: + with open(absolute_path(FIXTURES + "invite_w_rsvps3.ics")) as fd: ics_data = fd.read() - msg3 = add_fake_msg_with_calendar_part( - db.session, generic_account, ics_data) + msg3 = add_fake_msg_with_calendar_part(db.session, generic_account, ics_data) import_attached_events(db.session, generic_account, msg3) - ev = db.session.query(Event).filter( - Event.uid == ("040000008200E00074C5B7101A82E00800000000" - "F9125A30B06BD001000000000000000010000000" - "9D791C7548BFD144BFA54F14213CAD25")).one() + ev = ( + db.session.query(Event) + .filter( + Event.uid + == ( + "040000008200E00074C5B7101A82E00800000000" + "F9125A30B06BD001000000000000000010000000" + "9D791C7548BFD144BFA54F14213CAD25" + ) + ) + .one() + ) assert len(ev.participants) == 2 for 
participant in ev.participants: - if participant['email'] == 'test1@example.com': - assert participant['status'] == 'maybe' - assert participant['name'] == 'Inbox Apptest' - elif participant['email'] == 'karim@example.com': - assert participant['name'] == 'Karim Hamidou' - assert participant['status'] == 'yes' + if participant["email"] == "test1@example.com": + assert participant["status"] == "maybe" + assert participant["name"] == "Inbox Apptest" + elif participant["email"] == "karim@example.com": + assert participant["name"] == "Karim Hamidou" + assert participant["status"] == "yes" # Check that we're handling sequence numbers correctly - i.e: an RSVP # with a sequence number < to the event's sequence number should be # discarded. ev.sequence_number += 1 - with open(absolute_path(FIXTURES + 'invite_w_rsvps_4.ics')) as fd: + with open(absolute_path(FIXTURES + "invite_w_rsvps_4.ics")) as fd: ics_data = fd.read() - msg4 = add_fake_msg_with_calendar_part( - db.session, generic_account, ics_data) + msg4 = add_fake_msg_with_calendar_part(db.session, generic_account, ics_data) import_attached_events(db.session, generic_account, msg3) - ev = db.session.query(Event).filter( - Event.uid == ("040000008200E00074C5B7101A82E00800000000" - "F9125A30B06BD001000000000000000010000000" - "9D791C7548BFD144BFA54F14213CAD25")).one() + ev = ( + db.session.query(Event) + .filter( + Event.uid + == ( + "040000008200E00074C5B7101A82E00800000000" + "F9125A30B06BD001000000000000000010000000" + "9D791C7548BFD144BFA54F14213CAD25" + ) + ) + .one() + ) assert len(ev.participants) == 2 for participant in ev.participants: - if participant['email'] == 'test1@example.com': - assert participant['status'] == 'maybe' - assert participant['name'] == 'Inbox Apptest' - elif participant['email'] == 'karim@example.com': - assert participant['name'] == 'Karim Hamidou' - assert participant['status'] == 'yes' + if participant["email"] == "test1@example.com": + assert participant["status"] == "maybe" + assert participant["name"] == "Inbox Apptest" + elif participant["email"] == "karim@example.com": + assert participant["name"] == "Karim Hamidou" + assert participant["status"] == "yes" def test_cancelled_event(db, default_account): - with open(absolute_path(FIXTURES + 'google_cancelled1.ics')) as fd: + with open(absolute_path(FIXTURES + "google_cancelled1.ics")) as fd: ics_data = fd.read() - msg = add_fake_msg_with_calendar_part( - db.session, default_account, ics_data) + msg = add_fake_msg_with_calendar_part(db.session, default_account, ics_data) import_attached_events(db.session, default_account, msg) db.session.commit() - ev = db.session.query(Event).filter( - Event.uid == "c74p2nmutcd0kt69ku7rs8vu2g@google.com").one() + ev = ( + db.session.query(Event) + .filter(Event.uid == "c74p2nmutcd0kt69ku7rs8vu2g@google.com") + .one() + ) - assert ev.status == 'confirmed' + assert ev.status == "confirmed" - with open(absolute_path(FIXTURES + 'google_cancelled2.ics')) as fd: + with open(absolute_path(FIXTURES + "google_cancelled2.ics")) as fd: ics_data = fd.read() - msg2 = add_fake_msg_with_calendar_part( - db.session, default_account, ics_data) + msg2 = add_fake_msg_with_calendar_part(db.session, default_account, ics_data) import_attached_events(db.session, default_account, msg2) db.session.commit() - ev = db.session.query(Event).filter( - Event.uid == "c74p2nmutcd0kt69ku7rs8vu2g@google.com").one() + ev = ( + db.session.query(Event) + .filter(Event.uid == "c74p2nmutcd0kt69ku7rs8vu2g@google.com") + .one() + ) - assert ev.status == 'cancelled' + 
assert ev.status == "cancelled" def test_icloud_cancelled_event(db, default_account): - with open(absolute_path(FIXTURES + 'icloud_cancelled1.ics')) as fd: + with open(absolute_path(FIXTURES + "icloud_cancelled1.ics")) as fd: ics_data = fd.read() - msg = add_fake_msg_with_calendar_part( - db.session, default_account, ics_data) + msg = add_fake_msg_with_calendar_part(db.session, default_account, ics_data) import_attached_events(db.session, default_account, msg) db.session.commit() - ev = db.session.query(Event).filter( - Event.uid == "5919D444-7C99-4687-A526-FC5D10091318").one() + ev = ( + db.session.query(Event) + .filter(Event.uid == "5919D444-7C99-4687-A526-FC5D10091318") + .one() + ) - assert ev.status == 'confirmed' + assert ev.status == "confirmed" - with open(absolute_path(FIXTURES + 'icloud_cancelled2.ics')) as fd: + with open(absolute_path(FIXTURES + "icloud_cancelled2.ics")) as fd: ics_data = fd.read() - msg = add_fake_msg_with_calendar_part( - db.session, default_account, ics_data) + msg = add_fake_msg_with_calendar_part(db.session, default_account, ics_data) import_attached_events(db.session, default_account, msg) db.session.commit() - ev = db.session.query(Event).filter( - Event.uid == "5919D444-7C99-4687-A526-FC5D10091318").one() + ev = ( + db.session.query(Event) + .filter(Event.uid == "5919D444-7C99-4687-A526-FC5D10091318") + .one() + ) - assert ev.status == 'cancelled' + assert ev.status == "cancelled" def test_multiple_summaries(db, default_account): data = None - with open(absolute_path(FIXTURES + 'multiple_summaries.ics')) as fd: + with open(absolute_path(FIXTURES + "multiple_summaries.ics")) as fd: data = fd.read() - events = events_from_ics(default_account.namespace, - default_account.emailed_events_calendar, data) - events = events['invites'] + events = events_from_ics( + default_account.namespace, default_account.emailed_events_calendar, data + ) + events = events["invites"] assert len(events) == 1 - assert events[0].title == 'The Strokes - Is this it?' + assert events[0].title == "The Strokes - Is this it?" def test_invalid_rsvp(db, default_account): # Test that we don't save an RSVP reply with an invalid id. data = None - with open(absolute_path(FIXTURES + 'invalid_rsvp.ics')) as fd: + with open(absolute_path(FIXTURES + "invalid_rsvp.ics")) as fd: data = fd.read() - msg = add_fake_msg_with_calendar_part( - db.session, default_account, data) + msg = add_fake_msg_with_calendar_part(db.session, default_account, data) import_attached_events(db.session, default_account, msg) db.session.commit() - ev = db.session.query(Event).filter( - Event.uid == "234252$cccc@nylas.com").all() + ev = db.session.query(Event).filter(Event.uid == "234252$cccc@nylas.com").all() assert len(ev) == 0 @@ -448,34 +513,30 @@ def test_rsvp_for_other_provider(db, default_account): # Test that we don't save RSVP replies which aren't replies to a Nylas # invite. 
data = None - with open(absolute_path(FIXTURES + 'invalid_rsvp2.ics')) as fd: + with open(absolute_path(FIXTURES + "invalid_rsvp2.ics")) as fd: data = fd.read() - msg = add_fake_msg_with_calendar_part( - db.session, default_account, data) + msg = add_fake_msg_with_calendar_part(db.session, default_account, data) import_attached_events(db.session, default_account, msg) db.session.commit() - ev = db.session.query(Event).filter( - Event.uid == "234252cccc@google.com").all() + ev = db.session.query(Event).filter(Event.uid == "234252cccc@google.com").all() assert len(ev) == 0 def test_truncate_bogus_sequence_numbers(db, default_account): data = None - with open(absolute_path(FIXTURES + 'bogus_sequence_number.ics')) as fd: + with open(absolute_path(FIXTURES + "bogus_sequence_number.ics")) as fd: data = fd.read() - msg = add_fake_msg_with_calendar_part( - db.session, default_account, data) + msg = add_fake_msg_with_calendar_part(db.session, default_account, data) import_attached_events(db.session, default_account, msg) db.session.commit() - ev = db.session.query(Event).filter( - Event.uid == "234252cccc@google.com").one() + ev = db.session.query(Event).filter(Event.uid == "234252cccc@google.com").one() # Check that the sequence number got truncated to the biggest possible # number. @@ -483,12 +544,13 @@ def test_truncate_bogus_sequence_numbers(db, default_account): def test_handle_missing_sequence_number(db, default_account): - with open(absolute_path(FIXTURES + 'event_without_sequence.ics')) as fd: + with open(absolute_path(FIXTURES + "event_without_sequence.ics")) as fd: data = fd.read() - events = events_from_ics(default_account.namespace, - default_account.emailed_events_calendar, data) - events = events['invites'] + events = events_from_ics( + default_account.namespace, default_account.emailed_events_calendar, data + ) + events = events["invites"] assert len(events) == 1 ev = events[0] assert ev.sequence_number == 0 diff --git a/inbox/test/events/test_inviting.py b/inbox/test/events/test_inviting.py index aa812c067..430bfd3d9 100644 --- a/inbox/test/events/test_inviting.py +++ b/inbox/test/events/test_inviting.py @@ -7,18 +7,17 @@ def test_invite_generation(event, default_account): from inbox.events.ical import generate_icalendar_invite event.sequence_number = 1 - event.participants = [{'email': 'helena@nylas.com'}, - {'email': 'myles@nylas.com'}] + event.participants = [{"email": "helena@nylas.com"}, {"email": "myles@nylas.com"}] cal = generate_icalendar_invite(event) - assert cal['method'] == 'REQUEST' + assert cal["method"] == "REQUEST" for component in cal.walk(): if component.name == "VEVENT": - assert component.get('summary') == event.title - assert int(component.get('sequence')) == event.sequence_number - assert component.get('location') == event.location + assert component.get("summary") == event.title + assert int(component.get("sequence")) == event.sequence_number + assert component.get("location") == event.location - attendees = component.get('attendee', []) + attendees = component.get("attendee", []) # the iCalendar python module doesn't return a list when # there's only one attendee. Go figure. 
@@ -28,20 +27,21 @@ def test_invite_generation(event, default_account): for attendee in attendees: email = unicode(attendee) # strip mailto: if it exists - if email.lower().startswith('mailto:'): + if email.lower().startswith("mailto:"): email = email[7:] - assert email in ['helena@nylas.com', 'myles@nylas.com'] + assert email in ["helena@nylas.com", "myles@nylas.com"] def test_message_generation(event, default_account): from inbox.events.ical import generate_invite_message - event.title = 'A long walk on the beach' - event.participants = [{'email': 'helena@nylas.com'}] - msg = generate_invite_message('empty', event, default_account) + + event.title = "A long walk on the beach" + event.participants = [{"email": "helena@nylas.com"}] + msg = generate_invite_message("empty", event, default_account) # Make sure the From header is set correctly - assert msg.headers['From'] == "automated@notifications.nylas.com" + assert msg.headers["From"] == "automated@notifications.nylas.com" # Check that we have an email with an HTML part, a plain text part, a # text/calendar with METHOD=REQUEST and an attachment. @@ -50,16 +50,20 @@ def test_message_generation(event, default_account): format_type = mimepart.content_type.format_type subtype = mimepart.content_type.subtype - if (format_type, subtype) in [('text', 'plain'), ('text', 'html'), - ('text', 'calendar; method=request'), - ('application', 'ics')]: + if (format_type, subtype) in [ + ("text", "plain"), + ("text", "html"), + ("text", "calendar; method=request"), + ("application", "ics"), + ]: count += 1 assert count == 3 def test_unicode_message_generation(event, default_account): from inbox.events.ical import generate_invite_message - event.title = u'Dîner chez François et Hélène' + + event.title = u"Dîner chez François et Hélène" event.description = u"""Cher Paul, Je suis heureux de vous inviter à un diner le samedi 19 novembre 2011 à 19h30 au chalet de l'île Daumesnil dans le bois de Vincennes. @@ -68,8 +72,8 @@ def test_unicode_message_generation(event, default_account): Hélène (Ἑλένη) """ - event.participants = [{'email': 'hélène@nylas.com'}] - generate_invite_message('empty', event, default_account) + event.participants = [{"email": "hélène@nylas.com"}] + generate_invite_message("empty", event, default_account) # That's it --- we just needed to make sure message # generation shouldn't blow up. 
diff --git a/inbox/test/events/test_merge.py b/inbox/test/events/test_merge.py index caf9b61f8..5ba29a4e7 100644 --- a/inbox/test/events/test_merge.py +++ b/inbox/test/events/test_merge.py @@ -3,23 +3,37 @@ def fake_event(): - return Event(title="The fifth element", - participants=[{"name": "Ronald Zubar", - "email": "ronald@example.com", - "status": "noreply", - "notes": "required"}]) + return Event( + title="The fifth element", + participants=[ + { + "name": "Ronald Zubar", + "email": "ronald@example.com", + "status": "noreply", + "notes": "required", + } + ], + ) def fake_event2(): - return Event(title="The fifth element", - participants=[{"name": "Ronald Zubar", - "email": "ronald@example.com", - "status": "noreply", - "notes": "required"}, - {"name": "Ronald McDonald", - "email": "ronald@mcdonalds.com", - "status": "noreply", - "notes": "required"}]) + return Event( + title="The fifth element", + participants=[ + { + "name": "Ronald Zubar", + "email": "ronald@example.com", + "status": "noreply", + "notes": "required", + }, + { + "name": "Ronald McDonald", + "email": "ronald@mcdonalds.com", + "status": "noreply", + "notes": "required", + }, + ], + ) def test_overwrite(): diff --git a/inbox/test/events/test_recurrence.py b/inbox/test/events/test_recurrence.py index c46bc57e5..7f97fc80b 100644 --- a/inbox/test/events/test_recurrence.py +++ b/inbox/test/events/test_recurrence.py @@ -7,10 +7,15 @@ from inbox.models.event import Event, RecurringEvent, RecurringEventOverride from inbox.models.when import Date, Time, DateSpan, TimeSpan from inbox.events.remote_sync import handle_event_updates -from inbox.events.recurring import (link_events, get_start_times, - parse_exdate, rrule_to_json) +from inbox.events.recurring import ( + link_events, + get_start_times, + parse_exdate, + rrule_to_json, +) from nylas.logging import get_logger + log = get_logger() TEST_RRULE = ["RRULE:FREQ=WEEKLY;UNTIL=20140918T203000Z;BYDAY=TH"] @@ -20,37 +25,45 @@ TEST_EXDATE_RULE.extend(TEST_EXDATE) -def recurring_event(db, account, calendar, rrule, - start=arrow.get(2014, 8, 7, 20, 30, 00), - end=arrow.get(2014, 8, 7, 21, 30, 00), - all_day=False, commit=True): +def recurring_event( + db, + account, + calendar, + rrule, + start=arrow.get(2014, 8, 7, 20, 30, 00), + end=arrow.get(2014, 8, 7, 21, 30, 00), + all_day=False, + commit=True, +): # commit: are we returning a commited instance object? 
if commit: - ev = db.session.query(Event).filter_by(uid='myuid').first() + ev = db.session.query(Event).filter_by(uid="myuid").first() if ev: db.session.delete(ev) - ev = Event(namespace_id=account.namespace.id, - calendar=calendar, - title='recurring', - description='', - uid='myuid', - location='', - busy=False, - read_only=False, - reminders='', - recurrence=rrule, - start=start, - end=end, - all_day=all_day, - is_owner=False, - participants=[], - provider_name='inbox', - raw_data='', - original_start_tz='America/Los_Angeles', - original_start_time=None, - master_event_uid=None, - source='local') + ev = Event( + namespace_id=account.namespace.id, + calendar=calendar, + title="recurring", + description="", + uid="myuid", + location="", + busy=False, + read_only=False, + reminders="", + recurrence=rrule, + start=start, + end=end, + all_day=all_day, + is_owner=False, + participants=[], + provider_name="inbox", + raw_data="", + original_start_tz="America/Los_Angeles", + original_start_time=None, + master_event_uid=None, + source="local", + ) if commit: db.session.add(ev) @@ -69,16 +82,17 @@ def recurring_override(db, master, original_start, start, end): def recurring_override_instance(db, master, original_start, start, end): # Returns an Override that has the master's UID, but is not linked yet - override_uid = '{}_{}'.format(master.uid, - original_start.strftime("%Y%m%dT%H%M%SZ")) + override_uid = "{}_{}".format(master.uid, original_start.strftime("%Y%m%dT%H%M%SZ")) ev = db.session.query(Event).filter_by(uid=override_uid).first() if ev: db.session.delete(ev) db.session.commit() - ev = Event(original_start_time=original_start, - master_event_uid=master.uid, - namespace_id=master.namespace_id, - calendar_id=master.calendar_id) + ev = Event( + original_start_time=original_start, + master_event_uid=master.uid, + namespace_id=master.namespace_id, + calendar_id=master.calendar_id, + ) ev.update(master) ev.uid = override_uid ev.start = start @@ -102,30 +116,35 @@ def test_link_events_from_override(db, default_account, calendar, other_calendar # from the override. master = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) original_start = parse_exdate(master)[0] - override = Event(original_start_time=original_start, - master_event_uid=master.uid, - namespace_id=master.namespace_id, - calendar_id=calendar.id, - source='local') + override = Event( + original_start_time=original_start, + master_event_uid=master.uid, + namespace_id=master.namespace_id, + calendar_id=calendar.id, + source="local", + ) assert isinstance(override, RecurringEventOverride) link_events(db.session, override) assert override.master == master -def test_linking_events_from_different_calendars(db, default_account, - calendar, other_calendar): +def test_linking_events_from_different_calendars( + db, default_account, calendar, other_calendar +): # Test that two events with the same UID but in different calendars don't # get linked together. This is important because with the Google API, a # recurring events can be in two calendars and have the same UID. # In this case, we create two different recurring events. 
master = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) original_start = parse_exdate(master)[0] - override = Event(original_start_time=original_start, - master_event_uid=master.uid, - namespace_id=master.namespace_id, - calendar_id=other_calendar.id, - uid='blah', - source='local') + override = Event( + original_start_time=original_start, + master_event_uid=master.uid, + namespace_id=master.namespace_id, + calendar_id=other_calendar.id, + uid="blah", + source="local", + ) assert isinstance(override, RecurringEventOverride) link_events(db.session, override) @@ -138,23 +157,26 @@ def test_link_events_from_master(db, default_account, calendar): # from the master event. master = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) original_start = parse_exdate(master)[0] - override = recurring_override_instance(db, master, original_start, - master.start, master.end) + override = recurring_override_instance( + db, master, original_start, master.start, master.end + ) assert isinstance(master, RecurringEvent) assert len(link_events(db.session, master)) == 1 assert override in master.overrides assert override.uid in [o.uid for o in master.overrides] -def test_link_events_from_master_diff_calendars(db, default_account, calendar, - other_calendar): +def test_link_events_from_master_diff_calendars( + db, default_account, calendar, other_calendar +): # Same as the previous test except that we check that it doesn't work across # calendars (see test_link_events_from_master_diff_calendars for more # details). master = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) original_start = parse_exdate(master)[0] - override = recurring_override_instance(db, master, original_start, - master.start, master.end) + override = recurring_override_instance( + db, master, original_start, master.start, master.end + ) override.calendar = other_calendar assert isinstance(master, RecurringEvent) o = link_events(db.session, master) @@ -174,25 +196,38 @@ def test_rrule_parsing(db, default_account, calendar): def test_all_day_rrule_parsing(db, default_account, calendar): - event = recurring_event(db, default_account, calendar, ALL_DAY_RRULE, - start=arrow.get(2014, 8, 7), - end=arrow.get(2014, 8, 7), - all_day=True) + event = recurring_event( + db, + default_account, + calendar, + ALL_DAY_RRULE, + start=arrow.get(2014, 8, 7), + end=arrow.get(2014, 8, 7), + all_day=True, + ) g = get_start_times(event) assert len(g) == 6 -@pytest.mark.parametrize("rule", [ - "RRULE:FREQ=DAILY;UNTIL=20160913", - "RRULE:FREQ=DAILY;UNTIL=20160913T070000Z", - "RRULE:FREQ=DAILY;UNTIL=20160913T070000" -]) +@pytest.mark.parametrize( + "rule", + [ + "RRULE:FREQ=DAILY;UNTIL=20160913", + "RRULE:FREQ=DAILY;UNTIL=20160913T070000Z", + "RRULE:FREQ=DAILY;UNTIL=20160913T070000", + ], +) def test_all_day_rrule_parsing_utc(db, default_account, calendar, rule): # Use an RRULE with timezone away until date + all day event - event = recurring_event(db, default_account, calendar, rule, - start=arrow.get(2016, 9, 10), - end=arrow.get(2016, 9, 13), - all_day=True) + event = recurring_event( + db, + default_account, + calendar, + rule, + start=arrow.get(2016, 9, 10), + end=arrow.get(2016, 9, 13), + all_day=True, + ) start_boundary = datetime.datetime(2016, 9, 8, 1, 21, 55) end_boundary = datetime.datetime(2016, 9, 16, 0, 31, 55) @@ -235,9 +270,14 @@ def test_inflate_across_DST(db, default_account, calendar): # adjust the base time accordingly to account for the new UTC offset. 
# Daylight Savings for US/PST: March 8, 2015 - Nov 1, 2015 dst_rrule = ["RRULE:FREQ=WEEKLY;BYDAY=TU"] - dst_event = recurring_event(db, default_account, calendar, dst_rrule, - start=arrow.get(2015, 03, 03, 03, 03, 03), - end=arrow.get(2015, 03, 03, 04, 03, 03)) + dst_event = recurring_event( + db, + default_account, + calendar, + dst_rrule, + start=arrow.get(2015, 03, 03, 03, 03, 03), + end=arrow.get(2015, 03, 03, 04, 03, 03), + ) g = get_start_times(dst_event, end=arrow.get(2015, 03, 21)) # In order for this event to occur at the same local time, the recurrence @@ -255,9 +295,14 @@ def test_inflate_across_DST(db, default_account, calendar): assert time.astimezone(local_tz).hour == 19 # Test an event that starts during local daylight savings time - dst_event = recurring_event(db, default_account, calendar, dst_rrule, - start=arrow.get(2015, 10, 27, 02, 03, 03), - end=arrow.get(2015, 10, 27, 03, 03, 03)) + dst_event = recurring_event( + db, + default_account, + calendar, + dst_rrule, + start=arrow.get(2015, 10, 27, 02, 03, 03), + end=arrow.get(2015, 10, 27, 03, 03, 03), + ) g = get_start_times(dst_event, end=arrow.get(2015, 11, 11)) for time in g: if time > arrow.get(2015, 11, 1): @@ -268,9 +313,15 @@ def test_inflate_across_DST(db, default_account, calendar): def test_inflate_all_day_event(db, default_account, calendar): - event = recurring_event(db, default_account, calendar, ALL_DAY_RRULE, - start=arrow.get(2014, 9, 4), - end=arrow.get(2014, 9, 4), all_day=True) + event = recurring_event( + db, + default_account, + calendar, + ALL_DAY_RRULE, + start=arrow.get(2014, 9, 4), + end=arrow.get(2014, 9, 4), + all_day=True, + ) infl = event.inflate() for i in infl: assert i.all_day @@ -279,9 +330,15 @@ def test_inflate_all_day_event(db, default_account, calendar): def test_inflate_multi_day_event(db, default_account, calendar): - event = recurring_event(db, default_account, calendar, ALL_DAY_RRULE, - start=arrow.get(2014, 9, 4), - end=arrow.get(2014, 9, 5), all_day=True) + event = recurring_event( + db, + default_account, + calendar, + ALL_DAY_RRULE, + start=arrow.get(2014, 9, 4), + end=arrow.get(2014, 9, 5), + all_day=True, + ) infl = event.inflate() for i in infl: assert i.all_day @@ -293,42 +350,45 @@ def test_inflate_multi_day_event(db, default_account, calendar): def test_invalid_rrule_entry(db, default_account, calendar): # If we don't know how to expand the RRULE, we treat the event as if # it were a single instance. 
- event = recurring_event(db, default_account, calendar, 'INVALID_RRULE_YAY') + event = recurring_event(db, default_account, calendar, "INVALID_RRULE_YAY") infl = event.inflate() assert len(infl) == 1 assert infl[0].start == event.start def test_invalid_parseable_rrule_entry(db, default_account, calendar): - event = recurring_event(db, default_account, calendar, - ["RRULE:FREQ=CHRISTMAS;UNTIL=1984;BYDAY=QQ"]) + event = recurring_event( + db, default_account, calendar, ["RRULE:FREQ=CHRISTMAS;UNTIL=1984;BYDAY=QQ"] + ) infl = event.inflate() assert len(infl) == 1 assert infl[0].start == event.start def test_non_recurring_events_behave(db, default_account, calendar): - event = Event(namespace_id=default_account.namespace.id, - calendar=calendar, - title='not recurring', - description='', - uid='non_recurring_uid', - location='', - busy=False, - read_only=False, - reminders='', - recurrence=None, - start=arrow.get(2014, 07, 07, 13, 30), - end=arrow.get(2014, 07, 07, 13, 55), - all_day=False, - is_owner=False, - participants=[], - provider_name='inbox', - raw_data='', - original_start_tz='America/Los_Angeles', - original_start_time=None, - master_event_uid=None, - source='local') + event = Event( + namespace_id=default_account.namespace.id, + calendar=calendar, + title="not recurring", + description="", + uid="non_recurring_uid", + location="", + busy=False, + read_only=False, + reminders="", + recurrence=None, + start=arrow.get(2014, 07, 07, 13, 30), + end=arrow.get(2014, 07, 07, 13, 55), + all_day=False, + is_owner=False, + participants=[], + provider_name="inbox", + raw_data="", + original_start_tz="America/Los_Angeles", + original_start_time=None, + master_event_uid=None, + source="local", + ) assert isinstance(event, Event) with pytest.raises(AttributeError): event.inflate() @@ -343,7 +403,7 @@ def test_inflated_events_cant_persist(db, default_account, calendar): # FIXME "No handlers could be found for logger" - ensure this is only # a test issue or fix. db.session.commit() - assert 'should not be committed' in str(excinfo.value) + assert "should not be committed" in str(excinfo.value) def test_override_instantiated(db, default_account, calendar): @@ -351,10 +411,13 @@ def test_override_instantiated(db, default_account, calendar): # RecurringEventOverrides, have links back to the parent, and don't # appear twice in the event list. event = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) - override = recurring_override(db, event, - arrow.get(2014, 9, 4, 20, 30, 00), - arrow.get(2014, 9, 4, 21, 30, 00), - arrow.get(2014, 9, 4, 22, 30, 00)) + override = recurring_override( + db, + event, + arrow.get(2014, 9, 4, 20, 30, 00), + arrow.get(2014, 9, 4, 21, 30, 00), + arrow.get(2014, 9, 4, 22, 30, 00), + ) all_events = event.all_events() assert len(all_events) == 7 assert override in all_events @@ -365,10 +428,13 @@ def test_override_same_start(db, default_account, calendar): # start date (ie. the RRULE has no EXDATE for that event), it doesn't # appear twice in the all_events list. 
event = recurring_event(db, default_account, calendar, TEST_RRULE) - override = recurring_override(db, event, - arrow.get(2014, 9, 4, 20, 30, 00), - arrow.get(2014, 9, 4, 20, 30, 00), - arrow.get(2014, 9, 4, 21, 30, 00)) + override = recurring_override( + db, + event, + arrow.get(2014, 9, 4, 20, 30, 00), + arrow.get(2014, 9, 4, 20, 30, 00), + arrow.get(2014, 9, 4, 21, 30, 00), + ) all_events = event.all_events() assert len(all_events) == 7 unique_starts = list(set([e.start for e in all_events])) @@ -384,30 +450,30 @@ def test_override_updated(db, default_account, calendar): # create a new Event, as if we just got it from Google master_uid = event.uid override_uid = master_uid + "_20140814T203000Z" - override = Event(title='new override from google', - description='', - uid=override_uid, - location='', - busy=False, - read_only=False, - reminders='', - recurrence=None, - start=arrow.get(2014, 8, 14, 22, 30, 00), - end=arrow.get(2014, 8, 14, 23, 30, 00), - all_day=False, - is_owner=False, - participants=[], - provider_name='inbox', - raw_data='', - original_start_tz='America/Los_Angeles', - original_start_time=arrow.get(2014, 8, 14, 21, 30, 00), - master_event_uid=master_uid, - source='local') - handle_event_updates(default_account.namespace.id, - calendar.id, - [override], - log, - db.session) + override = Event( + title="new override from google", + description="", + uid=override_uid, + location="", + busy=False, + read_only=False, + reminders="", + recurrence=None, + start=arrow.get(2014, 8, 14, 22, 30, 00), + end=arrow.get(2014, 8, 14, 23, 30, 00), + all_day=False, + is_owner=False, + participants=[], + provider_name="inbox", + raw_data="", + original_start_tz="America/Los_Angeles", + original_start_time=arrow.get(2014, 8, 14, 21, 30, 00), + master_event_uid=master_uid, + source="local", + ) + handle_event_updates( + default_account.namespace.id, calendar.id, [override], log, db.session + ) db.session.commit() # Lets see if the event got saved with the right info find_override = db.session.query(Event).filter_by(uid=override_uid).one() @@ -415,50 +481,54 @@ def test_override_updated(db, default_account, calendar): assert find_override.master_event_id == event.id # Update the same override, making sure we don't create two - override = Event(title='new override from google', - description='', - uid=override_uid, - location='walk and talk', - busy=False, - read_only=False, - reminders='', - recurrence=None, - start=arrow.get(2014, 8, 14, 22, 15, 00), - end=arrow.get(2014, 8, 14, 23, 15, 00), - all_day=False, - is_owner=False, - participants=[], - provider_name='inbox', - raw_data='', - original_start_tz='America/Los_Angeles', - original_start_time=arrow.get(2014, 8, 14, 21, 30, 00), - master_event_uid=master_uid, - source='local') - handle_event_updates(default_account.namespace.id, - calendar.id, - [override], log, db.session) + override = Event( + title="new override from google", + description="", + uid=override_uid, + location="walk and talk", + busy=False, + read_only=False, + reminders="", + recurrence=None, + start=arrow.get(2014, 8, 14, 22, 15, 00), + end=arrow.get(2014, 8, 14, 23, 15, 00), + all_day=False, + is_owner=False, + participants=[], + provider_name="inbox", + raw_data="", + original_start_tz="America/Los_Angeles", + original_start_time=arrow.get(2014, 8, 14, 21, 30, 00), + master_event_uid=master_uid, + source="local", + ) + handle_event_updates( + default_account.namespace.id, calendar.id, [override], log, db.session + ) db.session.commit() # Let's see if the event got 
saved with the right info find_override = db.session.query(Event).filter_by(uid=override_uid).one() assert find_override is not None assert find_override.master_event_id == event.id - assert find_override.location == 'walk and talk' + assert find_override.location == "walk and talk" def test_override_cancelled(db, default_account, calendar): # Test that overrides with status 'cancelled' are appropriately missing # from the expanded event. event = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) - override = recurring_override(db, event, - arrow.get(2014, 9, 4, 20, 30, 00), - arrow.get(2014, 9, 4, 21, 30, 00), - arrow.get(2014, 9, 4, 22, 30, 00)) + override = recurring_override( + db, + event, + arrow.get(2014, 9, 4, 20, 30, 00), + arrow.get(2014, 9, 4, 21, 30, 00), + arrow.get(2014, 9, 4, 22, 30, 00), + ) override.cancelled = True all_events = event.all_events() assert len(all_events) == 6 assert override not in all_events - assert not any([e.start == arrow.get(2014, 9, 4, 20, 30, 00) - for e in all_events]) + assert not any([e.start == arrow.get(2014, 9, 4, 20, 30, 00) for e in all_events]) def test_new_instance_cancelled(db, default_account, calendar): @@ -466,33 +536,38 @@ def test_new_instance_cancelled(db, default_account, calendar): # as an override with cancelled status rather than deleting it. event = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) override_uid = event.uid + "_20140814T203000Z" - override = Event(title='CANCELLED', - description='', - uid=override_uid, - location='', - busy=False, - read_only=False, - reminders='', - recurrence=None, - start=arrow.get(2014, 8, 14, 22, 15, 00), - end=arrow.get(2014, 8, 14, 23, 15, 00), - all_day=False, - is_owner=False, - participants=[], - provider_name='inbox', - raw_data='', - original_start_tz='America/Los_Angeles', - original_start_time=arrow.get(2014, 8, 14, 21, 30, 00), - master_event_uid=event.uid, - cancelled=True, - source='local') - handle_event_updates(default_account.namespace.id, - calendar.id, - [override], log, db.session) + override = Event( + title="CANCELLED", + description="", + uid=override_uid, + location="", + busy=False, + read_only=False, + reminders="", + recurrence=None, + start=arrow.get(2014, 8, 14, 22, 15, 00), + end=arrow.get(2014, 8, 14, 23, 15, 00), + all_day=False, + is_owner=False, + participants=[], + provider_name="inbox", + raw_data="", + original_start_tz="America/Los_Angeles", + original_start_time=arrow.get(2014, 8, 14, 21, 30, 00), + master_event_uid=event.uid, + cancelled=True, + source="local", + ) + handle_event_updates( + default_account.namespace.id, calendar.id, [override], log, db.session + ) db.session.commit() # Check the event got saved with the cancelled flag - find_override = db.session.query(Event).filter_by( - uid=override_uid, namespace_id=default_account.namespace.id).one() + find_override = ( + db.session.query(Event) + .filter_by(uid=override_uid, namespace_id=default_account.namespace.id) + .one() + ) assert find_override.cancelled is True @@ -533,42 +608,46 @@ def test_when_delta(): def test_rrule_to_json(): # Generate more test cases! 
# http://jakubroztocil.github.io/rrule/ - r = 'RRULE:FREQ=WEEKLY;UNTIL=20140918T203000Z;BYDAY=TH' + r = "RRULE:FREQ=WEEKLY;UNTIL=20140918T203000Z;BYDAY=TH" r = rrulestr(r, dtstart=None) j = rrule_to_json(r) - assert j.get('freq') == 'WEEKLY' - assert j.get('byweekday') == 'TH' + assert j.get("freq") == "WEEKLY" + assert j.get("byweekday") == "TH" - r = 'FREQ=HOURLY;COUNT=30;WKST=MO;BYMONTH=1;BYMINUTE=42;BYSECOND=24' + r = "FREQ=HOURLY;COUNT=30;WKST=MO;BYMONTH=1;BYMINUTE=42;BYSECOND=24" r = rrulestr(r, dtstart=None) j = rrule_to_json(r) - assert j.get('until') is None - assert j.get('byminute') is 42 + assert j.get("until") is None + assert j.get("byminute") is 42 def test_master_cancelled(db, default_account, calendar): # Test that when the master recurring event is cancelled, we cancel every # override too. event = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE) - override = recurring_override(db, event, - arrow.get(2014, 9, 4, 20, 30, 00), - arrow.get(2014, 9, 4, 21, 30, 00), - arrow.get(2014, 9, 4, 22, 30, 00)) - - update = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE, - commit=False) - update.status = 'cancelled' + override = recurring_override( + db, + event, + arrow.get(2014, 9, 4, 20, 30, 00), + arrow.get(2014, 9, 4, 21, 30, 00), + arrow.get(2014, 9, 4, 22, 30, 00), + ) + + update = recurring_event( + db, default_account, calendar, TEST_EXDATE_RULE, commit=False + ) + update.status = "cancelled" updates = [update] - handle_event_updates(default_account.namespace.id, - calendar.id, - updates, log, db.session) + handle_event_updates( + default_account.namespace.id, calendar.id, updates, log, db.session + ) db.session.commit() find_master = db.session.query(Event).filter_by(uid=event.uid).first() - assert find_master.status == 'cancelled' + assert find_master.status == "cancelled" find_override = db.session.query(Event).filter_by(uid=override.uid).first() - assert find_override.status == 'cancelled' + assert find_override.status == "cancelled" def test_made_recurring_then_cancelled(db, default_account, calendar): @@ -579,15 +658,16 @@ def test_made_recurring_then_cancelled(db, default_account, calendar): assert type(normal) == Event # Update with a recurrence rule *and* cancellation - update = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE, - commit=False) - update.status = 'cancelled' + update = recurring_event( + db, default_account, calendar, TEST_EXDATE_RULE, commit=False + ) + update.status = "cancelled" updates = [update] - handle_event_updates(default_account.namespace.id, - calendar.id, - updates, log, db.session) + handle_event_updates( + default_account.namespace.id, calendar.id, updates, log, db.session + ) db.session.commit() find_master = db.session.query(Event).filter_by(uid=normal.uid).first() - assert find_master.status == 'cancelled' + assert find_master.status == "cancelled" diff --git a/inbox/test/events/test_rsvp.py b/inbox/test/events/test_rsvp.py index c0a40ad2f..42adc89ae 100644 --- a/inbox/test/events/test_rsvp.py +++ b/inbox/test/events/test_rsvp.py @@ -6,21 +6,21 @@ def test_rsvp_recipient(default_account, message): assert rsvp_recipient(None) is None event = Event() - event.owner = 'Georges Perec ' - assert rsvp_recipient(event) == 'georges@gmail.com' + event.owner = "Georges Perec " + assert rsvp_recipient(event) == "georges@gmail.com" event = Event() - event.owner = '' - assert rsvp_recipient(event) == 'perec@gmail.com' + event.owner = "" + assert rsvp_recipient(event) == "perec@gmail.com" event = Event() 
- event.owner = 'perec@gmail.com' - assert rsvp_recipient(event) == 'perec@gmail.com' + event.owner = "perec@gmail.com" + assert rsvp_recipient(event) == "perec@gmail.com" - event.owner = 'None ' + event.owner = "None " assert rsvp_recipient(event) is None - message.from_addr = [('Georges Perec', 'georges@gmail.com')] + message.from_addr = [("Georges Perec", "georges@gmail.com")] event = Event() event.owner = None event.message = message @@ -32,8 +32,8 @@ def test_rsvp_recipient(default_account, message): message.from_addr = [] assert rsvp_recipient(event) is None - message.from_addr = [('', '')] + message.from_addr = [("", "")] assert rsvp_recipient(event) is None - message.from_addr = [('Georges Sans Addresse', '')] + message.from_addr = [("Georges Sans Addresse", "")] assert rsvp_recipient(event) is None diff --git a/inbox/test/events/test_sync.py b/inbox/test/events/test_sync.py index 9a20e47b9..ccd28b3f4 100644 --- a/inbox/test/events/test_sync.py +++ b/inbox/test/events/test_sync.py @@ -7,124 +7,157 @@ # Placeholder values for non-nullable attributes -default_params = dict(raw_data='', - busy=True, - all_day=False, - read_only=False, - start=datetime(2015, 2, 22, 11, 11), - end=datetime(2015, 2, 22, 22, 22), - is_owner=True, - participants=[{'email': 'japandroids@example.com', 'name': 'Japandroids'}]) +default_params = dict( + raw_data="", + busy=True, + all_day=False, + read_only=False, + start=datetime(2015, 2, 22, 11, 11), + end=datetime(2015, 2, 22, 22, 22), + is_owner=True, + participants=[{"email": "japandroids@example.com", "name": "Japandroids"}], +) # Mock responses from the provider with adds/updates/deletes def calendar_response(): - return CalendarSyncResponse([], [ - Calendar(name='Important Meetings', - uid='first_calendar_uid', - read_only=False), - Calendar(name='Nefarious Schemes', - uid='second_calendar_uid', - read_only=False), - ]) + return CalendarSyncResponse( + [], + [ + Calendar( + name="Important Meetings", uid="first_calendar_uid", read_only=False + ), + Calendar( + name="Nefarious Schemes", uid="second_calendar_uid", read_only=False + ), + ], + ) # Returns a calendar with name that is longer that our allowed column length of # 191 (MAX_INDEXABLE_LENGTH). This name is 192 characters def calendar_long_name(): return CalendarSyncResponse( - [], [Calendar(name='Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris_!', - uid='long_calendar_uid', read_only=True)]) + [], + [ + Calendar( + name="Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. 
Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris_!", + uid="long_calendar_uid", + read_only=True, + ) + ], + ) def calendar_response_with_update(): return CalendarSyncResponse( - [], [Calendar(name='Super Important Meetings', - uid='first_calendar_uid', - read_only=False)]) + [], + [ + Calendar( + name="Super Important Meetings", + uid="first_calendar_uid", + read_only=False, + ) + ], + ) def calendar_response_with_delete(): - return (['first_calendar_uid'], []) + return (["first_calendar_uid"], []) def event_response(calendar_uid, sync_from_time): - if calendar_uid == 'first_calendar_uid': + if calendar_uid == "first_calendar_uid": return [ - Event(uid='first_event_uid', - title='Plotting Meeting', - **default_params), - Event(uid='second_event_uid', - title='Scheming meeting', - **default_params), - Event(uid='third_event_uid', - title='Innocent Meeting', - **default_params) + Event(uid="first_event_uid", title="Plotting Meeting", **default_params), + Event(uid="second_event_uid", title="Scheming meeting", **default_params), + Event(uid="third_event_uid", title="Innocent Meeting", **default_params), ] else: return [ - Event(uid='second_event_uid', - title='Plotting Meeting', - **default_params), - Event(uid='third_event_uid', - title='Scheming meeting', - **default_params) + Event(uid="second_event_uid", title="Plotting Meeting", **default_params), + Event(uid="third_event_uid", title="Scheming meeting", **default_params), ] def event_response_with_update(calendar_uid, sync_from_time): - if calendar_uid == 'first_calendar_uid': - return [Event(uid='first_event_uid', - title='Top Secret Plotting Meeting', - **default_params)] + if calendar_uid == "first_calendar_uid": + return [ + Event( + uid="first_event_uid", + title="Top Secret Plotting Meeting", + **default_params + ) + ] def event_response_with_participants_update(calendar_uid, sync_from_time): - if calendar_uid == 'first_calendar_uid': - new_events = [Event(uid='first_event_uid', - **default_params)] - new_events[0].participants = [{'name': 'Johnny Thunders', - 'email': 'johnny@thunde.rs'}] + if calendar_uid == "first_calendar_uid": + new_events = [Event(uid="first_event_uid", **default_params)] + new_events[0].participants = [ + {"name": "Johnny Thunders", "email": "johnny@thunde.rs"} + ] return new_events def event_response_with_delete(calendar_uid, sync_from_time): - if calendar_uid == 'first_calendar_uid': - return [Event(uid='first_event_uid', status='cancelled', - **default_params)] + if calendar_uid == "first_calendar_uid": + return [Event(uid="first_event_uid", status="cancelled", **default_params)] def test_handle_changes(db, generic_account): namespace_id = generic_account.namespace.id - event_sync = EventSync(generic_account.email_address, 'google', - generic_account.id, namespace_id) + event_sync = EventSync( + generic_account.email_address, "google", generic_account.id, namespace_id + ) # Sync calendars/events event_sync.provider.sync_calendars = calendar_response event_sync.provider.sync_events = event_response event_sync.sync() - assert db.session.query(Calendar).filter( - Calendar.namespace_id == namespace_id, - Calendar.name != 'Emailed events').count() == 2 - - assert db.session.query(Event).join(Calendar).filter( - Event.namespace_id == namespace_id, - Calendar.uid == 'first_calendar_uid').count() == 3 - - assert db.session.query(Event).join(Calendar).filter( - Event.namespace_id == namespace_id, - Calendar.uid == 'second_calendar_uid').count() == 2 + assert ( + db.session.query(Calendar) + 
.filter( + Calendar.namespace_id == namespace_id, Calendar.name != "Emailed events" + ) + .count() + == 2 + ) + + assert ( + db.session.query(Event) + .join(Calendar) + .filter( + Event.namespace_id == namespace_id, Calendar.uid == "first_calendar_uid" + ) + .count() + == 3 + ) + + assert ( + db.session.query(Event) + .join(Calendar) + .filter( + Event.namespace_id == namespace_id, Calendar.uid == "second_calendar_uid" + ) + .count() + == 2 + ) # Sync a calendar update with long name event_sync.provider.sync_calendars = calendar_long_name event_sync.sync() - long_calendar = db.session.query(Calendar).filter( - Calendar.namespace_id == namespace_id, - Calendar.uid == 'long_calendar_uid').one() + long_calendar = ( + db.session.query(Calendar) + .filter( + Calendar.namespace_id == namespace_id, Calendar.uid == "long_calendar_uid" + ) + .one() + ) assert len(long_calendar.name) == MAX_INDEXABLE_LENGTH @@ -134,84 +167,139 @@ def test_handle_changes(db, generic_account): event_sync.sync() # Check that we have the same number of calendars and events as before - assert db.session.query(Calendar).filter( - Calendar.namespace_id == namespace_id, - Calendar.name != 'Emailed events').count() == 3 - - assert db.session.query(Event).join(Calendar).filter( - Event.namespace_id == namespace_id, - Calendar.uid == 'first_calendar_uid').count() == 3 - - assert db.session.query(Event).join(Calendar).filter( - Event.namespace_id == namespace_id, - Calendar.uid == 'second_calendar_uid').count() == 2 - - assert db.session.query(Event).join(Calendar).filter( - Event.namespace_id == namespace_id, - Calendar.uid == 'long_calendar_uid').count() == 2 + assert ( + db.session.query(Calendar) + .filter( + Calendar.namespace_id == namespace_id, Calendar.name != "Emailed events" + ) + .count() + == 3 + ) + + assert ( + db.session.query(Event) + .join(Calendar) + .filter( + Event.namespace_id == namespace_id, Calendar.uid == "first_calendar_uid" + ) + .count() + == 3 + ) + + assert ( + db.session.query(Event) + .join(Calendar) + .filter( + Event.namespace_id == namespace_id, Calendar.uid == "second_calendar_uid" + ) + .count() + == 2 + ) + + assert ( + db.session.query(Event) + .join(Calendar) + .filter(Event.namespace_id == namespace_id, Calendar.uid == "long_calendar_uid") + .count() + == 2 + ) # Check that calendar attribute was updated. 
- first_calendar = db.session.query(Calendar).filter( - Calendar.namespace_id == namespace_id, - Calendar.uid == 'first_calendar_uid').one() - assert first_calendar.name == 'Super Important Meetings' + first_calendar = ( + db.session.query(Calendar) + .filter( + Calendar.namespace_id == namespace_id, Calendar.uid == "first_calendar_uid" + ) + .one() + ) + assert first_calendar.name == "Super Important Meetings" # Sync an event update event_sync.provider.sync_events = event_response_with_update event_sync.sync() # Make sure the update was persisted - first_event = db.session.query(Event).filter( - Event.namespace_id == namespace_id, - Event.calendar_id == first_calendar.id, - Event.uid == 'first_event_uid').one() - assert first_event.title == 'Top Secret Plotting Meeting' + first_event = ( + db.session.query(Event) + .filter( + Event.namespace_id == namespace_id, + Event.calendar_id == first_calendar.id, + Event.uid == "first_event_uid", + ) + .one() + ) + assert first_event.title == "Top Secret Plotting Meeting" # Sync a participant update event_sync.provider.sync_events = event_response_with_participants_update event_sync.sync() # Make sure the update was persisted - first_event = db.session.query(Event).filter( - Event.namespace_id == namespace_id, - Event.calendar_id == first_calendar.id, - Event.uid == 'first_event_uid').one() + first_event = ( + db.session.query(Event) + .filter( + Event.namespace_id == namespace_id, + Event.calendar_id == first_calendar.id, + Event.uid == "first_event_uid", + ) + .one() + ) db.session.refresh(first_event) - assert first_event.participants == [{'name': 'Johnny Thunders', - 'email': 'johnny@thunde.rs'}] + assert first_event.participants == [ + {"name": "Johnny Thunders", "email": "johnny@thunde.rs"} + ] # Sync an event delete event_sync.provider.sync_events = event_response_with_delete event_sync.sync() # Make sure the delete was persisted. - first_event = db.session.query(Event).filter( - Event.namespace_id == namespace_id, - Event.calendar_id == first_calendar.id, - Event.uid == 'first_event_uid').first() + first_event = ( + db.session.query(Event) + .filter( + Event.namespace_id == namespace_id, + Event.calendar_id == first_calendar.id, + Event.uid == "first_event_uid", + ) + .first() + ) db.session.refresh(first_event) - assert first_event.status == 'cancelled' + assert first_event.status == "cancelled" # Sync a calendar delete - event_public_ids = [id_ for id_, in db.session.query(Event.public_id). - filter(Event.namespace_id == namespace_id, - Event.calendar_id == first_calendar.id)] + event_public_ids = [ + id_ + for id_, in db.session.query(Event.public_id).filter( + Event.namespace_id == namespace_id, Event.calendar_id == first_calendar.id + ) + ] event_sync.provider.sync_calendars = calendar_response_with_delete event_sync.sync() - assert db.session.query(Calendar).filter( - Calendar.namespace_id == namespace_id, - Calendar.uid == 'first_calendar_uid').first() is None + assert ( + db.session.query(Calendar) + .filter( + Calendar.namespace_id == namespace_id, Calendar.uid == "first_calendar_uid" + ) + .first() + is None + ) # Check that delete transactions are created for events on the deleted # calendar. 
- deleted_event_transactions = db.session.query(Transaction).filter( - Transaction.object_type == 'event', - Transaction.command == 'delete', - Transaction.namespace_id == namespace_id, - Transaction.object_public_id.in_(event_public_ids)).all() + deleted_event_transactions = ( + db.session.query(Transaction) + .filter( + Transaction.object_type == "event", + Transaction.command == "delete", + Transaction.namespace_id == namespace_id, + Transaction.object_public_id.in_(event_public_ids), + ) + .all() + ) assert len(deleted_event_transactions) == 3 # Check that events with the same uid but associated to a different # calendar still survive. - assert db.session.query(Event).filter( - Event.namespace_id == namespace_id).count() == 4 + assert ( + db.session.query(Event).filter(Event.namespace_id == namespace_id).count() == 4 + ) diff --git a/inbox/test/general/test_account.py b/inbox/test/general/test_account.py index 8c641c7d2..0c0cb1c89 100644 --- a/inbox/test/general/test_account.py +++ b/inbox/test/general/test_account.py @@ -16,15 +16,16 @@ def add_fake_imap_account(db_session, provider, email_address, password): @pytest.fixture def fake_imap_accounts(db): imap_account_data = { - 'yahoo': 'cypresstest@yahoo.com', - 'aol': 'benbitdit@aol.com', - 'icloud': 'inbox.watchdog@icloud.com', - 'imap': 'heyhey@mycustomimap.com', + "yahoo": "cypresstest@yahoo.com", + "aol": "benbitdit@aol.com", + "icloud": "inbox.watchdog@icloud.com", + "imap": "heyhey@mycustomimap.com", } - accounts = {'gmail': add_fake_gmail_account(db.session)} + accounts = {"gmail": add_fake_gmail_account(db.session)} for provider, email in imap_account_data.items(): - accounts[provider] = add_fake_imap_account(db.session, provider, email, - 'sEcr3T') + accounts[provider] = add_fake_imap_account( + db.session, provider, email, "sEcr3T" + ) return accounts diff --git a/inbox/test/general/test_address_canonicalization.py b/inbox/test/general/test_address_canonicalization.py index b4b244507..a66650bd7 100644 --- a/inbox/test/general/test_address_canonicalization.py +++ b/inbox/test/general/test_address_canonicalization.py @@ -1,23 +1,32 @@ def test_canonicalization(db): from inbox.models import Namespace, Account + ns = Namespace() - account = Account(namespace=ns, - email_address='lambda.the.ultimate@gmail.com') + account = Account(namespace=ns, email_address="lambda.the.ultimate@gmail.com") db.session.add(account) db.session.add(ns) db.session.commit() - assert account.email_address == 'lambda.the.ultimate@gmail.com' + assert account.email_address == "lambda.the.ultimate@gmail.com" - assert db.session.query(Account). \ - filter_by(email_address='lambdatheultimate@gmail.com').count() == 1 + assert ( + db.session.query(Account) + .filter_by(email_address="lambdatheultimate@gmail.com") + .count() + == 1 + ) - assert db.session.query(Account). \ - filter_by(email_address='lambda.theultimate@gmail.com').count() == 1 + assert ( + db.session.query(Account) + .filter_by(email_address="lambda.theultimate@gmail.com") + .count() + == 1 + ) # Check that nothing bad happens if you pass something that can't actually # be parsed as an email address. - assert db.session.query(Account). \ - filter_by(email_address='foo').count() == 0 + assert db.session.query(Account).filter_by(email_address="foo").count() == 0 # Flanker will parse hostnames too, don't break on that. - assert db.session.query(Account). 
\ - filter_by(email_address='http://example.com').count() == 0 + assert ( + db.session.query(Account).filter_by(email_address="http://example.com").count() + == 0 + ) diff --git a/inbox/test/general/test_category.py b/inbox/test/general/test_category.py index 5147ff7cd..e30b1f31d 100644 --- a/inbox/test/general/test_category.py +++ b/inbox/test/general/test_category.py @@ -2,26 +2,30 @@ from inbox.models import Folder, Label from inbox.models.category import sanitize_name from inbox.models.constants import MAX_INDEXABLE_LENGTH -from inbox.test.util.base import (add_fake_folder, add_fake_label, generic_account, - gmail_account, db) +from inbox.test.util.base import ( + add_fake_folder, + add_fake_label, + generic_account, + gmail_account, + db, +) -__all__ = ['db', 'generic_account', 'gmail_account'] +__all__ = ["db", "generic_account", "gmail_account"] def test_category_sanitize_name(): - assert sanitize_name(42) == u'42' - assert sanitize_name('42') == u'42' - assert sanitize_name(u' Boîte de réception ') ==\ - u' Boîte de réception' - long_name = 'N' * (MAX_INDEXABLE_LENGTH + 10) - assert sanitize_name(long_name) == 'N' * MAX_INDEXABLE_LENGTH + assert sanitize_name(42) == u"42" + assert sanitize_name("42") == u"42" + assert sanitize_name(u" Boîte de réception ") == u" Boîte de réception" + long_name = "N" * (MAX_INDEXABLE_LENGTH + 10) + assert sanitize_name(long_name) == "N" * MAX_INDEXABLE_LENGTH - long_name = 'N' * (MAX_INDEXABLE_LENGTH - 2) + ' ' - assert sanitize_name(long_name) == 'N' * (MAX_INDEXABLE_LENGTH - 2) + long_name = "N" * (MAX_INDEXABLE_LENGTH - 2) + " " + assert sanitize_name(long_name) == "N" * (MAX_INDEXABLE_LENGTH - 2) def test_folder_sanitized(db, generic_account): - long_name = 'F' * (MAX_INDEXABLE_LENGTH + 10) + long_name = "F" * (MAX_INDEXABLE_LENGTH + 10) folder = add_fake_folder(db.session, generic_account, long_name) assert len(folder.name) == MAX_INDEXABLE_LENGTH @@ -34,7 +38,7 @@ def test_folder_sanitized(db, generic_account): def test_label_sanitized(db, gmail_account): - long_name = 'L' * (MAX_INDEXABLE_LENGTH + 10) + long_name = "L" * (MAX_INDEXABLE_LENGTH + 10) label = add_fake_label(db.session, gmail_account, long_name) assert len(label.name) == MAX_INDEXABLE_LENGTH diff --git a/inbox/test/general/test_concurrency.py b/inbox/test/general/test_concurrency.py index 44615080f..842e7d0a8 100644 --- a/inbox/test/general/test_concurrency.py +++ b/inbox/test/general/test_concurrency.py @@ -10,7 +10,6 @@ class MockLogger(object): - def __init__(self): self.call_count = 0 @@ -19,7 +18,7 @@ def error(self, *args, **kwargs): class FailingFunction(object): - __name__ = 'FailingFunction' + __name__ = "FailingFunction" def __init__(self, exc_type, max_executions=3, delay=0): self.exc_type = exc_type @@ -35,7 +34,7 @@ def __call__(self): return -@pytest.mark.usefixtures('mock_gevent_sleep') +@pytest.mark.usefixtures("mock_gevent_sleep") def test_retry_with_logging(): logger = MockLogger() failing_function = FailingFunction(ValueError) @@ -57,35 +56,43 @@ def test_selective_retry(): logger = MockLogger() failing_function = FailingFunction(ValueError) with pytest.raises(ValueError): - retry_with_logging(failing_function, logger=logger, - fail_classes=[ValueError]) + retry_with_logging(failing_function, logger=logger, fail_classes=[ValueError]) assert logger.call_count == 0 assert failing_function.call_count == 1 -@pytest.mark.usefixtures('mock_gevent_sleep') +@pytest.mark.usefixtures("mock_gevent_sleep") def test_no_logging_until_many_transient_error(): transient = [ 
socket.timeout, socket.error, _mysql_exceptions.OperationalError( "(_mysql_exceptions.OperationalError) (1213, 'Deadlock " - "found when trying to get lock; try restarting transaction')"), + "found when trying to get lock; try restarting transaction')" + ), _mysql_exceptions.OperationalError( "(_mysql_exceptions.OperationalError) Lost connection to MySQL " - "server during query"), + "server during query" + ), _mysql_exceptions.OperationalError( - "(_mysql_exceptions.OperationalError) MySQL server has gone away."), + "(_mysql_exceptions.OperationalError) MySQL server has gone away." + ), _mysql_exceptions.OperationalError( "(_mysql_exceptions.OperationalError) Can't connect to MySQL " - "server on 127.0.0.1"), + "server on 127.0.0.1" + ), _mysql_exceptions.OperationalError( "(_mysql_exceptions.OperationalError) Max connect timeout reached " - "while reaching hostgroup 71"), + "while reaching hostgroup 71" + ), StatementError( - message="?", statement="SELECT *", params={}, + message="?", + statement="SELECT *", + params={}, orig=_mysql_exceptions.OperationalError( - "(_mysql_exceptions.OperationalError) MySQL server has gone away.")), + "(_mysql_exceptions.OperationalError) MySQL server has gone away." + ), + ), ] for transient_exc in transient: @@ -93,7 +100,7 @@ def test_no_logging_until_many_transient_error(): failing_function = FailingFunction(transient_exc, max_executions=2) retry_with_logging(failing_function, logger=logger) - assert logger.call_count == 0, '{} should not be logged'.format(transient_exc) + assert logger.call_count == 0, "{} should not be logged".format(transient_exc) assert failing_function.call_count == 2 failing_function = FailingFunction(socket.error, max_executions=21) @@ -105,22 +112,27 @@ def test_no_logging_until_many_transient_error(): failing_function = FailingFunction(socket.error, max_executions=2) -@pytest.mark.usefixtures('mock_gevent_sleep') +@pytest.mark.usefixtures("mock_gevent_sleep") def test_logging_on_critical_error(): critical = [ TypeError("Example TypeError"), + StatementError(message="?", statement="SELECT *", params={}, orig=None), StatementError( - message="?", statement="SELECT *", params={}, orig=None), - StatementError( - message="?", statement="SELECT *", params={}, + message="?", + statement="SELECT *", + params={}, orig=_mysql_exceptions.OperationalError( "(_mysql_exceptions.OperationalError) Incorrect string value " - "'\\xE7\\x(a\\x84\\xE5'")), + "'\\xE7\\x(a\\x84\\xE5'" + ), + ), _mysql_exceptions.OperationalError( "(_mysql_exceptions.OperationalError) Incorrect string value " - "'\\xE7\\x(a\\x84\\xE5'"), + "'\\xE7\\x(a\\x84\\xE5'" + ), _mysql_exceptions.IntegrityError( - "(_mysql_exceptions.IntegrityError) Column not found"), + "(_mysql_exceptions.IntegrityError) Column not found" + ), ] for critical_exc in critical: @@ -128,5 +140,5 @@ def test_logging_on_critical_error(): failing_function = FailingFunction(critical_exc, max_executions=2) retry_with_logging(failing_function, logger=logger) - assert logger.call_count == 1, '{} should be logged'.format(critical_exc) + assert logger.call_count == 1, "{} should be logged".format(critical_exc) assert failing_function.call_count == 2 diff --git a/inbox/test/general/test_draft_creation.py b/inbox/test/general/test_draft_creation.py index 327dd62ac..1cd33091e 100644 --- a/inbox/test/general/test_draft_creation.py +++ b/inbox/test/general/test_draft_creation.py @@ -2,17 +2,17 @@ def test_headers_presence(default_namespace, db): - data = {'subject': 'test draft', 'to': [{'email': 
'karim@nylas.com'}]} - draft = create_message_from_json(data, default_namespace, db.session, - False) + data = {"subject": "test draft", "to": [{"email": "karim@nylas.com"}]} + draft = create_message_from_json(data, default_namespace, db.session, False) assert draft.nylas_uid is not None assert draft.message_id_header is not None old_uid = draft.nylas_uid - update_draft(db.session, default_namespace.account, draft, - body="updated body", blocks=[]) + update_draft( + db.session, default_namespace.account, draft, body="updated body", blocks=[] + ) assert draft.nylas_uid is not None assert draft.message_id_header is not None diff --git a/inbox/test/general/test_filename_truncation.py b/inbox/test/general/test_filename_truncation.py index efdbfbbea..da0531e56 100644 --- a/inbox/test/general/test_filename_truncation.py +++ b/inbox/test/general/test_filename_truncation.py @@ -4,21 +4,21 @@ def test_filename_truncation(): # Note: test both 3-byte and 4-byte UTF8 chars to make sure truncation # follows UTF8 boundaries. - uname = u'\U0001f1fa\U0001f1f8\u2678\U0001f602.txt' - assert _trim_filename(uname, 'a', max_len=8) == uname - assert _trim_filename(uname, 'a', max_len=7) == u'\U0001f1fa\U0001f1f8\u2678.txt' - assert _trim_filename(uname, 'a', max_len=6) == u'\U0001f1fa\U0001f1f8.txt' - assert _trim_filename(uname, 'a', max_len=5) == u'\U0001f1fa.txt' + uname = u"\U0001f1fa\U0001f1f8\u2678\U0001f602.txt" + assert _trim_filename(uname, "a", max_len=8) == uname + assert _trim_filename(uname, "a", max_len=7) == u"\U0001f1fa\U0001f1f8\u2678.txt" + assert _trim_filename(uname, "a", max_len=6) == u"\U0001f1fa\U0001f1f8.txt" + assert _trim_filename(uname, "a", max_len=5) == u"\U0001f1fa.txt" # Note: Test input that is not unicode, ensure it uses unicode length not byte length - cname = '\xf0\x9f\x87\xba\xf0\x9f\x87\xb8\xe2\x99\xb8\xf0\x9f\x98\x82.txt' - assert _trim_filename(cname, 'a', max_len=8) == uname - assert _trim_filename(cname, 'a', max_len=7) == u'\U0001f1fa\U0001f1f8\u2678.txt' - assert _trim_filename(cname, 'a', max_len=6) == u'\U0001f1fa\U0001f1f8.txt' - assert _trim_filename(cname, 'a', max_len=5) == u'\U0001f1fa.txt' + cname = "\xf0\x9f\x87\xba\xf0\x9f\x87\xb8\xe2\x99\xb8\xf0\x9f\x98\x82.txt" + assert _trim_filename(cname, "a", max_len=8) == uname + assert _trim_filename(cname, "a", max_len=7) == u"\U0001f1fa\U0001f1f8\u2678.txt" + assert _trim_filename(cname, "a", max_len=6) == u"\U0001f1fa\U0001f1f8.txt" + assert _trim_filename(cname, "a", max_len=5) == u"\U0001f1fa.txt" - uname = 'ABCDEF.txttxttxtxtxttxttxtx' - assert _trim_filename(uname, 'a', max_len=8) == 'A.txttxt' + uname = "ABCDEF.txttxttxtxtxttxttxtx" + assert _trim_filename(uname, "a", max_len=8) == "A.txttxt" - uname = '.txttxttxtxtxttxttxtx' - assert _trim_filename(uname, 'a', max_len=8) == '.txttxtt' + uname = ".txttxttxtxtxttxttxtx" + assert _trim_filename(uname, "a", max_len=8) == ".txttxtt" diff --git a/inbox/test/general/test_html_parsing.py b/inbox/test/general/test_html_parsing.py index 21a6d288b..caa52182a 100644 --- a/inbox/test/general/test_html_parsing.py +++ b/inbox/test/general/test_html_parsing.py @@ -4,19 +4,21 @@ def test_strip_tags(): - text = ('
' - 'check out this link yo!
') - assert strip_tags(text).strip() == 'check out this link yo!' + text = ( + "
" + 'check out this link yo!
' + ) + assert strip_tags(text).strip() == "check out this link yo!" def test_preserve_refs(): """Test that HTML character/entity references are preserved when we strip tags.""" - text = u'la philologie mène au pire' - assert strip_tags(text) == u'la philologie mène au pire' + text = u"la philologie mène au pire" + assert strip_tags(text) == u"la philologie mène au pire" - text = u'la philologie mène au pire' - assert strip_tags(text) == u'la philologie mène au pire' + text = u"la philologie mène au pire" + assert strip_tags(text) == u"la philologie mène au pire" - text = u'veer & wander' - assert strip_tags(text) == 'veer & wander' + text = u"veer & wander" + assert strip_tags(text) == "veer & wander" diff --git a/inbox/test/general/test_ignition.py b/inbox/test/general/test_ignition.py index d03cce0a3..5a3c2cf88 100644 --- a/inbox/test/general/test_ignition.py +++ b/inbox/test/general/test_ignition.py @@ -7,9 +7,10 @@ from inbox.util.testutils import create_test_db, setup_test_db -@pytest.yield_fixture(scope='function') +@pytest.yield_fixture(scope="function") def base_db(config): from inbox.ignition import engine_manager + create_test_db() yield engine_manager setup_test_db() @@ -38,25 +39,25 @@ def test_reset_autoincrements(base_db): # A correctly set auto_increment. key = 0 init_db(engines[key], key) - reset_tables = reset_invalid_autoincrements(engines[key], - shard_schemas[key], key, - False) + reset_tables = reset_invalid_autoincrements( + engines[key], shard_schemas[key], key, False + ) assert len(reset_tables) == 0 # Ensure dry_run mode does not reset tables key = 1 init_db(engines[key], key + 1) - reset_tables = reset_invalid_autoincrements(engines[key], - shard_schemas[key], key, - True) + reset_tables = reset_invalid_autoincrements( + engines[key], shard_schemas[key], key, True + ) assert len(reset_tables) > 0 with pytest.raises(AssertionError): verify_db(engines[key], shard_schemas[key], key) - reset_tables = reset_invalid_autoincrements(engines[key], - shard_schemas[key], key, - False) + reset_tables = reset_invalid_autoincrements( + engines[key], shard_schemas[key], key, False + ) assert len(reset_tables) > 0 verify_db(engines[key], shard_schemas[key], key) diff --git a/inbox/test/general/test_message_parsing.py b/inbox/test/general/test_message_parsing.py index b26820d04..e50c296da 100644 --- a/inbox/test/general/test_message_parsing.py +++ b/inbox/test/general/test_message_parsing.py @@ -11,18 +11,24 @@ from inbox.util.blockstore import get_from_blockstore from inbox.util.addr import parse_mimepart_address_header -from inbox.test.util.base import (default_account, default_namespace, thread, - new_message_from_synced, mime_message, - add_fake_thread) +from inbox.test.util.base import ( + default_account, + default_namespace, + thread, + new_message_from_synced, + mime_message, + add_fake_thread, +) -__all__ = ['default_namespace', 'thread', 'default_account'] +__all__ = ["default_namespace", "thread", "default_account"] def create_from_synced(db, account, raw_message): thread = add_fake_thread(db.session, account.namespace.id) received_date = datetime.datetime.utcnow() - m = Message.create_from_synced(account, 22, '[Gmail]/All Mail', - received_date, raw_message) + m = Message.create_from_synced( + account, 22, "[Gmail]/All Mail", received_date, raw_message + ) m.thread = thread db.session.add(m) db.session.commit() @@ -33,114 +39,137 @@ def create_from_synced(db, account, raw_message): def raw_message_with_many_recipients(): # Message carefully constructed s.t. 
the length of the serialized 'to' # field is 65536. - return pkgutil.get_data('inbox', 'test/data/raw_message_with_many_recipients.txt') + return pkgutil.get_data("inbox", "test/data/raw_message_with_many_recipients.txt") @pytest.fixture def mime_message_with_bad_date(mime_message): - mime_message.headers['Date'] = 'unparseable' + mime_message.headers["Date"] = "unparseable" return mime_message @pytest.fixture def raw_message_with_long_content_id(): - return pkgutil.get_data('inbox', 'test/data/raw_message_with_long_content_id.txt') + return pkgutil.get_data("inbox", "test/data/raw_message_with_long_content_id.txt") @pytest.fixture def raw_message_with_ical_invite(): - return pkgutil.get_data('inbox', 'test/data/raw_message_with_ical_invite.txt') + return pkgutil.get_data("inbox", "test/data/raw_message_with_ical_invite.txt") @pytest.fixture def raw_message_with_bad_attachment(): - return pkgutil.get_data('inbox', 'test/data/raw_message_with_bad_attachment.txt') + return pkgutil.get_data("inbox", "test/data/raw_message_with_bad_attachment.txt") @pytest.fixture def raw_message_with_filename_attachment(): - return pkgutil.get_data('inbox', 'test/data/raw_message_with_filename_attachment.txt') + return pkgutil.get_data( + "inbox", "test/data/raw_message_with_filename_attachment.txt" + ) @pytest.fixture def raw_message_with_name_attachment(): - return pkgutil.get_data('inbox', 'test/data/raw_message_with_name_attachment.txt') + return pkgutil.get_data("inbox", "test/data/raw_message_with_name_attachment.txt") @pytest.fixture def raw_message_with_inline_name_attachment(): - return pkgutil.get_data('inbox', 'test/data/raw_message_with_inline_attachment.txt') + return pkgutil.get_data("inbox", "test/data/raw_message_with_inline_attachment.txt") @pytest.fixture def raw_message_with_outlook_emoji(): - return pkgutil.get_data('inbox', 'test/data/raw_message_with_outlook_emoji.txt') + return pkgutil.get_data("inbox", "test/data/raw_message_with_outlook_emoji.txt") @pytest.fixture def raw_message_with_outlook_emoji_inline(): - return pkgutil.get_data('inbox', 'test/data/raw_message_with_outlook_emoji_inline.txt') + return pkgutil.get_data( + "inbox", "test/data/raw_message_with_outlook_emoji_inline.txt" + ) @pytest.fixture def raw_message_with_long_message_id(): - return pkgutil.get_data('inbox', 'test/data/raw_message_with_long_message_id.txt') + return pkgutil.get_data("inbox", "test/data/raw_message_with_long_message_id.txt") def test_message_from_synced(db, new_message_from_synced, default_namespace): thread = add_fake_thread(db.session, default_namespace.id) m = new_message_from_synced assert m.namespace_id == default_namespace.id - assert m.to_addr == [['Alice', 'alice@example.com']] - assert m.cc_addr == [['Bob', 'bob@example.com']] - assert m.subject == 'Hello' - assert m.body == 'Hello World!' + assert m.to_addr == [["Alice", "alice@example.com"]] + assert m.cc_addr == [["Bob", "bob@example.com"]] + assert m.subject == "Hello" + assert m.body == "Hello World!" 
assert m.data_sha256 m.thread = thread db.session.add(m) db.session.commit() - assert (db.session.query(Block).filter( - Block.namespace_id == default_namespace.id).count() == 0) + assert ( + db.session.query(Block) + .filter(Block.namespace_id == default_namespace.id) + .count() + == 0 + ) assert len(m.parts) == 0 def test_save_attachments(db, default_account): - mime_msg = mime.create.multipart('mixed') + mime_msg = mime.create.multipart("mixed") mime_msg.append( - mime.create.text('plain', 'This is a message with attachments'), - mime.create.attachment('image/png', 'filler', 'attached_image.png', - 'attachment'), - mime.create.attachment('application/pdf', 'filler', - 'attached_file.pdf', 'attachment') + mime.create.text("plain", "This is a message with attachments"), + mime.create.attachment( + "image/png", "filler", "attached_image.png", "attachment" + ), + mime.create.attachment( + "application/pdf", "filler", "attached_file.pdf", "attachment" + ), ) msg = create_from_synced(db, default_account, mime_msg.to_string()) assert len(msg.parts) == 2 - assert all(part.content_disposition == 'attachment' for part in msg.parts) - assert {part.block.filename for part in msg.parts} == \ - {'attached_image.png', 'attached_file.pdf'} - assert {part.block.content_type for part in msg.parts} == \ - {'image/png', 'application/pdf'} - assert (db.session.query(Block).filter( - Block.namespace_id == default_account.namespace.id).count() == 2) + assert all(part.content_disposition == "attachment" for part in msg.parts) + assert {part.block.filename for part in msg.parts} == { + "attached_image.png", + "attached_file.pdf", + } + assert {part.block.content_type for part in msg.parts} == { + "image/png", + "application/pdf", + } + assert ( + db.session.query(Block) + .filter(Block.namespace_id == default_account.namespace.id) + .count() + == 2 + ) def test_save_inline_attachments(db, default_account): - mime_msg = mime.create.multipart('mixed') - inline_attachment = mime.create.attachment('image/png', 'filler', - 'inline_image.png', 'inline') - inline_attachment.headers['Content-Id'] = '' + mime_msg = mime.create.multipart("mixed") + inline_attachment = mime.create.attachment( + "image/png", "filler", "inline_image.png", "inline" + ) + inline_attachment.headers["Content-Id"] = "" mime_msg.append(inline_attachment) return mime_msg msg = create_from_synced(db, default_account, mime_message.to_string()) assert len(msg.parts) == 1 part = msg.parts[0] - assert part.content_disposition == 'inline' - assert part.content_id == '' - assert part.block.content_type == 'image/png' - assert part.block.data == 'filler' - assert (db.session.query(Block).filter( - Block.namespace_id == default_account.namespace.id).count() == 1) + assert part.content_disposition == "inline" + assert part.content_id == "" + assert part.block.content_type == "image/png" + assert part.block.data == "filler" + assert ( + db.session.query(Block) + .filter(Block.namespace_id == default_account.namespace.id) + .count() + == 1 + ) def test_concatenate_parts_for_body(db, default_account): @@ -158,21 +187,25 @@ def test_concatenate_parts_for_body(db, default_account): # +-image/jpeg # | # +-text/html - mime_msg = mime.create.multipart('mixed') + mime_msg = mime.create.multipart("mixed") mime_msg.append( - mime.create.text('html', 'First part'), - mime.create.attachment('image/png', 'filler', disposition='inline'), - mime.create.text('html', 'Second part'), - mime.create.attachment('image/png', 'more filler', - disposition='inline'), - 
mime.create.text('html', '3rd part'), + mime.create.text("html", "First part"), + mime.create.attachment("image/png", "filler", disposition="inline"), + mime.create.text("html", "Second part"), + mime.create.attachment("image/png", "more filler", disposition="inline"), + mime.create.text("html", "3rd part"), ) m = create_from_synced(db, default_account, mime_msg.to_string()) - assert m.body == \ - 'First partSecond part3rd part' + assert ( + m.body == "First partSecond part3rd part" + ) assert len(m.parts) == 2 - assert (db.session.query(Block).filter( - Block.namespace_id == default_account.namespace.id).count() == 2) + assert ( + db.session.query(Block) + .filter(Block.namespace_id == default_account.namespace.id) + .count() + == 2 + ) def test_inline_parts_may_form_body_text(db, default_account): @@ -180,81 +213,94 @@ def test_inline_parts_may_form_body_text(db, default_account): # or text/html parts that are really just the body text. Check that we # don't save them as inline atrachments, but just use them to form the body # text. - mime_msg = mime.create.multipart('mixed') + mime_msg = mime.create.multipart("mixed") mime_msg.append( - mime.create.attachment('text/html', 'Hello World!', - disposition='inline'), - mime.create.attachment('text/plain', 'Hello World!', - disposition='inline') + mime.create.attachment( + "text/html", "Hello World!", disposition="inline" + ), + mime.create.attachment("text/plain", "Hello World!", disposition="inline"), ) m = create_from_synced(db, default_account, mime_msg.to_string()) - assert m.body == 'Hello World!' + assert m.body == "Hello World!" assert len(m.parts) == 0 - assert (db.session.query(Block).filter( - Block.namespace_id == default_account.namespace.id).count() == 0) + assert ( + db.session.query(Block) + .filter(Block.namespace_id == default_account.namespace.id) + .count() + == 0 + ) def test_convert_plaintext_body_to_html(db, default_account): - mime_msg = mime.create.text('plain', 'Hello World!') + mime_msg = mime.create.text("plain", "Hello World!") m = create_from_synced(db, default_account, mime_msg.to_string()) - assert m.body == '

Hello World!

' + assert m.body == "

Hello World!

" def test_save_parts_without_disposition_as_attachments(db, default_account): - mime_msg = mime.create.multipart('mixed') - mime_msg.append( - mime.create.attachment('image/png', 'filler', - disposition=None) - ) + mime_msg = mime.create.multipart("mixed") + mime_msg.append(mime.create.attachment("image/png", "filler", disposition=None)) m = create_from_synced(db, default_account, mime_msg.to_string()) assert len(m.parts) == 1 - assert m.parts[0].content_disposition == 'attachment' - assert m.parts[0].block.content_type == 'image/png' - assert m.parts[0].block.data == 'filler' - assert (db.session.query(Block).filter( - Block.namespace_id == default_account.namespace.id).count() == 1) + assert m.parts[0].content_disposition == "attachment" + assert m.parts[0].block.content_type == "image/png" + assert m.parts[0].block.data == "filler" + assert ( + db.session.query(Block) + .filter(Block.namespace_id == default_account.namespace.id) + .count() + == 1 + ) def test_handle_long_filenames(db, default_account): - mime_msg = mime.create.multipart('mixed') + mime_msg = mime.create.multipart("mixed") mime_msg.append( - mime.create.attachment('image/png', 'filler', - filename=990 * 'A' + '.png', - disposition='attachment') + mime.create.attachment( + "image/png", "filler", filename=990 * "A" + ".png", disposition="attachment" + ) ) m = create_from_synced(db, default_account, mime_msg.to_string()) assert len(m.parts) == 1 saved_filename = m.parts[0].block.filename assert len(saved_filename) < 256 # Check that we kept the extension - assert saved_filename.endswith('.png') + assert saved_filename.endswith(".png") def test_handle_long_subjects(db, default_account, mime_message): - mime_message.headers['Subject'] = 4096 * 'A' + mime_message.headers["Subject"] = 4096 * "A" m = create_from_synced(db, default_account, mime_message.to_string()) assert len(m.subject) < 256 def test_dont_use_attached_html_to_form_body(db, default_account): - mime_msg = mime.create.multipart('mixed') + mime_msg = mime.create.multipart("mixed") mime_msg.append( - mime.create.text('plain', 'Please see attachment'), - mime.create.attachment('text/html', 'This is attached', - disposition='attachment', - filename='attachment.html') + mime.create.text("plain", "Please see attachment"), + mime.create.attachment( + "text/html", + "This is attached", + disposition="attachment", + filename="attachment.html", + ), ) m = create_from_synced(db, default_account, mime_msg.to_string()) assert len(m.parts) == 1 - assert m.parts[0].content_disposition == 'attachment' - assert m.parts[0].block.content_type == 'text/html' - assert m.body == '

Please see attachment

' - assert (db.session.query(Block).filter( - Block.namespace_id == default_account.namespace.id).count() == 1) + assert m.parts[0].content_disposition == "attachment" + assert m.parts[0].block.content_type == "text/html" + assert m.body == "

Please see attachment

" + assert ( + db.session.query(Block) + .filter(Block.namespace_id == default_account.namespace.id) + .count() + == 1 + ) -def test_truncate_recipients(db, default_account, thread, - raw_message_with_many_recipients): +def test_truncate_recipients( + db, default_account, thread, raw_message_with_many_recipients +): m = create_from_synced(db, default_account, raw_message_with_many_recipients) m.thread = thread db.session.add(m) @@ -266,99 +312,108 @@ def test_address_parsing(): """Check that header parsing can handle a variety of tricky input.""" # Extra quotes around display name mimepart = mime.from_string('From: ""Bob"" ') - parsed = parse_mimepart_address_header(mimepart, 'From') - assert parsed == [[' Bob ', 'bob@foocorp.com']] + parsed = parse_mimepart_address_header(mimepart, "From") + assert parsed == [[" Bob ", "bob@foocorp.com"]] # Comments after addr-spec mimepart = mime.from_string( - 'From: "Bob" (through Yahoo! Store Order System)') - parsed = parse_mimepart_address_header(mimepart, 'From') - assert parsed == [['Bob', 'bob@foocorp.com']] + 'From: "Bob" (through Yahoo! Store Order System)' + ) + parsed = parse_mimepart_address_header(mimepart, "From") + assert parsed == [["Bob", "bob@foocorp.com"]] - mimepart = mime.from_string( - 'From: Indiegogo (no reply)') - parsed = parse_mimepart_address_header(mimepart, 'From') - assert parsed == [['Indiegogo', 'noreply@indiegogo.com']] + mimepart = mime.from_string("From: Indiegogo (no reply)") + parsed = parse_mimepart_address_header(mimepart, "From") + assert parsed == [["Indiegogo", "noreply@indiegogo.com"]] - mimepart = mime.from_string( - 'From: Anon (GitHub Staff)') - parsed = parse_mimepart_address_header(mimepart, 'From') - assert parsed == [['Anon', 'support@github.com']] + mimepart = mime.from_string("From: Anon (GitHub Staff)") + parsed = parse_mimepart_address_header(mimepart, "From") + assert parsed == [["Anon", "support@github.com"]] # Display name in comment - mimepart = mime.from_string('From: root@gunks (Cron Daemon)') - parsed = parse_mimepart_address_header(mimepart, 'From') - assert parsed == [['Cron Daemon', 'root@gunks']] + mimepart = mime.from_string("From: root@gunks (Cron Daemon)") + parsed = parse_mimepart_address_header(mimepart, "From") + assert parsed == [["Cron Daemon", "root@gunks"]] # Missing closing angle bracket - mimepart = mime.from_string('From: Bob ') - parsed = parse_mimepart_address_header(mimepart, 'From') - assert parsed == [['Foo, Corp.', 'info@foocorp.com']] + mimepart = mime.from_string("From: =?utf-8?Q?Foo=2C=20Corp.?= ") + parsed = parse_mimepart_address_header(mimepart, "From") + assert parsed == [["Foo, Corp.", "info@foocorp.com"]] mimepart = mime.from_string( - 'To: =?utf-8?Q?Foo=2C=20Corp.?= , ' - '=?utf-8?Q?Support?= ') - parsed = parse_mimepart_address_header(mimepart, 'To') - assert parsed == [['Foo, Corp.', 'info@foocorp.com'], - ['Support', 'support@foocorp.com']] + "To: =?utf-8?Q?Foo=2C=20Corp.?= , " + "=?utf-8?Q?Support?= " + ) + parsed = parse_mimepart_address_header(mimepart, "To") + assert parsed == [ + ["Foo, Corp.", "info@foocorp.com"], + ["Support", "support@foocorp.com"], + ] # Multiple header lines mimepart = mime.from_string( - 'To: alice@foocorp.com\nSubject: Hello\nTo: bob@foocorp.com') - parsed = parse_mimepart_address_header(mimepart, 'To') - assert parsed == [['', 'alice@foocorp.com'], ['', 'bob@foocorp.com']] + "To: alice@foocorp.com\nSubject: Hello\nTo: bob@foocorp.com" + ) + parsed = parse_mimepart_address_header(mimepart, "To") + assert parsed == [["", 
"alice@foocorp.com"], ["", "bob@foocorp.com"]] -def test_handle_bad_content_disposition(db, default_account, default_namespace, - mime_message): +def test_handle_bad_content_disposition( + db, default_account, default_namespace, mime_message +): # Message with a MIME part that has an invalid content-disposition. mime_message.append( - mime.create.attachment('image/png', 'filler', 'attached_image.png', - disposition='alternative') + mime.create.attachment( + "image/png", "filler", "attached_image.png", disposition="alternative" + ) ) m = create_from_synced(db, default_account, mime_message.to_string()) assert m.namespace_id == default_namespace.id - assert m.to_addr == [['Alice', 'alice@example.com']] - assert m.cc_addr == [['Bob', 'bob@example.com']] - assert m.body == 'Hello World!' + assert m.to_addr == [["Alice", "alice@example.com"]] + assert m.cc_addr == [["Bob", "bob@example.com"]] + assert m.body == "Hello World!" assert len(m.parts) == 0 - assert (db.session.query(Block).filter( - Block.namespace_id == default_namespace.id).count() == 0) + assert ( + db.session.query(Block) + .filter(Block.namespace_id == default_namespace.id) + .count() + == 0 + ) -def test_store_full_body_on_parse_error( - default_account, mime_message_with_bad_date): +def test_store_full_body_on_parse_error(default_account, mime_message_with_bad_date): received_date = None - m = Message.create_from_synced(default_account, 139219, '[Gmail]/All Mail', - received_date, - mime_message_with_bad_date.to_string()) + m = Message.create_from_synced( + default_account, + 139219, + "[Gmail]/All Mail", + received_date, + mime_message_with_bad_date.to_string(), + ) assert get_from_blockstore(m.data_sha256) -def test_long_content_id(db, default_account, thread, - raw_message_with_long_content_id): +def test_long_content_id(db, default_account, thread, raw_message_with_long_content_id): m = create_from_synced(db, default_account, raw_message_with_long_content_id) m.thread = thread db.session.add(m) @@ -366,49 +421,56 @@ def test_long_content_id(db, default_account, thread, db.session.commit() -def test_parse_body_on_bad_attachment( - default_account, raw_message_with_bad_attachment): +def test_parse_body_on_bad_attachment(default_account, raw_message_with_bad_attachment): received_date = None - m = Message.create_from_synced(default_account, 139219, '[Gmail]/All Mail', - received_date, - raw_message_with_bad_attachment) + m = Message.create_from_synced( + default_account, + 139219, + "[Gmail]/All Mail", + received_date, + raw_message_with_bad_attachment, + ) assert m.decode_error - assert 'dingy blue carpet' in m.body + assert "dingy blue carpet" in m.body assert len(m.parts) == 0 def test_calculate_snippet(): m = Message() # Check that we strip contents of title, script, style tags - body = 'EMAIL' \ - 'Hello, world' - assert m.calculate_html_snippet(body) == 'Hello, world' + body = ( + "EMAIL" + "Hello, world" + ) + assert m.calculate_html_snippet(body) == "Hello, world" # Check that we replace various incarnations of
<br> by spaces - body = 'Hello,
world' - assert m.calculate_html_snippet(body) == 'Hello, world' + body = "Hello,
world" + assert m.calculate_html_snippet(body) == "Hello, world" - body = 'Hello,
world' - assert m.calculate_html_snippet(body) == 'Hello, world' + body = 'Hello,
world' + assert m.calculate_html_snippet(body) == "Hello, world" - body = 'Hello,
world' - assert m.calculate_html_snippet(body) == 'Hello, world' + body = "Hello,
world" + assert m.calculate_html_snippet(body) == "Hello, world" - body = 'Hello,

world' - assert m.calculate_html_snippet(body) == 'Hello, world' + body = "Hello,

world" + assert m.calculate_html_snippet(body) == "Hello, world" body = '
line1
line2
line3

' - assert m.calculate_html_snippet(body) == 'line1 line2 line3' + assert m.calculate_html_snippet(body) == "line1 line2 line3" # Check that snippets are properly truncated to 191 characters. - body = '''Etenim quid est, Catilina, quod iam amplius + body = """Etenim quid est, Catilina, quod iam amplius exspectes, si neque nox tenebris obscurare coetus nefarios nec privata domus parietibus continere voces coniurationis tuae - potest, si illustrantur, si erumpunt omnia?''' - expected_snippet = 'Etenim quid est, Catilina, quod iam amplius ' \ - 'exspectes, si neque nox tenebris obscurare coetus ' \ - 'nefarios nec privata domus parietibus continere ' \ - 'voces coniurationis tuae potest, si illustrantur,' + potest, si illustrantur, si erumpunt omnia?""" + expected_snippet = ( + "Etenim quid est, Catilina, quod iam amplius " + "exspectes, si neque nox tenebris obscurare coetus " + "nefarios nec privata domus parietibus continere " + "voces coniurationis tuae potest, si illustrantur," + ) assert len(expected_snippet) == 191 assert m.calculate_html_snippet(body) == expected_snippet @@ -416,63 +478,74 @@ def test_calculate_snippet(): def test_sanitize_subject(default_account, mime_message): # Parse a raw message with encoded null bytes in subject header; # check that we strip the null bytes. - mime_message.headers['Subject'] = \ - '=?UTF-8?B?WW91ciBVUFMgUGFja2FnZSB3YXMgZGVsaXZlcmVkAAAA?=' + mime_message.headers[ + "Subject" + ] = "=?UTF-8?B?WW91ciBVUFMgUGFja2FnZSB3YXMgZGVsaXZlcmVkAAAA?=" m = Message.create_from_synced( - default_account, 22, '[Gmail]/All Mail', datetime.datetime.utcnow(), - mime_message.to_string()) - assert m.subject == u'Your UPS Package was delivered' + default_account, + 22, + "[Gmail]/All Mail", + datetime.datetime.utcnow(), + mime_message.to_string(), + ) + assert m.subject == u"Your UPS Package was delivered" -def test_attachments_filename_parsing(db, default_account, - raw_message_with_filename_attachment, - raw_message_with_name_attachment): - m = create_from_synced(db, default_account, - raw_message_with_filename_attachment) +def test_attachments_filename_parsing( + db, + default_account, + raw_message_with_filename_attachment, + raw_message_with_name_attachment, +): + m = create_from_synced(db, default_account, raw_message_with_filename_attachment) assert len(m.attachments) == 1 - assert m.attachments[0].block.filename == 'bewerbung_anschreiben_positivbeispiel.txt' + assert ( + m.attachments[0].block.filename == "bewerbung_anschreiben_positivbeispiel.txt" + ) - m = create_from_synced(db, default_account, - raw_message_with_name_attachment) + m = create_from_synced(db, default_account, raw_message_with_name_attachment) assert len(m.attachments) == 1 - assert m.attachments[0].block.filename == 'bewerbung_anschreiben_positivbeispiel.txt' + assert ( + m.attachments[0].block.filename == "bewerbung_anschreiben_positivbeispiel.txt" + ) -def test_inline_attachments_filename_parsing(db, default_account, - raw_message_with_inline_name_attachment): - m = create_from_synced(db, default_account, - raw_message_with_inline_name_attachment) +def test_inline_attachments_filename_parsing( + db, default_account, raw_message_with_inline_name_attachment +): + m = create_from_synced(db, default_account, raw_message_with_inline_name_attachment) assert len(m.attachments) == 1 - assert m.attachments[0].block.filename == u"Capture d'e\u0301cran 2015-08-13 20.58.24.png" + assert ( + m.attachments[0].block.filename + == u"Capture d'e\u0301cran 2015-08-13 20.58.24.png" + ) -def 
test_attachments_emoji_filename_parsing(db, default_account, - raw_message_with_outlook_emoji): - m = create_from_synced(db, default_account, - raw_message_with_outlook_emoji) +def test_attachments_emoji_filename_parsing( + db, default_account, raw_message_with_outlook_emoji +): + m = create_from_synced(db, default_account, raw_message_with_outlook_emoji) assert len(m.attachments) == 1 - assert m.attachments[0].block.filename == u'OutlookEmoji-\U0001f60a.png' - assert m.attachments[0].block.content_type == 'image/png' - assert m.attachments[0].content_id == '<3f0ea351-779e-48b3-bfa9-7c2a9e373aeb>' - assert m.attachments[0].content_disposition == 'attachment' + assert m.attachments[0].block.filename == u"OutlookEmoji-\U0001f60a.png" + assert m.attachments[0].block.content_type == "image/png" + assert m.attachments[0].content_id == "<3f0ea351-779e-48b3-bfa9-7c2a9e373aeb>" + assert m.attachments[0].content_disposition == "attachment" -def test_attachments_emoji_filename_parsing(db, default_account, - raw_message_with_outlook_emoji_inline): - m = create_from_synced(db, default_account, - raw_message_with_outlook_emoji_inline) +def test_attachments_emoji_filename_parsing( + db, default_account, raw_message_with_outlook_emoji_inline +): + m = create_from_synced(db, default_account, raw_message_with_outlook_emoji_inline) assert len(m.attachments) == 1 - assert m.attachments[0].block.filename == u'OutlookEmoji-\U0001f60a.png' - assert m.attachments[0].block.content_type == 'image/png' - assert m.attachments[0].content_id == '<3f0ea351-779e-48b3-bfa9-7c2a9e373aeb>' - assert m.attachments[0].content_disposition == 'inline' + assert m.attachments[0].block.filename == u"OutlookEmoji-\U0001f60a.png" + assert m.attachments[0].block.content_type == "image/png" + assert m.attachments[0].content_id == "<3f0ea351-779e-48b3-bfa9-7c2a9e373aeb>" + assert m.attachments[0].content_disposition == "inline" @pytest.mark.only -def test_long_message_id(db, default_account, thread, - raw_message_with_long_message_id): - m = create_from_synced(db, default_account, - raw_message_with_long_message_id) +def test_long_message_id(db, default_account, thread, raw_message_with_long_message_id): + m = create_from_synced(db, default_account, raw_message_with_long_message_id) m.thread = thread db.session.add(m) # Check that no database error is raised. 
diff --git a/inbox/test/general/test_mutable_json_type.py b/inbox/test/general/test_mutable_json_type.py index b2f90dbcf..bf6a4a4f2 100644 --- a/inbox/test/general/test_mutable_json_type.py +++ b/inbox/test/general/test_mutable_json_type.py @@ -11,31 +11,29 @@ def test_mutable_json_type(db, config, default_account, folder): """ from inbox.models.backends.imap import ImapFolderSyncStatus - sync_status = ImapFolderSyncStatus( - account_id=default_account.id, - folder=folder) + sync_status = ImapFolderSyncStatus(account_id=default_account.id, folder=folder) db.session.add(sync_status) db.session.commit() original_metrics = sync_status.metrics - metrics = dict(download_uid_count=10, - queue_checked_at=datetime.utcnow()) + metrics = dict(download_uid_count=10, queue_checked_at=datetime.utcnow()) sync_status.update_metrics(metrics) updated_metrics = sync_status.metrics metrics.update(original_metrics) - assert updated_metrics != original_metrics and updated_metrics == metrics,\ - 'metrics not updated correctly' + assert ( + updated_metrics != original_metrics and updated_metrics == metrics + ), "metrics not updated correctly" # Reupdate status - new_metrics = dict(delete_uid_count=50, - download_uid_count=100, - queue_checked_at=datetime.utcnow()) + new_metrics = dict( + delete_uid_count=50, download_uid_count=100, queue_checked_at=datetime.utcnow() + ) sync_status.update_metrics(new_metrics) latest_metrics = sync_status.metrics metrics.update(new_metrics) - assert latest_metrics == metrics, 'metrics not re-updated correctly' + assert latest_metrics == metrics, "metrics not re-updated correctly" diff --git a/inbox/test/general/test_namespace.py b/inbox/test/general/test_namespace.py index 1e1719d34..cf41584b9 100644 --- a/inbox/test/general/test_namespace.py +++ b/inbox/test/general/test_namespace.py @@ -5,10 +5,18 @@ from freezegun import freeze_time from inbox.models.namespace import Namespace -from inbox.test.util.base import (add_generic_imap_account, add_fake_thread, add_fake_message, - add_fake_calendar, add_fake_event, add_fake_folder, - add_fake_imapuid, add_fake_gmail_account, - add_fake_contact, add_fake_msg_with_calendar_part) +from inbox.test.util.base import ( + add_generic_imap_account, + add_fake_thread, + add_fake_message, + add_fake_calendar, + add_fake_event, + add_fake_folder, + add_fake_imapuid, + add_fake_gmail_account, + add_fake_contact, + add_fake_msg_with_calendar_part, +) @fixture @@ -17,9 +25,7 @@ def get(*args, **kwargs): resp = Response() resp.status_code = 500 - monkeypatch.setattr( - 'requests.get', - lambda *args, **kwargs: get()) + monkeypatch.setattr("requests.get", lambda *args, **kwargs: get()) @fixture @@ -28,22 +34,22 @@ def get(*args, **kwargs): resp = Response() resp.status_code = 500 - monkeypatch.setattr( - 'requests.get', - lambda *args, **kwargs: get()) + monkeypatch.setattr("requests.get", lambda *args, **kwargs: get()) def random_range(start, end): return range(random.randrange(start, end)) -def add_completely_fake_account(db, email='test@nylas.com'): +def add_completely_fake_account(db, email="test@nylas.com"): from inbox.models.backends.gmail import GmailAuthCredentials + fake_account = add_fake_gmail_account(db.session, email_address=email) calendar = add_fake_calendar(db.session, fake_account.namespace.id) for i in random_range(1, 10): - add_fake_event(db.session, fake_account.namespace.id, - calendar=calendar, title='%s' % i) + add_fake_event( + db.session, fake_account.namespace.id, calendar=calendar, title="%s" % i + ) # Add fake Threads, 
Messages and ImapUids. folder = add_fake_folder(db.session, fake_account) @@ -51,15 +57,16 @@ def add_completely_fake_account(db, email='test@nylas.com'): th = add_fake_thread(db.session, fake_account.namespace.id) for j in random_range(1, 3): - msg = add_fake_msg_with_calendar_part(db.session, - fake_account, - 'fake part', thread=th) + msg = add_fake_msg_with_calendar_part( + db.session, fake_account, "fake part", thread=th + ) db.session.add(msg) db.session.flush() for k in random_range(1, 2): - add_fake_imapuid(db.session, fake_account.id, msg, folder, - int('%s%s' % (msg.id, k))) + add_fake_imapuid( + db.session, fake_account.id, msg, folder, int("%s%s" % (msg.id, k)) + ) # Add fake contacts for i in random_range(1, 5): add_fake_contact(db.session, fake_account.namespace.id, uid=str(i)) @@ -85,7 +92,7 @@ def test_get_accounts_to_delete(db): existing_account_count = db.session.query(Account.id).count() accounts = [] - email = 'test{}@nylas.com' + email = "test{}@nylas.com" for i in range(1, 6): account = add_completely_fake_account(db, email.format(i)) accounts.append(account) @@ -267,16 +274,14 @@ def test_namespace_deletion(db, default_account): message = add_fake_message(db.session, namespace_id, thread) for m in models: - c = db.session.query(m).filter( - m.namespace_id == namespace_id).count() + c = db.session.query(m).filter(m.namespace_id == namespace_id).count() print "count for", m, ":", c assert c != 0 fake_account = add_generic_imap_account(db.session) fake_account_id = fake_account.id - assert fake_account_id != account.id and \ - fake_account.namespace.id != namespace_id + assert fake_account_id != account.id and fake_account.namespace.id != namespace_id thread = add_fake_thread(db.session, fake_account.namespace.id) thread_id = thread.id @@ -284,25 +289,33 @@ def test_namespace_deletion(db, default_account): message = add_fake_message(db.session, fake_account.namespace.id, thread) message_id = message.id - assert len(db.session.query(Namespace).filter(Namespace.id == namespace_id).all()) > 0 + assert ( + len(db.session.query(Namespace).filter(Namespace.id == namespace_id).all()) > 0 + ) # Delete namespace, verify data corresponding to this namespace /only/ # is deleted - account = db.session.query(Account).join(Namespace).filter(Namespace.id == namespace_id).one() + account = ( + db.session.query(Account) + .join(Namespace) + .filter(Namespace.id == namespace_id) + .one() + ) account.mark_for_deletion() delete_namespace(namespace_id) db.session.commit() - assert len(db.session.query(Namespace).filter(Namespace.id == namespace_id).all()) == 0 + assert ( + len(db.session.query(Namespace).filter(Namespace.id == namespace_id).all()) == 0 + ) account = db.session.query(Account).get(account_id) assert not account for m in models: - assert db.session.query(m).filter( - m.namespace_id == namespace_id).count() == 0 + assert db.session.query(m).filter(m.namespace_id == namespace_id).count() == 0 fake_account = db.session.query(Account).get(fake_account_id) assert fake_account @@ -329,22 +342,22 @@ def test_namespace_delete_cascade(db, default_account): add_fake_message(db.session, namespace_id, thread) for m in models: - c = db.session.query(m).filter( - m.namespace_id == namespace_id).count() + c = db.session.query(m).filter(m.namespace_id == namespace_id).count() print "count for", m, ":", c assert c != 0 fake_account = add_generic_imap_account(db.session) fake_account_id = fake_account.id - assert fake_account_id != account.id and \ - fake_account.namespace.id != namespace_id + 
assert fake_account_id != account.id and fake_account.namespace.id != namespace_id thread = add_fake_thread(db.session, fake_account.namespace.id) add_fake_message(db.session, fake_account.namespace.id, thread) - assert len(db.session.query(Namespace).filter(Namespace.id == namespace_id).all()) > 0 + assert ( + len(db.session.query(Namespace).filter(Namespace.id == namespace_id).all()) > 0 + ) # This test is separate from test_namespace_deletion because we want to # do a raw SQLAlchemy delete rather than using delete_namespace, which does @@ -353,12 +366,22 @@ def test_namespace_delete_cascade(db, default_account): db.session.query(Namespace).filter(Namespace.id == namespace_id).delete() db.session.commit() - assert len(db.session.query(Namespace).filter(Namespace.id == namespace_id).all()) == 0 + assert ( + len(db.session.query(Namespace).filter(Namespace.id == namespace_id).all()) == 0 + ) def test_fake_accounts(empty_db): - from inbox.models import (Account, Thread, Message, Block, - Secret, Contact, Event, Transaction) + from inbox.models import ( + Account, + Thread, + Message, + Block, + Secret, + Contact, + Event, + Transaction, + ) from inbox.models.backends.imap import ImapUid from inbox.models.backends.gmail import GmailAuthCredentials from inbox.models.util import delete_namespace @@ -369,27 +392,23 @@ def test_fake_accounts(empty_db): account = add_completely_fake_account(db) for m in models: - c = db.session.query(m).filter( - m.namespace_id == account.namespace.id).count() + c = db.session.query(m).filter(m.namespace_id == account.namespace.id).count() assert c != 0 assert db.session.query(ImapUid).count() != 0 assert db.session.query(Secret).count() != 0 assert db.session.query(GmailAuthCredentials).count() != 0 - assert db.session.query(Account).filter( - Account.id == account.id).count() == 1 + assert db.session.query(Account).filter(Account.id == account.id).count() == 1 # Try the dry-run mode: account.mark_for_deletion() delete_namespace(account.namespace.id, dry_run=True) for m in models: - c = db.session.query(m).filter( - m.namespace_id == account.namespace.id).count() + c = db.session.query(m).filter(m.namespace_id == account.namespace.id).count() assert c != 0 - assert db.session.query(Account).filter( - Account.id == account.id).count() != 0 + assert db.session.query(Account).filter(Account.id == account.id).count() != 0 assert db.session.query(Secret).count() != 0 assert db.session.query(GmailAuthCredentials).count() != 0 @@ -399,12 +418,10 @@ def test_fake_accounts(empty_db): delete_namespace(account.namespace.id) for m in models: - c = db.session.query(m).filter( - m.namespace_id == account.namespace.id).count() + c = db.session.query(m).filter(m.namespace_id == account.namespace.id).count() assert c == 0 - assert db.session.query(Account).filter( - Account.id == account.id).count() == 0 + assert db.session.query(Account).filter(Account.id == account.id).count() == 0 assert db.session.query(Secret).count() == 0 assert db.session.query(GmailAuthCredentials).count() == 0 @@ -414,15 +431,14 @@ def test_fake_accounts(empty_db): def test_multiple_fake_accounts(empty_db): # Add three fake accounts, check that removing one doesn't affect # the two others. 
- from inbox.models import (Thread, Message, Block, Secret, Contact, Event, - Transaction) + from inbox.models import Thread, Message, Block, Secret, Contact, Event, Transaction from inbox.models.backends.gmail import GmailAuthCredentials from inbox.models.util import delete_namespace db = empty_db accounts = [] - accounts.append(add_completely_fake_account(db, 'test1@nylas.com')) - accounts.append(add_completely_fake_account(db, 'test2@nylas.com')) + accounts.append(add_completely_fake_account(db, "test1@nylas.com")) + accounts.append(add_completely_fake_account(db, "test2@nylas.com")) # Count secrets and authcredentials now. We can't do it after adding # the third account because our object model is a bit cumbersome. @@ -431,7 +447,7 @@ def test_multiple_fake_accounts(empty_db): assert secret_count != 0 assert authcredentials_count != 0 - accounts.append(add_completely_fake_account(db, 'test3@nylas.com')) + accounts.append(add_completely_fake_account(db, "test3@nylas.com")) stats = {} models = [Thread, Message, Event, Transaction, Contact, Block] @@ -440,8 +456,11 @@ def test_multiple_fake_accounts(empty_db): stats[account.email_address] = {} for model in models: clsname = model.__name__ - stats[account.email_address][clsname] = db.session.query(model).filter( - model.namespace_id == account.namespace.id).count() + stats[account.email_address][clsname] = ( + db.session.query(model) + .filter(model.namespace_id == account.namespace.id) + .count() + ) # now delete the third account. last_namespace_id = accounts[2].namespace.id @@ -452,14 +471,22 @@ def test_multiple_fake_accounts(empty_db): for account in accounts[:2]: for model in models: clsname = model.__name__ - assert stats[account.email_address][clsname] == db.session.query(model).filter( - model.namespace_id == account.namespace.id).count() + assert ( + stats[account.email_address][clsname] + == db.session.query(model) + .filter(model.namespace_id == account.namespace.id) + .count() + ) # check that no model from the last account is present. for model in models: clsname = model.__name__ - assert db.session.query(model).filter( - model.namespace_id == last_namespace_id).count() == 0 + assert ( + db.session.query(model) + .filter(model.namespace_id == last_namespace_id) + .count() + == 0 + ) # check that we didn't delete a secret that wasn't ours. 
assert db.session.query(Secret).count() == secret_count diff --git a/inbox/test/general/test_paths.py b/inbox/test/general/test_paths.py index f45903e5e..2cb373e19 100644 --- a/inbox/test/general/test_paths.py +++ b/inbox/test/general/test_paths.py @@ -3,28 +3,31 @@ def test_imap_folder_path(): - assert imap_folder_path('a/b') == 'a.b' - assert imap_folder_path('a/b', separator='?') == 'a?b' + assert imap_folder_path("a/b") == "a.b" + assert imap_folder_path("a/b", separator="?") == "a?b" - assert imap_folder_path('/A/b') == 'A.b' - assert imap_folder_path('/INBOX/b') == 'INBOX.b' - assert imap_folder_path('INBOX/b') == 'INBOX.b' + assert imap_folder_path("/A/b") == "A.b" + assert imap_folder_path("/INBOX/b") == "INBOX.b" + assert imap_folder_path("INBOX/b") == "INBOX.b" - assert imap_folder_path('a/very/deep/nested/folder') == 'a.very.deep.nested.folder' - assert imap_folder_path('/a/very/deep/nested/folder') == 'a.very.deep.nested.folder' + assert imap_folder_path("a/very/deep/nested/folder") == "a.very.deep.nested.folder" + assert imap_folder_path("/a/very/deep/nested/folder") == "a.very.deep.nested.folder" - assert imap_folder_path('') is None - assert imap_folder_path('/') is None + assert imap_folder_path("") is None + assert imap_folder_path("/") is None - assert imap_folder_path('A/B', prefix='INBOX.', separator='.') == 'INBOX.A.B' - assert imap_folder_path('/A/B', prefix='INBOX.', separator='.') == 'INBOX.A.B' - assert imap_folder_path('/A/B', prefix='INBOX', separator='.') == 'INBOX.A.B' - assert imap_folder_path('INBOX/A/B', prefix='INBOX', separator='.') == 'INBOX.A.B' + assert imap_folder_path("A/B", prefix="INBOX.", separator=".") == "INBOX.A.B" + assert imap_folder_path("/A/B", prefix="INBOX.", separator=".") == "INBOX.A.B" + assert imap_folder_path("/A/B", prefix="INBOX", separator=".") == "INBOX.A.B" + assert imap_folder_path("INBOX/A/B", prefix="INBOX", separator=".") == "INBOX.A.B" def test_fs_folder_path(): - assert fs_folder_path('INBOX.A.B') == 'INBOX/A/B' - assert fs_folder_path('INBOX.A.B', prefix='INBOX.') == 'A/B' - assert fs_folder_path('INBOX?A?B', prefix='INBOX?', separator='?') == 'A/B' - assert fs_folder_path('INBOX.a.very.deep.nested.folder') == 'INBOX/a/very/deep/nested/folder' - assert fs_folder_path(imap_folder_path('a/b')) == 'a/b' + assert fs_folder_path("INBOX.A.B") == "INBOX/A/B" + assert fs_folder_path("INBOX.A.B", prefix="INBOX.") == "A/B" + assert fs_folder_path("INBOX?A?B", prefix="INBOX?", separator="?") == "A/B" + assert ( + fs_folder_path("INBOX.a.very.deep.nested.folder") + == "INBOX/a/very/deep/nested/folder" + ) + assert fs_folder_path(imap_folder_path("a/b")) == "a/b" diff --git a/inbox/test/general/test_provider_resolution.py b/inbox/test/general/test_provider_resolution.py index b6c663267..36205db14 100644 --- a/inbox/test/general/test_provider_resolution.py +++ b/inbox/test/general/test_provider_resolution.py @@ -8,56 +8,60 @@ def test_provider_resolution(mock_dns_resolver): - mock_dns_resolver._load_records('inbox', 'test/data/general_test_provider_resolution.json') + mock_dns_resolver._load_records( + "inbox", "test/data/general_test_provider_resolution.json" + ) test_cases = [ - ('foo@example.com', 'unknown'), - ('foo@noresolve.com', 'unknown'), - ('foo@gmail.com', 'gmail'), - ('foo@postini.com', 'gmail'), - ('foo@yahoo.com', 'yahoo'), - ('foo@yahoo.se', 'yahoo'), - ('foo@hotmail.com', 'outlook'), - ('foo@outlook.com', 'outlook'), - ('foo@aol.com', 'aol'), - ('foo@love.com', 'aol'), - ('foo@games.com', 'aol'), - 
('foo@exchange.mit.edu', 'eas'), - ('foo@fastmail.fm', 'fastmail'), - ('foo@fastmail.net', 'fastmail'), - ('foo@fastmail.com', 'fastmail'), - ('foo@hover.com', 'hover'), - ('foo@yahoo.com', 'yahoo'), - ('foo@yandex.com', 'yandex'), - ('foo@mrmail.com', 'zimbra'), - ('foo@icloud.com', 'icloud'), - ('foo@mac.com', 'icloud'), - ('foo@gmx.com', 'gmx'), - ('foo@gandi.net', 'gandi'), - ('foo@debuggers.co', 'gandi'), - ('foo@forumone.com', 'gmail'), - ('foo@getbannerman.com', 'gmail'), - ('foo@inboxapp.onmicrosoft.com', 'eas'), - ('foo@espertech.onmicrosoft.com', 'eas'), - ('foo@doesnotexist.nilas.com', 'unknown'), - ('foo@autobizbrokers.com', 'bluehost'), + ("foo@example.com", "unknown"), + ("foo@noresolve.com", "unknown"), + ("foo@gmail.com", "gmail"), + ("foo@postini.com", "gmail"), + ("foo@yahoo.com", "yahoo"), + ("foo@yahoo.se", "yahoo"), + ("foo@hotmail.com", "outlook"), + ("foo@outlook.com", "outlook"), + ("foo@aol.com", "aol"), + ("foo@love.com", "aol"), + ("foo@games.com", "aol"), + ("foo@exchange.mit.edu", "eas"), + ("foo@fastmail.fm", "fastmail"), + ("foo@fastmail.net", "fastmail"), + ("foo@fastmail.com", "fastmail"), + ("foo@hover.com", "hover"), + ("foo@yahoo.com", "yahoo"), + ("foo@yandex.com", "yandex"), + ("foo@mrmail.com", "zimbra"), + ("foo@icloud.com", "icloud"), + ("foo@mac.com", "icloud"), + ("foo@gmx.com", "gmx"), + ("foo@gandi.net", "gandi"), + ("foo@debuggers.co", "gandi"), + ("foo@forumone.com", "gmail"), + ("foo@getbannerman.com", "gmail"), + ("foo@inboxapp.onmicrosoft.com", "eas"), + ("foo@espertech.onmicrosoft.com", "eas"), + ("foo@doesnotexist.nilas.com", "unknown"), + ("foo@autobizbrokers.com", "bluehost"), ] for email, expected_provider in test_cases: - assert provider_from_address(email, lambda: mock_dns_resolver) == expected_provider + assert ( + provider_from_address(email, lambda: mock_dns_resolver) == expected_provider + ) with pytest.raises(InvalidEmailAddressError): - provider_from_address('notanemail', lambda: mock_dns_resolver) + provider_from_address("notanemail", lambda: mock_dns_resolver) with pytest.raises(InvalidEmailAddressError): - provider_from_address('not@anemail', lambda: mock_dns_resolver) + provider_from_address("not@anemail", lambda: mock_dns_resolver) with pytest.raises(InvalidEmailAddressError): - provider_from_address('notanemail.com', lambda: mock_dns_resolver) + provider_from_address("notanemail.com", lambda: mock_dns_resolver) def test_auth_handler_dispatch(): - assert isinstance(handler_from_provider('custom'), GenericAuthHandler) - assert isinstance(handler_from_provider('fastmail'), GenericAuthHandler) - assert isinstance(handler_from_provider('aol'), GenericAuthHandler) - assert isinstance(handler_from_provider('yahoo'), GenericAuthHandler) - assert isinstance(handler_from_provider('gmail'), GmailAuthHandler) + assert isinstance(handler_from_provider("custom"), GenericAuthHandler) + assert isinstance(handler_from_provider("fastmail"), GenericAuthHandler) + assert isinstance(handler_from_provider("aol"), GenericAuthHandler) + assert isinstance(handler_from_provider("yahoo"), GenericAuthHandler) + assert isinstance(handler_from_provider("gmail"), GmailAuthHandler) with pytest.raises(NotSupportedError): - handler_from_provider('NOTAREALMAILPROVIDER') + handler_from_provider("NOTAREALMAILPROVIDER") diff --git a/inbox/test/general/test_relationships.py b/inbox/test/general/test_relationships.py index 879e69682..e8e6f0f13 100644 --- a/inbox/test/general/test_relationships.py +++ b/inbox/test/general/test_relationships.py @@ -11,35 +11,42 
@@ def test_category_delete(db, gmail_account): when a Category is deleted """ api_client = new_api_client(db, gmail_account.namespace) - po_data = api_client.post_data('/labels/', - {"display_name": "Test_Label"}) + po_data = api_client.post_data("/labels/", {"display_name": "Test_Label"}) assert po_data.status_code == 200 - category_public_id = json.loads(po_data.data)['id'] - category = db.session.query(Category).filter( - Category.public_id == category_public_id).one() + category_public_id = json.loads(po_data.data)["id"] + category = ( + db.session.query(Category) + .filter(Category.public_id == category_public_id) + .one() + ) category_id = category.id for i in xrange(10): - generic_thread = add_fake_thread(db.session, - gmail_account.namespace.id) - gen_message = add_fake_message(db.session, - gmail_account.namespace.id, - generic_thread) + generic_thread = add_fake_thread(db.session, gmail_account.namespace.id) + gen_message = add_fake_message( + db.session, gmail_account.namespace.id, generic_thread + ) data = {"label_ids": [category_public_id]} - resp = api_client.put_data('/messages/{}'. - format(gen_message.public_id), data) + resp = api_client.put_data("/messages/{}".format(gen_message.public_id), data) assert resp.status_code == 200 - associated_mcs = db.session.query(MessageCategory). \ - filter(MessageCategory.category_id == category_id).all() + associated_mcs = ( + db.session.query(MessageCategory) + .filter(MessageCategory.category_id == category_id) + .all() + ) assert len(associated_mcs) == 10 db.session.delete(category) db.session.commit() - assert db.session.query(MessageCategory). \ - filter(MessageCategory.category_id == category_id).all() == [] + assert ( + db.session.query(MessageCategory) + .filter(MessageCategory.category_id == category_id) + .all() + == [] + ) def test_message_delete(db, gmail_account): @@ -49,32 +56,37 @@ def test_message_delete(db, gmail_account): api_client = new_api_client(db, gmail_account.namespace) generic_thread = add_fake_thread(db.session, gmail_account.namespace.id) - gen_message = add_fake_message(db.session, - gmail_account.namespace.id, - generic_thread) + gen_message = add_fake_message( + db.session, gmail_account.namespace.id, generic_thread + ) category_ids = [] for i in xrange(10): - po_data = api_client.post_data('/labels/', - {"display_name": str(i)}) + po_data = api_client.post_data("/labels/", {"display_name": str(i)}) assert po_data.status_code == 200 - category_ids.append(json.loads(po_data.data)['id']) + category_ids.append(json.loads(po_data.data)["id"]) data = {"label_ids": category_ids} - resp = api_client.put_data('/messages/{}'. - format(gen_message.public_id), data) + resp = api_client.put_data("/messages/{}".format(gen_message.public_id), data) assert resp.status_code == 200 - associated_mcs = db.session.query(MessageCategory). \ - filter(MessageCategory.message_id == gen_message.id).all() + associated_mcs = ( + db.session.query(MessageCategory) + .filter(MessageCategory.message_id == gen_message.id) + .all() + ) assert len(associated_mcs) == 10 db.session.delete(gen_message) db.session.commit() - assert db.session.query(MessageCategory). 
\ - filter(MessageCategory.message_id == gen_message.id).all() == [] + assert ( + db.session.query(MessageCategory) + .filter(MessageCategory.message_id == gen_message.id) + .all() + == [] + ) def test_thread_delete(db, gmail_account): @@ -82,18 +94,20 @@ def test_thread_delete(db, gmail_account): when a Thread is deleted.""" generic_thread = add_fake_thread(db.session, gmail_account.namespace.id) - generic_message = add_fake_message(db.session, - gmail_account.namespace.id, - generic_thread) - assert db.session.query(Thread). \ - filter(Thread.id == generic_thread.id).all() == [generic_thread] - assert db.session.query(Message). \ - filter(Message.id == generic_message.id).all() == [generic_message] + generic_message = add_fake_message( + db.session, gmail_account.namespace.id, generic_thread + ) + assert db.session.query(Thread).filter(Thread.id == generic_thread.id).all() == [ + generic_thread + ] + assert db.session.query(Message).filter(Message.id == generic_message.id).all() == [ + generic_message + ] db.session.delete(generic_thread) db.session.commit() - assert db.session.query(Thread). \ - filter(Thread.id == generic_thread.id).all() == [] - assert db.session.query(Message). \ - filter(Message.id == generic_message.id).all() == [] + assert db.session.query(Thread).filter(Thread.id == generic_thread.id).all() == [] + assert ( + db.session.query(Message).filter(Message.id == generic_message.id).all() == [] + ) diff --git a/inbox/test/general/test_required_folders.py b/inbox/test/general/test_required_folders.py index f1f4d1a71..ec7b448c6 100644 --- a/inbox/test/general/test_required_folders.py +++ b/inbox/test/general/test_required_folders.py @@ -8,33 +8,31 @@ class AccountStub(object): id = 0 - email_address = 'bob@bob.com' + email_address = "bob@bob.com" access_token = None imap_endpoint = None - sync_state = 'running' + sync_state = "running" def new_token(self): - return ('foo', 22) + return ("foo", 22) def validate_token(self, new_token): return True class ConnectionStub(object): - def logout(self): pass def get_auth_handler(monkeypatch, folders): - g = GmailAuthHandler('gmail') + g = GmailAuthHandler("gmail") def mock_connect(a): return ConnectionStub() g.connect_account = mock_connect - monkeypatch.setattr(GmailCrispinClient, 'folder_names', - lambda x: folders) + monkeypatch.setattr(GmailCrispinClient, "folder_names", lambda x: folders) return g @@ -44,7 +42,7 @@ def test_all_mail_missing(monkeypatch): is not in the list of folders. """ - g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'}) + g = get_auth_handler(monkeypatch, {"inbox": "INBOX"}) with pytest.raises(GmailSettingError): g.verify_account(AccountStub()) @@ -54,6 +52,7 @@ def test_all_mail_present(monkeypatch): Test that the validate_folders passes if All Mail is present. 
""" - g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX', - 'trash': 'TRASH'}) + g = get_auth_handler( + monkeypatch, {"all": "ALL", "inbox": "INBOX", "trash": "TRASH"} + ) assert g.verify_account(AccountStub()) diff --git a/inbox/test/general/test_sync_engine_exit.py b/inbox/test/general/test_sync_engine_exit.py index 2f464e38d..4d6e4df61 100644 --- a/inbox/test/general/test_sync_engine_exit.py +++ b/inbox/test/general/test_sync_engine_exit.py @@ -17,9 +17,9 @@ @pytest.fixture def yahoo_account(db): - account = GenericAuthHandler('yahoo').create_account( - TEST_YAHOO_EMAIL, - {"email": TEST_YAHOO_EMAIL, "password": "BLAH"}) + account = GenericAuthHandler("yahoo").create_account( + TEST_YAHOO_EMAIL, {"email": TEST_YAHOO_EMAIL, "password": "BLAH"} + ) db.session.add(account) db.session.commit() return account @@ -31,21 +31,27 @@ def raise_folder_error(*args, **kwargs): @pytest.fixture def sync_engine_stub(db, yahoo_account): - db.session.add(Folder(account=yahoo_account, name='Inbox')) + db.session.add(Folder(account=yahoo_account, name="Inbox")) db.session.commit() - engine = FolderSyncEngine(yahoo_account.id, yahoo_account.namespace.id, - "Inbox", TEST_YAHOO_EMAIL, "yahoo", None) + engine = FolderSyncEngine( + yahoo_account.id, + yahoo_account.namespace.id, + "Inbox", + TEST_YAHOO_EMAIL, + "yahoo", + None, + ) return engine -def test_folder_engine_exits_if_folder_missing(db, yahoo_account, - sync_engine_stub): +def test_folder_engine_exits_if_folder_missing(db, yahoo_account, sync_engine_stub): # if the folder does not exist in our database, _load_state will # encounter an IntegrityError as it tries to insert a child # ImapFolderSyncStatus against an invalid foreign key - folder = db.session.query(Folder).filter_by(account=yahoo_account, - name='Inbox').one() + folder = ( + db.session.query(Folder).filter_by(account=yahoo_account, name="Inbox").one() + ) db.session.delete(folder) db.session.commit() with pytest.raises(IntegrityError): @@ -58,7 +64,7 @@ def test_folder_engine_exits_if_folder_missing(db, yahoo_account, # also check that we handle the crispin select_folder error appropriately # within the core True loop of _run() sync_engine_stub._load_state = lambda: True - sync_engine_stub.state = 'poll' + sync_engine_stub.state = "poll" sync_engine_stub.poll_impl = raise_folder_error with pytest.raises(MailsyncDone): sync_engine_stub._run() diff --git a/inbox/test/general/test_thread_creation.py b/inbox/test/general/test_thread_creation.py index c6e80fbc7..cfb9eb10e 100644 --- a/inbox/test/general/test_thread_creation.py +++ b/inbox/test/general/test_thread_creation.py @@ -8,55 +8,64 @@ from inbox.models.backends.generic import GenericAccount from inbox.models.backends.imap import ImapUid from inbox.util.threading import fetch_corresponding_thread -from inbox.test.util.base import (add_fake_thread, add_fake_message, - add_generic_imap_account) +from inbox.test.util.base import ( + add_fake_thread, + add_fake_message, + add_generic_imap_account, +) -MockRawMessage = namedtuple('RawMessage', ['flags']) +MockRawMessage = namedtuple("RawMessage", ["flags"]) @pytest.fixture def folder_sync_engine(db, generic_account): - db.session.add(Folder(account=generic_account, name='Inbox')) + db.session.add(Folder(account=generic_account, name="Inbox")) db.session.commit() - engine = FolderSyncEngine(generic_account.id, - generic_account.namespace.id, - "Inbox", - generic_account.email_address, - generic_account.provider, - None) + engine = FolderSyncEngine( + generic_account.id, + 
generic_account.namespace.id, + "Inbox", + generic_account.email_address, + generic_account.provider, + None, + ) return engine def test_generic_grouping(db, default_account): thread = add_fake_thread(db.session, default_account.namespace.id) - message = add_fake_message(db.session, default_account.namespace.id, - thread, subject="Golden Gate Park next Sat") - folder = Folder(account=default_account, name='Inbox', - canonical_name='inbox') - ImapUid(message=message, account_id=default_account.id, - msg_uid=2222, folder=folder) + message = add_fake_message( + db.session, + default_account.namespace.id, + thread, + subject="Golden Gate Park next Sat", + ) + folder = Folder(account=default_account, name="Inbox", canonical_name="inbox") + ImapUid(message=message, account_id=default_account.id, msg_uid=2222, folder=folder) thread = add_fake_thread(db.session, default_account.namespace.id) account = add_generic_imap_account(db.session) - message = add_fake_message(db.session, account.namespace.id, - thread, subject="Golden Gate Park next Sat") + message = add_fake_message( + db.session, account.namespace.id, thread, subject="Golden Gate Park next Sat" + ) - thread = fetch_corresponding_thread(db.session, - default_account.namespace.id, message) - assert thread is None, ("fetch_similar_threads should " - "heed namespace boundaries") + thread = fetch_corresponding_thread( + db.session, default_account.namespace.id, message + ) + assert thread is None, "fetch_similar_threads should " "heed namespace boundaries" def test_threading_limit(db, folder_sync_engine, monkeypatch): """Test that custom threading doesn't produce arbitrarily long threads, which eventually break things.""" from inbox.models import Message, Thread + # Shorten bound to make test faster MAX_THREAD_LENGTH = 10 monkeypatch.setattr( - 'inbox.mailsync.backends.imap.generic.MAX_THREAD_LENGTH', - MAX_THREAD_LENGTH) + "inbox.mailsync.backends.imap.generic.MAX_THREAD_LENGTH", MAX_THREAD_LENGTH + ) namespace_id = folder_sync_engine.namespace_id msg = MockRawMessage([]) @@ -66,20 +75,20 @@ def test_threading_limit(db, folder_sync_engine, monkeypatch): m.received_date = datetime.datetime.utcnow() m.references = [] m.size = 0 - m.body = '' + m.body = "" m.from_addr = [("Karim Hamidou", "karim@nilas.com")] m.to_addr = [("Eben Freeman", "eben@nilas.com")] - m.snippet = '' - m.subject = 'unique subject' + m.snippet = "" + m.subject = "unique subject" db.session.add(m) folder_sync_engine.add_message_to_thread(db.session, m, msg) db.session.commit() - new_threads = db.session.query(Thread). 
\ - filter(Thread.subject == 'unique subject').all() + new_threads = ( + db.session.query(Thread).filter(Thread.subject == "unique subject").all() + ) assert len(new_threads) == 3 - assert all(len(thread.messages) == MAX_THREAD_LENGTH for thread in - new_threads) + assert all(len(thread.messages) == MAX_THREAD_LENGTH for thread in new_threads) -if __name__ == '__main__': +if __name__ == "__main__": pytest.main([__file__]) diff --git a/inbox/test/general/test_threading.py b/inbox/test/general/test_threading.py index 79d9c26f9..ebef65dc4 100644 --- a/inbox/test/general/test_threading.py +++ b/inbox/test/general/test_threading.py @@ -3,8 +3,7 @@ import pytest from inbox.util.threading import fetch_corresponding_thread from inbox.util.misc import cleanup_subject -from inbox.test.util.base import (add_fake_message, add_fake_thread, - add_fake_imapuid) +from inbox.test.util.base import add_fake_message, add_fake_thread, add_fake_imapuid def test_message_cleanup(): @@ -12,45 +11,56 @@ def test_message_cleanup(): assert cleanup_subject("Re:Birthday") == "Birthday" assert cleanup_subject("Re:FWD: Birthday") == "Birthday" assert cleanup_subject("RE:FWD: My\tBirthday\n Party") == "My Birthday Party" - assert (cleanup_subject("Re: RE: Alors, comment ça s'est passé ?") == - "Alors, comment ça s'est passé ?") + assert ( + cleanup_subject("Re: RE: Alors, comment ça s'est passé ?") + == "Alors, comment ça s'est passé ?" + ) assert cleanup_subject("Re: FWD:FWD: Re:La chaise") == "La chaise" assert cleanup_subject("Aw: über cool") == "über cool" assert cleanup_subject("Aw:Re:wienerschnitzel") == "wienerschnitzel" assert cleanup_subject("Aw: wienerschnitzel") == "wienerschnitzel" assert cleanup_subject("aw: wg:wienerschnitzel") == "wienerschnitzel" - assert cleanup_subject( - "Undeliverable: Message returned to sender") == "Message returned to sender" - assert cleanup_subject( - "Undelivered: Message returned to sender") == "Message returned to sender" + assert ( + cleanup_subject("Undeliverable: Message returned to sender") + == "Message returned to sender" + ) + assert ( + cleanup_subject("Undelivered: Message returned to sender") + == "Message returned to sender" + ) def test_basic_message_grouping(db, default_namespace): first_thread = add_fake_thread(db.session, default_namespace.id) - first_thread.subject = 'Some kind of test' - - add_fake_message(db.session, default_namespace.id, - thread=first_thread, - subject='Some kind of test', - from_addr=[('Karim Hamidou', 'karim@nilas.com')], - to_addr=[('Eben Freeman', 'emfree@nilas.com')], - bcc_addr=[('Some person', 'person@nilas.com')]) - - msg2 = add_fake_message(db.session, default_namespace.id, thread=None, - subject='Re: Some kind of test', - from_addr=[('Some random dude', - 'random@pobox.com')], - to_addr=[('Karim Hamidou', 'karim@nilas.com')]) - - matched_thread = fetch_corresponding_thread(db.session, - default_namespace.id, msg2) + first_thread.subject = "Some kind of test" + + add_fake_message( + db.session, + default_namespace.id, + thread=first_thread, + subject="Some kind of test", + from_addr=[("Karim Hamidou", "karim@nilas.com")], + to_addr=[("Eben Freeman", "emfree@nilas.com")], + bcc_addr=[("Some person", "person@nilas.com")], + ) + + msg2 = add_fake_message( + db.session, + default_namespace.id, + thread=None, + subject="Re: Some kind of test", + from_addr=[("Some random dude", "random@pobox.com")], + to_addr=[("Karim Hamidou", "karim@nilas.com")], + ) + + matched_thread = fetch_corresponding_thread(db.session, default_namespace.id, msg2) 
assert matched_thread is None, "the algo shouldn't thread different convos" msg3 = add_fake_message(db.session, default_namespace.id, thread=None) - msg3.subject = 'Re: Some kind of test' - msg3.from_addr = [('Eben Freeman', 'emfree@nilas.com')] - msg3.to_addr = [('Karim Hamidou', 'karim@nilas.com')] + msg3.subject = "Re: Some kind of test" + msg3.from_addr = [("Eben Freeman", "emfree@nilas.com")] + msg3.to_addr = [("Karim Hamidou", "karim@nilas.com")] matched_thread = fetch_corresponding_thread(db.session, default_namespace.id, msg3) assert matched_thread is first_thread, "Should match on participants" @@ -58,24 +68,29 @@ def test_basic_message_grouping(db, default_namespace): def test_self_send(db, default_namespace): first_thread = add_fake_thread(db.session, default_namespace.id) - first_thread.subject = 'Some kind of test' - - add_fake_message(db.session, default_namespace.id, - thread=first_thread, - subject='Some kind of test', - from_addr=[('Karim Hamidou', 'karim@nilas.com')], - to_addr=[('Karim Hamidou', 'karim@nilas.com')]) - - msg2 = add_fake_message(db.session, default_namespace.id, - thread=None, - subject='Re: Some kind of test', - from_addr=[('Karim Hamidou', 'karim@nilas.com')], - to_addr=[('Karim Hamidou', 'karim@nilas.com')]) - - matched_thread = fetch_corresponding_thread(db.session, - default_namespace.id, msg2) + first_thread.subject = "Some kind of test" + + add_fake_message( + db.session, + default_namespace.id, + thread=first_thread, + subject="Some kind of test", + from_addr=[("Karim Hamidou", "karim@nilas.com")], + to_addr=[("Karim Hamidou", "karim@nilas.com")], + ) + + msg2 = add_fake_message( + db.session, + default_namespace.id, + thread=None, + subject="Re: Some kind of test", + from_addr=[("Karim Hamidou", "karim@nilas.com")], + to_addr=[("Karim Hamidou", "karim@nilas.com")], + ) + + matched_thread = fetch_corresponding_thread(db.session, default_namespace.id, msg2) assert matched_thread is first_thread, "Should match on self-send" -if __name__ == '__main__': +if __name__ == "__main__": pytest.main([__file__]) diff --git a/inbox/test/general/test_util.py b/inbox/test/general/test_util.py index 6ad8759cb..b04436d34 100644 --- a/inbox/test/general/test_util.py +++ b/inbox/test/general/test_util.py @@ -5,64 +5,73 @@ def test_naked_domain(): - assert naked_domain( - 'python.linux.com') == 'python.linux.com' - assert naked_domain( - 'iplayer.forums.bbc.co.uk') == 'iplayer.forums.bbc.co.uk' - assert naked_domain( - 'parliament.org.au') == 'parliament.org.au' - assert naked_domain( - 'prime-minister.parliament.org.au') == 'prime-minister.parliament.org.au' - assert naked_domain( - 'https://python.linux.com/resume-guido.pdf') == 'python.linux.com' - assert naked_domain( - 'ftp://linux.com/vmlinuz') == 'linux.com' - assert naked_domain( - 'ftp://parliament.co.uk/vmlinuz') == 'parliament.co.uk' - assert naked_domain( - 'ftp://pm.parliament.co.uk/vmlinuz') == 'pm.parliament.co.uk' - assert naked_domain( - 'https://username:password@python.linux.com/vmlinuz') == 'python.linux.com' + assert naked_domain("python.linux.com") == "python.linux.com" + assert naked_domain("iplayer.forums.bbc.co.uk") == "iplayer.forums.bbc.co.uk" + assert naked_domain("parliament.org.au") == "parliament.org.au" + assert ( + naked_domain("prime-minister.parliament.org.au") + == "prime-minister.parliament.org.au" + ) + assert ( + naked_domain("https://python.linux.com/resume-guido.pdf") == "python.linux.com" + ) + assert naked_domain("ftp://linux.com/vmlinuz") == "linux.com" + assert 
naked_domain("ftp://parliament.co.uk/vmlinuz") == "parliament.co.uk" + assert naked_domain("ftp://pm.parliament.co.uk/vmlinuz") == "pm.parliament.co.uk" + assert ( + naked_domain("https://username:password@python.linux.com/vmlinuz") + == "python.linux.com" + ) def test_matching_subdomains(monkeypatch): def gethostbyname_patch(x): return "127.0.0.1" - monkeypatch.setattr(socket, 'gethostbyname', gethostbyname_patch) + monkeypatch.setattr(socket, "gethostbyname", gethostbyname_patch) - assert matching_subdomains(None, 'mail.nylas.com') is False + assert matching_subdomains(None, "mail.nylas.com") is False # Two domains with the same IP but different domains aren't matched. - assert matching_subdomains('mail.microsoft.com', 'mail.nylas.com') is False - assert matching_subdomains('test.nylas.co.uk', 'mail.nylas.co.uk') is True - assert matching_subdomains('test.servers.nylas.com.au', 'mail.nylas.com.au') is True - assert matching_subdomains('test.servers.nylas.com', 'mail.nylas.com.au') is False - assert matching_subdomains('test.servers.co.uk', 'evil.co.uk') is False + assert matching_subdomains("mail.microsoft.com", "mail.nylas.com") is False + assert matching_subdomains("test.nylas.co.uk", "mail.nylas.co.uk") is True + assert matching_subdomains("test.servers.nylas.com.au", "mail.nylas.com.au") is True + assert matching_subdomains("test.servers.nylas.com", "mail.nylas.com.au") is False + assert matching_subdomains("test.servers.co.uk", "evil.co.uk") is False - addresses = ['127.0.0.1', '192.168.1.11'] + addresses = ["127.0.0.1", "192.168.1.11"] def gethostbyname_patch(x): return addresses.pop() - monkeypatch.setattr(socket, 'gethostbyname', gethostbyname_patch) + monkeypatch.setattr(socket, "gethostbyname", gethostbyname_patch) - addresses = ['127.0.0.1', '192.168.1.11'] + addresses = ["127.0.0.1", "192.168.1.11"] def gethostbyname_patch(x): return addresses.pop() # Check that if the domains are the same, we're not doing an # IP address resolution. 
- assert matching_subdomains('nylas.com', 'nylas.com') is True + assert matching_subdomains("nylas.com", "nylas.com") is True def test_extract_emails_from_text(): - assert extract_emails_from_text('test@example.com') == ['test@example.com'] - assert extract_emails_from_text('foo#test@ex-ample.com#foo') == ['foo#test@ex-ample.com'] - assert extract_emails_from_text('email="test@example.com"') == ['test@example.com'] - assert extract_emails_from_text('Email') == ['test@example.com'] - assert extract_emails_from_text('The email is test@example.com.') == ['test@example.com'] - assert extract_emails_from_text(u'Email b\xe4r@foo.ex\xe4mple.com') == [u'b\xe4r@foo.ex\xe4mple.com'] - assert extract_emails_from_text('Multiple\nfoo@example.com\nbar@example.com\nemails') == ['foo@example.com', 'bar@example.com'] - assert extract_emails_from_text('Email ') == ['test@example.com'] + assert extract_emails_from_text("test@example.com") == ["test@example.com"] + assert extract_emails_from_text("foo#test@ex-ample.com#foo") == [ + "foo#test@ex-ample.com" + ] + assert extract_emails_from_text('email="test@example.com"') == ["test@example.com"] + assert extract_emails_from_text('Email') == [ + "test@example.com" + ] + assert extract_emails_from_text("The email is test@example.com.") == [ + "test@example.com" + ] + assert extract_emails_from_text(u"Email b\xe4r@foo.ex\xe4mple.com") == [ + u"b\xe4r@foo.ex\xe4mple.com" + ] + assert extract_emails_from_text( + "Multiple\nfoo@example.com\nbar@example.com\nemails" + ) == ["foo@example.com", "bar@example.com"] + assert extract_emails_from_text("Email ") == ["test@example.com"] diff --git a/inbox/test/heartbeat/test_heartbeat.py b/inbox/test/heartbeat/test_heartbeat.py index aec203667..b706cdc1f 100644 --- a/inbox/test/heartbeat/test_heartbeat.py +++ b/inbox/test/heartbeat/test_heartbeat.py @@ -4,28 +4,36 @@ import time from datetime import datetime, timedelta -from inbox.heartbeat.store import (HeartbeatStore, HeartbeatStatusProxy, - HeartbeatStatusKey) -from inbox.heartbeat.status import (clear_heartbeat_status, - get_ping_status) +from inbox.heartbeat.store import ( + HeartbeatStore, + HeartbeatStatusProxy, + HeartbeatStatusKey, +) +from inbox.heartbeat.status import clear_heartbeat_status, get_ping_status import inbox.heartbeat.config as heartbeat_config from inbox.heartbeat.config import ALIVE_EXPIRY from inbox.config import config from nylas.logging import configure_logging -configure_logging(config.get('LOGLEVEL')) + +configure_logging(config.get("LOGLEVEL")) from mockredis import MockRedis + # Note that all Redis commands are mocked via mockredis in conftest.py. -def proxy_for(account_id, folder_id, email='test@test.com', provider='gmail', - device_id=0): - return HeartbeatStatusProxy(account_id=account_id, folder_id=folder_id, - folder_name="Inbox", - email_address=email, - provider_name=provider, - device_id=device_id) +def proxy_for( + account_id, folder_id, email="test@test.com", provider="gmail", device_id=0 +): + return HeartbeatStatusProxy( + account_id=account_id, + folder_id=folder_id, + folder_name="Inbox", + email_address=email, + provider_name=provider, + device_id=device_id, + ) def fuzzy_equals(a, b): @@ -62,6 +70,7 @@ def test_heartbeat_status_key(): def test_proxy_publish_doesnt_break_everything(monkeypatch): def break_things(s, k, d, v): raise Exception("Redis connection failure") + monkeypatch.setattr("mockredis.MockRedis.hset", break_things) # Check heartbeat publish exception doesn't pass up through to caller. 
# It will print out an error in the log, though. @@ -73,15 +82,15 @@ def test_folder_publish_in_index(redis_client): proxy = proxy_for(1, 2) proxy.publish() client = heartbeat_config.get_redis_client() - assert '1' in client.keys() + assert "1" in client.keys() # Check the per-account folder-list index was populated correctly: it # should be a sorted set of all folder IDs for that account, with the # folder's last heartbeat timestamp. - acct_folder_index = client.zrange('1', 0, -1, withscores=True) + acct_folder_index = client.zrange("1", 0, -1, withscores=True) assert len(acct_folder_index) == 1 key, timestamp = acct_folder_index[0] - assert key == '2' + assert key == "2" assert fuzzy_equals(proxy.heartbeat_at, timestamp) @@ -97,7 +106,7 @@ def test_kill_device_multiple(): assert len(folders) == 1 f, ts = folders[0] - assert f == '2' + assert f == "2" # Test querying heartbeats @@ -117,8 +126,9 @@ def random_heartbeats(): def make_dead_heartbeat(store, proxies, account_id, folder_id, time_dead): dead_time = time.time() - ALIVE_EXPIRY - time_dead dead_proxy = proxies[account_id][folder_id] - store.publish(dead_proxy.key, dead_proxy.device_id, - json.dumps(dead_proxy.value), dead_time) + store.publish( + dead_proxy.key, dead_proxy.device_id, json.dumps(dead_proxy.value), dead_time + ) def test_ping(random_heartbeats): @@ -128,7 +138,7 @@ def test_ping(random_heartbeats): assert isinstance(ping, dict) assert sorted(ping.keys()) == sorted(random_heartbeats.keys()) single = ping[0] - attrs = ('id', 'folders') + attrs = ("id", "folders") for attr in attrs: assert hasattr(single, attr) for f in single.folders: diff --git a/inbox/test/imap/data.py b/inbox/test/imap/data.py index 82ec8d34e..f2bb11aa4 100644 --- a/inbox/test/imap/data.py +++ b/inbox/test/imap/data.py @@ -7,9 +7,10 @@ import os import tempfile + # don't try writing to .hypothesis -os.environ['HYPOTHESIS_STORAGE_DIRECTORY'] = hyp_dir = tempfile.mkdtemp() -os.environ['HYPOTHESIS_DATABASE_FILE'] = os.path.join(hyp_dir, 'db') +os.environ["HYPOTHESIS_STORAGE_DIRECTORY"] = hyp_dir = tempfile.mkdtemp() +os.environ["HYPOTHESIS_DATABASE_FILE"] = os.path.join(hyp_dir, "db") from hypothesis import strategies as s from hypothesis.extra.datetime import datetimes @@ -18,35 +19,33 @@ def _build_address_header(addresslist): - return ', '.join( + return ", ".join( flanker.addresslib.address.EmailAddress(phrase, spec).full_spec() for phrase, spec in addresslist ) def build_mime_message(from_, to, cc, bcc, subject, body): - msg = mime.create.multipart('alternative') - msg.append( - mime.create.text('plain', body) - ) - msg.headers['Subject'] = subject - msg.headers['From'] = _build_address_header(from_) - msg.headers['To'] = _build_address_header(to) - msg.headers['Cc'] = _build_address_header(cc) - msg.headers['Bcc'] = _build_address_header(bcc) + msg = mime.create.multipart("alternative") + msg.append(mime.create.text("plain", body)) + msg.headers["Subject"] = subject + msg.headers["From"] = _build_address_header(from_) + msg.headers["To"] = _build_address_header(to) + msg.headers["Cc"] = _build_address_header(cc) + msg.headers["Bcc"] = _build_address_header(bcc) return msg.to_string() def build_uid_data(internaldate, flags, body, g_labels, g_msgid, modseq): return { - 'INTERNALDATE': internaldate, - 'FLAGS': flags, - 'BODY[]': body, - 'RFC822.SIZE': len(body), - 'X-GM-LABELS': g_labels, - 'X-GM-MSGID': g_msgid, - 'X-GM-THRID': g_msgid, # For simplicity - 'MODSEQ': modseq + "INTERNALDATE": internaldate, + "FLAGS": flags, + "BODY[]": body, + 
"RFC822.SIZE": len(body), + "X-GM-LABELS": g_labels, + "X-GM-MSGID": g_msgid, + "X-GM-THRID": g_msgid, # For simplicity + "MODSEQ": modseq, } @@ -57,16 +56,12 @@ def build_uid_data(internaldate, flags, body, g_labels, g_msgid, modseq): # An email address of the form 'foo@bar'. address = s.builds( - lambda localpart, domain: '{}@{}'.format(localpart, domain), - basic_text, basic_text) + lambda localpart, domain: "{}@{}".format(localpart, domain), basic_text, basic_text +) # A list of tuples ('displayname', 'addr@domain') -addresslist = s.lists( - s.tuples(basic_text, address), - min_size=1, - max_size=5 -) +addresslist = s.lists(s.tuples(basic_text, address), min_size=1, max_size=5) # A basic MIME message with plaintext body plus From/To/Cc/Bcc/Subject headers @@ -77,7 +72,7 @@ def build_uid_data(internaldate, flags, body, g_labels, g_msgid, modseq): addresslist, addresslist, basic_text, - basic_text + basic_text, ) randint = s.basic(generate=lambda random, _: random.getrandbits(63)) @@ -85,15 +80,12 @@ def build_uid_data(internaldate, flags, body, g_labels, g_msgid, modseq): uid_data = s.builds( build_uid_data, datetimes(timezones=[]), - s.sampled_from([(), ('\\Seen',)]), + s.sampled_from([(), ("\\Seen",)]), mime_message, - s.sampled_from([(), ('\\Inbox',)]), + s.sampled_from([(), ("\\Inbox",)]), + randint, randint, - randint) +) -uids = s.dictionaries( - s.integers(min_value=22), - uid_data, - min_size=5, - max_size=10) +uids = s.dictionaries(s.integers(min_value=22), uid_data, min_size=5, max_size=10) diff --git a/inbox/test/imap/network/test_actions_syncback.py b/inbox/test/imap/network/test_actions_syncback.py index ec965af5e..375f56e7a 100644 --- a/inbox/test/imap/network/test_actions_syncback.py +++ b/inbox/test/imap/network/test_actions_syncback.py @@ -12,18 +12,26 @@ def test_archive_move_syncback(db, config): - from inbox.actions.backends.gmail import (set_remote_archived, - remote_move, uidvalidity_cb) + from inbox.actions.backends.gmail import ( + set_remote_archived, + remote_move, + uidvalidity_cb, + ) from inbox.models.backends.imap import ImapAccount, ImapThread - g_thrid = db.session.query(ImapThread.g_thrid).filter_by( - id=THREAD_ID, namespace_id=NAMESPACE_ID).one()[0] + + g_thrid = ( + db.session.query(ImapThread.g_thrid) + .filter_by(id=THREAD_ID, namespace_id=NAMESPACE_ID) + .one()[0] + ) account = db.session.query(ImapAccount).get(ACCOUNT_ID) set_remote_archived(account, THREAD_ID, False, db.session) set_remote_archived(account, THREAD_ID, True, db.session) - assert account.inbox_folder_id and account.all_folder_id, \ - "`inbox_folder_id` and `all_folder_id` cannot be NULL" + assert ( + account.inbox_folder_id and account.all_folder_id + ), "`inbox_folder_id` and `all_folder_id` cannot be NULL" with crispin_client(account.id, account.provider) as client: client.select_folder(account.inbox_folder.name, uidvalidity_cb) inbox_uids = client.find_messages(g_thrid) @@ -33,8 +41,13 @@ def test_archive_move_syncback(db, config): assert archive_uids, "thread missing from archive" # and put things back the way they were :) - remote_move(account, THREAD_ID, account.all_folder.name, - account.inbox_folder.name, db.session) + remote_move( + account, + THREAD_ID, + account.all_folder.name, + account.inbox_folder.name, + db.session, + ) client.select_folder(account.inbox_folder.name, uidvalidity_cb) inbox_uids = client.find_messages(g_thrid) assert inbox_uids, "thread missing from inbox" @@ -44,16 +57,21 @@ def test_archive_move_syncback(db, config): def 
test_copy_delete_syncback(db, config): - from inbox.actions.backends.gmail import (_remote_copy, _remote_delete, - uidvalidity_cb) + from inbox.actions.backends.gmail import ( + _remote_copy, + _remote_delete, + uidvalidity_cb, + ) from inbox.models.backends.imap import ImapAccount, ImapThread - g_thrid = db.session.query(ImapThread.g_thrid). \ - filter_by(id=THREAD_ID, namespace_id=NAMESPACE_ID).one()[0] + g_thrid = ( + db.session.query(ImapThread.g_thrid) + .filter_by(id=THREAD_ID, namespace_id=NAMESPACE_ID) + .one()[0] + ) account = db.session.query(ImapAccount).get(ACCOUNT_ID) - _remote_copy(account, THREAD_ID, account.inbox_folder.name, 'testlabel', - db.session) + _remote_copy(account, THREAD_ID, account.inbox_folder.name, "testlabel", db.session) with crispin_client(account.id, account.provider) as client: client.select_folder(account.inbox_folder.name, uidvalidity_cb) @@ -62,19 +80,19 @@ def test_copy_delete_syncback(db, config): client.select_folder(account.all_folder.name, uidvalidity_cb) archive_uids = client.find_messages(g_thrid) assert archive_uids, "thread missing from archive" - client.select_folder('testlabel', uidvalidity_cb) + client.select_folder("testlabel", uidvalidity_cb) testlabel_uids = client.find_messages(g_thrid) assert testlabel_uids, "thread missing from testlabel" # and put things back the way they were :) - _remote_delete(account, THREAD_ID, 'testlabel', db.session) + _remote_delete(account, THREAD_ID, "testlabel", db.session) client.select_folder(account.inbox_folder.name, uidvalidity_cb) inbox_uids = client.find_messages(g_thrid) assert inbox_uids, "thread missing from inbox" client.select_folder(account.all_folder.name, uidvalidity_cb) archive_uids = client.find_messages(g_thrid) assert archive_uids, "thread missing from archive" - client.select_folder('testlabel', uidvalidity_cb) + client.select_folder("testlabel", uidvalidity_cb) testlabel_uids = client.find_messages(g_thrid) assert not testlabel_uids, "thread still present in testlabel" @@ -84,24 +102,20 @@ def test_remote_unread_syncback(db, config): from inbox.models.backends.imap import ImapAccount, ImapThread account = db.session.query(ImapAccount).get(ACCOUNT_ID) - g_thrid, = db.session.query(ImapThread.g_thrid). \ - filter_by(id=THREAD_ID).one() + (g_thrid,) = db.session.query(ImapThread.g_thrid).filter_by(id=THREAD_ID).one() set_remote_unread(account, THREAD_ID, True, db.session) with crispin_client(account.id, account.provider) as client: client.select_folder(account.all_folder.name, uidvalidity_cb) uids = client.find_messages(g_thrid) - assert not any('\\Seen' in flags for flags, _ in - client.flags(uids).values()) + assert not any("\\Seen" in flags for flags, _ in client.flags(uids).values()) set_remote_unread(account, THREAD_ID, False, db.session) - assert all('\\Seen' in flags for flags, _ in - client.flags(uids).values()) + assert all("\\Seen" in flags for flags, _ in client.flags(uids).values()) set_remote_unread(account, THREAD_ID, True, db.session) - assert not any('\\Seen' in flags for flags, _ in - client.flags(uids).values()) + assert not any("\\Seen" in flags for flags, _ in client.flags(uids).values()) # TODO: Test more of the different cases here. diff --git a/inbox/test/imap/network/test_drafts_syncback.py b/inbox/test/imap/network/test_drafts_syncback.py index c5380fc99..ac0477a99 100644 --- a/inbox/test/imap/network/test_drafts_syncback.py +++ b/inbox/test/imap/network/test_drafts_syncback.py @@ -13,15 +13,19 @@ # back to the state it started in when the test is done. 
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
 def message(db, config):
     from inbox.models.backends.imap import ImapAccount
     account = db.session.query(ImapAccount).get(ACCOUNT_ID)
-    to = [{'name': u'"\u2605The red-haired mermaid\u2605"',
-           'email': account.email_address}]
-    subject = 'Draft test: ' + str(uuid.uuid4().hex)
-    body = 'Sea, birds, yoga and sand.'
+    to = [
+        {
+            "name": u'"\u2605The red-haired mermaid\u2605"',
+            "email": account.email_address,
+        }
+    ]
+    subject = "Draft test: " + str(uuid.uuid4().hex)
+    body = "Sea, birds, yoga and sand.
" return (to, subject, body) @@ -37,25 +41,33 @@ def test_remote_save_draft(db, config, message): to, subject, body = message to_addr = _parse_recipients(to) recipients = Recipients(to_addr, [], []) - email = create_email(account.sender_name, account.email_address, None, - recipients, subject, body, None) + email = create_email( + account.sender_name, + account.email_address, + None, + recipients, + subject, + body, + None, + ) date = datetime.utcnow() - remote_save_draft(account, account.drafts_folder.name, email.to_string(), - db.session, date) + remote_save_draft( + account, account.drafts_folder.name, email.to_string(), db.session, date + ) with crispin_client(account.id, account.provider) as c: - criteria = ['NOT DELETED', 'SUBJECT "{0}"'.format(subject)] + criteria = ["NOT DELETED", 'SUBJECT "{0}"'.format(subject)] c.conn.select_folder(account.drafts_folder.name, readonly=False) draft_uids = c.conn.search(criteria) - assert draft_uids, 'Message missing from Drafts folder' + assert draft_uids, "Message missing from Drafts folder" flags = c.conn.get_flags(draft_uids) for uid in draft_uids: f = flags.get(uid) - assert f and '\\Draft' in f, "Message missing '\\Draft' flag" + assert f and "\\Draft" in f, "Message missing '\\Draft' flag" c.conn.delete_messages(draft_uids) c.conn.expunge() @@ -67,8 +79,7 @@ def test_remote_delete_draft(db, config, message): remote. """ - from inbox.actions.backends.gmail import (remote_save_draft, - remote_delete_draft) + from inbox.actions.backends.gmail import remote_save_draft, remote_delete_draft from inbox.sendmail.base import _parse_recipients from inbox.sendmail.message import create_email, Recipients from inbox.models import Account @@ -77,28 +88,34 @@ def test_remote_delete_draft(db, config, message): to, subject, body = message to_addr = _parse_recipients(to) recipients = Recipients(to_addr, [], []) - email = create_email(account.sender_name, account.email_address, None, - recipients, subject, body, None) + email = create_email( + account.sender_name, + account.email_address, + None, + recipients, + subject, + body, + None, + ) date = datetime.utcnow() # Save on remote - remote_save_draft(account, account.drafts_folder.name, email.to_string(), - db.session, date) + remote_save_draft( + account, account.drafts_folder.name, email.to_string(), db.session, date + ) - inbox_uid = email.headers['X-INBOX-ID'] + inbox_uid = email.headers["X-INBOX-ID"] with crispin_client(account.id, account.provider) as c: - criteria = ['DRAFT', 'NOT DELETED', - 'HEADER X-INBOX-ID {0}'.format(inbox_uid)] + criteria = ["DRAFT", "NOT DELETED", "HEADER X-INBOX-ID {0}".format(inbox_uid)] c.conn.select_folder(account.drafts_folder.name, readonly=False) uids = c.conn.search(criteria) - assert uids, 'Message missing from Drafts folder' + assert uids, "Message missing from Drafts folder" # Delete on remote - remote_delete_draft(account, account.drafts_folder.name, inbox_uid, - db.session) + remote_delete_draft(account, account.drafts_folder.name, inbox_uid, db.session) c.conn.select_folder(account.drafts_folder.name, readonly=False) uids = c.conn.search(criteria) - assert not uids, 'Message still in Drafts folder' + assert not uids, "Message still in Drafts folder" diff --git a/inbox/test/imap/network/test_send.py b/inbox/test/imap/network/test_send.py index ecafef160..d928fc7fc 100644 --- a/inbox/test/imap/network/test_send.py +++ b/inbox/test/imap/network/test_send.py @@ -7,49 +7,47 @@ from inbox.test.util.crispin import crispin_client from inbox.test.api.base import api_client 
-__all__ = ['default_account', 'api_client'] +__all__ = ["default_account", "api_client"] @pytest.fixture def example_draft(db, default_account): return { - 'subject': 'Draft test at {}'.format(datetime.utcnow()), - 'body': '

Sea, birds and sand.',
-        'to': [{'name': 'The red-haired mermaid',
-                'email': default_account.email_address}]
+        "subject": "Draft test at {}".format(datetime.utcnow()),
+        "body": "Sea, birds and sand.
", + "to": [ + {"name": "The red-haired mermaid", "email": default_account.email_address} + ], } def test_send_draft(db, api_client, example_draft, default_account): - r = api_client.post_data('/drafts', example_draft) + r = api_client.post_data("/drafts", example_draft) assert r.status_code == 200 - public_id = json.loads(r.data)['id'] - version = json.loads(r.data)['version'] + public_id = json.loads(r.data)["id"] + version = json.loads(r.data)["version"] - r = api_client.post_data('/send', {'draft_id': public_id, - 'version': version}) + r = api_client.post_data("/send", {"draft_id": public_id, "version": version}) assert r.status_code == 200 - draft = api_client.get_data('/drafts/{}'.format(public_id)) + draft = api_client.get_data("/drafts/{}".format(public_id)) assert draft is not None - assert draft['object'] != 'draft' + assert draft["object"] != "draft" with crispin_client(default_account.id, default_account.provider) as c: - criteria = ['NOT DELETED', 'SUBJECT "{0}"'.format( - example_draft['subject'])] + criteria = ["NOT DELETED", 'SUBJECT "{0}"'.format(example_draft["subject"])] - c.conn.select_folder(default_account.drafts_folder.name, - readonly=False) + c.conn.select_folder(default_account.drafts_folder.name, readonly=False) draft_uids = c.conn.search(criteria) - assert not draft_uids, 'Message still in Drafts folder' + assert not draft_uids, "Message still in Drafts folder" c.conn.select_folder(default_account.sent_folder.name, readonly=False) sent_uids = c.conn.search(criteria) - assert sent_uids, 'Message missing from Sent folder' + assert sent_uids, "Message missing from Sent folder" c.conn.delete_messages(sent_uids) c.conn.expunge() diff --git a/inbox/test/imap/test_actions.py b/inbox/test/imap/test_actions.py index b53f98e2f..d838cdcdf 100644 --- a/inbox/test/imap/test_actions.py +++ b/inbox/test/imap/test_actions.py @@ -4,10 +4,20 @@ import pytest import gevent from flanker import mime -from inbox.actions.base import (change_labels, save_draft, update_draft, - delete_draft, create_folder, update_folder, - delete_folder, create_label, update_label, - delete_label, mark_unread, mark_starred) +from inbox.actions.base import ( + change_labels, + save_draft, + update_draft, + delete_draft, + create_folder, + update_folder, + delete_folder, + create_label, + update_label, + delete_label, + mark_unread, + mark_starred, +) from inbox.util.testutils import mock_imapclient # noqa from inbox.test.util.base import add_fake_imapuid, add_fake_category from inbox.crispin import writable_connection_pool @@ -21,67 +31,81 @@ from inbox.actions.backends.generic import _create_email import pytest + + @pytest.mark.only def test_draft_updates(db, default_account, mock_imapclient): # Set up folder list - mock_imapclient._data['Drafts'] = {} - mock_imapclient._data['Trash'] = {} - mock_imapclient._data['Sent Mail'] = {} + mock_imapclient._data["Drafts"] = {} + mock_imapclient._data["Trash"] = {} + mock_imapclient._data["Sent Mail"] = {} mock_imapclient.list_folders = lambda: [ - (('\\HasNoChildren', '\\Drafts'), '/', 'Drafts'), - (('\\HasNoChildren', '\\Trash'), '/', 'Trash'), - (('\\HasNoChildren', '\\Sent'), '/', 'Sent Mail'), + (("\\HasNoChildren", "\\Drafts"), "/", "Drafts"), + (("\\HasNoChildren", "\\Trash"), "/", "Trash"), + (("\\HasNoChildren", "\\Sent"), "/", "Sent Mail"), ] pool = writable_connection_pool(default_account.id) - draft = create_message_from_json({'subject': 'Test draft'}, - default_account.namespace, db.session, - True) + draft = create_message_from_json( + {"subject": 
"Test draft"}, default_account.namespace, db.session, True + ) draft.is_draft = True draft.version = 0 db.session.commit() with pool.get() as conn: - save_draft(conn, default_account.id, draft.id, {'version': 0}) - conn.select_folder('Drafts', lambda *args: True) + save_draft(conn, default_account.id, draft.id, {"version": 0}) + conn.select_folder("Drafts", lambda *args: True) assert len(conn.all_uids()) == 1 # Check that draft is not resaved if already synced. - update_draft(conn, default_account.id, draft.id, {'version': 0}) - conn.select_folder('Drafts', lambda *args: True) + update_draft(conn, default_account.id, draft.id, {"version": 0}) + conn.select_folder("Drafts", lambda *args: True) assert len(conn.all_uids()) == 1 # Check that an older version is deleted draft.version = 4 - sendmail_update_draft(db.session, default_account, draft, - from_addr=draft.from_addr, subject='New subject', - blocks=[]) + sendmail_update_draft( + db.session, + default_account, + draft, + from_addr=draft.from_addr, + subject="New subject", + blocks=[], + ) db.session.commit() - update_draft(conn, default_account.id, draft.id, {'version': 5}) + update_draft(conn, default_account.id, draft.id, {"version": 5}) - conn.select_folder('Drafts', lambda *args: True) + conn.select_folder("Drafts", lambda *args: True) all_uids = conn.all_uids() assert len(all_uids) == 1 data = conn.uids(all_uids)[0] parsed = mime.from_string(data.body) - expected_message_id = '<{}-{}@mailer.nylas.com>'.format( - draft.public_id, draft.version) - assert parsed.headers.get('Message-Id') == expected_message_id + expected_message_id = "<{}-{}@mailer.nylas.com>".format( + draft.public_id, draft.version + ) + assert parsed.headers.get("Message-Id") == expected_message_id # We're testing the draft deletion with Gmail here. However, # because of a race condition in Gmail's reconciliation algorithm, # we need to check if the sent mail has been created in the sent # folder. Since we're mocking everything, we have to create it # ourselves. 
- mock_imapclient.append('Sent Mail', data.body, None, None, - x_gm_msgid=4323) - - delete_draft(conn, default_account.id, draft.id, - {'message_id_header': draft.message_id_header, - 'nylas_uid': draft.nylas_uid, 'version': 5}) - - conn.select_folder('Drafts', lambda *args: True) + mock_imapclient.append("Sent Mail", data.body, None, None, x_gm_msgid=4323) + + delete_draft( + conn, + default_account.id, + draft.id, + { + "message_id_header": draft.message_id_header, + "nylas_uid": draft.nylas_uid, + "version": 5, + }, + ) + + conn.select_folder("Drafts", lambda *args: True) all_uids = conn.all_uids() assert len(all_uids) == 0 @@ -92,21 +116,19 @@ def test_change_flags(db, default_account, message, folder, mock_imapclient): mock_imapclient.remove_flags = mock.Mock() add_fake_imapuid(db.session, default_account.id, message, folder, 22) with writable_connection_pool(default_account.id).get() as crispin_client: - mark_unread(crispin_client, default_account.id, message.id, - {'unread': False}) - mock_imapclient.add_flags.assert_called_with([22], ['\\Seen'], silent=True) + mark_unread(crispin_client, default_account.id, message.id, {"unread": False}) + mock_imapclient.add_flags.assert_called_with([22], ["\\Seen"], silent=True) - mark_unread(crispin_client, default_account.id, message.id, - {'unread': True}) - mock_imapclient.remove_flags.assert_called_with([22], ['\\Seen'], silent=True) + mark_unread(crispin_client, default_account.id, message.id, {"unread": True}) + mock_imapclient.remove_flags.assert_called_with([22], ["\\Seen"], silent=True) - mark_starred(crispin_client, default_account.id, message.id, - {'starred': True}) - mock_imapclient.add_flags.assert_called_with([22], ['\\Flagged'], silent=True) + mark_starred(crispin_client, default_account.id, message.id, {"starred": True}) + mock_imapclient.add_flags.assert_called_with([22], ["\\Flagged"], silent=True) - mark_starred(crispin_client, default_account.id, message.id, - {'starred': False}) - mock_imapclient.remove_flags.assert_called_with([22], ['\\Flagged'], silent=True) + mark_starred(crispin_client, default_account.id, message.id, {"starred": False}) + mock_imapclient.remove_flags.assert_called_with( + [22], ["\\Flagged"], silent=True + ) def test_change_labels(db, default_account, message, folder, mock_imapclient): @@ -116,77 +138,98 @@ def test_change_labels(db, default_account, message, folder, mock_imapclient): add_fake_imapuid(db.session, default_account.id, message, folder, 22) with writable_connection_pool(default_account.id).get() as crispin_client: - change_labels(crispin_client, default_account.id, [message.id], - {'removed_labels': ['\\Inbox'], - 'added_labels': [u'motörhead', u'μετάνοια']}) + change_labels( + crispin_client, + default_account.id, + [message.id], + { + "removed_labels": ["\\Inbox"], + "added_labels": [u"motörhead", u"μετάνοια"], + }, + ) mock_imapclient.add_gmail_labels.assert_called_with( - [22], ['mot&APY-rhead', '&A7wDtQPEA6wDvQO,A7kDsQ-'], silent=True) - mock_imapclient.remove_gmail_labels.assert_called_with([22], ['\\Inbox'], - silent=True) + [22], ["mot&APY-rhead", "&A7wDtQPEA6wDvQO,A7kDsQ-"], silent=True + ) + mock_imapclient.remove_gmail_labels.assert_called_with( + [22], ["\\Inbox"], silent=True + ) -@pytest.mark.parametrize('obj_type', ['folder', 'label']) +@pytest.mark.parametrize("obj_type", ["folder", "label"]) def test_folder_crud(db, default_account, mock_imapclient, obj_type): mock_imapclient.create_folder = mock.Mock() mock_imapclient.rename_folder = mock.Mock() 
mock_imapclient.delete_folder = mock.Mock() - cat = add_fake_category(db.session, default_account.namespace.id, - 'MyFolder') + cat = add_fake_category(db.session, default_account.namespace.id, "MyFolder") with writable_connection_pool(default_account.id).get() as crispin_client: - if obj_type == 'folder': + if obj_type == "folder": create_folder(crispin_client, default_account.id, cat.id) else: create_label(crispin_client, default_account.id, cat.id) - mock_imapclient.create_folder.assert_called_with('MyFolder') + mock_imapclient.create_folder.assert_called_with("MyFolder") - cat.display_name = 'MyRenamedFolder' + cat.display_name = "MyRenamedFolder" db.session.commit() - if obj_type == 'folder': - update_folder(crispin_client, default_account.id, cat.id, - {'old_name': 'MyFolder', - 'new_name': 'MyRenamedFolder'}) + if obj_type == "folder": + update_folder( + crispin_client, + default_account.id, + cat.id, + {"old_name": "MyFolder", "new_name": "MyRenamedFolder"}, + ) else: - update_label(crispin_client, default_account.id, cat.id, - {'old_name': 'MyFolder', - 'new_name': 'MyRenamedFolder'}) - mock_imapclient.rename_folder.assert_called_with('MyFolder', - 'MyRenamedFolder') + update_label( + crispin_client, + default_account.id, + cat.id, + {"old_name": "MyFolder", "new_name": "MyRenamedFolder"}, + ) + mock_imapclient.rename_folder.assert_called_with("MyFolder", "MyRenamedFolder") category_id = cat.id - if obj_type == 'folder': + if obj_type == "folder": delete_folder(crispin_client, default_account.id, cat.id) else: delete_label(crispin_client, default_account.id, cat.id) - mock_imapclient.delete_folder.assert_called_with('MyRenamedFolder') + mock_imapclient.delete_folder.assert_called_with("MyRenamedFolder") db.session.commit() assert db.session.query(Category).get(category_id) is None + @pytest.yield_fixture def patched_syncback_task(monkeypatch): # Ensures 'create_event' actions fail and all others succeed def function_for_action(name): def func(*args): - if name == 'create_event': + if name == "create_event": raise Exception("Failed to create remote event") + return func - monkeypatch.setattr("inbox.transactions.actions.function_for_action", function_for_action) + monkeypatch.setattr( + "inbox.transactions.actions.function_for_action", function_for_action + ) monkeypatch.setattr("inbox.transactions.actions.ACTION_MAX_NR_OF_RETRIES", 1) yield monkeypatch.undo() + # Test that failing to create a remote copy of an event marks all pending actions # for that event as failed. 
def test_failed_event_creation(db, patched_syncback_task, default_account, event): - schedule_action('create_event', event, default_account.namespace.id, db.session) - schedule_action('update_event', event, default_account.namespace.id, db.session) - schedule_action('update_event', event, default_account.namespace.id, db.session) - schedule_action('delete_event', event, default_account.namespace.id, db.session) + schedule_action("create_event", event, default_account.namespace.id, db.session) + schedule_action("update_event", event, default_account.namespace.id, db.session) + schedule_action("update_event", event, default_account.namespace.id, db.session) + schedule_action("delete_event", event, default_account.namespace.id, db.session) db.session.commit() NUM_WORKERS = 2 - service = SyncbackService(syncback_id=0, process_number=0, - total_processes=NUM_WORKERS, num_workers=NUM_WORKERS) + service = SyncbackService( + syncback_id=0, + process_number=0, + total_processes=NUM_WORKERS, + num_workers=NUM_WORKERS, + ) service._restart_workers() service._process_log() @@ -200,4 +243,4 @@ def test_failed_event_creation(db, patched_syncback_task, default_account, event gevent.sleep(0.1) q = db.session.query(ActionLog).filter_by(record_id=event.id).all() - assert all(a.status == 'failed' for a in q) + assert all(a.status == "failed" for a in q) diff --git a/inbox/test/imap/test_crispin_client.py b/inbox/test/imap/test_crispin_client.py index 28459bc27..881326acc 100644 --- a/inbox/test/imap/test_crispin_client.py +++ b/inbox/test/imap/test_crispin_client.py @@ -10,30 +10,43 @@ import imapclient import pytest -from inbox.crispin import (CrispinClient, GmailCrispinClient, GMetadata, - GmailFlags, RawMessage, Flags, - FolderMissingError, localized_folder_names) +from inbox.crispin import ( + CrispinClient, + GmailCrispinClient, + GMetadata, + GmailFlags, + RawMessage, + Flags, + FolderMissingError, + localized_folder_names, +) class MockedIMAPClient(imapclient.IMAPClient): - def _create_IMAP4(self): return mock.Mock() @pytest.fixture def gmail_client(): - conn = MockedIMAPClient(host='somehost') - return GmailCrispinClient(account_id=1, provider_info=None, - email_address='inboxapptest@gmail.com', - conn=conn) + conn = MockedIMAPClient(host="somehost") + return GmailCrispinClient( + account_id=1, + provider_info=None, + email_address="inboxapptest@gmail.com", + conn=conn, + ) @pytest.fixture def generic_client(): - conn = MockedIMAPClient(host='somehost') - return CrispinClient(account_id=1, provider_info=None, - email_address='inboxapptest@fastmail.fm', conn=conn) + conn = MockedIMAPClient(host="somehost") + return CrispinClient( + account_id=1, + provider_info=None, + email_address="inboxapptest@fastmail.fm", + conn=conn, + ) @pytest.fixture @@ -46,213 +59,247 @@ def constants(): modseq = 95020 size = 16384 flags = () - raw_g_labels = '(mot&APY-rhead &A7wDtQPEA6wDvQO,A7kDsQ- \\Inbox)' - unicode_g_labels = [u'motörhead', u'μετάνοια', '\\Inbox'] + raw_g_labels = "(mot&APY-rhead &A7wDtQPEA6wDvQO,A7kDsQ- \\Inbox)" + unicode_g_labels = [u"motörhead", u"μετάνοια", "\\Inbox"] - internaldate = '02-Mar-2015 23:36:20 +0000' - body = 'Delivered-To: ...' + internaldate = "02-Mar-2015 23:36:20 +0000" + body = "Delivered-To: ..." 
body_size = len(body) # folder test constant gmail_role_map = { - '[Gmail]/All Mail': 'all', - 'Inbox': 'inbox', - '[Gmail]/Trash': 'trash', - '[Gmail]/Spam': 'spam', - '[Gmail]/Drafts': 'drafts', - '[Gmail]/Sent Mail': 'sent', - '[Gmail]/Important': 'important', - '[Gmail]/Starred': 'starred', - 'reference': None + "[Gmail]/All Mail": "all", + "Inbox": "inbox", + "[Gmail]/Trash": "trash", + "[Gmail]/Spam": "spam", + "[Gmail]/Drafts": "drafts", + "[Gmail]/Sent Mail": "sent", + "[Gmail]/Important": "important", + "[Gmail]/Starred": "starred", + "reference": None, } - gmail_folders = \ - [(('\\HasNoChildren',), '/', u'INBOX'), - (('\\Noselect', '\\HasChildren'), '/', u'[Gmail]'), - (('\\HasNoChildren', '\\All'), '/', u'[Gmail]/All Mail'), - (('\\HasNoChildren', '\\Drafts'), '/', u'[Gmail]/Drafts'), - (('\\HasNoChildren', '\\Important'), '/', u'[Gmail]/Important'), - (('\\HasNoChildren', '\\Sent'), '/', u'[Gmail]/Sent Mail'), - (('\\HasNoChildren', '\\Junk'), '/', u'[Gmail]/Spam'), - (('\\Flagged', '\\HasNoChildren'), '/', u'[Gmail]/Starred'), - (('\\HasNoChildren', '\\Trash'), '/', u'[Gmail]/Trash'), - (('\\HasNoChildren',), '/', u'reference')] - imap_folders = \ - [(('\\HasNoChildren',), '/', u'INBOX'), - (('\\Noselect', '\\HasChildren'), '/', u'SKIP'), - (('\\HasNoChildren', '\\Drafts'), '/', u'Drafts'), - (('\\HasNoChildren', '\\Sent'), '/', u'Sent'), - (('\\HasNoChildren', '\\Sent'), '/', u'Sent Items'), - (('\\HasNoChildren', '\\Junk'), '/', u'Spam'), - (('\\HasNoChildren', '\\Trash'), '/', u'Trash'), - (('\\HasNoChildren',), '/', u'reference')] + gmail_folders = [ + (("\\HasNoChildren",), "/", u"INBOX"), + (("\\Noselect", "\\HasChildren"), "/", u"[Gmail]"), + (("\\HasNoChildren", "\\All"), "/", u"[Gmail]/All Mail"), + (("\\HasNoChildren", "\\Drafts"), "/", u"[Gmail]/Drafts"), + (("\\HasNoChildren", "\\Important"), "/", u"[Gmail]/Important"), + (("\\HasNoChildren", "\\Sent"), "/", u"[Gmail]/Sent Mail"), + (("\\HasNoChildren", "\\Junk"), "/", u"[Gmail]/Spam"), + (("\\Flagged", "\\HasNoChildren"), "/", u"[Gmail]/Starred"), + (("\\HasNoChildren", "\\Trash"), "/", u"[Gmail]/Trash"), + (("\\HasNoChildren",), "/", u"reference"), + ] + imap_folders = [ + (("\\HasNoChildren",), "/", u"INBOX"), + (("\\Noselect", "\\HasChildren"), "/", u"SKIP"), + (("\\HasNoChildren", "\\Drafts"), "/", u"Drafts"), + (("\\HasNoChildren", "\\Sent"), "/", u"Sent"), + (("\\HasNoChildren", "\\Sent"), "/", u"Sent Items"), + (("\\HasNoChildren", "\\Junk"), "/", u"Spam"), + (("\\HasNoChildren", "\\Trash"), "/", u"Trash"), + (("\\HasNoChildren",), "/", u"reference"), + ] imap_role_map = { - 'INBOX': 'inbox', - 'Trash': 'trash', - 'Drafts': 'drafts', - 'Sent': 'sent', - 'Sent Items': 'sent', - 'Spam': 'spam', - u'reference': None + "INBOX": "inbox", + "Trash": "trash", + "Drafts": "drafts", + "Sent": "sent", + "Sent Items": "sent", + "Spam": "spam", + u"reference": None, } - return dict(g_msgid=g_msgid, g_thrid=g_thrid, seq=seq, uid=uid, - modseq=modseq, size=size, flags=flags, - raw_g_labels=raw_g_labels, unicode_g_labels=unicode_g_labels, - body=body, body_size=body_size, internaldate=internaldate, - gmail_role_map=gmail_role_map, gmail_folders=gmail_folders, - imap_role_map=imap_role_map, imap_folders=imap_folders) + return dict( + g_msgid=g_msgid, + g_thrid=g_thrid, + seq=seq, + uid=uid, + modseq=modseq, + size=size, + flags=flags, + raw_g_labels=raw_g_labels, + unicode_g_labels=unicode_g_labels, + body=body, + body_size=body_size, + internaldate=internaldate, + gmail_role_map=gmail_role_map, + 
gmail_folders=gmail_folders, + imap_role_map=imap_role_map, + imap_folders=imap_folders, + ) def patch_gmail_client(monkeypatch, folders): - monkeypatch.setattr(GmailCrispinClient, '_fetch_folder_list', - lambda x: folders) + monkeypatch.setattr(GmailCrispinClient, "_fetch_folder_list", lambda x: folders) - conn = MockedIMAPClient(host='somehost') - return GmailCrispinClient(account_id=1, provider_info=None, - email_address='inboxapptest@gmail.com', - conn=conn) + conn = MockedIMAPClient(host="somehost") + return GmailCrispinClient( + account_id=1, + provider_info=None, + email_address="inboxapptest@gmail.com", + conn=conn, + ) def patch_generic_client(monkeypatch, folders): - monkeypatch.setattr(CrispinClient, '_fetch_folder_list', - lambda x: folders) + monkeypatch.setattr(CrispinClient, "_fetch_folder_list", lambda x: folders) - conn = MockedIMAPClient(host='somehost') - return CrispinClient(account_id=1, provider_info={}, - email_address='inboxapptest@fastmail.fm', conn=conn) + conn = MockedIMAPClient(host="somehost") + return CrispinClient( + account_id=1, + provider_info={}, + email_address="inboxapptest@fastmail.fm", + conn=conn, + ) def patch_imap4(crispin_client, resp): - crispin_client.conn._imap._command_complete.return_value = ( - 'OK', ['Success']) - crispin_client.conn._imap._untagged_response.return_value = ('OK', resp) + crispin_client.conn._imap._command_complete.return_value = ("OK", ["Success"]) + crispin_client.conn._imap._untagged_response.return_value = ("OK", resp) def test_g_metadata(gmail_client, constants): - expected_resp = '{seq} (X-GM-THRID {g_thrid} X-GM-MSGID {g_msgid} ' \ - 'RFC822.SIZE {size} UID {uid} MODSEQ ({modseq}))'. \ - format(**constants) - unsolicited_resp = '1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))' + expected_resp = ( + "{seq} (X-GM-THRID {g_thrid} X-GM-MSGID {g_msgid} " + "RFC822.SIZE {size} UID {uid} MODSEQ ({modseq}))".format(**constants) + ) + unsolicited_resp = "1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))" patch_imap4(gmail_client, [expected_resp, unsolicited_resp]) - uid = constants['uid'] - g_msgid = constants['g_msgid'] - g_thrid = constants['g_thrid'] - size = constants['size'] - assert gmail_client.g_metadata([uid]) == {uid: GMetadata(g_msgid, g_thrid, - size)} + uid = constants["uid"] + g_msgid = constants["g_msgid"] + g_thrid = constants["g_thrid"] + size = constants["size"] + assert gmail_client.g_metadata([uid]) == {uid: GMetadata(g_msgid, g_thrid, size)} def test_gmail_flags(gmail_client, constants): - expected_resp = '{seq} (FLAGS {flags} X-GM-LABELS {raw_g_labels} ' \ - 'UID {uid} MODSEQ ({modseq}))'.format(**constants) - unsolicited_resp = '1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))' + expected_resp = ( + "{seq} (FLAGS {flags} X-GM-LABELS {raw_g_labels} " + "UID {uid} MODSEQ ({modseq}))".format(**constants) + ) + unsolicited_resp = "1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))" patch_imap4(gmail_client, [expected_resp, unsolicited_resp]) - uid = constants['uid'] - flags = constants['flags'] - modseq = constants['modseq'] - g_labels = constants['unicode_g_labels'] - assert gmail_client.flags([uid]) == {uid: GmailFlags(flags, g_labels, - modseq)} + uid = constants["uid"] + flags = constants["flags"] + modseq = constants["modseq"] + g_labels = constants["unicode_g_labels"] + assert gmail_client.flags([uid]) == {uid: GmailFlags(flags, g_labels, modseq)} def test_g_msgids(gmail_client, constants): - expected_resp = '{seq} (X-GM-MSGID {g_msgid} ' \ - 'UID {uid} MODSEQ ({modseq}))'.format(**constants) - unsolicited_resp = 
'1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))' + expected_resp = ( + "{seq} (X-GM-MSGID {g_msgid} " + "UID {uid} MODSEQ ({modseq}))".format(**constants) + ) + unsolicited_resp = "1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))" patch_imap4(gmail_client, [expected_resp, unsolicited_resp]) - uid = constants['uid'] - g_msgid = constants['g_msgid'] + uid = constants["uid"] + g_msgid = constants["g_msgid"] assert gmail_client.g_msgids([uid]) == {uid: g_msgid} def test_gmail_body(gmail_client, constants): - expected_resp = ('{seq} (X-GM-MSGID {g_msgid} X-GM-THRID {g_thrid} ' - 'X-GM-LABELS {raw_g_labels} UID {uid} MODSEQ ({modseq}) ' - 'INTERNALDATE "{internaldate}" FLAGS {flags} ' - 'BODY[] {{{body_size}}}'.format(**constants), - constants['body']) - unsolicited_resp = '1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))' - patch_imap4(gmail_client, [expected_resp, ')', unsolicited_resp]) - - uid = constants['uid'] - flags = constants['flags'] - g_labels = constants['unicode_g_labels'] - g_thrid = constants['g_thrid'] - g_msgid = constants['g_msgid'] - body = constants['body'] + expected_resp = ( + "{seq} (X-GM-MSGID {g_msgid} X-GM-THRID {g_thrid} " + "X-GM-LABELS {raw_g_labels} UID {uid} MODSEQ ({modseq}) " + 'INTERNALDATE "{internaldate}" FLAGS {flags} ' + "BODY[] {{{body_size}}}".format(**constants), + constants["body"], + ) + unsolicited_resp = "1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))" + patch_imap4(gmail_client, [expected_resp, ")", unsolicited_resp]) + + uid = constants["uid"] + flags = constants["flags"] + g_labels = constants["unicode_g_labels"] + g_thrid = constants["g_thrid"] + g_msgid = constants["g_msgid"] + body = constants["body"] assert gmail_client.uids([uid]) == [ - RawMessage(uid=long(uid), - internaldate=datetime(2015, 3, 2, 23, 36, 20), - flags=flags, - body=body, - g_labels=g_labels, - g_thrid=g_thrid, - g_msgid=g_msgid) + RawMessage( + uid=long(uid), + internaldate=datetime(2015, 3, 2, 23, 36, 20), + flags=flags, + body=body, + g_labels=g_labels, + g_thrid=g_thrid, + g_msgid=g_msgid, + ) ] def test_flags(generic_client, constants): - expected_resp = '{seq} (FLAGS {flags} ' \ - 'UID {uid} MODSEQ ({modseq}))'.format(**constants) - unsolicited_resp = '1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))' + expected_resp = "{seq} (FLAGS {flags} " "UID {uid} MODSEQ ({modseq}))".format( + **constants + ) + unsolicited_resp = "1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))" patch_imap4(generic_client, [expected_resp, unsolicited_resp]) - uid = constants['uid'] - flags = constants['flags'] + uid = constants["uid"] + flags = constants["flags"] assert generic_client.flags([uid]) == {uid: Flags(flags, None)} def test_body(generic_client, constants): - expected_resp = ('{seq} (UID {uid} MODSEQ ({modseq}) ' - 'INTERNALDATE "{internaldate}" FLAGS {flags} ' - 'BODY[] {{{body_size}}}'.format(**constants), - constants['body']) - unsolicited_resp = '1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))' - patch_imap4(generic_client, [expected_resp, ')', unsolicited_resp]) - - uid = constants['uid'] - flags = constants['flags'] - body = constants['body'] + expected_resp = ( + "{seq} (UID {uid} MODSEQ ({modseq}) " + 'INTERNALDATE "{internaldate}" FLAGS {flags} ' + "BODY[] {{{body_size}}}".format(**constants), + constants["body"], + ) + unsolicited_resp = "1198 (UID 1731 MODSEQ (95244) FLAGS (\\Seen))" + patch_imap4(generic_client, [expected_resp, ")", unsolicited_resp]) + + uid = constants["uid"] + flags = constants["flags"] + body = constants["body"] assert generic_client.uids([uid]) == [ - 
RawMessage(uid=long(uid), - internaldate=datetime(2015, 3, 2, 23, 36, 20), - flags=flags, - body=body, - g_labels=None, - g_thrid=None, - g_msgid=None) + RawMessage( + uid=long(uid), + internaldate=datetime(2015, 3, 2, 23, 36, 20), + flags=flags, + body=body, + g_labels=None, + g_thrid=None, + g_msgid=None, + ) ] def test_internaldate(generic_client, constants): """ Test that our monkeypatched imaplib works through imapclient """ dates_to_test = [ - ('6-Mar-2015 10:02:32 +0900', datetime(2015, 3, 6, 1, 2, 32)), - (' 6-Mar-2015 10:02:32 +0900', datetime(2015, 3, 6, 1, 2, 32)), - ('06-Mar-2015 10:02:32 +0900', datetime(2015, 3, 6, 1, 2, 32)), - ('6-Mar-2015 07:02:32 +0900', datetime(2015, 3, 5, 22, 2, 32)), - (' 3-Sep-1922 09:16:51 +0000', datetime(1922, 9, 3, 9, 16, 51)), - ('2-Jan-2015 03:05:37 +0800', datetime(2015, 1, 1, 19, 5, 37)) + ("6-Mar-2015 10:02:32 +0900", datetime(2015, 3, 6, 1, 2, 32)), + (" 6-Mar-2015 10:02:32 +0900", datetime(2015, 3, 6, 1, 2, 32)), + ("06-Mar-2015 10:02:32 +0900", datetime(2015, 3, 6, 1, 2, 32)), + ("6-Mar-2015 07:02:32 +0900", datetime(2015, 3, 5, 22, 2, 32)), + (" 3-Sep-1922 09:16:51 +0000", datetime(1922, 9, 3, 9, 16, 51)), + ("2-Jan-2015 03:05:37 +0800", datetime(2015, 1, 1, 19, 5, 37)), ] for internaldate_string, native_date in dates_to_test: - constants['internaldate'] = internaldate_string - expected_resp = ('{seq} (UID {uid} MODSEQ ({modseq}) ' - 'INTERNALDATE "{internaldate}" FLAGS {flags} ' - 'BODY[] {{{body_size}}}'.format(**constants), - constants['body']) - patch_imap4(generic_client, [expected_resp, ')']) - - uid = constants['uid'] + constants["internaldate"] = internaldate_string + expected_resp = ( + "{seq} (UID {uid} MODSEQ ({modseq}) " + 'INTERNALDATE "{internaldate}" FLAGS {flags} ' + "BODY[] {{{body_size}}}".format(**constants), + constants["body"], + ) + patch_imap4(generic_client, [expected_resp, ")"]) + + uid = constants["uid"] assert generic_client.uids([uid]) == [ - RawMessage(uid=long(uid), - internaldate=native_date, - flags=constants['flags'], - body=constants['body'], - g_labels=None, - g_thrid=None, - g_msgid=None) + RawMessage( + uid=long(uid), + internaldate=native_date, + flags=constants["flags"], + body=constants["body"], + g_labels=None, + g_thrid=None, + g_msgid=None, + ) ] @@ -260,203 +307,247 @@ def test_deleted_folder_on_select(monkeypatch, generic_client, constants): """ Test that a 'select failed EXAMINE' error specifying that a folder doesn't exist is converted into a FolderMissingError. (Yahoo style) """ + def raise_invalid_folder_exc(*args, **kwargs): - raise imapclient.IMAPClient.Error("select failed: '[TRYCREATE] EXAMINE" - " error - Folder does not exist or" - " server encountered an error") + raise imapclient.IMAPClient.Error( + "select failed: '[TRYCREATE] EXAMINE" + " error - Folder does not exist or" + " server encountered an error" + ) - monkeypatch.setattr('imapclient.IMAPClient.select_folder', - raise_invalid_folder_exc) + monkeypatch.setattr("imapclient.IMAPClient.select_folder", raise_invalid_folder_exc) with pytest.raises(FolderMissingError): - generic_client.select_folder('missing_folder', lambda: True) + generic_client.select_folder("missing_folder", lambda: True) def test_deleted_folder_on_fetch(monkeypatch, generic_client, constants): """ Test that a 'select failed EXAMINE' error specifying that a folder doesn't exist is converted into a FolderMissingError. 
(Yahoo style) """ + def raise_invalid_uid_exc(*args, **kwargs): raise imapclient.IMAPClient.Error( - '[UNAVAILABLE] UID FETCH Server error while fetching messages') + "[UNAVAILABLE] UID FETCH Server error while fetching messages" + ) - monkeypatch.setattr('imapclient.IMAPClient.fetch', - raise_invalid_uid_exc) + monkeypatch.setattr("imapclient.IMAPClient.fetch", raise_invalid_uid_exc) # Simply check that the Error exception is handled. generic_client.uids(["125"]) def test_gmail_folders(monkeypatch, constants): - folders = constants['gmail_folders'] - role_map = constants['gmail_role_map'] + folders = constants["gmail_folders"] + role_map = constants["gmail_role_map"] client = patch_gmail_client(monkeypatch, folders) raw_folders = client.folders() - generic_folder_checks(raw_folders, role_map, client, 'gmail') + generic_folder_checks(raw_folders, role_map, client, "gmail") def generic_folder_checks(raw_folders, role_map, client, provider): # Should not contain the `\\Noselect' folder - assert filter(lambda y: '\\Noselect' in y, - map(lambda x: x[0], raw_folders)) == [] - if provider == 'gmail': + assert filter(lambda y: "\\Noselect" in y, map(lambda x: x[0], raw_folders)) == [] + if provider == "gmail": assert {f.display_name: f.role for f in raw_folders} == role_map - elif provider == 'imap': + elif provider == "imap": for f in raw_folders: if f.display_name in role_map: assert f.role == role_map[f.display_name] else: - assert f.display_name in ['reference'] + assert f.display_name in ["reference"] assert f.role is None folder_names = client.folder_names() - if provider == 'gmail': - for role in ['inbox', 'all', 'trash', 'drafts', 'important', 'sent', - 'spam', 'starred']: + if provider == "gmail": + for role in [ + "inbox", + "all", + "trash", + "drafts", + "important", + "sent", + "spam", + "starred", + ]: assert role in folder_names names = folder_names[role] assert isinstance(names, list) and len(names) == 1 - elif provider == 'imap': - for role in ['inbox', 'trash', 'drafts', 'sent', 'spam']: + elif provider == "imap": + for role in ["inbox", "trash", "drafts", "sent", "spam"]: assert role in folder_names names = folder_names[role] assert isinstance(names, list) - if role == 'sent': + if role == "sent": assert len(names) == 2 else: assert len(names) == 1 # Inbox folder should be synced first. - assert client.sync_folders()[0] == 'INBOX' + assert client.sync_folders()[0] == "INBOX" def test_gmail_missing_trash(constants, monkeypatch): - ''' + """ Test that we can label their folder when they don't have a folder labeled trash. This test will go through a list of examples of trash aliases we have seen in the wild, and check that we are able to properly label those folders. 
- ''' + """ # create list of folders that doesn't have a trash folder - folder_base = \ - [(('\\HasNoChildren',), '/', u'INBOX'), - (('\\Noselect', '\\HasChildren'), '/', u'[Gmail]'), - (('\\HasNoChildren', '\\All'), '/', u'[Gmail]/All Mail'), - (('\\HasNoChildren', '\\Drafts'), '/', u'[Gmail]/Drafts'), - (('\\HasNoChildren', '\\Important'), '/', u'[Gmail]/Important'), - (('\\HasNoChildren', '\\Sent'), '/', u'[Gmail]/Sent Mail'), - (('\\HasNoChildren', '\\Junk'), '/', u'[Gmail]/Spam'), - (('\\Flagged', '\\HasNoChildren'), '/', u'[Gmail]/Starred'), - (('\\HasNoChildren',), '/', u'reference')] - check_missing_generic('trash', - folder_base, localized_folder_names['trash'], - 'gmail', constants, monkeypatch) + folder_base = [ + (("\\HasNoChildren",), "/", u"INBOX"), + (("\\Noselect", "\\HasChildren"), "/", u"[Gmail]"), + (("\\HasNoChildren", "\\All"), "/", u"[Gmail]/All Mail"), + (("\\HasNoChildren", "\\Drafts"), "/", u"[Gmail]/Drafts"), + (("\\HasNoChildren", "\\Important"), "/", u"[Gmail]/Important"), + (("\\HasNoChildren", "\\Sent"), "/", u"[Gmail]/Sent Mail"), + (("\\HasNoChildren", "\\Junk"), "/", u"[Gmail]/Spam"), + (("\\Flagged", "\\HasNoChildren"), "/", u"[Gmail]/Starred"), + (("\\HasNoChildren",), "/", u"reference"), + ] + check_missing_generic( + "trash", + folder_base, + localized_folder_names["trash"], + "gmail", + constants, + monkeypatch, + ) def test_imap_missing_trash(constants, monkeypatch): - ''' + """ Same strategy as test_gmail_missing_trash, except with imap as a provider - ''' - folder_base = \ - [(('\\HasNoChildren',), '/', u'INBOX'), - (('\\Noselect', '\\HasChildren'), '/', u'SKIP'), - (('\\HasNoChildren', '\\Drafts'), '/', u'Drafts'), - (('\\HasNoChildren', '\\Sent'), '/', u'Sent'), - (('\\HasNoChildren', '\\Sent'), '/', u'Sent Items'), - (('\\HasNoChildren', '\\Junk'), '/', u'Spam'), - (('\\HasNoChildren',), '/', u'reference')] - check_missing_generic('trash', - folder_base, localized_folder_names['trash'], - 'imap', constants, monkeypatch) + """ + folder_base = [ + (("\\HasNoChildren",), "/", u"INBOX"), + (("\\Noselect", "\\HasChildren"), "/", u"SKIP"), + (("\\HasNoChildren", "\\Drafts"), "/", u"Drafts"), + (("\\HasNoChildren", "\\Sent"), "/", u"Sent"), + (("\\HasNoChildren", "\\Sent"), "/", u"Sent Items"), + (("\\HasNoChildren", "\\Junk"), "/", u"Spam"), + (("\\HasNoChildren",), "/", u"reference"), + ] + check_missing_generic( + "trash", + folder_base, + localized_folder_names["trash"], + "imap", + constants, + monkeypatch, + ) def test_gmail_missing_spam(constants, monkeypatch): - ''' + """ Same strategy as test_gmail_missing_trash, except with spam folder aliases - ''' + """ # Create a list of folders thath doesn't have a spam folder - folder_base = \ - [(('\\HasNoChildren',), '/', u'INBOX'), - (('\\Noselect', '\\HasChildren'), '/', u'[Gmail]'), - (('\\HasNoChildren', '\\All'), '/', u'[Gmail]/All Mail'), - (('\\HasNoChildren', '\\Drafts'), '/', u'[Gmail]/Drafts'), - (('\\HasNoChildren', '\\Important'), '/', u'[Gmail]/Important'), - (('\\HasNoChildren', '\\Sent'), '/', u'[Gmail]/Sent Mail'), - (('\\Flagged', '\\HasNoChildren'), '/', u'[Gmail]/Starred'), - (('\\HasNoChildren', '\\Trash'), '/', u'[Gmail]/Trash'), - (('\\HasNoChildren',), '/', u'reference')] - check_missing_generic('spam', folder_base, localized_folder_names['spam'], - 'gmail', constants, monkeypatch) + folder_base = [ + (("\\HasNoChildren",), "/", u"INBOX"), + (("\\Noselect", "\\HasChildren"), "/", u"[Gmail]"), + (("\\HasNoChildren", "\\All"), "/", u"[Gmail]/All Mail"), + (("\\HasNoChildren", 
"\\Drafts"), "/", u"[Gmail]/Drafts"), + (("\\HasNoChildren", "\\Important"), "/", u"[Gmail]/Important"), + (("\\HasNoChildren", "\\Sent"), "/", u"[Gmail]/Sent Mail"), + (("\\Flagged", "\\HasNoChildren"), "/", u"[Gmail]/Starred"), + (("\\HasNoChildren", "\\Trash"), "/", u"[Gmail]/Trash"), + (("\\HasNoChildren",), "/", u"reference"), + ] + check_missing_generic( + "spam", + folder_base, + localized_folder_names["spam"], + "gmail", + constants, + monkeypatch, + ) def test_imap_missing_spam(constants, monkeypatch): - ''' + """ Same strategy as test_gmail_missing_spam, except with imap as a provider - ''' - folder_base = \ - [(('\\HasNoChildren',), '/', u'INBOX'), - (('\\Noselect', '\\HasChildren'), '/', u'SKIP'), - (('\\HasNoChildren', '\\Drafts'), '/', u'Drafts'), - (('\\HasNoChildren', '\\Sent'), '/', u'Sent'), - (('\\HasNoChildren', '\\Sent'), '/', u'Sent Items'), - (('\\HasNoChildren', '\\Trash'), '/', u'Trash'), - (('\\HasNoChildren',), '/', u'reference')] - check_missing_generic('spam', folder_base, localized_folder_names['spam'], - 'imap', constants, monkeypatch) + """ + folder_base = [ + (("\\HasNoChildren",), "/", u"INBOX"), + (("\\Noselect", "\\HasChildren"), "/", u"SKIP"), + (("\\HasNoChildren", "\\Drafts"), "/", u"Drafts"), + (("\\HasNoChildren", "\\Sent"), "/", u"Sent"), + (("\\HasNoChildren", "\\Sent"), "/", u"Sent Items"), + (("\\HasNoChildren", "\\Trash"), "/", u"Trash"), + (("\\HasNoChildren",), "/", u"reference"), + ] + check_missing_generic( + "spam", + folder_base, + localized_folder_names["spam"], + "imap", + constants, + monkeypatch, + ) def test_gmail_missing_sent(constants, monkeypatch): - ''' + """ Same strategy as test_gmail_missing_trash, except with sent folder aliases - ''' + """ # Create a list of folders thath doesn't have a sent folder - folder_base = \ - [(('\\HasNoChildren',), '/', u'INBOX'), - (('\\Noselect', '\\HasChildren'), '/', u'[Gmail]'), - (('\\HasNoChildren', '\\All'), '/', u'[Gmail]/All Mail'), - (('\\HasNoChildren', '\\Drafts'), '/', u'[Gmail]/Drafts'), - (('\\HasNoChildren', '\\Important'), '/', u'[Gmail]/Important'), - (('\\HasNoChildren', '\\Junk'), '/', u'[Gmail]/Spam'), - (('\\Flagged', '\\HasNoChildren'), '/', u'[Gmail]/Starred'), - (('\\HasNoChildren', '\\Trash'), '/', u'[Gmail]/Trash'), - (('\\HasNoChildren',), '/', u'reference')] - check_missing_generic('sent', folder_base, localized_folder_names['sent'], - 'gmail', constants, monkeypatch) + folder_base = [ + (("\\HasNoChildren",), "/", u"INBOX"), + (("\\Noselect", "\\HasChildren"), "/", u"[Gmail]"), + (("\\HasNoChildren", "\\All"), "/", u"[Gmail]/All Mail"), + (("\\HasNoChildren", "\\Drafts"), "/", u"[Gmail]/Drafts"), + (("\\HasNoChildren", "\\Important"), "/", u"[Gmail]/Important"), + (("\\HasNoChildren", "\\Junk"), "/", u"[Gmail]/Spam"), + (("\\Flagged", "\\HasNoChildren"), "/", u"[Gmail]/Starred"), + (("\\HasNoChildren", "\\Trash"), "/", u"[Gmail]/Trash"), + (("\\HasNoChildren",), "/", u"reference"), + ] + check_missing_generic( + "sent", + folder_base, + localized_folder_names["sent"], + "gmail", + constants, + monkeypatch, + ) def test_imap_missing_sent(constants, monkeypatch): - ''' + """ Almost same strategy as test_gmail_missing_sent, except with imap as a provider we can't really make call to checking_missing_geneirc, because imap and sent are special because there are allowed to be more than 1 sent folder. 
- ''' - folder_base = \ - [(('\\HasNoChildren',), '/', u'INBOX'), - (('\\Noselect', '\\HasChildren'), '/', u'SKIP'), - (('\\HasNoChildren', '\\Drafts'), '/', u'Drafts'), - (('\\HasNoChildren', '\\Junk'), '/', u'Spam'), - (('\\HasNoChildren', '\\Trash'), '/', u'Trash'), - (('\\HasNoChildren',), '/', u'reference')] + """ + folder_base = [ + (("\\HasNoChildren",), "/", u"INBOX"), + (("\\Noselect", "\\HasChildren"), "/", u"SKIP"), + (("\\HasNoChildren", "\\Drafts"), "/", u"Drafts"), + (("\\HasNoChildren", "\\Junk"), "/", u"Spam"), + (("\\HasNoChildren", "\\Trash"), "/", u"Trash"), + (("\\HasNoChildren",), "/", u"reference"), + ] role_map = { - 'INBOX': 'inbox', - 'Trash': 'trash', - 'Drafts': 'drafts', - 'Spam': 'spam', - u'reference': None + "INBOX": "inbox", + "Trash": "trash", + "Drafts": "drafts", + "Spam": "spam", + u"reference": None, } - for role_alias in localized_folder_names['sent']: - folders = folder_base + [(('\\HasNoChildren'), '/', role_alias)] + for role_alias in localized_folder_names["sent"]: + folders = folder_base + [(("\\HasNoChildren"), "/", role_alias)] client = patch_generic_client(monkeypatch, folders) raw_folders = client.folders() folder_names = client.folder_names() - role_map[role_alias] = 'sent' + role_map[role_alias] = "sent" # Explcit checks. Different than check_missing_generic # and generic_folder_checks, because imap allows @@ -466,9 +557,9 @@ def test_imap_missing_sent(constants, monkeypatch): if f.display_name in role_map: assert f.role == role_map[f.display_name] else: - assert f.display_name in ['reference'] + assert f.display_name in ["reference"] assert f.role is None - for role in ['inbox', 'trash', 'drafts', 'sent', 'spam']: + for role in ["inbox", "trash", "drafts", "sent", "spam"]: assert role in folder_names names = folder_names[role] @@ -478,18 +569,22 @@ def test_imap_missing_sent(constants, monkeypatch): del role_map[role_alias] -def check_missing_generic(role, folder_base, generic_role_names, - provider, constants, monkeypatch): - ''' +def check_missing_generic( + role, folder_base, generic_role_names, provider, constants, monkeypatch +): + """ check clients label every folder in generic_role_names as input role role: the role that the generic_role_names should be assigned folder_base: generic list of folders, excluding one that is assigned role generic_role_names: list of strings that represent common role liases for - ''' + """ assert folder_base is not None - role_map = constants['gmail_role_map'] \ - if provider == 'gmail' else constants['imap_role_map'] + role_map = ( + constants["gmail_role_map"] + if provider == "gmail" + else constants["imap_role_map"] + ) # role_map is close, but not quite right, because it has a role key keys_to_remove = [] # done in two loops to avoid modifying map while iterating through it @@ -500,10 +595,12 @@ def check_missing_generic(role, folder_base, generic_role_names, del role_map[key] for role_alias in generic_role_names: # add in a folder with name of role alias, without it's role flag - folders = folder_base + [(('\\HasNoChildren'), '/', role_alias)] - client = patch_gmail_client(monkeypatch, folders) \ - if provider == 'gmail' \ + folders = folder_base + [(("\\HasNoChildren"), "/", role_alias)] + client = ( + patch_gmail_client(monkeypatch, folders) + if provider == "gmail" else patch_generic_client(monkeypatch, folders) + ) raw_folders = client.folders() role_map[role_alias] = role @@ -516,33 +613,34 @@ def test_gmail_folders_no_flags(monkeypatch): Tests that system folders (trash, inbox, sent) without 
flags can be labeled """ - folders = \ - [(('\\HasNoChildren',), '/', u'INBOX'), - (('\\Noselect', '\\HasChildren'), '/', u'[Gmail]'), - (('\\HasNoChildren', '\\All'), '/', u'[Gmail]/All Mail'), - (('\\HasNoChildren', '\\Drafts'), '/', u'[Gmail]/Drafts'), - (('\\HasNoChildren', '\\Important'), '/', u'[Gmail]/Important'), - (('\\HasNoChildren'), '/', u'[Gmail]/Sent Mail'), - (('\\HasNoChildren'), '/', u'[Gmail]/Spam'), - (('\\Flagged', '\\HasNoChildren'), '/', u'[Gmail]/Starred'), - (('\\HasNoChildren'), '/', u'[Gmail]/Trash'), - (('\\HasNoChildren',), '/', u'reference')] + folders = [ + (("\\HasNoChildren",), "/", u"INBOX"), + (("\\Noselect", "\\HasChildren"), "/", u"[Gmail]"), + (("\\HasNoChildren", "\\All"), "/", u"[Gmail]/All Mail"), + (("\\HasNoChildren", "\\Drafts"), "/", u"[Gmail]/Drafts"), + (("\\HasNoChildren", "\\Important"), "/", u"[Gmail]/Important"), + (("\\HasNoChildren"), "/", u"[Gmail]/Sent Mail"), + (("\\HasNoChildren"), "/", u"[Gmail]/Spam"), + (("\\Flagged", "\\HasNoChildren"), "/", u"[Gmail]/Starred"), + (("\\HasNoChildren"), "/", u"[Gmail]/Trash"), + (("\\HasNoChildren",), "/", u"reference"), + ] gmail_role_map = { - '[Gmail]/All Mail': 'all', - 'Inbox': 'inbox', - '[Gmail]/Trash': 'trash', - '[Gmail]/Spam': 'spam', - '[Gmail]/Drafts': 'drafts', - '[Gmail]/Sent Mail': 'sent', - '[Gmail]/Important': 'important', - '[Gmail]/Starred': 'starred', - 'reference': None + "[Gmail]/All Mail": "all", + "Inbox": "inbox", + "[Gmail]/Trash": "trash", + "[Gmail]/Spam": "spam", + "[Gmail]/Drafts": "drafts", + "[Gmail]/Sent Mail": "sent", + "[Gmail]/Important": "important", + "[Gmail]/Starred": "starred", + "reference": None, } client = patch_gmail_client(monkeypatch, folders) raw_folders = client.folders() - generic_folder_checks(raw_folders, gmail_role_map, client, 'gmail') + generic_folder_checks(raw_folders, gmail_role_map, client, "gmail") def test_gmail_many_folders_one_role(monkeypatch, constants): @@ -557,9 +655,11 @@ def test_gmail_many_folders_one_role(monkeypatch, constants): # some duplitace folders where one has been flagged, # and neither have been flagged # in both cases, only one should come out flagged. 
- folders = constants['gmail_folders'] - duplicates = [(('\HasNoChildren'), '/', u'[Imap]/Trash'), - (('\\HasNoChildren'), '/', u'[Imap]/Sent')] + folders = constants["gmail_folders"] + duplicates = [ + (("\HasNoChildren"), "/", u"[Imap]/Trash"), + (("\\HasNoChildren"), "/", u"[Imap]/Sent"), + ] folders += duplicates # This test adds [Imap]/Trash and [Imap]/sent # because we've seen them in the wild with gmail @@ -568,11 +668,18 @@ def test_gmail_many_folders_one_role(monkeypatch, constants): raw_folders = client.folders() folder_names = client.folder_names() - for role in ['inbox', 'all', 'trash', 'drafts', 'important', 'sent', - 'spam', 'starred']: + for role in [ + "inbox", + "all", + "trash", + "drafts", + "important", + "sent", + "spam", + "starred", + ]: assert role in folder_names - test_set = filter(lambda x: x == role, - map(lambda y: y.role, raw_folders)) + test_set = filter(lambda x: x == role, map(lambda y: y.role, raw_folders)) assert len(test_set) == 1, "assigned wrong number of {}".format(role) names = folder_names[role] @@ -581,44 +688,45 @@ def test_gmail_many_folders_one_role(monkeypatch, constants): def test_imap_folders(monkeypatch, constants): - folders = constants['imap_folders'] - role_map = constants['imap_role_map'] + folders = constants["imap_folders"] + role_map = constants["imap_role_map"] client = patch_generic_client(monkeypatch, folders) raw_folders = client.folders() - generic_folder_checks(raw_folders, role_map, client, 'imap') + generic_folder_checks(raw_folders, role_map, client, "imap") def test_imap_folders_no_flags(monkeypatch, constants): """ Tests that system folders (trash, inbox, sent) without flags can be labeled """ - folders = \ - [(('\\HasNoChildren',), '/', u'INBOX'), - (('\\Noselect', '\\HasChildren'), '/', u'SKIP'), - (('\\HasNoChildren', '\\Drafts'), '/', u'Drafts'), - (('\\HasNoChildren'), '/', u'Sent'), - (('\\HasNoChildren'), '/', u'Sent Items'), - (('\\HasNoChildren', '\\Junk'), '/', u'Spam'), - (('\\HasNoChildren'), '/', u'Trash'), - (('\\HasNoChildren',), '/', u'reference')] + folders = [ + (("\\HasNoChildren",), "/", u"INBOX"), + (("\\Noselect", "\\HasChildren"), "/", u"SKIP"), + (("\\HasNoChildren", "\\Drafts"), "/", u"Drafts"), + (("\\HasNoChildren"), "/", u"Sent"), + (("\\HasNoChildren"), "/", u"Sent Items"), + (("\\HasNoChildren", "\\Junk"), "/", u"Spam"), + (("\\HasNoChildren"), "/", u"Trash"), + (("\\HasNoChildren",), "/", u"reference"), + ] role_map = { - 'INBOX': 'inbox', - 'Trash': 'trash', - 'Drafts': 'drafts', - 'Sent': 'sent', - 'Sent Items': 'sent', - 'Spam': 'spam', - u'[Gmail]/Sent Mail': None, - u'[Gmail]/Trash': 'trash', - u'reference': None + "INBOX": "inbox", + "Trash": "trash", + "Drafts": "drafts", + "Sent": "sent", + "Sent Items": "sent", + "Spam": "spam", + u"[Gmail]/Sent Mail": None, + u"[Gmail]/Trash": "trash", + u"reference": None, } client = patch_generic_client(monkeypatch, folders) raw_folders = client.folders() - generic_folder_checks(raw_folders, role_map, client, 'imap') + generic_folder_checks(raw_folders, role_map, client, "imap") def test_imap_many_folders_one_role(monkeypatch, constants): @@ -633,19 +741,19 @@ def test_imap_many_folders_one_role(monkeypatch, constants): extra folder with the name sent, but since it doesn't have flags and there is already sent folder, than we don't coerce it to a sent folder """ - folders = constants['imap_folders'] - duplicates = [(('\HasNoChildren', '\\Trash'), '/', u'[Gmail]/Trash'), - (('\\HasNoChildren'), '/', u'[Gmail]/Sent')] + folders = 
constants["imap_folders"] + duplicates = [ + (("\HasNoChildren", "\\Trash"), "/", u"[Gmail]/Trash"), + (("\\HasNoChildren"), "/", u"[Gmail]/Sent"), + ] folders += duplicates client = patch_generic_client(monkeypatch, folders) raw_folders = client.folders() folder_names = client.folder_names() - for role in ['inbox', 'trash', 'drafts', 'sent', 'spam']: + for role in ["inbox", "trash", "drafts", "sent", "spam"]: assert role in folder_names - number_roles = 2 if (role in ['sent', 'trash']) else 1 - test_set = filter(lambda x: x == role, - map(lambda y: y.role, raw_folders)) - assert len(test_set) == number_roles,\ - "assigned wrong number of {}".format(role) + number_roles = 2 if (role in ["sent", "trash"]) else 1 + test_set = filter(lambda x: x == role, map(lambda y: y.role, raw_folders)) + assert len(test_set) == number_roles, "assigned wrong number of {}".format(role) diff --git a/inbox/test/imap/test_delete_handling.py b/inbox/test/imap/test_delete_handling.py index e423b52c4..a1bf502a9 100644 --- a/inbox/test/imap/test_delete_handling.py +++ b/inbox/test/imap/test_delete_handling.py @@ -6,8 +6,7 @@ from gevent.lock import Semaphore from sqlalchemy.orm.exc import ObjectDeletedError from inbox.crispin import GmailFlags -from inbox.mailsync.backends.imap.common import (remove_deleted_uids, - update_metadata) +from inbox.mailsync.backends.imap.common import remove_deleted_uids, update_metadata from inbox.mailsync.gc import DeleteHandler, LabelRenameHandler from inbox.models import Folder, Message, Transaction from inbox.models.label import Label @@ -23,21 +22,28 @@ def marked_deleted_message(db, message): return message -def test_messages_deleted_asynchronously(db, default_account, thread, message, - imapuid, folder): +def test_messages_deleted_asynchronously( + db, default_account, thread, message, imapuid, folder +): msg_uid = imapuid.msg_uid - update_metadata(default_account.id, folder.id, folder.canonical_name, - {msg_uid: GmailFlags((), ('label',), None)}, db.session) - assert 'label' in [cat.display_name for cat in message.categories] + update_metadata( + default_account.id, + folder.id, + folder.canonical_name, + {msg_uid: GmailFlags((), ("label",), None)}, + db.session, + ) + assert "label" in [cat.display_name for cat in message.categories] remove_deleted_uids(default_account.id, folder.id, [msg_uid]) db.session.expire_all() assert abs((message.deleted_at - datetime.utcnow()).total_seconds()) < 2 # Check that message categories do get updated synchronously. 
- assert 'label' not in [cat.display_name for cat in message.categories] + assert "label" not in [cat.display_name for cat in message.categories] -def test_drafts_deleted_synchronously(db, default_account, thread, message, - imapuid, folder): +def test_drafts_deleted_synchronously( + db, default_account, thread, message, imapuid, folder +): message.is_draft = True db.session.commit() msg_uid = imapuid.msg_uid @@ -49,39 +55,40 @@ def test_drafts_deleted_synchronously(db, default_account, thread, message, thread.id -def test_deleting_from_a_message_with_multiple_uids(db, default_account, - message, thread): +def test_deleting_from_a_message_with_multiple_uids( + db, default_account, message, thread +): """Check that deleting a imapuid from a message with multiple uids doesn't mark the message for deletion.""" - inbox_folder = Folder.find_or_create(db.session, default_account, 'inbox', - 'inbox') - sent_folder = Folder.find_or_create(db.session, default_account, 'sent', - 'sent') + inbox_folder = Folder.find_or_create(db.session, default_account, "inbox", "inbox") + sent_folder = Folder.find_or_create(db.session, default_account, "sent", "sent") - add_fake_imapuid(db.session, default_account.id, message, sent_folder, - 1337) - add_fake_imapuid(db.session, default_account.id, message, inbox_folder, - 2222) + add_fake_imapuid(db.session, default_account.id, message, sent_folder, 1337) + add_fake_imapuid(db.session, default_account.id, message, inbox_folder, 2222) assert len(message.imapuids) == 2 remove_deleted_uids(default_account.id, inbox_folder.id, [2222]) db.session.expire_all() - assert message.deleted_at is None, \ - "The associated message should not have been marked for deletion." + assert ( + message.deleted_at is None + ), "The associated message should not have been marked for deletion." - assert len(message.imapuids) == 1, \ - "The message should have only one imapuid." + assert len(message.imapuids) == 1, "The message should have only one imapuid." 
-def test_deletion_with_short_ttl(db, default_account, default_namespace, - marked_deleted_message, thread, folder): - handler = DeleteHandler(account_id=default_account.id, - namespace_id=default_namespace.id, - provider_name=default_account.provider, - uid_accessor=lambda m: m.imapuids, - message_ttl=0, thread_ttl=0) +def test_deletion_with_short_ttl( + db, default_account, default_namespace, marked_deleted_message, thread, folder +): + handler = DeleteHandler( + account_id=default_account.id, + namespace_id=default_namespace.id, + provider_name=default_account.provider, + uid_accessor=lambda m: m.imapuids, + message_ttl=0, + thread_ttl=0, + ) handler.check(marked_deleted_message.deleted_at + timedelta(seconds=1)) handler.gc_deleted_threads(thread.deleted_at + timedelta(seconds=1)) db.session.expire_all() @@ -92,13 +99,17 @@ def test_deletion_with_short_ttl(db, default_account, default_namespace, thread.id -def test_thread_deletion_with_short_ttl(db, default_account, default_namespace, - marked_deleted_message, thread, folder): - handler = DeleteHandler(account_id=default_account.id, - namespace_id=default_namespace.id, - provider_name=default_account.provider, - uid_accessor=lambda m: m.imapuids, - message_ttl=0, thread_ttl=120) +def test_thread_deletion_with_short_ttl( + db, default_account, default_namespace, marked_deleted_message, thread, folder +): + handler = DeleteHandler( + account_id=default_account.id, + namespace_id=default_namespace.id, + provider_name=default_account.provider, + uid_accessor=lambda m: m.imapuids, + message_ttl=0, + thread_ttl=120, + ) delete_time = marked_deleted_message.deleted_at handler.check(delete_time + timedelta(seconds=1)) @@ -120,15 +131,22 @@ def test_thread_deletion_with_short_ttl(db, default_account, default_namespace, thread.id -def test_non_orphaned_messages_get_unmarked(db, default_account, - default_namespace, - marked_deleted_message, thread, - folder, imapuid): - handler = DeleteHandler(account_id=default_account.id, - namespace_id=default_namespace.id, - provider_name=default_account.provider, - uid_accessor=lambda m: m.imapuids, - message_ttl=0) +def test_non_orphaned_messages_get_unmarked( + db, + default_account, + default_namespace, + marked_deleted_message, + thread, + folder, + imapuid, +): + handler = DeleteHandler( + account_id=default_account.id, + namespace_id=default_namespace.id, + provider_name=default_account.provider, + uid_accessor=lambda m: m.imapuids, + message_ttl=0, + ) handler.check(marked_deleted_message.deleted_at + timedelta(seconds=1)) db.session.expire_all() # message actually has an imapuid associated, so check that the @@ -136,15 +154,16 @@ def test_non_orphaned_messages_get_unmarked(db, default_account, assert marked_deleted_message.deleted_at is None -def test_threads_only_deleted_when_no_messages_left(db, default_account, - default_namespace, - marked_deleted_message, - thread, folder): - handler = DeleteHandler(account_id=default_account.id, - namespace_id=default_namespace.id, - provider_name=default_account.provider, - uid_accessor=lambda m: m.imapuids, - message_ttl=0) +def test_threads_only_deleted_when_no_messages_left( + db, default_account, default_namespace, marked_deleted_message, thread, folder +): + handler = DeleteHandler( + account_id=default_account.id, + namespace_id=default_namespace.id, + provider_name=default_account.provider, + uid_accessor=lambda m: m.imapuids, + message_ttl=0, + ) # Add another message onto the thread add_fake_message(db.session, default_namespace.id, thread) @@ -157,15 
+176,16 @@ def test_threads_only_deleted_when_no_messages_left(db, default_account, thread.id -def test_deletion_deferred_with_longer_ttl(db, default_account, - default_namespace, - marked_deleted_message, thread, - folder): - handler = DeleteHandler(account_id=default_account.id, - namespace_id=default_namespace.id, - provider_name=default_account.provider, - uid_accessor=lambda m: m.imapuids, - message_ttl=5) +def test_deletion_deferred_with_longer_ttl( + db, default_account, default_namespace, marked_deleted_message, thread, folder +): + handler = DeleteHandler( + account_id=default_account.id, + namespace_id=default_namespace.id, + provider_name=default_account.provider, + uid_accessor=lambda m: m.imapuids, + message_ttl=5, + ) db.session.commit() handler.check(marked_deleted_message.deleted_at + timedelta(seconds=1)) @@ -174,41 +194,54 @@ def test_deletion_deferred_with_longer_ttl(db, default_account, thread.id -def test_deletion_creates_revision(db, default_account, default_namespace, - marked_deleted_message, thread, folder): +def test_deletion_creates_revision( + db, default_account, default_namespace, marked_deleted_message, thread, folder +): message_id = marked_deleted_message.id thread_id = thread.id - handler = DeleteHandler(account_id=default_account.id, - namespace_id=default_namespace.id, - provider_name=default_account.provider, - uid_accessor=lambda m: m.imapuids, - message_ttl=0) + handler = DeleteHandler( + account_id=default_account.id, + namespace_id=default_namespace.id, + provider_name=default_account.provider, + uid_accessor=lambda m: m.imapuids, + message_ttl=0, + ) handler.check(marked_deleted_message.deleted_at + timedelta(seconds=1)) db.session.commit() - latest_message_transaction = db.session.query(Transaction). \ - filter(Transaction.record_id == message_id, - Transaction.object_type == 'message', - Transaction.namespace_id == default_namespace.id). \ - order_by(desc(Transaction.id)).first() - assert latest_message_transaction.command == 'delete' - - latest_thread_transaction = db.session.query(Transaction). \ - filter(Transaction.record_id == thread_id, - Transaction.object_type == 'thread', - Transaction.namespace_id == default_namespace.id). \ - order_by(desc(Transaction.id)).first() - assert latest_thread_transaction.command == 'update' - - -def test_deleted_labels_get_gced(empty_db, default_account, thread, message, - imapuid, folder): + latest_message_transaction = ( + db.session.query(Transaction) + .filter( + Transaction.record_id == message_id, + Transaction.object_type == "message", + Transaction.namespace_id == default_namespace.id, + ) + .order_by(desc(Transaction.id)) + .first() + ) + assert latest_message_transaction.command == "delete" + + latest_thread_transaction = ( + db.session.query(Transaction) + .filter( + Transaction.record_id == thread_id, + Transaction.object_type == "thread", + Transaction.namespace_id == default_namespace.id, + ) + .order_by(desc(Transaction.id)) + .first() + ) + assert latest_thread_transaction.command == "update" + + +def test_deleted_labels_get_gced( + empty_db, default_account, thread, message, imapuid, folder +): # Check that only the labels without messages attached to them # get deleted. default_namespace = default_account.namespace # Create a label w/ no messages attached. 
- label = Label.find_or_create(empty_db.session, default_account, - 'dangling label') + label = Label.find_or_create(empty_db.session, default_account, "dangling label") label.deleted_at = datetime.utcnow() label.category.deleted_at = datetime.utcnow() label_id = label.id @@ -216,19 +249,26 @@ def test_deleted_labels_get_gced(empty_db, default_account, thread, message, # Create a label with attached messages. msg_uid = imapuid.msg_uid - update_metadata(default_account.id, folder.id, folder.canonical_name, - {msg_uid: GmailFlags((), ('label',), None)}, empty_db.session) + update_metadata( + default_account.id, + folder.id, + folder.canonical_name, + {msg_uid: GmailFlags((), ("label",), None)}, + empty_db.session, + ) label_ids = [] for cat in message.categories: for l in cat.labels: label_ids.append(l.id) - handler = DeleteHandler(account_id=default_account.id, - namespace_id=default_namespace.id, - provider_name=default_account.provider, - uid_accessor=lambda m: m.imapuids, - message_ttl=0) + handler = DeleteHandler( + account_id=default_account.id, + namespace_id=default_namespace.id, + provider_name=default_account.provider, + uid_accessor=lambda m: m.imapuids, + message_ttl=0, + ) handler.gc_deleted_categories() empty_db.session.commit() @@ -241,30 +281,36 @@ def test_deleted_labels_get_gced(empty_db, default_account, thread, message, assert empty_db.session.query(Label).get(label_id) is not None -def test_renamed_label_refresh(db, default_account, thread, message, - imapuid, folder, mock_imapclient, monkeypatch): +def test_renamed_label_refresh( + db, default_account, thread, message, imapuid, folder, mock_imapclient, monkeypatch +): # Check that imapuids see their labels refreshed after running # the LabelRenameHandler. msg_uid = imapuid.msg_uid - uid_dict = {msg_uid: GmailFlags((), ('stale label',), ('23',))} + uid_dict = {msg_uid: GmailFlags((), ("stale label",), ("23",))} - update_metadata(default_account.id, folder.id, folder.canonical_name, - uid_dict, db.session) + update_metadata( + default_account.id, folder.id, folder.canonical_name, uid_dict, db.session + ) - new_flags = {msg_uid: {'FLAGS': ('\\Seen',), 'X-GM-LABELS': ('new label',), - 'MODSEQ': ('23',)}} - mock_imapclient._data['[Gmail]/All mail'] = new_flags + new_flags = { + msg_uid: { + "FLAGS": ("\\Seen",), + "X-GM-LABELS": ("new label",), + "MODSEQ": ("23",), + } + } + mock_imapclient._data["[Gmail]/All mail"] = new_flags mock_imapclient.add_folder_data(folder.name, new_flags) - monkeypatch.setattr(MockIMAPClient, 'search', - lambda x, y: [msg_uid]) + monkeypatch.setattr(MockIMAPClient, "search", lambda x, y: [msg_uid]) semaphore = Semaphore(value=1) - rename_handler = LabelRenameHandler(default_account.id, - default_account.namespace.id, - 'new label', semaphore) + rename_handler = LabelRenameHandler( + default_account.id, default_account.namespace.id, "new label", semaphore + ) # Acquire the semaphore to check that LabelRenameHandlers block if # the semaphore is in-use. @@ -275,7 +321,7 @@ def test_renamed_label_refresh(db, default_account, thread, message, labels = list(imapuid.labels) assert len(labels) == 1 - assert labels[0].name == 'stale label' + assert labels[0].name == "stale label" semaphore.release() rename_handler.join() @@ -283,7 +329,7 @@ def test_renamed_label_refresh(db, default_account, thread, message, # Now check that the label got updated. 
labels = list(imapuid.labels) assert len(labels) == 1 - assert labels[0].name == 'new label' + assert labels[0].name == "new label" def test_reply_to_message_cascade(db, default_namespace, thread, message): diff --git a/inbox/test/imap/test_folder_state.py b/inbox/test/imap/test_folder_state.py index 5fb3328f9..610ad2643 100644 --- a/inbox/test/imap/test_folder_state.py +++ b/inbox/test/imap/test_folder_state.py @@ -8,20 +8,30 @@ def add_imap_status_info_rows(folder_id, account_id, db_session): """Add placeholder ImapFolderSyncStatus and ImapFolderInfo rows for this folder_id if none exist. """ - if not db_session.query(ImapFolderSyncStatus).filter_by( - account_id=account_id, folder_id=folder_id).all(): - db_session.add(ImapFolderSyncStatus( - account_id=account_id, - folder_id=folder_id, - state='initial')) + if ( + not db_session.query(ImapFolderSyncStatus) + .filter_by(account_id=account_id, folder_id=folder_id) + .all() + ): + db_session.add( + ImapFolderSyncStatus( + account_id=account_id, folder_id=folder_id, state="initial" + ) + ) - if not db_session.query(ImapFolderInfo).filter_by( - account_id=account_id, folder_id=folder_id).all(): - db_session.add(ImapFolderInfo( - account_id=account_id, - folder_id=folder_id, - uidvalidity=1, - highestmodseq=22)) + if ( + not db_session.query(ImapFolderInfo) + .filter_by(account_id=account_id, folder_id=folder_id) + .all() + ): + db_session.add( + ImapFolderInfo( + account_id=account_id, + folder_id=folder_id, + uidvalidity=1, + highestmodseq=22, + ) + ) def create_foldersyncstatuses(db, default_account): @@ -29,13 +39,13 @@ def create_foldersyncstatuses(db, default_account): monitor = ImapSyncMonitor(default_account) folder_names_and_roles = { - RawFolder('INBOX', 'inbox'), - RawFolder('Sent Mail', 'sent'), - RawFolder('Sent Messages', 'sent'), - RawFolder('Drafts', 'drafts'), - RawFolder('Miscellania', None), - RawFolder('miscellania', None), - RawFolder('Recipes', None), + RawFolder("INBOX", "inbox"), + RawFolder("Sent Mail", "sent"), + RawFolder("Sent Messages", "sent"), + RawFolder("Drafts", "drafts"), + RawFolder("Miscellania", None), + RawFolder("miscellania", None), + RawFolder("Recipes", None), } monitor.save_folder_names(db.session, folder_names_and_roles) folders = db.session.query(Folder).filter_by(account_id=default_account.id) @@ -70,8 +80,7 @@ def test_imap_folder_sync_enabled(db, default_account): assert all([fs.sync_enabled for fs in default_account.foldersyncstatuses]) # Disable sync. Folders should now not have sync_enabled. 
- default_account.disable_sync('testing') + default_account.disable_sync("testing") db.session.commit() - assert all([not fs.sync_enabled - for fs in default_account.foldersyncstatuses]) + assert all([not fs.sync_enabled for fs in default_account.foldersyncstatuses]) diff --git a/inbox/test/imap/test_folder_sync.py b/inbox/test/imap/test_folder_sync.py index d5141de5a..c1378e4e8 100644 --- a/inbox/test/imap/test_folder_sync.py +++ b/inbox/test/imap/test_folder_sync.py @@ -4,18 +4,19 @@ from gevent.lock import BoundedSemaphore from sqlalchemy.orm.exc import ObjectDeletedError from inbox.models import Folder, Message -from inbox.models.backends.imap import (ImapFolderSyncStatus, ImapUid, - ImapFolderInfo) -from inbox.mailsync.backends.imap.generic import (FolderSyncEngine, UidInvalid, - MAX_UIDINVALID_RESYNCS) +from inbox.models.backends.imap import ImapFolderSyncStatus, ImapUid, ImapFolderInfo +from inbox.mailsync.backends.imap.generic import ( + FolderSyncEngine, + UidInvalid, + MAX_UIDINVALID_RESYNCS, +) from inbox.mailsync.backends.gmail import GmailFolderSyncEngine from inbox.mailsync.backends.base import MailsyncDone -from inbox.test.imap.data import uids, uid_data # noqa +from inbox.test.imap.data import uids, uid_data # noqa from inbox.util.testutils import mock_imapclient # noqa -def create_folder_with_syncstatus(account, name, canonical_name, - db_session): +def create_folder_with_syncstatus(account, name, canonical_name, db_session): folder = Folder.find_or_create(db_session, account, name, canonical_name) folder.imapsyncstatus = ImapFolderSyncStatus(account=account) db_session.commit() @@ -24,26 +25,26 @@ def create_folder_with_syncstatus(account, name, canonical_name, @pytest.fixture def inbox_folder(db, generic_account): - return create_folder_with_syncstatus(generic_account, 'Inbox', 'inbox', - db.session) + return create_folder_with_syncstatus(generic_account, "Inbox", "inbox", db.session) @pytest.fixture def generic_trash_folder(db, generic_account): - return create_folder_with_syncstatus(generic_account, '/Trash', - 'trash', db.session) + return create_folder_with_syncstatus(generic_account, "/Trash", "trash", db.session) @pytest.fixture def all_mail_folder(db, default_account): - return create_folder_with_syncstatus(default_account, '[Gmail]/All Mail', - 'all', db.session) + return create_folder_with_syncstatus( + default_account, "[Gmail]/All Mail", "all", db.session + ) @pytest.fixture def trash_folder(db, default_account): - return create_folder_with_syncstatus(default_account, '[Gmail]/Trash', - 'trash', db.session) + return create_folder_with_syncstatus( + default_account, "[Gmail]/Trash", "trash", db.session + ) def test_initial_sync(db, generic_account, inbox_folder, mock_imapclient): @@ -54,105 +55,125 @@ def test_initial_sync(db, generic_account, inbox_folder, mock_imapclient): uid_dict = uids.example() mock_imapclient.add_folder_data(inbox_folder.name, uid_dict) - folder_sync_engine = FolderSyncEngine(generic_account.id, - generic_account.namespace.id, - inbox_folder.name, - generic_account.email_address, - 'custom', - BoundedSemaphore(1)) + folder_sync_engine = FolderSyncEngine( + generic_account.id, + generic_account.namespace.id, + inbox_folder.name, + generic_account.email_address, + "custom", + BoundedSemaphore(1), + ) folder_sync_engine.initial_sync() - saved_uids = db.session.query(ImapUid).filter( - ImapUid.folder_id == inbox_folder.id) + saved_uids = db.session.query(ImapUid).filter(ImapUid.folder_id == inbox_folder.id) assert {u.msg_uid for u in 
saved_uids} == set(uid_dict) saved_message_hashes = {u.message.data_sha256 for u in saved_uids} - assert saved_message_hashes == {sha256(v['BODY[]']).hexdigest() for v in - uid_dict.values()} + assert saved_message_hashes == { + sha256(v["BODY[]"]).hexdigest() for v in uid_dict.values() + } -def test_new_uids_synced_when_polling(db, generic_account, inbox_folder, - mock_imapclient): +def test_new_uids_synced_when_polling( + db, generic_account, inbox_folder, mock_imapclient +): uid_dict = uids.example() mock_imapclient.add_folder_data(inbox_folder.name, uid_dict) - inbox_folder.imapfolderinfo = ImapFolderInfo(account=generic_account, - uidvalidity=1, - uidnext=1) + inbox_folder.imapfolderinfo = ImapFolderInfo( + account=generic_account, uidvalidity=1, uidnext=1 + ) db.session.commit() - folder_sync_engine = FolderSyncEngine(generic_account.id, - generic_account.namespace.id, - inbox_folder.name, - generic_account.email_address, - 'custom', - BoundedSemaphore(1)) + folder_sync_engine = FolderSyncEngine( + generic_account.id, + generic_account.namespace.id, + inbox_folder.name, + generic_account.email_address, + "custom", + BoundedSemaphore(1), + ) # Don't sleep at the end of poll_impl before returning. folder_sync_engine.poll_frequency = 0 folder_sync_engine.poll_impl() - saved_uids = db.session.query(ImapUid).filter( - ImapUid.folder_id == inbox_folder.id) + saved_uids = db.session.query(ImapUid).filter(ImapUid.folder_id == inbox_folder.id) assert {u.msg_uid for u in saved_uids} == set(uid_dict) -def test_condstore_flags_refresh(db, default_account, all_mail_folder, - mock_imapclient, monkeypatch): +def test_condstore_flags_refresh( + db, default_account, all_mail_folder, mock_imapclient, monkeypatch +): monkeypatch.setattr( - 'inbox.mailsync.backends.imap.generic.CONDSTORE_FLAGS_REFRESH_BATCH_SIZE', - 10) + "inbox.mailsync.backends.imap.generic.CONDSTORE_FLAGS_REFRESH_BATCH_SIZE", 10 + ) uid_dict = uids.example() mock_imapclient.add_folder_data(all_mail_folder.name, uid_dict) - mock_imapclient.capabilities = lambda: ['CONDSTORE'] - - folder_sync_engine = FolderSyncEngine(default_account.id, - default_account.namespace.id, - all_mail_folder.name, - default_account.email_address, - 'gmail', - BoundedSemaphore(1)) + mock_imapclient.capabilities = lambda: ["CONDSTORE"] + + folder_sync_engine = FolderSyncEngine( + default_account.id, + default_account.namespace.id, + all_mail_folder.name, + default_account.email_address, + "gmail", + BoundedSemaphore(1), + ) folder_sync_engine.initial_sync() # Change the labels provided by the mock IMAP server for k, v in mock_imapclient._data[all_mail_folder.name].items(): - v['X-GM-LABELS'] = ('newlabel',) - v['MODSEQ'] = (k,) + v["X-GM-LABELS"] = ("newlabel",) + v["MODSEQ"] = (k,) folder_sync_engine.highestmodseq = 0 # Don't sleep at the end of poll_impl before returning. folder_sync_engine.poll_frequency = 0 folder_sync_engine.poll_impl() - imapuids = db.session.query(ImapUid). 
\ - filter_by(folder_id=all_mail_folder.id).all() + imapuids = db.session.query(ImapUid).filter_by(folder_id=all_mail_folder.id).all() for imapuid in imapuids: - assert 'newlabel' in [l.name for l in imapuid.labels] + assert "newlabel" in [l.name for l in imapuid.labels] - assert folder_sync_engine.highestmodseq == mock_imapclient.folder_status( - all_mail_folder.name, ['HIGHESTMODSEQ'])['HIGHESTMODSEQ'] + assert ( + folder_sync_engine.highestmodseq + == mock_imapclient.folder_status(all_mail_folder.name, ["HIGHESTMODSEQ"])[ + "HIGHESTMODSEQ" + ] + ) def test_generic_flags_refresh_expunges_transient_uids( - db, generic_account, inbox_folder, mock_imapclient, monkeypatch): + db, generic_account, inbox_folder, mock_imapclient, monkeypatch +): # Check that we delete UIDs which are synced but quickly deleted, so never # show up in flags refresh. uid_dict = uids.example() mock_imapclient.add_folder_data(inbox_folder.name, uid_dict) - inbox_folder.imapfolderinfo = ImapFolderInfo(account=generic_account, - uidvalidity=1, - uidnext=1) + inbox_folder.imapfolderinfo = ImapFolderInfo( + account=generic_account, uidvalidity=1, uidnext=1 + ) db.session.commit() - folder_sync_engine = FolderSyncEngine(generic_account.id, - generic_account.namespace.id, - inbox_folder.name, - generic_account.email_address, - 'custom', - BoundedSemaphore(1)) + folder_sync_engine = FolderSyncEngine( + generic_account.id, + generic_account.namespace.id, + inbox_folder.name, + generic_account.email_address, + "custom", + BoundedSemaphore(1), + ) folder_sync_engine.initial_sync() # Don't sleep at the end of poll_impl before returning. folder_sync_engine.poll_frequency = 0 folder_sync_engine.poll_impl() - msg = db.session.query(Message).filter_by( - namespace_id=generic_account.namespace.id).first() - transient_uid = ImapUid(folder=inbox_folder, account=generic_account, - message=msg, msg_uid=max(uid_dict) + 1) + msg = ( + db.session.query(Message) + .filter_by(namespace_id=generic_account.namespace.id) + .first() + ) + transient_uid = ImapUid( + folder=inbox_folder, + account=generic_account, + message=msg, + msg_uid=max(uid_dict) + 1, + ) db.session.add(transient_uid) db.session.commit() folder_sync_engine.last_slow_refresh = None @@ -164,16 +185,18 @@ def test_generic_flags_refresh_expunges_transient_uids( def test_handle_uidinvalid(db, generic_account, inbox_folder, mock_imapclient): uid_dict = uids.example() mock_imapclient.add_folder_data(inbox_folder.name, uid_dict) - inbox_folder.imapfolderinfo = ImapFolderInfo(account=generic_account, - uidvalidity=1, - uidnext=1) + inbox_folder.imapfolderinfo = ImapFolderInfo( + account=generic_account, uidvalidity=1, uidnext=1 + ) db.session.commit() - folder_sync_engine = FolderSyncEngine(generic_account.id, - generic_account.namespace.id, - inbox_folder.name, - generic_account.email_address, - 'custom', - BoundedSemaphore(1)) + folder_sync_engine = FolderSyncEngine( + generic_account.id, + generic_account.namespace.id, + inbox_folder.name, + generic_account.email_address, + "custom", + BoundedSemaphore(1), + ) folder_sync_engine.initial_sync() mock_imapclient.uidvalidity = 2 with pytest.raises(UidInvalid): @@ -181,13 +204,16 @@ def test_handle_uidinvalid(db, generic_account, inbox_folder, mock_imapclient): new_state = folder_sync_engine.resync_uids() - assert new_state == 'initial' - assert db.session.query(ImapUid).filter( - ImapUid.folder_id == inbox_folder.id).all() == [] + assert new_state == "initial" + assert ( + db.session.query(ImapUid).filter(ImapUid.folder_id == 
inbox_folder.id).all() + == [] + ) -def test_handle_uidinvalid_loops(db, generic_account, inbox_folder, - mock_imapclient, monkeypatch): +def test_handle_uidinvalid_loops( + db, generic_account, inbox_folder, mock_imapclient, monkeypatch +): import inbox.mailsync.backends.imap.generic as generic_import @@ -200,23 +226,26 @@ def fake_poll_function(self): uidinvalid_count.append(1) raise UidInvalid - monkeypatch.setattr("inbox.mailsync.backends.imap.generic.FolderSyncEngine.poll", - fake_poll_function) + monkeypatch.setattr( + "inbox.mailsync.backends.imap.generic.FolderSyncEngine.poll", fake_poll_function + ) uid_dict = uids.example() mock_imapclient.add_folder_data(inbox_folder.name, uid_dict) - inbox_folder.imapfolderinfo = ImapFolderInfo(account=generic_account, - uidvalidity=1, - uidnext=1) + inbox_folder.imapfolderinfo = ImapFolderInfo( + account=generic_account, uidvalidity=1, uidnext=1 + ) db.session.commit() - folder_sync_engine = generic_import.FolderSyncEngine(generic_account.id, - generic_account.namespace.id, - inbox_folder.name, - generic_account.email_address, - 'custom', - BoundedSemaphore(1)) + folder_sync_engine = generic_import.FolderSyncEngine( + generic_account.id, + generic_account.namespace.id, + inbox_folder.name, + generic_account.email_address, + "custom", + BoundedSemaphore(1), + ) - folder_sync_engine.state = 'poll' + folder_sync_engine.state = "poll" db.session.expunge(inbox_folder.imapsyncstatus) @@ -228,91 +257,113 @@ def fake_poll_function(self): def raise_imap_error(self): from imaplib import IMAP4 - raise IMAP4.error('Unexpected IDLE response') + + raise IMAP4.error("Unexpected IDLE response") -def test_gmail_initial_sync(db, default_account, all_mail_folder, - mock_imapclient): +def test_gmail_initial_sync(db, default_account, all_mail_folder, mock_imapclient): uid_dict = uids.example() mock_imapclient.add_folder_data(all_mail_folder.name, uid_dict) - mock_imapclient.list_folders = lambda: [(('\\All', '\\HasNoChildren',), - '/', u'[Gmail]/All Mail')] + mock_imapclient.list_folders = lambda: [ + (("\\All", "\\HasNoChildren",), "/", u"[Gmail]/All Mail") + ] mock_imapclient.idle = lambda: None mock_imapclient.idle_check = raise_imap_error - folder_sync_engine = GmailFolderSyncEngine(default_account.id, - default_account.namespace.id, - all_mail_folder.name, - default_account.email_address, - 'gmail', - BoundedSemaphore(1)) + folder_sync_engine = GmailFolderSyncEngine( + default_account.id, + default_account.namespace.id, + all_mail_folder.name, + default_account.email_address, + "gmail", + BoundedSemaphore(1), + ) folder_sync_engine.initial_sync() saved_uids = db.session.query(ImapUid).filter( - ImapUid.folder_id == all_mail_folder.id) + ImapUid.folder_id == all_mail_folder.id + ) assert {u.msg_uid for u in saved_uids} == set(uid_dict) -@pytest.mark.skipif(True, reason='Need to investigate') -def test_gmail_message_deduplication(db, default_account, all_mail_folder, - trash_folder, mock_imapclient): +@pytest.mark.skipif(True, reason="Need to investigate") +def test_gmail_message_deduplication( + db, default_account, all_mail_folder, trash_folder, mock_imapclient +): uid = 22 uid_values = uid_data.example() - mock_imapclient.list_folders = lambda: [(('\\All', '\\HasNoChildren',), - '/', u'[Gmail]/All Mail'), - (('\\Trash', '\\HasNoChildren',), - '/', u'[Gmail]/Trash')] + mock_imapclient.list_folders = lambda: [ + (("\\All", "\\HasNoChildren",), "/", u"[Gmail]/All Mail"), + (("\\Trash", "\\HasNoChildren",), "/", u"[Gmail]/Trash"), + ] mock_imapclient.idle = 
lambda: None mock_imapclient.add_folder_data(all_mail_folder.name, {uid: uid_values}) mock_imapclient.add_folder_data(trash_folder.name, {uid: uid_values}) mock_imapclient.idle_check = raise_imap_error all_folder_sync_engine = GmailFolderSyncEngine( - default_account.id, default_account.namespace.id, all_mail_folder.name, - default_account.email_address, 'gmail', - BoundedSemaphore(1)) + default_account.id, + default_account.namespace.id, + all_mail_folder.name, + default_account.email_address, + "gmail", + BoundedSemaphore(1), + ) all_folder_sync_engine.initial_sync() trash_folder_sync_engine = GmailFolderSyncEngine( - default_account.id, default_account.namespace.id, trash_folder.name, - default_account.email_address, 'gmail', - BoundedSemaphore(1)) + default_account.id, + default_account.namespace.id, + trash_folder.name, + default_account.email_address, + "gmail", + BoundedSemaphore(1), + ) trash_folder_sync_engine.initial_sync() # Check that we have two uids, but just one message. assert [(uid,)] == db.session.query(ImapUid.msg_uid).filter( - ImapUid.folder_id == all_mail_folder.id).all() + ImapUid.folder_id == all_mail_folder.id + ).all() assert [(uid,)] == db.session.query(ImapUid.msg_uid).filter( - ImapUid.folder_id == trash_folder.id).all() - - assert db.session.query(Message).filter( - Message.namespace_id == default_account.namespace.id, - Message.g_msgid == uid_values['X-GM-MSGID']).count() == 1 - - -def test_imap_message_deduplication(db, generic_account, inbox_folder, - generic_trash_folder, mock_imapclient): + ImapUid.folder_id == trash_folder.id + ).all() + + assert ( + db.session.query(Message) + .filter( + Message.namespace_id == default_account.namespace.id, + Message.g_msgid == uid_values["X-GM-MSGID"], + ) + .count() + == 1 + ) + + +def test_imap_message_deduplication( + db, generic_account, inbox_folder, generic_trash_folder, mock_imapclient +): uid = 22 uid_values = uid_data.example() - mock_imapclient.list_folders = lambda: [(('\\All', '\\HasNoChildren',), - '/', u'/Inbox'), - (('\\Trash', '\\HasNoChildren',), - '/', u'/Trash')] + mock_imapclient.list_folders = lambda: [ + (("\\All", "\\HasNoChildren",), "/", u"/Inbox"), + (("\\Trash", "\\HasNoChildren",), "/", u"/Trash"), + ] mock_imapclient.idle = lambda: None mock_imapclient.add_folder_data(inbox_folder.name, {uid: uid_values}) - mock_imapclient.add_folder_data(generic_trash_folder.name, - {uid: uid_values}) + mock_imapclient.add_folder_data(generic_trash_folder.name, {uid: uid_values}) folder_sync_engine = FolderSyncEngine( generic_account.id, generic_account.namespace.id, inbox_folder.name, generic_account.email_address, - 'custom', - BoundedSemaphore(1)) + "custom", + BoundedSemaphore(1), + ) folder_sync_engine.initial_sync() trash_folder_sync_engine = FolderSyncEngine( @@ -320,20 +371,29 @@ def test_imap_message_deduplication(db, generic_account, inbox_folder, generic_account.namespace.id, generic_trash_folder.name, generic_account.email_address, - 'custom', - BoundedSemaphore(1)) + "custom", + BoundedSemaphore(1), + ) trash_folder_sync_engine.initial_sync() # Check that we have two uids, but just one message. 
assert [(uid,)] == db.session.query(ImapUid.msg_uid).filter( - ImapUid.folder_id == inbox_folder.id).all() + ImapUid.folder_id == inbox_folder.id + ).all() assert [(uid,)] == db.session.query(ImapUid.msg_uid).filter( - ImapUid.folder_id == generic_trash_folder.id).all() + ImapUid.folder_id == generic_trash_folder.id + ).all() # used to uniquely ID messages - body_sha = sha256(uid_values['BODY[]']).hexdigest() - - assert db.session.query(Message).filter( - Message.namespace_id == generic_account.namespace.id, - Message.data_sha256 == body_sha).count() == 1 + body_sha = sha256(uid_values["BODY[]"]).hexdigest() + + assert ( + db.session.query(Message) + .filter( + Message.namespace_id == generic_account.namespace.id, + Message.data_sha256 == body_sha, + ) + .count() + == 1 + ) diff --git a/inbox/test/imap/test_full_imap_enabled.py b/inbox/test/imap/test_full_imap_enabled.py index 94fe707e7..e4460b697 100644 --- a/inbox/test/imap/test_full_imap_enabled.py +++ b/inbox/test/imap/test_full_imap_enabled.py @@ -7,9 +7,8 @@ class MockIMAPClient(IMAPClient): - def __init__(self): - super(MockIMAPClient, self).__init__('randomhost') + super(MockIMAPClient, self).__init__("randomhost") def _create_IMAP4(self): return Mock() @@ -19,30 +18,28 @@ def logout(self): def test_imap_not_fully_enabled(monkeypatch): - def folder_list_fail(conn): - raise Exception("LIST failed: '[ALERT] full IMAP support " - "is NOT enabled for this account'") + raise Exception( + "LIST failed: '[ALERT] full IMAP support " + "is NOT enabled for this account'" + ) - monkeypatch.setattr('imapclient.IMAPClient.list_folders', - folder_list_fail) + monkeypatch.setattr("imapclient.IMAPClient.list_folders", folder_list_fail) def fake_connect(account): return MockIMAPClient() response = { - 'email': 'test@test.com', - 'password': 'test123', - 'imap_server_host': '0.0.0.0', - 'imap_server_port': 22, - 'smtp_server_host': '0.0.0.0', - 'smtp_server_port': 23 + "email": "test@test.com", + "password": "test123", + "imap_server_host": "0.0.0.0", + "imap_server_port": 22, + "smtp_server_host": "0.0.0.0", + "smtp_server_port": 23, } - handler = GenericAuthHandler('custom') - acct = handler.create_account( - 'test@test.com', - response) + handler = GenericAuthHandler("custom") + acct = handler.create_account("test@test.com", response) handler.connect_account = fake_connect handler._supports_condstore = lambda x: True with pytest.raises(UserRecoverableConfigError): diff --git a/inbox/test/imap/test_labels.py b/inbox/test/imap/test_labels.py index 33e27d815..1ca3a7208 100644 --- a/inbox/test/imap/test_labels.py +++ b/inbox/test/imap/test_labels.py @@ -4,29 +4,34 @@ from inbox.mailsync.backends.imap.common import update_message_metadata -from inbox.test.util.base import (default_account, add_fake_folder, add_fake_message, - add_fake_thread, add_fake_imapuid) +from inbox.test.util.base import ( + default_account, + add_fake_folder, + add_fake_message, + add_fake_thread, + add_fake_imapuid, +) from inbox.test.api.base import api_client -__all__ = ['default_account', 'api_client'] +__all__ = ["default_account", "api_client"] def add_fake_label(db_session, default_account, display_name, name): from inbox.models.label import Label + return Label.find_or_create(db_session, default_account, display_name, name) @pytest.fixture def folder_and_message_maps(db, default_account): folder_map, message_map = {}, {} - for name in ('all', 'trash', 'spam'): + for name in ("all", "trash", "spam"): # Create a folder - display_name = name.capitalize() if name != 'all' 
else 'All Mail' + display_name = name.capitalize() if name != "all" else "All Mail" folder = add_fake_folder(db.session, default_account, display_name, name) thread = add_fake_thread(db.session, default_account.namespace.id) # Create a message in the folder - message = add_fake_message(db.session, default_account.namespace.id, - thread) + message = add_fake_message(db.session, default_account.namespace.id, thread) add_fake_imapuid(db.session, default_account.id, message, folder, 13) update_message_metadata(db.session, default_account, message, False) db.session.commit() @@ -38,10 +43,10 @@ def folder_and_message_maps(db, default_account): def add_inbox_label(db, default_account, message): assert len(message.imapuids) == 1 imapuid = message.imapuids[0] - assert set([c.name for c in imapuid.categories]) == set(['all']) - imapuid.update_labels(['\\Inbox']) + assert set([c.name for c in imapuid.categories]) == set(["all"]) + imapuid.update_labels(["\\Inbox"]) db.session.commit() - assert set([c.name for c in imapuid.categories]) == set(['all', 'inbox']) + assert set([c.name for c in imapuid.categories]) == set(["all", "inbox"]) update_message_metadata(db.session, default_account, message, False) db.session.commit() return message @@ -51,25 +56,24 @@ def add_custom_label(db, default_account, message): assert len(message.imapuids) == 1 imapuid = message.imapuids[0] existing = [c.name for c in imapuid.categories][0] - imapuid.update_labels(['<3']) + imapuid.update_labels(["<3"]) db.session.commit() - assert set([c.name for c in imapuid.categories]) == set([existing, '']) + assert set([c.name for c in imapuid.categories]) == set([existing, ""]) update_message_metadata(db.session, default_account, message, False) db.session.commit() return message -@pytest.mark.parametrize('label', ['all', 'trash', 'spam']) -def test_validation(db, api_client, default_account, folder_and_message_maps, - label): +@pytest.mark.parametrize("label", ["all", "trash", "spam"]) +def test_validation(db, api_client, default_account, folder_and_message_maps, label): folder_map, message_map = folder_and_message_maps message = message_map[label] - resp_data = api_client.get_data('/messages/{}'.format(message.public_id)) - labels = resp_data['labels'] + resp_data = api_client.get_data("/messages/{}".format(message.public_id)) + labels = resp_data["labels"] assert len(labels) == 1 - assert labels[0]['name'] == label - existing_label = labels[0]['id'] + assert labels[0]["name"] == label + existing_label = labels[0]["id"] # Adding more than one mutually exclusive label is not allowed. # For example, adding 'trash' and 'spam'. 
@@ -81,32 +85,34 @@ def test_validation(db, api_client, default_account, folder_and_message_maps, labels_to_add += [folder_map[key].category.public_id] response = api_client.put_data( - '/messages/{}'.format(message.public_id), - {'label_ids': labels_to_add}) + "/messages/{}".format(message.public_id), {"label_ids": labels_to_add} + ) resp_data = json.loads(response.data) assert response.status_code == 400 - assert resp_data.get('type') == 'invalid_request_error' + assert resp_data.get("type") == "invalid_request_error" response = api_client.put_data( - '/messages/{}'.format(message.public_id), - {'label_ids': labels_to_add + [existing_label]}) + "/messages/{}".format(message.public_id), + {"label_ids": labels_to_add + [existing_label]}, + ) resp_data = json.loads(response.data) assert response.status_code == 400 - assert resp_data.get('type') == 'invalid_request_error' + assert resp_data.get("type") == "invalid_request_error" # Removing all labels is not allowed, because this will remove # the required label (one of 'all'/ 'trash'/ 'spam') too. response = api_client.put_data( - '/messages/{}'.format(message.public_id), - {'label_ids': []}) + "/messages/{}".format(message.public_id), {"label_ids": []} + ) resp_data = json.loads(response.data) assert response.status_code == 400 - assert resp_data.get('type') == 'invalid_request_error' + assert resp_data.get("type") == "invalid_request_error" -@pytest.mark.parametrize('label', ['all', 'trash', 'spam']) +@pytest.mark.parametrize("label", ["all", "trash", "spam"]) def test_adding_a_mutually_exclusive_label_replaces_the_other( - db, api_client, default_account, folder_and_message_maps, label): + db, api_client, default_account, folder_and_message_maps, label +): # Verify a Gmail message can only have ONE of the 'all', 'trash', 'spam' # labels at a time. We specifically test that adding 'all'/ 'trash'/ 'spam' # to a message in one of the other two folders *replaces* @@ -119,53 +125,55 @@ def test_adding_a_mutually_exclusive_label_replaces_the_other( continue message = message_map[key] - resp_data = api_client.get_data('/messages/{}'.format(message.public_id)) - labels = resp_data['labels'] + resp_data = api_client.get_data("/messages/{}".format(message.public_id)) + labels = resp_data["labels"] assert len(labels) == 1 - assert labels[0]['name'] == key - existing_label = labels[0]['id'] + assert labels[0]["name"] == key + existing_label = labels[0]["id"] # Adding 'all'/ 'trash'/ 'spam' removes the existing one, # irrespective of whether it's provided in the request or not. response = api_client.put_data( - '/messages/{}'.format(message.public_id), - {'label_ids': [label_to_add.category.public_id, - existing_label]}) - labels = json.loads(response.data)['labels'] + "/messages/{}".format(message.public_id), + {"label_ids": [label_to_add.category.public_id, existing_label]}, + ) + labels = json.loads(response.data)["labels"] assert len(labels) == 1 - assert labels[0]['name'] == label + assert labels[0]["name"] == label -@pytest.mark.parametrize('label', ['trash', 'spam']) +@pytest.mark.parametrize("label", ["trash", "spam"]) def test_adding_trash_or_spam_removes_inbox( - db, api_client, default_account, folder_and_message_maps, label): + db, api_client, default_account, folder_and_message_maps, label +): # Verify a Gmail message in 'trash', 'spam' cannot have 'inbox'. # We specifically test that adding 'trash'/ 'spam' to a message with 'inbox' # removes it. 
folder_map, message_map = folder_and_message_maps - message = message_map['all'] + message = message_map["all"] add_inbox_label(db, default_account, message) - resp_data = api_client.get_data('/messages/{}'.format(message.public_id)) - labels = resp_data['labels'] + resp_data = api_client.get_data("/messages/{}".format(message.public_id)) + labels = resp_data["labels"] assert len(labels) == 2 - assert set([l['name'] for l in labels]) == set(['all', 'inbox']) + assert set([l["name"] for l in labels]) == set(["all", "inbox"]) # Adding 'trash'/ 'spam' removes 'inbox' (and 'all'), # irrespective of whether it's provided in the request or not. label_to_add = folder_map[label] response = api_client.put_data( - '/messages/{}'.format(message.public_id), - {'label_ids': [label_to_add.category.public_id] + - [l['id'] for l in labels]}) - labels = json.loads(response.data)['labels'] + "/messages/{}".format(message.public_id), + {"label_ids": [label_to_add.category.public_id] + [l["id"] for l in labels]}, + ) + labels = json.loads(response.data)["labels"] assert len(labels) == 1 - assert labels[0]['name'] == label + assert labels[0]["name"] == label -@pytest.mark.parametrize('label', ['all', 'trash', 'spam']) +@pytest.mark.parametrize("label", ["all", "trash", "spam"]) def test_adding_a_mutually_exclusive_label_does_not_affect_custom_labels( - db, api_client, default_account, folder_and_message_maps, label): + db, api_client, default_account, folder_and_message_maps, label +): folder_map, message_map = folder_and_message_maps label_to_add = folder_map[label] @@ -175,97 +183,106 @@ def test_adding_a_mutually_exclusive_label_does_not_affect_custom_labels( message = message_map[key] add_custom_label(db, default_account, message) - resp_data = api_client.get_data('/messages/{}'.format(message.public_id)) - labels = resp_data['labels'] + resp_data = api_client.get_data("/messages/{}".format(message.public_id)) + labels = resp_data["labels"] assert len(labels) == 2 - assert key in [l['name'] for l in labels] - assert '<3' in [l['display_name'] for l in labels] + assert key in [l["name"] for l in labels] + assert "<3" in [l["display_name"] for l in labels] # Adding only 'all'/ 'trash'/ 'spam' does not change custom labels. response = api_client.put_data( - '/messages/{}'.format(message.public_id), - {'label_ids': [label_to_add.category.public_id] + - [l['id'] for l in labels]}) - labels = json.loads(response.data)['labels'] + "/messages/{}".format(message.public_id), + { + "label_ids": [label_to_add.category.public_id] + + [l["id"] for l in labels] + }, + ) + labels = json.loads(response.data)["labels"] assert len(labels) == 2 - assert label in [l['name'] for l in labels] - assert '<3' in [l['display_name'] for l in labels] + assert label in [l["name"] for l in labels] + assert "<3" in [l["display_name"] for l in labels] -@pytest.mark.parametrize('label', ['all', 'trash', 'spam']) +@pytest.mark.parametrize("label", ["all", "trash", "spam"]) def test_adding_inbox_adds_all_and_removes_trash_spam( - db, api_client, default_account, folder_and_message_maps, label): + db, api_client, default_account, folder_and_message_maps, label +): # Verify a Gmail message in 'trash', 'spam' cannot have 'inbox'. # This time we test that adding 'inbox' to a message in the 'trash'/ 'spam' # moves it to 'all' in addition to adding 'inbox'. 
folder_map, message_map = folder_and_message_maps message = message_map[label] - resp_data = api_client.get_data('/messages/{}'.format(message.public_id)) - labels = resp_data['labels'] + resp_data = api_client.get_data("/messages/{}".format(message.public_id)) + labels = resp_data["labels"] assert len(labels) == 1 - assert labels[0]['name'] == label - existing_label = labels[0]['id'] + assert labels[0]["name"] == label + existing_label = labels[0]["id"] - inbox_label = add_fake_label(db.session, default_account, 'Inbox', 'inbox') + inbox_label = add_fake_label(db.session, default_account, "Inbox", "inbox") db.session.commit() # Adding 'inbox' adds 'all', replacing 'trash'/ 'spam' if needed. response = api_client.put_data( - '/messages/{}'.format(message.public_id), - {'label_ids': [inbox_label.category.public_id, existing_label]}) + "/messages/{}".format(message.public_id), + {"label_ids": [inbox_label.category.public_id, existing_label]}, + ) db.session.commit() - labels = json.loads(response.data)['labels'] + labels = json.loads(response.data)["labels"] assert len(labels) == 2 - assert set([l['name'] for l in labels]) == set(['all', 'inbox']) + assert set([l["name"] for l in labels]) == set(["all", "inbox"]) -@pytest.mark.parametrize('label', ['all', 'trash', 'spam']) +@pytest.mark.parametrize("label", ["all", "trash", "spam"]) def test_adding_a_custom_label_preserves_other_labels( - db, api_client, default_account, folder_and_message_maps, label): + db, api_client, default_account, folder_and_message_maps, label +): folder_map, message_map = folder_and_message_maps message = message_map[label] - resp_data = api_client.get_data('/messages/{}'.format(message.public_id)) - labels = resp_data['labels'] + resp_data = api_client.get_data("/messages/{}".format(message.public_id)) + labels = resp_data["labels"] assert len(labels) == 1 - assert labels[0]['name'] == label - existing_label = labels[0]['id'] + assert labels[0]["name"] == label + existing_label = labels[0]["id"] - custom_label = add_fake_label(db.session, default_account, '<3', None) + custom_label = add_fake_label(db.session, default_account, "<3", None) db.session.commit() # Adding only a custom label does not move a message to a different folder # i.e. does not change its 'all'/ 'trash'/ 'spam' labels. 
response = api_client.put_data( - '/messages/{}'.format(message.public_id), - {'label_ids': [custom_label.category.public_id, existing_label]}) - labels = json.loads(response.data)['labels'] + "/messages/{}".format(message.public_id), + {"label_ids": [custom_label.category.public_id, existing_label]}, + ) + labels = json.loads(response.data)["labels"] assert len(labels) == 2 - assert set([l['name'] for l in labels]) == set([label, None]) - assert '<3' in [l['display_name'] for l in labels] + assert set([l["name"] for l in labels]) == set([label, None]) + assert "<3" in [l["display_name"] for l in labels] -@pytest.mark.parametrize('label', ['all', 'trash', 'spam']) +@pytest.mark.parametrize("label", ["all", "trash", "spam"]) def test_removing_a_mutually_exclusive_label_does_not_orphan_a_message( - db, api_client, default_account, folder_and_message_maps, label): + db, api_client, default_account, folder_and_message_maps, label +): folder_map, message_map = folder_and_message_maps message = message_map[label] - resp_data = api_client.get_data('/messages/{}'.format(message.public_id)) - labels = resp_data['labels'] + resp_data = api_client.get_data("/messages/{}".format(message.public_id)) + labels = resp_data["labels"] assert len(labels) == 1 - assert labels[0]['name'] == label + assert labels[0]["name"] == label - custom_label = add_fake_label(db.session, default_account, '<3', None) + custom_label = add_fake_label(db.session, default_account, "<3", None) db.session.commit() # Removing a message's ONLY folder "label" does not remove it. # Gmail messages MUST belong to one of 'all'/ 'trash'/ 'spam'. response = api_client.put_data( - '/messages/{}'.format(message.public_id), - {'label_ids': [custom_label.category.public_id]}) - labels = json.loads(response.data)['labels'] + "/messages/{}".format(message.public_id), + {"label_ids": [custom_label.category.public_id]}, + ) + labels = json.loads(response.data)["labels"] assert len(labels) == 2 - assert set([l['name'] for l in labels]) == set([label, None]) - assert '<3' in [l['display_name'] for l in labels] + assert set([l["name"] for l in labels]) == set([label, None]) + assert "<3" in [l["display_name"] for l in labels] diff --git a/inbox/test/imap/test_pooling.py b/inbox/test/imap/test_pooling.py index 421e1462a..6756fa79a 100644 --- a/inbox/test/imap/test_pooling.py +++ b/inbox/test/imap/test_pooling.py @@ -10,7 +10,6 @@ class TestableConnectionPool(CrispinConnectionPool): - def _set_account_info(self): pass @@ -36,14 +35,17 @@ def test_block_on_depleted_pool(): pass -@pytest.mark.parametrize("error_class,expect_logout_called", [ - (imaplib.IMAP4.error, True), - (imaplib.IMAP4.abort, False), - (socket.error, False), - (socket.timeout, False), - (ssl.SSLError, False), - (ssl.CertificateError, False), -]) +@pytest.mark.parametrize( + "error_class,expect_logout_called", + [ + (imaplib.IMAP4.error, True), + (imaplib.IMAP4.abort, False), + (socket.error, False), + (socket.timeout, False), + (ssl.SSLError, False), + (ssl.CertificateError, False), + ], +) def test_imap_and_network_errors(error_class, expect_logout_called): pool = TestableConnectionPool(1, num_connections=3, readonly=True) with pytest.raises(error_class): diff --git a/inbox/test/imap/test_save_folder_names.py b/inbox/test/imap/test_save_folder_names.py index da64eb428..e37e85290 100644 --- a/inbox/test/imap/test_save_folder_names.py +++ b/inbox/test/imap/test_save_folder_names.py @@ -8,91 +8,103 @@ def test_imap_save_generic_folder_names(db, default_account): monitor = 
ImapSyncMonitor(default_account) folder_names_and_roles = { - ('INBOX', 'inbox'), - ('Sent Mail', 'sent'), - ('Sent Messages', 'sent'), - ('Drafts', 'drafts'), - ('Miscellania', ''), - ('miscellania', ''), - ('Recipes', ''), + ("INBOX", "inbox"), + ("Sent Mail", "sent"), + ("Sent Messages", "sent"), + ("Drafts", "drafts"), + ("Miscellania", ""), + ("miscellania", ""), + ("Recipes", ""), } raw_folders = [RawFolder(*args) for args in folder_names_and_roles] monitor.save_folder_names(db.session, raw_folders) saved_folder_data = set( - db.session.query(Folder.name, Folder.canonical_name).filter( - Folder.account_id == default_account.id).all()) + db.session.query(Folder.name, Folder.canonical_name) + .filter(Folder.account_id == default_account.id) + .all() + ) assert saved_folder_data == folder_names_and_roles def test_handle_folder_deletions(db, default_account): monitor = ImapSyncMonitor(default_account) folder_names_and_roles = { - ('INBOX', 'inbox'), - ('Miscellania', None), + ("INBOX", "inbox"), + ("Miscellania", None), } raw_folders = [RawFolder(*args) for args in folder_names_and_roles] monitor.save_folder_names(db.session, raw_folders) - assert len(db.session.query(Folder).filter( - Folder.account_id == default_account.id).all()) == 2 + assert ( + len( + db.session.query(Folder) + .filter(Folder.account_id == default_account.id) + .all() + ) + == 2 + ) - monitor.save_folder_names(db.session, [RawFolder('INBOX', 'inbox')]) + monitor.save_folder_names(db.session, [RawFolder("INBOX", "inbox")]) saved_folder_data = set( - db.session.query(Folder.name, Folder.canonical_name).filter( - Folder.account_id == default_account.id).all()) - assert saved_folder_data == {('INBOX', 'inbox')} + db.session.query(Folder.name, Folder.canonical_name) + .filter(Folder.account_id == default_account.id) + .all() + ) + assert saved_folder_data == {("INBOX", "inbox")} def test_imap_handle_folder_renames(db, default_account): monitor = ImapSyncMonitor(default_account) folder_names_and_roles = { - ('INBOX', 'inbox'), - ('[Gmail]/Todos', 'all'), - ('[Gmail]/Basura', 'trash') + ("INBOX", "inbox"), + ("[Gmail]/Todos", "all"), + ("[Gmail]/Basura", "trash"), } folders_renamed = { - ('INBOX', 'inbox'), - ('[Gmail]/All', 'all'), - ('[Gmail]/Trash', 'trash') + ("INBOX", "inbox"), + ("[Gmail]/All", "all"), + ("[Gmail]/Trash", "trash"), } - original_raw_folders = [RawFolder(*args) for args in - folder_names_and_roles] + original_raw_folders = [RawFolder(*args) for args in folder_names_and_roles] renamed_raw_folders = [RawFolder(*args) for args in folders_renamed] monitor.save_folder_names(db.session, original_raw_folders) - assert len(db.session.query(Folder).filter( - Folder.account_id == default_account.id).all()) == 3 + assert ( + len( + db.session.query(Folder) + .filter(Folder.account_id == default_account.id) + .all() + ) + == 3 + ) monitor.save_folder_names(db.session, renamed_raw_folders) saved_folder_data = set( - db.session.query(Folder.name, Folder.canonical_name).filter( - Folder.account_id == default_account.id).all()) + db.session.query(Folder.name, Folder.canonical_name) + .filter(Folder.account_id == default_account.id) + .all() + ) assert saved_folder_data == folders_renamed def test_gmail_handle_folder_renames(db, default_account): monitor = GmailSyncMonitor(default_account) - folder_names_and_roles = { - ('[Gmail]/Todos', 'all'), - ('[Gmail]/Basura', 'trash') - } + folder_names_and_roles = {("[Gmail]/Todos", "all"), ("[Gmail]/Basura", "trash")} - folders_renamed = { - ('[Gmail]/All', 'all'), - 
('[Gmail]/Trash', 'trash') - } - original_raw_folders = [RawFolder(*args) for args in - folder_names_and_roles] + folders_renamed = {("[Gmail]/All", "all"), ("[Gmail]/Trash", "trash")} + original_raw_folders = [RawFolder(*args) for args in folder_names_and_roles] renamed_raw_folders = [RawFolder(*args) for args in folders_renamed] monitor.save_folder_names(db.session, original_raw_folders) - original_folders = db.session.query(Folder).filter( - Folder.account_id == default_account.id).all() + original_folders = ( + db.session.query(Folder).filter(Folder.account_id == default_account.id).all() + ) assert len(original_folders) == 2 for folder in original_folders: assert folder.category is not None - original_categories = {f.canonical_name: f.category.display_name for f in - original_folders} + original_categories = { + f.canonical_name: f.category.display_name for f in original_folders + } for folder in folder_names_and_roles: display_name, role = folder @@ -100,18 +112,22 @@ def test_gmail_handle_folder_renames(db, default_account): monitor.save_folder_names(db.session, renamed_raw_folders) saved_folder_data = set( - db.session.query(Folder.name, Folder.canonical_name).filter( - Folder.account_id == default_account.id).all()) + db.session.query(Folder.name, Folder.canonical_name) + .filter(Folder.account_id == default_account.id) + .all() + ) assert saved_folder_data == folders_renamed - renamed_folders = db.session.query(Folder).filter( - Folder.account_id == default_account.id).all() + renamed_folders = ( + db.session.query(Folder).filter(Folder.account_id == default_account.id).all() + ) for folder in renamed_folders: assert folder.category is not None - renamed_categories = {f.canonical_name: f.category.display_name for f in - renamed_folders} + renamed_categories = { + f.canonical_name: f.category.display_name for f in renamed_folders + } for folder in folders_renamed: display_name, role = folder @@ -121,87 +137,89 @@ def test_gmail_handle_folder_renames(db, default_account): def test_save_gmail_folder_names(db, default_account): monitor = GmailSyncMonitor(default_account) folder_names_and_roles = { - ('[Gmail]/All Mail', 'all'), - ('[Gmail]/Trash', 'trash'), - ('[Gmail]/Spam', 'spam'), - ('Miscellania', ''), - ('Recipes', ''), + ("[Gmail]/All Mail", "all"), + ("[Gmail]/Trash", "trash"), + ("[Gmail]/Spam", "spam"), + ("Miscellania", ""), + ("Recipes", ""), } raw_folders = [RawFolder(*args) for args in folder_names_and_roles] monitor.save_folder_names(db.session, raw_folders) saved_folder_data = set( db.session.query(Folder.name, Folder.canonical_name).filter( - Folder.account_id == default_account.id) + Folder.account_id == default_account.id + ) ) assert saved_folder_data == { - ('[Gmail]/All Mail', 'all'), - ('[Gmail]/Trash', 'trash'), - ('[Gmail]/Spam', 'spam') + ("[Gmail]/All Mail", "all"), + ("[Gmail]/Trash", "trash"), + ("[Gmail]/Spam", "spam"), } # Casing on "Inbox" is different to make what we get from folder listing # consistent with what we get in X-GM-LABELS during sync. 
expected_saved_names_and_roles = { - ('[Gmail]/All Mail', 'all'), - ('[Gmail]/Trash', 'trash'), - ('[Gmail]/Spam', 'spam'), - ('Miscellania', ''), - ('Recipes', ''), + ("[Gmail]/All Mail", "all"), + ("[Gmail]/Trash", "trash"), + ("[Gmail]/Spam", "spam"), + ("Miscellania", ""), + ("Recipes", ""), } saved_label_data = set( db.session.query(Label.name, Label.canonical_name).filter( - Label.account_id == default_account.id) + Label.account_id == default_account.id + ) ) assert saved_label_data == expected_saved_names_and_roles saved_category_data = set( db.session.query(Category.display_name, Category.name).filter( - Category.namespace_id == default_account.namespace.id) + Category.namespace_id == default_account.namespace.id + ) ) assert saved_category_data == expected_saved_names_and_roles def test_handle_trailing_whitespace(db, default_account): raw_folders = [ - RawFolder('Miscellania', ''), - RawFolder('Miscellania ', ''), - RawFolder('Inbox', 'inbox') + RawFolder("Miscellania", ""), + RawFolder("Miscellania ", ""), + RawFolder("Inbox", "inbox"), ] monitor = ImapSyncMonitor(default_account) monitor.save_folder_names(db.session, raw_folders) saved_folder_data = set( db.session.query(Folder.name, Folder.canonical_name).filter( - Folder.account_id == default_account.id) + Folder.account_id == default_account.id + ) ) - assert saved_folder_data == {('Miscellania', ''), ('Inbox', 'inbox')} + assert saved_folder_data == {("Miscellania", ""), ("Inbox", "inbox")} def test_imap_remote_delete(db, default_account): monitor = ImapSyncMonitor(default_account) folders = { - ('All', 'inbox'), - ('Trash', 'trash'), - ('Applications', ''), + ("All", "inbox"), + ("Trash", "trash"), + ("Applications", ""), } - new_folders = { - ('All', 'inbox'), - ('Trash', 'trash') - } - original_raw_folders = [RawFolder(*args) for args in - folders] + new_folders = {("All", "inbox"), ("Trash", "trash")} + original_raw_folders = [RawFolder(*args) for args in folders] new_raw_folders = [RawFolder(*args) for args in new_folders] monitor.save_folder_names(db.session, original_raw_folders) - original_folders = db.session.query(Folder).filter( - Folder.account_id == default_account.id).all() + original_folders = ( + db.session.query(Folder).filter(Folder.account_id == default_account.id).all() + ) assert len(original_folders) == 3 for label in original_folders: assert label.category is not None - original_categories = {f.canonical_name: f.category.display_name for f in - original_folders} + original_categories = { + f.canonical_name: f.category.display_name for f in original_folders + } for folder in folders: display_name, role = folder @@ -209,18 +227,22 @@ def test_imap_remote_delete(db, default_account): monitor.save_folder_names(db.session, new_raw_folders) saved_folder_data = set( - db.session.query(Folder.name, Folder.canonical_name).filter( - Folder.account_id == default_account.id).all()) + db.session.query(Folder.name, Folder.canonical_name) + .filter(Folder.account_id == default_account.id) + .all() + ) assert saved_folder_data == new_folders - renamed_folders = db.session.query(Folder).filter( - Folder.account_id == default_account.id).all() + renamed_folders = ( + db.session.query(Folder).filter(Folder.account_id == default_account.id).all() + ) for folder in renamed_folders: assert folder.category is not None - renamed_categories = {f.canonical_name: f.category.display_name for f in - renamed_folders} + renamed_categories = { + f.canonical_name: f.category.display_name for f in renamed_folders + } for folder in 
new_folders: display_name, role = folder @@ -229,17 +251,18 @@ def test_imap_remote_delete(db, default_account): def test_not_deleting_canonical_folders(empty_db, default_account): # Create a label w/ no messages attached. - label = Label.find_or_create(empty_db.session, default_account, - '[Gmail]/Tous les messages') - label.canonical_name = 'all' + label = Label.find_or_create( + empty_db.session, default_account, "[Gmail]/Tous les messages" + ) + label.canonical_name = "all" empty_db.session.commit() monitor = GmailSyncMonitor(default_account) folder_names_and_roles = { - ('[Gmail]/Corbeille', 'trash'), - ('[Gmail]/Spam', 'spam'), - ('Recettes', None), + ("[Gmail]/Corbeille", "trash"), + ("[Gmail]/Spam", "spam"), + ("Recettes", None), } raw_folders = [RawFolder(*args) for args in folder_names_and_roles] diff --git a/inbox/test/imap/test_smtp.py b/inbox/test/imap/test_smtp.py index 4f4f23a20..e51c9ca2c 100644 --- a/inbox/test/imap/test_smtp.py +++ b/inbox/test/imap/test_smtp.py @@ -12,70 +12,81 @@ def test_use_smtp_over_ssl(): # Auth won't actually work but we just want to test connection # initialization here and below. SMTPConnection.smtp_password = mock.Mock() - conn = SMTPConnection(account_id=1, - email_address='inboxapptest@gmail.com', - smtp_username='inboxapptest@gmail.com', - auth_type='password', - auth_token='secret_password', - smtp_endpoint=('smtp.gmail.com', 465), - ssl_required=True, - log=get_logger()) + conn = SMTPConnection( + account_id=1, + email_address="inboxapptest@gmail.com", + smtp_username="inboxapptest@gmail.com", + auth_type="password", + auth_token="secret_password", + smtp_endpoint=("smtp.gmail.com", 465), + ssl_required=True, + log=get_logger(), + ) assert isinstance(conn.connection, smtplib.SMTP_SSL) @pytest.mark.networkrequired def test_use_starttls(): - conn = SMTPConnection(account_id=1, - email_address='inboxapptest@gmail.com', - smtp_username='inboxapptest@gmail.com', - auth_type='password', - auth_token='secret_password', - smtp_endpoint=('smtp.gmail.com', 587), - ssl_required=True, - log=get_logger()) + conn = SMTPConnection( + account_id=1, + email_address="inboxapptest@gmail.com", + smtp_username="inboxapptest@gmail.com", + auth_type="password", + auth_token="secret_password", + smtp_endpoint=("smtp.gmail.com", 587), + ssl_required=True, + log=get_logger(), + ) assert isinstance(conn.connection, smtplib.SMTP) -@pytest.mark.skipif(True, reason='Need to investigate') +@pytest.mark.skipif(True, reason="Need to investigate") @pytest.mark.networkrequired def test_use_plain(): ssl = True with pytest.raises(SendMailException): - conn = SMTPConnection(account_id=1, - email_address='test@tivertical.com', - smtp_username='test@tivertical.com', - auth_type='password', - auth_token='testpwd', - smtp_endpoint=('tivertical.com', 587), - ssl_required=ssl, - log=get_logger()) + conn = SMTPConnection( + account_id=1, + email_address="test@tivertical.com", + smtp_username="test@tivertical.com", + auth_type="password", + auth_token="testpwd", + smtp_endpoint=("tivertical.com", 587), + ssl_required=ssl, + log=get_logger(), + ) ssl = False - conn = SMTPConnection(account_id=1, - email_address='test@tivertical.com', - smtp_username='test@tivertical.com', - auth_type='password', - auth_token='testpwd', - smtp_endpoint=('tivertical.com', 587), - ssl_required=ssl, - log=get_logger()) + conn = SMTPConnection( + account_id=1, + email_address="test@tivertical.com", + smtp_username="test@tivertical.com", + auth_type="password", + auth_token="testpwd", + 
smtp_endpoint=("tivertical.com", 587), + ssl_required=ssl, + log=get_logger(), + ) assert isinstance(conn.connection, smtplib.SMTP) -@pytest.mark.parametrize('smtp_port', [465, 587]) +@pytest.mark.parametrize("smtp_port", [465, 587]) @pytest.mark.networkrequired def test_handle_disconnect(monkeypatch, smtp_port): def simulate_disconnect(self): raise smtplib.SMTPServerDisconnected() - monkeypatch.setattr('smtplib.SMTP.rset', simulate_disconnect) - monkeypatch.setattr('smtplib.SMTP.mail', lambda *args: (550, 'NOPE')) - conn = SMTPConnection(account_id=1, - email_address='inboxapptest@gmail.com', - smtp_username='inboxapptest@gmail.com', - auth_type='password', - auth_token='secret_password', - smtp_endpoint=('smtp.gmail.com', smtp_port), - ssl_required=True, - log=get_logger()) + + monkeypatch.setattr("smtplib.SMTP.rset", simulate_disconnect) + monkeypatch.setattr("smtplib.SMTP.mail", lambda *args: (550, "NOPE")) + conn = SMTPConnection( + account_id=1, + email_address="inboxapptest@gmail.com", + smtp_username="inboxapptest@gmail.com", + auth_type="password", + auth_token="secret_password", + smtp_endpoint=("smtp.gmail.com", smtp_port), + ssl_required=True, + log=get_logger(), + ) with pytest.raises(smtplib.SMTPSenderRefused): - conn.sendmail(['test@example.com'], 'hello there') + conn.sendmail(["test@example.com"], "hello there") diff --git a/inbox/test/imap/test_update_metadata.py b/inbox/test/imap/test_update_metadata.py index 42b0b7bb2..a3d92cb9b 100644 --- a/inbox/test/imap/test_update_metadata.py +++ b/inbox/test/imap/test_update_metadata.py @@ -2,58 +2,64 @@ import json from inbox.crispin import GmailFlags, Flags from inbox.models.backends.imap import ImapUid -from inbox.mailsync.backends.imap.common import (update_metadata, - update_message_metadata) -from inbox.test.util.base import (add_fake_message, add_fake_imapuid, - add_fake_folder, add_fake_thread) +from inbox.mailsync.backends.imap.common import update_metadata, update_message_metadata +from inbox.test.util.base import ( + add_fake_message, + add_fake_imapuid, + add_fake_folder, + add_fake_thread, +) -def test_gmail_label_sync(db, default_account, message, folder, - imapuid, default_namespace): +def test_gmail_label_sync( + db, default_account, message, folder, imapuid, default_namespace +): # Note that IMAPClient parses numeric labels into integer types. We have to # correctly handle those too. 
new_flags = { - imapuid.msg_uid: GmailFlags((), - (u'\\Important', u'\\Starred', u'foo', 42), - None) + imapuid.msg_uid: GmailFlags( + (), (u"\\Important", u"\\Starred", u"foo", 42), None + ) } - update_metadata(default_namespace.account.id, - folder.id, folder.canonical_name, new_flags, db.session) + update_metadata( + default_namespace.account.id, + folder.id, + folder.canonical_name, + new_flags, + db.session, + ) category_canonical_names = {c.name for c in message.categories} category_display_names = {c.display_name for c in message.categories} - assert 'important' in category_canonical_names - assert {'foo', '42'}.issubset(category_display_names) + assert "important" in category_canonical_names + assert {"foo", "42"}.issubset(category_display_names) -def test_gmail_drafts_flag_constrained_by_folder(db, default_account, message, - imapuid, folder): - new_flags = {imapuid.msg_uid: GmailFlags((), (u'\\Draft',), None)} - update_metadata(default_account.id, folder.id, 'all', new_flags, - db.session) +def test_gmail_drafts_flag_constrained_by_folder( + db, default_account, message, imapuid, folder +): + new_flags = {imapuid.msg_uid: GmailFlags((), (u"\\Draft",), None)} + update_metadata(default_account.id, folder.id, "all", new_flags, db.session) assert message.is_draft - update_metadata(default_account.id, folder.id, 'trash', new_flags, - db.session) + update_metadata(default_account.id, folder.id, "trash", new_flags, db.session) assert not message.is_draft -@pytest.mark.parametrize('folder_role', ['drafts', 'trash', 'archive']) -def test_generic_drafts_flag_constrained_by_folder(db, generic_account, - folder_role): +@pytest.mark.parametrize("folder_role", ["drafts", "trash", "archive"]) +def test_generic_drafts_flag_constrained_by_folder(db, generic_account, folder_role): msg_uid = 22 thread = add_fake_thread(db.session, generic_account.namespace.id) - message = add_fake_message(db.session, generic_account.namespace.id, - thread) + message = add_fake_message(db.session, generic_account.namespace.id, thread) folder = add_fake_folder(db.session, generic_account) add_fake_imapuid(db.session, generic_account.id, message, folder, msg_uid) - new_flags = {msg_uid: Flags(('\\Draft',), None)} - update_metadata(generic_account.id, folder.id, folder_role, new_flags, - db.session) - assert message.is_draft == (folder_role == 'drafts') + new_flags = {msg_uid: Flags(("\\Draft",), None)} + update_metadata(generic_account.id, folder.id, folder_role, new_flags, db.session) + assert message.is_draft == (folder_role == "drafts") def test_update_categories_when_actionlog_entry_missing( - db, default_account, message, imapuid): + db, default_account, message, imapuid +): message.categories_changes = True db.session.commit() update_message_metadata(db.session, imapuid.account, message, False) @@ -62,17 +68,64 @@ def test_update_categories_when_actionlog_entry_missing( def test_truncate_imapuid_extra_flags(db, default_account, message, folder): - imapuid = ImapUid(message=message, account_id=default_account.id, - msg_uid=2222, folder=folder) - imapuid.update_flags(['We', 'the', 'People', 'of', 'the', 'United', - 'States', 'in', 'Order', 'to', 'form', 'a', 'more', - 'perfect', 'Union', 'establish', 'Justice', - 'insure', 'domestic', 'Tranquility', 'provide', - 'for', 'the', 'common', 'defence', 'promote', 'the', - 'general', 'Welfare', 'and', 'secure', 'the', - 'Blessings', 'of', 'Liberty', 'to', 'ourselves', - 'and', 'our', 'Posterity', 'do', 'ordain', 'and', - 'establish', 'this', 'Constitution', 'for', 'the', 
- 'United', 'States', 'of', 'America']) + imapuid = ImapUid( + message=message, account_id=default_account.id, msg_uid=2222, folder=folder + ) + imapuid.update_flags( + [ + "We", + "the", + "People", + "of", + "the", + "United", + "States", + "in", + "Order", + "to", + "form", + "a", + "more", + "perfect", + "Union", + "establish", + "Justice", + "insure", + "domestic", + "Tranquility", + "provide", + "for", + "the", + "common", + "defence", + "promote", + "the", + "general", + "Welfare", + "and", + "secure", + "the", + "Blessings", + "of", + "Liberty", + "to", + "ourselves", + "and", + "our", + "Posterity", + "do", + "ordain", + "and", + "establish", + "this", + "Constitution", + "for", + "the", + "United", + "States", + "of", + "America", + ] + ) assert len(json.dumps(imapuid.extra_flags)) < 255 diff --git a/inbox/test/providers/__init__.py b/inbox/test/providers/__init__.py index 737940769..da3f6d994 100644 --- a/inbox/test/providers/__init__.py +++ b/inbox/test/providers/__init__.py @@ -1,5 +1,7 @@ # Allow out-of-tree backend submodules. from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) from inbox.util.misc import register_backends + module_registry = register_backends(__name__, __path__) diff --git a/inbox/test/scheduling/test_sync_start_logic.py b/inbox/test/scheduling/test_sync_start_logic.py index b9377287e..a96690bd2 100644 --- a/inbox/test/scheduling/test_sync_start_logic.py +++ b/inbox/test/scheduling/test_sync_start_logic.py @@ -14,8 +14,10 @@ def patched_sync_service(db, host=host, process_number=0): - s = SyncService(process_identifier='{}:{}'.format(host, process_number), - process_number=process_number) + s = SyncService( + process_identifier="{}:{}".format(host, process_number), + process_number=process_number, + ) def start_sync(aid): acc = db.session.query(Account).get(aid) @@ -35,36 +37,37 @@ def purge_other_accounts(default_account=None): q = db_session.query(Account) if default_account is not None: q = q.filter(Account.id != default_account.id) - q.delete(synchronize_session='fetch') + q.delete(synchronize_session="fetch") db_session.commit() -def test_accounts_started_when_process_previously_assigned( - db, default_account, config): - config['SYNC_STEAL_ACCOUNTS'] = False - default_account.desired_sync_host = '{}:{}'.format(host, 0) +def test_accounts_started_when_process_previously_assigned(db, default_account, config): + config["SYNC_STEAL_ACCOUNTS"] = False + default_account.desired_sync_host = "{}:{}".format(host, 0) db.session.commit() s = patched_sync_service(db, host=host, process_number=0) assert s.account_ids_to_sync() == {default_account.id} -def test_start_new_accounts_when_stealing_enabled(monkeypatch, db, - default_account, config): - config['SYNC_STEAL_ACCOUNTS'] = True +def test_start_new_accounts_when_stealing_enabled( + monkeypatch, db, default_account, config +): + config["SYNC_STEAL_ACCOUNTS"] = True purge_other_accounts(default_account) s = patched_sync_service(db) default_account.sync_host = None db.session.commit() - s.poll_shared_queue({'queue_name': 'foo', 'id': default_account.id}) + s.poll_shared_queue({"queue_name": "foo", "id": default_account.id}) assert s.start_sync.call_count == 1 assert s.start_sync.call_args == mock.call(default_account.id) -def test_dont_start_accounts_if_over_ppa_limit(monkeypatch, db, - default_account, config): - config['SYNC_STEAL_ACCOUNTS'] = True +def test_dont_start_accounts_if_over_ppa_limit( + monkeypatch, db, default_account, config +): + config["SYNC_STEAL_ACCOUNTS"] = True 
purge_other_accounts(default_account) default_account.sync_host = None @@ -73,30 +76,29 @@ def test_dont_start_accounts_if_over_ppa_limit(monkeypatch, db, s._pending_avgs_provider = mock.Mock() s._pending_avgs_provider.get_pending_avgs = lambda *args: {15: 11} - s.poll_shared_queue({'queue_name': 'foo', 'id': default_account.id}) + s.poll_shared_queue({"queue_name": "foo", "id": default_account.id}) assert s.start_sync.call_count == 0 -def test_dont_start_new_accounts_when_stealing_disabled(db, config, - default_account): - config['SYNC_STEAL_ACCOUNTS'] = False +def test_dont_start_new_accounts_when_stealing_disabled(db, config, default_account): + config["SYNC_STEAL_ACCOUNTS"] = False s = patched_sync_service(db) default_account.sync_host = None db.session.commit() - s.poll_shared_queue({'queue_name': 'foo', 'id': default_account.id}) + s.poll_shared_queue({"queue_name": "foo", "id": default_account.id}) assert s.start_sync.call_count == 0 def test_concurrent_syncs(monkeypatch, db, default_account, config): - config['SYNC_STEAL_ACCOUNTS'] = True + config["SYNC_STEAL_ACCOUNTS"] = True purge_other_accounts(default_account) s1 = patched_sync_service(db, process_number=0) s2 = patched_sync_service(db, process_number=2) default_account.desired_sync_host = s1.process_identifier db.session.commit() - s1.poll({'queue_name': 'foo'}) - s2.poll({'queue_name': 'foo'}) + s1.poll({"queue_name": "foo"}) + s2.poll({"queue_name": "foo"}) # Check that only one SyncService instance claims the account. assert s1.start_sync.call_count == 1 assert s1.start_sync.call_args == mock.call(default_account.id) @@ -108,18 +110,18 @@ def test_twice_queued_accounts_started_once(monkeypatch, db, default_account): s = patched_sync_service(db) default_account.desired_sync_host = s.process_identifier db.session.commit() - s.poll({'queue_name': 'foo'}) - s.poll({'queue_name': 'foo'}) + s.poll({"queue_name": "foo"}) + s.poll({"queue_name": "foo"}) assert default_account.sync_host == s.process_identifier assert s.start_sync.call_count == 1 def test_external_sync_disabling(monkeypatch, db): purge_other_accounts() - account = add_generic_imap_account(db.session, - email_address='test@example.com') + account = add_generic_imap_account(db.session, email_address="test@example.com") other_account = add_generic_imap_account( - db.session, email_address='test2@example.com') + db.session, email_address="test2@example.com" + ) account.sync_host = None account.desired_sync_host = None other_account.sync_host = None @@ -127,42 +129,41 @@ def test_external_sync_disabling(monkeypatch, db): db.session.commit() s = patched_sync_service(db) - s.poll_shared_queue({'queue_name': 'foo', 'id': account.id}) - s.poll_shared_queue({'queue_name': 'foo', 'id': other_account.id}) + s.poll_shared_queue({"queue_name": "foo", "id": account.id}) + s.poll_shared_queue({"queue_name": "foo", "id": other_account.id}) assert len(s.syncing_accounts) == 2 account.mark_for_deletion() db.session.commit() assert account.sync_should_run is False - assert account._sync_status['sync_disabled_reason'] == 'account deleted' + assert account._sync_status["sync_disabled_reason"] == "account deleted" account.mark_invalid() db.session.commit() assert account.sync_should_run is False - assert account.sync_state == 'invalid' - assert account._sync_status['sync_disabled_reason'] == \ - 'invalid credentials' + assert account.sync_state == "invalid" + assert account._sync_status["sync_disabled_reason"] == "invalid credentials" - s.poll({'queue_name': 'foo'}) + s.poll({"queue_name": 
"foo"}) assert s.syncing_accounts == {other_account.id} def test_http_frontend(db, default_account, monkeypatch): s = patched_sync_service(db) - s.poll({'queue_name': 'foo'}) + s.poll({"queue_name": "foo"}) - monkeypatch.setattr('pympler.muppy.get_objects', lambda *args: []) - monkeypatch.setattr('pympler.summary.summarize', lambda *args: []) + monkeypatch.setattr("pympler.muppy.get_objects", lambda *args: []) + monkeypatch.setattr("pympler.summary.summarize", lambda *args: []) frontend = SyncHTTPFrontend(s, 16384, trace_greenlets=True, profile=True) app = frontend._create_app() - app.config['TESTING'] = True + app.config["TESTING"] = True with app.test_client() as c: - resp = c.get('/profile') + resp = c.get("/profile") assert resp.status_code == 200 - resp = c.get('/load') + resp = c.get("/load") assert resp.status_code == 200 - resp = c.get('/mem') + resp = c.get("/mem") assert resp.status_code == 200 monkeypatch.undo() @@ -173,15 +174,17 @@ def test_http_unassignment(db, default_account): default_account.desired_sync_host = None default_account.sync_host = None db.session.commit() - s.poll_shared_queue({'queue_name': 'foo', 'id': default_account.id}) + s.poll_shared_queue({"queue_name": "foo", "id": default_account.id}) frontend = SyncHTTPFrontend(s, 16384, False, False) app = frontend._create_app() - app.config['TESTING'] = True + app.config["TESTING"] = True with app.test_client() as c: resp = c.post( - '/unassign', data=json.dumps({'account_id': default_account.id}), - content_type='application/json') + "/unassign", + data=json.dumps({"account_id": default_account.id}), + content_type="application/json", + ) assert resp.status_code == 200 db.session.expire_all() assert default_account.sync_host is None @@ -190,17 +193,19 @@ def test_http_unassignment(db, default_account): # process. 
with app.test_client() as c: resp = c.post( - '/unassign', data=json.dumps({'account_id': default_account.id}), - content_type='application/json') + "/unassign", + data=json.dumps({"account_id": default_account.id}), + content_type="application/json", + ) assert resp.status_code == 409 @pytest.mark.parametrize("sync_state", ["running", "stopped", "invalid", None]) -def test_start_accounts_w_sync_should_run_set(monkeypatch, db, default_account, - config, - sync_state): +def test_start_accounts_w_sync_should_run_set( + monkeypatch, db, default_account, config, sync_state +): purge_other_accounts(default_account) - config['SYNC_STEAL_ACCOUNTS'] = True + config["SYNC_STEAL_ACCOUNTS"] = True default_account.sync_should_run = True default_account.sync_state = sync_state default_account.sync_host = None @@ -208,5 +213,5 @@ def test_start_accounts_w_sync_should_run_set(monkeypatch, db, default_account, db.session.commit() s = patched_sync_service(db) - s.poll_shared_queue({'queue_name': 'foo', 'id': default_account.id}) + s.poll_shared_queue({"queue_name": "foo", "id": default_account.id}) assert s.start_sync.call_count == 1 diff --git a/inbox/test/scheduling/test_syncback_logic.py b/inbox/test/scheduling/test_syncback_logic.py index f269b882d..d513452e2 100644 --- a/inbox/test/scheduling/test_syncback_logic.py +++ b/inbox/test/scheduling/test_syncback_logic.py @@ -22,7 +22,7 @@ def purge_accounts_and_actions(): @pytest.yield_fixture def patched_enginemanager(monkeypatch): engines = {k: None for k in range(0, 6)} - monkeypatch.setattr('inbox.ignition.engine_manager.engines', engines) + monkeypatch.setattr("inbox.ignition.engine_manager.engines", engines) yield monkeypatch.undo() @@ -35,12 +35,19 @@ def uses_crispin_client(self): def execute_with_lock(self): with session_scope(self.account_id) as db_session: action_log_entries = db_session.query(ActionLog).filter( - ActionLog.id.in_(self.action_log_ids)) + ActionLog.id.in_(self.action_log_ids) + ) for action_log_entry in action_log_entries: - action_log_entry.status = 'successful' + action_log_entry.status = "successful" db_session.commit() - monkeypatch.setattr('inbox.transactions.actions.SyncbackTask.uses_crispin_client', uses_crispin_client) - monkeypatch.setattr('inbox.transactions.actions.SyncbackTask.execute_with_lock', execute_with_lock) + + monkeypatch.setattr( + "inbox.transactions.actions.SyncbackTask.uses_crispin_client", + uses_crispin_client, + ) + monkeypatch.setattr( + "inbox.transactions.actions.SyncbackTask.execute_with_lock", execute_with_lock + ) yield monkeypatch.undo() @@ -48,19 +55,20 @@ def execute_with_lock(self): def schedule_test_action(db_session, account): from inbox.models.category import Category - category_type = 'label' if account.provider == 'gmail' else 'folder' + category_type = "label" if account.provider == "gmail" else "folder" category = Category.find_or_create( - db_session, account.namespace.id, name=None, - display_name='{}-{}'.format(account.id, random.randint(1, 356)), - type_=category_type) + db_session, + account.namespace.id, + name=None, + display_name="{}-{}".format(account.id, random.randint(1, 356)), + type_=category_type, + ) db_session.flush() - if category_type == 'folder': - schedule_action('create_folder', category, account.namespace.id, - db_session) + if category_type == "folder": + schedule_action("create_folder", category, account.namespace.id, db_session) else: - schedule_action('create_label', category, account.namespace.id, - db_session) + schedule_action("create_label", category, 
account.namespace.id, db_session) db_session.commit() @@ -68,12 +76,14 @@ def test_all_keys_are_assigned_exactly_once(patched_enginemanager): assigned_keys = [] service = SyncbackService( - syncback_id=0, process_number=0, total_processes=2, num_workers=2) + syncback_id=0, process_number=0, total_processes=2, num_workers=2 + ) assert service.keys == [0, 2, 4] assigned_keys.extend(service.keys) service = SyncbackService( - syncback_id=0, process_number=1, total_processes=2, num_workers=2) + syncback_id=0, process_number=1, total_processes=2, num_workers=2 + ) assert service.keys == [1, 3, 5] assigned_keys.extend(service.keys) @@ -83,20 +93,23 @@ def test_all_keys_are_assigned_exactly_once(patched_enginemanager): assert len(assigned_keys) == len(set(assigned_keys)) -@pytest.mark.skipif(True, reason='Need to investigate') +@pytest.mark.skipif(True, reason="Need to investigate") def test_actions_are_claimed(purge_accounts_and_actions, patched_task): with session_scope_by_shard_id(0) as db_session: account = add_generic_imap_account( - db_session, email_address='{}@test.com'.format(0)) + db_session, email_address="{}@test.com".format(0) + ) schedule_test_action(db_session, account) with session_scope_by_shard_id(1) as db_session: account = add_generic_imap_account( - db_session, email_address='{}@test.com'.format(1)) + db_session, email_address="{}@test.com".format(1) + ) schedule_test_action(db_session, account) service = SyncbackService( - syncback_id=0, process_number=1, total_processes=2, num_workers=2) + syncback_id=0, process_number=1, total_processes=2, num_workers=2 + ) service._restart_workers() service._process_log() @@ -106,31 +119,33 @@ def test_actions_are_claimed(purge_accounts_and_actions, patched_task): with session_scope_by_shard_id(0) as db_session: q = db_session.query(ActionLog) assert q.count() == 1 - assert all(a.status == 'pending' for a in q) + assert all(a.status == "pending" for a in q) with session_scope_by_shard_id(1) as db_session: q = db_session.query(ActionLog) assert q.count() == 1 - assert all(a.status != 'pending' for a in q) + assert all(a.status != "pending" for a in q) -@pytest.mark.skipif(True, reason='Need to investigate') -def test_actions_claimed_by_a_single_service(purge_accounts_and_actions, - patched_task): +@pytest.mark.skipif(True, reason="Need to investigate") +def test_actions_claimed_by_a_single_service(purge_accounts_and_actions, patched_task): actionlogs = [] for key in (0, 1): with session_scope_by_shard_id(key) as db_session: account = add_generic_imap_account( - db_session, - email_address='{}@test.com'.format(key)) + db_session, email_address="{}@test.com".format(key) + ) schedule_test_action(db_session, account) actionlogs += [db_session.query(ActionLog).one().id] services = [] for process_number in (0, 1): service = SyncbackService( - syncback_id=0, process_number=process_number, total_processes=2, - num_workers=2) + syncback_id=0, + process_number=process_number, + total_processes=2, + num_workers=2, + ) service._process_log() services.append(service) @@ -139,31 +154,39 @@ def test_actions_claimed_by_a_single_service(purge_accounts_and_actions, assert service.task_queue.peek().action_log_ids == [actionlogs[i]] -@pytest.mark.skipif(True, reason='Test if causing Jenkins build to fail') -def test_actions_for_invalid_accounts_are_skipped(purge_accounts_and_actions, - patched_task): +@pytest.mark.skipif(True, reason="Test if causing Jenkins build to fail") +def test_actions_for_invalid_accounts_are_skipped( + purge_accounts_and_actions, 
patched_task +): with session_scope_by_shard_id(0) as db_session: - account = add_generic_imap_account( - db_session, email_address='person@test.com') + account = add_generic_imap_account(db_session, email_address="person@test.com") schedule_test_action(db_session, account) namespace_id = account.namespace.id - count = db_session.query(ActionLog).filter( - ActionLog.namespace_id == namespace_id).count() - assert account.sync_state != 'invalid' + count = ( + db_session.query(ActionLog) + .filter(ActionLog.namespace_id == namespace_id) + .count() + ) + assert account.sync_state != "invalid" another_account = add_generic_imap_account( - db_session, email_address='another@test.com') + db_session, email_address="another@test.com" + ) schedule_test_action(db_session, another_account) another_namespace_id = another_account.namespace.id - another_count = db_session.query(ActionLog).filter( - ActionLog.namespace_id == another_namespace_id).count() - assert another_account.sync_state != 'invalid' + another_count = ( + db_session.query(ActionLog) + .filter(ActionLog.namespace_id == another_namespace_id) + .count() + ) + assert another_account.sync_state != "invalid" account.mark_invalid() db_session.commit() service = SyncbackService( - syncback_id=0, process_number=0, total_processes=2, num_workers=2) + syncback_id=0, process_number=0, total_processes=2, num_workers=2 + ) service._process_log() while not service.task_queue.empty(): @@ -171,11 +194,12 @@ def test_actions_for_invalid_accounts_are_skipped(purge_accounts_and_actions, with session_scope_by_shard_id(0) as db_session: q = db_session.query(ActionLog).filter( - ActionLog.namespace_id == namespace_id, - ActionLog.status == 'pending') + ActionLog.namespace_id == namespace_id, ActionLog.status == "pending" + ) assert q.count() == count q = db_session.query(ActionLog).filter( - ActionLog.namespace_id == another_namespace_id) - assert q.filter(ActionLog.status == 'pending').count() == 0 - assert q.filter(ActionLog.status == 'successful').count() == another_count + ActionLog.namespace_id == another_namespace_id + ) + assert q.filter(ActionLog.status == "pending").count() == 0 + assert q.filter(ActionLog.status == "successful").count() == another_count diff --git a/inbox/test/search/conftest.py b/inbox/test/search/conftest.py index ccaeba2b0..7f3668920 100644 --- a/inbox/test/search/conftest.py +++ b/inbox/test/search/conftest.py @@ -1,6 +1,4 @@ -from inbox.test.util.base import (config, db, absolute_path, - default_namespace) +from inbox.test.util.base import config, db, absolute_path, default_namespace from inbox.test.api.base import api_client -__all__ = ['config', 'db', 'absolute_path', 'default_namespace', - 'api_client'] +__all__ = ["config", "db", "absolute_path", "default_namespace", "api_client"] diff --git a/inbox/test/security/test_blobstorage.py b/inbox/test/security/test_blobstorage.py index 95a47abed..972386f7d 100644 --- a/inbox/test/security/test_blobstorage.py +++ b/inbox/test/security/test_blobstorage.py @@ -6,15 +6,15 @@ # This will run the test for a bunch of randomly-chosen values of sample_input. 
@hypothesis.given(str, bool) def test_blobstorage(config, sample_input, encrypt): - config['ENCRYPT_SECRETS'] = encrypt + config["ENCRYPT_SECRETS"] = encrypt assert decode_blob(encode_blob(sample_input)) == sample_input @hypothesis.given(str, bool) def test_encoded_format(config, sample_input, encrypt): - config['ENCRYPT_SECRETS'] = encrypt + config["ENCRYPT_SECRETS"] = encrypt encoded = encode_blob(sample_input) - assert encoded.startswith(chr(encrypt) + '\x00\x00\x00\x00') + assert encoded.startswith(chr(encrypt) + "\x00\x00\x00\x00") data = encoded[5:] if encrypt: assert data != sample_input @@ -25,10 +25,9 @@ def test_encoded_format(config, sample_input, encrypt): @hypothesis.given(unicode, bool) def test_message_body_storage(config, message, sample_input, encrypt): - config['ENCRYPT_SECRETS'] = encrypt + config["ENCRYPT_SECRETS"] = encrypt message.body = None assert message._compacted_body is None message.body = sample_input - assert message._compacted_body.startswith( - chr(encrypt) + '\x00\x00\x00\x00') + assert message._compacted_body.startswith(chr(encrypt) + "\x00\x00\x00\x00") assert message.body == sample_input diff --git a/inbox/test/security/test_secret.py b/inbox/test/security/test_secret.py index a47cf647d..9217874f0 100644 --- a/inbox/test/security/test_secret.py +++ b/inbox/test/security/test_secret.py @@ -8,7 +8,7 @@ ACCOUNT_ID = 1 -@pytest.mark.parametrize('encrypt', [True, False]) +@pytest.mark.parametrize("encrypt", [True, False]) def test_secret(db, config, encrypt): """ If encryption is enabled, ensure that: @@ -16,12 +16,12 @@ def test_secret(db, config, encrypt): * secrets are decrypted correctly on retrieval. * secrets are bytes. """ - config['ENCRYPT_SECRETS'] = encrypt - bytes_secret = b'\xff\x00\xf1' - unicode_secret = u'foo\u00a0' + config["ENCRYPT_SECRETS"] = encrypt + bytes_secret = b"\xff\x00\xf1" + unicode_secret = u"foo\u00a0" secret = Secret() - secret.type = 'password' + secret.type = "password" secret.secret = bytes_secret db.session.add(secret) @@ -30,18 +30,18 @@ def test_secret(db, config, encrypt): secret = db.session.query(Secret).get(secret.id) if encrypt: - assert secret._secret != bytes_secret, 'secret is not encrypted' + assert secret._secret != bytes_secret, "secret is not encrypted" else: assert secret._secret == bytes_secret - assert secret.secret == bytes_secret, 'secret not decrypted correctly' + assert secret.secret == bytes_secret, "secret not decrypted correctly" with pytest.raises(TypeError) as e: secret.secret = unicode_secret - assert e.typename == 'TypeError', 'secret cannot be unicode' + assert e.typename == "TypeError", "secret cannot be unicode" -@pytest.mark.parametrize('encrypt', [True, False]) +@pytest.mark.parametrize("encrypt", [True, False]) def test_token(db, config, encrypt): """ If encryption is enabled, ensure that: @@ -51,27 +51,29 @@ def test_token(db, config, encrypt): Note: This tests refresh_tokens but passwords work in the same way """ - config['ENCRYPT_SECRETS'] = encrypt - token = 'tH*$&123abcº™™∞' - - email = 'vault.test@localhost.com' - resp = {'access_token': '', - 'expires_in': 3600, - 'refresh_token': token, - 'scope': '', - 'email': email, - 'family_name': '', - 'given_name': '', - 'name': '', - 'gender': '', - 'id': 0, - 'user_id': '', - 'id_token': '', - 'link': 'http://example.com', - 'locale': '', - 'picture': '', - 'hd': ''} - g = GmailAuthHandler('gmail') + config["ENCRYPT_SECRETS"] = encrypt + token = "tH*$&123abcº™™∞" + + email = "vault.test@localhost.com" + resp = { + "access_token": "", + 
"expires_in": 3600, + "refresh_token": token, + "scope": "", + "email": email, + "family_name": "", + "given_name": "", + "name": "", + "gender": "", + "id": 0, + "user_id": "", + "id_token": "", + "link": "http://example.com", + "locale": "", + "picture": "", + "hd": "", + } + g = GmailAuthHandler("gmail") g.verify_config = lambda x: True account = g.get_account(SHARD_ID, email, resp) @@ -84,15 +86,14 @@ def test_token(db, config, encrypt): assert secret == account.secret if encrypt: - assert secret._secret != token, 'token not encrypted' + assert secret._secret != token, "token not encrypted" else: - assert secret._secret == token, \ - 'token encrypted when encryption disabled' + assert secret._secret == token, "token encrypted when encryption disabled" decrypted_secret = secret.secret - assert decrypted_secret == token and \ - account.refresh_token == decrypted_secret, \ - 'token not decrypted correctly' + assert ( + decrypted_secret == token and account.refresh_token == decrypted_secret + ), "token not decrypted correctly" # Remove auth credentials row, else weird things # happen when we try to read both encrypted and @@ -103,22 +104,22 @@ def test_token(db, config, encrypt): db.session.commit() -@pytest.mark.parametrize('encrypt', [True, False]) +@pytest.mark.parametrize("encrypt", [True, False]) def test_token_inputs(db, config, encrypt, default_account): """ Ensure unicode tokens are converted to bytes. Ensure invalid UTF-8 tokens are handled correctly. """ - config['ENCRYPT_SECRETS'] = encrypt + config["ENCRYPT_SECRETS"] = encrypt # Unicode - unicode_token = u'myunicodesecret' + unicode_token = u"myunicodesecret" # Invalid UTF-8 byte sequence - invalid_token = b'\xff\x10' + invalid_token = b"\xff\x10" # NULL byte - null_token = b'\x1f\x00\xf1' + null_token = b"\x1f\x00\xf1" default_account.refresh_token = unicode_token db.session.commit() @@ -126,17 +127,17 @@ def test_token_inputs(db, config, encrypt, default_account): secret_id = default_account.refresh_token_id secret = db.session.query(Secret).get(secret_id) - assert not isinstance(secret.secret, unicode), 'secret cannot be unicode' - assert secret.secret == unicode_token, 'token not decrypted correctly' + assert not isinstance(secret.secret, unicode), "secret cannot be unicode" + assert secret.secret == unicode_token, "token not decrypted correctly" with pytest.raises(ValueError) as e: default_account.refresh_token = invalid_token - assert e.typename == 'ValueError', 'token cannot be invalid UTF-8' + assert e.typename == "ValueError", "token cannot be invalid UTF-8" with pytest.raises(ValueError) as f: default_account.refresh_token = null_token - assert f.typename == 'ValueError', 'token cannot contain NULL byte' + assert f.typename == "ValueError", "token cannot contain NULL byte" assert default_account.refresh_token == unicode_token diff --git a/inbox/test/security/test_smtp_ssl.py b/inbox/test/security/test_smtp_ssl.py index 713c1b4cd..7412d14ed 100644 --- a/inbox/test/security/test_smtp_ssl.py +++ b/inbox/test/security/test_smtp_ssl.py @@ -15,26 +15,33 @@ smtpd.DEBUGSTREAM = sys.stderr -__all__ = ['api_client', 'default_account'] +__all__ = ["api_client", "default_account"] current_dir = os.path.dirname(__file__) -SELF_SIGNED_CERTFILE = os.path.realpath(os.path.join(current_dir, '..', 'data/self_signed_cert.pem')) -SELF_SIGNED_KEYFILE = os.path.realpath(os.path.join(current_dir, '..', 'data/self_signed_cert.key')) +SELF_SIGNED_CERTFILE = os.path.realpath( + os.path.join(current_dir, "..", "data/self_signed_cert.pem") +) 
+SELF_SIGNED_KEYFILE = os.path.realpath( + os.path.join(current_dir, "..", "data/self_signed_cert.key") +) from inbox.sendmail.smtp import postel SHARD_ID = 0 -SMTP_SERVER_HOST = 'localhost' +SMTP_SERVER_HOST = "localhost" class BadCertSMTPServer(smtpd.DebuggingServer): - def __init__(self, localaddr, remoteaddr): smtpd.DebuggingServer.__init__(self, localaddr, remoteaddr) - self.set_socket(ssl.wrap_socket(self.socket, - certfile=SELF_SIGNED_CERTFILE, - keyfile=SELF_SIGNED_KEYFILE, - server_side=True)) + self.set_socket( + ssl.wrap_socket( + self.socket, + certfile=SELF_SIGNED_CERTFILE, + keyfile=SELF_SIGNED_KEYFILE, + server_side=True, + ) + ) def run_bad_cert_smtp_server(): @@ -46,7 +53,7 @@ def run_bad_cert_smtp_server(): asyncore.loop() -@pytest.yield_fixture(scope='module') +@pytest.yield_fixture(scope="module") def bad_cert_smtp_server(): s = gevent.spawn(run_bad_cert_smtp_server) yield s @@ -55,23 +62,28 @@ def bad_cert_smtp_server(): @pytest.fixture def patched_smtp(monkeypatch): - monkeypatch.setattr('inbox.sendmail.smtp.postel.SMTPConnection.smtp_password', - lambda x: None) + monkeypatch.setattr( + "inbox.sendmail.smtp.postel.SMTPConnection.smtp_password", lambda x: None + ) -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def local_smtp_account(db): from inbox.auth.generic import GenericAuthHandler - handler = GenericAuthHandler(provider_name='custom') - acc = handler.get_account(SHARD_ID, - 'user@gmail.com', - {'email': 'user@gmail.com', - 'password': 'hunter2', - 'imap_server_host': 'imap-test.nylas.com', - 'imap_server_port': 143, - 'smtp_server_host': SMTP_SERVER_HOST, - 'smtp_server_port': postel.SMTP_OVER_SSL_TEST_PORT}) + handler = GenericAuthHandler(provider_name="custom") + acc = handler.get_account( + SHARD_ID, + "user@gmail.com", + { + "email": "user@gmail.com", + "password": "hunter2", + "imap_server_host": "imap-test.nylas.com", + "imap_server_port": 143, + "smtp_server_host": SMTP_SERVER_HOST, + "smtp_server_port": postel.SMTP_OVER_SSL_TEST_PORT, + }, + ) db.session.add(acc) db.session.commit() return acc @@ -80,25 +92,30 @@ def local_smtp_account(db): @pytest.fixture def example_draft(db, default_account): return { - 'subject': 'Draft test at {}'.format(datetime.datetime.utcnow()), - 'body': '
 Sea, birds and sand.
', - 'to': [{'name': 'The red-haired mermaid', - 'email': default_account.email_address}] + "subject": "Draft test at {}".format(datetime.datetime.utcnow()), + "body": "
 Sea, birds and sand.
", + "to": [ + {"name": "The red-haired mermaid", "email": default_account.email_address} + ], } -def test_smtp_ssl_verification_bad_cert(db, bad_cert_smtp_server, - example_draft, local_smtp_account, - api_client, patched_smtp): +def test_smtp_ssl_verification_bad_cert( + db, + bad_cert_smtp_server, + example_draft, + local_smtp_account, + api_client, + patched_smtp, +): api_client = new_api_client(db, local_smtp_account.namespace) while len(asyncore.socket_map) < 1: gevent.sleep(0) # let SMTP daemon start up - r = api_client.post_data('/send', example_draft) + r = api_client.post_data("/send", example_draft) assert r.status_code == 200 -if __name__ == '__main__': - server = BadCertSMTPServer((SMTP_SERVER_HOST, SMTP_SERVER_PORT), - (None, None)) +if __name__ == "__main__": + server = BadCertSMTPServer((SMTP_SERVER_HOST, SMTP_SERVER_PORT), (None, None)) asyncore.loop() diff --git a/inbox/test/system/client.py b/inbox/test/system/client.py index dc8632767..9e4106791 100644 --- a/inbox/test/system/client.py +++ b/inbox/test/system/client.py @@ -3,8 +3,11 @@ class NylasTestClient(APIClient): - - def __init__(self, email_address=None, api_base=os.getenv("INBOX_API_PORT_5555_TCP_ADDR", "http://localhost:5555")): + def __init__( + self, + email_address=None, + api_base=os.getenv("INBOX_API_PORT_5555_TCP_ADDR", "http://localhost:5555"), + ): self.email_address = email_address APIClient.__init__(self, None, None, None, api_base) diff --git a/inbox/test/system/conftest.py b/inbox/test/system/conftest.py index 0426549d3..327ffe285 100644 --- a/inbox/test/system/conftest.py +++ b/inbox/test/system/conftest.py @@ -2,7 +2,10 @@ import os import platform -API_BASE = "http://%s:%s" % (os.getenv("API_PORT_5555_TCP_ADDR", "localhost"), os.getenv("API_PORT_5555_TCP_PORT", "5555")) +API_BASE = "http://%s:%s" % ( + os.getenv("API_PORT_5555_TCP_ADDR", "localhost"), + os.getenv("API_PORT_5555_TCP_PORT", "5555"), +) TEST_MAX_DURATION_SECS = 360 TEST_GRANULARITY_CHECK_SECS = 0.1 @@ -18,23 +21,29 @@ # load them from an external json file. try: from accounts import credentials as raw_credentials - credentials = [(c['user'], c['password']) for c in raw_credentials] + + credentials = [(c["user"], c["password"]) for c in raw_credentials] all_accounts = [NylasTestClient(email, API_BASE) for email, _ in credentials] - gmail_accounts = [NylasTestClient(email, API_BASE) - for email, password in credentials - if "gmail.com" in email or - "inboxapp.com" in email] + gmail_accounts = [ + NylasTestClient(email, API_BASE) + for email, password in credentials + if "gmail.com" in email or "inboxapp.com" in email + ] calendar_providers = ["gmail.com", "onmicrosoft.com"] - calendar_accounts = [NylasTestClient(email, API_BASE) - for email, password in credentials - if any(domain in email for domain in calendar_providers)] + calendar_accounts = [ + NylasTestClient(email, API_BASE) + for email, password in credentials + if any(domain in email for domain in calendar_providers) + ] except ImportError: - print ("Error: test accounts file not found. " - "You need to create accounts.py\n" - "File format: credentials = [{'user': 'bill@example.com', " - "'password': 'VerySecret'}]") + print ( + "Error: test accounts file not found. 
" + "You need to create accounts.py\n" + "File format: credentials = [{'user': 'bill@example.com', " + "'password': 'VerySecret'}]" + ) raise @@ -51,20 +60,20 @@ def wrapped_f(*args, **kwargs): break sleep(TEST_GRANULARITY_CHECK_SECS) - assert success, ("Failed to {} in less than {}s on {}" - .format(name, TEST_MAX_DURATION_SECS, - client.email_address)) + assert success, "Failed to {} in less than {}s on {}".format( + name, TEST_MAX_DURATION_SECS, client.email_address + ) - format_test_result(name, client.provider, - client.email_address, start_time) + format_test_result(name, client.provider, client.email_address, start_time) return True + return wrapped_f + return wrap def format_test_result(function_name, provider, email, start_time): - print "%s\t%s\t%s\t%f" % (function_name, provider, - email, time() - start_time) + print "%s\t%s\t%s\t%f" % (function_name, provider, email, time() - start_time) def create_account(db_session, email, password): @@ -72,11 +81,11 @@ def create_account(db_session, email, password): auth_handler = handler_from_provider(provider) # Special-case Gmail and Outlook, because we need to provide an oauth token # and not merely a password. - response = {'email': email} - if provider == 'gmail': + response = {"email": email} + if provider == "gmail": code = google_auth(email, password) response = auth_handler._get_authenticated_user(code) - elif provider == 'outlook': + elif provider == "outlook": code = outlook_auth(email, password) response = auth_handler._get_authenticated_user(code) else: diff --git a/inbox/test/system/google_auth_helper.py b/inbox/test/system/google_auth_helper.py index e62f152ff..e717980ee 100644 --- a/inbox/test/system/google_auth_helper.py +++ b/inbox/test/system/google_auth_helper.py @@ -9,24 +9,24 @@ class GoogleAuthParser(HTMLParser): _in_form = False def handle_starttag(self, tag, attrs): - if tag == 'form': + if tag == "form": self._in_form = True self.params = {} for k, v in attrs: - if k == 'action': + if k == "action": self.action = v if self._in_form: attr_dict = {} for k, v in attrs: attr_dict[k] = v - if tag == 'input': - if 'value' in attr_dict: - self.params[attr_dict['name']] = attr_dict['value'] + if tag == "input": + if "value" in attr_dict: + self.params[attr_dict["name"]] = attr_dict["value"] def handle_endtag(self, tag): - if tag == 'form': + if tag == "form": self._in_form = False @@ -35,11 +35,11 @@ class GoogleConnectParser(HTMLParser): params = {} def handle_starttag(self, tag, attrs): - if tag == 'form': + if tag == "form": self._in_form = True for k, v in attrs: - if k == 'action': + if k == "action": self.action = v if self._in_form: @@ -47,34 +47,35 @@ def handle_starttag(self, tag, attrs): for k, v in attrs: attr_dict[k] = v - if tag == 'input': - if 'value' in attr_dict: - self.params[attr_dict['name']] = attr_dict['value'] + if tag == "input": + if "value" in attr_dict: + self.params[attr_dict["name"]] = attr_dict["value"] def handle_endtag(self, tag): - if tag == 'form': + if tag == "form": self._in_form = False class GoogleTokenParser(HTMLParser): - def handle_starttag(self, tag, attrs): - if tag == 'input': + if tag == "input": attr_dict = {} for k, v in attrs: attr_dict[k] = v - if attr_dict['id'] == 'code': - self.code = attr_dict['value'] + if attr_dict["id"] == "code": + self.code = attr_dict["value"] def google_auth(email, password): session = requests.Session() - url_args = {'redirect_uri': GmailAuthHandler.OAUTH_REDIRECT_URI, - 'client_id': GmailAuthHandler.OAUTH_CLIENT_ID, - 'response_type': 'code', - 
'scope': GmailAuthHandler.OAUTH_SCOPE, - 'access_type': 'offline', - 'login_hint': email} + url_args = { + "redirect_uri": GmailAuthHandler.OAUTH_REDIRECT_URI, + "client_id": GmailAuthHandler.OAUTH_CLIENT_ID, + "response_type": "code", + "scope": GmailAuthHandler.OAUTH_SCOPE, + "access_type": "offline", + "login_hint": email, + } url = url_concat(GmailAuthHandler.OAUTH_AUTHENTICATE_URL, url_args) req = session.get(url) assert req.ok @@ -84,8 +85,8 @@ def google_auth(email, password): params = auth_parser.params action = auth_parser.action - params['Email'] = email - params['Passwd'] = password + params["Email"] = email + params["Passwd"] = password req = session.post(action, data=params) assert req.ok @@ -96,7 +97,7 @@ def google_auth(email, password): params = connect_parser.params action = connect_parser.action - params['submit_access'] = 'true' + params["submit_access"] = "true" req = session.post(action, data=params) assert req.ok diff --git a/inbox/test/system/outlook_auth_helper.py b/inbox/test/system/outlook_auth_helper.py index cd4ce0597..2bf84ea5d 100644 --- a/inbox/test/system/outlook_auth_helper.py +++ b/inbox/test/system/outlook_auth_helper.py @@ -12,49 +12,58 @@ class OutlookAuthParser(HTMLParser): action = None def handle_starttag(self, tag, attrs): - if tag == 'script': + if tag == "script": self._in_script = True for k, v in attrs: - if k == 'action': + if k == "action": self.action = v def handle_endtag(self, tag): - if tag == 'script': + if tag == "script": self._in_script = False def parse_params(self, data): vals = {} # Convert the server data into a dict - for i in filter(lambda x: ':' in x, data.split(',')): - m = re.match('(.*?):(.*)', i) + for i in filter(lambda x: ":" in x, data.split(",")): + m = re.match("(.*?):(.*)", i) k = m.group(1) v = m.group(2) vals[k] = v # extract the PPFT - sfttag = vals['sFTTag'] + sfttag = vals["sFTTag"] m = re.match('.*value="(.*)".*', sfttag) - self.action = vals['urlPost'][1:-1] + self.action = vals["urlPost"][1:-1] # Static parameters that don't change between logins. Yes they look # obscure, because they are. They were taken from the login process # and although this may be a bit fragile, this is necessary for # getting the refresh token without a heavy-weight headless browser # that supports javascript just for this login flow. 
-cg3 - self.params = {'type': '11', 'PPSX': 'Passpo', 'NewUser': '1', - 'LoginOptions': '1', 'i3': '53255', 'm1': '2560', - 'm2': '1600', 'm3': '0', 'i12': '1', 'i17': '0', - 'i18': '__Login_Host|1'} + self.params = { + "type": "11", + "PPSX": "Passpo", + "NewUser": "1", + "LoginOptions": "1", + "i3": "53255", + "m1": "2560", + "m2": "1600", + "m3": "0", + "i12": "1", + "i17": "0", + "i18": "__Login_Host|1", + } # Generated value that we need to use to login - self.params['PPFT'] = m.group(1) + self.params["PPFT"] = m.group(1) def handle_data(self, data): if self._in_script: if data.startswith("var ServerData"): # Extract the server data - m = re.match('var ServerData = {(.*)};', data).group(1) + m = re.match("var ServerData = {(.*)};", data).group(1) self.parse_params(m) @@ -63,11 +72,11 @@ class OutlookUpdateParser(HTMLParser): params = {} def handle_starttag(self, tag, attrs): - if tag == 'form': + if tag == "form": self._in_form = True for k, v in attrs: - if k == 'action': + if k == "action": self.action = v if self._in_form: @@ -75,12 +84,12 @@ def handle_starttag(self, tag, attrs): for k, v in attrs: attr_dict[k] = v - if tag == 'input': - if 'value' in attr_dict: - self.params[attr_dict['name']] = attr_dict['value'] + if tag == "input": + if "value" in attr_dict: + self.params[attr_dict["name"]] = attr_dict["value"] def handle_endtag(self, tag): - if tag == 'form': + if tag == "form": self._in_form = False @@ -89,7 +98,7 @@ class OutlookConsentParser(HTMLParser): params = {} def handle_starttag(self, tag, attrs): - if tag == 'form': + if tag == "form": self._in_form = True if self._in_form: @@ -97,23 +106,25 @@ def handle_starttag(self, tag, attrs): for k, v in attrs: attr_dict[k] = v - if tag == 'input': - if 'value' in attr_dict: - self.params[attr_dict['name']] = attr_dict['value'] + if tag == "input": + if "value" in attr_dict: + self.params[attr_dict["name"]] = attr_dict["value"] def handle_endtag(self, tag): - if tag == 'form': + if tag == "form": self._in_form = False def outlook_auth(email, password): session = requests.Session() - url_args = {'redirect_uri': OutlookAuthHandler.OAUTH_REDIRECT_URI, - 'client_id': OutlookAuthHandler.OAUTH_CLIENT_ID, - 'response_type': 'code', - 'scope': OutlookAuthHandler.OAUTH_SCOPE, - 'access_type': 'offline', - 'login_hint': email} + url_args = { + "redirect_uri": OutlookAuthHandler.OAUTH_REDIRECT_URI, + "client_id": OutlookAuthHandler.OAUTH_CLIENT_ID, + "response_type": "code", + "scope": OutlookAuthHandler.OAUTH_SCOPE, + "access_type": "offline", + "login_hint": email, + } url = url_concat(OutlookAuthHandler.OAUTH_AUTHENTICATE_URL, url_args) req = session.get(url) assert req.ok @@ -122,8 +133,8 @@ def outlook_auth(email, password): auth_parser.feed(req.text) params = auth_parser.params - params['login'] = email - params['passwd'] = password + params["login"] = email + params["passwd"] = password req = session.post(auth_parser.action, data=params) assert req.ok @@ -140,5 +151,5 @@ def outlook_auth(email, password): req = session.post(update_parser.action, data=consent_parser.params) assert req.ok - code = re.match('https.*code=(.*)&lc=1033', req.url).group(1) + code = re.match("https.*code=(.*)&lc=1033", req.url).group(1) return code diff --git a/inbox/test/system/random_words.py b/inbox/test/system/random_words.py index 4eb833abb..4b5675777 100755 --- a/inbox/test/system/random_words.py +++ b/inbox/test/system/random_words.py @@ -3,26 +3,30 @@ import random import json -DICT_FILE = '/etc/dictionaries-common/words' +DICT_FILE = 
"/etc/dictionaries-common/words" def get_words(): words = [] try: - with open(DICT_FILE, 'r') as f: - words.extend(f.read().split('\n')) + with open(DICT_FILE, "r") as f: + words.extend(f.read().split("\n")) except IOError: try: - with open('LICENSE', 'r') as f: - words.extend(f.read().translate(string.maketrans("", ""), - string.punctuation).split()) + with open("LICENSE", "r") as f: + words.extend( + f.read() + .translate(string.maketrans("", ""), string.punctuation) + .split() + ) except IOError: - print json.dumps({'error': "couldn't open dictionary file", - 'filename': DICT_FILE}) + print json.dumps( + {"error": "couldn't open dictionary file", "filename": DICT_FILE} + ) return words -def random_words(count=int(random.uniform(1, 500)), sig='me'): +def random_words(count=int(random.uniform(1, 500)), sig="me"): words = get_words() random_word_list = [] @@ -30,7 +34,7 @@ def random_words(count=int(random.uniform(1, 500)), sig='me'): word_index = int(random.uniform(1, len(words))) random_word = words[word_index] - salutation = ['Hey', 'Hi', 'Ahoy', 'Yo'][int(random.uniform(0, 3))] + salutation = ["Hey", "Hi", "Ahoy", "Yo"][int(random.uniform(0, 3))] random_word_list.append("{} {},\n\n".format(salutation, random_word)) just_entered = False @@ -39,37 +43,37 @@ def random_words(count=int(random.uniform(1, 500)), sig='me'): random_word = words[word_index] if i > 0 and not just_entered: - random_word = ' ' + random_word + random_word = " " + random_word just_entered = False if int(random.uniform(1, 15)) == 1: - random_word += ('.') + random_word += "." if int(random.uniform(1, 3)) == 1 and sig: - random_word += ('\n') + random_word += "\n" just_entered = True if int(random.uniform(1, 3)) == 1 and sig: - random_word += ('\n') + random_word += "\n" just_entered = True random_word_list.append(random_word) - text = ''.join(random_word_list) + '.' + text = "".join(random_word_list) + "." if sig: if int(random.uniform(1, 2)) == 1: - salutation = ['Cheers', 'Adios', 'Ciao', 'Bye'][int(random.uniform(0, 3))] - punct = ['.', ',', '!', ''][int(random.uniform(0, 3))] + salutation = ["Cheers", "Adios", "Ciao", "Bye"][int(random.uniform(0, 3))] + punct = [".", ",", "!", ""][int(random.uniform(0, 3))] text += "\n\n{}{}\n".format(salutation, punct) else: - text += '\n\n' + text += "\n\n" - punct = ['-', '- ', '--', '-- '][int(random.uniform(0, 3))] - text += '{}{}'.format(punct, sig) + punct = ["-", "- ", "--", "-- "][int(random.uniform(0, 3))] + text += "{}{}".format(punct, sig) return text -if __name__ == '__main__': +if __name__ == "__main__": print random_words() diff --git a/inbox/test/system/test_auth.py b/inbox/test/system/test_auth.py index 923c2c14d..cf074b25e 100644 --- a/inbox/test/system/test_auth.py +++ b/inbox/test/system/test_auth.py @@ -2,7 +2,7 @@ from inbox.models.session import session_scope from client import NylasTestClient -from conftest import (timeout_loop, credentials, create_account, API_BASE) +from conftest import timeout_loop, credentials, create_account, API_BASE try: # If there's no broken accounts file, well, tough luck but don't crash. 
@@ -14,17 +14,17 @@ broken_credentials = [] -@timeout_loop('sync_start') +@timeout_loop("sync_start") def wait_for_sync_start(client): return True if client.messages.first() else False -@timeout_loop('auth') +@timeout_loop("auth") def wait_for_auth(client): namespaces = client.namespaces.all() if len(namespaces): - client.email_address = namespaces[0]['email_address'] - client.provider = namespaces[0]['provider'] + client.email_address = namespaces[0]["email_address"] + client.provider = namespaces[0]["provider"] return True return False @@ -41,7 +41,7 @@ def test_account_auth(account_credentials): wait_for_sync_start(client) -errors = __import__('inbox.basicauth', fromlist=['basicauth']) +errors = __import__("inbox.basicauth", fromlist=["basicauth"]) def test_account_create_should_fail(): @@ -52,8 +52,7 @@ def test_account_create_should_fail(): e.g. ({'user': 'foo@foo.com', 'password': 'pass'}, 'ConfigurationError') """ - credentials = [((c['user'], c['password']), e) - for (c, e) in broken_credentials] + credentials = [((c["user"], c["password"]), e) for (c, e) in broken_credentials] for ((email, password), error) in credentials: error_obj = getattr(errors, error) @@ -62,5 +61,5 @@ def test_account_create_should_fail(): create_account(db_session, email, password) -if __name__ == '__main__': +if __name__ == "__main__": pytest.main([__file__]) diff --git a/inbox/test/system/test_drafts.py b/inbox/test/system/test_drafts.py index 954d606cd..9178df7df 100644 --- a/inbox/test/system/test_drafts.py +++ b/inbox/test/system/test_drafts.py @@ -5,7 +5,7 @@ from inbox.client.errors import NotFoundError -@timeout_loop('file') +@timeout_loop("file") def wait_for_file(client, file_id): try: client.files.find(file_id) @@ -14,7 +14,7 @@ def wait_for_file(client, file_id): return False -@timeout_loop('draft') +@timeout_loop("draft") def wait_for_draft(client, draft_id): try: return client.drafts.find(draft_id) @@ -22,7 +22,7 @@ def wait_for_draft(client, draft_id): return False -@timeout_loop('draft_removed') +@timeout_loop("draft_removed") def check_draft_is_removed(client, draft_id): try: client.drafts.find(draft_id) @@ -37,14 +37,14 @@ def test_draft(client): # Create the file myfile = client.files.create() - myfile.filename = 'file_%d.txt' % time.time() - myfile.data = 'This is a file' + myfile.filename = "file_%d.txt" % time.time() + myfile.data = "This is a file" myfile.save() wait_for_file(client, myfile.id) # And the draft mydraft = client.drafts.create() - mydraft.to = [{'email': client.email_address}] + mydraft.to = [{"email": client.email_address}] mydraft.subject = "Test draft from Inbox - %s" % time.strftime("%H:%M:%S") mydraft.body = "This is a test email, disregard this." 
mydraft.attach(myfile) @@ -57,5 +57,5 @@ def test_draft(client): # check_draft_is_removed(client, mydraft.id) -if __name__ == '__main__': +if __name__ == "__main__": pytest.main([__file__]) diff --git a/inbox/test/system/test_events.py b/inbox/test/system/test_events.py index 638349060..f030230b5 100644 --- a/inbox/test/system/test_events.py +++ b/inbox/test/system/test_events.py @@ -24,7 +24,7 @@ def real_db(): session.close() -@timeout_loop('event') +@timeout_loop("event") def wait_for_event(client, event_id, real_db): try: return client.events.find(event_id) @@ -32,7 +32,7 @@ def wait_for_event(client, event_id, real_db): return False -@timeout_loop('event') +@timeout_loop("event") def wait_for_event_rename(client, event_id, new_title, real_db): try: ev = client.events.find(event_id) @@ -41,7 +41,7 @@ def wait_for_event_rename(client, event_id, new_title, real_db): return False -@timeout_loop('event') +@timeout_loop("event") def wait_for_event_deletion(client, event_id, real_db): try: client.events.find(event_id) @@ -50,18 +50,21 @@ def wait_for_event_deletion(client, event_id, real_db): return True -@timeout_loop('event action') +@timeout_loop("event action") def wait_for_syncback_success(client, real_db, action): # Waits for the most recent action of the specified type to transition # to 'successful'. Otherwise, we don't know the test has actually passed. - action_log = real_db.query(ActionLog).filter_by( - table_name='event', - action=action).order_by('created_at desc').first() + action_log = ( + real_db.query(ActionLog) + .filter_by(table_name="event", action=action) + .order_by("created_at desc") + .first() + ) if not action_log: return False - if action_log.status == 'successful': + if action_log.status == "successful": return True - if action_log.status == 'pending' and action_log.retries > 2: + if action_log.status == "pending" and action_log.retries > 2: # Give up after two retries in the test environment. 
return False @@ -86,26 +89,27 @@ def real_test_event_crud(client, real_db): ev.save() wait_for_event(client, ev.id, real_db) - wait_for_syncback_success(client, real_db, 'create_event') + wait_for_syncback_success(client, real_db, "create_event") # now, update it ev.title = "Renamed title" - ev.participants = [{'email': 'bland@example.com', 'name': 'John Bland'}] + ev.participants = [{"email": "bland@example.com", "name": "John Bland"}] ev.save() wait_for_event_rename(client, ev.id, ev.title, real_db) - wait_for_syncback_success(client, real_db, 'update_event') + wait_for_syncback_success(client, real_db, "update_event") # finally, delete it ns.events.delete(ev.id) wait_for_event_deletion(client, ev.id, real_db) - wait_for_syncback_success(client, real_db, 'delete_event') + wait_for_syncback_success(client, real_db, "delete_event") @pytest.mark.parametrize("client", calendar_accounts) def test_event_crud(client, real_db): real_test_event_crud(client, real_db) -if __name__ == '__main__': + +if __name__ == "__main__": pytest.main([__file__]) diff --git a/inbox/test/system/test_google_events.py b/inbox/test/system/test_google_events.py index cc89ddf8d..cad3bc2a0 100644 --- a/inbox/test/system/test_google_events.py +++ b/inbox/test/system/test_google_events.py @@ -21,25 +21,27 @@ def get_api_access(db_session, email_address): - account = db_session.query(Account).filter( - Account.email_address == email_address).one() + account = ( + db_session.query(Account).filter(Account.email_address == email_address).one() + ) if account is None: - raise Exception(("No account found for email address %s. " - "Are you sure you've authed it?") % email_address) + raise Exception( + ("No account found for email address %s. " "Are you sure you've authed it?") + % email_address + ) - return GoogleEventsProvider(account.id, account.namespace.id).\ - _get_google_service() + return GoogleEventsProvider(account.id, account.namespace.id)._get_google_service() -@timeout_loop('event') +@timeout_loop("event") def wait_for_event(client, event_id, real_db): try: ev = client.events.find(event_id) cal = client.calendars.find(ev.calendar_id) api = get_api_access(real_db, client.email_address) events = api.events().list(calendarId=cal.name).execute() - for event in events['items']: - if event['summary'] == ev.title: + for event in events["items"]: + if event["summary"] == ev.title: return True return False @@ -47,15 +49,15 @@ def wait_for_event(client, event_id, real_db): return False -@timeout_loop('event') +@timeout_loop("event") def wait_for_event_rename(client, event_id, new_title, real_db): try: ev = client.events.find(event_id) cal = client.calendars.find(ev.calendar_id) api = get_api_access(real_db, client.email_address) events = api.events().list(calendarId=cal.name).execute() - for event in events['items']: - if event['summary'] == new_title: + for event in events["items"]: + if event["summary"] == new_title: return True return False @@ -63,14 +65,14 @@ def wait_for_event_rename(client, event_id, new_title, real_db): return False -@timeout_loop('event') +@timeout_loop("event") def wait_for_event_deletion(client, calendar_id, event_title, real_db): try: cal = client.calendars.find(calendar_id) api = get_api_access(real_db, client.email_address) events = api.events().list(calendarId=cal.name).execute() - for event in events['items']: - if event['summary'] == event_title: + for event in events["items"]: + if event["summary"] == event_title: return False return True @@ -83,5 +85,5 @@ def test_event_crud(client, 
real_db): real_test_event_crud(client, real_db) -if __name__ == '__main__': +if __name__ == "__main__": pytest.main([__file__]) diff --git a/inbox/test/system/test_labels.py b/inbox/test/system/test_labels.py index 7923121b3..a8adae927 100644 --- a/inbox/test/system/test_labels.py +++ b/inbox/test/system/test_labels.py @@ -11,17 +11,17 @@ from conftest import gmail_accounts, timeout_loop -@timeout_loop('tag_add') +@timeout_loop("tag_add") def wait_for_tag(client, thread_id, tagname): thread = client.threads.find(thread_id) - tags = [tag['name'] for tag in thread.tags] + tags = [tag["name"] for tag in thread.tags] return True if tagname in tags else False -@timeout_loop('tag_remove') +@timeout_loop("tag_remove") def wait_for_tag_removal(client, thread_id, tagname): thread = client.threads.find(thread_id) - tags = [tag['name'] for tag in thread.tags] + tags = [tag["name"] for tag in thread.tags] return True if tagname not in tags else False @@ -34,33 +34,37 @@ def test_gmail_labels(client): account = None with session_scope() as db_session: - account = db_session.query(Account).filter_by( - email_address=client.email_address).one() + account = ( + db_session.query(Account) + .filter_by(email_address=client.email_address) + .one() + ) connection_pool = writable_connection_pool(account.id, pool_size=1) with connection_pool.get() as crispin_client: labelname = "custom-label" + datetime.now().strftime("%s.%f") print "Label: %s" % labelname - folder_name = crispin_client.folder_names()['all'] + folder_name = crispin_client.folder_names()["all"] crispin_client.select_folder(folder_name, uidvalidity_cb) print "Subject : %s" % thread.subject - uids = crispin_client.search_uids(['SUBJECT', thread.subject]) + uids = crispin_client.search_uids(["SUBJECT", thread.subject]) g_thrid = crispin_client.g_metadata(uids).items()[0][1].thrid crispin_client.add_label(g_thrid, labelname) wait_for_tag(client, thread.id, labelname) draft = client.drafts.create( - to=[{'name': 'Nylas SelfSend', 'email': client.email_address}], + to=[{"name": "Nylas SelfSend", "email": client.email_address}], body="Blah, replying to message", - subject=thread.subject) + subject=thread.subject, + ) draft.send() crispin_client.remove_label(g_thrid, labelname) wait_for_tag_removal(client, thread.id, labelname) -if __name__ == '__main__': +if __name__ == "__main__": pytest.main([__file__]) diff --git a/inbox/test/system/test_sending.py b/inbox/test/system/test_sending.py index 6fe8c6166..46398a725 100644 --- a/inbox/test/system/test_sending.py +++ b/inbox/test/system/test_sending.py @@ -7,7 +7,7 @@ import json -@timeout_loop('send') +@timeout_loop("send") def wait_for_send(client, subject): thread_query = client.threads.where(subject=subject) @@ -15,27 +15,28 @@ def wait_for_send(client, subject): if not threads: return False - if provider_from_address(client.email_address) not in ['unknown', 'eas']: + if provider_from_address(client.email_address) not in ["unknown", "eas"]: # Reconciliation doesn't seem to quite work on EAS because the # X-INBOX-ID header is stripped? - assert len(threads) == 1, \ - "Warning: Number of threads for unique subject is > 1!" + assert ( + len(threads) == 1 + ), "Warning: Number of threads for unique subject is > 1!" 
- tags = [t['name'] for thread in threads for t in thread.tags] + tags = [t["name"] for thread in threads for t in thread.tags] return True if ("sent" in tags and "inbox" in tags) else False -@timeout_loop('archive') +@timeout_loop("archive") def wait_for_archive(client, thread_id): thread = client.threads.find(thread_id) tags = [tag["name"] for tag in thread.tags] return True if ("archive" in tags and "inbox" not in tags) else False -@timeout_loop('trash') +@timeout_loop("trash") def wait_for_trash(client, thread_id): thread = client.threads.find(thread_id) - tags = [tag['name'] for tag in thread.tags] + tags = [tag["name"] for tag in thread.tags] return True if ("trash" in tags and "archive" not in tags) else False @@ -43,20 +44,22 @@ def wait_for_trash(client, thread_id): def test_sending(client): # Create a message and send it to ourselves subject = "%s (Self Send Test)" % strftime("%Y-%m-%d %H:%M:%S") - draft = client.drafts.create(to=[{"email": client.email_address}], - subject=subject, - body=subject + "Test email.") + draft = client.drafts.create( + to=[{"email": client.email_address}], + subject=subject, + body=subject + "Test email.", + ) - body = random_words(sig=client.email_address.split('@')[0]) + body = random_words(sig=client.email_address.split("@")[0]) - draft = client.drafts.create(to=[{"email": client.email_address}], - subject=subject, - body=body) + draft = client.drafts.create( + to=[{"email": client.email_address}], subject=subject, body=body + ) draft.send() wait_for_send(client, subject) # Archive the message - thread = client.threads.where(subject=subject, tag='inbox').first() + thread = client.threads.where(subject=subject, tag="inbox").first() thread.archive() wait_for_archive(client, thread.id) @@ -73,38 +76,38 @@ def test_multi_sending(client): # Create a message and send it to ourselves, with a different body subject = "%s (Self Multi Send Test)" % strftime("%Y-%m-%d %H:%M:%S") sent_body = subject + "Test email." - draft = client.drafts.create(to=[{"email": client.email_address}], - subject=subject, - body=sent_body) + draft = client.drafts.create( + to=[{"email": client.email_address}], subject=subject, body=sent_body + ) recv_body = subject + "Different body" - resp = client.session.post('{}/send-multiple'.format(client.api_server)) + resp = client.session.post("{}/send-multiple".format(client.api_server)) assert resp.status_code == 200 - resp = client.session.post('{}/send-multiple/{}'.format(client.api_server, - draft.id), - data=json.dumps({"body": recv_body, - "send_to": [ - {"email": - client.email_address} - ]})) + resp = client.session.post( + "{}/send-multiple/{}".format(client.api_server, draft.id), + data=json.dumps( + {"body": recv_body, "send_to": [{"email": client.email_address}]} + ), + ) assert resp.status_code == 200 wait_for_send(client, subject) - resp = client.session.delete('{}/send-multiple/{}' - .format(client.api_server, draft.id)) + resp = client.session.delete( + "{}/send-multiple/{}".format(client.api_server, draft.id) + ) assert resp.status_code == 200 wait_for_send(client, subject) # Check that there are two messages, one sent and one recieved, with # different bodies. 
- thread = client.threads.where(subject=subject, tag='inbox').first() + thread = client.threads.where(subject=subject, tag="inbox").first() assert len(thread.messages) == 2 assert thread.messages[0].body == recv_body assert thread.messages[1].body == sent_body # Archive the thread - thread = client.threads.where(subject=subject, tag='inbox').first() + thread = client.threads.where(subject=subject, tag="inbox").first() thread.archive() wait_for_archive(client, thread.id) @@ -115,5 +118,5 @@ def test_multi_sending(client): wait_for_trash(client, thread.id) -if __name__ == '__main__': +if __name__ == "__main__": pytest.main([__file__]) diff --git a/inbox/test/transactions/test_action_scheduling.py b/inbox/test/transactions/test_action_scheduling.py index 35c22ad10..89e0c4b0a 100644 --- a/inbox/test/transactions/test_action_scheduling.py +++ b/inbox/test/transactions/test_action_scheduling.py @@ -6,30 +6,46 @@ def test_action_scheduling(db, default_account): event = add_fake_event(db.session, default_account.namespace.id) - schedule_action('create_event', event, default_account.namespace.id, - db.session) + schedule_action("create_event", event, default_account.namespace.id, db.session) db.session.commit() - entry = db.session.query(ActionLog).filter( - ActionLog.namespace_id == default_account.namespace.id, - ActionLog.action == 'create_event').one() - - assert entry.discriminator == 'actionlog' - assert entry.table_name == 'event' and entry.record_id == event.id + entry = ( + db.session.query(ActionLog) + .filter( + ActionLog.namespace_id == default_account.namespace.id, + ActionLog.action == "create_event", + ) + .one() + ) + + assert entry.discriminator == "actionlog" + assert entry.table_name == "event" and entry.record_id == event.id assert not entry.extra_args - schedule_action('delete_event', event, default_account.namespace.id, - db.session, event_uid=event.uid, - calendar_name=event.calendar.name, - calendar_uid=event.calendar.uid) + schedule_action( + "delete_event", + event, + default_account.namespace.id, + db.session, + event_uid=event.uid, + calendar_name=event.calendar.name, + calendar_uid=event.calendar.uid, + ) db.session.commit() - entry = db.session.query(ActionLog).filter( - ActionLog.namespace_id == default_account.namespace.id, - ActionLog.action == 'delete_event').one() - - assert entry.discriminator == 'actionlog' - assert entry.table_name == 'event' and entry.record_id == event.id - assert entry.extra_args == \ - dict(event_uid=event.uid, calendar_name=event.calendar.name, - calendar_uid=event.calendar.uid) + entry = ( + db.session.query(ActionLog) + .filter( + ActionLog.namespace_id == default_account.namespace.id, + ActionLog.action == "delete_event", + ) + .one() + ) + + assert entry.discriminator == "actionlog" + assert entry.table_name == "event" and entry.record_id == event.id + assert entry.extra_args == dict( + event_uid=event.uid, + calendar_name=event.calendar.name, + calendar_uid=event.calendar.uid, + ) diff --git a/inbox/test/transactions/test_delta_sync.py b/inbox/test/transactions/test_delta_sync.py index 6ab48c985..39d6f6f0c 100644 --- a/inbox/test/transactions/test_delta_sync.py +++ b/inbox/test/transactions/test_delta_sync.py @@ -5,21 +5,25 @@ from inbox.test.util.base import add_fake_message from inbox.test.api.base import api_client -__all__ = ['api_client'] +__all__ = ["api_client"] -def add_account_with_different_namespace_id(db_session, - email_address='cypress@yahoo.com'): +def add_account_with_different_namespace_id( + db_session, 
email_address="cypress@yahoo.com" +): import platform from inbox.models.backends.generic import GenericAccount from inbox.models import Namespace - account = GenericAccount(id=11, - email_address=email_address, - sync_host=platform.node(), - desired_sync_host=platform.node(), - provider='yahoo') - account.imap_password = 'bananagrams' - account.smtp_password = 'bananagrams' + + account = GenericAccount( + id=11, + email_address=email_address, + sync_host=platform.node(), + desired_sync_host=platform.node(), + provider="yahoo", + ) + account.imap_password = "bananagrams" + account.smtp_password = "bananagrams" account.namespace = Namespace() db_session.add(account) db_session.commit() @@ -28,9 +32,10 @@ def add_account_with_different_namespace_id(db_session, def get_cursor(api_client, timestamp): - cursor_response = api_client.post_data('/delta/generate_cursor', - {'start': timestamp}) - return json.loads(cursor_response.data)['cursor'] + cursor_response = api_client.post_data( + "/delta/generate_cursor", {"start": timestamp} + ) + return json.loads(cursor_response.data)["cursor"] def test_latest_cursor(api_client): @@ -38,21 +43,22 @@ def test_latest_cursor(api_client): freezer.tick(datetime.timedelta(seconds=5)) now = int(time.time()) - latest_cursor_resp = api_client.post_raw('/delta/latest_cursor', None) - latest_cursor = json.loads(latest_cursor_resp.data)['cursor'] + latest_cursor_resp = api_client.post_raw("/delta/latest_cursor", None) + latest_cursor = json.loads(latest_cursor_resp.data)["cursor"] now_cursor = get_cursor(api_client, now) assert latest_cursor == now_cursor def test_invalid_input(api_client): - cursor_response = api_client.post_data('/delta/generate_cursor', - {'start': "I'm not a timestamp!"}) + cursor_response = api_client.post_data( + "/delta/generate_cursor", {"start": "I'm not a timestamp!"} + ) assert cursor_response.status_code == 400 sync_response = api_client.client.get( - '/delta?cursor={}'.format('fake cursor'), - headers=api_client.auth_header) + "/delta?cursor={}".format("fake cursor"), headers=api_client.auth_header + ) assert sync_response.status_code == 400 @@ -66,22 +72,22 @@ def test_events_are_condensed(api_client, message): cursor = get_cursor(api_client, ts) # Modify a message, then modify it again - message_id = api_client.get_data('/messages/')[0]['id'] - message_path = '/messages/{}'.format(message_id) - api_client.put_data(message_path, {'unread': True}) - api_client.put_data(message_path, {'unread': False}) - api_client.put_data(message_path, {'unread': True}) + message_id = api_client.get_data("/messages/")[0]["id"] + message_path = "/messages/{}".format(message_id) + api_client.put_data(message_path, {"unread": True}) + api_client.put_data(message_path, {"unread": False}) + api_client.put_data(message_path, {"unread": True}) # Check that successive modifies are condensed. 
- sync_data = api_client.get_data('/delta?cursor={}'.format(cursor)) - deltas = sync_data['deltas'] + sync_data = api_client.get_data("/delta?cursor={}".format(cursor)) + deltas = sync_data["deltas"] # A message modify propagates to its thread - message_deltas = [d for d in deltas if d['object'] == 'message'] + message_deltas = [d for d in deltas if d["object"] == "message"] assert len(message_deltas) == 1 delta = message_deltas[0] - assert delta['object'] == 'message' and delta['event'] == 'modify' - assert delta['attributes']['unread'] is True + assert delta["object"] == "message" and delta["event"] == "modify" + assert delta["attributes"]["unread"] is True def test_message_events_are_propagated_to_thread(api_client, message): @@ -93,34 +99,34 @@ def test_message_events_are_propagated_to_thread(api_client, message): ts = int(time.time() + 22) cursor = get_cursor(api_client, ts) - message = api_client.get_data('/messages/')[0] - message_id = message['id'] - assert message['unread'] is True + message = api_client.get_data("/messages/")[0] + message_id = message["id"] + assert message["unread"] is True - thread = api_client.get_data('/threads/{}'.format(message['thread_id'])) - assert thread['unread'] is True + thread = api_client.get_data("/threads/{}".format(message["thread_id"])) + assert thread["unread"] is True # Modify a `propagated_attribute` of the message - message_path = '/messages/{}'.format(message_id) - api_client.put_data(message_path, {'unread': False}) + message_path = "/messages/{}".format(message_id) + api_client.put_data(message_path, {"unread": False}) # Verify that a `message` and a `thread` modify delta is returned - sync_data = api_client.get_data('/delta?cursor={}'.format(cursor)) - deltas = sync_data['deltas'] + sync_data = api_client.get_data("/delta?cursor={}".format(cursor)) + deltas = sync_data["deltas"] assert len(deltas) == 2 - message_deltas = [d for d in deltas if d['object'] == 'message'] + message_deltas = [d for d in deltas if d["object"] == "message"] assert len(message_deltas) == 1 delta = message_deltas[0] - assert delta['object'] == 'message' and delta['event'] == 'modify' - assert delta['attributes']['unread'] is False + assert delta["object"] == "message" and delta["event"] == "modify" + assert delta["attributes"]["unread"] is False - thread_deltas = [d for d in deltas if d['object'] == 'thread'] + thread_deltas = [d for d in deltas if d["object"] == "thread"] assert len(thread_deltas) == 1 delta = thread_deltas[0] - assert delta['object'] == 'thread' and delta['event'] == 'modify' - assert delta['attributes']['unread'] is False - assert delta['attributes']['version'] == thread['version'] + 1 + assert delta["object"] == "thread" and delta["event"] == "modify" + assert delta["attributes"]["unread"] is False + assert delta["attributes"]["version"] == thread["version"] + 1 def test_handle_missing_objects(api_client, db, thread, default_namespace): @@ -129,15 +135,15 @@ def test_handle_missing_objects(api_client, db, thread, default_namespace): messages = [] for _ in range(100): - messages.append(add_fake_message(db.session, default_namespace.id, - thread)) + messages.append(add_fake_message(db.session, default_namespace.id, thread)) for message in messages: db.session.delete(message) db.session.commit() - sync_data = api_client.get_data('/delta?cursor={}&exclude_types=thread'. 
- format(cursor)) - assert len(sync_data['deltas']) == 100 - assert all(delta['event'] == 'delete' for delta in sync_data['deltas']) + sync_data = api_client.get_data( + "/delta?cursor={}&exclude_types=thread".format(cursor) + ) + assert len(sync_data["deltas"]) == 100 + assert all(delta["event"] == "delete" for delta in sync_data["deltas"]) def test_exclude_account(api_client, db, default_namespace, thread): @@ -145,23 +151,24 @@ def test_exclude_account(api_client, db, default_namespace, thread): cursor = get_cursor(api_client, ts) # Create `account`, `message`, `thread` deltas - default_namespace.account.sync_state = 'invalid' + default_namespace.account.sync_state = "invalid" db.session.commit() add_fake_message(db.session, default_namespace.id, thread) # Verify the default value of `exclude_account`=True and # the account delta is *not* included - sync_data = api_client.get_data('/delta?cursor={}'.format(cursor)) - assert len(sync_data['deltas']) == 2 - assert set([d['object'] for d in sync_data['deltas']]) == \ - set(['message', 'thread']) + sync_data = api_client.get_data("/delta?cursor={}".format(cursor)) + assert len(sync_data["deltas"]) == 2 + assert set([d["object"] for d in sync_data["deltas"]]) == set(["message", "thread"]) # Verify setting `exclude_account`=True returns the account delta as well. - sync_data = api_client.get_data('/delta?cursor={}&exclude_account=false'. - format(cursor)) - assert len(sync_data['deltas']) == 3 - assert set([d['object'] for d in sync_data['deltas']]) == \ - set(['message', 'thread', 'account']) + sync_data = api_client.get_data( + "/delta?cursor={}&exclude_account=false".format(cursor) + ) + assert len(sync_data["deltas"]) == 3 + assert set([d["object"] for d in sync_data["deltas"]]) == set( + ["message", "thread", "account"] + ) def test_account_delta(api_client, db, default_namespace): @@ -171,37 +178,39 @@ def test_account_delta(api_client, db, default_namespace): account = default_namespace.account # Create an `account` delta - default_namespace.account.sync_state = 'invalid' + default_namespace.account.sync_state = "invalid" db.session.commit() - sync_data = api_client.get_data('/delta?cursor={}&exclude_account=false'. 
- format(cursor)) - assert len(sync_data['deltas']) == 1 - delta = sync_data['deltas'][0] - assert delta['object'] == 'account' - assert delta['event'] == 'modify' - assert delta['attributes']['id'] == default_namespace.public_id - assert delta['attributes']['account_id'] == default_namespace.public_id - assert delta['attributes']['email_address'] == account.email_address - assert delta['attributes']['name'] == account.name - assert delta['attributes']['provider'] == account.provider - assert delta['attributes']['organization_unit'] == account.category_type - assert delta['attributes']['sync_state'] == 'invalid' - - cursor = sync_data['cursor_end'] + sync_data = api_client.get_data( + "/delta?cursor={}&exclude_account=false".format(cursor) + ) + assert len(sync_data["deltas"]) == 1 + delta = sync_data["deltas"][0] + assert delta["object"] == "account" + assert delta["event"] == "modify" + assert delta["attributes"]["id"] == default_namespace.public_id + assert delta["attributes"]["account_id"] == default_namespace.public_id + assert delta["attributes"]["email_address"] == account.email_address + assert delta["attributes"]["name"] == account.name + assert delta["attributes"]["provider"] == account.provider + assert delta["attributes"]["organization_unit"] == account.category_type + assert delta["attributes"]["sync_state"] == "invalid" + + cursor = sync_data["cursor_end"] # Create an new `account` delta - default_namespace.account.sync_state = 'running' + default_namespace.account.sync_state = "running" db.session.commit() - sync_data = api_client.get_data('/delta?cursor={}&exclude_account=false'. - format(cursor)) + sync_data = api_client.get_data( + "/delta?cursor={}&exclude_account=false".format(cursor) + ) - assert len(sync_data['deltas']) == 1 - delta = sync_data['deltas'][0] - assert delta['object'] == 'account' - assert delta['event'] == 'modify' - assert delta['attributes']['id'] == default_namespace.public_id - assert delta['attributes']['sync_state'] == 'running' + assert len(sync_data["deltas"]) == 1 + delta = sync_data["deltas"][0] + assert delta["object"] == "account" + assert delta["event"] == "modify" + assert delta["attributes"]["id"] == default_namespace.public_id + assert delta["attributes"]["sync_state"] == "running" def test_account_delta_for_different_namespace_id(db): @@ -211,15 +220,17 @@ def test_account_delta_for_different_namespace_id(db): namespace = account.namespace # Create an `account` delta - account.sync_state = 'invalid' + account.sync_state = "invalid" db.session.commit() # Verify `account` delta is not returned when exclude_account=True - txns, _ = format_transactions_after_pointer(namespace, 0, db.session, 10, - exclude_account=True) + txns, _ = format_transactions_after_pointer( + namespace, 0, db.session, 10, exclude_account=True + ) assert not txns # Verify `account` delta is returned when exclude_account=False - txns, _ = format_transactions_after_pointer(namespace, 0, db.session, 10, - exclude_account=False) + txns, _ = format_transactions_after_pointer( + namespace, 0, db.session, 10, exclude_account=False + ) assert txns diff --git a/inbox/test/transactions/test_thread_versioning.py b/inbox/test/transactions/test_thread_versioning.py index 7981f1171..41dc75440 100644 --- a/inbox/test/transactions/test_thread_versioning.py +++ b/inbox/test/transactions/test_thread_versioning.py @@ -2,7 +2,8 @@ def test_adding_and_removing_message_on_thread_increments_version( - db, thread, default_namespace): + db, thread, default_namespace +): assert thread.version 
== 0 message = add_fake_message(db.session, default_namespace.id, thread) assert thread.version == 1 @@ -12,7 +13,8 @@ def test_adding_and_removing_message_on_thread_increments_version( def test_updating_message_read_starred_increments_version( - db, thread, default_namespace): + db, thread, default_namespace +): assert thread.version == 0 message = add_fake_message(db.session, default_namespace.id, thread) @@ -21,7 +23,7 @@ def test_updating_message_read_starred_increments_version( # Modifying a non-propagated attribute does /not/ increment thread.version # (Non-propagated attributes on non-draft messages are technically # never modified) - message.subject = 'Jen nova temo' + message.subject = "Jen nova temo" db.session.commit() assert thread.version == 1 @@ -35,13 +37,11 @@ def test_updating_message_read_starred_increments_version( assert thread.version == 3 -def test_updating_message_categories_increments_version( - db, thread, default_namespace): +def test_updating_message_categories_increments_version(db, thread, default_namespace): assert thread.version == 0 message = add_fake_message(db.session, default_namespace.id, thread) - category = add_fake_category(db.session, default_namespace.id, - 'mia kategorio') + category = add_fake_category(db.session, default_namespace.id, "mia kategorio") # Modifying message's categories increments the thread.version message.categories = [category] diff --git a/inbox/test/transactions/test_transaction_creation.py b/inbox/test/transactions/test_transaction_creation.py index 5b684751a..2de5a7429 100644 --- a/inbox/test/transactions/test_transaction_creation.py +++ b/inbox/test/transactions/test_transaction_creation.py @@ -7,43 +7,58 @@ from inbox.models.mixins import HasRevisions from inbox.models.util import transaction_objects -from inbox.test.util.base import (add_fake_message, add_fake_thread, add_fake_event, - add_fake_category) +from inbox.test.util.base import ( + add_fake_message, + add_fake_thread, + add_fake_event, + add_fake_category, +) def get_latest_transaction(db_session, object_type, record_id, namespace_id): - return db_session.query(Transaction).filter( - Transaction.namespace_id == namespace_id, - Transaction.object_type == object_type, - Transaction.record_id == record_id). 
\ - order_by(desc(Transaction.id)).first() + return ( + db_session.query(Transaction) + .filter( + Transaction.namespace_id == namespace_id, + Transaction.object_type == object_type, + Transaction.record_id == record_id, + ) + .order_by(desc(Transaction.id)) + .first() + ) def get_latest_transaction_any(db_session, namespace_id): - return db_session.query(Transaction).filter( - Transaction.namespace_id == namespace_id).\ - order_by(desc(Transaction.id)).first() + return ( + db_session.query(Transaction) + .filter(Transaction.namespace_id == namespace_id) + .order_by(desc(Transaction.id)) + .first() + ) def test_thread_insert_creates_transaction(db, default_namespace): thr = add_fake_thread(db.session, default_namespace.id) - transaction = get_latest_transaction(db.session, 'thread', thr.id, - default_namespace.id) - assert transaction.command == 'insert' + transaction = get_latest_transaction( + db.session, "thread", thr.id, default_namespace.id + ) + assert transaction.command == "insert" def test_message_insert_creates_transaction(db, default_namespace): with db.session.no_autoflush: thr = add_fake_thread(db.session, default_namespace.id) msg = add_fake_message(db.session, default_namespace.id, thr) - transaction = get_latest_transaction(db.session, 'message', msg.id, - default_namespace.id) - assert transaction.command == 'insert' + transaction = get_latest_transaction( + db.session, "message", msg.id, default_namespace.id + ) + assert transaction.command == "insert" # Test that the thread gets revised too - transaction = get_latest_transaction(db.session, 'thread', thr.id, - default_namespace.id) - assert transaction.command == 'update' + transaction = get_latest_transaction( + db.session, "thread", thr.id, default_namespace.id + ) + assert transaction.command == "update" def test_message_updates_create_transaction(db, default_namespace): @@ -53,11 +68,12 @@ def test_message_updates_create_transaction(db, default_namespace): msg.is_read = True db.session.commit() - transaction = get_latest_transaction(db.session, 'message', msg.id, - default_namespace.id) + transaction = get_latest_transaction( + db.session, "message", msg.id, default_namespace.id + ) assert transaction.record_id == msg.id - assert transaction.object_type == 'message' - assert transaction.command == 'update' + assert transaction.object_type == "message" + assert transaction.command == "update" def test_message_updates_create_thread_transaction(db, default_namespace): @@ -65,30 +81,34 @@ def test_message_updates_create_thread_transaction(db, default_namespace): thr = add_fake_thread(db.session, default_namespace.id) msg = add_fake_message(db.session, default_namespace.id, thr) - transaction = get_latest_transaction(db.session, 'thread', thr.id, - default_namespace.id) - assert (transaction.record_id == thr.id and - transaction.object_type == 'thread') - assert transaction.command == 'update' + transaction = get_latest_transaction( + db.session, "thread", thr.id, default_namespace.id + ) + assert transaction.record_id == thr.id and transaction.object_type == "thread" + assert transaction.command == "update" # An update to one of the message's propagated_attributes creates a # revision for the thread msg.is_read = True db.session.commit() - new_transaction = get_latest_transaction(db.session, 'thread', thr.id, - default_namespace.id) + new_transaction = get_latest_transaction( + db.session, "thread", thr.id, default_namespace.id + ) assert new_transaction.id != transaction.id - assert (new_transaction.record_id == thr.id 
and - new_transaction.object_type == 'thread') - assert new_transaction.command == 'update' + assert ( + new_transaction.record_id == thr.id + and new_transaction.object_type == "thread" + ) + assert new_transaction.command == "update" # An update to one of its other attributes does not - msg.subject = 'Ice cubes and dogs' + msg.subject = "Ice cubes and dogs" db.session.commit() - same_transaction = get_latest_transaction(db.session, 'thread', thr.id, - default_namespace.id) + same_transaction = get_latest_transaction( + db.session, "thread", thr.id, default_namespace.id + ) assert same_transaction.id == new_transaction.id @@ -96,20 +116,24 @@ def test_message_category_updates_create_transaction(db, default_namespace): with db.session.no_autoflush: thr = add_fake_thread(db.session, default_namespace.id) msg = add_fake_message(db.session, default_namespace.id, thr) - cat = add_fake_category(db.session, default_namespace.id, 'category') + cat = add_fake_category(db.session, default_namespace.id, "category") thread_trx_before_category_change = get_latest_transaction( - db.session, 'thread', thr.id, default_namespace.id) + db.session, "thread", thr.id, default_namespace.id + ) msg.categories = [cat] db.session.commit() latest_message_trx = get_latest_transaction( - db.session, 'message', msg.id, default_namespace.id) + db.session, "message", msg.id, default_namespace.id + ) thread_trx_after_category_change = get_latest_transaction( - db.session, 'thread', thr.id, default_namespace.id) + db.session, "thread", thr.id, default_namespace.id + ) - assert latest_message_trx.command == 'update' - assert thread_trx_before_category_change.id != \ - thread_trx_after_category_change.id + assert latest_message_trx.command == "update" + assert ( + thread_trx_before_category_change.id != thread_trx_after_category_change.id + ) def test_object_type_distinguishes_messages_and_drafts(db, default_namespace): @@ -118,54 +142,59 @@ def test_object_type_distinguishes_messages_and_drafts(db, default_namespace): msg = add_fake_message(db.session, default_namespace.id, thr) msg.is_draft = 1 db.session.commit() - transaction = get_latest_transaction(db.session, 'draft', msg.id, - default_namespace.id) - assert transaction.command == 'update' + transaction = get_latest_transaction( + db.session, "draft", msg.id, default_namespace.id + ) + assert transaction.command == "update" db.session.delete(msg) db.session.commit() - transaction = get_latest_transaction(db.session, 'draft', msg.id, - default_namespace.id) - assert transaction.command == 'delete' + transaction = get_latest_transaction( + db.session, "draft", msg.id, default_namespace.id + ) + assert transaction.command == "delete" def test_event_insert_creates_transaction(db, default_namespace): with db.session.no_autoflush: event = add_fake_event(db.session, default_namespace.id) - transaction = get_latest_transaction(db.session, 'event', - event.id, default_namespace.id) + transaction = get_latest_transaction( + db.session, "event", event.id, default_namespace.id + ) assert transaction.record_id == event.id - assert transaction.object_type == 'event' - assert transaction.command == 'insert' + assert transaction.object_type == "event" + assert transaction.command == "insert" def test_transactions_created_for_calendars(db, default_namespace): calendar = Calendar( - namespace_id=default_namespace.id, - name='New Calendar', - uid='uid') + namespace_id=default_namespace.id, name="New Calendar", uid="uid" + ) db.session.add(calendar) db.session.commit() - transaction = 
get_latest_transaction(db.session, 'calendar', - calendar.id, default_namespace.id) + transaction = get_latest_transaction( + db.session, "calendar", calendar.id, default_namespace.id + ) assert transaction.record_id == calendar.id - assert transaction.object_type == 'calendar' - assert transaction.command == 'insert' + assert transaction.object_type == "calendar" + assert transaction.command == "insert" - calendar.name = 'Updated Calendar' + calendar.name = "Updated Calendar" db.session.commit() - transaction = get_latest_transaction(db.session, 'calendar', - calendar.id, default_namespace.id) + transaction = get_latest_transaction( + db.session, "calendar", calendar.id, default_namespace.id + ) assert transaction.record_id == calendar.id - assert transaction.object_type == 'calendar' - assert transaction.command == 'update' + assert transaction.object_type == "calendar" + assert transaction.command == "update" db.session.delete(calendar) db.session.commit() - transaction = get_latest_transaction(db.session, 'calendar', - calendar.id, default_namespace.id) + transaction = get_latest_transaction( + db.session, "calendar", calendar.id, default_namespace.id + ) assert transaction.record_id == calendar.id - assert transaction.object_type == 'calendar' - assert transaction.command == 'delete' + assert transaction.object_type == "calendar" + assert transaction.command == "delete" def test_file_transactions(db, default_namespace): @@ -173,62 +202,70 @@ def test_file_transactions(db, default_namespace): account = default_namespace.account thread = add_fake_thread(db.session, default_namespace.id) - mime_msg = mime.create.multipart('mixed') + mime_msg = mime.create.multipart("mixed") mime_msg.append( - mime.create.text('plain', 'This is a message with attachments'), - mime.create.attachment('image/png', 'filler', 'attached_image.png', - 'attachment'), - mime.create.attachment('application/pdf', 'filler', - 'attached_file.pdf', 'attachment') + mime.create.text("plain", "This is a message with attachments"), + mime.create.attachment( + "image/png", "filler", "attached_image.png", "attachment" + ), + mime.create.attachment( + "application/pdf", "filler", "attached_file.pdf", "attachment" + ), + ) + msg = Message.create_from_synced( + account, 22, "[Gmail]/All Mail", datetime.utcnow(), mime_msg.to_string() ) - msg = Message.create_from_synced(account, 22, '[Gmail]/All Mail', - datetime.utcnow(), mime_msg.to_string()) msg.thread = thread db.session.add(msg) db.session.commit() assert len(msg.parts) == 2 - assert all(part.content_disposition == 'attachment' for part in msg.parts) + assert all(part.content_disposition == "attachment" for part in msg.parts) block_ids = [part.block.id for part in msg.parts] with db.session.no_autoflush: - transaction = get_latest_transaction(db.session, 'file', block_ids[0], - default_namespace.id) - assert transaction.command == 'insert' + transaction = get_latest_transaction( + db.session, "file", block_ids[0], default_namespace.id + ) + assert transaction.command == "insert" - transaction = get_latest_transaction(db.session, 'file', block_ids[1], - default_namespace.id) - assert transaction.command == 'insert' + transaction = get_latest_transaction( + db.session, "file", block_ids[1], default_namespace.id + ) + assert transaction.command == "insert" def test_account_transactions(db, default_namespace): account = default_namespace.account - transaction = get_latest_transaction(db.session, 'account', account.id, - default_namespace.id) - assert transaction.command == 
'insert' + transaction = get_latest_transaction( + db.session, "account", account.id, default_namespace.id + ) + assert transaction.command == "insert" transaction_id = transaction.id with db.session.no_autoflush: account.last_synced_events = datetime.utcnow() db.session.commit() - transaction = get_latest_transaction(db.session, 'account', account.id, - default_namespace.id) + transaction = get_latest_transaction( + db.session, "account", account.id, default_namespace.id + ) assert transaction.id == transaction_id - account.sync_state = 'invalid' + account.sync_state = "invalid" db.session.commit() - transaction = get_latest_transaction(db.session, 'account', account.id, - default_namespace.id) + transaction = get_latest_transaction( + db.session, "account", account.id, default_namespace.id + ) assert transaction.id != transaction_id - assert transaction.command == 'update' + assert transaction.command == "update" - account.sync_host = 'anewhost' + account.sync_host = "anewhost" db.session.commit() - same_transaction = get_latest_transaction(db.session, 'account', - account.id, - default_namespace.id) + same_transaction = get_latest_transaction( + db.session, "account", account.id, default_namespace.id + ) assert same_transaction.id == transaction.id @@ -238,23 +275,26 @@ def test_object_deletions_create_transaction(db, default_namespace): msg = add_fake_message(db.session, default_namespace.id, thr) db.session.delete(msg) db.session.commit() - transaction = get_latest_transaction(db.session, 'message', msg.id, - default_namespace.id) + transaction = get_latest_transaction( + db.session, "message", msg.id, default_namespace.id + ) assert transaction.record_id == msg.id - assert transaction.object_type == 'message' - assert transaction.command == 'delete' + assert transaction.object_type == "message" + assert transaction.command == "delete" db.session.delete(thr) db.session.commit() - transaction = get_latest_transaction(db.session, 'thread', thr.id, - default_namespace.id) + transaction = get_latest_transaction( + db.session, "thread", thr.id, default_namespace.id + ) assert transaction.record_id == thr.id - assert transaction.object_type == 'thread' - assert transaction.command == 'delete' + assert transaction.object_type == "thread" + assert transaction.command == "delete" def test_transaction_creation_for_self_referential_message_relationship( - db, default_namespace): + db, default_namespace +): # Make sure that updating the self-refential relationship # `Message.reply_to_message` does not create a spurious update delta for # the parent message. @@ -265,11 +305,12 @@ def test_transaction_creation_for_self_referential_message_relationship( db.session.commit() assert reply.reply_to_message_id is not None assert msg.reply_to_message_id is None - transaction = get_latest_transaction(db.session, 'message', msg.id, - default_namespace.id) + transaction = get_latest_transaction( + db.session, "message", msg.id, default_namespace.id + ) assert transaction.record_id == msg.id - assert transaction.object_type == 'message' - assert transaction.command == 'insert' + assert transaction.object_type == "message" + assert transaction.command == "insert" def test_transaction_objects_mapped_for_all_models(db, default_namespace): @@ -278,27 +319,29 @@ def test_transaction_objects_mapped_for_all_models(db, default_namespace): transaction_objects() function. 
""" - assert set(HasRevisions.__subclasses__()).issubset( - transaction_objects().values()) + assert set(HasRevisions.__subclasses__()).issubset(transaction_objects().values()) def test_accounttransactions(db, default_namespace): account = default_namespace.account - transaction = get_latest_transaction(db.session, 'account', - default_namespace.account.id, - default_namespace.id) - assert transaction.command == 'insert' + transaction = get_latest_transaction( + db.session, "account", default_namespace.account.id, default_namespace.id + ) + assert transaction.command == "insert" transaction_id = transaction.id # Verify an AccountTransaction is created - accounttransactions = db.session.query(AccountTransaction).filter( - AccountTransaction.namespace_id == default_namespace.id).all() + accounttransactions = ( + db.session.query(AccountTransaction) + .filter(AccountTransaction.namespace_id == default_namespace.id) + .all() + ) assert len(accounttransactions) == 1 accounttransaction = accounttransactions[0] assert accounttransaction.namespace_id == default_namespace.id - assert accounttransaction.command == 'insert' - assert accounttransaction.object_type == 'account' + assert accounttransaction.command == "insert" + assert accounttransaction.object_type == "account" assert accounttransaction.record_id == default_namespace.account.id accounttransaction_id = accounttransaction.id @@ -307,37 +350,47 @@ def test_accounttransactions(db, default_namespace): account.last_synced_events = datetime.utcnow() db.session.commit() - transaction = get_latest_transaction(db.session, 'account', - default_namespace.account.id, - default_namespace.id) + transaction = get_latest_transaction( + db.session, "account", default_namespace.account.id, default_namespace.id + ) assert transaction.id == transaction_id - accounttransactions = db.session.query(AccountTransaction).filter( - AccountTransaction.namespace_id == default_namespace.id).all() + accounttransactions = ( + db.session.query(AccountTransaction) + .filter(AccountTransaction.namespace_id == default_namespace.id) + .all() + ) assert len(accounttransactions) == 1 assert accounttransactions[0].id == accounttransaction_id # Only Transaction record created thread = add_fake_thread(db.session, default_namespace.id) - transaction = get_latest_transaction(db.session, 'thread', thread.id, - default_namespace.id) + transaction = get_latest_transaction( + db.session, "thread", thread.id, default_namespace.id + ) assert transaction.id > transaction_id - accounttransactions = db.session.query(AccountTransaction).filter( - AccountTransaction.namespace_id == default_namespace.id).all() + accounttransactions = ( + db.session.query(AccountTransaction) + .filter(AccountTransaction.namespace_id == default_namespace.id) + .all() + ) assert len(accounttransactions) == 1 assert accounttransactions[0].id == accounttransaction_id # Both Transaction or AccountTransaction records created - account.sync_state = 'invalid' + account.sync_state = "invalid" db.session.commit() - transaction = get_latest_transaction(db.session, 'account', - default_namespace.account.id, - default_namespace.id) + transaction = get_latest_transaction( + db.session, "account", default_namespace.account.id, default_namespace.id + ) assert transaction.id > transaction_id - assert transaction.command == 'update' - accounttransactions = db.session.query(AccountTransaction).filter( - AccountTransaction.namespace_id == default_namespace.id).all() + assert transaction.command == "update" + accounttransactions = 
( + db.session.query(AccountTransaction) + .filter(AccountTransaction.namespace_id == default_namespace.id) + .all() + ) assert len(accounttransactions) == 2 assert accounttransactions[1].id != accounttransaction_id - assert accounttransactions[1].command == 'update' + assert accounttransactions[1].command == "update" diff --git a/inbox/test/util/base.py b/inbox/test/util/base.py index 553f3b918..801149d26 100644 --- a/inbox/test/util/base.py +++ b/inbox/test/util/base.py @@ -18,33 +18,36 @@ def absolute_path(path): """ return os.path.abspath( - os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', path)) + os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", path) + ) def make_config(tmpdir_factory): from inbox.config import config - assert 'NYLAS_ENV' in os.environ and \ - os.environ['NYLAS_ENV'] == 'test', \ - "NYLAS_ENV must be 'test' to run tests" + + assert ( + "NYLAS_ENV" in os.environ and os.environ["NYLAS_ENV"] == "test" + ), "NYLAS_ENV must be 'test' to run tests" # don't try to write test data to the module tree - config['MSG_PARTS_DIRECTORY'] = str(tmpdir_factory.mktemp("parts")) + config["MSG_PARTS_DIRECTORY"] = str(tmpdir_factory.mktemp("parts")) return config -@fixture(scope='session', autouse=True) +@fixture(scope="session", autouse=True) def config(tmpdir_factory): return make_config(tmpdir_factory) -@fixture(scope='session') +@fixture(scope="session") def dbloader(config): setup_test_db() -@yield_fixture(scope='function') +@yield_fixture(scope="function") def db(dbloader): from inbox.ignition import engine_manager from inbox.models.session import new_session + engine = engine_manager.get_for_id(0) # TODO(emfree): tests should really either instantiate their own sessions, # or take a fixture that is itself a session. @@ -53,10 +56,11 @@ def db(dbloader): engine.session.close() -@yield_fixture(scope='function') +@yield_fixture(scope="function") def empty_db(config): from inbox.ignition import engine_manager from inbox.models.session import new_session + setup_test_db() engine = engine_manager.get_for_id(0) engine.session = new_session(engine) @@ -67,7 +71,8 @@ def empty_db(config): @yield_fixture def test_client(db): from inbox.api.srv import app - app.config['TESTING'] = True + + app.config["TESTING"] = True with app.test_client() as c: yield c @@ -75,18 +80,18 @@ def test_client(db): @yield_fixture def webhooks_client(db): from inbox.api.srv import app - app.config['TESTING'] = True + + app.config["TESTING"] = True with app.test_client() as c: yield TestWebhooksClient(c) class TestWebhooksClient(object): - def __init__(self, test_client): self.client = test_client def post_data(self, path, data, headers={}): - path = '/w' + path + path = "/w" + path return self.client.post(path, data=json.dumps(data), headers=headers) @@ -98,10 +103,12 @@ def patch_network_functions(monkeypatch): """ import inbox.actions.backends + for backend in inbox.actions.backends.module_registry.values(): for method_name in backend.__all__: - monkeypatch.setattr(backend.__name__ + '.' + method_name, - lambda *args, **kwargs: None) + monkeypatch.setattr( + backend.__name__ + "." 
+ method_name, lambda *args, **kwargs: None + ) def make_default_account(db, config): @@ -113,18 +120,18 @@ def make_default_account(db, config): ns = Namespace() account = GmailAccount( - sync_host='{}:{}'.format(platform.node(), 0), - email_address='inboxapptest@gmail.com') + sync_host="{}:{}".format(platform.node(), 0), + email_address="inboxapptest@gmail.com", + ) account.namespace = ns account.create_emailed_events_calendar() - account.refresh_token = 'faketoken' + account.refresh_token = "faketoken" auth_creds = GmailAuthCredentials() - auth_creds.client_id = config.get_required('GOOGLE_OAUTH_CLIENT_ID') - auth_creds.client_secret = \ - config.get_required('GOOGLE_OAUTH_CLIENT_SECRET') - auth_creds.refresh_token = 'faketoken' - auth_creds.g_id_token = 'foo' + auth_creds.client_id = config.get_required("GOOGLE_OAUTH_CLIENT_ID") + auth_creds.client_secret = config.get_required("GOOGLE_OAUTH_CLIENT_SECRET") + auth_creds.refresh_token = "faketoken" + auth_creds.g_id_token = "foo" auth_creds.created_at = datetime.utcnow() auth_creds.updated_at = datetime.utcnow() auth_creds.gmailaccount = account @@ -140,6 +147,7 @@ def delete_default_accounts(db): from inbox.models.backends.gmail import GmailAccount from inbox.models.backends.gmail import GmailAuthCredentials from inbox.models import Namespace + delete_messages(db.session) db.session.rollback() db.session.query(GmailAccount).delete() @@ -148,50 +156,57 @@ def delete_default_accounts(db): db.session.commit() -@yield_fixture(scope='function') +@yield_fixture(scope="function") def default_account(db, config, redis_mock): yield make_default_account(db, config) delete_default_accounts(db) -@yield_fixture(scope='function') +@yield_fixture(scope="function") def default_namespace(db, default_account): yield default_account.namespace -@yield_fixture(scope='function') +@yield_fixture(scope="function") def default_accounts(db, config, redis_mock): yield [make_default_account(db, config) for _ in range(3)] delete_default_accounts(db) -@yield_fixture(scope='function') +@yield_fixture(scope="function") def default_namespaces(db, default_accounts): yield [account.namespace for account in default_accounts] -@yield_fixture(scope='function') +@yield_fixture(scope="function") def generic_account(db): yield add_generic_imap_account(db.session) -@yield_fixture(scope='function') +@yield_fixture(scope="function") def gmail_account(db): - yield add_fake_gmail_account(db.session, - email_address='almondsunshine', - refresh_token='tearsofgold', - password='COyPtHmj9E9bvGdN') + yield add_fake_gmail_account( + db.session, + email_address="almondsunshine", + refresh_token="tearsofgold", + password="COyPtHmj9E9bvGdN", + ) delete_gmail_accounts(db.session) -@fixture(scope='function') +@fixture(scope="function") def contact_sync(config, db, default_account): from inbox.contacts.remote_sync import ContactSync - return ContactSync('inboxapptest@gmail.com', 'gmail', default_account.id, - default_account.namespace.id) + + return ContactSync( + "inboxapptest@gmail.com", + "gmail", + default_account.id, + default_account.namespace.id, + ) -@fixture(scope='function') +@fixture(scope="function") def contacts_provider(config, db): return ContactsProviderStub() @@ -205,7 +220,7 @@ class ContactsProviderStub(object): """ - def __init__(self, provider_name='test_provider'): + def __init__(self, provider_name="test_provider"): self._contacts = [] self._next_uid = 1 self.PROVIDER_NAME = provider_name @@ -213,41 +228,47 @@ def __init__(self, provider_name='test_provider'): def 
supply_contact(self, name, email_address, deleted=False): from inbox.models import Contact - self._contacts.append(Contact(namespace_id=1, - uid=str(self._next_uid), - provider_name=self.PROVIDER_NAME, - name=name, - email_address=email_address, - deleted=deleted)) + + self._contacts.append( + Contact( + namespace_id=1, + uid=str(self._next_uid), + provider_name=self.PROVIDER_NAME, + name=name, + email_address=email_address, + deleted=deleted, + ) + ) self._next_uid = self._get_next_uid(self._next_uid) def get_items(self, *args, **kwargs): return self._contacts -def add_fake_folder(db_session, default_account, display_name='All Mail', - name='all'): +def add_fake_folder(db_session, default_account, display_name="All Mail", name="all"): from inbox.models.folder import Folder + return Folder.find_or_create(db_session, default_account, display_name, name) -def add_fake_label(db_session, default_account, display_name='My Label', - name=None): +def add_fake_label(db_session, default_account, display_name="My Label", name=None): from inbox.models.label import Label + return Label.find_or_create(db_session, default_account, display_name, name) -def add_generic_imap_account(db_session, email_address='test@nylas.com'): +def add_generic_imap_account(db_session, email_address="test@nylas.com"): import platform from inbox.models.backends.generic import GenericAccount from inbox.models import Namespace - account = GenericAccount(email_address=email_address, - sync_host=platform.node(), - provider='custom') - account.imap_endpoint = ('imap.custom.com', 993) - account.smtp_endpoint = ('smtp.custom.com', 587) - account.imap_password = 'bananagrams' - account.smtp_password = 'bananagrams' + + account = GenericAccount( + email_address=email_address, sync_host=platform.node(), provider="custom" + ) + account.imap_endpoint = ("imap.custom.com", 993) + account.smtp_endpoint = ("smtp.custom.com", 587) + account.imap_password = "bananagrams" + account.smtp_password = "bananagrams" account.namespace = Namespace() db_session.add(account) db_session.commit() @@ -257,30 +278,35 @@ def add_generic_imap_account(db_session, email_address='test@nylas.com'): def delete_generic_imap_accounts(db_session): from inbox.models.backends.generic import GenericAccount from inbox.models import Namespace + db_session.rollback() db_session.query(GenericAccount).delete() db_session.query(Namespace).delete() db_session.commit() -def add_fake_yahoo_account(db_session, email_address='cypresstest@yahoo.com'): +def add_fake_yahoo_account(db_session, email_address="cypresstest@yahoo.com"): import platform from inbox.models.backends.generic import GenericAccount from inbox.models import Namespace - account = GenericAccount(email_address=email_address, - sync_host=platform.node(), - provider='yahoo') - account.imap_password = 'bananagrams' - account.smtp_password = 'bananagrams' + + account = GenericAccount( + email_address=email_address, sync_host=platform.node(), provider="yahoo" + ) + account.imap_password = "bananagrams" + account.smtp_password = "bananagrams" account.namespace = Namespace() db_session.add(account) db_session.commit() return account -def add_fake_gmail_account(db_session, email_address='test@nilas.com', - refresh_token='tearsofgold', - password='COyPtHmj9E9bvGdN'): +def add_fake_gmail_account( + db_session, + email_address="test@nilas.com", + refresh_token="tearsofgold", + password="COyPtHmj9E9bvGdN", +): from inbox.models import Namespace from inbox.models.backends.gmail import GmailAccount import platform @@ -292,7 
+318,8 @@ def add_fake_gmail_account(db_session, email_address='test@nilas.com', email_address=email_address, refresh_token=refresh_token, sync_host=platform.node(), - namespace=namespace) + namespace=namespace, + ) account.password = password db_session.add(account) @@ -303,19 +330,31 @@ def add_fake_gmail_account(db_session, email_address='test@nilas.com', def delete_gmail_accounts(db_session): from inbox.models import Namespace from inbox.models.backends.gmail import GmailAccount + db_session.rollback() db_session.query(GmailAccount).delete() db_session.query(Namespace).delete() db_session.commit() -def add_fake_message(db_session, namespace_id, thread=None, from_addr=None, - to_addr=None, cc_addr=None, bcc_addr=None, - received_date=None, subject='', - body='', snippet='', g_msgid=None, - add_sent_category=False): +def add_fake_message( + db_session, + namespace_id, + thread=None, + from_addr=None, + to_addr=None, + cc_addr=None, + bcc_addr=None, + received_date=None, + subject="", + body="", + snippet="", + g_msgid=None, + add_sent_category=False, +): from inbox.models import Message, Category from inbox.contacts.processing import update_contacts_from_message + m = Message() m.namespace_id = namespace_id m.from_addr = from_addr or [] @@ -340,7 +379,8 @@ def add_fake_message(db_session, namespace_id, thread=None, from_addr=None, if add_sent_category: category = Category.find_or_create( - db_session, namespace_id, 'sent', 'sent', type_='folder') + db_session, namespace_id, "sent", "sent", type_="folder" + ) if category not in m.categories: m.categories.add(category) db_session.commit() @@ -350,14 +390,16 @@ def add_fake_message(db_session, namespace_id, thread=None, from_addr=None, def delete_messages(db_session): from inbox.models import Message + db_session.rollback() - db_session.query(Message).update({'reply_to_message_id': None}) + db_session.query(Message).update({"reply_to_message_id": None}) db_session.query(Message).delete() db_session.commit() def delete_categories(db_session): from inbox.models import Category + db_session.rollback() db_session.query(Category).delete() db_session.commit() @@ -365,6 +407,7 @@ def delete_categories(db_session): def add_fake_thread(db_session, namespace_id): from inbox.models import Thread + dt = datetime.utcnow() thr = Thread(subjectdate=dt, recentdate=dt, namespace_id=namespace_id) db_session.add(thr) @@ -374,6 +417,7 @@ def add_fake_thread(db_session, namespace_id): def delete_threads(db_session): from inbox.models import Thread + delete_messages(db_session) db_session.rollback() db_session.query(Thread).delete() @@ -382,10 +426,10 @@ def delete_threads(db_session): def add_fake_imapuid(db_session, account_id, message, folder, msg_uid): from inbox.models.backends.imap import ImapUid - imapuid = ImapUid(account_id=account_id, - message=message, - folder=folder, - msg_uid=msg_uid) + + imapuid = ImapUid( + account_id=account_id, message=message, folder=folder, msg_uid=msg_uid + ) db_session.add(imapuid) db_session.commit() return imapuid @@ -393,19 +437,29 @@ def add_fake_imapuid(db_session, account_id, message, folder, msg_uid): def delete_imapuids(db_session): from inbox.models.backends.imap import ImapUid + db_session.rollback() db_session.query(ImapUid).delete() db_session.commit() -def add_fake_calendar(db_session, namespace_id, name="Cal", - description="A Calendar", uid="UID", read_only=False): +def add_fake_calendar( + db_session, + namespace_id, + name="Cal", + description="A Calendar", + uid="UID", + read_only=False, +): from 
inbox.models import Calendar - calendar = Calendar(namespace_id=namespace_id, - name=name, - description=description, - uid=uid, - read_only=read_only) + + calendar = Calendar( + namespace_id=namespace_id, + name=name, + description=description, + uid=uid, + read_only=read_only, + ) db_session.add(calendar) db_session.commit() return calendar @@ -413,33 +467,48 @@ def add_fake_calendar(db_session, namespace_id, name="Cal", def delete_calendars(db_session): from inbox.models import Calendar + db_session.rollback() db_session.query(Calendar).delete() db_session.commit() -def add_fake_event(db_session, namespace_id, calendar=None, - title='title', description='', location='', - busy=False, read_only=False, reminders='', recurrence='', - start=None, end=None, all_day=False): +def add_fake_event( + db_session, + namespace_id, + calendar=None, + title="title", + description="", + location="", + busy=False, + read_only=False, + reminders="", + recurrence="", + start=None, + end=None, + all_day=False, +): from inbox.models import Event + start = start or datetime.utcnow() end = end or (datetime.utcnow() + timedelta(seconds=1)) calendar = calendar or add_fake_calendar(db_session, namespace_id) - event = Event(namespace_id=namespace_id, - calendar=calendar, - title=title, - description=description, - location=location, - busy=busy, - read_only=read_only, - reminders=reminders, - recurrence=recurrence, - start=start, - end=end, - all_day=all_day, - raw_data='', - uid=str(uuid.uuid4())) + event = Event( + namespace_id=namespace_id, + calendar=calendar, + title=title, + description=description, + location=location, + busy=busy, + read_only=read_only, + reminders=reminders, + recurrence=recurrence, + start=start, + end=end, + all_day=all_day, + raw_data="", + uid=str(uuid.uuid4()), + ) event.sequence_number = 0 db_session.add(event) db_session.commit() @@ -448,18 +517,24 @@ def add_fake_event(db_session, namespace_id, calendar=None, def delete_events(db_session): from inbox.models import Event + db_session.rollback() db_session.query(Event).delete() db_session.commit() -def add_fake_contact(db_session, namespace_id, name='Ben Bitdiddle', - email_address='inboxapptest@gmail.com', uid='22'): +def add_fake_contact( + db_session, + namespace_id, + name="Ben Bitdiddle", + email_address="inboxapptest@gmail.com", + uid="22", +): from inbox.models import Contact - contact = Contact(namespace_id=namespace_id, - name=name, - email_address=email_address, - uid=uid) + + contact = Contact( + namespace_id=namespace_id, name=name, email_address=email_address, uid=uid + ) db_session.add(contact) db_session.commit() @@ -468,6 +543,7 @@ def add_fake_contact(db_session, namespace_id, name='Ben Bitdiddle', def delete_contacts(db_session): from inbox.models import Contact + db_session.rollback() db_session.query(Contact).delete() db_session.commit() @@ -475,9 +551,8 @@ def delete_contacts(db_session): def add_fake_category(db_session, namespace_id, display_name, name=None): from inbox.models import Category - category = Category(namespace_id=namespace_id, - display_name=display_name, - name=name) + + category = Category(namespace_id=namespace_id, display_name=display_name, name=name) db_session.add(category) db_session.commit() return category @@ -498,22 +573,22 @@ def message(db, default_namespace, thread): @fixture def folder(db, default_account): from inbox.models.folder import Folder - return Folder.find_or_create(db.session, default_account, - '[Gmail]/All Mail', 'all') + + return Folder.find_or_create(db.session, 
default_account, "[Gmail]/All Mail", "all") @fixture def label(db, default_account): from inbox.models import Label - return Label.find_or_create(db.session, default_account, - 'Inbox', 'inbox') + + return Label.find_or_create(db.session, default_account, "Inbox", "inbox") @fixture def custom_label(db, default_account): from inbox.models import Label - return Label.find_or_create(db.session, default_account, - 'Kraftwerk', '') + + return Label.find_or_create(db.session, default_account, "Kraftwerk", "") @yield_fixture @@ -524,39 +599,44 @@ def contact(db, default_account): @yield_fixture def imapuid(db, default_account, message, folder): - yield add_fake_imapuid(db.session, default_account.id, message, - folder, 2222) + yield add_fake_imapuid(db.session, default_account.id, message, folder, 2222) delete_imapuids(db.session) -@yield_fixture(scope='function') +@yield_fixture(scope="function") def calendar(db, default_account): yield add_fake_calendar(db.session, default_account.namespace.id) delete_calendars(db.session) -@yield_fixture(scope='function') +@yield_fixture(scope="function") def other_calendar(db, default_account): - yield add_fake_calendar(db.session, default_account.namespace.id, - uid='uid2', name='Calendar 2') + yield add_fake_calendar( + db.session, default_account.namespace.id, uid="uid2", name="Calendar 2" + ) delete_calendars(db.session) -@yield_fixture(scope='function') +@yield_fixture(scope="function") def event(db, default_account): yield add_fake_event(db.session, default_account.namespace.id) delete_events(db.session) delete_calendars(db.session) -@yield_fixture(scope='function') +@yield_fixture(scope="function") def imported_event(db, default_account, message): ev = add_fake_event(db.session, default_account.namespace.id) ev.message = message - message.from_addr = [['Mick Taylor', 'mick@example.com']] - ev.owner = 'Mick Taylor ' - ev.participants = [{"email": "inboxapptest@gmail.com", - "name": "Inbox Apptest", "status": "noreply"}] + message.from_addr = [["Mick Taylor", "mick@example.com"]] + ev.owner = "Mick Taylor " + ev.participants = [ + { + "email": "inboxapptest@gmail.com", + "name": "Inbox Apptest", + "status": "noreply", + } + ] db.session.commit() yield ev delete_events(db.session) @@ -565,26 +645,29 @@ def imported_event(db, default_account, message): @fixture def mime_message(): - msg = mime.create.multipart('alternative') + msg = mime.create.multipart("alternative") msg.append( - mime.create.text('plain', 'Hello World!'), - mime.create.text('html', 'Hello World!') + mime.create.text("plain", "Hello World!"), + mime.create.text("html", "Hello World!"), ) - msg.headers['To'] = 'Alice ' - msg.headers['Cc'] = 'Bob ' - msg.headers['Subject'] = 'Hello' + msg.headers["To"] = "Alice " + msg.headers["Cc"] = "Bob " + msg.headers["Subject"] = "Hello" return msg @fixture def new_message_from_synced(db, default_account, mime_message): from inbox.models import Message + received_date = datetime(2014, 9, 22, 17, 25, 46) - new_msg = Message.create_from_synced(default_account, - 139219, - '[Gmail]/All Mail', - received_date, - mime_message.to_string()) + new_msg = Message.create_from_synced( + default_account, + 139219, + "[Gmail]/All Mail", + received_date, + mime_message.to_string(), + ) assert new_msg.received_date == received_date new_msg.is_read = True new_msg.is_starred = False @@ -593,15 +676,13 @@ def new_message_from_synced(db, default_account, mime_message): def add_fake_msg_with_calendar_part(db_session, account, ics_str, thread=None): from inbox.models 
import Message - parsed = mime.create.multipart('mixed') - parsed.append( - mime.create.attachment('text/calendar', - ics_str, - disposition=None) - ) + + parsed = mime.create.multipart("mixed") + parsed.append(mime.create.attachment("text/calendar", ics_str, disposition=None)) msg = Message.create_from_synced( - account, 22, '[Gmail]/All Mail', datetime.utcnow(), parsed.to_string()) - msg.from_addr = [('Ben Bitdiddle', 'ben@inboxapp.com')] + account, 22, "[Gmail]/All Mail", datetime.utcnow(), parsed.to_string() + ) + msg.from_addr = [("Ben Bitdiddle", "ben@inboxapp.com")] if thread is None: msg.thread = add_fake_thread(db_session, account.namespace.id) @@ -614,7 +695,7 @@ def add_fake_msg_with_calendar_part(db_session, account, ics_str, thread=None): @yield_fixture def mock_gevent_sleep(monkeypatch): - monkeypatch.setattr('gevent.sleep', mock.Mock()) + monkeypatch.setattr("gevent.sleep", mock.Mock()) yield monkeypatch.undo() @@ -624,21 +705,21 @@ def mock_client(): # Adding a couple of methods we use that mockredis doesn't support yet. def scan_iter_patch(match=None, count=100): - match = str(match).replace('*', '') + match = str(match).replace("*", "") return filter(lambda k: k.startswith(match), mock_client.keys()) mock_client.scan_iter = scan_iter_patch mock_client.reset = lambda: True def zscan_iter_patch(key, match=None): - match = str(match).replace('*', '') - return filter(lambda k: k.startswith(match), - mock_client.zrange(key, 0, -1)) + match = str(match).replace("*", "") + return filter(lambda k: k.startswith(match), mock_client.zrange(key, 0, -1)) + mock_client.zscan_iter = zscan_iter_patch return mock_client -@yield_fixture(scope='function') +@yield_fixture(scope="function") def redis_client(monkeypatch): client = mock_client() yield client @@ -646,7 +727,7 @@ def redis_client(monkeypatch): client.flushdb() -@yield_fixture(scope='function', autouse=True) +@yield_fixture(scope="function", autouse=True) def redis_mock(redis_client, monkeypatch): def set_self_client(self, *args, **kwargs): # Ensure the same 'redis' client is returned across HeartbeatStore @@ -658,12 +739,13 @@ def set_self_client(self, *args, **kwargs): def fake_redis_client(host=None, port=6379, db=1): return redis_client - monkeypatch.setattr("inbox.heartbeat.config.get_redis_client", - fake_redis_client) - monkeypatch.setattr("inbox.heartbeat.store.HeartbeatStore.__init__", - set_self_client) - monkeypatch.setattr('inbox.scheduling.event_queue._get_redis_client', - fake_redis_client) - monkeypatch.setattr('inbox.mailsync.service.SHARED_SYNC_EVENT_QUEUE_ZONE_MAP', {}) + monkeypatch.setattr("inbox.heartbeat.config.get_redis_client", fake_redis_client) + monkeypatch.setattr( + "inbox.heartbeat.store.HeartbeatStore.__init__", set_self_client + ) + monkeypatch.setattr( + "inbox.scheduling.event_queue._get_redis_client", fake_redis_client + ) + monkeypatch.setattr("inbox.mailsync.service.SHARED_SYNC_EVENT_QUEUE_ZONE_MAP", {}) yield monkeypatch.undo() diff --git a/inbox/test/util/crispin.py b/inbox/test/util/crispin.py index 14f12a267..c14ed9f59 100644 --- a/inbox/test/util/crispin.py +++ b/inbox/test/util/crispin.py @@ -1,4 +1,5 @@ # NOT a fixture because it needs args def crispin_client(account_id, account_provider): from inbox.crispin import connection_pool + return connection_pool(account_id, pool_size=1).get() diff --git a/inbox/test/webhooks/test_gpush_calendar_notifications.py b/inbox/test/webhooks/test_gpush_calendar_notifications.py index a4b2abd75..14c12c205 100644 --- 
a/inbox/test/webhooks/test_gpush_calendar_notifications.py +++ b/inbox/test/webhooks/test_gpush_calendar_notifications.py @@ -6,28 +6,29 @@ from inbox.models.calendar import Calendar from inbox.test.util.base import webhooks_client -__all__ = ['webhooks_client'] -CALENDAR_LIST_PATH = '/calendar_list_update/{}' -CALENDAR_PATH = '/calendar_update/{}' +__all__ = ["webhooks_client"] -ACCOUNT_WATCH_UUID = 'this_is_a_unique_identifier' -CALENDAR_WATCH_UUID = 'this_is_a_unique_identifier' # lol +CALENDAR_LIST_PATH = "/calendar_list_update/{}" +CALENDAR_PATH = "/calendar_update/{}" + +ACCOUNT_WATCH_UUID = "this_is_a_unique_identifier" +CALENDAR_WATCH_UUID = "this_is_a_unique_identifier" # lol SYNC_HEADERS = { - 'X-Goog-Channel-Id': 'id', - 'X-Goog-Message-Number': 1, - 'X-Goog-Resource-Id': 'not relevant', - 'X-Goog-Resource-State': 'sync', - 'X-Goog-Resource-URI': 'resource/location' + "X-Goog-Channel-Id": "id", + "X-Goog-Message-Number": 1, + "X-Goog-Resource-Id": "not relevant", + "X-Goog-Resource-State": "sync", + "X-Goog-Resource-URI": "resource/location", } UPDATE_HEADERS = { - 'X-Goog-Channel-Id': 'id', - 'X-Goog-Message-Number': 2, - 'X-Goog-Resource-Id': 'not relevant', - 'X-Goog-Resource-State': 'update', - 'X-Goog-Resource-URI': 'resource/location' + "X-Goog-Channel-Id": "id", + "X-Goog-Message-Number": 2, + "X-Goog-Resource-Id": "not relevant", + "X-Goog-Resource-State": "update", + "X-Goog-Resource-URI": "resource/location", } WATCH_EXPIRATION = 1426325213000 # 3/14/15 - utc TS in milliseconds @@ -44,10 +45,12 @@ def watched_account(db, default_account): @pytest.fixture def watched_calendar(db, default_namespace): - calendar = Calendar(name='Colander', - uid='this_is_a_uid', - read_only=True, - namespace_id=default_namespace.id) + calendar = Calendar( + name="Colander", + uid="this_is_a_uid", + read_only=True, + namespace_id=default_namespace.id, + ) calendar.new_event_watch(WATCH_EXPIRATION) db.session.add(calendar) @@ -137,8 +140,7 @@ def test_should_update_logic_no_push(db, default_account, calendar): # Updated recently - should not update default_account.last_calendar_list_sync = ten_seconds_ago calendar.last_synced = ten_seconds_ago - assert not default_account.should_update_calendars(ten_minutes, - poll_frequency) + assert not default_account.should_update_calendars(ten_minutes, poll_frequency) assert not calendar.should_update_events(ten_minutes, poll_frequency) @@ -154,8 +156,7 @@ def test_needs_new_watch_logic(db, watched_account, watched_calendar): assert not watched_calendar.needs_new_watch() -def test_receive_sync_message(db, webhooks_client, - watched_account, watched_calendar): +def test_receive_sync_message(db, webhooks_client, watched_account, watched_calendar): # Sync messages can basically be ignored # (see https://developers.google.com/google-apps/calendar/v3/push#sync) @@ -177,7 +178,7 @@ def test_calendar_update(db, webhooks_client, watched_account): watched_account.gpush_calendar_list_last_ping = datetime(2010, 1, 1) headers = UPDATE_HEADERS.copy() - headers['X-Goog-Channel-Id'] = ACCOUNT_WATCH_UUID + headers["X-Goog-Channel-Id"] = ACCOUNT_WATCH_UUID r = webhooks_client.post_data(calendar_path, {}, headers) assert r.status_code == 200 db.session.refresh(watched_account) @@ -187,12 +188,12 @@ def test_calendar_update(db, webhooks_client, watched_account): r = webhooks_client.post_data(unknown_id_path, {}, headers) assert r.status_code == 404 # account not found - invalid_id_path = CALENDAR_LIST_PATH.format('invalid_id') + invalid_id_path = 
CALENDAR_LIST_PATH.format("invalid_id") r = webhooks_client.post_data(invalid_id_path, {}, headers) assert r.status_code == 400 bad_headers = UPDATE_HEADERS.copy() - del bad_headers['X-Goog-Resource-State'] + del bad_headers["X-Goog-Resource-State"] r = webhooks_client.post_data(calendar_path, {}, bad_headers) assert r.status_code == 400 @@ -205,7 +206,7 @@ def test_event_update(db, webhooks_client, watched_calendar): watched_calendar.gpush_last_ping = datetime(2010, 1, 1) headers = UPDATE_HEADERS.copy() - headers['X-Goog-Channel-Id'] = CALENDAR_WATCH_UUID + headers["X-Goog-Channel-Id"] = CALENDAR_WATCH_UUID r = webhooks_client.post_data(event_path, {}, headers) assert r.status_code == 200 assert len(limitlion.throttle.mock_calls) == 1 @@ -226,11 +227,11 @@ def test_event_update(db, webhooks_client, watched_calendar): r = webhooks_client.post_data(bad_event_path, {}, headers) assert r.status_code == 404 # calendar not found - invalid_id_path = CALENDAR_PATH.format('invalid_id') + invalid_id_path = CALENDAR_PATH.format("invalid_id") r = webhooks_client.post_data(invalid_id_path, {}, headers) assert r.status_code == 400 bad_headers = UPDATE_HEADERS.copy() - del bad_headers['X-Goog-Resource-State'] + del bad_headers["X-Goog-Resource-State"] r = webhooks_client.post_data(event_path, {}, bad_headers) assert r.status_code == 400 diff --git a/inbox/transactions/actions.py b/inbox/transactions/actions.py index e96f6ca56..102c7991f 100644 --- a/inbox/transactions/actions.py +++ b/inbox/transactions/actions.py @@ -20,6 +20,7 @@ from nylas.logging import get_logger from nylas.logging.sentry import log_uncaught_errors + logger = get_logger() from inbox.crispin import writable_connection_pool from inbox.ignition import engine_manager @@ -28,48 +29,49 @@ from inbox.models import ActionLog, Event from inbox.util.misc import DummyContextManager from inbox.util.stats import statsd_client -from inbox.actions.base import (can_handle_multiple_records, - mark_unread, - mark_starred, - move, - change_labels, - save_draft, - update_draft, - delete_draft, - save_sent_email, - create_folder, - create_label, - update_folder, - update_label, - delete_folder, - delete_label, - delete_sent_email) -from inbox.events.actions.base import (create_event, delete_event, - update_event) +from inbox.actions.base import ( + can_handle_multiple_records, + mark_unread, + mark_starred, + move, + change_labels, + save_draft, + update_draft, + delete_draft, + save_sent_email, + create_folder, + create_label, + update_folder, + update_label, + delete_folder, + delete_label, + delete_sent_email, +) +from inbox.events.actions.base import create_event, delete_event, update_event from inbox.config import config MAIL_ACTION_FUNCTION_MAP = { - 'mark_unread': mark_unread, - 'mark_starred': mark_starred, - 'move': move, - 'change_labels': change_labels, - 'save_draft': save_draft, - 'update_draft': update_draft, - 'delete_draft': delete_draft, - 'save_sent_email': save_sent_email, - 'delete_sent_email': delete_sent_email, - 'create_folder': create_folder, - 'create_label': create_label, - 'update_folder': update_folder, - 'delete_folder': delete_folder, - 'update_label': update_label, - 'delete_label': delete_label, + "mark_unread": mark_unread, + "mark_starred": mark_starred, + "move": move, + "change_labels": change_labels, + "save_draft": save_draft, + "update_draft": update_draft, + "delete_draft": delete_draft, + "save_sent_email": save_sent_email, + "delete_sent_email": delete_sent_email, + "create_folder": create_folder, + 
"create_label": create_label, + "update_folder": update_folder, + "delete_folder": delete_folder, + "update_label": update_label, + "delete_label": delete_label, } EVENT_ACTION_FUNCTION_MAP = { - 'create_event': create_event, - 'delete_event': delete_event, - 'update_event': update_event, + "create_event": create_event, + "delete_event": delete_event, + "update_event": update_event, } @@ -95,9 +97,17 @@ def function_for_action(action): class SyncbackService(gevent.Greenlet): """Asynchronously consumes the action log and executes syncback actions.""" - def __init__(self, syncback_id, process_number, total_processes, poll_interval=1, - retry_interval=120, num_workers=NUM_PARALLEL_ACCOUNTS, - batch_size=20, fetch_batch_size=100): + def __init__( + self, + syncback_id, + process_number, + total_processes, + poll_interval=1, + retry_interval=120, + num_workers=NUM_PARALLEL_ACCOUNTS, + batch_size=20, + fetch_batch_size=100, + ): self.process_number = process_number self.total_processes = total_processes self.poll_interval = poll_interval @@ -122,19 +132,24 @@ def __init__(self, syncback_id, process_number, total_processes, poll_interval=1 # This SyncbackService performs syncback for only and all the accounts # on shards it is reponsible for; shards are divided up between # running SyncbackServices. - self.log = logger.new(component='syncback') - syncback_assignments = {int(k): v for k, v in - config.get("SYNCBACK_ASSIGNMENTS", {}).items()} + self.log = logger.new(component="syncback") + syncback_assignments = { + int(k): v for k, v in config.get("SYNCBACK_ASSIGNMENTS", {}).items() + } if syncback_id in syncback_assignments: - self.keys = [key for key in engine_manager.engines - if key in syncback_assignments[syncback_id] and - key % total_processes == process_number] + self.keys = [ + key + for key in engine_manager.engines + if key in syncback_assignments[syncback_id] + and key % total_processes == process_number + ] else: - self.log.warn("No shards assigned to syncback server", - syncback_id=syncback_id) + self.log.warn( + "No shards assigned to syncback server", syncback_id=syncback_id + ) self.keys = [] - self.log = logger.new(component='syncback') + self.log = logger.new(component="syncback") self.num_workers = num_workers self.num_idle_workers = 0 self.worker_did_finish = gevent.event.Event() @@ -156,26 +171,32 @@ def _has_recent_move_action(self, db_session, log_entries): action_log_ids = [l.id for l in log_entries] # Check if there was a pending move action that recently completed. 
threshold = datetime.utcnow() - timedelta(seconds=90) - actionlog = (db_session.query(ActionLog) + actionlog = ( + db_session.query(ActionLog) .filter( ActionLog.namespace_id == log_entry.namespace.id, ActionLog.table_name == log_entry.table_name, ActionLog.record_id == log_entry.record_id, - ActionLog.action.in_(['change_labels', 'move']), - ActionLog.status == 'successful', - ActionLog.updated_at >= threshold) - .order_by(desc(ActionLog.id)).first()) + ActionLog.action.in_(["change_labels", "move"]), + ActionLog.status == "successful", + ActionLog.updated_at >= threshold, + ) + .order_by(desc(ActionLog.id)) + .first() + ) if actionlog: account_id = log_entries[0].namespace.account.id - self.log.debug('Temporarily skipping actions', - account_id=account_id, - table_name=log_entry.table_name, - record_id=log_entry.record_id, - action_log_ids=action_log_ids, - action=log_entry.action, - other_action_id=actionlog.id, - other_action_updated_at=actionlog.updated_at.isoformat()) + self.log.debug( + "Temporarily skipping actions", + account_id=account_id, + table_name=log_entry.table_name, + record_id=log_entry.record_id, + action_log_ids=action_log_ids, + action=log_entry.action, + other_action_id=actionlog.id, + other_action_updated_at=actionlog.updated_at.isoformat(), + ) return True else: return False @@ -197,76 +218,84 @@ def _tasks_for_log_entries(self, db_session, log_entries, has_more): # XXX: Don't do this for change_labels because we use optimistic # updates. - if ( - action == 'move' and - self._has_recent_move_action(db_session, log_entries) - ): + if action == "move" and self._has_recent_move_action(db_session, log_entries): return [] - if has_more and action in ('move', 'mark_unread', 'change_labels'): + if has_more and action in ("move", "mark_unread", "change_labels"): # There may be more records to deduplicate. 
- self.log.debug('fetching more entries', - account_id=account_id, - action=action, - record_id=log_entries[0].record_id) - log_entries = db_session.query(ActionLog).filter( - ActionLog.discriminator == 'actionlog', - ActionLog.status == 'pending', - ActionLog.namespace_id == namespace.id, - ActionLog.action == action, - ActionLog.record_id == log_entries[0].record_id).\ - order_by(ActionLog.id).\ - limit(MAX_DEDUPLICATION_BATCH_SIZE).all() + self.log.debug( + "fetching more entries", + account_id=account_id, + action=action, + record_id=log_entries[0].record_id, + ) + log_entries = ( + db_session.query(ActionLog) + .filter( + ActionLog.discriminator == "actionlog", + ActionLog.status == "pending", + ActionLog.namespace_id == namespace.id, + ActionLog.action == action, + ActionLog.record_id == log_entries[0].record_id, + ) + .order_by(ActionLog.id) + .limit(MAX_DEDUPLICATION_BATCH_SIZE) + .all() + ) record_ids = [l.record_id for l in log_entries] log_entry_ids = [l.id for l in log_entries] - if action in ('move', 'mark_unread'): + if action in ("move", "mark_unread"): extra_args = log_entries[-1].extra_args - elif action == 'change_labels': + elif action == "change_labels": added_labels = set() removed_labels = set() for log_entry in log_entries: - for label in log_entry.extra_args['added_labels']: + for label in log_entry.extra_args["added_labels"]: if label in removed_labels: removed_labels.remove(label) else: added_labels.add(label) - for label in log_entry.extra_args['removed_labels']: + for label in log_entry.extra_args["removed_labels"]: if label in added_labels: added_labels.remove(label) else: removed_labels.add(label) extra_args = { - 'added_labels': list(added_labels), - 'removed_labels': list(removed_labels), + "added_labels": list(added_labels), + "removed_labels": list(removed_labels), } else: # Can't merge - tasks = [SyncbackTask(action_name=log_entry.action, - semaphore=semaphore, - action_log_ids=[log_entry.id], - record_ids=[log_entry.record_id], - account_id=account_id, - provider=namespace.account. - verbose_provider, - service=self, - retry_interval=self.retry_interval, - extra_args=log_entry.extra_args) - for log_entry in log_entries] + tasks = [ + SyncbackTask( + action_name=log_entry.action, + semaphore=semaphore, + action_log_ids=[log_entry.id], + record_ids=[log_entry.record_id], + account_id=account_id, + provider=namespace.account.verbose_provider, + service=self, + retry_interval=self.retry_interval, + extra_args=log_entry.extra_args, + ) + for log_entry in log_entries + ] return tasks - task = SyncbackTask(action_name=action, - semaphore=semaphore, - action_log_ids=log_entry_ids, - record_ids=record_ids, - account_id=account_id, - provider=namespace.account. 
- verbose_provider, - service=self, - retry_interval=self.retry_interval, - extra_args=extra_args) + task = SyncbackTask( + action_name=action, + semaphore=semaphore, + action_log_ids=log_entry_ids, + record_ids=record_ids, + account_id=account_id, + provider=namespace.account.verbose_provider, + service=self, + retry_interval=self.retry_interval, + extra_args=extra_args, + ) return [task] def _get_batch_task(self, db_session, log_entries, has_more): @@ -283,10 +312,12 @@ def _get_batch_task(self, db_session, log_entries, has_more): group_keys = [] # Used for ordering for log_entry in log_entries: - group_key = (log_entry.namespace.id, - log_entry.table_name, - log_entry.record_id, - log_entry.action) + group_key = ( + log_entry.namespace.id, + log_entry.table_name, + log_entry.record_id, + log_entry.action, + ) if group_key not in grouper: group_keys.append(group_key) grouper[group_key].append(log_entry) @@ -294,15 +325,14 @@ def _get_batch_task(self, db_session, log_entries, has_more): tasks = [] for group_key in group_keys: group_log_entries = grouper[group_key] - group_tasks = self._tasks_for_log_entries(db_session, - group_log_entries, - has_more) + group_tasks = self._tasks_for_log_entries( + db_session, group_log_entries, has_more + ) tasks += group_tasks if len(tasks) > self.batch_size: break if tasks: - return SyncbackBatchTask(semaphore, tasks[:self.batch_size], - account_id) + return SyncbackBatchTask(semaphore, tasks[: self.batch_size], account_id) def _batch_log_entries(self, db_session, log_entries): """ @@ -317,12 +347,13 @@ def _batch_log_entries(self, db_session, log_entries): for log_entry in log_entries: if log_entry is None: - self.log.error('Got no action, skipping') + self.log.error("Got no action, skipping") continue if log_entry.id in self.running_action_ids: - self.log.debug('Skipping already running action', - action_log_id=log_entry.id) + self.log.debug( + "Skipping already running action", action_log_id=log_entry.id + ) # We're already running an action for this account, so don't # queue up any additional actions for this account until the # previous batch has finished. 
@@ -334,42 +365,52 @@ def _batch_log_entries(self, db_session, log_entries): else: assert account_id is namespace.account.id - if namespace.account.sync_state in ('invalid', 'stopped'): + if namespace.account.sync_state in ("invalid", "stopped"): sync_state = namespace.account.sync_state - self.log.warning('Skipping action for {} account'.format(sync_state), - account_id=account_id, - action_log_id=log_entry.id, - action=log_entry.action) + self.log.warning( + "Skipping action for {} account".format(sync_state), + account_id=account_id, + action_log_id=log_entry.id, + action=log_entry.action, + ) - action_age = (datetime.utcnow() - - log_entry.created_at).total_seconds() + action_age = (datetime.utcnow() - log_entry.created_at).total_seconds() if action_age > INVALID_ACCOUNT_GRACE_PERIOD: - log_entry.status = 'failed' + log_entry.status = "failed" db_session.commit() - self.log.warning('Marking action as failed for {} account, older than grace period'.format(sync_state), - account_id=account_id, - action_log_id=log_entry.id, - action=log_entry.action) - statsd_client.incr('syncback.{}_failed.total'.format(sync_state)) - statsd_client.incr('syncback.{}_failed.{}'.format(sync_state, account_id)) + self.log.warning( + "Marking action as failed for {} account, older than grace period".format( + sync_state + ), + account_id=account_id, + action_log_id=log_entry.id, + action=log_entry.action, + ) + statsd_client.incr("syncback.{}_failed.total".format(sync_state)) + statsd_client.incr( + "syncback.{}_failed.{}".format(sync_state, account_id) + ) continue # If there is a recently failed action, don't execute any actions # for this account. if log_entry.retries > 0: - action_updated_age = (datetime.utcnow() - - log_entry.updated_at).total_seconds() + action_updated_age = ( + datetime.utcnow() - log_entry.updated_at + ).total_seconds() # TODO(T6974): We might want to do some kind of exponential # backoff with jitter to avoid the thundering herd problem if a # provider suddenly starts having issues for a short period of # time. 
if action_updated_age < self.retry_interval: - self.log.info('Skipping tasks due to recently failed action', - account_id=account_id, - action_log_id=log_entry.id, - retries=log_entry.retries) + self.log.info( + "Skipping tasks due to recently failed action", + account_id=account_id, + action_log_id=log_entry.id, + retries=log_entry.retries, + ) return valid_log_entries.append(log_entry) @@ -379,14 +420,16 @@ def _batch_log_entries(self, db_session, log_entries): return for task in batch_task.tasks: self.running_action_ids.update(task.action_log_ids) - self.log.debug('Syncback added task', - process=self.process_number, - account_id=account_id, - action_log_ids=task.action_log_ids, - num_actions=len(task.action_log_ids), - msg=task.action_name, - task_count=self.task_queue.qsize(), - extra_args=task.extra_args) + self.log.debug( + "Syncback added task", + process=self.process_number, + account_id=account_id, + action_log_ids=task.action_log_ids, + num_actions=len(task.action_log_ids), + msg=task.action_name, + task_count=self.task_queue.qsize(), + extra_args=task.extra_args, + ) return batch_task def _process_log(self): @@ -394,9 +437,15 @@ def _process_log(self): with session_scope_by_shard_id(key) as db_session: # Get the list of namespace ids with pending actions - namespace_ids = [ns_id[0] for ns_id in db_session.query(ActionLog.namespace_id).filter( - ActionLog.discriminator == 'actionlog', - ActionLog.status == 'pending').distinct()] + namespace_ids = [ + ns_id[0] + for ns_id in db_session.query(ActionLog.namespace_id) + .filter( + ActionLog.discriminator == "actionlog", + ActionLog.status == "pending", + ) + .distinct() + ] # Pick NUM_PARALLEL_ACCOUNTS randomly to make sure we're # executing actions equally for each namespace_id --- we @@ -406,16 +455,22 @@ def _process_log(self): if len(namespace_ids) <= NUM_PARALLEL_ACCOUNTS: namespaces_to_process = namespace_ids else: - namespaces_to_process = random.sample(namespace_ids, - NUM_PARALLEL_ACCOUNTS) + namespaces_to_process = random.sample( + namespace_ids, NUM_PARALLEL_ACCOUNTS + ) for ns_id in namespaces_to_process: # The discriminator filter restricts actions to IMAP. EAS # uses a different system. 
- query = db_session.query(ActionLog).filter( - ActionLog.discriminator == 'actionlog', - ActionLog.status == 'pending', - ActionLog.namespace_id == ns_id).order_by(ActionLog.id).\ - limit(self.fetch_batch_size) + query = ( + db_session.query(ActionLog) + .filter( + ActionLog.discriminator == "actionlog", + ActionLog.status == "pending", + ActionLog.namespace_id == ns_id, + ) + .order_by(ActionLog.id) + .limit(self.fetch_batch_size) + ) task = self._batch_log_entries(db_session, query.all()) if task is not None: self.task_queue.put(task) @@ -443,10 +498,12 @@ def stop(self): self.workers.kill() def _run(self): - self.log.info('Starting syncback service', - process_num=self.process_number, - total_processes=self.total_processes, - keys=self.keys) + self.log.info( + "Starting syncback service", + process_num=self.process_number, + total_processes=self.total_processes, + keys=self.keys, + ) while self.keep_running: retry_with_logging(self._run_impl, self.log) @@ -465,7 +522,6 @@ def __del__(self): class SyncbackBatchTask(object): - def __init__(self, semaphore, tasks, account_id): self.semaphore = semaphore self.tasks = tasks @@ -481,14 +537,18 @@ def execute(self): log = logger.new() with self.semaphore: with self._crispin_client_or_none() as crispin_client: - log.debug("Syncback running batch of actions", - num_actions=len(self.tasks), - account_id=self.account_id) + log.debug( + "Syncback running batch of actions", + num_actions=len(self.tasks), + account_id=self.account_id, + ) for task in self.tasks: task.crispin_client = crispin_client if not task.execute_with_lock(): - log.info("Pausing syncback tasks due to error", - account_id=self.account_id) + log.info( + "Pausing syncback tasks due to error", + account_id=self.account_id, + ) # Stop executing further actions for an account if any # failed. break @@ -501,8 +561,7 @@ def timeout(self, per_task_timeout): @property def action_log_ids(self): - return [entry for task in self.tasks - for entry in task.action_log_ids] + return [entry for task in self.tasks for entry in task.action_log_ids] class SyncbackTask(object): @@ -521,9 +580,18 @@ class SyncbackTask(object): """ - def __init__(self, action_name, semaphore, action_log_ids, record_ids, - account_id, provider, service, retry_interval=30, - extra_args=None): + def __init__( + self, + action_name, + semaphore, + action_log_ids, + record_ids, + account_id, + provider, + service, + retry_interval=30, + extra_args=None, + ): self.parent_service = weakref.ref(service) self.action_name = action_name self.semaphore = semaphore @@ -540,23 +608,25 @@ def try_merge_with(self, other): if self.func != other.func: return None - if self.action_name == 'change_labels': - my_removed_labels = set(self.extra_args['removed_labels']) - other_removed_labels = set(other.extra_args['removed_labels']) + if self.action_name == "change_labels": + my_removed_labels = set(self.extra_args["removed_labels"]) + other_removed_labels = set(other.extra_args["removed_labels"]) if my_removed_labels != other_removed_labels: return None - my_added_labels = set(self.extra_args['added_labels']) - other_added_labels = set(other.extra_args['added_labels']) + my_added_labels = set(self.extra_args["added_labels"]) + other_added_labels = set(other.extra_args["added_labels"]) if my_added_labels != other_added_labels: return None # If anything seems fishy, conservatively return None. 
- if (self.provider != other.provider or - self.action_log_ids == other.action_log_ids or - self.record_ids == other.record_ids or - self.account_id != other.account_id or - self.action_name != other.action_name): + if ( + self.provider != other.provider + or self.action_log_ids == other.action_log_ids + or self.record_ids == other.record_ids + or self.account_id != other.account_id + or self.action_name != other.action_name + ): return None return SyncbackTask( self.action_name, @@ -567,14 +637,14 @@ def try_merge_with(self, other): self.provider, self.parent_service(), self.retry_interval, - self.extra_args + self.extra_args, ) return None def _log_to_statsd(self, action_log_status, latency=None): metric_names = [ "syncback.overall.{}".format(action_log_status), - "syncback.providers.{}.{}".format(self.provider, action_log_status) + "syncback.providers.{}.{}".format(self.provider, action_log_status), ] for metric in metric_names: @@ -590,45 +660,55 @@ def execute_with_lock(self): record_ids=list(set(self.record_ids)), action_log_ids=self.action_log_ids[:100], n_action_log_ids=len(self.action_log_ids), - action=self.action_name, account_id=self.account_id, - extra_args=self.extra_args) + action=self.action_name, + account_id=self.account_id, + extra_args=self.extra_args, + ) # Double-check that the action is still pending. # Although the task queue is populated based on pending actions, it's # possible that the processing of one action involved marking other # actions as failed. - records_to_process, action_ids_to_process = self._get_records_and_actions_to_process() + ( + records_to_process, + action_ids_to_process, + ) = self._get_records_and_actions_to_process() if len(action_ids_to_process) == 0: return True try: before, after = self._execute_timed_action(records_to_process) - self.log.debug("executing action", - action_log_ids=action_ids_to_process) + self.log.debug("executing action", action_log_ids=action_ids_to_process) with session_scope(self.account_id) as db_session: - action_log_entries = db_session.query(ActionLog). \ - filter(ActionLog.id.in_(action_ids_to_process)) + action_log_entries = db_session.query(ActionLog).filter( + ActionLog.id.in_(action_ids_to_process) + ) max_latency = max_func_latency = 0 for action_log_entry in action_log_entries: latency, func_latency = self._mark_action_as_successful( - action_log_entry, before, after, db_session) + action_log_entry, before, after, db_session + ) if latency > max_latency: max_latency = latency if func_latency > max_func_latency: max_func_latency = func_latency - self.log.info('syncback action completed', - latency=max_latency, - process=self.parent_service().process_number, - func_latency=max_func_latency) + self.log.info( + "syncback action completed", + latency=max_latency, + process=self.parent_service().process_number, + func_latency=max_func_latency, + ) return True except: - log_uncaught_errors(self.log, account_id=self.account_id, - provider=self.provider) + log_uncaught_errors( + self.log, account_id=self.account_id, provider=self.provider + ) with session_scope(self.account_id) as db_session: - action_log_entries = db_session.query(ActionLog). 
\ - filter(ActionLog.id.in_(action_ids_to_process)) + action_log_entries = db_session.query(ActionLog).filter( + ActionLog.id.in_(action_ids_to_process) + ) marked_as_failed = False for action_log_entry in action_log_entries: @@ -637,8 +717,10 @@ def execute_with_lock(self): marked_as_failed = True if marked_as_failed: - self.log.debug("marking actions as failed", - action_log_ids=action_ids_to_process) + self.log.debug( + "marking actions as failed", + action_log_ids=action_ids_to_process, + ) # If we merged actions, fail them all at the same time. for action_log_entry in action_log_entries: self._mark_action_as_failed(action_log_entry, db_session) @@ -651,11 +733,12 @@ def _get_records_and_actions_to_process(self): action_ids_to_process = [] action_log_record_map = dict(zip(self.action_log_ids, self.record_ids)) with session_scope(self.account_id) as db_session: - action_log_entries = db_session.query(ActionLog). \ - filter(ActionLog.id.in_(self.action_log_ids)) + action_log_entries = db_session.query(ActionLog).filter( + ActionLog.id.in_(self.action_log_ids) + ) for action_log_entry in action_log_entries: - if action_log_entry.status != 'pending': - self.log.info('Skipping SyncbackTask, action is no longer pending') + if action_log_entry.status != "pending": + self.log.info("Skipping SyncbackTask, action is no longer pending") continue action_ids_to_process.append(action_log_entry.id) records_to_process.append(action_log_record_map[action_log_entry.id]) @@ -680,29 +763,36 @@ def _execute_timed_action(self, records_to_process): return before_func, after_func def _mark_action_as_successful(self, action_log_entry, before, after, db_session): - action_log_entry.status = 'successful' + action_log_entry.status = "successful" db_session.commit() - latency = round((datetime.utcnow() - action_log_entry.created_at).total_seconds(), 2) + latency = round( + (datetime.utcnow() - action_log_entry.created_at).total_seconds(), 2 + ) func_latency = round((after - before).total_seconds(), 2) self._log_to_statsd(action_log_entry.status, latency) return (latency, func_latency) def _mark_action_as_failed(self, action_log_entry, db_session): - self.log.critical('Max retries reached, giving up.', exc_info=True) - action_log_entry.status = 'failed' + self.log.critical("Max retries reached, giving up.", exc_info=True) + action_log_entry.status = "failed" self._log_to_statsd(action_log_entry.status) - if action_log_entry.action == 'create_event': + if action_log_entry.action == "create_event": # Creating a remote copy of the event failed. # Without it, none of the other pending actions # for this event will succeed. To prevent their # execution, preemptively mark them as failed. - actions = db_session.query(ActionLog).filter_by( - record_id=action_log_entry.record_id, - namespace_id=action_log_entry.namespace_id, - status='pending').all() + actions = ( + db_session.query(ActionLog) + .filter_by( + record_id=action_log_entry.record_id, + namespace_id=action_log_entry.namespace_id, + status="pending", + ) + .all() + ) for pending_action in actions: - pending_action.status = 'failed' + pending_action.status = "failed" # Mark the local copy as deleted so future actions can't be made. 
event = db_session.query(Event).get(action_log_entry.record_id) @@ -721,11 +811,10 @@ def execute(self): class SyncbackWorker(gevent.Greenlet): - def __init__(self, parent_service, task_timeout=60): self.parent_service = weakref.ref(parent_service) self.task_timeout = task_timeout - self.log = logger.new(component='syncback-worker') + self.log = logger.new(component="syncback-worker") gevent.Greenlet.__init__(self) def _run(self): @@ -736,8 +825,10 @@ def _run(self): self.parent_service().notify_worker_active() gevent.with_timeout(task.timeout(self.task_timeout), task.execute) except: - self.log.error('SyncbackWorker caught exception', exc_info=True, - account_id=task.account_id) + self.log.error( + "SyncbackWorker caught exception", + exc_info=True, + account_id=task.account_id, + ) finally: - self.parent_service().notify_worker_finished( - task.action_log_ids) + self.parent_service().notify_worker_finished(task.action_log_ids) diff --git a/inbox/transactions/delta_sync.py b/inbox/transactions/delta_sync.py index 8780e033b..c2d54ef03 100644 --- a/inbox/transactions/delta_sync.py +++ b/inbox/transactions/delta_sync.py @@ -12,11 +12,7 @@ from inbox.sqlalchemy_ext.util import bakery -EVENT_NAME_FOR_COMMAND = { - 'insert': 'create', - 'update': 'modify', - 'delete': 'delete' -} +EVENT_NAME_FOR_COMMAND = {"insert": "create", "update": "modify", "delete": "delete"} def get_transaction_cursor_near_timestamp(namespace_id, timestamp, db_session): @@ -57,37 +53,55 @@ def get_transaction_cursor_near_timestamp(namespace_id, timestamp, db_session): # by `id`. However, that causes MySQL to perform a potentially expensive # filesort. Instead, get transactions with timestamp *matching* the last # one before what you have, and sort those by id: - latest_timestamp = db_session.query(Transaction.created_at). \ - order_by(desc(Transaction.created_at)). \ - filter(Transaction.created_at < dt, - Transaction.namespace_id == namespace_id).limit(1).subquery() - latest_transaction = db_session.query(Transaction). \ - filter(Transaction.created_at == latest_timestamp, - Transaction.namespace_id == namespace_id). \ - order_by(desc(Transaction.id)).first() + latest_timestamp = ( + db_session.query(Transaction.created_at) + .order_by(desc(Transaction.created_at)) + .filter(Transaction.created_at < dt, Transaction.namespace_id == namespace_id) + .limit(1) + .subquery() + ) + latest_transaction = ( + db_session.query(Transaction) + .filter( + Transaction.created_at == latest_timestamp, + Transaction.namespace_id == namespace_id, + ) + .order_by(desc(Transaction.id)) + .first() + ) if latest_transaction is None: # If there are no earlier deltas, use '0' as a special stamp parameter # to signal 'process from the start of the log'. 
- return '0' + return "0" return latest_transaction.public_id def _get_last_trx_id_for_namespace(namespace_id, db_session): q = bakery(lambda session: session.query(Transaction.id)) - q += lambda q: q.filter( - Transaction.namespace_id == bindparam('namespace_id')) - q += lambda q: q.order_by(desc(Transaction.created_at)).\ - order_by(desc(Transaction.id)).limit(1) + q += lambda q: q.filter(Transaction.namespace_id == bindparam("namespace_id")) + q += ( + lambda q: q.order_by(desc(Transaction.created_at)) + .order_by(desc(Transaction.id)) + .limit(1) + ) return q(db_session).params(namespace_id=namespace_id).one()[0] -def format_transactions_after_pointer(namespace, pointer, db_session, - result_limit, exclude_types=None, - include_types=None, exclude_folders=True, - exclude_metadata=True, exclude_account=True, - expand=False, is_n1=False): +def format_transactions_after_pointer( + namespace, + pointer, + db_session, + result_limit, + exclude_types=None, + include_types=None, + exclude_folders=True, + exclude_metadata=True, + exclude_account=True, + expand=False, + is_n1=False, +): """ Return a pair (deltas, new_pointer), where deltas is a list of change events, represented as dictionaries: @@ -121,17 +135,17 @@ def format_transactions_after_pointer(namespace, pointer, db_session, # Begin backwards-compatibility shim -- suppress new object types for now, # because clients may not be able to deal with them. if exclude_folders is True: - exclude_types.update(('folder', 'label')) + exclude_types.update(("folder", "label")) if exclude_account is True: - exclude_types.add('account') + exclude_types.add("account") # End backwards-compatibility shim. # Metadata is excluded by default, and can only be included by setting the # exclude_metadata flag to False. If listed in include_types, remove it. if exclude_metadata is True: - exclude_types.add('metadata') - if include_types is not None and 'metadata' in include_types: - include_types.remove('metadata') + exclude_types.add("metadata") + if include_types is not None and "metadata" in include_types: + include_types.remove("metadata") try: last_trx = _get_last_trx_id_for_namespace(namespace.id, db_session) @@ -142,21 +156,23 @@ def format_transactions_after_pointer(namespace, pointer, db_session, return ([], pointer) while True: - transactions = db_session.query(Transaction). \ - filter( - Transaction.id > pointer, - Transaction.namespace_id == namespace.id) + transactions = db_session.query(Transaction).filter( + Transaction.id > pointer, Transaction.namespace_id == namespace.id + ) if exclude_types is not None: transactions = transactions.filter( - ~Transaction.object_type.in_(exclude_types)) + ~Transaction.object_type.in_(exclude_types) + ) if include_types is not None: transactions = transactions.filter( - Transaction.object_type.in_(include_types)) + Transaction.object_type.in_(include_types) + ) - transactions = transactions. \ - order_by(asc(Transaction.id)).limit(result_limit).all() + transactions = ( + transactions.order_by(asc(Transaction.id)).limit(result_limit).all() + ) if not transactions: return ([], pointer) @@ -174,13 +190,14 @@ def format_transactions_after_pointer(namespace, pointer, db_session, # in the list of transactions, this will only keep the latest # one (which is what we want). 
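A side note on the dedup described in the comment above, since it is easy to miss: a Python dict comprehension keeps the last occurrence of a duplicate key, so iterating transactions in ascending id order leaves the newest transaction per (record_id, command) pair in latest_trxs. A minimal, self-contained sketch of that property:

sorted_items = [(1, "a"), (2, "b"), (1, "c")]
latest = {key: value for key, value in sorted_items}
assert latest == {1: "c", 2: "b"}  # the later duplicate key overwrites the earlier value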
sorted_trxs = sorted(trxs, key=lambda t: t.id) - latest_trxs = {(trx.record_id, trx.command): trx for trx in - sorted_trxs} - oldest_trxs = {(trx.record_id, trx.command): trx for trx in - reversed(sorted_trxs)} + latest_trxs = {(trx.record_id, trx.command): trx for trx in sorted_trxs} + oldest_trxs = { + (trx.record_id, trx.command): trx for trx in reversed(sorted_trxs) + } # Load all referenced not-deleted objects. - ids_to_query = [trx.record_id for trx in latest_trxs.values() - if trx.command != 'delete'] + ids_to_query = [ + trx.record_id for trx in latest_trxs.values() if trx.command != "delete" + ] object_cls = transaction_objects()[obj_type] @@ -188,9 +205,11 @@ def format_transactions_after_pointer(namespace, pointer, db_session, # The base query for Account queries the /Namespace/ table # since the API-returned "`account`" is a `namespace` # under-the-hood. - query = db_session.query(Namespace).join(Account).filter( - Account.id.in_(ids_to_query), - Namespace.id == namespace.id) + query = ( + db_session.query(Namespace) + .join(Account) + .filter(Account.id.in_(ids_to_query), Namespace.id == namespace.id) + ) # Key by /namespace.account_id/ -- # namespace.id may not be equal to account.id @@ -199,7 +218,8 @@ def format_transactions_after_pointer(namespace, pointer, db_session, else: query = db_session.query(object_cls).filter( object_cls.id.in_(ids_to_query), - object_cls.namespace_id == namespace.id) + object_cls.namespace_id == namespace.id, + ) if object_cls == Thread: query = query.options(*Thread.api_loading_options(expand)) @@ -213,21 +233,24 @@ def format_transactions_after_pointer(namespace, pointer, db_session, for key, trx in latest_trxs.items(): oldest_trx = oldest_trxs[key] delta = { - 'object': trx.object_type, - 'event': EVENT_NAME_FOR_COMMAND[trx.command], - 'id': trx.object_public_id, - 'cursor': trx.public_id, - 'start_timestamp': oldest_trx.created_at, - 'end_timestamp': trx.created_at, + "object": trx.object_type, + "event": EVENT_NAME_FOR_COMMAND[trx.command], + "id": trx.object_public_id, + "cursor": trx.public_id, + "start_timestamp": oldest_trx.created_at, + "end_timestamp": trx.created_at, } - if trx.command != 'delete': + if trx.command != "delete": obj = objects.get(trx.record_id) if obj is None: continue repr_ = encode( - obj, namespace_public_id=namespace.public_id, - expand=expand, is_n1=is_n1) - delta['attributes'] = repr_ + obj, + namespace_public_id=namespace.public_id, + expand=expand, + is_n1=is_n1, + ) + delta["attributes"] = repr_ results.append((trx.id, delta)) @@ -243,11 +266,19 @@ def format_transactions_after_pointer(namespace, pointer, db_session, pointer = transactions[-1].id -def streaming_change_generator(namespace, poll_interval, timeout, - transaction_pointer, exclude_types=None, - include_types=None, exclude_folders=True, - exclude_metadata=True, exclude_account=True, - expand=False, is_n1=False): +def streaming_change_generator( + namespace, + poll_interval, + timeout, + transaction_pointer, + exclude_types=None, + include_types=None, + exclude_folders=True, + exclude_metadata=True, + exclude_account=True, + expand=False, + is_n1=False, +): """ Poll the transaction log for the given `namespace_id` until `timeout` expires, and yield each time new entries are detected. 
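For orientation, a single delta emitted by format_transactions_after_pointer is shaped like the dict below. The keys mirror the code above; every value here is an invented placeholder, not real data:

example_delta = {
    "object": "message",
    "event": "modify",  # EVENT_NAME_FOR_COMMAND["update"]
    "id": "placeholder-object-public-id",
    "cursor": "placeholder-transaction-public-id",
    "start_timestamp": "created_at of the oldest matching transaction",
    "end_timestamp": "created_at of the latest matching transaction",
    "attributes": {},  # API representation of the object; omitted for "delete" events
}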
@@ -269,14 +300,23 @@ def streaming_change_generator(namespace, poll_interval, timeout, while time.time() - start_time < timeout: with session_scope(namespace.id) as db_session: deltas, new_pointer = format_transactions_after_pointer( - namespace, transaction_pointer, db_session, 100, - exclude_types, include_types, exclude_folders, - exclude_metadata, exclude_account, expand=expand, is_n1=is_n1) + namespace, + transaction_pointer, + db_session, + 100, + exclude_types, + include_types, + exclude_folders, + exclude_metadata, + exclude_account, + expand=expand, + is_n1=is_n1, + ) if new_pointer is not None and new_pointer != transaction_pointer: transaction_pointer = new_pointer for delta in deltas: - yield encoder.cereal(delta) + '\n' + yield encoder.cereal(delta) + "\n" else: - yield '\n' + yield "\n" gevent.sleep(poll_interval) diff --git a/inbox/transactions/search.py b/inbox/transactions/search.py index 459c66b5b..2a5f396cd 100644 --- a/inbox/transactions/search.py +++ b/inbox/transactions/search.py @@ -12,8 +12,11 @@ from inbox.util.stats import statsd_client from inbox.models.session import session_scope_by_shard_id from inbox.models.search import ContactSearchIndexCursor -from inbox.contacts.search import (get_doc_service, DOC_UPLOAD_CHUNK_SIZE, - cloudsearch_contact_repr) +from inbox.contacts.search import ( + get_doc_service, + DOC_UPLOAD_CHUNK_SIZE, + cloudsearch_contact_repr, +) from nylas.logging import get_logger from nylas.logging.sentry import log_uncaught_errors @@ -34,7 +37,7 @@ def __init__(self, poll_interval=30, chunk_size=DOC_UPLOAD_CHUNK_SIZE): self.chunk_size = chunk_size self.transaction_pointers = {} - self.log = log.new(component='contact-search-index') + self.log = log.new(component="contact-search-index") Greenlet.__init__(self) def _report_batch_upload(self): @@ -64,21 +67,17 @@ def _publish_heartbeat(self): def _set_transaction_pointers(self): for key in engine_manager.engines: with session_scope_by_shard_id(key) as db_session: - pointer = db_session.query( - ContactSearchIndexCursor).first() + pointer = db_session.query(ContactSearchIndexCursor).first() if pointer: self.transaction_pointers[key] = pointer.transaction_id else: # Never start from 0; if the service hasn't run before # start from the latest transaction, with the expectation # that a backfill will be run separately. 
- max_id = db_session.query( - func.max(Transaction.id)).scalar() or 0 - latest_transaction = \ - db_session.query(Transaction).get(max_id) + max_id = db_session.query(func.max(Transaction.id)).scalar() or 0 + latest_transaction = db_session.query(Transaction).get(max_id) if latest_transaction: - self.transaction_pointers[ - key] = latest_transaction.id + self.transaction_pointers[key] = latest_transaction.id else: self.transaction_pointers[key] = 0 @@ -90,24 +89,25 @@ def _index_transactions(self, namespace_ids=[]): with session_scope_by_shard_id(key) as db_session: txn_query = db_session.query(Transaction).filter( Transaction.id > self.transaction_pointers[key], - Transaction.object_type == 'contact') + Transaction.object_type == "contact", + ) if namespace_ids: txn_query = txn_query.filter( - Transaction.namespace_id.in_( - namespace_ids)) - transactions = txn_query\ - .order_by(asc(Transaction.id)) \ - .limit(self.chunk_size).all() + Transaction.namespace_id.in_(namespace_ids) + ) + transactions = ( + txn_query.order_by(asc(Transaction.id)).limit(self.chunk_size).all() + ) # index up to chunk_size transactions should_sleep = False if transactions: self.index(transactions, db_session) - oldest_transaction = min( - transactions, key=lambda t: t.created_at) + oldest_transaction = min(transactions, key=lambda t: t.created_at) current_timestamp = datetime.utcnow() - latency = (current_timestamp - - oldest_transaction.created_at).total_seconds() + latency = ( + current_timestamp - oldest_transaction.created_at + ).total_seconds() self._report_transactions_latency(latency) new_pointer = transactions[-1].id self.update_pointer(new_pointer, key, db_session) @@ -116,7 +116,7 @@ def _index_transactions(self, namespace_ids=[]): should_sleep = True shard_should_sleep.append(should_sleep) if all(shard_should_sleep): - log.info('sleeping') + log.info("sleeping") sleep(self.poll_interval) def _run(self): @@ -127,8 +127,10 @@ def _run(self): try: self._set_transaction_pointers() - self.log.info('Starting contact-search-index service', - transaction_pointers=self.transaction_pointers) + self.log.info( + "Starting contact-search-index service", + transaction_pointers=self.transaction_pointers, + ) while True: self._publish_heartbeat() @@ -146,26 +148,28 @@ def index(self, transactions, db_session): docs = [] doc_service = get_doc_service() add_txns, delete_txns = partition( - lambda trx: trx.command == 'delete', transactions) - delete_docs = [{'type': 'delete', 'id': txn.record_id} - for txn in delete_txns] + lambda trx: trx.command == "delete", transactions + ) + delete_docs = [{"type": "delete", "id": txn.record_id} for txn in delete_txns] add_record_ids = [txn.record_id for txn in add_txns] - add_records = db_session.query(Contact).options( - joinedload("phone_numbers")).filter( - Contact.id.in_(add_record_ids)) - add_docs = [{'type': 'add', 'id': obj.id, - 'fields': cloudsearch_contact_repr(obj)} - for obj in add_records] + add_records = ( + db_session.query(Contact) + .options(joinedload("phone_numbers")) + .filter(Contact.id.in_(add_record_ids)) + ) + add_docs = [ + {"type": "add", "id": obj.id, "fields": cloudsearch_contact_repr(obj)} + for obj in add_records + ] docs = delete_docs + add_docs if docs: doc_service.upload_documents( - documents=json.dumps(docs), - contentType='application/json') + documents=json.dumps(docs), contentType="application/json" + ) self._report_batch_upload() - self.log.info('docs indexed', adds=len(add_docs), - deletes=len(delete_docs)) + self.log.info("docs indexed", 
adds=len(add_docs), deletes=len(delete_docs)) def update_pointer(self, new_pointer, shard_key, db_session): """ diff --git a/inbox/util/__init__.py b/inbox/util/__init__.py index 597806348..b003c0dd8 100644 --- a/inbox/util/__init__.py +++ b/inbox/util/__init__.py @@ -6,4 +6,5 @@ """ # Allow out-of-tree submodules. from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) diff --git a/inbox/util/addr.py b/inbox/util/addr.py index 277ec59ef..104b83170 100644 --- a/inbox/util/addr.py +++ b/inbox/util/addr.py @@ -25,9 +25,9 @@ def canonicalize_address(addr): return addr local_part = parsed_address.mailbox.lower() hostname = parsed_address.hostname.lower() - if hostname in ('gmail.com', 'googlemail.com'): - local_part = local_part.replace('.', '') - return '@'.join((local_part, hostname)) + if hostname in ("gmail.com", "googlemail.com"): + local_part = local_part.replace(".", "") + return "@".join((local_part, hostname)) def parse_mimepart_address_header(mimepart, header_name): diff --git a/inbox/util/blockstore.py b/inbox/util/blockstore.py index 517064ecc..f18009f9d 100644 --- a/inbox/util/blockstore.py +++ b/inbox/util/blockstore.py @@ -5,10 +5,11 @@ from inbox.config import config from inbox.util.stats import statsd_client from nylas.logging import get_logger + log = get_logger() # TODO: store AWS credentials in a better way. -STORE_MSG_ON_S3 = config.get('STORE_MESSAGES_ON_S3', None) +STORE_MSG_ON_S3 = config.get("STORE_MESSAGES_ON_S3", None) if STORE_MSG_ON_S3: from boto.s3.connection import S3Connection @@ -17,8 +18,15 @@ from inbox.util.file import mkdirp def _data_file_directory(h): - return os.path.join(config.get_required('MSG_PARTS_DIRECTORY'), - h[0], h[1], h[2], h[3], h[4], h[5]) + return os.path.join( + config.get_required("MSG_PARTS_DIRECTORY"), + h[0], + h[1], + h[2], + h[3], + h[4], + h[5], + ) def _data_file_path(h): return os.path.join(_data_file_directory(h), h) @@ -29,7 +37,7 @@ def save_to_blockstore(data_sha256, data): assert type(data) is not unicode if len(data) == 0: - log.warning('Not saving 0-length data blob') + log.warning("Not saving 0-length data blob") return if STORE_MSG_ON_S3: @@ -38,26 +46,27 @@ def save_to_blockstore(data_sha256, data): directory = _data_file_directory(data_sha256) mkdirp(directory) - with open(_data_file_path(data_sha256), 'wb') as f: + with open(_data_file_path(data_sha256), "wb") as f: f.write(data) def _save_to_s3(data_sha256, data): - assert 'TEMP_MESSAGE_STORE_BUCKET_NAME' in config, \ - 'Need temp bucket name to store message data!' + assert ( + "TEMP_MESSAGE_STORE_BUCKET_NAME" in config + ), "Need temp bucket name to store message data!" - _save_to_s3_bucket(data_sha256, - config.get('TEMP_MESSAGE_STORE_BUCKET_NAME'), data) + _save_to_s3_bucket(data_sha256, config.get("TEMP_MESSAGE_STORE_BUCKET_NAME"), data) def _save_to_s3_bucket(data_sha256, bucket_name, data): - assert 'AWS_ACCESS_KEY_ID' in config, 'Need AWS key!' - assert 'AWS_SECRET_ACCESS_KEY' in config, 'Need AWS secret!' + assert "AWS_ACCESS_KEY_ID" in config, "Need AWS key!" + assert "AWS_SECRET_ACCESS_KEY" in config, "Need AWS secret!" start = time.time() # Boto pools connections at the class level - conn = S3Connection(config.get('AWS_ACCESS_KEY_ID'), - config.get('AWS_SECRET_ACCESS_KEY')) + conn = S3Connection( + config.get("AWS_ACCESS_KEY_ID"), config.get("AWS_SECRET_ACCESS_KEY") + ) bucket = conn.get_bucket(bucket_name, validate=False) # See if it already exists; if so, don't recreate. 
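A quick illustration of the canonicalize_address behavior from inbox/util/addr.py above: dots in the mailbox are collapsed only for Gmail-hosted domains, and case is always lowered. This sketch assumes both addresses parse cleanly:

from inbox.util.addr import canonicalize_address

assert canonicalize_address("John.Doe@GMAIL.com") == "johndoe@gmail.com"
assert canonicalize_address("john.doe@example.com") == "john.doe@example.com"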
@@ -71,7 +80,7 @@ def _save_to_s3_bucket(data_sha256, bucket_name, data): end = time.time() latency_millis = (end - start) * 1000 - statsd_client.timing('s3_blockstore.save_latency', latency_millis) + statsd_client.timing("s3_blockstore.save_latency", latency_millis) def get_from_blockstore(data_sha256): @@ -82,33 +91,40 @@ def get_from_blockstore(data_sha256): if value is None: # The block may have expired. - log.warning('No data returned!') + log.warning("No data returned!") return value - assert data_sha256 == sha256(value).hexdigest(), \ - "Returned data doesn't match stored hash!" + assert ( + data_sha256 == sha256(value).hexdigest() + ), "Returned data doesn't match stored hash!" return value def _get_from_s3(data_sha256): - assert 'AWS_ACCESS_KEY_ID' in config, 'Need AWS key!' - assert 'AWS_SECRET_ACCESS_KEY' in config, 'Need AWS secret!' + assert "AWS_ACCESS_KEY_ID" in config, "Need AWS key!" + assert "AWS_SECRET_ACCESS_KEY" in config, "Need AWS secret!" - assert 'TEMP_MESSAGE_STORE_BUCKET_NAME' in config, \ - 'Need temp bucket name to store message data!' + assert ( + "TEMP_MESSAGE_STORE_BUCKET_NAME" in config + ), "Need temp bucket name to store message data!" # Try getting data from our temporary blockstore before # trying getting it from the provider. - data = _get_from_s3_bucket(data_sha256, - config.get('TEMP_MESSAGE_STORE_BUCKET_NAME')) + data = _get_from_s3_bucket( + data_sha256, config.get("TEMP_MESSAGE_STORE_BUCKET_NAME") + ) if data is not None: - log.info('Found hash in temporary blockstore!', - sha256=data_sha256, logstash_tag='s3_direct') + log.info( + "Found hash in temporary blockstore!", + sha256=data_sha256, + logstash_tag="s3_direct", + ) return data - log.info("Couldn't find data in blockstore", - sha256=data_sha256, logstash_tag='s3_direct') + log.info( + "Couldn't find data in blockstore", sha256=data_sha256, logstash_tag="s3_direct" + ) return None @@ -117,14 +133,15 @@ def _get_from_s3_bucket(data_sha256, bucket_name): if not data_sha256: return None - conn = S3Connection(config.get('AWS_ACCESS_KEY_ID'), - config.get('AWS_SECRET_ACCESS_KEY')) + conn = S3Connection( + config.get("AWS_ACCESS_KEY_ID"), config.get("AWS_SECRET_ACCESS_KEY") + ) bucket = conn.get_bucket(bucket_name, validate=False) key = bucket.get_key(data_sha256) if not key: - log.warning('No key with name: {} returned!'.format(data_sha256)) + log.warning("No key with name: {} returned!".format(data_sha256)) return return key.get_contents_as_string() @@ -135,10 +152,10 @@ def _get_from_disk(data_sha256): return None try: - with open(_data_file_path(data_sha256), 'rb') as f: + with open(_data_file_path(data_sha256), "rb") as f: return f.read() except IOError: - log.warning('No file with name: {}!'.format(data_sha256)) + log.warning("No file with name: {}!".format(data_sha256)) return @@ -147,20 +164,21 @@ def _delete_from_s3_bucket(data_sha256_hashes, bucket_name): if not data_sha256_hashes: return None - assert 'AWS_ACCESS_KEY_ID' in config, 'Need AWS key!' - assert 'AWS_SECRET_ACCESS_KEY' in config, 'Need AWS secret!' + assert "AWS_ACCESS_KEY_ID" in config, "Need AWS key!" + assert "AWS_SECRET_ACCESS_KEY" in config, "Need AWS secret!" 
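The blockstore helpers above are content-addressed: the key is the SHA-256 hex digest of the payload, and get_from_blockstore re-verifies that hash on read. A minimal round-trip sketch, assuming either the S3 or the on-disk backend is configured and the payload is a stand-in:

from hashlib import sha256
from inbox.util.blockstore import get_from_blockstore, save_to_blockstore

payload = b"raw MIME bytes"
key = sha256(payload).hexdigest()
save_to_blockstore(key, payload)
assert get_from_blockstore(key) == payload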
start = time.time() # Boto pools connections at the class level - conn = S3Connection(config.get('AWS_ACCESS_KEY_ID'), - config.get('AWS_SECRET_ACCESS_KEY')) + conn = S3Connection( + config.get("AWS_ACCESS_KEY_ID"), config.get("AWS_SECRET_ACCESS_KEY") + ) bucket = conn.get_bucket(bucket_name, validate=False) bucket.delete_keys([key for key in data_sha256_hashes], quiet=True) end = time.time() latency_millis = (end - start) * 1000 - statsd_client.timing('s3_blockstore.delete_latency', latency_millis) + statsd_client.timing("s3_blockstore.delete_latency", latency_millis) def _delete_from_disk(data_sha256): @@ -170,15 +188,16 @@ def _delete_from_disk(data_sha256): try: os.remove(_data_file_path(data_sha256)) except OSError: - log.warning('No file with name: {}!'.format(data_sha256)) + log.warning("No file with name: {}!".format(data_sha256)) def delete_from_blockstore(*data_sha256_hashes): - log.info('deleting from blockstore', sha256=data_sha256_hashes) + log.info("deleting from blockstore", sha256=data_sha256_hashes) if STORE_MSG_ON_S3: - _delete_from_s3_bucket(data_sha256_hashes, - config.get('TEMP_MESSAGE_STORE_BUCKET_NAME')) + _delete_from_s3_bucket( + data_sha256_hashes, config.get("TEMP_MESSAGE_STORE_BUCKET_NAME") + ) else: for data_sha256 in data_sha256_hashes: _delete_from_disk(data_sha256) diff --git a/inbox/util/concurrency.py b/inbox/util/concurrency.py index 5b951a03b..316911b8e 100644 --- a/inbox/util/concurrency.py +++ b/inbox/util/concurrency.py @@ -14,15 +14,12 @@ from nylas.logging import get_logger, create_error_log_context from nylas.logging.sentry import log_uncaught_errors + log = get_logger() BACKOFF_DELAY = 30 # seconds to wait before retrying after a failure -TRANSIENT_NETWORK_ERRS = ( - socket.timeout, - TimeoutError, - socket.error, - ssl.SSLError) +TRANSIENT_NETWORK_ERRS = (socket.timeout, TimeoutError, socket.error, ssl.SSLError) TRANSIENT_MYSQL_MESSAGES = ( "try restarting transaction", @@ -30,11 +27,17 @@ "Lost connection to MySQL server", "MySQL server has gone away", "Can't connect to MySQL server", - "Max connect timeout reached") + "Max connect timeout reached", +) -def retry(func, retry_classes=None, fail_classes=None, exc_callback=None, - backoff_delay=BACKOFF_DELAY): +def retry( + func, + retry_classes=None, + fail_classes=None, + exc_callback=None, + backoff_delay=BACKOFF_DELAY, +): """ Executes the callable func, retrying on uncaught exceptions matching the class filters. @@ -52,10 +55,10 @@ class filters. Configures what not to retry on. If specified, func is /not/ retried if one of these exceptions is raised. 
""" - if (fail_classes and retry_classes and - set(fail_classes).intersection(retry_classes)): - raise ValueError("Can't include exception classes in both fail_on and " - "retry_on") + if fail_classes and retry_classes and set(fail_classes).intersection(retry_classes): + raise ValueError( + "Can't include exception classes in both fail_on and " "retry_on" + ) def should_retry_on(exc): if fail_classes and isinstance(exc, tuple(fail_classes)): @@ -87,9 +90,15 @@ def wrapped(*args, **kwargs): return wrapped -def retry_with_logging(func, logger=None, retry_classes=None, - fail_classes=None, account_id=None, provider=None, - backoff_delay=BACKOFF_DELAY): +def retry_with_logging( + func, + logger=None, + retry_classes=None, + fail_classes=None, + account_id=None, + provider=None, + backoff_delay=BACKOFF_DELAY, +): # Sharing the network_errs counter between invocations of callback by # placing it inside an array: @@ -104,7 +113,9 @@ def callback(e): if isinstance(e, _mysql_exceptions.OperationalError): mysql_error = e - elif isinstance(e, StatementError) and isinstance(e.orig, _mysql_exceptions.OperationalError): + elif isinstance(e, StatementError) and isinstance( + e.orig, _mysql_exceptions.OperationalError + ): mysql_error = e.orig if mysql_error: @@ -128,12 +139,20 @@ def callback(e): account.update_sync_error(e) db_session.commit() except: - log.error('Error saving sync_error to account object', - account_id=account_id, - **create_error_log_context(sys.exc_info())) - - log_uncaught_errors(logger, account_id=account_id, provider=provider, - occurrences=occurrences[0]) - - return retry(func, exc_callback=callback, retry_classes=retry_classes, - fail_classes=fail_classes, backoff_delay=backoff_delay)() + log.error( + "Error saving sync_error to account object", + account_id=account_id, + **create_error_log_context(sys.exc_info()) + ) + + log_uncaught_errors( + logger, account_id=account_id, provider=provider, occurrences=occurrences[0] + ) + + return retry( + func, + exc_callback=callback, + retry_classes=retry_classes, + fail_classes=fail_classes, + backoff_delay=backoff_delay, + )() diff --git a/inbox/util/db.py b/inbox/util/db.py index d5abfb10c..2250d6718 100644 --- a/inbox/util/db.py +++ b/inbox/util/db.py @@ -33,19 +33,20 @@ def drop_everything(engine, keep_tables=[], reset_columns={}): column_names = reset_columns[table_name] for c in inspector.get_columns(table_name): - if c['name'] in column_names: - assert c['default'] + if c["name"] in column_names: + assert c["default"] - q = "UPDATE {0} SET {1}={2};".\ - format(table_name, c['name'], c['default']) + q = "UPDATE {0} SET {1}={2};".format( + table_name, c["name"], c["default"] + ) conn.execute(q) continue fks = [] for fk in inspector.get_foreign_keys(table_name): - if not fk['name']: + if not fk["name"]: continue - fks.append(ForeignKeyConstraint((), (), name=fk['name'])) + fks.append(ForeignKeyConstraint((), (), name=fk["name"])) t = Table(table_name, metadata, *fks) tbs.append(t) all_fks.extend(fks) diff --git a/inbox/util/debug.py b/inbox/util/debug.py index f7cd0869d..50b5f340c 100644 --- a/inbox/util/debug.py +++ b/inbox/util/debug.py @@ -13,6 +13,7 @@ def wrapper(*args, **kwargs): profiler.stop() print profiler.output_text(color=True) return r + return wrapper @@ -27,7 +28,7 @@ def handle_signal(signum, frame): print profiler.output_text(color=True) # Work around an arguable bug in pyinstrument in which output gets # frozen after the first call to profiler.output_text() - delattr(profiler, '_root_frame') + delattr(profiler, 
"_root_frame") signal.signal(signal.SIGTRAP, handle_signal) @@ -40,4 +41,4 @@ def bind_context(gr, role, account_id, *args): TODO(emfree): this should move to inbox/instrumentation. """ - gr.context = ':'.join([role, str(account_id)] + [str(arg) for arg in args]) + gr.context = ":".join([role, str(account_id)] + [str(arg) for arg in args]) diff --git a/inbox/util/encoding.py b/inbox/util/encoding.py index 9bca7219f..09eb85cc6 100644 --- a/inbox/util/encoding.py +++ b/inbox/util/encoding.py @@ -1,12 +1,12 @@ def base36encode(number): if not isinstance(number, (int, long)): - raise TypeError('number must be an integer') + raise TypeError("number must be an integer") if number < 0: - raise ValueError('number must be positive') + raise ValueError("number must be positive") - alphabet = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ' + alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ" - base36 = '' + base36 = "" while number: number, i = divmod(number, 36) base36 = alphabet[i] + base36 @@ -24,5 +24,5 @@ def unicode_safe_truncate(s, max_length): string, number or unicode string. """ if not isinstance(s, unicode): - s = str(s).decode('utf-8', 'ignore') + s = str(s).decode("utf-8", "ignore") return s.rstrip()[:max_length] diff --git a/inbox/util/file.py b/inbox/util/file.py index d8c7dfdef..0df370647 100644 --- a/inbox/util/file.py +++ b/inbox/util/file.py @@ -9,7 +9,7 @@ def safe_filename(filename): """ Strip filesystem-unfriendly characters from a filename. """ valid_chars = "-_.() {}{}".format(string.ascii_letters, string.digits) - return ''.join(c for c in filename if c in valid_chars) + return "".join(c for c in filename if c in valid_chars) # http://my.safaribooksonline.com/book/programming/python/0596001673/files/pythoncook-chp-4-sect-16 @@ -82,16 +82,17 @@ class Lock(object): Whether to block or throw IOError if the lock is grabbed multiple times. """ + TIMEOUT = 60 def __init__(self, f, block=True): if isinstance(f, file): self.filename = f.name - self.handle = f if not f.closed else open(f, 'w') + self.handle = f if not f.closed else open(f, "w") else: self.filename = f mkdirp(os.path.dirname(f)) - self.handle = open(f, 'w') + self.handle = open(f, "w") if block: self.lock_op = fcntl.LOCK_EX else: @@ -102,8 +103,11 @@ def __init__(self, f, block=True): def acquire(self): got_gevent_lock = self.gevent_lock.acquire(blocking=self.block) if not got_gevent_lock: - raise IOError("cannot acquire gevent lock; associated file is {}" - .format(self.filename)) + raise IOError( + "cannot acquire gevent lock; associated file is {}".format( + self.filename + ) + ) fcntl.flock(self.handle, self.lock_op) def release(self): diff --git a/inbox/util/fleet.py b/inbox/util/fleet.py index 9d026a665..2fec0144b 100644 --- a/inbox/util/fleet.py +++ b/inbox/util/fleet.py @@ -5,7 +5,7 @@ def get_sync_hosts_in_zone(zone, level, include_debug=False): # Hack to make local dev VM work. 
if zone is None: - return [{'name': 'localhost', 'ip_address': '127.0.0.1', 'num_procs': 4}] + return [{"name": "localhost", "ip_address": "127.0.0.1", "num_procs": 4}] instances = [] regions = ec2.regions() @@ -21,28 +21,31 @@ def get_sync_hosts_in_zone(zone, level, include_debug=False): for i in r.instances: if i.placement != zone: continue - if i.tags.get('Role') != 'sync': + if i.tags.get("Role") != "sync": continue - if i.tags.get('Level') != level: + if i.tags.get("Level") != level: continue - if not include_debug and i.tags.get('Debug') == 'true': + if not include_debug and i.tags.get("Debug") == "true": continue instances.append(i) except: print "Unable to connect to region {}".format(region.name) raise - return [{ - 'name': i.tags.get('Name'), - 'ip_address': i.private_ip_address, - 'num_procs': num_vcpus(i.instance_type) * 2, - 'debug': i.tags.get('Debug') == 'true', - } for i in instances] + return [ + { + "name": i.tags.get("Name"), + "ip_address": i.private_ip_address, + "num_procs": num_vcpus(i.instance_type) * 2, + "debug": i.tags.get("Debug") == "true", + } + for i in instances + ] def get_random_sync_host(level): instances = [] - if level not in ('staging', 'prod'): + if level not in ("staging", "prod"): return None regions = ec2.regions() @@ -55,16 +58,22 @@ def get_random_sync_host(level): instances.append(instance) instances = filter(lambda instance: instance.state == "running", instances) - instances = filter(lambda instance: instance.tags.get('Role') == "sync", instances) - instances = filter(lambda instance: instance.tags.get('Level') == level, instances) - instances = filter(lambda instance: instance.tags.get('Debug') == 'false' , instances) + instances = filter( + lambda instance: instance.tags.get("Role") == "sync", instances + ) + instances = filter( + lambda instance: instance.tags.get("Level") == level, instances + ) + instances = filter( + lambda instance: instance.tags.get("Debug") == "false", instances + ) except: print "Unable to connect to region {}".format(region.name) raise instance = random.choice(instances) - return instance.tags.get('Name') + return instance.tags.get("Name") # For whatever reason, the ec2 API doesn't provide us with an easy way to get @@ -72,19 +81,19 @@ def get_random_sync_host(level): # These numbers were grabbed from https://aws.amazon.com/ec2/instance-types/ def num_vcpus(instance_type): return { - 't2.nano': 1, - 't2.micro': 1, - 't2.small': 1, - 't2.medium': 2, - 't2.large': 2, - 'm3.medium': 1, - 'm3.large': 2, - 'm3.xlarge': 4, - 'm3.2xlarge': 8, - 'm4.large': 2, - 'm4.xlarge': 4, - 'm4.2xlarge': 8, - 'm4.4xlarge': 16, - 'm4.10xlarge': 40, - 'm4.16xlarge': 64, + "t2.nano": 1, + "t2.micro": 1, + "t2.small": 1, + "t2.medium": 2, + "t2.large": 2, + "m3.medium": 1, + "m3.large": 2, + "m3.xlarge": 4, + "m3.2xlarge": 8, + "m4.large": 2, + "m4.xlarge": 4, + "m4.2xlarge": 8, + "m4.4xlarge": 16, + "m4.10xlarge": 40, + "m4.16xlarge": 64, }[instance_type] diff --git a/inbox/util/html.py b/inbox/util/html.py index 0a0543e98..b4f7a4fe0 100644 --- a/inbox/util/html.py +++ b/inbox/util/html.py @@ -18,8 +18,8 @@ def __init__(self): def handle_starttag(self, tag, attrs): # Replace
<br>, <div>
tags by spaces - if tag.lower() in ('br', 'div'): - self.fed.append(' ') + if tag.lower() in ("br", "div"): + self.fed.append(" ") # Strip the contents of a tag when it's # in strippedTags. We can do this because # HTMLParser won't try to parse the inner @@ -36,7 +36,7 @@ def handle_data(self, d): def handle_charref(self, d): try: - if d.startswith('x'): + if d.startswith("x"): val = int(d[1:], 16) else: val = int(d) @@ -52,7 +52,7 @@ def handle_entityref(self, d): self.fed.append(val) def get_data(self): - return u''.join(self.fed) + return u"".join(self.fed) def strip_tags(html): @@ -60,39 +60,44 @@ def strip_tags(html): try: s.feed(html) except HTMLParseError: - get_logger().error('error stripping tags', raw_html=html) + get_logger().error("error stripping tags", raw_html=html) return s.get_data() + # https://djangosnippets.org/snippets/19/ -re_string = re.compile(ur'(?P[<&>])|(?P^[ \t]+)|(?P\n)|(?P(^|\s)((http|ftp)://.*?))(\s|$)', re.S | re.M | re.I | re.U) # noqa +re_string = re.compile( + ur"(?P[<&>])|(?P^[ \t]+)|(?P\n)|(?P(^|\s)((http|ftp)://.*?))(\s|$)", + re.S | re.M | re.I | re.U, +) # noqa def plaintext2html(text, tabstop=4): - assert '\r' not in text, "newlines not normalized" + assert "\r" not in text, "newlines not normalized" def do_sub(m): c = m.groupdict() - if c['htmlchars']: - return cgi.escape(c['htmlchars']) - if c['lineend']: - return '
' - elif c['space']: - t = m.group().replace('\t', u' ' * tabstop) - t = t.replace(' ', ' ') + if c["htmlchars"]: + return cgi.escape(c["htmlchars"]) + if c["lineend"]: + return "
" + elif c["space"]: + t = m.group().replace("\t", u" " * tabstop) + t = t.replace(" ", " ") return t - elif c['space'] == '\t': - return ' ' * tabstop + elif c["space"] == "\t": + return " " * tabstop else: - url = m.group('protocol') - if url.startswith(' '): - prefix = ' ' + url = m.group("protocol") + if url.startswith(" "): + prefix = " " url = url[1:] else: - prefix = '' + prefix = "" last = m.groups()[-1] - if last in ['\n', '\r', '\r\n']: - last = '
<br/>' - return u'{0}<a href="{1}">{2}</a>{3}'.format( - prefix, url, url, last) - return '\n'.join([u'<p>{0}</p>
'.format( - re.sub(re_string, do_sub, p)) for p in text.split('\n\n')]) + if last in ["\n", "\r", "\r\n"]: + last = "
" + return u'{0}{2}{3}'.format(prefix, url, url, last) + + return "\n".join( + [u"

{0}

".format(re.sub(re_string, do_sub, p)) for p in text.split("\n\n")] + ) diff --git a/inbox/util/logging_helper.py b/inbox/util/logging_helper.py index b29b17539..3080810d1 100644 --- a/inbox/util/logging_helper.py +++ b/inbox/util/logging_helper.py @@ -4,6 +4,6 @@ def reconfigure_logging(): - logging.getLogger('boto').setLevel(logging.ERROR) - logging.getLogger('boto3').setLevel(logging.ERROR) - logging.getLogger('botocore').setLevel(logging.ERROR) + logging.getLogger("boto").setLevel(logging.ERROR) + logging.getLogger("boto3").setLevel(logging.ERROR) + logging.getLogger("botocore").setLevel(logging.ERROR) diff --git a/inbox/util/misc.py b/inbox/util/misc.py index 26638e019..0de1a1fe8 100644 --- a/inbox/util/misc.py +++ b/inbox/util/misc.py @@ -11,7 +11,6 @@ class DummyContextManager(object): - def __enter__(self): return None @@ -37,13 +36,13 @@ def parse_ml_headers(headers): """ attrs = {} - attrs['List-Archive'] = headers.get('List-Archive') - attrs['List-Help'] = headers.get('List-Help') - attrs['List-Id'] = headers.get('List-Id') - attrs['List-Owner'] = headers.get('List-Owner') - attrs['List-Post'] = headers.get('List-Post') - attrs['List-Subscribe'] = headers.get('List-Subscribe') - attrs['List-Unsubscribe'] = headers.get('List-Unsubscribe') + attrs["List-Archive"] = headers.get("List-Archive") + attrs["List-Help"] = headers.get("List-Help") + attrs["List-Id"] = headers.get("List-Id") + attrs["List-Owner"] = headers.get("List-Owner") + attrs["List-Post"] = headers.get("List-Post") + attrs["List-Subscribe"] = headers.get("List-Subscribe") + attrs["List-Unsubscribe"] = headers.get("List-Unsubscribe") return attrs @@ -89,7 +88,7 @@ def dt_to_timestamp(dt): def get_internaldate(date, received): """ Get the date from the headers. """ if date is None: - other, date = received.split(';') + other, date = received.split(";") # All in UTC parsed_date = parsedate_tz(date) @@ -113,9 +112,13 @@ def timed_fn(self, *args, **kwargs): except AttributeError: fn_logger = get_logger() # out = None - fn_logger.info('[timer] {0} took {1:.3f} seconds.'.format( - str(fn), float(time.time() - start_time))) + fn_logger.info( + "[timer] {0} took {1:.3f} seconds.".format( + str(fn), float(time.time() - start_time) + ) + ) return ret + return timed_fn @@ -137,11 +140,10 @@ def load_modules(base_name, base_path): modules = [] for importer, module_name, _ in pkgutil.iter_modules(base_path): - full_module_name = '{}.{}'.format(base_name, module_name) + full_module_name = "{}.{}".format(base_name, module_name) if full_module_name not in sys.modules: - module = importer.find_module(module_name).load_module( - full_module_name) + module = importer.find_module(module_name).load_module(full_module_name) else: module = sys.modules[full_module_name] modules.append(module) @@ -159,12 +161,12 @@ def register_backends(base_name, base_path): mod_for = {} for module in modules: - if hasattr(module, 'PROVIDER'): + if hasattr(module, "PROVIDER"): provider_name = module.PROVIDER - if provider_name == 'generic': + if provider_name == "generic": for p_name, p in providers.iteritems(): - p_type = p.get('type', None) - if p_type == 'generic' and p_name not in mod_for: + p_type = p.get("type", None) + if p_type == "generic" and p_name not in mod_for: mod_for[p_name] = module else: mod_for[provider_name] = module @@ -176,7 +178,7 @@ def cleanup_subject(subject_str): """Clean-up a message subject-line, including whitespace. 
For instance, 'Re: Re: Re: Birthday party' becomes 'Birthday party'""" if subject_str is None: - return '' + return "" # TODO consider expanding to all # http://en.wikipedia.org/wiki/List_of_email_subject_abbreviations prefix_regexp = "(?i)^((re|fw|fwd|aw|wg|undeliverable|undelivered):\s*)+" @@ -189,8 +191,8 @@ def cleanup_subject(subject_str): # IMAP doesn't support nested folders and instead encodes paths inside folder # names. # imap_folder_path converts a "/" delimited path to an IMAP compatible path. -def imap_folder_path(path, separator='.', prefix=''): - folders = [folder for folder in path.split('/') if folder != ''] +def imap_folder_path(path, separator=".", prefix=""): + folders = [folder for folder in path.split("/") if folder != ""] res = None @@ -210,20 +212,20 @@ def imap_folder_path(path, separator='.', prefix=''): def strip_prefix(path, prefix): if path.startswith(prefix): - return path[len(prefix):] + return path[len(prefix) :] return path # fs_folder_path converts an IMAP compatible path to a "/" delimited path. -def fs_folder_path(path, separator='.', prefix=''): +def fs_folder_path(path, separator=".", prefix=""): if prefix: path = strip_prefix(path, prefix) folders = path.split(separator) # Remove stray '' which can happen if the folder is prefixed # i.e: INBOX.Taxes.Accounting -> .Taxes.Accounting -> ['', 'Taxes', 'Accounting'] - if folders[0] == '': + if folders[0] == "": folders.pop(0) - return '/'.join(folders) + return "/".join(folders) diff --git a/inbox/util/rdb.py b/inbox/util/rdb.py index 83addb658..9d6832af4 100644 --- a/inbox/util/rdb.py +++ b/inbox/util/rdb.py @@ -1,8 +1,10 @@ import socket import sys from gevent import monkey + monkey.patch_all(aggressive=False) import gevent_openssl + gevent_openssl.monkey_patch() from code import InteractiveConsole from nylas.logging import get_logger @@ -27,10 +29,9 @@ class RemoteConsole(InteractiveConsole): - def __init__(self, socket, locals=None): self.socket = socket - self.handle = socket.makefile('rw') + self.handle = socket.makefile("rw") InteractiveConsole.__init__(self, locals=locals) self.handle.write(doc) @@ -70,9 +71,10 @@ def interact(self, banner=None): sys.ps2 = "... " cprt = 'Type "help", "copyright", "credits" or "license" for more information.' 
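Tracing the fs_folder_path comment above ("INBOX.Taxes.Accounting -> .Taxes.Accounting -> ['', 'Taxes', 'Accounting']"), the prefix strip plus separator split yields a plain "/"-delimited path; an illustrative check using that same example:

from inbox.util.misc import fs_folder_path

assert (
    fs_folder_path("INBOX.Taxes.Accounting", separator=".", prefix="INBOX")
    == "Taxes/Accounting"
)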
# noqa if banner is None: - self.write("Python %s on %s\n%s\n(%s)\n" % - (sys.version, sys.platform, cprt, - self.__class__.__name__)) + self.write( + "Python %s on %s\n%s\n(%s)\n" + % (sys.version, sys.platform, cprt, self.__class__.__name__) + ) else: self.write("%s\n" % str(banner)) more = 0 @@ -134,5 +136,5 @@ def break_to_interpreter(host="localhost", port=None): # example usage - connect with 'netcat localhost 4444' -if __name__ == '__main__': +if __name__ == "__main__": break_to_interpreter(port=4444) diff --git a/inbox/util/sharding.py b/inbox/util/sharding.py index ebbfbde9f..d138d32a8 100644 --- a/inbox/util/sharding.py +++ b/inbox/util/sharding.py @@ -12,11 +12,14 @@ def get_shards(): def get_open_shards(): # Can't use engine_manager.engines here because it does not track # shard state (open/ closed) - database_hosts = config.get_required('DATABASE_HOSTS') + database_hosts = config.get_required("DATABASE_HOSTS") open_shards = [] for host in database_hosts: - open_shards.extend(shard['ID'] for shard in host['SHARDS'] if - shard['OPEN'] and not shard.get('DISABLED')) + open_shards.extend( + shard["ID"] + for shard in host["SHARDS"] + if shard["OPEN"] and not shard.get("DISABLED") + ) return open_shards @@ -25,12 +28,12 @@ def get_shard_schemas(): # Can't use engine_manager.engines here because it does not track # shard schemas. shard_schemas = {} - database_hosts = config.get_required('DATABASE_HOSTS') + database_hosts = config.get_required("DATABASE_HOSTS") for host in database_hosts: - for shard in host['SHARDS']: - if not shard.get('DISABLED'): - shard_id = shard['ID'] - schema_name = shard['SCHEMA_NAME'] + for shard in host["SHARDS"]: + if not shard.get("DISABLED"): + shard_id = shard["ID"] + schema_name = shard["SCHEMA_NAME"] shard_schemas[shard_id] = schema_name return shard_schemas diff --git a/inbox/util/startup.py b/inbox/util/startup.py index e370bff7b..a1a11a57c 100644 --- a/inbox/util/startup.py +++ b/inbox/util/startup.py @@ -8,12 +8,12 @@ from inbox.config import config from nylas.logging import get_logger + log = get_logger() def _absolute_path(relative_path): - return os.path.join(os.path.dirname(os.path.abspath(__file__)), - relative_path) + return os.path.join(os.path.dirname(os.path.abspath(__file__)), relative_path) def check_sudo(): @@ -48,7 +48,7 @@ def check_sudo(): def check_tz(): - if time.tzname[time.daylight] != 'UTC': + if time.tzname[time.daylight] != "UTC": sys.exit(_TZ_ERROR_TEXT) @@ -63,16 +63,13 @@ def load_overrides(file_path, loaded_config=config): try: overrides = json.load(data_file) except ValueError: - sys.exit('Failed parsing configuration file at {}' - .format(file_path)) + sys.exit("Failed parsing configuration file at {}".format(file_path)) if not overrides: - log.debug('No config overrides found.') + log.debug("No config overrides found.") return - assert isinstance(overrides, dict), \ - 'overrides must be dictionary' + assert isinstance(overrides, dict), "overrides must be dictionary" loaded_config.update(overrides) - log.debug('Imported config overrides {}'.format( - overrides.keys())) + log.debug("Imported config overrides {}".format(overrides.keys())) def preflight(): @@ -82,4 +79,5 @@ def preflight(): # Print a traceback when the process receives signal SIGSEGV, SIGFPE, # SIGABRT, SIGBUS or SIGILL import faulthandler + faulthandler.enable() diff --git a/inbox/util/stats.py b/inbox/util/stats.py index 10e59fa3a..c1ae30623 100644 --- a/inbox/util/stats.py +++ b/inbox/util/stats.py @@ -7,6 +7,8 @@ def get_statsd_client(): return 
statsd.StatsClient( str(config.get("STATSD_HOST", "localhost")), config.get("STATSD_PORT", 8125), - prefix=config.get("STATSD_PREFIX", "stats")) + prefix=config.get("STATSD_PREFIX", "stats"), + ) + statsd_client = get_statsd_client() diff --git a/inbox/util/testutils.py b/inbox/util/testutils.py index 3a9ce2649..5fcdff475 100644 --- a/inbox/util/testutils.py +++ b/inbox/util/testutils.py @@ -11,36 +11,46 @@ from inbox.basicauth import ValidationError -FILENAMES = ['muir.jpg', 'LetMeSendYouEmail.wav', 'piece-jointe.jpg', - 'andra-moi-ennepe.txt', 'long-non-ascii-filename.txt'] +FILENAMES = [ + "muir.jpg", + "LetMeSendYouEmail.wav", + "piece-jointe.jpg", + "andra-moi-ennepe.txt", + "long-non-ascii-filename.txt", +] def create_test_db(): """ Creates new, empty test databases. """ from inbox.config import config - database_hosts = config.get_required('DATABASE_HOSTS') - database_users = config.get_required('DATABASE_USERS') + database_hosts = config.get_required("DATABASE_HOSTS") + database_users = config.get_required("DATABASE_USERS") schemas = [ ( - shard['SCHEMA_NAME'], - host['HOSTNAME'], - database_users[host['HOSTNAME']]['USER'], - database_users[host['HOSTNAME']]['PASSWORD'], + shard["SCHEMA_NAME"], + host["HOSTNAME"], + database_users[host["HOSTNAME"]]["USER"], + database_users[host["HOSTNAME"]]["PASSWORD"], ) - for host in database_hosts for shard in host['SHARDS'] + for host in database_hosts + for shard in host["SHARDS"] ] # The various test databases necessarily have "test" in their name. - assert all(['test' in s for s, h, u, p in schemas]) + assert all(["test" in s for s, h, u, p in schemas]) for name, host, user, password in schemas: - cmd = 'DROP DATABASE IF EXISTS {name}; ' \ - 'CREATE DATABASE IF NOT EXISTS {name} ' \ - 'DEFAULT CHARACTER SET utf8mb4 DEFAULT COLLATE ' \ - 'utf8mb4_general_ci'.format(name=name) + cmd = ( + "DROP DATABASE IF EXISTS {name}; " + "CREATE DATABASE IF NOT EXISTS {name} " + "DEFAULT CHARACTER SET utf8mb4 DEFAULT COLLATE " + "utf8mb4_general_ci".format(name=name) + ) - subprocess.check_call('mysql -h {} -u{} -p{} ' - '-e "{}"'.format(host, user, password, cmd), shell=True) + subprocess.check_call( + "mysql -h {} -u{} -p{} " '-e "{}"'.format(host, user, password, cmd), + shell=True, + ) def setup_test_db(): @@ -55,10 +65,10 @@ def setup_test_db(): create_test_db() - database_hosts = config.get_required('DATABASE_HOSTS') + database_hosts = config.get_required("DATABASE_HOSTS") for host in database_hosts: - for shard in host['SHARDS']: - key = shard['ID'] + for shard in host["SHARDS"]: + key = shard["ID"] engine = engine_manager.engines[key] init_db(engine, key) @@ -73,7 +83,7 @@ def __str__(self): class MockDNSResolver(object): def __init__(self): - self._registry = {'mx': {}, 'ns': {}} + self._registry = {"mx": {}, "ns": {}} def _load_records(self, pkg, filename): self._registry = json.loads(pkgutil.get_data(pkg, filename)) @@ -83,42 +93,43 @@ def query(self, domain, record_type): entry = self._registry[record_type][domain] if isinstance(entry, dict): raise { - 'NoNameservers': dns.resolver.NoNameservers, - 'NXDOMAIN': dns.resolver.NXDOMAIN, - 'Timeout': dns.resolver.Timeout, - 'NoAnswer': dns.resolver.NoAnswer, - }[entry['error']]() + "NoNameservers": dns.resolver.NoNameservers, + "NXDOMAIN": dns.resolver.NXDOMAIN, + "Timeout": dns.resolver.Timeout, + "NoAnswer": dns.resolver.NoAnswer, + }[entry["error"]]() return [MockAnswer(e) for e in self._registry[record_type][domain]] @pytest.yield_fixture def mock_dns_resolver(monkeypatch): dns_resolver = 
MockDNSResolver() - monkeypatch.setattr('inbox.util.url.dns_resolver', dns_resolver) + monkeypatch.setattr("inbox.util.url.dns_resolver", dns_resolver) yield dns_resolver monkeypatch.undo() -@pytest.yield_fixture(scope='function') +@pytest.yield_fixture(scope="function") def dump_dns_queries(monkeypatch): original_query = dns.resolver.Resolver.query - query_results = {'ns': {}, 'mx': {}} + query_results = {"ns": {}, "mx": {}} def mock_query(self, domain, record_type): try: result = original_query(self, domain, record_type) except Exception as e: - query_results[record_type.lower()][domain] = {'error': type(e).__name__} + query_results[record_type.lower()][domain] = {"error": type(e).__name__} raise record_type = record_type.lower() - if record_type == 'mx': - query_results['mx'][domain] = [str(r.exchange).lower() for r in result] - elif record_type == 'ns': - query_results['ns'][domain] = [str(rdata) for rdata in result] + if record_type == "mx": + query_results["mx"][domain] = [str(r.exchange).lower() for r in result] + elif record_type == "ns": + query_results["ns"][domain] = [str(rdata) for rdata in result] else: raise RuntimeError("Unknown record type: %s" % record_type) return result - monkeypatch.setattr('dns.resolver.Resolver.query', mock_query) + + monkeypatch.setattr("dns.resolver.Resolver.query", mock_query) yield print json.dumps(query_results, indent=4, sort_keys=True) @@ -147,8 +158,8 @@ def login(self, email, password): def logout(self): pass - def list_folders(self, directory=u'', pattern=u'*'): - return [('\\All', '/', '[Gmail]/All Mail')] + def list_folders(self, directory=u"", pattern=u"*"): + return [("\\All", "/", "[Gmail]/All Mail")] def has_capability(self, capability): return False @@ -157,7 +168,7 @@ def idle_check(self, timeout=None): return [] def idle_done(self): - return ('Idle terminated', []) + return ("Idle terminated", []) def add_folder_data(self, folder_name, uids): """Adds fake UID data for the given folder.""" @@ -167,22 +178,22 @@ def search(self, criteria): assert self.selected_folder is not None assert isinstance(criteria, list) uid_dict = self._data[self.selected_folder] - if criteria == ['ALL']: + if criteria == ["ALL"]: return uid_dict.keys() - if criteria == ['X-GM-LABELS', 'inbox']: - return [k for k, v in uid_dict.items() - if ('\\Inbox,') in v['X-GM-LABELS']] - if criteria[0] == 'HEADER': + if criteria == ["X-GM-LABELS", "inbox"]: + return [k for k, v in uid_dict.items() if ("\\Inbox,") in v["X-GM-LABELS"]] + if criteria[0] == "HEADER": name, value = criteria[1:] - headerstring = '{}: {}'.format(name, value).lower() + headerstring = "{}: {}".format(name, value).lower() # Slow implementation, but whatever - return [u for u, v in uid_dict.items() if headerstring in - v['BODY[]'].lower()] - if criteria[0] in ['X-GM-THRID', 'X-GM-MSGID']: + return [ + u for u, v in uid_dict.items() if headerstring in v["BODY[]"].lower() + ] + if criteria[0] in ["X-GM-THRID", "X-GM-MSGID"]: assert len(criteria) == 2 thrid = criteria[1] return [u for u, v in uid_dict.items() if v[criteria[0]] == thrid] - raise ValueError('unsupported test criteria: {!r}'.format(criteria)) + raise ValueError("unsupported test criteria: {!r}".format(criteria)) def select_folder(self, folder_name, readonly=False): self.selected_folder = folder_name @@ -192,38 +203,37 @@ def fetch(self, items, data, modifiers=None): assert self.selected_folder is not None uid_dict = self._data[self.selected_folder] resp = {} - if 'BODY.PEEK[]' in data: - data.remove('BODY.PEEK[]') - data.append('BODY[]') 
+ if "BODY.PEEK[]" in data: + data.remove("BODY.PEEK[]") + data.append("BODY[]") if isinstance(items, (int, long)): items = [items] - elif isinstance(items, basestring) and re.match('[0-9]+:\*', items): - min_uid = int(items.split(':')[0]) + elif isinstance(items, basestring) and re.match("[0-9]+:\*", items): + min_uid = int(items.split(":")[0]) items = {u for u in uid_dict if u >= min_uid} | {max(uid_dict)} if modifiers is not None: - m = re.match('CHANGEDSINCE (?P[0-9]+)', modifiers[0]) + m = re.match("CHANGEDSINCE (?P[0-9]+)", modifiers[0]) if m: - modseq = int(m.group('modseq')) - items = {u for u in items - if uid_dict[u]['MODSEQ'][0] > modseq} + modseq = int(m.group("modseq")) + items = {u for u in items if uid_dict[u]["MODSEQ"][0] > modseq} for u in items: if u in uid_dict: - resp[u] = {k: v for k, v in uid_dict[u].items() if k in data or - k == 'MODSEQ'} + resp[u] = { + k: v for k, v in uid_dict[u].items() if k in data or k == "MODSEQ" + } return resp - def append(self, folder_name, mimemsg, flags, date, - x_gm_msgid=0, x_gm_thrid=0): + def append(self, folder_name, mimemsg, flags, date, x_gm_msgid=0, x_gm_thrid=0): uid_dict = self._data[folder_name] uidnext = max(uid_dict) if uid_dict else 1 uid_dict[uidnext] = { # TODO(emfree) save other attributes - 'BODY[]': mimemsg, - 'INTERNALDATE': None, - 'X-GM-LABELS': (), - 'FLAGS': (), - 'X-GM-MSGID': x_gm_msgid, - 'X-GM-THRID': x_gm_thrid, + "BODY[]": mimemsg, + "INTERNALDATE": None, + "X-GM-LABELS": (), + "FLAGS": (), + "X-GM-MSGID": x_gm_msgid, + "X-GM-THRID": x_gm_thrid, } def copy(self, matching_uids, folder_name): @@ -241,13 +251,9 @@ def capabilities(self): def folder_status(self, folder_name, data=None): folder_data = self._data[folder_name] lastuid = max(folder_data) if folder_data else 0 - resp = { - 'UIDNEXT': lastuid + 1, - 'UIDVALIDITY': self.uidvalidity - } - if data and 'HIGHESTMODSEQ' in data: - resp['HIGHESTMODSEQ'] = max(v['MODSEQ'] for v in - folder_data.values()) + resp = {"UIDNEXT": lastuid + 1, "UIDVALIDITY": self.uidvalidity} + if data and "HIGHESTMODSEQ" in data: + resp["HIGHESTMODSEQ"] = max(v["MODSEQ"] for v in folder_data.values()) return resp def delete_messages(self, uids, silent=False): @@ -271,16 +277,14 @@ def oauth2_login(self, email, token): def mock_imapclient(monkeypatch): conn = MockIMAPClient() monkeypatch.setattr( - 'inbox.crispin.CrispinConnectionPool._new_raw_connection', - lambda *args, **kwargs: conn + "inbox.crispin.CrispinConnectionPool._new_raw_connection", + lambda *args, **kwargs: conn, ) monkeypatch.setattr( - 'inbox.auth.oauth.create_imap_connection', - lambda *args, **kwargs: conn + "inbox.auth.oauth.create_imap_connection", lambda *args, **kwargs: conn ) monkeypatch.setattr( - 'inbox.auth.generic.create_imap_connection', - lambda *args, **kwargs: conn + "inbox.auth.generic.create_imap_connection", lambda *args, **kwargs: conn ) yield conn monkeypatch.undo() @@ -298,43 +302,44 @@ def mock_smtp_get_connection(monkeypatch): @contextlib.contextmanager def get_connection(account): yield client + monkeypatch.setattr( - 'inbox.sendmail.smtp.postel.SMTPClient._get_connection', - get_connection + "inbox.sendmail.smtp.postel.SMTPClient._get_connection", get_connection ) yield client monkeypatch.undo() -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def files(db): filenames = FILENAMES data = [] for filename in filenames: - path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', - 'test', 'data', filename).encode('utf-8') + path = os.path.join( + 
os.path.dirname(os.path.abspath(__file__)), "..", "test", "data", filename + ).encode("utf-8") data.append((filename, path)) return data -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def uploaded_file_ids(api_client, files): file_ids = [] - upload_path = '/files' + upload_path = "/files" for filename, path in files: # Mac and linux fight over filesystem encodings if we store this # filename on the fs. Work around by changing the filename we upload # instead. - if filename == 'piece-jointe.jpg': - filename = u'pièce-jointe.jpg' - elif filename == 'andra-moi-ennepe.txt': - filename = u'ἄνδρα μοι ἔννεπε' - elif filename == 'long-non-ascii-filename.txt': - filename = 100 * u'μ' - data = {'file': (open(path, 'rb'), filename)} + if filename == "piece-jointe.jpg": + filename = u"pièce-jointe.jpg" + elif filename == "andra-moi-ennepe.txt": + filename = u"ἄνδρα μοι ἔννεπε" + elif filename == "long-non-ascii-filename.txt": + filename = 100 * u"μ" + data = {"file": (open(path, "rb"), filename)} r = api_client.post_raw(upload_path, data=data) assert r.status_code == 200 - file_id = json.loads(r.data)[0]['id'] + file_id = json.loads(r.data)[0]["id"] file_ids.append(file_id) return file_ids diff --git a/inbox/util/threading.py b/inbox/util/threading.py index cc7e2abd9..272cb63d9 100644 --- a/inbox/util/threading.py +++ b/inbox/util/threading.py @@ -32,28 +32,37 @@ def fetch_corresponding_thread(db_session, namespace_id, message): # no particular order (as opposed to `joinedload`, which would use the # order_by on the Message._thread backref). We also use a limit to avoid # scanning too many / large threads. - threads = db_session.query(Thread). \ - filter(Thread.namespace_id == namespace_id, - Thread._cleaned_subject == clean_subject). \ - outerjoin(Message, Thread.messages). \ - order_by(desc(Thread.id)). \ - options(load_only('id', 'discriminator'), - contains_eager(Thread.messages).load_only( - 'from_addr', 'to_addr', 'bcc_addr', 'cc_addr', 'received_date')). \ - limit(MAX_MESSAGES_SCANNED) + threads = ( + db_session.query(Thread) + .filter( + Thread.namespace_id == namespace_id, + Thread._cleaned_subject == clean_subject, + ) + .outerjoin(Message, Thread.messages) + .order_by(desc(Thread.id)) + .options( + load_only("id", "discriminator"), + contains_eager(Thread.messages).load_only( + "from_addr", "to_addr", "bcc_addr", "cc_addr", "received_date" + ), + ) + .limit(MAX_MESSAGES_SCANNED) + ) for thread in threads: - messages = sorted(thread.messages, key=attrgetter('received_date')) + messages = sorted(thread.messages, key=attrgetter("received_date")) for match in messages: # A lot of people BCC some address when sending mass # emails so ignore BCC. match_bcc = match.bcc_addr if match.bcc_addr else [] message_bcc = message.bcc_addr if message.bcc_addr else [] - match_emails = set([t[1].lower() for t in match.participants - if t not in match_bcc]) - message_emails = set([t[1].lower() for t in message.participants - if t not in message_bcc]) + match_emails = set( + [t[1].lower() for t in match.participants if t not in match_bcc] + ) + message_emails = set( + [t[1].lower() for t in message.participants if t not in message_bcc] + ) # A conversation takes place between two or more persons. 
# Are there more than two participants in common in this @@ -69,8 +78,12 @@ def fetch_corresponding_thread(db_session, namespace_id, message): match_from = [t[1] for t in match.from_addr] match_to = [t[1] for t in match.from_addr] - if (len(message_to) == 1 and message_from == message_to and - match_from == match_to and message_to == match_from): + if ( + len(message_to) == 1 + and message_from == message_to + and match_from == match_to + and message_to == match_from + ): # Check that we're not over max thread length in this case # No need to loop through the rest of the messages # in the thread. diff --git a/inbox/util/url.py b/inbox/util/url.py index 32691e3fe..071dd1098 100644 --- a/inbox/util/url.py +++ b/inbox/util/url.py @@ -6,16 +6,16 @@ from nylas.logging import get_logger from tldextract import extract as tld_extract import re -log = get_logger('inbox.util.url') + +log = get_logger("inbox.util.url") from inbox.providers import providers # http://www.regular-expressions.info/email.html -EMAIL_REGEX = re.compile(r'[A-Z0-9._%+-]+@(?:[A-Z0-9-]+\.)+[A-Z]{2,4}', - re.IGNORECASE) +EMAIL_REGEX = re.compile(r"[A-Z0-9._%+-]+@(?:[A-Z0-9-]+\.)+[A-Z]{2,4}", re.IGNORECASE) # Use Google's Public DNS server (8.8.8.8) -GOOGLE_DNS_IP = '8.8.8.8' +GOOGLE_DNS_IP = "8.8.8.8" dns_resolver = Resolver() dns_resolver.nameservers = [GOOGLE_DNS_IP] @@ -47,16 +47,16 @@ def get_mx_domains(domain, dns_resolver=_dns_resolver): """ Retrieve and return the MX records for a domain. """ mx_records = [] try: - mx_records = dns_resolver().query(domain, 'MX') + mx_records = dns_resolver().query(domain, "MX") except NoNameservers: - log.error('NoMXservers', domain=domain) + log.error("NoMXservers", domain=domain) except NXDOMAIN: - log.error('No such domain', domain=domain) + log.error("No such domain", domain=domain) except Timeout: - log.error('Time out during resolution', domain=domain) + log.error("Time out during resolution", domain=domain) raise except NoAnswer: - log.error('No answer from provider', domain=domain) + log.error("No answer from provider", domain=domain) mx_records = _fallback_get_mx_domains(domain) return [str(rdata.exchange).lower() for rdata in mx_records] @@ -69,13 +69,14 @@ def mx_match(mx_domains, match_domains): """ # convert legible glob patterns into real regexes - match_domains = [d.replace('.', '[.]').replace('*', '.*') + '$' - for d in match_domains] + match_domains = [ + d.replace(".", "[.]").replace("*", ".*") + "$" for d in match_domains + ] for mx_domain in mx_domains: # Depending on how the MX server is configured, domain may # refer to a relative name or to an absolute one. # FIXME @karim: maybe resolve the server instead. 
- if mx_domain[-1] == '.': + if mx_domain[-1] == ".": mx_domain = mx_domain[:-1] # Match the given domain against any of the mx_server regular @@ -83,6 +84,7 @@ def mx_match(mx_domains, match_domains): # match, then we cannot confirm this as the given provider def match_filter(x): return re.match(x, mx_domain) + if any(match_filter(m) for m in match_domains): return True @@ -91,24 +93,24 @@ def match_filter(x): def provider_from_address(email_address, dns_resolver=_dns_resolver): if not EMAIL_REGEX.match(email_address): - raise InvalidEmailAddressError('Invalid email address') + raise InvalidEmailAddressError("Invalid email address") - domain = email_address.split('@')[1].lower() + domain = email_address.split("@")[1].lower() mx_domains = get_mx_domains(domain, dns_resolver) ns_records = [] try: - ns_records = dns_resolver().query(domain, 'NS') + ns_records = dns_resolver().query(domain, "NS") except NoNameservers: - log.error('NoNameservers', domain=domain) + log.error("NoNameservers", domain=domain) except NXDOMAIN: - log.error('No such domain', domain=domain) + log.error("No such domain", domain=domain) except Timeout: - log.error('Time out during resolution', domain=domain) + log.error("Time out during resolution", domain=domain) except NoAnswer: - log.error('No answer from provider', domain=domain) + log.error("No answer from provider", domain=domain) for (name, info) in providers.iteritems(): - provider_domains = info.get('domains', []) + provider_domains = info.get("domains", []) # If domain is in the list of known domains for a provider, # return the provider. @@ -117,7 +119,7 @@ def provider_from_address(email_address, dns_resolver=_dns_resolver): return name for (name, info) in providers.iteritems(): - provider_mx = info.get('mx_servers', []) + provider_mx = info.get("mx_servers", []) # If a retrieved mx_domain is in the list of stored MX domains for a # provider, return the provider. @@ -125,7 +127,7 @@ def provider_from_address(email_address, dns_resolver=_dns_resolver): return name for (name, info) in providers.iteritems(): - provider_ns = info.get('ns_servers', []) + provider_ns = info.get("ns_servers", []) # If a retrieved name server is in the list of stored name servers for # a provider, return the provider. @@ -133,7 +135,7 @@ def provider_from_address(email_address, dns_resolver=_dns_resolver): if str(rdata).lower() in provider_ns: return name - return 'unknown' + return "unknown" # From tornado.httputil @@ -150,17 +152,17 @@ def url_concat(url, args, fragments=None): return url # Strip off hashes - while url[-1] == '#': + while url[-1] == "#": url = url[:-1] - fragment_tail = '' + fragment_tail = "" if fragments: - fragment_tail = '#' + urlencode(fragments) + fragment_tail = "#" + urlencode(fragments) - args_tail = '' + args_tail = "" if args: - if url[-1] not in ('?', '&'): - args_tail += '&' if ('?' in url) else '?' + if url[-1] not in ("?", "&"): + args_tail += "&" if ("?" in url) else "?" args_tail += urlencode(args) return url + args_tail + fragment_tail @@ -182,7 +184,7 @@ def naked_domain(url): # It works indiscriminately on URLs or plain domains. 
res = tld_extract(url) - if not res.subdomain or res.subdomain == '': + if not res.subdomain or res.subdomain == "": return res.registered_domain else: return ".".join([res.subdomain, res.registered_domain]) @@ -213,27 +215,25 @@ def matching_subdomains(new_value, old_value): old_parent_domain = parent_domain(old_value) if old_parent_domain is None: - log.error('old_parent_domain is None', - old_value=old_value, new_value=new_value) + log.error("old_parent_domain is None", old_value=old_value, new_value=new_value) # Shouldn't actually happen. return False if new_parent_domain is None: - log.error('new_parent_domain is None', - old_value=old_value, new_value=new_value) + log.error("new_parent_domain is None", old_value=old_value, new_value=new_value) return False if new_parent_domain != old_parent_domain: - log.error("Domains aren't matching", - new_value=new_value, old_value=old_value) + log.error("Domains aren't matching", new_value=new_value, old_value=old_value) return False new_ip = resolve_hostname(new_value) old_ip = resolve_hostname(old_value) - if (new_ip is None or old_ip is None or new_ip != old_ip): - log.error("IP addresses aren't matching", - new_value=new_value, old_Value=old_value) + if new_ip is None or old_ip is None or new_ip != old_ip: + log.error( + "IP addresses aren't matching", new_value=new_value, old_Value=old_value + ) return False return True diff --git a/inbox/webhooks/gpush_notifications.py b/inbox/webhooks/gpush_notifications.py index a1d43485d..920bd2395 100644 --- a/inbox/webhooks/gpush_notifications.py +++ b/inbox/webhooks/gpush_notifications.py @@ -7,6 +7,7 @@ from inbox.api.err import APIException, NotFoundError, InputError from inbox.api.validation import valid_public_id from nylas.logging import get_logger + log = get_logger() from inbox.models.session import global_session_scope @@ -15,22 +16,19 @@ import limitlion -app = Blueprint( - 'webhooks', - 'webhooks_api', - url_prefix='/w') +app = Blueprint("webhooks", "webhooks_api", url_prefix="/w") -GOOGLE_CHANNEL_ID_STRING = 'X-Goog-Channel-ID' -GOOGLE_RESOURCE_STATE_STRING = 'X-Goog-Resource-State' -GOOGLE_RESOURCE_ID_STRING = 'X-Goog-Resource-ID' +GOOGLE_CHANNEL_ID_STRING = "X-Goog-Channel-ID" +GOOGLE_RESOURCE_STATE_STRING = "X-Goog-Resource-State" +GOOGLE_RESOURCE_ID_STRING = "X-Goog-Resource-ID" def resp(http_code, message=None, **kwargs): resp = kwargs if message: - resp['message'] = message + resp["message"] = message if http_code == 204: - body = '' + body = "" else: body = jsonify(resp) return make_response(body, http_code) @@ -43,63 +41,66 @@ def start(): g.watch_channel_id = request.headers[GOOGLE_CHANNEL_ID_STRING] g.watch_resource_id = request.headers[GOOGLE_RESOURCE_ID_STRING] except KeyError: - raise InputError('Malformed headers') + raise InputError("Malformed headers") - request.environ.setdefault('log_context', {}).update({ - 'watch_state': watch_state, - 'watch_channel_id': g.watch_channel_id, - 'watch_resource_id': g.watch_resource_id - }) + request.environ.setdefault("log_context", {}).update( + { + "watch_state": watch_state, + "watch_channel_id": g.watch_channel_id, + "watch_resource_id": g.watch_resource_id, + } + ) - if watch_state == 'sync': + if watch_state == "sync": return resp(204) @app.errorhandler(APIException) def handle_input_error(error): - response = jsonify(message=error.message, - type='invalid_request_error') + response = jsonify(message=error.message, type="invalid_request_error") response.status_code = error.status_code return response 
-@app.route('/calendar_list_update/<account_public_id>', methods=['POST']) +@app.route("/calendar_list_update/<account_public_id>", methods=["POST"]) def calendar_update(account_public_id): - request.environ['log_context']['account_public_id'] = account_public_id + request.environ["log_context"]["account_public_id"] = account_public_id try: valid_public_id(account_public_id) with global_session_scope() as db_session: - account = db_session.query(GmailAccount) \ - .filter(GmailAccount.public_id == account_public_id) \ + account = ( + db_session.query(GmailAccount) + .filter(GmailAccount.public_id == account_public_id) .one() + ) account.handle_gpush_notification() db_session.commit() return resp(200) except ValueError: - raise InputError('Invalid public ID') + raise InputError("Invalid public ID") except NoResultFound: - raise NotFoundError("Couldn't find account `{0}`" - .format(account_public_id)) + raise NotFoundError("Couldn't find account `{0}`".format(account_public_id)) -@app.route('/calendar_update/<calendar_public_id>', methods=['POST']) +@app.route("/calendar_update/<calendar_public_id>", methods=["POST"]) def event_update(calendar_public_id): - request.environ['log_context']['calendar_public_id'] = calendar_public_id + request.environ["log_context"]["calendar_public_id"] = calendar_public_id try: valid_public_id(calendar_public_id) allowed, tokens, sleep = limitlion.throttle( - 'gcal:{}'.format(calendar_public_id), rps=.5 + "gcal:{}".format(calendar_public_id), rps=0.5 ) if allowed: with global_session_scope() as db_session: - calendar = db_session.query(Calendar) \ - .filter(Calendar.public_id == calendar_public_id) \ + calendar = ( + db_session.query(Calendar) + .filter(Calendar.public_id == calendar_public_id) .one() + ) calendar.handle_gpush_notification() db_session.commit() return resp(200) except ValueError: - raise InputError('Invalid public ID') + raise InputError("Invalid public ID") except NoResultFound: - raise NotFoundError("Couldn't find calendar `{0}`" - .format(calendar_public_id)) + raise NotFoundError("Couldn't find calendar `{0}`".format(calendar_public_id)) diff --git a/migrations/env.py b/migrations/env.py index 1bca21999..3c6085233 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -11,6 +11,7 @@ # for 'autogenerate' support # from myapp import mymodel from inbox.models.base import MailSyncBase + target_metadata = MailSyncBase.metadata from inbox.config import config @@ -25,17 +26,18 @@ # alembic -x shard_id=1 upgrade +1 # # to target shard 1 for the migration. -config_shard_id = context.config.get_main_option('shard_id') -x_shard_id = context.get_x_argument(as_dictionary=True).get( - 'shard_id') +config_shard_id = context.config.get_main_option("shard_id") +x_shard_id = context.get_x_argument(as_dictionary=True).get("shard_id") if config_shard_id is not None: shard_id = int(config_shard_id) elif x_shard_id is not None: shard_id = int(x_shard_id) else: - raise ValueError('No shard_id is configured for migration; ' - 'run `alembic -x shard_id=<shard_id> upgrade +1`') + raise ValueError( + "No shard_id is configured for migration; " + "run `alembic -x shard_id=<shard_id> upgrade +1`" + ) def run_migrations_offline(): @@ -50,9 +52,11 @@ def run_migrations_offline(): script output. 
""" - engine_manager = EngineManager(config.get_required('DATABASE_HOSTS'), - config.get_required('DATABASE_USERS'), - include_disabled=True) + engine_manager = EngineManager( + config.get_required("DATABASE_HOSTS"), + config.get_required("DATABASE_USERS"), + include_disabled=True, + ) engine = engine_manager.engines[shard_id] context.configure(engine=engine, url=engine.url) @@ -67,18 +71,17 @@ def run_migrations_online(): and associate a connection with the context. """ - engine_manager = EngineManager(config.get_required('DATABASE_HOSTS'), - config.get_required('DATABASE_USERS'), - include_disabled=True) + engine_manager = EngineManager( + config.get_required("DATABASE_HOSTS"), + config.get_required("DATABASE_USERS"), + include_disabled=True, + ) engine = engine_manager.engines[shard_id] connection = engine.connect() # Set sane lock wait timeout value. - connection.execute('SET @@lock_wait_timeout=15') - context.configure( - connection=connection, - target_metadata=target_metadata - ) + connection.execute("SET @@lock_wait_timeout=15") + context.configure(connection=connection, target_metadata=target_metadata) try: with context.begin_transaction(): @@ -86,6 +89,7 @@ def run_migrations_online(): finally: connection.close() + if context.is_offline_mode(): run_migrations_offline() else: diff --git a/migrations/versions/000_g_msgid_g_thrid_as_integers.py b/migrations/versions/000_g_msgid_g_thrid_as_integers.py index b7cb7337c..633d444a4 100644 --- a/migrations/versions/000_g_msgid_g_thrid_as_integers.py +++ b/migrations/versions/000_g_msgid_g_thrid_as_integers.py @@ -7,7 +7,7 @@ """ # revision identifiers, used by Alembic. -revision = '2605b23e1fe6' +revision = "2605b23e1fe6" down_revision = None from alembic import op @@ -16,16 +16,16 @@ def upgrade(): - op.alter_column('message', 'g_msgid', type_=mysql.BIGINT) - op.alter_column('message', 'g_thrid', type_=mysql.BIGINT) + op.alter_column("message", "g_msgid", type_=mysql.BIGINT) + op.alter_column("message", "g_thrid", type_=mysql.BIGINT) - op.create_index('ix_message_g_msgid', 'message', ['g_msgid'], unique=False) - op.create_index('ix_message_g_thrid', 'message', ['g_thrid'], unique=False) + op.create_index("ix_message_g_msgid", "message", ["g_msgid"], unique=False) + op.create_index("ix_message_g_thrid", "message", ["g_thrid"], unique=False) def downgrade(): - op.alter_column('message', 'g_msgid', type_=mysql.VARCHAR(40)) - op.alter_column('message', 'g_thrid', type_=mysql.VARCHAR(40)) + op.alter_column("message", "g_msgid", type_=mysql.VARCHAR(40)) + op.alter_column("message", "g_thrid", type_=mysql.VARCHAR(40)) - op.drop_index('ix_message_g_thrid', table_name='message') - op.drop_index('ix_message_g_msgid', table_name='message') + op.drop_index("ix_message_g_thrid", table_name="message") + op.drop_index("ix_message_g_msgid", table_name="message") diff --git a/migrations/versions/001_rename_message_id_to_message_id_header.py b/migrations/versions/001_rename_message_id_to_message_id_header.py index d5e35405e..0ff4e4e14 100644 --- a/migrations/versions/001_rename_message_id_to_message_id_header.py +++ b/migrations/versions/001_rename_message_id_to_message_id_header.py @@ -7,18 +7,22 @@ """ # revision identifiers, used by Alembic. 
-revision = '217431caacc7' -down_revision = '2605b23e1fe6' +revision = "217431caacc7" +down_revision = "2605b23e1fe6" from alembic import op def upgrade(): - op.execute("ALTER TABLE message CHANGE message_id message_id_header VARCHAR(255) NULL") + op.execute( + "ALTER TABLE message CHANGE message_id message_id_header VARCHAR(255) NULL" + ) def downgrade(): # First make all current NULL values actually 0. This isn't a great solution, but it works. print "WARNING: This removes data about messages that do not contain a Message-Id header!" op.execute("UPDATE message SET message_id_header=0 WHERE message_id_header IS NULL") - op.execute("ALTER TABLE message CHANGE message_id_header message_id VARCHAR(255) NOT NULL") + op.execute( + "ALTER TABLE message CHANGE message_id_header message_id VARCHAR(255) NOT NULL" + ) diff --git a/migrations/versions/002_store_g_thrid_as_biginteger_instead_of_.py b/migrations/versions/002_store_g_thrid_as_biginteger_instead_of_.py index f331c39a0..6ecdf9442 100644 --- a/migrations/versions/002_store_g_thrid_as_biginteger_instead_of_.py +++ b/migrations/versions/002_store_g_thrid_as_biginteger_instead_of_.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '297aa1e1acc7' -down_revision = '217431caacc7' +revision = "297aa1e1acc7" +down_revision = "217431caacc7" from alembic import op import sqlalchemy as sa @@ -16,9 +16,9 @@ def upgrade(): - op.alter_column('thread', 'g_thrid', type_=mysql.BIGINT) - op.execute('OPTIMIZE TABLE thread') + op.alter_column("thread", "g_thrid", type_=mysql.BIGINT) + op.execute("OPTIMIZE TABLE thread") def downgrade(): - op.alter_column('thread', 'g_thrid', type_=sa.String(255)) + op.alter_column("thread", "g_thrid", type_=sa.String(255)) diff --git a/migrations/versions/003_expand_littlejson.py b/migrations/versions/003_expand_littlejson.py index c78c6d979..77e295d7d 100644 --- a/migrations/versions/003_expand_littlejson.py +++ b/migrations/versions/003_expand_littlejson.py @@ -7,16 +7,16 @@ """ # revision identifiers, used by Alembic. -revision = '269247bc37d3' -down_revision = '297aa1e1acc7' +revision = "269247bc37d3" +down_revision = "297aa1e1acc7" from alembic import op import sqlalchemy as sa def upgrade(): - op.alter_column('imapuid', 'extra_flags', type_=sa.String(255)) + op.alter_column("imapuid", "extra_flags", type_=sa.String(255)) def downgrade(): - op.alter_column('imapuid', 'extra_flags', type_=sa.String(40)) + op.alter_column("imapuid", "extra_flags", type_=sa.String(40)) diff --git a/migrations/versions/004_drafts_as_required_folder.py b/migrations/versions/004_drafts_as_required_folder.py index 32671b45a..45c518d3b 100644 --- a/migrations/versions/004_drafts_as_required_folder.py +++ b/migrations/versions/004_drafts_as_required_folder.py @@ -7,16 +7,18 @@ """ # revision identifiers, used by Alembic. 
-revision = '41a7e825d108' -down_revision = '269247bc37d3' +revision = "41a7e825d108" +down_revision = "269247bc37d3" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('imapaccount', sa.Column('drafts_folder_name', sa.String(255), nullable=True)) + op.add_column( + "imapaccount", sa.Column("drafts_folder_name", sa.String(255), nullable=True) + ) def downgrade(): - op.drop_column('imapaccount', 'drafts_folder_name') + op.drop_column("imapaccount", "drafts_folder_name") diff --git a/migrations/versions/005_import_old_accounts.py b/migrations/versions/005_import_old_accounts.py index a5a6f04ed..cbd8b8939 100644 --- a/migrations/versions/005_import_old_accounts.py +++ b/migrations/versions/005_import_old_accounts.py @@ -7,53 +7,55 @@ """ # revision identifiers, used by Alembic. -revision = 'adc646e1f11' -down_revision = '41a7e825d108' +revision = "adc646e1f11" +down_revision = "41a7e825d108" from alembic import op from sqlalchemy.ext.declarative import declarative_base import os.path -SQL_DUMP_FILENAME = 'alphasync_rds_inbox_imapaccount.sql' +SQL_DUMP_FILENAME = "alphasync_rds_inbox_imapaccount.sql" def upgrade(): from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) from inbox.models.backends.imap import ImapAccount import inbox.auth.gmail as gmail # Assert we have the dump file if not os.path.isfile(SQL_DUMP_FILENAME): - print "Can't find old user SQL dump at {0}...\nMigration no users."\ - .format(SQL_DUMP_FILENAME) + print "Can't find old user SQL dump at {0}...\nMigration no users.".format( + SQL_DUMP_FILENAME + ) return # Imports to `imapaccount_old` table - with open(SQL_DUMP_FILENAME, 'r') as f: - print 'Importing old account data...', + with open(SQL_DUMP_FILENAME, "r") as f: + print "Importing old account data...", op.execute(f.read()) - print 'OK!' + print "OK!" Base = declarative_base() Base.metadata.reflect(engine) class ImapAccount_Old(Base): - __table__ = Base.metadata.tables['imapaccount_old'] + __table__ = Base.metadata.tables["imapaccount_old"] with session_scope() as db_session: migrated_accounts = [] for acct in db_session.query(ImapAccount_Old): - print 'Importing {0}'. format(acct.email_address) + print "Importing {0}".format(acct.email_address) - existing_account = db_session.query(ImapAccount)\ - .filter_by(email_address=acct.email_address) + existing_account = db_session.query(ImapAccount).filter_by( + email_address=acct.email_address + ) if existing_account.count() > 0: - print 'Already have account for {0}' \ - .format(acct.email_address) + print "Already have account for {0}".format(acct.email_address) continue # Create a mock OAuth response using data from the old table @@ -69,12 +71,12 @@ class ImapAccount_Old(Base): audience=acct.o_audience, scope=acct.o_scope, refresh_token=acct.o_refresh_token, - verified_email=acct.o_verified_email + verified_email=acct.o_verified_email, ) - new_account = gmail.create_account(db_session, - acct.email_address, - mock_response) + new_account = gmail.create_account( + db_session, acct.email_address, mock_response + ) # Note that this doesn't verify **anything** about the account. # We're just doing the migration now @@ -82,23 +84,24 @@ class ImapAccount_Old(Base): db_session.commit() migrated_accounts.append(new_account) - print '\nDone! Imported {0} accounts.'.format(len(migrated_accounts)) - print '\nNow verifying refresh tokens...\n' + print "\nDone! 
Imported {0} accounts.".format(len(migrated_accounts)) + print "\nNow verifying refresh tokens...\n" verified_accounts = [] for acct in migrated_accounts: - print 'Verifying {0}... '.format(acct.email_address), + print "Verifying {0}... ".format(acct.email_address), if gmail.verify_account(acct): verified_accounts.append(acct) - print 'OK!' + print "OK!" else: - print 'FAILED!' + print "FAILED!" - print 'Done! Verified {0} of {1}'.format(len(verified_accounts), - len(migrated_accounts)) + print "Done! Verified {0} of {1}".format( + len(verified_accounts), len(migrated_accounts) + ) - op.drop_table('imapaccount_old') + op.drop_table("imapaccount_old") def downgrade(): - print 'Not removing any accounts!' + print "Not removing any accounts!" diff --git a/migrations/versions/006_add_search_tokens.py b/migrations/versions/006_add_search_tokens.py index a9c94a420..a90247868 100644 --- a/migrations/versions/006_add_search_tokens.py +++ b/migrations/versions/006_add_search_tokens.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '482338e7a7d6' -down_revision = 'adc646e1f11' +revision = "482338e7a7d6" +down_revision = "adc646e1f11" from alembic import op import sqlalchemy as sa @@ -16,16 +16,15 @@ def upgrade(): op.create_table( - 'searchtoken', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('token', sa.String(length=255), nullable=True), - sa.Column('source', sa.Enum('name', 'email_address'), nullable=True), - sa.Column('contact_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') + "searchtoken", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("token", sa.String(length=255), nullable=True), + sa.Column("source", sa.Enum("name", "email_address"), nullable=True), + sa.Column("contact_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["contact_id"], ["contact.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), ) def downgrade(): - op.drop_table('searchtoken') + op.drop_table("searchtoken") diff --git a/migrations/versions/007_per_provider_table_split.py b/migrations/versions/007_per_provider_table_split.py index 7ef19d96e..57269a3b3 100644 --- a/migrations/versions/007_per_provider_table_split.py +++ b/migrations/versions/007_per_provider_table_split.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '1c3f1812f2d9' -down_revision = '482338e7a7d6' +revision = "1c3f1812f2d9" +down_revision = "482338e7a7d6" from alembic import op import sqlalchemy as sa @@ -33,53 +33,55 @@ def downgrade(): def genericize_imapaccount(): from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) class ImapAccount_(Base): - __table__ = Base.metadata.tables['imapaccount'] + __table__ = Base.metadata.tables["imapaccount"] # Get data from columns-to-be-dropped with session_scope() as db_session: - results = db_session.query(ImapAccount_.id, - ImapAccount_.imap_host).all() + results = db_session.query(ImapAccount_.id, ImapAccount_.imap_host).all() to_insert = [dict(id=r[0], imap_host=r[1]) for r in results] # Rename table, add new columns. 
- op.rename_table('imapaccount', 'account') - op.add_column('account', sa.Column('type', sa.String(16))) + op.rename_table("imapaccount", "account") + op.add_column("account", sa.Column("type", sa.String(16))) # Create new table, insert data # The table - op.create_table('imapaccount', - sa.Column('imap_host', sa.String(512)), - sa.Column('id', sa.Integer()), - sa.ForeignKeyConstraint(['id'], ['account.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id')) + op.create_table( + "imapaccount", + sa.Column("imap_host", sa.String(512)), + sa.Column("id", sa.Integer()), + sa.ForeignKeyConstraint(["id"], ["account.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) # The ad-hoc table for insert - table_ = table('imapaccount', - column('imap_host', sa.String()), - column('id', sa.Integer)) + table_ = table( + "imapaccount", column("imap_host", sa.String()), column("id", sa.Integer) + ) if to_insert: op.bulk_insert(table_, to_insert) # Drop columns now - op.drop_column('account', 'imap_host') + op.drop_column("account", "imap_host") def genericize_thread(): from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) class Thread_(Base): - __table__ = Base.metadata.tables['thread'] + __table__ = Base.metadata.tables["thread"] # Get data from columns-to-be-dropped with session_scope() as db_session: @@ -88,114 +90,163 @@ class Thread_(Base): to_insert = [dict(id=r[0], g_thrid=r[1]) for r in results] # Add new columns - op.add_column('thread', sa.Column('type', sa.String(16))) + op.add_column("thread", sa.Column("type", sa.String(16))) # Create new table, insert data # The table - op.create_table('imapthread', - sa.Column('g_thrid', sa.BigInteger(), nullable=True, - index=True), - sa.Column('id', sa.Integer()), - sa.ForeignKeyConstraint(['id'], ['thread.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id')) + op.create_table( + "imapthread", + sa.Column("g_thrid", sa.BigInteger(), nullable=True, index=True), + sa.Column("id", sa.Integer()), + sa.ForeignKeyConstraint(["id"], ["thread.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) # The ad-hoc table for insert - table_ = table('imapthread', - column('g_thrid', sa.BigInteger), - column('id', sa.Integer)) + table_ = table( + "imapthread", column("g_thrid", sa.BigInteger), column("id", sa.Integer) + ) if to_insert: op.bulk_insert(table_, to_insert) # Drop columns now - op.drop_column('thread', 'g_thrid') + op.drop_column("thread", "g_thrid") def genericize_namespace_contact_foldersync(): # Namespace - op.drop_constraint('namespace_ibfk_1', 'namespace', type_='foreignkey') - op.alter_column('namespace', 'imapaccount_id', - new_column_name='account_id', existing_type=sa.Integer(), - existing_nullable=True) - - op.create_foreign_key('namespace_ibfk_1', 'namespace', 'account', - ['account_id'], ['id'], ondelete='CASCADE') + op.drop_constraint("namespace_ibfk_1", "namespace", type_="foreignkey") + op.alter_column( + "namespace", + "imapaccount_id", + new_column_name="account_id", + existing_type=sa.Integer(), + existing_nullable=True, + ) + + op.create_foreign_key( + "namespace_ibfk_1", + "namespace", + "account", + ["account_id"], + ["id"], + ondelete="CASCADE", + ) # Contact - op.drop_constraint('contact_ibfk_1', 'contact', type_='foreignkey') - op.alter_column('contact', 'imapaccount_id', - new_column_name='account_id', existing_type=sa.Integer(), - existing_nullable=False) - - 
op.create_foreign_key('contact_ibfk_1', 'contact', 'account', - ['account_id'], ['id'], ondelete='CASCADE') - - op.drop_constraint('foldersync_ibfk_1', 'foldersync', type_='foreignkey') - op.alter_column('foldersync', 'imapaccount_id', - new_column_name='account_id', existing_type=sa.Integer(), - existing_nullable=False) - - op.create_foreign_key('foldersync_ibfk_1', 'foldersync', 'account', - ['account_id'], ['id'], ondelete='CASCADE') + op.drop_constraint("contact_ibfk_1", "contact", type_="foreignkey") + op.alter_column( + "contact", + "imapaccount_id", + new_column_name="account_id", + existing_type=sa.Integer(), + existing_nullable=False, + ) + + op.create_foreign_key( + "contact_ibfk_1", + "contact", + "account", + ["account_id"], + ["id"], + ondelete="CASCADE", + ) + + op.drop_constraint("foldersync_ibfk_1", "foldersync", type_="foreignkey") + op.alter_column( + "foldersync", + "imapaccount_id", + new_column_name="account_id", + existing_type=sa.Integer(), + existing_nullable=False, + ) + + op.create_foreign_key( + "foldersync_ibfk_1", + "foldersync", + "account", + ["account_id"], + ["id"], + ondelete="CASCADE", + ) # Downgrade functions: def downgrade_imapaccount(): from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) class ImapAccount_(Base): - __table__ = Base.metadata.tables['imapaccount'] + __table__ = Base.metadata.tables["imapaccount"] # Get data from table-to-be-dropped with session_scope() as db_session: - results = db_session.query(ImapAccount_.id, - ImapAccount_.imap_host).all() + results = db_session.query(ImapAccount_.id, ImapAccount_.imap_host).all() to_insert = [dict(id=r[0], imap_host=r[1]) for r in results] # Drop columns, add new columns + insert data - op.drop_column('account', 'type') - op.add_column('account', sa.Column('imap_host', sa.String(512))) + op.drop_column("account", "type") + op.add_column("account", sa.Column("imap_host", sa.String(512))) - table_ = table('account', - column('imap_host', sa.String(512)), - column('id', sa.Integer)) + table_ = table( + "account", column("imap_host", sa.String(512)), column("id", sa.Integer) + ) for r in to_insert: op.execute( - table_.update(). - where(table_.c.id == r['id']). 
- values({'imap_host': r['imap_host']}) + table_.update() + .where(table_.c.id == r["id"]) + .values({"imap_host": r["imap_host"]}) ) # Table switch-over - op.drop_constraint('imapuid_ibfk_1', 'imapuid', type_='foreignkey') - op.drop_constraint('uidvalidity_ibfk_1', 'uidvalidity', type_='foreignkey') - op.drop_constraint('foldersync_ibfk_1', 'foldersync', type_='foreignkey') - op.drop_table('imapaccount') - - op.rename_table('account', 'imapaccount') - - op.create_foreign_key('imapuid_ibfk_1', 'imapuid', 'imapaccount', - ['imapaccount_id'], ['id'], ondelete='CASCADE') - op.create_foreign_key('uidvalidity_ibfk_1', 'uidvalidity', 'imapaccount', - ['imapaccount_id'], ['id'], ondelete='CASCADE') - op.create_foreign_key('foldersync_ibfk_1', 'foldersync', 'imapaccount', - ['account_id'], ['id'], ondelete='CASCADE') + op.drop_constraint("imapuid_ibfk_1", "imapuid", type_="foreignkey") + op.drop_constraint("uidvalidity_ibfk_1", "uidvalidity", type_="foreignkey") + op.drop_constraint("foldersync_ibfk_1", "foldersync", type_="foreignkey") + op.drop_table("imapaccount") + + op.rename_table("account", "imapaccount") + + op.create_foreign_key( + "imapuid_ibfk_1", + "imapuid", + "imapaccount", + ["imapaccount_id"], + ["id"], + ondelete="CASCADE", + ) + op.create_foreign_key( + "uidvalidity_ibfk_1", + "uidvalidity", + "imapaccount", + ["imapaccount_id"], + ["id"], + ondelete="CASCADE", + ) + op.create_foreign_key( + "foldersync_ibfk_1", + "foldersync", + "imapaccount", + ["account_id"], + ["id"], + ondelete="CASCADE", + ) def downgrade_imapthread(): from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) class ImapThread_(Base): - __table__ = Base.metadata.tables['imapthread'] + __table__ = Base.metadata.tables["imapthread"] # Get data from table-to-be-dropped with session_scope() as db_session: @@ -203,50 +254,76 @@ class ImapThread_(Base): to_insert = [dict(id=r[0], g_thrid=r[1]) for r in results] # Drop columns, add new columns + insert data - op.drop_column('thread', 'type') - op.add_column('thread', sa.Column('g_thrid', sa.BigInteger(), - nullable=True, index=True)) - table_ = table('thread', - column('g_thrid', sa.BigInteger), - column('id', sa.Integer)) + op.drop_column("thread", "type") + op.add_column( + "thread", sa.Column("g_thrid", sa.BigInteger(), nullable=True, index=True) + ) + table_ = table("thread", column("g_thrid", sa.BigInteger), column("id", sa.Integer)) for r in to_insert: op.execute( - table_.update(). - where(table_.c.id == r['id']). 
- values({'g_thrid': r['g_thrid']}) + table_.update() + .where(table_.c.id == r["id"]) + .values({"g_thrid": r["g_thrid"]}) ) # Drop table - op.drop_table('imapthread') + op.drop_table("imapthread") def downgrade_namespace_contact_foldersync(): # Namespace - op.drop_constraint('namespace_ibfk_1', 'namespace', type_='foreignkey') - op.alter_column('namespace', 'account_id', - new_column_name='imapaccount_id', - existing_type=sa.Integer(), - existing_nullable=True) - op.create_foreign_key('namespace_ibfk_1', 'namespace', 'imapaccount', - ['imapaccount_id'], ['id'], ondelete='CASCADE') + op.drop_constraint("namespace_ibfk_1", "namespace", type_="foreignkey") + op.alter_column( + "namespace", + "account_id", + new_column_name="imapaccount_id", + existing_type=sa.Integer(), + existing_nullable=True, + ) + op.create_foreign_key( + "namespace_ibfk_1", + "namespace", + "imapaccount", + ["imapaccount_id"], + ["id"], + ondelete="CASCADE", + ) # Contact - op.drop_constraint('contact_ibfk_1', 'contact', type_='foreignkey') - op.alter_column('contact', 'account_id', - new_column_name='imapaccount_id', - existing_type=sa.Integer(), - existing_nullable=False) - - op.create_foreign_key('contact_ibfk_1', 'contact', 'imapaccount', - ['imapaccount_id'], ['id'], ondelete='CASCADE') + op.drop_constraint("contact_ibfk_1", "contact", type_="foreignkey") + op.alter_column( + "contact", + "account_id", + new_column_name="imapaccount_id", + existing_type=sa.Integer(), + existing_nullable=False, + ) + + op.create_foreign_key( + "contact_ibfk_1", + "contact", + "imapaccount", + ["imapaccount_id"], + ["id"], + ondelete="CASCADE", + ) # Foldersync - op.drop_constraint('foldersync_ibfk_1', 'foldersync', type_='foreignkey') - op.alter_column('foldersync', 'account_id', - new_column_name='imapaccount_id', - existing_type=sa.Integer(), - existing_nullable=False) - - op.create_foreign_key('foldersync_ibfk_1', 'foldersync', 'imapaccount', - ['imapaccount_id'], ['id'], ondelete='CASCADE') + op.drop_constraint("foldersync_ibfk_1", "foldersync", type_="foreignkey") + op.alter_column( + "foldersync", + "account_id", + new_column_name="imapaccount_id", + existing_type=sa.Integer(), + existing_nullable=False, + ) + + op.create_foreign_key( + "foldersync_ibfk_1", + "foldersync", + "imapaccount", + ["imapaccount_id"], + ["id"], + ondelete="CASCADE", + ) diff --git a/migrations/versions/008_store_userinfo_from_oauth.py b/migrations/versions/008_store_userinfo_from_oauth.py index 79e19f009..a99cc343b 100644 --- a/migrations/versions/008_store_userinfo_from_oauth.py +++ b/migrations/versions/008_store_userinfo_from_oauth.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '3c11391b5eb0' -down_revision = '1c3f1812f2d9' +revision = "3c11391b5eb0" +down_revision = "1c3f1812f2d9" from alembic import op import sqlalchemy as sa @@ -16,23 +16,33 @@ def upgrade(): ### commands auto generated by Alembic - please adjust! 
### - op.add_column('account', sa.Column('family_name', sa.String(length=255), nullable=True)) - op.add_column('account', sa.Column('g_gender', sa.String(length=16), nullable=True)) - op.add_column('account', sa.Column('g_locale', sa.String(length=16), nullable=True)) - op.add_column('account', sa.Column('g_picture_url', sa.String(length=255), nullable=True)) - op.add_column('account', sa.Column('g_plus_url', sa.String(length=255), nullable=True)) - op.add_column('account', sa.Column('given_name', sa.String(length=255), nullable=True)) - op.add_column('account', sa.Column('google_id', sa.String(length=255), nullable=True)) + op.add_column( + "account", sa.Column("family_name", sa.String(length=255), nullable=True) + ) + op.add_column("account", sa.Column("g_gender", sa.String(length=16), nullable=True)) + op.add_column("account", sa.Column("g_locale", sa.String(length=16), nullable=True)) + op.add_column( + "account", sa.Column("g_picture_url", sa.String(length=255), nullable=True) + ) + op.add_column( + "account", sa.Column("g_plus_url", sa.String(length=255), nullable=True) + ) + op.add_column( + "account", sa.Column("given_name", sa.String(length=255), nullable=True) + ) + op.add_column( + "account", sa.Column("google_id", sa.String(length=255), nullable=True) + ) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### - op.drop_column('account', 'google_id') - op.drop_column('account', 'given_name') - op.drop_column('account', 'g_plus_url') - op.drop_column('account', 'g_picture_url') - op.drop_column('account', 'g_locale') - op.drop_column('account', 'g_gender') - op.drop_column('account', 'family_name') + op.drop_column("account", "google_id") + op.drop_column("account", "given_name") + op.drop_column("account", "g_plus_url") + op.drop_column("account", "g_picture_url") + op.drop_column("account", "g_locale") + op.drop_column("account", "g_gender") + op.drop_column("account", "family_name") ### end Alembic commands ### diff --git a/migrations/versions/009_multiple_contact_providers.py b/migrations/versions/009_multiple_contact_providers.py index 0d99763eb..8ad959521 100644 --- a/migrations/versions/009_multiple_contact_providers.py +++ b/migrations/versions/009_multiple_contact_providers.py @@ -7,30 +7,40 @@ """ # revision identifiers, used by Alembic. -revision = '169cac0cd87e' -down_revision = '3c11391b5eb0' +revision = "169cac0cd87e" +down_revision = "3c11391b5eb0" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('contact', sa.Column('provider_name', sa.String(length=64), - nullable=False)) - op.alter_column('contact', 'g_id', new_column_name='uid', nullable=False, - existing_type=sa.String(length=64)) + op.add_column( + "contact", sa.Column("provider_name", sa.String(length=64), nullable=False) + ) + op.alter_column( + "contact", + "g_id", + new_column_name="uid", + nullable=False, + existing_type=sa.String(length=64), + ) # Previously we were just syncing google contacts. 
op.execute('UPDATE contact SET provider_name="google"') - op.drop_constraint('g_id', 'contact', type_='unique') - op.create_unique_constraint('uid', 'contact', ['uid', 'source', - 'account_id', - 'provider_name']) + op.drop_constraint("g_id", "contact", type_="unique") + op.create_unique_constraint( + "uid", "contact", ["uid", "source", "account_id", "provider_name"] + ) def downgrade(): - op.alter_column('contact', 'uid', new_column_name='g_id', - existing_type=sa.String(length=64), existing_nullable=True) - op.drop_column('contact', 'provider_name') - op.drop_constraint('uid', 'contact', type_='unique') - op.create_unique_constraint('g_id', 'contact', ['g_id', 'source', - 'account_id']) + op.alter_column( + "contact", + "uid", + new_column_name="g_id", + existing_type=sa.String(length=64), + existing_nullable=True, + ) + op.drop_column("contact", "provider_name") + op.drop_constraint("uid", "contact", type_="unique") + op.create_unique_constraint("g_id", "contact", ["g_id", "source", "account_id"]) diff --git a/migrations/versions/010_store_raw_contact_data.py b/migrations/versions/010_store_raw_contact_data.py index d5753b133..f1b843067 100644 --- a/migrations/versions/010_store_raw_contact_data.py +++ b/migrations/versions/010_store_raw_contact_data.py @@ -7,16 +7,16 @@ """ # revision identifiers, used by Alembic. -revision = '3b511977a01f' -down_revision = '169cac0cd87e' +revision = "3b511977a01f" +down_revision = "169cac0cd87e" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('contact', sa.Column('raw_data', sa.Text(), nullable=True)) + op.add_column("contact", sa.Column("raw_data", sa.Text(), nullable=True)) def downgrade(): - op.drop_column('contact', 'raw_data') + op.drop_column("contact", "raw_data") diff --git a/migrations/versions/011_use_server_default.py b/migrations/versions/011_use_server_default.py index a13f5690c..b1368b473 100644 --- a/migrations/versions/011_use_server_default.py +++ b/migrations/versions/011_use_server_default.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '3237b6b1ee03' -down_revision = '3b511977a01f' +revision = "3237b6b1ee03" +down_revision = "3b511977a01f" from alembic import op import sqlalchemy as sa @@ -16,117 +16,179 @@ def upgrade(): # Base tables - op.alter_column('account', 'save_raw_messages', - server_default=sa.sql.expression.true(), - existing_type=sa.Boolean, - existing_server_default=sa.sql.expression.null(), - existing_nullable=None) - op.alter_column('namespace', 'type', - server_default='root', - existing_type=sa.Enum('root', 'shared_folder'), - existing_server_default=sa.sql.expression.null(), - existing_nullable=False) - op.alter_column('message', 'is_draft', - server_default=sa.sql.expression.false(), - existing_type=sa.Boolean, - existing_server_default=sa.sql.expression.null(), - existing_nullable=False) - op.alter_column('message', 'decode_error', - server_default=sa.sql.expression.false(), - existing_type=sa.Boolean, - existing_server_default=sa.sql.expression.null(), - existing_nullable=False) - op.alter_column('block', 'is_inboxapp_attachment', - server_default=sa.sql.expression.false(), - existing_type=sa.Boolean, - existing_server_default=sa.sql.expression.null(), - existing_nullable=None) + op.alter_column( + "account", + "save_raw_messages", + server_default=sa.sql.expression.true(), + existing_type=sa.Boolean, + existing_server_default=sa.sql.expression.null(), + existing_nullable=None, + ) + op.alter_column( + "namespace", + "type", + server_default="root", + existing_type=sa.Enum("root", "shared_folder"), + existing_server_default=sa.sql.expression.null(), + existing_nullable=False, + ) + op.alter_column( + "message", + "is_draft", + server_default=sa.sql.expression.false(), + existing_type=sa.Boolean, + existing_server_default=sa.sql.expression.null(), + existing_nullable=False, + ) + op.alter_column( + "message", + "decode_error", + server_default=sa.sql.expression.false(), + existing_type=sa.Boolean, + existing_server_default=sa.sql.expression.null(), + existing_nullable=False, + ) + op.alter_column( + "block", + "is_inboxapp_attachment", + server_default=sa.sql.expression.false(), + existing_type=sa.Boolean, + existing_server_default=sa.sql.expression.null(), + existing_nullable=None, + ) # Imap tables - op.alter_column('imapuid', 'is_draft', - server_default=sa.sql.expression.false(), - existing_type=sa.Boolean, - existing_server_default=sa.sql.expression.null(), - existing_nullable=False) - op.alter_column('imapuid', 'is_seen', - server_default=sa.sql.expression.false(), - existing_type=sa.Boolean, - existing_server_default=sa.sql.expression.null(), - existing_nullable=False) - op.alter_column('imapuid', 'is_flagged', - server_default=sa.sql.expression.false(), - existing_type=sa.Boolean, - existing_server_default=sa.sql.expression.null(), - existing_nullable=False) - op.alter_column('imapuid', 'is_recent', - server_default=sa.sql.expression.false(), - existing_type=sa.Boolean, - existing_server_default=sa.sql.expression.null(), - existing_nullable=False) - op.alter_column('imapuid', 'is_answered', - server_default=sa.sql.expression.false(), - existing_type=sa.Boolean, - existing_server_default=sa.sql.expression.null(), - existing_nullable=False) - op.alter_column('foldersync', 'state', - server_default='initial', - existing_type=sa.Enum('initial', 'initial uidinvalid', - 'poll', 'poll uidinvalid', 'finish'), - existing_server_default=sa.sql.expression.null(), - existing_nullable=False) + op.alter_column( + "imapuid", + "is_draft", + server_default=sa.sql.expression.false(), + 
existing_type=sa.Boolean, + existing_server_default=sa.sql.expression.null(), + existing_nullable=False, + ) + op.alter_column( + "imapuid", + "is_seen", + server_default=sa.sql.expression.false(), + existing_type=sa.Boolean, + existing_server_default=sa.sql.expression.null(), + existing_nullable=False, + ) + op.alter_column( + "imapuid", + "is_flagged", + server_default=sa.sql.expression.false(), + existing_type=sa.Boolean, + existing_server_default=sa.sql.expression.null(), + existing_nullable=False, + ) + op.alter_column( + "imapuid", + "is_recent", + server_default=sa.sql.expression.false(), + existing_type=sa.Boolean, + existing_server_default=sa.sql.expression.null(), + existing_nullable=False, + ) + op.alter_column( + "imapuid", + "is_answered", + server_default=sa.sql.expression.false(), + existing_type=sa.Boolean, + existing_server_default=sa.sql.expression.null(), + existing_nullable=False, + ) + op.alter_column( + "foldersync", + "state", + server_default="initial", + existing_type=sa.Enum( + "initial", "initial uidinvalid", "poll", "poll uidinvalid", "finish" + ), + existing_server_default=sa.sql.expression.null(), + existing_nullable=False, + ) # EAS tables - op.alter_column('easaccount', 'eas_account_sync_key', - server_default='0', - nullable=False, - existing_type=sa.String(64), - existing_server_default=sa.sql.expression.null(), - existing_nullable=None) - op.alter_column('easaccount', 'eas_state', - server_default='sync', - nullable=False, - existing_type=sa.Enum('sync', 'sync keyinvalid', 'finish'), - existing_server_default=sa.sql.expression.null(), - existing_nullable=None) - op.alter_column('easuid', 'is_draft', - server_default=sa.sql.expression.false(), - existing_type=sa.Boolean, - existing_server_default=sa.sql.expression.null(), - existing_nullable=False) - op.alter_column('easuid', 'is_flagged', - server_default=sa.sql.expression.false(), - existing_type=sa.Boolean, - existing_server_default=sa.sql.expression.null(), - existing_nullable=False) - op.alter_column('easuid', 'is_seen', - server_default=sa.sql.expression.false(), - nullable=False, - existing_type=sa.Boolean, - existing_server_default=sa.sql.expression.null(), - existing_nullable=True) - op.alter_column('easfoldersync', 'state', - server_default='initial', - existing_type=sa.Enum('initial', 'initial uidinvalid', - 'poll', 'poll uidinvalid', 'finish'), - existing_server_default=sa.sql.expression.null(), - existing_nullable=False) - op.alter_column('easfoldersync', 'eas_folder_sync_key', - nullable=False, - server_default='0', - existing_type=sa.String(64), - existing_server_default=sa.sql.expression.null(), - existing_nullable=None) + op.alter_column( + "easaccount", + "eas_account_sync_key", + server_default="0", + nullable=False, + existing_type=sa.String(64), + existing_server_default=sa.sql.expression.null(), + existing_nullable=None, + ) + op.alter_column( + "easaccount", + "eas_state", + server_default="sync", + nullable=False, + existing_type=sa.Enum("sync", "sync keyinvalid", "finish"), + existing_server_default=sa.sql.expression.null(), + existing_nullable=None, + ) + op.alter_column( + "easuid", + "is_draft", + server_default=sa.sql.expression.false(), + existing_type=sa.Boolean, + existing_server_default=sa.sql.expression.null(), + existing_nullable=False, + ) + op.alter_column( + "easuid", + "is_flagged", + server_default=sa.sql.expression.false(), + existing_type=sa.Boolean, + existing_server_default=sa.sql.expression.null(), + existing_nullable=False, + ) + op.alter_column( + "easuid", + 
"is_seen", + server_default=sa.sql.expression.false(), + nullable=False, + existing_type=sa.Boolean, + existing_server_default=sa.sql.expression.null(), + existing_nullable=True, + ) + op.alter_column( + "easfoldersync", + "state", + server_default="initial", + existing_type=sa.Enum( + "initial", "initial uidinvalid", "poll", "poll uidinvalid", "finish" + ), + existing_server_default=sa.sql.expression.null(), + existing_nullable=False, + ) + op.alter_column( + "easfoldersync", + "eas_folder_sync_key", + nullable=False, + server_default="0", + existing_type=sa.String(64), + existing_server_default=sa.sql.expression.null(), + existing_nullable=None, + ) def downgrade(): # Only downgrade those that can be nullable - op.alter_column('account', 'save_raw_messages', - server_default=sa.sql.expression.null(), - existing_type=sa.Boolean, - existing_server_default=sa.sql.expression.true(), - existing_nullable=None) - op.alter_column('block', 'is_inboxapp_attachment', - server_default=sa.sql.expression.null(), - existing_type=sa.Boolean, - existing_server_default=sa.sql.expression.false(), - existing_nullable=None) + op.alter_column( + "account", + "save_raw_messages", + server_default=sa.sql.expression.null(), + existing_type=sa.Boolean, + existing_server_default=sa.sql.expression.true(), + existing_nullable=None, + ) + op.alter_column( + "block", + "is_inboxapp_attachment", + server_default=sa.sql.expression.null(), + existing_type=sa.Boolean, + existing_server_default=sa.sql.expression.false(), + existing_nullable=None, + ) diff --git a/migrations/versions/012_move_google_userinfo_fields_to_.py b/migrations/versions/012_move_google_userinfo_fields_to_.py index 99b2e2e84..6e4d4b0ee 100644 --- a/migrations/versions/012_move_google_userinfo_fields_to_.py +++ b/migrations/versions/012_move_google_userinfo_fields_to_.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '193802835c33' -down_revision = '3237b6b1ee03' +revision = "193802835c33" +down_revision = "3237b6b1ee03" from alembic import op import sqlalchemy as sa @@ -19,136 +19,164 @@ def upgrade(): from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) # ADD: - op.add_column('imapaccount', sa.Column('family_name', sa.String(length=255), - nullable=True)) - op.add_column('imapaccount', sa.Column('g_gender', sa.String(length=16), - nullable=True)) - op.add_column('imapaccount', sa.Column('g_locale', sa.String(length=16), - nullable=True)) - op.add_column('imapaccount', sa.Column('g_picture_url', sa.String(length=255), - nullable=True)) - op.add_column('imapaccount', sa.Column('g_plus_url', sa.String(length=255), - nullable=True)) - op.add_column('imapaccount', sa.Column('given_name', sa.String(length=255), - nullable=True)) - op.add_column('imapaccount', sa.Column('google_id', sa.String(length=255), - nullable=True)) + op.add_column( + "imapaccount", sa.Column("family_name", sa.String(length=255), nullable=True) + ) + op.add_column( + "imapaccount", sa.Column("g_gender", sa.String(length=16), nullable=True) + ) + op.add_column( + "imapaccount", sa.Column("g_locale", sa.String(length=16), nullable=True) + ) + op.add_column( + "imapaccount", sa.Column("g_picture_url", sa.String(length=255), nullable=True) + ) + op.add_column( + "imapaccount", sa.Column("g_plus_url", sa.String(length=255), nullable=True) + ) + op.add_column( + "imapaccount", sa.Column("given_name", sa.String(length=255), nullable=True) + ) + op.add_column( + "imapaccount", sa.Column("google_id", sa.String(length=255), nullable=True) + ) # MOVE: class Account_(Base): - __table__ = Base.metadata.tables['account'] + __table__ = Base.metadata.tables["account"] with session_scope() as db_session: - results = db_session.query(Account_.id, - Account_.family_name, - Account_.google_id, - Account_.g_plus_url, - Account_.g_picture_url, - Account_.g_gender, - Account_.given_name, - Account_.g_locale).all() - - imapaccount = table('imapaccount', - column('id', sa.String), - column('family_name', sa.String), - column('google_id', sa.String), - column('g_plus_url', sa.String), - column('g_picture_url', sa.String), - column('g_gender', sa.String), - column('given_name', sa.String), - column('g_locale', sa.String)) + results = db_session.query( + Account_.id, + Account_.family_name, + Account_.google_id, + Account_.g_plus_url, + Account_.g_picture_url, + Account_.g_gender, + Account_.given_name, + Account_.g_locale, + ).all() + + imapaccount = table( + "imapaccount", + column("id", sa.String), + column("family_name", sa.String), + column("google_id", sa.String), + column("g_plus_url", sa.String), + column("g_picture_url", sa.String), + column("g_gender", sa.String), + column("given_name", sa.String), + column("g_locale", sa.String), + ) for r in results: op.execute( - imapaccount.update().where(imapaccount.c.id == r[0]).values({ - 'family_name': r[1], - 'google_id': r[2], - 'g_plus_url': r[3], - 'g_picture_url': r[4], - 'g_gender': r[5], - 'given_name': r[6], - 'g_locale': r[7] - }) + imapaccount.update() + .where(imapaccount.c.id == r[0]) + .values( + { + "family_name": r[1], + "google_id": r[2], + "g_plus_url": r[3], + "g_picture_url": r[4], + "g_gender": r[5], + "given_name": r[6], + "g_locale": r[7], + } + ) ) # DROP: - op.drop_column('account', 'family_name') - op.drop_column('account', 
'google_id') - op.drop_column('account', 'g_plus_url') - op.drop_column('account', 'g_picture_url') - op.drop_column('account', 'g_gender') - op.drop_column('account', 'given_name') - op.drop_column('account', 'g_locale') + op.drop_column("account", "family_name") + op.drop_column("account", "google_id") + op.drop_column("account", "g_plus_url") + op.drop_column("account", "g_picture_url") + op.drop_column("account", "g_gender") + op.drop_column("account", "given_name") + op.drop_column("account", "g_locale") def downgrade(): from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) # ADD: - op.add_column('account', sa.Column('family_name', sa.String(length=255), - nullable=True)) - op.add_column('account', sa.Column('g_gender', sa.String(length=16), - nullable=True)) - op.add_column('account', sa.Column('g_locale', sa.String(length=16), - nullable=True)) - op.add_column('account', sa.Column('g_picture_url', sa.String(length=255), - nullable=True)) - op.add_column('account', sa.Column('g_plus_url', sa.String(length=255), - nullable=True)) - op.add_column('account', sa.Column('given_name', sa.String(length=255), - nullable=True)) - op.add_column('account', sa.Column('google_id', sa.String(length=255), - nullable=True)) + op.add_column( + "account", sa.Column("family_name", sa.String(length=255), nullable=True) + ) + op.add_column("account", sa.Column("g_gender", sa.String(length=16), nullable=True)) + op.add_column("account", sa.Column("g_locale", sa.String(length=16), nullable=True)) + op.add_column( + "account", sa.Column("g_picture_url", sa.String(length=255), nullable=True) + ) + op.add_column( + "account", sa.Column("g_plus_url", sa.String(length=255), nullable=True) + ) + op.add_column( + "account", sa.Column("given_name", sa.String(length=255), nullable=True) + ) + op.add_column( + "account", sa.Column("google_id", sa.String(length=255), nullable=True) + ) # MOVE: class ImapAccount_(Base): - __table__ = Base.metadata.tables['imapaccount'] + __table__ = Base.metadata.tables["imapaccount"] with session_scope() as db_session: - results = db_session.query(ImapAccount_.id, - ImapAccount_.family_name, - ImapAccount_.google_id, - ImapAccount_.g_plus_url, - ImapAccount_.g_picture_url, - ImapAccount_.g_gender, - ImapAccount_.given_name, - ImapAccount_.g_locale).all() - - account = table('account', - column('id', sa.String), - column('family_name', sa.String), - column('google_id', sa.String), - column('g_plus_url', sa.String), - column('g_picture_url', sa.String), - column('g_gender', sa.String), - column('given_name', sa.String), - column('g_locale', sa.String)) + results = db_session.query( + ImapAccount_.id, + ImapAccount_.family_name, + ImapAccount_.google_id, + ImapAccount_.g_plus_url, + ImapAccount_.g_picture_url, + ImapAccount_.g_gender, + ImapAccount_.given_name, + ImapAccount_.g_locale, + ).all() + + account = table( + "account", + column("id", sa.String), + column("family_name", sa.String), + column("google_id", sa.String), + column("g_plus_url", sa.String), + column("g_picture_url", sa.String), + column("g_gender", sa.String), + column("given_name", sa.String), + column("g_locale", sa.String), + ) for r in results: op.execute( - account.update().where(account.c.id == r[0]).values({ - 'family_name': r[1], - 'google_id': r[2], - 'g_plus_url': r[3], - 'g_picture_url': r[4], - 'g_gender': r[5], - 'given_name': r[6], - 'g_locale': r[7] - }) + 
account.update() + .where(account.c.id == r[0]) + .values( + { + "family_name": r[1], + "google_id": r[2], + "g_plus_url": r[3], + "g_picture_url": r[4], + "g_gender": r[5], + "given_name": r[6], + "g_locale": r[7], + } + ) ) # DROP: - op.drop_column('imapaccount', 'family_name') - op.drop_column('imapaccount', 'google_id') - op.drop_column('imapaccount', 'g_plus_url') - op.drop_column('imapaccount', 'g_picture_url') - op.drop_column('imapaccount', 'g_gender') - op.drop_column('imapaccount', 'given_name') - op.drop_column('imapaccount', 'g_locale') + op.drop_column("imapaccount", "family_name") + op.drop_column("imapaccount", "google_id") + op.drop_column("imapaccount", "g_plus_url") + op.drop_column("imapaccount", "g_picture_url") + op.drop_column("imapaccount", "g_gender") + op.drop_column("imapaccount", "given_name") + op.drop_column("imapaccount", "g_locale") diff --git a/migrations/versions/013_add_spool_msg.py b/migrations/versions/013_add_spool_msg.py index d2ebdcd3f..7502a56f3 100644 --- a/migrations/versions/013_add_spool_msg.py +++ b/migrations/versions/013_add_spool_msg.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = 'f7dbd9bf4a6' -down_revision = '193802835c33' +revision = "f7dbd9bf4a6" +down_revision = "193802835c33" from alembic import op import sqlalchemy as sa @@ -16,34 +16,35 @@ def upgrade(): ### commands auto generated by Alembic - please adjust! ### - op.add_column('message', sa.Column('inbox_uid', sa.String(length=64), - nullable=True)) - op.add_column('message', sa.Column('type', sa.String(length=16), - nullable=True)) + op.add_column( + "message", sa.Column("inbox_uid", sa.String(length=64), nullable=True) + ) + op.add_column("message", sa.Column("type", sa.String(length=16), nullable=True)) op.create_table( - 'spoolmessage', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('created_date', sa.DateTime(), nullable=True), + "spoolmessage", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("created_date", sa.DateTime(), nullable=True), sa.Column( - 'is_sent', sa.Boolean, + "is_sent", + sa.Boolean, server_default=sa.sql.expression.false(), - nullable=False), - sa.Column('resolved_message_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['id'], ['message.id'], ondelete='CASCADE'), + nullable=False, + ), + sa.Column("resolved_message_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["id"], ["message.id"], ondelete="CASCADE"), sa.ForeignKeyConstraint( - ['resolved_message_id'], - ['message.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') + ["resolved_message_id"], ["message.id"], ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("id"), ) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### - op.drop_column('message', 'type') - op.drop_column('message', 'inbox_uid') + op.drop_column("message", "type") + op.drop_column("message", "inbox_uid") - op.drop_table('spoolmessage') + op.drop_table("spoolmessage") ### end Alembic commands ### diff --git a/migrations/versions/014_contact_ranking_signals.py b/migrations/versions/014_contact_ranking_signals.py index 2f42b7b5d..25a211cb3 100644 --- a/migrations/versions/014_contact_ranking_signals.py +++ b/migrations/versions/014_contact_ranking_signals.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '563d405d1f99' -down_revision = 'f7dbd9bf4a6' +revision = "563d405d1f99" +down_revision = "f7dbd9bf4a6" from alembic import op import sqlalchemy as sa @@ -16,18 +16,17 @@ def upgrade(): op.create_table( - 'searchsignal', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=40), nullable=True), - sa.Column('value', sa.Integer(), nullable=True), - sa.Column('contact_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') + "searchsignal", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(length=40), nullable=True), + sa.Column("value", sa.Integer(), nullable=True), + sa.Column("contact_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["contact_id"], ["contact.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), ) - op.add_column('contact', sa.Column('score', sa.Integer(), nullable=True)) + op.add_column("contact", sa.Column("score", sa.Integer(), nullable=True)) def downgrade(): - op.drop_column('contact', 'score') - op.drop_table('searchsignal') + op.drop_column("contact", "score") + op.drop_table("searchsignal") diff --git a/migrations/versions/015_generalize_from_sender_header_field.py b/migrations/versions/015_generalize_from_sender_header_field.py index 1843f03a0..648b2860c 100644 --- a/migrations/versions/015_generalize_from_sender_header_field.py +++ b/migrations/versions/015_generalize_from_sender_header_field.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '3fee2f161614' -down_revision = '563d405d1f99' +revision = "3fee2f161614" +down_revision = "563d405d1f99" def upgrade(): diff --git a/migrations/versions/016_extra_transaction_data.py b/migrations/versions/016_extra_transaction_data.py index 81e2c637a..1ea1b0fcb 100644 --- a/migrations/versions/016_extra_transaction_data.py +++ b/migrations/versions/016_extra_transaction_data.py @@ -7,17 +7,18 @@ """ # revision identifiers, used by Alembic. -revision = '5093433b073' -down_revision = '3fee2f161614' +revision = "5093433b073" +down_revision = "3fee2f161614" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('transaction', sa.Column('additional_data', sa.Text(4194304), - nullable=True)) + op.add_column( + "transaction", sa.Column("additional_data", sa.Text(4194304), nullable=True) + ) def downgrade(): - op.drop_column('transaction', 'additional_data') + op.drop_column("transaction", "additional_data") diff --git a/migrations/versions/017_haspublicid.py b/migrations/versions/017_haspublicid.py index 846c5f25d..3329ba7c0 100644 --- a/migrations/versions/017_haspublicid.py +++ b/migrations/versions/017_haspublicid.py @@ -9,8 +9,8 @@ from __future__ import division # revision identifiers, used by Alembic. 
-revision = '2c9f3a06de09' -down_revision = '5093433b073' +revision = "2c9f3a06de09" +down_revision = "5093433b073" import sys from gc import collect as garbage_collect @@ -29,75 +29,111 @@ def upgrade(): # These all inherit HasPublicID from inbox.models import ( - Account, Block, Contact, Message, Namespace, - SharedFolder, Thread, User, UserSession, HasPublicID) + Account, + Block, + Contact, + Message, + Namespace, + SharedFolder, + Thread, + User, + UserSession, + HasPublicID, + ) classes = [ - Account, Block, Contact, Message, Namespace, - SharedFolder, Thread, User, UserSession] + Account, + Block, + Contact, + Message, + Namespace, + SharedFolder, + Thread, + User, + UserSession, + ] for c in classes: assert issubclass(c, HasPublicID) - print '[{0}] adding public_id column... '.format(c.__tablename__), + print "[{0}] adding public_id column... ".format(c.__tablename__), sys.stdout.flush() - op.add_column(c.__tablename__, sa.Column( - 'public_id', mysql.BINARY(16), nullable=False)) + op.add_column( + c.__tablename__, sa.Column("public_id", mysql.BINARY(16), nullable=False) + ) - print 'adding index... ', + print "adding index... ", op.create_index( - 'ix_{0}_public_id'.format(c.__tablename__), + "ix_{0}_public_id".format(c.__tablename__), c.__tablename__, - ['public_id'], - unique=False) + ["public_id"], + unique=False, + ) - print 'Done!' + print "Done!" sys.stdout.flush() - print 'Finished adding columns. \nNow generating public_ids' + print "Finished adding columns. \nNow generating public_ids" with session_scope() as db_session: count = 0 for c in classes: garbage_collect() - print '[{0}] Loading rows. '.format(c.__name__), + print "[{0}] Loading rows. ".format(c.__name__), sys.stdout.flush() - print 'Generating public_ids', + print "Generating public_ids", sys.stdout.flush() for r in db_session.query(c).yield_per(chunk_size): count += 1 r.public_id = generate_public_id() if not count % chunk_size: - sys.stdout.write('.') + sys.stdout.write(".") sys.stdout.flush() db_session.commit() garbage_collect() - sys.stdout.write(' Saving. '.format(c.__name__)), + sys.stdout.write(" Saving. ".format(c.__name__)), # sys.stdout.flush() sys.stdout.flush() db_session.commit() - sys.stdout.write('Done!\n') + sys.stdout.write("Done!\n") sys.stdout.flush() - print '\nUpdgraded OK!\n' + print "\nUpdgraded OK!\n" def downgrade(): # These all inherit HasPublicID from inbox.models import ( - Account, Block, Contact, Message, Namespace, - SharedFolder, Thread, User, UserSession, HasPublicID) + Account, + Block, + Contact, + Message, + Namespace, + SharedFolder, + Thread, + User, + UserSession, + HasPublicID, + ) classes = [ - Account, Block, Contact, Message, Namespace, - SharedFolder, Thread, User, UserSession] + Account, + Block, + Contact, + Message, + Namespace, + SharedFolder, + Thread, + User, + UserSession, + ] for c in classes: assert issubclass(c, HasPublicID) - print '[{0}] Dropping public_id column... '.format(c.__tablename__), - op.drop_column(c.__tablename__, 'public_id') + print "[{0}] Dropping public_id column... ".format(c.__tablename__), + op.drop_column(c.__tablename__, "public_id") - print 'Dropping index... ', + print "Dropping index... ", op.drop_index( - 'ix_{0}_public_id'.format(c.__tablename__), - table_name=c.__tablename__) + "ix_{0}_public_id".format(c.__tablename__), table_name=c.__tablename__ + ) - print 'Done.' + print "Done." 
diff --git a/migrations/versions/018_message_contact_association.py b/migrations/versions/018_message_contact_association.py index 952df9657..146adfe72 100644 --- a/migrations/versions/018_message_contact_association.py +++ b/migrations/versions/018_message_contact_association.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '223041bb858b' -down_revision = '2c9f3a06de09' +revision = "223041bb858b" +down_revision = "2c9f3a06de09" from alembic import op @@ -17,26 +17,30 @@ def upgrade(): op.create_table( - 'messagecontactassociation', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('contact_id', sa.Integer(), nullable=False), - sa.Column('message_id', sa.Integer(), nullable=False), - sa.Column('field', - sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'), - nullable=True), - sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ), - sa.ForeignKeyConstraint(['message_id'], ['message.id'], ), - sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id') + "messagecontactassociation", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("contact_id", sa.Integer(), nullable=False), + sa.Column("message_id", sa.Integer(), nullable=False), + sa.Column( + "field", + sa.Enum("from_addr", "to_addr", "cc_addr", "bcc_addr"), + nullable=True, + ), + sa.ForeignKeyConstraint(["contact_id"], ["contact.id"],), + sa.ForeignKeyConstraint(["message_id"], ["message.id"],), + sa.PrimaryKeyConstraint("id", "contact_id", "message_id"), ) # Yes, this is a terrible hack. But tools/rerank_contacts.py already # contains a script to process contacts from messages, so it's very # expedient. import sys - sys.path.append('./tools') + + sys.path.append("./tools") from rerank_contacts import rerank_contacts + rerank_contacts() def downgrade(): - op.drop_table('messagecontactassociation') + op.drop_table("messagecontactassociation") diff --git a/migrations/versions/019_blocks_to_parts.py b/migrations/versions/019_blocks_to_parts.py index 609840141..ab11af1d4 100644 --- a/migrations/versions/019_blocks_to_parts.py +++ b/migrations/versions/019_blocks_to_parts.py @@ -8,8 +8,8 @@ """ # revision identifiers, used by Alembic. -revision = '5a787816e2bc' -down_revision = '223041bb858b' +revision = "5a787816e2bc" +down_revision = "223041bb858b" from alembic import op import sqlalchemy as sa @@ -22,44 +22,46 @@ def upgrade(): from inbox.models.session import session_scope, Session from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - from inbox.models import (Part, Namespace, Message, Thread) + from inbox.models import Part, Namespace, Message, Thread from inbox.sqlalchemy_ext.util import JSON - print 'Creating table for parts...' 
- op.create_table('part', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('message_id', sa.Integer(), nullable=True), - sa.Column('walk_index', sa.Integer(), nullable=True), - sa.Column('content_disposition', sa.Enum( - 'inline', 'attachment'), nullable=True), - sa.Column( - 'content_id', sa.String(length=255), nullable=True), - sa.Column('misc_keyval', JSON(), nullable=True), - sa.Column('is_inboxapp_attachment', sa.Boolean(), - server_default=sa.sql.expression.false(), - nullable=True), - sa.ForeignKeyConstraint( - ['id'], ['block.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint( - ['message_id'], ['message.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('message_id', 'walk_index') - ) - - print 'Reflecting old block table schema' + print "Creating table for parts..." + op.create_table( + "part", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("message_id", sa.Integer(), nullable=True), + sa.Column("walk_index", sa.Integer(), nullable=True), + sa.Column( + "content_disposition", sa.Enum("inline", "attachment"), nullable=True + ), + sa.Column("content_id", sa.String(length=255), nullable=True), + sa.Column("misc_keyval", JSON(), nullable=True), + sa.Column( + "is_inboxapp_attachment", + sa.Boolean(), + server_default=sa.sql.expression.false(), + nullable=True, + ), + sa.ForeignKeyConstraint(["id"], ["block.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["message_id"], ["message.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("message_id", "walk_index"), + ) + + print "Reflecting old block table schema" Base = declarative_base() Base.metadata.reflect(engine) class Block_(Base): # old schema, reflected from database table - __table__ = Base.metadata.tables['block'] + __table__ = Base.metadata.tables["block"] - print 'Adding namespace_id column to blocks ', - op.add_column( - u'block', sa.Column('namespace_id', sa.Integer(), nullable=False)) + print "Adding namespace_id column to blocks ", + op.add_column(u"block", sa.Column("namespace_id", sa.Integer(), nullable=False)) - print 'Migrating from blocks to parts' + print "Migrating from blocks to parts" new_parts = [] with session_scope() as db_session: for block in db_session.query(Block_).yield_per(chunk_size): @@ -75,34 +77,43 @@ class Block_(Base): # old schema, reflected from database table p.misc_keyval = block.misc_keyval p.is_inboxapp_attachment - old_namespace = db_session.query(Namespace) \ - .join(Message.thread, Thread.namespace) \ - .filter(Message.id == block.message_id).one() + old_namespace = ( + db_session.query(Namespace) + .join(Message.thread, Thread.namespace) + .filter(Message.id == block.message_id) + .one() + ) p.namespace_id = old_namespace.id # Commit after column modifications new_parts.append(p) - print 'Deleting old blocks (now parts)... ', + print "Deleting old blocks (now parts)... ", db_session.query(Block_).delete() db_session.commit() - print 'Done!' 
- - print 'Removing `message_id` constraint from block' - op.drop_constraint('block_ibfk_1', 'block', type_='foreignkey') - - print 'Creating foreign key for block -> namespace on block' - op.create_foreign_key('block_ibfk_1', 'block', 'namespace', - ['namespace_id'], ['id'], ondelete='CASCADE') - - print 'Dropping old block columns which are now in part' - op.drop_column(u'block', u'walk_index') - op.drop_column(u'block', u'content_disposition') - op.drop_column(u'block', u'misc_keyval') - op.drop_column(u'block', u'content_id') - op.drop_column(u'block', u'is_inboxapp_attachment') - op.drop_constraint(u'message_id', 'block', type_='unique') - op.drop_column(u'block', u'message_id') + print "Done!" + + print "Removing `message_id` constraint from block" + op.drop_constraint("block_ibfk_1", "block", type_="foreignkey") + + print "Creating foreign key for block -> namespace on block" + op.create_foreign_key( + "block_ibfk_1", + "block", + "namespace", + ["namespace_id"], + ["id"], + ondelete="CASCADE", + ) + + print "Dropping old block columns which are now in part" + op.drop_column(u"block", u"walk_index") + op.drop_column(u"block", u"content_disposition") + op.drop_column(u"block", u"misc_keyval") + op.drop_column(u"block", u"content_id") + op.drop_column(u"block", u"is_inboxapp_attachment") + op.drop_constraint(u"message_id", "block", type_="unique") + op.drop_column(u"block", u"message_id") # Note: here we use the regular database session, since the transaction # log requires the `namespace` property on objects. We've set the @@ -112,7 +123,7 @@ class Block_(Base): # old schema, reflected from database table no_tx_session.add_all(new_parts) no_tx_session.commit() - print 'Done migration blocks to parts!' + print "Done migration blocks to parts!" def downgrade(): diff --git a/migrations/versions/020_store_webhook_parameters.py b/migrations/versions/020_store_webhook_parameters.py index b8c63d908..cd626049e 100644 --- a/migrations/versions/020_store_webhook_parameters.py +++ b/migrations/versions/020_store_webhook_parameters.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '10ef1d46f016' -down_revision = '5a787816e2bc' +revision = "10ef1d46f016" +down_revision = "5a787816e2bc" from alembic import op import sqlalchemy as sa @@ -17,43 +17,45 @@ def upgrade(): op.create_table( - 'webhookparameters', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('public_id', mysql.BINARY(16), nullable=False), - sa.Column('namespace_id', sa.Integer(), nullable=False), - sa.Column('callback_url', sa.Text(), nullable=False), - sa.Column('failure_notify_url', sa.Text(), nullable=True), - sa.Column('to_addr', sa.String(length=255), nullable=True), - sa.Column('from_addr', sa.String(length=255), nullable=True), - sa.Column('cc_addr', sa.String(length=255), nullable=True), - sa.Column('bcc_addr', sa.String(length=255), nullable=True), - sa.Column('email', sa.String(length=255), nullable=True), - sa.Column('subject', sa.String(length=255), nullable=True), - sa.Column('thread', mysql.BINARY(16), nullable=True), - sa.Column('filename', sa.String(length=255), nullable=True), - sa.Column('started_before', sa.DateTime(), nullable=True), - sa.Column('started_after', sa.DateTime(), nullable=True), - sa.Column('last_message_before', sa.DateTime(), nullable=True), - sa.Column('last_message_after', sa.DateTime(), nullable=True), - sa.Column('include_body', sa.Boolean(), nullable=False), - sa.Column('max_retries', sa.Integer(), server_default='3', - nullable=False), - sa.Column('retry_interval', sa.Integer(), server_default='60', - nullable=False), - sa.Column('active', sa.Boolean(), - server_default=sa.sql.expression.true(), - nullable=False), - sa.Column('min_processed_id', sa.Integer(), server_default='0', - nullable=False), - sa.ForeignKeyConstraint(['namespace_id'], ['namespace.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') + "webhookparameters", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("public_id", mysql.BINARY(16), nullable=False), + sa.Column("namespace_id", sa.Integer(), nullable=False), + sa.Column("callback_url", sa.Text(), nullable=False), + sa.Column("failure_notify_url", sa.Text(), nullable=True), + sa.Column("to_addr", sa.String(length=255), nullable=True), + sa.Column("from_addr", sa.String(length=255), nullable=True), + sa.Column("cc_addr", sa.String(length=255), nullable=True), + sa.Column("bcc_addr", sa.String(length=255), nullable=True), + sa.Column("email", sa.String(length=255), nullable=True), + sa.Column("subject", sa.String(length=255), nullable=True), + sa.Column("thread", mysql.BINARY(16), nullable=True), + sa.Column("filename", sa.String(length=255), nullable=True), + sa.Column("started_before", sa.DateTime(), nullable=True), + sa.Column("started_after", sa.DateTime(), nullable=True), + sa.Column("last_message_before", sa.DateTime(), nullable=True), + sa.Column("last_message_after", sa.DateTime(), nullable=True), + sa.Column("include_body", sa.Boolean(), nullable=False), + sa.Column("max_retries", sa.Integer(), server_default="3", nullable=False), + sa.Column("retry_interval", sa.Integer(), server_default="60", nullable=False), + sa.Column( + "active", + sa.Boolean(), + server_default=sa.sql.expression.true(), + nullable=False, + ), + sa.Column("min_processed_id", sa.Integer(), server_default="0", nullable=False), + sa.ForeignKeyConstraint(["namespace_id"], ["namespace.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + "ix_webhookparameters_public_id", + "webhookparameters", + ["public_id"], + unique=False, ) - op.create_index('ix_webhookparameters_public_id', 
'webhookparameters', - ['public_id'], unique=False) def downgrade(): - op.drop_index('ix_webhookparameters_public_id', - table_name='webhookparameters') - op.drop_table('webhookparameters') + op.drop_index("ix_webhookparameters_public_id", table_name="webhookparameters") + op.drop_table("webhookparameters") diff --git a/migrations/versions/021_add_references_column_to_message_table.py b/migrations/versions/021_add_references_column_to_message_table.py index be4b8b39a..eae41716b 100644 --- a/migrations/versions/021_add_references_column_to_message_table.py +++ b/migrations/versions/021_add_references_column_to_message_table.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '4fd291c6940c' -down_revision = '10ef1d46f016' +revision = "4fd291c6940c" +down_revision = "10ef1d46f016" from alembic import op import sqlalchemy as sa @@ -17,8 +17,8 @@ def upgrade(): from inbox.sqlalchemy_ext.util import JSON - op.add_column('message', sa.Column('references', JSON, nullable=True)) + op.add_column("message", sa.Column("references", JSON, nullable=True)) def downgrade(): - op.drop_column('message', 'references') + op.drop_column("message", "references") diff --git a/migrations/versions/022_store_imapuid_msg_uid_as_biginteger_.py b/migrations/versions/022_store_imapuid_msg_uid_as_biginteger_.py index 717632375..34d9884b1 100644 --- a/migrations/versions/022_store_imapuid_msg_uid_as_biginteger_.py +++ b/migrations/versions/022_store_imapuid_msg_uid_as_biginteger_.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '519e462df171' -down_revision = '4fd291c6940c' +revision = "519e462df171" +down_revision = "4fd291c6940c" from alembic import op import sqlalchemy as sa @@ -16,8 +16,8 @@ def upgrade(): - op.alter_column('imapuid', 'msg_uid', type_=mysql.BIGINT) + op.alter_column("imapuid", "msg_uid", type_=mysql.BIGINT) def downgrade(): - op.alter_column('imapuid', 'msg_uid', type_=sa.Integer) + op.alter_column("imapuid", "msg_uid", type_=sa.Integer) diff --git a/migrations/versions/022_webhooks_and_filters.py b/migrations/versions/022_webhooks_and_filters.py index cfe618d57..3cf06d9c5 100644 --- a/migrations/versions/022_webhooks_and_filters.py +++ b/migrations/versions/022_webhooks_and_filters.py @@ -10,8 +10,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '2c313b6ddd9b' -down_revision = '519e462df171' +revision = "2c313b6ddd9b" +down_revision = "519e462df171" from alembic import op import sqlalchemy as sa @@ -21,73 +21,65 @@ def upgrade(): from inbox.sqlalchemy_ext.util import Base36UID print "Rename WebhookParameters -> Webhook" - op.rename_table('webhookparameters', 'webhook') + op.rename_table("webhookparameters", "webhook") - op.drop_index('ix_webhookparameters_public_id', table_name='webhook') + op.drop_index("ix_webhookparameters_public_id", table_name="webhook") op.create_index( - 'ix_webhook_namespace_id', 'webhook', ['namespace_id'], unique=False) - op.create_index( - 'ix_webhook_public_id', 'webhook', ['public_id'], unique=False) - op.create_foreign_key('webhooks_ibfk_1', 'webhook', 'namespace', - ['namespace_id'], ['id'], ondelete='CASCADE') - - print 'Creating Lens' - op.create_table('lens', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column( - 'public_id', Base36UID(length=16), nullable=False), - sa.Column('namespace_id', sa.Integer(), nullable=False), - sa.Column('subject', sa.String(length=255), nullable=True), - sa.Column( - 'thread_public_id', - Base36UID(length=16), - nullable=True), - sa.Column('started_before', sa.DateTime(), nullable=True), - sa.Column('started_after', sa.DateTime(), nullable=True), - sa.Column( - 'last_message_before', sa.DateTime(), nullable=True), - sa.Column( - 'last_message_after', sa.DateTime(), nullable=True), - sa.Column( - 'any_email', sa.String(length=255), nullable=True), - sa.Column('to_addr', sa.String(length=255), nullable=True), - sa.Column( - 'from_addr', sa.String(length=255), nullable=True), - sa.Column('cc_addr', sa.String(length=255), nullable=True), - sa.Column( - 'bcc_addr', sa.String(length=255), nullable=True), - sa.Column( - 'filename', sa.String(length=255), nullable=True), - sa.Column('tag', sa.String(length=255), nullable=True), - sa.ForeignKeyConstraint( - ['namespace_id'], - ['namespace.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - - op.create_index( - 'ix_lens_namespace_id', 'lens', ['namespace_id'], unique=False) - op.create_index('ix_lens_public_id', 'lens', ['public_id'], unique=False) - - print 'Removing old webhooks' - op.add_column( - u'webhook', sa.Column('lens_id', sa.Integer(), nullable=False)) - - op.drop_column(u'webhook', u'last_message_after') - op.drop_column(u'webhook', u'last_message_before') - op.drop_column(u'webhook', u'thread') - op.drop_column(u'webhook', u'from_addr') - op.drop_column(u'webhook', u'started_after') - op.drop_column(u'webhook', u'to_addr') - op.drop_column(u'webhook', u'filename') - op.drop_column(u'webhook', u'bcc_addr') - op.drop_column(u'webhook', u'cc_addr') - op.drop_column(u'webhook', u'started_before') - op.drop_column(u'webhook', u'email') - op.drop_column(u'webhook', u'subject') - - op.create_index('ix_webhook_lens_id', 'webhook', ['lens_id'], unique=False) + "ix_webhook_namespace_id", "webhook", ["namespace_id"], unique=False + ) + op.create_index("ix_webhook_public_id", "webhook", ["public_id"], unique=False) + op.create_foreign_key( + "webhooks_ibfk_1", + "webhook", + "namespace", + ["namespace_id"], + ["id"], + ondelete="CASCADE", + ) + + print "Creating Lens" + op.create_table( + "lens", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("public_id", Base36UID(length=16), nullable=False), + sa.Column("namespace_id", sa.Integer(), nullable=False), + sa.Column("subject", sa.String(length=255), nullable=True), + sa.Column("thread_public_id", Base36UID(length=16), 
nullable=True), + sa.Column("started_before", sa.DateTime(), nullable=True), + sa.Column("started_after", sa.DateTime(), nullable=True), + sa.Column("last_message_before", sa.DateTime(), nullable=True), + sa.Column("last_message_after", sa.DateTime(), nullable=True), + sa.Column("any_email", sa.String(length=255), nullable=True), + sa.Column("to_addr", sa.String(length=255), nullable=True), + sa.Column("from_addr", sa.String(length=255), nullable=True), + sa.Column("cc_addr", sa.String(length=255), nullable=True), + sa.Column("bcc_addr", sa.String(length=255), nullable=True), + sa.Column("filename", sa.String(length=255), nullable=True), + sa.Column("tag", sa.String(length=255), nullable=True), + sa.ForeignKeyConstraint(["namespace_id"], ["namespace.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + + op.create_index("ix_lens_namespace_id", "lens", ["namespace_id"], unique=False) + op.create_index("ix_lens_public_id", "lens", ["public_id"], unique=False) + + print "Removing old webhooks" + op.add_column(u"webhook", sa.Column("lens_id", sa.Integer(), nullable=False)) + + op.drop_column(u"webhook", u"last_message_after") + op.drop_column(u"webhook", u"last_message_before") + op.drop_column(u"webhook", u"thread") + op.drop_column(u"webhook", u"from_addr") + op.drop_column(u"webhook", u"started_after") + op.drop_column(u"webhook", u"to_addr") + op.drop_column(u"webhook", u"filename") + op.drop_column(u"webhook", u"bcc_addr") + op.drop_column(u"webhook", u"cc_addr") + op.drop_column(u"webhook", u"started_before") + op.drop_column(u"webhook", u"email") + op.drop_column(u"webhook", u"subject") + + op.create_index("ix_webhook_lens_id", "webhook", ["lens_id"], unique=False) def downgrade(): diff --git a/migrations/versions/023_tighten_nullable_constraints_on_.py b/migrations/versions/023_tighten_nullable_constraints_on_.py index 9dd72b504..9d001522d 100644 --- a/migrations/versions/023_tighten_nullable_constraints_on_.py +++ b/migrations/versions/023_tighten_nullable_constraints_on_.py @@ -10,8 +10,8 @@ """ # revision identifiers, used by Alembic. -revision = '4e04f752b7ad' -down_revision = '2c313b6ddd9b' +revision = "4e04f752b7ad" +down_revision = "2c313b6ddd9b" from alembic import op @@ -23,29 +23,28 @@ def upgrade(): from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) class ImapUid(Base): - __table__ = Base.metadata.tables['imapuid'] + __table__ = Base.metadata.tables["imapuid"] - print 'Deleting imapuid objects with NULL message_id...' + print "Deleting imapuid objects with NULL message_id..." with session_scope(versioned=False) as session: session.query(ImapUid).filter_by(message_id=None).delete() session.commit() - print 'Tightening NULL constraints...' + print "Tightening NULL constraints..." 
- op.alter_column('imapuid', 'message_id', existing_type=sa.Integer(), - nullable=False) + op.alter_column("imapuid", "message_id", existing_type=sa.Integer(), nullable=False) # unrelated to current bugs, but no reason this should be NULLable either - op.alter_column('imapuid', 'msg_uid', existing_type=sa.BigInteger(), - nullable=False) + op.alter_column("imapuid", "msg_uid", existing_type=sa.BigInteger(), nullable=False) def downgrade(): - op.alter_column('imapuid', 'message_id', nullable=True) - op.alter_column('imapuid', 'msg_uid', nullable=True) + op.alter_column("imapuid", "message_id", nullable=True) + op.alter_column("imapuid", "msg_uid", nullable=True) diff --git a/migrations/versions/024_remote_folders_and_inbox_tags_split.py b/migrations/versions/024_remote_folders_and_inbox_tags_split.py index 8a351c1c9..3034110de 100644 --- a/migrations/versions/024_remote_folders_and_inbox_tags_split.py +++ b/migrations/versions/024_remote_folders_and_inbox_tags_split.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '4c1eb89f6bed' -down_revision = '4e04f752b7ad' +revision = "4c1eb89f6bed" +down_revision = "4e04f752b7ad" from alembic import op import sqlalchemy as sa @@ -21,194 +21,207 @@ CHUNK_SIZE = 250 # This is a bit of a hack (english locale only), but good enough for now. -folder_name_subst_map = {'archive': u'[Gmail]/All Mail', - 'drafts': u'[Gmail]/Drafts', - 'draft': u'[Gmail]/Drafts', - 'important': u'[Gmail]/Important', - 'inbox': u'Inbox', - 'INBOX': u'Inbox', - 'sent': u'[Gmail]/Sent Mail', - 'spam': u'[Gmail]/Spam', - 'starred': u'[Gmail]/Starred', - 'trash': u'[Gmail]/Trash'} +folder_name_subst_map = { + "archive": u"[Gmail]/All Mail", + "drafts": u"[Gmail]/Drafts", + "draft": u"[Gmail]/Drafts", + "important": u"[Gmail]/Important", + "inbox": u"Inbox", + "INBOX": u"Inbox", + "sent": u"[Gmail]/Sent Mail", + "spam": u"[Gmail]/Spam", + "starred": u"[Gmail]/Starred", + "trash": u"[Gmail]/Trash", +} def upgrade(): easupdate = False - print 'Creating new tables and columns...' 
- op.create_table('folder', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('account_id', sa.Integer(), nullable=False), - sa.Column('name', sa.String( - length=191, collation='utf8mb4_general_ci'), - nullable=True), - sa.ForeignKeyConstraint(['account_id'], ['account.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('account_id', 'name') - ) - op.create_table('internaltag', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('public_id', mysql.BINARY(16), nullable=False), - sa.Column('namespace_id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=191), nullable=False), - sa.Column('thread_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['namespace_id'], ['namespace.id'], - ondelete='CASCADE'), - sa.ForeignKeyConstraint(['thread_id'], ['thread.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('namespace_id', 'name') - ) - op.add_column('folderitem', - sa.Column('folder_id', sa.Integer(), nullable=True)) - op.create_foreign_key("fk_folder_id", "folderitem", - "folder", ["folder_id"], ["id"], - ondelete='CASCADE') - - op.add_column('account', sa.Column('inbox_folder_id', - sa.Integer, nullable=True)) - op.add_column('account', sa.Column('sent_folder_id', - sa.Integer, nullable=True)) - op.add_column('account', sa.Column('drafts_folder_id', - sa.Integer, nullable=True)) - op.add_column('account', sa.Column('spam_folder_id', - sa.Integer, nullable=True)) - op.add_column('account', sa.Column('trash_folder_id', - sa.Integer, nullable=True)) - op.add_column('account', sa.Column('archive_folder_id', - sa.Integer, nullable=True)) - op.add_column('account', sa.Column('all_folder_id', - sa.Integer, nullable=True)) - op.add_column('account', sa.Column('starred_folder_id', - sa.Integer, nullable=True)) - op.create_foreign_key('account_ibfk_2', 'account', 'folder', - ['inbox_folder_id'], ['id']) - op.create_foreign_key('account_ibfk_3', 'account', 'folder', - ['sent_folder_id'], ['id']) - op.create_foreign_key('account_ibfk_4', 'account', 'folder', - ['drafts_folder_id'], ['id']) - op.create_foreign_key('account_ibfk_5', 'account', 'folder', - ['spam_folder_id'], ['id']) - op.create_foreign_key('account_ibfk_6', 'account', 'folder', - ['trash_folder_id'], ['id']) - op.create_foreign_key('account_ibfk_7', 'account', 'folder', - ['archive_folder_id'], ['id']) - op.create_foreign_key('account_ibfk_8', 'account', 'folder', - ['all_folder_id'], ['id']) - op.create_foreign_key('account_ibfk_9', 'account', 'folder', - ['starred_folder_id'], ['id']) - - op.add_column('imapuid', sa.Column('folder_id', sa.Integer, nullable=True)) - op.create_foreign_key('imapuid_ibfk_3', 'imapuid', 'folder', - ['folder_id'], ['id']) + print "Creating new tables and columns..." 
+ op.create_table( + "folder", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("account_id", sa.Integer(), nullable=False), + sa.Column( + "name", sa.String(length=191, collation="utf8mb4_general_ci"), nullable=True + ), + sa.ForeignKeyConstraint(["account_id"], ["account.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("account_id", "name"), + ) + op.create_table( + "internaltag", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("public_id", mysql.BINARY(16), nullable=False), + sa.Column("namespace_id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(length=191), nullable=False), + sa.Column("thread_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["namespace_id"], ["namespace.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["thread_id"], ["thread.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("namespace_id", "name"), + ) + op.add_column("folderitem", sa.Column("folder_id", sa.Integer(), nullable=True)) + op.create_foreign_key( + "fk_folder_id", + "folderitem", + "folder", + ["folder_id"], + ["id"], + ondelete="CASCADE", + ) + + op.add_column("account", sa.Column("inbox_folder_id", sa.Integer, nullable=True)) + op.add_column("account", sa.Column("sent_folder_id", sa.Integer, nullable=True)) + op.add_column("account", sa.Column("drafts_folder_id", sa.Integer, nullable=True)) + op.add_column("account", sa.Column("spam_folder_id", sa.Integer, nullable=True)) + op.add_column("account", sa.Column("trash_folder_id", sa.Integer, nullable=True)) + op.add_column("account", sa.Column("archive_folder_id", sa.Integer, nullable=True)) + op.add_column("account", sa.Column("all_folder_id", sa.Integer, nullable=True)) + op.add_column("account", sa.Column("starred_folder_id", sa.Integer, nullable=True)) + op.create_foreign_key( + "account_ibfk_2", "account", "folder", ["inbox_folder_id"], ["id"] + ) + op.create_foreign_key( + "account_ibfk_3", "account", "folder", ["sent_folder_id"], ["id"] + ) + op.create_foreign_key( + "account_ibfk_4", "account", "folder", ["drafts_folder_id"], ["id"] + ) + op.create_foreign_key( + "account_ibfk_5", "account", "folder", ["spam_folder_id"], ["id"] + ) + op.create_foreign_key( + "account_ibfk_6", "account", "folder", ["trash_folder_id"], ["id"] + ) + op.create_foreign_key( + "account_ibfk_7", "account", "folder", ["archive_folder_id"], ["id"] + ) + op.create_foreign_key( + "account_ibfk_8", "account", "folder", ["all_folder_id"], ["id"] + ) + op.create_foreign_key( + "account_ibfk_9", "account", "folder", ["starred_folder_id"], ["id"] + ) + + op.add_column("imapuid", sa.Column("folder_id", sa.Integer, nullable=True)) + op.create_foreign_key("imapuid_ibfk_3", "imapuid", "folder", ["folder_id"], ["id"]) from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) - if 'easuid' in Base.metadata.tables: + if "easuid" in Base.metadata.tables: easupdate = True - print 'Adding new EASUid columns...' + print "Adding new EASUid columns..." 
- op.add_column('easuid', - sa.Column('fld_uid', sa.Integer(), nullable=True)) + op.add_column("easuid", sa.Column("fld_uid", sa.Integer(), nullable=True)) - op.add_column('easuid', - sa.Column('folder_id', sa.Integer(), nullable=True)) + op.add_column("easuid", sa.Column("folder_id", sa.Integer(), nullable=True)) - op.create_foreign_key('easuid_ibfk_3', 'easuid', 'folder', - ['folder_id'], ['id']) + op.create_foreign_key( + "easuid_ibfk_3", "easuid", "folder", ["folder_id"], ["id"] + ) op.create_unique_constraint( - 'uq_easuid_folder_id_msg_uid_easaccount_id', - 'easuid', - ['folder_id', 'msg_uid', 'easaccount_id']) + "uq_easuid_folder_id_msg_uid_easaccount_id", + "easuid", + ["folder_id", "msg_uid", "easaccount_id"], + ) - op.create_index('easuid_easaccount_id_folder_id', 'easuid', - ['easaccount_id', 'folder_id']) + op.create_index( + "easuid_easaccount_id_folder_id", "easuid", ["easaccount_id", "folder_id"] + ) # Include our changes to the EASUid table: Base = declarative_base() Base.metadata.reflect(engine) class Folder(Base): - __table__ = Base.metadata.tables['folder'] - account = relationship('Account', foreign_keys='Folder.account_id', - backref='folders') + __table__ = Base.metadata.tables["folder"] + account = relationship( + "Account", foreign_keys="Folder.account_id", backref="folders" + ) class FolderItem(Base): - __table__ = Base.metadata.tables['folderitem'] - folder = relationship('Folder', backref='threads', lazy='joined') + __table__ = Base.metadata.tables["folderitem"] + folder = relationship("Folder", backref="threads", lazy="joined") class Thread(Base): - __table__ = Base.metadata.tables['thread'] - folderitems = relationship('FolderItem', backref="thread", - single_parent=True, - cascade='all, delete, delete-orphan') - namespace = relationship('Namespace', backref='threads') + __table__ = Base.metadata.tables["thread"] + folderitems = relationship( + "FolderItem", + backref="thread", + single_parent=True, + cascade="all, delete, delete-orphan", + ) + namespace = relationship("Namespace", backref="threads") class Namespace(Base): - __table__ = Base.metadata.tables['namespace'] - account = relationship('Account', - backref=backref('namespace', uselist=False)) + __table__ = Base.metadata.tables["namespace"] + account = relationship("Account", backref=backref("namespace", uselist=False)) class Account(Base): - __table__ = Base.metadata.tables['account'] - inbox_folder = relationship('Folder', - foreign_keys='Account.inbox_folder_id') - sent_folder = relationship('Folder', - foreign_keys='Account.sent_folder_id') - drafts_folder = relationship('Folder', - foreign_keys='Account.drafts_folder_id') - spam_folder = relationship('Folder', - foreign_keys='Account.spam_folder_id') - trash_folder = relationship('Folder', - foreign_keys='Account.trash_folder_id') - starred_folder = relationship('Folder', - foreign_keys='Account.starred_folder_id') - archive_folder = relationship('Folder', - foreign_keys='Account.archive_folder_id') - all_folder = relationship('Folder', - foreign_keys='Account.all_folder_id') + __table__ = Base.metadata.tables["account"] + inbox_folder = relationship("Folder", foreign_keys="Account.inbox_folder_id") + sent_folder = relationship("Folder", foreign_keys="Account.sent_folder_id") + drafts_folder = relationship("Folder", foreign_keys="Account.drafts_folder_id") + spam_folder = relationship("Folder", foreign_keys="Account.spam_folder_id") + trash_folder = relationship("Folder", foreign_keys="Account.trash_folder_id") + starred_folder = relationship( + 
"Folder", foreign_keys="Account.starred_folder_id" + ) + archive_folder = relationship( + "Folder", foreign_keys="Account.archive_folder_id" + ) + all_folder = relationship("Folder", foreign_keys="Account.all_folder_id") class ImapUid(Base): - __table__ = Base.metadata.tables['imapuid'] - folder = relationship('Folder', backref='imapuids', lazy='joined') + __table__ = Base.metadata.tables["imapuid"] + folder = relationship("Folder", backref="imapuids", lazy="joined") if easupdate: - class EASUid(Base): - __table__ = Base.metadata.tables['easuid'] - folder = relationship('Folder', foreign_keys='EASUid.folder_id', - backref='easuids', lazy='joined') - print 'Creating Folder rows and migrating FolderItems...' + class EASUid(Base): + __table__ = Base.metadata.tables["easuid"] + folder = relationship( + "Folder", + foreign_keys="EASUid.folder_id", + backref="easuids", + lazy="joined", + ) + + print "Creating Folder rows and migrating FolderItems..." # not many folders per account, so shouldn't grow that big with session_scope(versioned=False) as db_session: - folders = dict([((i.account_id, i.name), i) - for i in db_session.query(Folder).all()]) + folders = dict( + [((i.account_id, i.name), i) for i in db_session.query(Folder).all()] + ) count = 0 - for folderitem in db_session.query(FolderItem).join(Thread).join( - Namespace).yield_per(CHUNK_SIZE): + for folderitem in ( + db_session.query(FolderItem) + .join(Thread) + .join(Namespace) + .yield_per(CHUNK_SIZE) + ): account_id = folderitem.thread.namespace.account_id - if folderitem.thread.namespace.account.provider == 'gmail': + if folderitem.thread.namespace.account.provider == "gmail": if folderitem.folder_name in folder_name_subst_map: - new_folder_name = folder_name_subst_map[ - folderitem.folder_name] + new_folder_name = folder_name_subst_map[folderitem.folder_name] else: new_folder_name = folderitem.folder_name - elif folderitem.thread.namespace.account.provider == 'eas': + elif folderitem.thread.namespace.account.provider == "eas": new_folder_name = folderitem.folder_name.title() if (account_id, new_folder_name) in folders: f = folders[(account_id, new_folder_name)] else: - f = Folder(account_id=account_id, - name=new_folder_name) + f = Folder(account_id=account_id, name=new_folder_name) folders[(account_id, new_folder_name)] = f folderitem.folder = f count += 1 @@ -217,7 +230,7 @@ class EASUid(Base): count = 0 db_session.commit() - print 'Migrating ImapUids to reference Folder rows...' + print "Migrating ImapUids to reference Folder rows..." for imapuid in db_session.query(ImapUid).yield_per(CHUNK_SIZE): account_id = imapuid.imapaccount_id if imapuid.folder_name in folder_name_subst_map: @@ -227,8 +240,7 @@ class EASUid(Base): if (account_id, new_folder_name) in folders: f = folders[(account_id, new_folder_name)] else: - f = Folder(account_id=account_id, - name=new_folder_name) + f = Folder(account_id=account_id, name=new_folder_name) folders[(account_id, new_folder_name)] = f imapuid.folder = f count += 1 @@ -238,7 +250,7 @@ class EASUid(Base): db_session.commit() if easupdate: - print 'Migrating EASUids to reference Folder rows...' + print "Migrating EASUids to reference Folder rows..." 
for easuid in db_session.query(EASUid).yield_per(CHUNK_SIZE): account_id = easuid.easaccount_id @@ -247,8 +259,7 @@ class EASUid(Base): if (account_id, new_folder_name) in folders: f = folders[(account_id, new_folder_name)] else: - f = Folder(account_id=account_id, - name=new_folder_name) + f = Folder(account_id=account_id, name=new_folder_name) folders[(account_id, new_folder_name)] = f easuid.folder = f count += 1 @@ -257,33 +268,34 @@ class EASUid(Base): count = 0 db_session.commit() - print 'Migrating *_folder_name fields to reference Folder rows...' - for account in db_session.query(Account).filter_by(provider='gmail'): + print "Migrating *_folder_name fields to reference Folder rows..." + for account in db_session.query(Account).filter_by(provider="gmail"): if account.inbox_folder_name: # hard replace INBOX with canonicalized caps - k = (account.id, 'Inbox') + k = (account.id, "Inbox") if k in folders: account.inbox_folder = folders[k] else: account.inbox_folder = Folder( account_id=account.id, - name=folder_name_subst_map[account.inbox_folder_name]) + name=folder_name_subst_map[account.inbox_folder_name], + ) if account.sent_folder_name: k = (account.id, account.sent_folder_name) if k in folders: account.sent_folder = folders[k] else: account.sent_folder = Folder( - account_id=account.id, - name=account.sent_folder_name) + account_id=account.id, name=account.sent_folder_name + ) if account.drafts_folder_name: k = (account.id, account.drafts_folder_name) if k in folders: account.drafts_folder = folders[k] else: account.drafts_folder = Folder( - account_id=account.id, - name=account.drafts_folder_name) + account_id=account.id, name=account.drafts_folder_name + ) # all/archive mismatch is intentional; semantics have changed if account.archive_folder_name: k = (account.id, account.archive_folder_name) @@ -291,70 +303,72 @@ class EASUid(Base): account.all_folder = folders[k] else: account.all_folder = Folder( - account_id=account.id, - name=account.archive_folder_name) + account_id=account.id, name=account.archive_folder_name + ) db_session.commit() if easupdate: - print "Migrating EAS accounts' *_folder_name fields to reference "\ - "Folder rows..." + print "Migrating EAS accounts' *_folder_name fields to reference " "Folder rows..." 
- for account in db_session.query(Account).filter_by(provider='eas'): + for account in db_session.query(Account).filter_by(provider="eas"): if account.inbox_folder_name: k = (account.id, account.inbox_folder_name) if k in folders: account.inbox_folder = folders[k] else: account.inbox_folder = Folder( - account_id=account.id, - name=account.inbox_folder_name) + account_id=account.id, name=account.inbox_folder_name + ) if account.sent_folder_name: k = (account.id, account.sent_folder_name) if k in folders: account.sent_folder = folders[k] else: account.sent_folder = Folder( - account_id=account.id, - name=account.sent_folder_name) + account_id=account.id, name=account.sent_folder_name + ) if account.drafts_folder_name: k = (account.id, account.drafts_folder_name) if k in folders: account.drafts_folder = folders[k] else: account.drafts_folder = Folder( - account_id=account.id, - name=account.drafts_folder_name) + account_id=account.id, name=account.drafts_folder_name + ) if account.archive_folder_name: k = (account.id, account.archive_folder_name) if k in folders: account.archive_folder = folders[k] else: account.archive_folder = Folder( - account_id=account.id, - name=account.archive_folder_name) + account_id=account.id, name=account.archive_folder_name + ) db_session.commit() - print 'Final schema tweaks and new constraint enforcement' - op.alter_column('folderitem', 'folder_id', existing_type=sa.Integer(), - nullable=False) - op.drop_constraint('folder_name', 'folderitem', type_='unique') - op.drop_constraint('folder_name', 'imapuid', type_='unique') - op.create_unique_constraint('uq_imapuid_folder_id_msg_uid_imapaccount_id', - 'imapuid', - ['folder_id', 'msg_uid', 'imapaccount_id']) - op.drop_column('folderitem', 'folder_name') - op.drop_column('imapuid', 'folder_name') - op.drop_column('account', 'inbox_folder_name') - op.drop_column('account', 'drafts_folder_name') - op.drop_column('account', 'sent_folder_name') - op.drop_column('account', 'archive_folder_name') + print "Final schema tweaks and new constraint enforcement" + op.alter_column( + "folderitem", "folder_id", existing_type=sa.Integer(), nullable=False + ) + op.drop_constraint("folder_name", "folderitem", type_="unique") + op.drop_constraint("folder_name", "imapuid", type_="unique") + op.create_unique_constraint( + "uq_imapuid_folder_id_msg_uid_imapaccount_id", + "imapuid", + ["folder_id", "msg_uid", "imapaccount_id"], + ) + op.drop_column("folderitem", "folder_name") + op.drop_column("imapuid", "folder_name") + op.drop_column("account", "inbox_folder_name") + op.drop_column("account", "drafts_folder_name") + op.drop_column("account", "sent_folder_name") + op.drop_column("account", "archive_folder_name") if easupdate: - print 'Dropping old EASUid columns...' + print "Dropping old EASUid columns..." 
- op.drop_constraint('folder_name', 'easuid', type_='unique') - op.drop_index('easuid_easaccount_id_folder_name', 'easuid') - op.drop_column('easuid', 'folder_name') + op.drop_constraint("folder_name", "easuid", type_="unique") + op.drop_index("easuid_easaccount_id_folder_name", "easuid") + op.drop_column("easuid", "folder_name") def downgrade(): diff --git a/migrations/versions/025_remove_user_sharedfolder_and_usersession.py b/migrations/versions/025_remove_user_sharedfolder_and_usersession.py index 5ba3b7f0c..cc268f0b6 100644 --- a/migrations/versions/025_remove_user_sharedfolder_and_usersession.py +++ b/migrations/versions/025_remove_user_sharedfolder_and_usersession.py @@ -7,24 +7,22 @@ """ # revision identifiers, used by Alembic. -revision = '59b42d0ac749' -down_revision = '4c1eb89f6bed' +revision = "59b42d0ac749" +down_revision = "4c1eb89f6bed" from alembic import op def upgrade(): - op.drop_constraint('account_ibfk_1', 'account', type_='foreignkey') - op.drop_constraint('usersession_ibfk_1', 'usersession', type_='foreignkey') - op.drop_constraint( - 'sharedfolder_ibfk_1', 'sharedfolder', type_='foreignkey') - op.drop_constraint( - 'sharedfolder_ibfk_2', 'sharedfolder', type_='foreignkey') - - op.drop_table(u'user') - op.drop_table(u'sharedfolder') - op.drop_table(u'usersession') - op.drop_column('account', u'user_id') + op.drop_constraint("account_ibfk_1", "account", type_="foreignkey") + op.drop_constraint("usersession_ibfk_1", "usersession", type_="foreignkey") + op.drop_constraint("sharedfolder_ibfk_1", "sharedfolder", type_="foreignkey") + op.drop_constraint("sharedfolder_ibfk_2", "sharedfolder", type_="foreignkey") + + op.drop_table(u"user") + op.drop_table(u"sharedfolder") + op.drop_table(u"usersession") + op.drop_column("account", u"user_id") def downgrade(): diff --git a/migrations/versions/026_add_audit_timestamps_to_all_objects.py b/migrations/versions/026_add_audit_timestamps_to_all_objects.py index f4defe161..470421bd1 100644 --- a/migrations/versions/026_add_audit_timestamps_to_all_objects.py +++ b/migrations/versions/026_add_audit_timestamps_to_all_objects.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '146b1817e4a8' -down_revision = '59b42d0ac749' +revision = "146b1817e4a8" +down_revision = "59b42d0ac749" from alembic import op import sqlalchemy as sa @@ -17,20 +17,35 @@ from datetime import datetime -table_names = {'account', 'block', 'contact', - 'folder', 'folderitem', 'foldersync', - 'imapuid', 'internaltag', 'lens', 'message', - 'messagecontactassociation', 'namespace', - 'searchsignal', 'searchtoken', 'thread', 'transaction', - 'uidvalidity', 'webhook'} +table_names = { + "account", + "block", + "contact", + "folder", + "folderitem", + "foldersync", + "imapuid", + "internaltag", + "lens", + "message", + "messagecontactassociation", + "namespace", + "searchsignal", + "searchtoken", + "thread", + "transaction", + "uidvalidity", + "webhook", +} def add_eas_tables(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) - for table_name in ['easuid', 'easfoldersync']: + for table_name in ["easuid", "easfoldersync"]: if table_name in Base.metadata.tables: table_names.add(table_name) @@ -40,44 +55,56 @@ def upgrade(): # mysql 5.5 / sqlalchemy interactions necessitate doing this in steps for table_name in sorted(table_names): - if table_name != 'contact': - op.add_column(table_name, sa.Column('created_at', sa.DateTime(), - nullable=True)) - op.add_column(table_name, sa.Column('updated_at', sa.DateTime(), - nullable=True)) - op.add_column(table_name, sa.Column('deleted_at', sa.DateTime(), - nullable=True)) - - t = table(table_name, - column('created_at', sa.DateTime()), - column('updated_at', sa.DateTime()), - ) + if table_name != "contact": + op.add_column( + table_name, sa.Column("created_at", sa.DateTime(), nullable=True) + ) + op.add_column( + table_name, sa.Column("updated_at", sa.DateTime(), nullable=True) + ) + op.add_column(table_name, sa.Column("deleted_at", sa.DateTime(), nullable=True)) + + t = table( + table_name, + column("created_at", sa.DateTime()), + column("updated_at", sa.DateTime()), + ) op.execute( t.update().values( - {'created_at': datetime.utcnow(), - 'updated_at': datetime.utcnow()})) + {"created_at": datetime.utcnow(), "updated_at": datetime.utcnow()} + ) + ) - op.alter_column(table_name, 'created_at', existing_type=sa.DateTime(), - nullable=False) - op.alter_column(table_name, 'updated_at', existing_type=sa.DateTime(), - nullable=False) + op.alter_column( + table_name, "created_at", existing_type=sa.DateTime(), nullable=False + ) + op.alter_column( + table_name, "updated_at", existing_type=sa.DateTime(), nullable=False + ) # missing from a prev revision - op.create_index('imapaccount_id_folder_id', 'imapuid', ['imapaccount_id', - 'folder_id'], - unique=False) - op.drop_index('imapuid_imapaccount_id_folder_name', table_name='imapuid') + op.create_index( + "imapaccount_id_folder_id", + "imapuid", + ["imapaccount_id", "folder_id"], + unique=False, + ) + op.drop_index("imapuid_imapaccount_id_folder_name", table_name="imapuid") def downgrade(): add_eas_tables() for table_name in sorted(table_names): - if table_name != 'contact': - op.drop_column(table_name, 'updated_at') - op.drop_column(table_name, 'created_at') - op.drop_column(table_name, 'deleted_at') - - op.create_index('imapuid_imapaccount_id_folder_name', 'imapuid', - [u'imapaccount_id', u'folder_id'], unique=False) - op.drop_index('imapaccount_id_folder_id', table_name='imapuid') + if table_name != "contact": + op.drop_column(table_name, "updated_at") + op.drop_column(table_name, "created_at") + 
op.drop_column(table_name, "deleted_at") + + op.create_index( + "imapuid_imapaccount_id_folder_name", + "imapuid", + [u"imapaccount_id", u"folder_id"], + unique=False, + ) + op.drop_index("imapaccount_id_folder_id", table_name="imapuid") diff --git a/migrations/versions/027_imapuid_soft_deletes.py b/migrations/versions/027_imapuid_soft_deletes.py index c3dfb41f7..73c0eedb9 100644 --- a/migrations/versions/027_imapuid_soft_deletes.py +++ b/migrations/versions/027_imapuid_soft_deletes.py @@ -10,8 +10,8 @@ """ # revision identifiers, used by Alembic. -revision = '924ffd092832' -down_revision = '146b1817e4a8' +revision = "924ffd092832" +down_revision = "146b1817e4a8" from alembic import op import sqlalchemy as sa @@ -19,7 +19,7 @@ def upgrade(): - t = table('imapuid', column('deleted_at', sa.DateTime())) + t = table("imapuid", column("deleted_at", sa.DateTime())) op.execute(t.delete().where(t.c.deleted_at != None)) # noqa: E711 diff --git a/migrations/versions/028_tag_api_migration.py b/migrations/versions/028_tag_api_migration.py index a93836f2d..3d44cc2f7 100644 --- a/migrations/versions/028_tag_api_migration.py +++ b/migrations/versions/028_tag_api_migration.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '40629415951c' -down_revision = '924ffd092832' +revision = "40629415951c" +down_revision = "924ffd092832" from alembic import op import sqlalchemy as sa @@ -21,6 +21,7 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Session = sessionmaker(bind=engine) @@ -39,87 +40,100 @@ def basic_session(): finally: session.close() - op.rename_table('internaltag', 'usertag') + op.rename_table("internaltag", "usertag") op.create_table( - 'usertagitem', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('thread_id', sa.Integer(), nullable=False), - sa.Column('usertag_id', sa.Integer(), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.Column('updated_at', sa.DateTime(), nullable=True), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.ForeignKeyConstraint(['thread_id'], ['thread.id'], - ondelete='CASCADE'), - sa.ForeignKeyConstraint(['usertag_id'], ['usertag.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') + "usertagitem", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("thread_id", sa.Integer(), nullable=False), + sa.Column("usertag_id", sa.Integer(), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=True), + sa.Column("updated_at", sa.DateTime(), nullable=True), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(["thread_id"], ["thread.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["usertag_id"], ["usertag.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.add_column( + u"folder", sa.Column("exposed_name", sa.String(length=255), nullable=True) + ) + op.add_column( + u"folder", sa.Column("public_id", sa.String(length=191), nullable=True) ) - op.add_column(u'folder', sa.Column('exposed_name', sa.String(length=255), - nullable=True)) - op.add_column(u'folder', sa.Column('public_id', sa.String(length=191), - nullable=True)) - op.add_column(u'account', sa.Column('provider_prefix', - sa.String(length=64), - nullable=False)) - op.add_column(u'account', sa.Column('important_folder_id', sa.Integer, - nullable=True)) + op.add_column( + u"account", sa.Column("provider_prefix", sa.String(length=64), nullable=False) + ) + op.add_column( + u"account", sa.Column("important_folder_id", 
sa.Integer, nullable=True) + ) Base = declarative_base() Base.metadata.reflect(engine) class Folder(Base): - __table__ = Base.metadata.tables['folder'] - account = relationship('Account', foreign_keys='Folder.account_id', - backref='folders') + __table__ = Base.metadata.tables["folder"] + account = relationship( + "Account", foreign_keys="Folder.account_id", backref="folders" + ) class FolderItem(Base): - __table__ = Base.metadata.tables['folderitem'] - folder = relationship('Folder', backref='threads', lazy='joined') + __table__ = Base.metadata.tables["folderitem"] + folder = relationship("Folder", backref="threads", lazy="joined") class Account(Base): - __table__ = Base.metadata.tables['account'] + __table__ = Base.metadata.tables["account"] print "setting provider_prefix for current accounts" with basic_session() as db_session: for account in db_session.query(Account): - if account.provider == 'gmail': - account.provider_prefix = 'gmail' - elif account.provider == 'eas': - account.provider_prefix = 'exchange' + if account.provider == "gmail": + account.provider_prefix = "gmail" + elif account.provider == "eas": + account.provider_prefix = "exchange" db_session.commit() print "Merging folders" - for name, alias in [('Sent', '[Gmail]/Sent Mail'), - ('Draft', '[Gmail]/Drafts'), - ('Starred', '[Gmail]/Starred'), - ('Important', '[Gmail]/Important')]: + for name, alias in [ + ("Sent", "[Gmail]/Sent Mail"), + ("Draft", "[Gmail]/Drafts"), + ("Starred", "[Gmail]/Starred"), + ("Important", "[Gmail]/Important"), + ]: for account in db_session.query(Account): - if account.provider != 'gmail': + if account.provider != "gmail": continue - src = db_session.query(Folder).filter( - Folder.account == account, - Folder.name == name).first() + src = ( + db_session.query(Folder) + .filter(Folder.account == account, Folder.name == name) + .first() + ) if src is None: continue try: - dest = db_session.query(Folder).filter( - Folder.account == account, - Folder.name == alias).one() + dest = ( + db_session.query(Folder) + .filter(Folder.account == account, Folder.name == alias) + .one() + ) except NoResultFound: # Create destination folder if it doesn't exist. # (in particular, databases created before migration 024 # have a [Gmail]/Important folder, but databases created # after may not). - dest = Folder(account=src.account, - name=alias, - created_at=datetime.utcnow(), - updated_at=datetime.utcnow()) + dest = Folder( + account=src.account, + name=alias, + created_at=datetime.utcnow(), + updated_at=datetime.utcnow(), + ) db_session.add(dest) - for folderitem in db_session.query(FolderItem).filter( - FolderItem.folder == src).yield_per(500): + for folderitem in ( + db_session.query(FolderItem) + .filter(FolderItem.folder == src) + .yield_per(500) + ): folderitem.folder = dest db_session.delete(src) @@ -130,35 +144,35 @@ class Account(Base): print "Adding public_id and exposed_name to folders." 
with basic_session() as db_session: for folder in db_session.query(Folder): - if folder.account.provider != 'gmail': + if folder.account.provider != "gmail": # punt on non-Gmail providers for now continue - if folder.name == '[Gmail]/All Mail': - folder.public_id = 'all' - folder.exposed_name = 'all' - elif folder.name == '[Gmail]/Drafts': - folder.public_id = 'drafts' - folder.exposed_name = 'drafts' - elif folder.name == '[Gmail]/Sent Mail': - folder.public_id = 'sent' - folder.exposed_name = 'sent' - elif folder.name == '[Gmail]/Starred': - folder.public_id = 'starred' - folder.exposed_name = 'starred' - elif folder.name == '[Gmail]/Spam': - folder.public_id = 'spam' - folder.exposed_name = 'spam' - elif folder.name == '[Gmail]/Trash': - folder.public_id = 'trash' - folder.exposed_name = 'trash' - elif folder.name == '[Gmail]/Important': - folder.public_id = 'important' - folder.exposed_name = 'important' - elif folder.name == 'Inbox': - folder.public_id = 'inbox' - folder.exposed_name = 'inbox' + if folder.name == "[Gmail]/All Mail": + folder.public_id = "all" + folder.exposed_name = "all" + elif folder.name == "[Gmail]/Drafts": + folder.public_id = "drafts" + folder.exposed_name = "drafts" + elif folder.name == "[Gmail]/Sent Mail": + folder.public_id = "sent" + folder.exposed_name = "sent" + elif folder.name == "[Gmail]/Starred": + folder.public_id = "starred" + folder.exposed_name = "starred" + elif folder.name == "[Gmail]/Spam": + folder.public_id = "spam" + folder.exposed_name = "spam" + elif folder.name == "[Gmail]/Trash": + folder.public_id = "trash" + folder.exposed_name = "trash" + elif folder.name == "[Gmail]/Important": + folder.public_id = "important" + folder.exposed_name = "important" + elif folder.name == "Inbox": + folder.public_id = "inbox" + folder.exposed_name = "inbox" else: - folder.exposed_name = '-'.join(('gmail', folder.name.lower())) + folder.exposed_name = "-".join(("gmail", folder.name.lower())) db_session.commit() diff --git a/migrations/versions/029_set_inbox_folder_exposed_name.py b/migrations/versions/029_set_inbox_folder_exposed_name.py index 7c7ef6dfb..7142e6549 100644 --- a/migrations/versions/029_set_inbox_folder_exposed_name.py +++ b/migrations/versions/029_set_inbox_folder_exposed_name.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '52a9a976a2e0' -down_revision = '40629415951c' +revision = "52a9a976a2e0" +down_revision = "40629415951c" from sqlalchemy.ext.declarative import declarative_base @@ -16,18 +16,18 @@ def upgrade(): from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) class Folder(Base): - __table__ = Base.metadata.tables['folder'] + __table__ = Base.metadata.tables["folder"] with session_scope(versioned=False) as db_session: - for folder in db_session.query(Folder).filter( - Folder.name == 'Inbox'): - folder.public_id = 'inbox' - folder.exposed_name = 'inbox' + for folder in db_session.query(Folder).filter(Folder.name == "Inbox"): + folder.public_id = "inbox" + folder.exposed_name = "inbox" db_session.commit() diff --git a/migrations/versions/030_add_is_read_attribute_to_messages.py b/migrations/versions/030_add_is_read_attribute_to_messages.py index c4fb8f0f2..b4f17bce1 100644 --- a/migrations/versions/030_add_is_read_attribute_to_messages.py +++ b/migrations/versions/030_add_is_read_attribute_to_messages.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '1b6ceae51b43' -down_revision = '52a9a976a2e0' +revision = "1b6ceae51b43" +down_revision = "52a9a976a2e0" from alembic import op import sqlalchemy as sa @@ -18,35 +18,47 @@ def upgrade(): - op.add_column('message', - sa.Column('is_read', sa.Boolean(), - server_default=sa.sql.expression.false(), - nullable=False)) + op.add_column( + "message", + sa.Column( + "is_read", + sa.Boolean(), + server_default=sa.sql.expression.false(), + nullable=False, + ), + ) - op.alter_column('usertagitem', 'created_at', - existing_type=mysql.DATETIME(), nullable=False) - op.alter_column('usertagitem', 'updated_at', - existing_type=mysql.DATETIME(), nullable=False) + op.alter_column( + "usertagitem", "created_at", existing_type=mysql.DATETIME(), nullable=False + ) + op.alter_column( + "usertagitem", "updated_at", existing_type=mysql.DATETIME(), nullable=False + ) from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) class Message(Base): - __table__ = Base.metadata.tables['message'] + __table__ = Base.metadata.tables["message"] class ImapUid(Base): - __table__ = Base.metadata.tables['imapuid'] - message = relationship('Message', - backref=backref('imapuids', - primaryjoin='and_(' - 'Message.id == ImapUid.message_id, ' - 'ImapUid.deleted_at == None)'), - primaryjoin='and_(' - 'ImapUid.message_id == Message.id,' - 'Message.deleted_at == None)') + __table__ = Base.metadata.tables["imapuid"] + message = relationship( + "Message", + backref=backref( + "imapuids", + primaryjoin="and_(" + "Message.id == ImapUid.message_id, " + "ImapUid.deleted_at == None)", + ), + primaryjoin="and_(" + "ImapUid.message_id == Message.id," + "Message.deleted_at == None)", + ) with session_scope(versioned=False) as db_session: for uid in db_session.query(ImapUid).yield_per(500): @@ -57,8 +69,10 @@ class ImapUid(Base): def downgrade(): - op.alter_column('usertagitem', 'updated_at', - existing_type=mysql.DATETIME(), nullable=True) - op.alter_column('usertagitem', 'created_at', - existing_type=mysql.DATETIME(), nullable=True) - op.drop_column('message', 'is_read') + op.alter_column( + "usertagitem", "updated_at", existing_type=mysql.DATETIME(), nullable=True + ) + op.alter_column( + "usertagitem", "created_at", existing_type=mysql.DATETIME(), nullable=True + ) + op.drop_column("message", "is_read") diff --git a/migrations/versions/031_add_indexes_to_timestamps.py b/migrations/versions/031_add_indexes_to_timestamps.py index a7185bfd3..cf1cf3d7f 100644 --- a/migrations/versions/031_add_indexes_to_timestamps.py +++ b/migrations/versions/031_add_indexes_to_timestamps.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '55f0ff54c776' -down_revision = '1b6ceae51b43' +revision = "55f0ff54c776" +down_revision = "1b6ceae51b43" from alembic import op @@ -17,163 +17,248 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) - op.create_index('ix_account_created_at', 'account', ['created_at'], unique=False) - op.create_index('ix_account_deleted_at', 'account', ['deleted_at'], unique=False) - op.create_index('ix_account_updated_at', 'account', ['updated_at'], unique=False) - op.create_index('ix_block_created_at', 'block', ['created_at'], unique=False) - op.create_index('ix_block_deleted_at', 'block', ['deleted_at'], unique=False) - op.create_index('ix_block_updated_at', 'block', ['updated_at'], unique=False) - op.create_index('ix_contact_created_at', 'contact', ['created_at'], unique=False) - op.create_index('ix_contact_deleted_at', 'contact', ['deleted_at'], unique=False) - op.create_index('ix_contact_updated_at', 'contact', ['updated_at'], unique=False) - - if 'easfoldersync' in Base.metadata.tables: - op.create_index('ix_easfoldersync_created_at', 'easfoldersync', ['created_at'], unique=False) - op.create_index('ix_easfoldersync_deleted_at', 'easfoldersync', ['deleted_at'], unique=False) - op.create_index('ix_easfoldersync_updated_at', 'easfoldersync', ['updated_at'], unique=False) - - if 'easuid' in Base.metadata.tables: - op.create_index('easuid_easaccount_id_folder_id', 'easuid', ['easaccount_id', 'folder_id'], unique=False) - op.create_index('ix_easuid_created_at', 'easuid', ['created_at'], unique=False) - op.create_index('ix_easuid_deleted_at', 'easuid', ['deleted_at'], unique=False) - op.create_index('ix_easuid_updated_at', 'easuid', ['updated_at'], unique=False) - - op.create_index('ix_folder_created_at', 'folder', ['created_at'], unique=False) - op.create_index('ix_folder_deleted_at', 'folder', ['deleted_at'], unique=False) - op.create_index('ix_folder_updated_at', 'folder', ['updated_at'], unique=False) - op.create_index('ix_folderitem_created_at', 'folderitem', ['created_at'], unique=False) - op.create_index('ix_folderitem_deleted_at', 'folderitem', ['deleted_at'], unique=False) - op.create_index('ix_folderitem_updated_at', 'folderitem', ['updated_at'], unique=False) - - op.create_index('ix_foldersync_created_at', 'foldersync', ['created_at'], unique=False) - op.create_index('ix_foldersync_deleted_at', 'foldersync', ['deleted_at'], unique=False) - op.create_index('ix_foldersync_updated_at', 'foldersync', ['updated_at'], unique=False) - - op.create_index('ix_imapuid_created_at', 'imapuid', ['created_at'], unique=False) - op.create_index('ix_imapuid_deleted_at', 'imapuid', ['deleted_at'], unique=False) - op.create_index('ix_imapuid_updated_at', 'imapuid', ['updated_at'], unique=False) - op.create_index('ix_lens_created_at', 'lens', ['created_at'], unique=False) - op.create_index('ix_lens_deleted_at', 'lens', ['deleted_at'], unique=False) - op.create_index('ix_lens_updated_at', 'lens', ['updated_at'], unique=False) - op.create_index('ix_message_created_at', 'message', ['created_at'], unique=False) - op.create_index('ix_message_deleted_at', 'message', ['deleted_at'], unique=False) - op.create_index('ix_message_updated_at', 'message', ['updated_at'], unique=False) - op.create_index('ix_messagecontactassociation_created_at', 'messagecontactassociation', ['created_at'], unique=False) - op.create_index('ix_messagecontactassociation_deleted_at', 'messagecontactassociation', 
['deleted_at'], unique=False) - op.create_index('ix_messagecontactassociation_updated_at', 'messagecontactassociation', ['updated_at'], unique=False) - op.create_index('ix_namespace_created_at', 'namespace', ['created_at'], unique=False) - op.create_index('ix_namespace_deleted_at', 'namespace', ['deleted_at'], unique=False) - op.create_index('ix_namespace_updated_at', 'namespace', ['updated_at'], unique=False) - op.create_index('ix_searchsignal_created_at', 'searchsignal', ['created_at'], unique=False) - op.create_index('ix_searchsignal_deleted_at', 'searchsignal', ['deleted_at'], unique=False) - op.create_index('ix_searchsignal_updated_at', 'searchsignal', ['updated_at'], unique=False) - op.create_index('ix_searchtoken_created_at', 'searchtoken', ['created_at'], unique=False) - op.create_index('ix_searchtoken_deleted_at', 'searchtoken', ['deleted_at'], unique=False) - op.create_index('ix_searchtoken_updated_at', 'searchtoken', ['updated_at'], unique=False) - op.create_index('ix_thread_created_at', 'thread', ['created_at'], unique=False) - op.create_index('ix_thread_deleted_at', 'thread', ['deleted_at'], unique=False) - op.create_index('ix_thread_updated_at', 'thread', ['updated_at'], unique=False) - op.create_index('ix_transaction_created_at', 'transaction', ['created_at'], unique=False) - op.create_index('ix_transaction_deleted_at', 'transaction', ['deleted_at'], unique=False) - op.create_index('ix_transaction_updated_at', 'transaction', ['updated_at'], unique=False) - op.create_index('ix_uidvalidity_created_at', 'uidvalidity', ['created_at'], unique=False) - op.create_index('ix_uidvalidity_deleted_at', 'uidvalidity', ['deleted_at'], unique=False) - op.create_index('ix_uidvalidity_updated_at', 'uidvalidity', ['updated_at'], unique=False) - - op.create_index('ix_usertag_created_at', 'usertag', ['created_at'], unique=False) - op.create_index('ix_usertag_deleted_at', 'usertag', ['deleted_at'], unique=False) - op.create_index('ix_usertag_public_id', 'usertag', ['public_id'], unique=False) - op.create_index('ix_usertag_updated_at', 'usertag', ['updated_at'], unique=False) - - op.create_index('ix_usertagitem_created_at', 'usertagitem', ['created_at'], unique=False) - op.create_index('ix_usertagitem_deleted_at', 'usertagitem', ['deleted_at'], unique=False) - op.create_index('ix_usertagitem_updated_at', 'usertagitem', ['updated_at'], unique=False) - op.create_index('ix_webhook_created_at', 'webhook', ['created_at'], unique=False) - op.create_index('ix_webhook_deleted_at', 'webhook', ['deleted_at'], unique=False) - op.create_index('ix_webhook_updated_at', 'webhook', ['updated_at'], unique=False) + op.create_index("ix_account_created_at", "account", ["created_at"], unique=False) + op.create_index("ix_account_deleted_at", "account", ["deleted_at"], unique=False) + op.create_index("ix_account_updated_at", "account", ["updated_at"], unique=False) + op.create_index("ix_block_created_at", "block", ["created_at"], unique=False) + op.create_index("ix_block_deleted_at", "block", ["deleted_at"], unique=False) + op.create_index("ix_block_updated_at", "block", ["updated_at"], unique=False) + op.create_index("ix_contact_created_at", "contact", ["created_at"], unique=False) + op.create_index("ix_contact_deleted_at", "contact", ["deleted_at"], unique=False) + op.create_index("ix_contact_updated_at", "contact", ["updated_at"], unique=False) + + if "easfoldersync" in Base.metadata.tables: + op.create_index( + "ix_easfoldersync_created_at", "easfoldersync", ["created_at"], unique=False + ) + op.create_index( + 
"ix_easfoldersync_deleted_at", "easfoldersync", ["deleted_at"], unique=False + ) + op.create_index( + "ix_easfoldersync_updated_at", "easfoldersync", ["updated_at"], unique=False + ) + + if "easuid" in Base.metadata.tables: + op.create_index( + "easuid_easaccount_id_folder_id", + "easuid", + ["easaccount_id", "folder_id"], + unique=False, + ) + op.create_index("ix_easuid_created_at", "easuid", ["created_at"], unique=False) + op.create_index("ix_easuid_deleted_at", "easuid", ["deleted_at"], unique=False) + op.create_index("ix_easuid_updated_at", "easuid", ["updated_at"], unique=False) + + op.create_index("ix_folder_created_at", "folder", ["created_at"], unique=False) + op.create_index("ix_folder_deleted_at", "folder", ["deleted_at"], unique=False) + op.create_index("ix_folder_updated_at", "folder", ["updated_at"], unique=False) + op.create_index( + "ix_folderitem_created_at", "folderitem", ["created_at"], unique=False + ) + op.create_index( + "ix_folderitem_deleted_at", "folderitem", ["deleted_at"], unique=False + ) + op.create_index( + "ix_folderitem_updated_at", "folderitem", ["updated_at"], unique=False + ) + + op.create_index( + "ix_foldersync_created_at", "foldersync", ["created_at"], unique=False + ) + op.create_index( + "ix_foldersync_deleted_at", "foldersync", ["deleted_at"], unique=False + ) + op.create_index( + "ix_foldersync_updated_at", "foldersync", ["updated_at"], unique=False + ) + + op.create_index("ix_imapuid_created_at", "imapuid", ["created_at"], unique=False) + op.create_index("ix_imapuid_deleted_at", "imapuid", ["deleted_at"], unique=False) + op.create_index("ix_imapuid_updated_at", "imapuid", ["updated_at"], unique=False) + op.create_index("ix_lens_created_at", "lens", ["created_at"], unique=False) + op.create_index("ix_lens_deleted_at", "lens", ["deleted_at"], unique=False) + op.create_index("ix_lens_updated_at", "lens", ["updated_at"], unique=False) + op.create_index("ix_message_created_at", "message", ["created_at"], unique=False) + op.create_index("ix_message_deleted_at", "message", ["deleted_at"], unique=False) + op.create_index("ix_message_updated_at", "message", ["updated_at"], unique=False) + op.create_index( + "ix_messagecontactassociation_created_at", + "messagecontactassociation", + ["created_at"], + unique=False, + ) + op.create_index( + "ix_messagecontactassociation_deleted_at", + "messagecontactassociation", + ["deleted_at"], + unique=False, + ) + op.create_index( + "ix_messagecontactassociation_updated_at", + "messagecontactassociation", + ["updated_at"], + unique=False, + ) + op.create_index( + "ix_namespace_created_at", "namespace", ["created_at"], unique=False + ) + op.create_index( + "ix_namespace_deleted_at", "namespace", ["deleted_at"], unique=False + ) + op.create_index( + "ix_namespace_updated_at", "namespace", ["updated_at"], unique=False + ) + op.create_index( + "ix_searchsignal_created_at", "searchsignal", ["created_at"], unique=False + ) + op.create_index( + "ix_searchsignal_deleted_at", "searchsignal", ["deleted_at"], unique=False + ) + op.create_index( + "ix_searchsignal_updated_at", "searchsignal", ["updated_at"], unique=False + ) + op.create_index( + "ix_searchtoken_created_at", "searchtoken", ["created_at"], unique=False + ) + op.create_index( + "ix_searchtoken_deleted_at", "searchtoken", ["deleted_at"], unique=False + ) + op.create_index( + "ix_searchtoken_updated_at", "searchtoken", ["updated_at"], unique=False + ) + op.create_index("ix_thread_created_at", "thread", ["created_at"], unique=False) + 
op.create_index("ix_thread_deleted_at", "thread", ["deleted_at"], unique=False) + op.create_index("ix_thread_updated_at", "thread", ["updated_at"], unique=False) + op.create_index( + "ix_transaction_created_at", "transaction", ["created_at"], unique=False + ) + op.create_index( + "ix_transaction_deleted_at", "transaction", ["deleted_at"], unique=False + ) + op.create_index( + "ix_transaction_updated_at", "transaction", ["updated_at"], unique=False + ) + op.create_index( + "ix_uidvalidity_created_at", "uidvalidity", ["created_at"], unique=False + ) + op.create_index( + "ix_uidvalidity_deleted_at", "uidvalidity", ["deleted_at"], unique=False + ) + op.create_index( + "ix_uidvalidity_updated_at", "uidvalidity", ["updated_at"], unique=False + ) + + op.create_index("ix_usertag_created_at", "usertag", ["created_at"], unique=False) + op.create_index("ix_usertag_deleted_at", "usertag", ["deleted_at"], unique=False) + op.create_index("ix_usertag_public_id", "usertag", ["public_id"], unique=False) + op.create_index("ix_usertag_updated_at", "usertag", ["updated_at"], unique=False) + + op.create_index( + "ix_usertagitem_created_at", "usertagitem", ["created_at"], unique=False + ) + op.create_index( + "ix_usertagitem_deleted_at", "usertagitem", ["deleted_at"], unique=False + ) + op.create_index( + "ix_usertagitem_updated_at", "usertagitem", ["updated_at"], unique=False + ) + op.create_index("ix_webhook_created_at", "webhook", ["created_at"], unique=False) + op.create_index("ix_webhook_deleted_at", "webhook", ["deleted_at"], unique=False) + op.create_index("ix_webhook_updated_at", "webhook", ["updated_at"], unique=False) def downgrade(): - op.drop_index('ix_webhook_updated_at', table_name='webhook') - op.drop_index('ix_webhook_deleted_at', table_name='webhook') - op.drop_index('ix_webhook_created_at', table_name='webhook') - op.drop_index('ix_usertagitem_updated_at', table_name='usertagitem') - op.drop_index('ix_usertagitem_deleted_at', table_name='usertagitem') - op.drop_index('ix_usertagitem_created_at', table_name='usertagitem') - - op.drop_index('ix_usertag_updated_at', table_name='usertag') - op.drop_index('ix_usertag_public_id', table_name='usertag') - op.drop_index('ix_usertag_deleted_at', table_name='usertag') - op.drop_index('ix_usertag_created_at', table_name='usertag') - - op.drop_index('ix_uidvalidity_updated_at', table_name='uidvalidity') - op.drop_index('ix_uidvalidity_deleted_at', table_name='uidvalidity') - op.drop_index('ix_uidvalidity_created_at', table_name='uidvalidity') - op.drop_index('ix_transaction_updated_at', table_name='transaction') - op.drop_index('ix_transaction_deleted_at', table_name='transaction') - op.drop_index('ix_transaction_created_at', table_name='transaction') - op.drop_index('ix_thread_updated_at', table_name='thread') - op.drop_index('ix_thread_deleted_at', table_name='thread') - op.drop_index('ix_thread_created_at', table_name='thread') - op.drop_index('ix_searchtoken_updated_at', table_name='searchtoken') - op.drop_index('ix_searchtoken_deleted_at', table_name='searchtoken') - op.drop_index('ix_searchtoken_created_at', table_name='searchtoken') - op.drop_index('ix_searchsignal_updated_at', table_name='searchsignal') - op.drop_index('ix_searchsignal_deleted_at', table_name='searchsignal') - op.drop_index('ix_searchsignal_created_at', table_name='searchsignal') - op.drop_index('ix_namespace_updated_at', table_name='namespace') - op.drop_index('ix_namespace_deleted_at', table_name='namespace') - op.drop_index('ix_namespace_created_at', table_name='namespace') - 
op.drop_index('ix_messagecontactassociation_updated_at', table_name='messagecontactassociation') - op.drop_index('ix_messagecontactassociation_deleted_at', table_name='messagecontactassociation') - op.drop_index('ix_messagecontactassociation_created_at', table_name='messagecontactassociation') - op.drop_index('ix_message_updated_at', table_name='message') - op.drop_index('ix_message_deleted_at', table_name='message') - op.drop_index('ix_message_created_at', table_name='message') - op.drop_index('ix_lens_updated_at', table_name='lens') - op.drop_index('ix_lens_deleted_at', table_name='lens') - op.drop_index('ix_lens_created_at', table_name='lens') - op.drop_index('ix_imapuid_updated_at', table_name='imapuid') - op.drop_index('ix_imapuid_deleted_at', table_name='imapuid') - op.drop_index('ix_imapuid_created_at', table_name='imapuid') - - op.drop_index('ix_foldersync_updated_at', table_name='foldersync') - op.drop_index('ix_foldersync_deleted_at', table_name='foldersync') - op.drop_index('ix_foldersync_created_at', table_name='foldersync') - - op.drop_index('ix_folderitem_updated_at', table_name='folderitem') - op.drop_index('ix_folderitem_deleted_at', table_name='folderitem') - op.drop_index('ix_folderitem_created_at', table_name='folderitem') - op.drop_index('ix_folder_updated_at', table_name='folder') - op.drop_index('ix_folder_deleted_at', table_name='folder') - op.drop_index('ix_folder_created_at', table_name='folder') + op.drop_index("ix_webhook_updated_at", table_name="webhook") + op.drop_index("ix_webhook_deleted_at", table_name="webhook") + op.drop_index("ix_webhook_created_at", table_name="webhook") + op.drop_index("ix_usertagitem_updated_at", table_name="usertagitem") + op.drop_index("ix_usertagitem_deleted_at", table_name="usertagitem") + op.drop_index("ix_usertagitem_created_at", table_name="usertagitem") + + op.drop_index("ix_usertag_updated_at", table_name="usertag") + op.drop_index("ix_usertag_public_id", table_name="usertag") + op.drop_index("ix_usertag_deleted_at", table_name="usertag") + op.drop_index("ix_usertag_created_at", table_name="usertag") + + op.drop_index("ix_uidvalidity_updated_at", table_name="uidvalidity") + op.drop_index("ix_uidvalidity_deleted_at", table_name="uidvalidity") + op.drop_index("ix_uidvalidity_created_at", table_name="uidvalidity") + op.drop_index("ix_transaction_updated_at", table_name="transaction") + op.drop_index("ix_transaction_deleted_at", table_name="transaction") + op.drop_index("ix_transaction_created_at", table_name="transaction") + op.drop_index("ix_thread_updated_at", table_name="thread") + op.drop_index("ix_thread_deleted_at", table_name="thread") + op.drop_index("ix_thread_created_at", table_name="thread") + op.drop_index("ix_searchtoken_updated_at", table_name="searchtoken") + op.drop_index("ix_searchtoken_deleted_at", table_name="searchtoken") + op.drop_index("ix_searchtoken_created_at", table_name="searchtoken") + op.drop_index("ix_searchsignal_updated_at", table_name="searchsignal") + op.drop_index("ix_searchsignal_deleted_at", table_name="searchsignal") + op.drop_index("ix_searchsignal_created_at", table_name="searchsignal") + op.drop_index("ix_namespace_updated_at", table_name="namespace") + op.drop_index("ix_namespace_deleted_at", table_name="namespace") + op.drop_index("ix_namespace_created_at", table_name="namespace") + op.drop_index( + "ix_messagecontactassociation_updated_at", + table_name="messagecontactassociation", + ) + op.drop_index( + "ix_messagecontactassociation_deleted_at", + 
table_name="messagecontactassociation", + ) + op.drop_index( + "ix_messagecontactassociation_created_at", + table_name="messagecontactassociation", + ) + op.drop_index("ix_message_updated_at", table_name="message") + op.drop_index("ix_message_deleted_at", table_name="message") + op.drop_index("ix_message_created_at", table_name="message") + op.drop_index("ix_lens_updated_at", table_name="lens") + op.drop_index("ix_lens_deleted_at", table_name="lens") + op.drop_index("ix_lens_created_at", table_name="lens") + op.drop_index("ix_imapuid_updated_at", table_name="imapuid") + op.drop_index("ix_imapuid_deleted_at", table_name="imapuid") + op.drop_index("ix_imapuid_created_at", table_name="imapuid") + + op.drop_index("ix_foldersync_updated_at", table_name="foldersync") + op.drop_index("ix_foldersync_deleted_at", table_name="foldersync") + op.drop_index("ix_foldersync_created_at", table_name="foldersync") + + op.drop_index("ix_folderitem_updated_at", table_name="folderitem") + op.drop_index("ix_folderitem_deleted_at", table_name="folderitem") + op.drop_index("ix_folderitem_created_at", table_name="folderitem") + op.drop_index("ix_folder_updated_at", table_name="folder") + op.drop_index("ix_folder_deleted_at", table_name="folder") + op.drop_index("ix_folder_created_at", table_name="folder") from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) - if 'easuid' in Base.metadata.tables: - op.drop_index('ix_easuid_updated_at', table_name='easuid') - op.drop_index('ix_easuid_deleted_at', table_name='easuid') - op.drop_index('ix_easuid_created_at', table_name='easuid') - op.drop_index('easuid_easaccount_id_folder_id', table_name='easuid') - - if 'easfoldersync' in Base.metadata.tables: - op.drop_index('ix_easfoldersync_updated_at', table_name='easfoldersync') - op.drop_index('ix_easfoldersync_deleted_at', table_name='easfoldersync') - op.drop_index('ix_easfoldersync_created_at', table_name='easfoldersync') - op.drop_index('ix_contact_updated_at', table_name='contact') - op.drop_index('ix_contact_deleted_at', table_name='contact') - op.drop_index('ix_contact_created_at', table_name='contact') - op.drop_index('ix_block_updated_at', table_name='block') - op.drop_index('ix_block_deleted_at', table_name='block') - op.drop_index('ix_block_created_at', table_name='block') - - op.drop_index('ix_account_updated_at', table_name='account') - op.drop_index('ix_account_deleted_at', table_name='account') - op.drop_index('ix_account_created_at', table_name='account') + if "easuid" in Base.metadata.tables: + op.drop_index("ix_easuid_updated_at", table_name="easuid") + op.drop_index("ix_easuid_deleted_at", table_name="easuid") + op.drop_index("ix_easuid_created_at", table_name="easuid") + op.drop_index("easuid_easaccount_id_folder_id", table_name="easuid") + + if "easfoldersync" in Base.metadata.tables: + op.drop_index("ix_easfoldersync_updated_at", table_name="easfoldersync") + op.drop_index("ix_easfoldersync_deleted_at", table_name="easfoldersync") + op.drop_index("ix_easfoldersync_created_at", table_name="easfoldersync") + op.drop_index("ix_contact_updated_at", table_name="contact") + op.drop_index("ix_contact_deleted_at", table_name="contact") + op.drop_index("ix_contact_created_at", table_name="contact") + op.drop_index("ix_block_updated_at", table_name="block") + op.drop_index("ix_block_deleted_at", table_name="block") + op.drop_index("ix_block_created_at", table_name="block") + + op.drop_index("ix_account_updated_at", 
table_name="account") + op.drop_index("ix_account_deleted_at", table_name="account") + op.drop_index("ix_account_created_at", table_name="account") diff --git a/migrations/versions/032_tighten_easuid.py b/migrations/versions/032_tighten_easuid.py index 7e57f5f64..7e312627c 100644 --- a/migrations/versions/032_tighten_easuid.py +++ b/migrations/versions/032_tighten_easuid.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '3f96e92953e1' -down_revision = '55f0ff54c776' +revision = "3f96e92953e1" +down_revision = "55f0ff54c776" from alembic import op import sqlalchemy as sa @@ -17,42 +17,50 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) - if 'easfoldersync' in Base.metadata.tables: - op.alter_column('easfoldersync', 'state', - type_=sa.Enum('initial', 'initial keyinvalid', - 'poll', 'poll keyinvalid', 'finish'), - existing_nullable=False, - server_default='initial') + if "easfoldersync" in Base.metadata.tables: + op.alter_column( + "easfoldersync", + "state", + type_=sa.Enum( + "initial", "initial keyinvalid", "poll", "poll keyinvalid", "finish" + ), + existing_nullable=False, + server_default="initial", + ) - if 'easuid' in Base.metadata.tables: - op.alter_column('easuid', 'message_id', existing_type=sa.Integer(), - nullable=False) - op.alter_column('easuid', 'fld_uid', existing_type=sa.Integer(), - nullable=False) - op.alter_column('easuid', 'msg_uid', existing_type=sa.Integer(), - nullable=False) + if "easuid" in Base.metadata.tables: + op.alter_column( + "easuid", "message_id", existing_type=sa.Integer(), nullable=False + ) + op.alter_column("easuid", "fld_uid", existing_type=sa.Integer(), nullable=False) + op.alter_column("easuid", "msg_uid", existing_type=sa.Integer(), nullable=False) def downgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) - if 'easfoldersync' in Base.metadata.tables: - op.alter_column('easfoldersync', 'state', - type_=sa.Enum('initial', 'initial uidinvalid', - 'poll', 'poll uidinvalid', 'finish'), - existing_nullable=False) - - if 'easuid' in Base.metadata.tables: - op.alter_column('easuid', 'message_id', existing_type=sa.Integer(), - nullable=True) - op.alter_column('easuid', 'fld_uid', existing_type=sa.Integer(), - nullable=True) - op.alter_column('easuid', 'msg_uid', existing_type=sa.Integer(), - nullable=True) + if "easfoldersync" in Base.metadata.tables: + op.alter_column( + "easfoldersync", + "state", + type_=sa.Enum( + "initial", "initial uidinvalid", "poll", "poll uidinvalid", "finish" + ), + existing_nullable=False, + ) + + if "easuid" in Base.metadata.tables: + op.alter_column( + "easuid", "message_id", existing_type=sa.Integer(), nullable=True + ) + op.alter_column("easuid", "fld_uid", existing_type=sa.Integer(), nullable=True) + op.alter_column("easuid", "msg_uid", existing_type=sa.Integer(), nullable=True) diff --git a/migrations/versions/033_add_more_indexes.py b/migrations/versions/033_add_more_indexes.py index ab676374d..b8ab193b2 100644 --- a/migrations/versions/033_add_more_indexes.py +++ b/migrations/versions/033_add_more_indexes.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '1eab2619cc4f' -down_revision = '3f96e92953e1' +revision = "1eab2619cc4f" +down_revision = "3f96e92953e1" from alembic import op from sqlalchemy.ext.declarative import declarative_base @@ -16,27 +16,29 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) - if 'easuid' in Base.metadata.tables: - op.create_index('ix_easuid_msg_uid', 'easuid', ['msg_uid'], - unique=False) + if "easuid" in Base.metadata.tables: + op.create_index("ix_easuid_msg_uid", "easuid", ["msg_uid"], unique=False) - op.create_index('ix_imapuid_msg_uid', 'imapuid', ['msg_uid'], unique=False) - op.create_index('ix_transaction_table_name', 'transaction', ['table_name'], - unique=False) + op.create_index("ix_imapuid_msg_uid", "imapuid", ["msg_uid"], unique=False) + op.create_index( + "ix_transaction_table_name", "transaction", ["table_name"], unique=False + ) def downgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) - if 'easuid' in Base.metadata.tables: - op.drop_index('ix_easuid_msg_uid', table_name='easuid') + if "easuid" in Base.metadata.tables: + op.drop_index("ix_easuid_msg_uid", table_name="easuid") - op.drop_index('ix_transaction_table_name', table_name='transaction') - op.drop_index('ix_imapuid_msg_uid', table_name='imapuid') + op.drop_index("ix_transaction_table_name", table_name="transaction") + op.drop_index("ix_imapuid_msg_uid", table_name="imapuid") diff --git a/migrations/versions/034_cascade_folder_deletes_to_imapuid.py b/migrations/versions/034_cascade_folder_deletes_to_imapuid.py index 036edbed3..fe66669ff 100644 --- a/migrations/versions/034_cascade_folder_deletes_to_imapuid.py +++ b/migrations/versions/034_cascade_folder_deletes_to_imapuid.py @@ -16,77 +16,140 @@ """ # revision identifiers, used by Alembic. 
-revision = '350a08df27ee' -down_revision = '1eab2619cc4f' +revision = "350a08df27ee" +down_revision = "1eab2619cc4f" from alembic import op def upgrade(): - op.drop_constraint('imapuid_ibfk_3', 'imapuid', type_='foreignkey') - op.create_foreign_key('imapuid_ibfk_3', 'imapuid', 'folder', - ['folder_id'], ['id'], ondelete='CASCADE') - op.drop_constraint('account_ibfk_2', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_2', 'account', 'folder', - ['inbox_folder_id'], ['id'], ondelete='SET NULL') - op.drop_constraint('account_ibfk_3', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_3', 'account', 'folder', - ['sent_folder_id'], ['id'], ondelete='SET NULL') - op.drop_constraint('account_ibfk_4', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_4', 'account', 'folder', - ['drafts_folder_id'], ['id'], ondelete='SET NULL') - op.drop_constraint('account_ibfk_5', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_5', 'account', 'folder', - ['spam_folder_id'], ['id'], ondelete='SET NULL') - op.drop_constraint('account_ibfk_6', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_6', 'account', 'folder', - ['trash_folder_id'], ['id'], ondelete='SET NULL') - op.drop_constraint('account_ibfk_7', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_7', 'account', 'folder', - ['archive_folder_id'], ['id'], ondelete='SET NULL') - op.drop_constraint('account_ibfk_8', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_8', 'account', 'folder', - ['all_folder_id'], ['id'], ondelete='SET NULL') - op.drop_constraint('account_ibfk_9', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_9', 'account', 'folder', - ['starred_folder_id'], ['id'], ondelete='SET NULL') + op.drop_constraint("imapuid_ibfk_3", "imapuid", type_="foreignkey") + op.create_foreign_key( + "imapuid_ibfk_3", "imapuid", "folder", ["folder_id"], ["id"], ondelete="CASCADE" + ) + op.drop_constraint("account_ibfk_2", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_2", + "account", + "folder", + ["inbox_folder_id"], + ["id"], + ondelete="SET NULL", + ) + op.drop_constraint("account_ibfk_3", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_3", + "account", + "folder", + ["sent_folder_id"], + ["id"], + ondelete="SET NULL", + ) + op.drop_constraint("account_ibfk_4", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_4", + "account", + "folder", + ["drafts_folder_id"], + ["id"], + ondelete="SET NULL", + ) + op.drop_constraint("account_ibfk_5", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_5", + "account", + "folder", + ["spam_folder_id"], + ["id"], + ondelete="SET NULL", + ) + op.drop_constraint("account_ibfk_6", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_6", + "account", + "folder", + ["trash_folder_id"], + ["id"], + ondelete="SET NULL", + ) + op.drop_constraint("account_ibfk_7", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_7", + "account", + "folder", + ["archive_folder_id"], + ["id"], + ondelete="SET NULL", + ) + op.drop_constraint("account_ibfk_8", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_8", + "account", + "folder", + ["all_folder_id"], + ["id"], + ondelete="SET NULL", + ) + op.drop_constraint("account_ibfk_9", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_9", + "account", + "folder", + 
["starred_folder_id"], + ["id"], + ondelete="SET NULL", + ) # for some reason this was left out of migration 024, so might not exist try: - op.drop_constraint('account_ibfk_10', 'account', type_='foreignkey') + op.drop_constraint("account_ibfk_10", "account", type_="foreignkey") except: pass - op.create_foreign_key('account_ibfk_10', 'account', 'folder', - ['important_folder_id'], ['id'], ondelete='SET NULL') + op.create_foreign_key( + "account_ibfk_10", + "account", + "folder", + ["important_folder_id"], + ["id"], + ondelete="SET NULL", + ) def downgrade(): - op.drop_constraint('imapuid_ibfk_3', 'imapuid', type_='foreignkey') - op.create_foreign_key('imapuid_ibfk_3', 'imapuid', 'folder', - ['folder_id'], ['id']) - op.drop_constraint('account_ibfk_2', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_2', 'account', 'folder', - ['inbox_folder_id'], ['id']) - op.drop_constraint('account_ibfk_3', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_3', 'account', 'folder', - ['sent_folder_id'], ['id']) - op.drop_constraint('account_ibfk_4', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_4', 'account', 'folder', - ['drafts_folder_id'], ['id']) - op.drop_constraint('account_ibfk_5', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_5', 'account', 'folder', - ['spam_folder_id'], ['id']) - op.drop_constraint('account_ibfk_6', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_6', 'account', 'folder', - ['trash_folder_id'], ['id']) - op.drop_constraint('account_ibfk_7', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_7', 'account', 'folder', - ['archive_folder_id'], ['id']) - op.drop_constraint('account_ibfk_8', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_8', 'account', 'folder', - ['all_folder_id'], ['id']) - op.drop_constraint('account_ibfk_9', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_9', 'account', 'folder', - ['starred_folder_id'], ['id']) - op.drop_constraint('account_ibfk_10', 'account', type_='foreignkey') - op.create_foreign_key('account_ibfk_10', 'account', 'folder', - ['important_folder_id'], ['id']) + op.drop_constraint("imapuid_ibfk_3", "imapuid", type_="foreignkey") + op.create_foreign_key("imapuid_ibfk_3", "imapuid", "folder", ["folder_id"], ["id"]) + op.drop_constraint("account_ibfk_2", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_2", "account", "folder", ["inbox_folder_id"], ["id"] + ) + op.drop_constraint("account_ibfk_3", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_3", "account", "folder", ["sent_folder_id"], ["id"] + ) + op.drop_constraint("account_ibfk_4", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_4", "account", "folder", ["drafts_folder_id"], ["id"] + ) + op.drop_constraint("account_ibfk_5", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_5", "account", "folder", ["spam_folder_id"], ["id"] + ) + op.drop_constraint("account_ibfk_6", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_6", "account", "folder", ["trash_folder_id"], ["id"] + ) + op.drop_constraint("account_ibfk_7", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_7", "account", "folder", ["archive_folder_id"], ["id"] + ) + op.drop_constraint("account_ibfk_8", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_8", "account", "folder", ["all_folder_id"], ["id"] + ) + 
op.drop_constraint("account_ibfk_9", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_9", "account", "folder", ["starred_folder_id"], ["id"] + ) + op.drop_constraint("account_ibfk_10", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_10", "account", "folder", ["important_folder_id"], ["id"] + ) diff --git a/migrations/versions/035_add_columns_for_drafts_support_to_.py b/migrations/versions/035_add_columns_for_drafts_support_to_.py index f78d1bbe5..5dcbb23ef 100644 --- a/migrations/versions/035_add_columns_for_drafts_support_to_.py +++ b/migrations/versions/035_add_columns_for_drafts_support_to_.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '24e085e152c0' -down_revision = '350a08df27ee' +revision = "24e085e152c0" +down_revision = "350a08df27ee" from alembic import op import sqlalchemy as sa @@ -17,64 +17,75 @@ def upgrade(): # Create DraftThread table - op.create_table('draftthread', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('master_public_id', mysql.BINARY(16), - nullable=False), - sa.Column('thread_id', sa.Integer()), - sa.ForeignKeyConstraint(['thread_id'], - ['thread.id'], - ondelete='CASCADE'), - sa.Column('message_id', sa.Integer()), - sa.ForeignKeyConstraint(['message_id'], - ['message.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id'), - sa.Column('created_at', sa.DateTime(), - nullable=False), - sa.Column('updated_at', sa.DateTime(), - nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('public_id', mysql.BINARY(16), nullable=False, - index=True), - ) + op.create_table( + "draftthread", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("master_public_id", mysql.BINARY(16), nullable=False), + sa.Column("thread_id", sa.Integer()), + sa.ForeignKeyConstraint(["thread_id"], ["thread.id"], ondelete="CASCADE"), + sa.Column("message_id", sa.Integer()), + sa.ForeignKeyConstraint(["message_id"], ["message.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("public_id", mysql.BINARY(16), nullable=False, index=True), + ) # Add columns to SpoolMessage table - op.add_column('spoolmessage', - sa.Column('parent_draft_id', sa.Integer(), nullable=True)) - op.create_foreign_key('spoolmessage_ibfk_3', - 'spoolmessage', 'spoolmessage', - ['parent_draft_id'], ['id']) + op.add_column( + "spoolmessage", sa.Column("parent_draft_id", sa.Integer(), nullable=True) + ) + op.create_foreign_key( + "spoolmessage_ibfk_3", + "spoolmessage", + "spoolmessage", + ["parent_draft_id"], + ["id"], + ) - op.add_column('spoolmessage', - sa.Column('draft_copied_from', sa.Integer(), nullable=True)) - op.create_foreign_key('spoolmessage_ibfk_4', - 'spoolmessage', 'spoolmessage', - ['draft_copied_from'], ['id']) + op.add_column( + "spoolmessage", sa.Column("draft_copied_from", sa.Integer(), nullable=True) + ) + op.create_foreign_key( + "spoolmessage_ibfk_4", + "spoolmessage", + "spoolmessage", + ["draft_copied_from"], + ["id"], + ) - op.add_column('spoolmessage', - sa.Column('replyto_thread_id', sa.Integer(), nullable=True)) - op.create_foreign_key('spoolmessage_ibfk_5', - 'spoolmessage', 'draftthread', - ['replyto_thread_id'], ['id']) + op.add_column( + "spoolmessage", sa.Column("replyto_thread_id", sa.Integer(), nullable=True) + ) + op.create_foreign_key( + "spoolmessage_ibfk_5", + 
"spoolmessage", + "draftthread", + ["replyto_thread_id"], + ["id"], + ) - op.add_column('spoolmessage', sa.Column('state', sa.Enum('draft', - 'sending', 'sending failed', 'sent'), server_default='draft', - nullable=False)) + op.add_column( + "spoolmessage", + sa.Column( + "state", + sa.Enum("draft", "sending", "sending failed", "sent"), + server_default="draft", + nullable=False, + ), + ) def downgrade(): - op.drop_constraint('spoolmessage_ibfk_3', 'spoolmessage', - type_='foreignkey') - op.drop_column('spoolmessage', 'parent_draft_id') + op.drop_constraint("spoolmessage_ibfk_3", "spoolmessage", type_="foreignkey") + op.drop_column("spoolmessage", "parent_draft_id") - op.drop_constraint('spoolmessage_ibfk_4', 'spoolmessage', - type_='foreignkey') - op.drop_column('spoolmessage', 'draft_copied_from') + op.drop_constraint("spoolmessage_ibfk_4", "spoolmessage", type_="foreignkey") + op.drop_column("spoolmessage", "draft_copied_from") - op.drop_constraint('spoolmessage_ibfk_5', 'spoolmessage', - type_='foreignkey') - op.drop_column('spoolmessage', 'replyto_thread_id') - op.drop_column('spoolmessage', 'state') + op.drop_constraint("spoolmessage_ibfk_5", "spoolmessage", type_="foreignkey") + op.drop_column("spoolmessage", "replyto_thread_id") + op.drop_column("spoolmessage", "state") - op.drop_table('draftthread') + op.drop_table("draftthread") diff --git a/migrations/versions/036_replace_usertag_by_generic_tag.py b/migrations/versions/036_replace_usertag_by_generic_tag.py index 5a1381db8..2592c38c1 100644 --- a/migrations/versions/036_replace_usertag_by_generic_tag.py +++ b/migrations/versions/036_replace_usertag_by_generic_tag.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '21878b1b3d4b' -down_revision = '24e085e152c0' +revision = "21878b1b3d4b" +down_revision = "24e085e152c0" from alembic import op import sqlalchemy as sa @@ -16,55 +16,64 @@ def upgrade(): op.create_table( - 'tag', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('namespace_id', sa.Integer(), nullable=False), - sa.Column('public_id', sa.String(length=191), nullable=False), - sa.Column('name', sa.String(length=191), nullable=False), - sa.Column('user_created', sa.Boolean(), nullable=False, - server_default=sa.sql.expression.false()), - sa.Column('user_mutable', sa.Boolean(), nullable=False, - server_default=sa.sql.expression.true()), - sa.ForeignKeyConstraint(['namespace_id'], ['namespace.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('namespace_id', 'name'), - sa.UniqueConstraint('namespace_id', 'public_id') + "tag", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("namespace_id", sa.Integer(), nullable=False), + sa.Column("public_id", sa.String(length=191), nullable=False), + sa.Column("name", sa.String(length=191), nullable=False), + sa.Column( + "user_created", + sa.Boolean(), + nullable=False, + server_default=sa.sql.expression.false(), + ), + sa.Column( + "user_mutable", + sa.Boolean(), + nullable=False, + server_default=sa.sql.expression.true(), + ), + sa.ForeignKeyConstraint(["namespace_id"], ["namespace.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + 
sa.UniqueConstraint("namespace_id", "name"), + sa.UniqueConstraint("namespace_id", "public_id"), ) - op.create_index('ix_tag_created_at', 'tag', ['created_at'], unique=False) - op.create_index('ix_tag_deleted_at', 'tag', ['deleted_at'], unique=False) - op.create_index('ix_tag_updated_at', 'tag', ['updated_at'], unique=False) + op.create_index("ix_tag_created_at", "tag", ["created_at"], unique=False) + op.create_index("ix_tag_deleted_at", "tag", ["deleted_at"], unique=False) + op.create_index("ix_tag_updated_at", "tag", ["updated_at"], unique=False) op.create_table( - 'tagitem', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('thread_id', sa.Integer(), nullable=False), - sa.Column('tag_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['tag_id'], ['tag.id']), - sa.ForeignKeyConstraint(['thread_id'], ['thread.id']), - sa.PrimaryKeyConstraint('id') + "tagitem", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("thread_id", sa.Integer(), nullable=False), + sa.Column("tag_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["tag_id"], ["tag.id"]), + sa.ForeignKeyConstraint(["thread_id"], ["thread.id"]), + sa.PrimaryKeyConstraint("id"), ) - op.create_index('ix_tagitem_created_at', 'tagitem', ['created_at'], - unique=False) - op.create_index('ix_tagitem_deleted_at', 'tagitem', ['deleted_at'], - unique=False) - op.create_index('ix_tagitem_updated_at', 'tagitem', ['updated_at'], - unique=False) - op.drop_table(u'usertagitem') - op.drop_table(u'usertag') + op.create_index("ix_tagitem_created_at", "tagitem", ["created_at"], unique=False) + op.create_index("ix_tagitem_deleted_at", "tagitem", ["deleted_at"], unique=False) + op.create_index("ix_tagitem_updated_at", "tagitem", ["updated_at"], unique=False) + op.drop_table(u"usertagitem") + op.drop_table(u"usertag") - op.alter_column('folder', 'public_id', new_column_name='canonical_name', - existing_nullable=True, - existing_type=sa.String(length=191)) + op.alter_column( + "folder", + "public_id", + new_column_name="canonical_name", + existing_nullable=True, + existing_type=sa.String(length=191), + ) - op.drop_column('folder', u'exposed_name') + op.drop_column("folder", u"exposed_name") from inbox.models.session import session_scope + # Doing this ties this migration to the state of the code at the time # of this commit. However, the alternative is to have a crazy long, # involved and error-prone recreation of the models and their behavior @@ -73,21 +82,41 @@ def upgrade(): with session_scope(versioned=False) as db_session: # create canonical tags that don't already exist. 
- CANONICAL_TAG_NAMES = ['inbox', 'all', 'archive', 'drafts', 'send', - 'sending', 'sent', 'spam', 'starred', - 'unstarred', 'unread', 'replied', 'trash', - 'file', 'attachment'] + CANONICAL_TAG_NAMES = [ + "inbox", + "all", + "archive", + "drafts", + "send", + "sending", + "sent", + "spam", + "starred", + "unstarred", + "unread", + "replied", + "trash", + "file", + "attachment", + ] for namespace in db_session.query(Namespace): - existing_canonical_tags = db_session.query(Tag).filter( - Tag.namespace == namespace, - Tag.public_id.in_(CANONICAL_TAG_NAMES)).all() + existing_canonical_tags = ( + db_session.query(Tag) + .filter( + Tag.namespace == namespace, Tag.public_id.in_(CANONICAL_TAG_NAMES) + ) + .all() + ) missing_canonical_names = set(CANONICAL_TAG_NAMES).difference( - {tag.canonical_name for tag in existing_canonical_tags}) + {tag.canonical_name for tag in existing_canonical_tags} + ) for canonical_name in missing_canonical_names: - tag = Tag(namespace=namespace, - public_id=canonical_name, - name=canonical_name, - user_mutable=True) + tag = Tag( + namespace=namespace, + public_id=canonical_name, + name=canonical_name, + user_mutable=True, + ) db_session.add(tag) db_session.commit() diff --git a/migrations/versions/037_shorten_addresses.py b/migrations/versions/037_shorten_addresses.py index c6d3f1fbc..94ed98c26 100644 --- a/migrations/versions/037_shorten_addresses.py +++ b/migrations/versions/037_shorten_addresses.py @@ -7,18 +7,18 @@ """ # revision identifiers, used by Alembic. -revision = '1d7374c286c5' -down_revision = '21878b1b3d4b' +revision = "1d7374c286c5" +down_revision = "21878b1b3d4b" from alembic import op from sqlalchemy.dialects import mysql def upgrade(): - op.alter_column('account', 'email_address', type_=mysql.VARCHAR(191)) - op.alter_column('contact', 'email_address', type_=mysql.VARCHAR(191)) + op.alter_column("account", "email_address", type_=mysql.VARCHAR(191)) + op.alter_column("contact", "email_address", type_=mysql.VARCHAR(191)) def downgrade(): - op.alter_column('account', 'email_address', type_=mysql.VARCHAR(254)) - op.alter_column('contact', 'email_address', type_=mysql.VARCHAR(191)) + op.alter_column("account", "email_address", type_=mysql.VARCHAR(254)) + op.alter_column("contact", "email_address", type_=mysql.VARCHAR(191)) diff --git a/migrations/versions/038_add_public_ids_to_transactions.py b/migrations/versions/038_add_public_ids_to_transactions.py index 7ab638dea..40acc20eb 100644 --- a/migrations/versions/038_add_public_ids_to_transactions.py +++ b/migrations/versions/038_add_public_ids_to_transactions.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '1edbd63582c2' -down_revision = '1d7374c286c5' +revision = "1edbd63582c2" +down_revision = "1d7374c286c5" import sys from gc import collect as garbage_collect @@ -19,58 +19,67 @@ def upgrade(): - op.add_column('transaction', - sa.Column('public_id', mysql.BINARY(16), nullable=True)) - op.add_column('transaction', - sa.Column('object_public_id', sa.String(length=191), - nullable=True)) - op.create_index('ix_transaction_public_id', 'transaction', ['public_id'], - unique=False) + op.add_column( + "transaction", sa.Column("public_id", mysql.BINARY(16), nullable=True) + ) + op.add_column( + "transaction", + sa.Column("object_public_id", sa.String(length=191), nullable=True), + ) + op.create_index( + "ix_transaction_public_id", "transaction", ["public_id"], unique=False + ) from inbox.sqlalchemy_ext.util import generate_public_id, b36_to_bin + # TODO(emfree) reflect from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) class Transaction(Base): - __table__ = Base.metadata.tables['transaction'] + __table__ = Base.metadata.tables["transaction"] with session_scope(versioned=False) as db_session: count = 0 - num_transactions, = db_session.query(sa.func.max(Transaction.id)).one() - print 'Adding public ids to {} transactions'.format(num_transactions) + (num_transactions,) = db_session.query(sa.func.max(Transaction.id)).one() + print "Adding public ids to {} transactions".format(num_transactions) for pointer in range(0, num_transactions + 1, 500): for entry in db_session.query(Transaction).filter( - Transaction.id >= pointer, - Transaction.id < pointer + 500): + Transaction.id >= pointer, Transaction.id < pointer + 500 + ): entry.public_id = b36_to_bin(generate_public_id()) count += 1 if not count % 500: - sys.stdout.write('.') + sys.stdout.write(".") sys.stdout.flush() db_session.commit() garbage_collect() - op.alter_column('transaction', 'public_id', - existing_type=mysql.BINARY(16), nullable=False) + op.alter_column( + "transaction", "public_id", existing_type=mysql.BINARY(16), nullable=False + ) - op.add_column('transaction', sa.Column('public_snapshot', - sa.Text(length=4194304), - nullable=True)) - op.add_column('transaction', sa.Column('private_snapshot', - sa.Text(length=4194304), - nullable=True)) - op.drop_column('transaction', u'additional_data') + op.add_column( + "transaction", + sa.Column("public_snapshot", sa.Text(length=4194304), nullable=True), + ) + op.add_column( + "transaction", + sa.Column("private_snapshot", sa.Text(length=4194304), nullable=True), + ) + op.drop_column("transaction", u"additional_data") def downgrade(): - op.drop_index('ix_transaction_public_id', table_name='transaction') - op.drop_column('transaction', 'public_id') - op.drop_column('transaction', 'object_public_id') - op.add_column('transaction', sa.Column(u'additional_data', - mysql.LONGTEXT(), nullable=True)) - op.drop_column('transaction', 'public_snapshot') - op.drop_column('transaction', 'private_snapshot') + op.drop_index("ix_transaction_public_id", table_name="transaction") + op.drop_column("transaction", "public_id") + op.drop_column("transaction", "object_public_id") + op.add_column( + "transaction", sa.Column(u"additional_data", mysql.LONGTEXT(), nullable=True) + ) + op.drop_column("transaction", "public_snapshot") + op.drop_column("transaction", "private_snapshot") diff --git a/migrations/versions/039_change_easfoldersync_unique_constraint.py 
b/migrations/versions/039_change_easfoldersync_unique_constraint.py index fc5fdb8a9..ea0f7cdfd 100644 --- a/migrations/versions/039_change_easfoldersync_unique_constraint.py +++ b/migrations/versions/039_change_easfoldersync_unique_constraint.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '1c72d8a0120e' -down_revision = '1edbd63582c2' +revision = "1c72d8a0120e" +down_revision = "1edbd63582c2" from alembic import op from sqlalchemy.ext.declarative import declarative_base @@ -16,26 +16,31 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) - if 'easfoldersync' in Base.metadata.tables: - op.create_unique_constraint('uq_account_id_eas_folder_id', - 'easfoldersync', - ['account_id', 'eas_folder_id']) - op.drop_constraint('account_id', 'easfoldersync', type_='unique') + if "easfoldersync" in Base.metadata.tables: + op.create_unique_constraint( + "uq_account_id_eas_folder_id", + "easfoldersync", + ["account_id", "eas_folder_id"], + ) + op.drop_constraint("account_id", "easfoldersync", type_="unique") def downgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) - if 'easfoldersync' in Base.metadata.tables: - op.create_unique_constraint('account_id', - 'easfoldersync', - ['account_id', 'folder_name']) - op.drop_constraint('uq_account_id_eas_folder_id', 'easfoldersync', - type_='unique') + if "easfoldersync" in Base.metadata.tables: + op.create_unique_constraint( + "account_id", "easfoldersync", ["account_id", "folder_name"] + ) + op.drop_constraint( + "uq_account_id_eas_folder_id", "easfoldersync", type_="unique" + ) diff --git a/migrations/versions/040_gmailaccount.py b/migrations/versions/040_gmailaccount.py index 0a5e8d709..6c058392f 100644 --- a/migrations/versions/040_gmailaccount.py +++ b/migrations/versions/040_gmailaccount.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '4085dd542739' -down_revision = '1c72d8a0120e' +revision = "4085dd542739" +down_revision = "1c72d8a0120e" from alembic import op import sqlalchemy as sa @@ -16,35 +16,35 @@ def upgrade(): - print 'Creating new table gmailaccount' + print "Creating new table gmailaccount" op.create_table( - 'gmailaccount', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('access_token', sa.String(length=512), nullable=True), - sa.Column('refresh_token', sa.String(length=512), nullable=True), - sa.Column('scope', sa.String(length=512), nullable=True), - sa.Column('expires_in', sa.Integer(), nullable=True), - sa.Column('token_type', sa.String(length=64), nullable=True), - sa.Column('access_type', sa.String(length=64), nullable=True), - sa.Column('family_name', sa.String(length=256), nullable=True), - sa.Column('given_name', sa.String(length=256), nullable=True), - sa.Column('name', sa.String(length=256), nullable=True), - sa.Column('gender', sa.String(length=16), nullable=True), - sa.Column('g_id', sa.String(length=32), nullable=True), - sa.Column('g_id_token', sa.String(length=1024), nullable=True), - sa.Column('g_user_id', sa.String(length=32), nullable=True), - sa.Column('link', sa.String(length=256), nullable=True), - sa.Column('locale', sa.String(length=8), nullable=True), - sa.Column('picture', sa.String(length=1024), nullable=True), - sa.Column('home_domain', sa.String(length=256), nullable=True), - sa.ForeignKeyConstraint(['id'], ['imapaccount.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') + "gmailaccount", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("access_token", sa.String(length=512), nullable=True), + sa.Column("refresh_token", sa.String(length=512), nullable=True), + sa.Column("scope", sa.String(length=512), nullable=True), + sa.Column("expires_in", sa.Integer(), nullable=True), + sa.Column("token_type", sa.String(length=64), nullable=True), + sa.Column("access_type", sa.String(length=64), nullable=True), + sa.Column("family_name", sa.String(length=256), nullable=True), + sa.Column("given_name", sa.String(length=256), nullable=True), + sa.Column("name", sa.String(length=256), nullable=True), + sa.Column("gender", sa.String(length=16), nullable=True), + sa.Column("g_id", sa.String(length=32), nullable=True), + sa.Column("g_id_token", sa.String(length=1024), nullable=True), + sa.Column("g_user_id", sa.String(length=32), nullable=True), + sa.Column("link", sa.String(length=256), nullable=True), + sa.Column("locale", sa.String(length=8), nullable=True), + sa.Column("picture", sa.String(length=1024), nullable=True), + sa.Column("home_domain", sa.String(length=256), nullable=True), + sa.ForeignKeyConstraint(["id"], ["imapaccount.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), ) from sqlalchemy.ext.declarative import declarative_base from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) from inbox.models.session import session_scope @@ -52,63 +52,63 @@ def upgrade(): Base.metadata.reflect(engine) class Account(Base): - __table__ = Base.metadata.tables['account'] + __table__ = Base.metadata.tables["account"] class ImapAccount(Base): - __table__ = Base.metadata.tables['imapaccount'] + __table__ = Base.metadata.tables["imapaccount"] class GmailAccount(Base): - __table__ = Base.metadata.tables['gmailaccount'] + __table__ = Base.metadata.tables["gmailaccount"] with session_scope(versioned=False) as db_session: for acct in db_session.query(Account): - if acct.provider == 'Gmail': - imap_acct = 
db_session.query(ImapAccount). \ - filter_by(id=acct.id).one() - gmail_acct = GmailAccount(id=acct.id, - access_token=acct.o_access_token, - refresh_token=acct.o_refresh_token, - scope=acct.o_scope, - expires_in=acct.o_expires_in, - token_type=acct.o_token_type, - access_type=acct.o_access_type, - family_name=imap_acct.family_name, - given_name=imap_acct.given_name, - name=None, - gender=imap_acct.g_gender, - g_id=imap_acct.google_id, - g_id_token=acct.o_id_token, - g_user_id=acct.o_user_id, - link=imap_acct.g_plus_url, - locale=imap_acct.g_locale, - picture=imap_acct.g_picture_url, - home_domain=None - ) - acct.type = 'gmailaccount' + if acct.provider == "Gmail": + imap_acct = db_session.query(ImapAccount).filter_by(id=acct.id).one() + gmail_acct = GmailAccount( + id=acct.id, + access_token=acct.o_access_token, + refresh_token=acct.o_refresh_token, + scope=acct.o_scope, + expires_in=acct.o_expires_in, + token_type=acct.o_token_type, + access_type=acct.o_access_type, + family_name=imap_acct.family_name, + given_name=imap_acct.given_name, + name=None, + gender=imap_acct.g_gender, + g_id=imap_acct.google_id, + g_id_token=acct.o_id_token, + g_user_id=acct.o_user_id, + link=imap_acct.g_plus_url, + locale=imap_acct.g_locale, + picture=imap_acct.g_picture_url, + home_domain=None, + ) + acct.type = "gmailaccount" db_session.add(gmail_acct) db_session.commit() - op.drop_column('account', u'o_access_token') - op.drop_column('account', u'o_audience') - op.drop_column('account', u'o_scope') - op.drop_column('account', u'o_token_type') - op.drop_column('account', u'o_id_token') - op.drop_column('account', u'o_access_type') - op.drop_column('account', u'o_expires_in') - op.drop_column('account', u'o_user_id') - op.drop_column('account', u'provider_prefix') - op.drop_column('account', u'o_verified_email') - op.drop_column('account', u'provider') - op.drop_column('account', u'date') - op.drop_column('account', u'o_token_issued_to') - op.drop_column('account', u'o_refresh_token') - op.drop_column('imapaccount', u'family_name') - op.drop_column('imapaccount', u'google_id') - op.drop_column('imapaccount', u'g_plus_url') - op.drop_column('imapaccount', u'g_picture_url') - op.drop_column('imapaccount', u'g_gender') - op.drop_column('imapaccount', u'given_name') - op.drop_column('imapaccount', u'g_locale') + op.drop_column("account", u"o_access_token") + op.drop_column("account", u"o_audience") + op.drop_column("account", u"o_scope") + op.drop_column("account", u"o_token_type") + op.drop_column("account", u"o_id_token") + op.drop_column("account", u"o_access_type") + op.drop_column("account", u"o_expires_in") + op.drop_column("account", u"o_user_id") + op.drop_column("account", u"provider_prefix") + op.drop_column("account", u"o_verified_email") + op.drop_column("account", u"provider") + op.drop_column("account", u"date") + op.drop_column("account", u"o_token_issued_to") + op.drop_column("account", u"o_refresh_token") + op.drop_column("imapaccount", u"family_name") + op.drop_column("imapaccount", u"google_id") + op.drop_column("imapaccount", u"g_plus_url") + op.drop_column("imapaccount", u"g_picture_url") + op.drop_column("imapaccount", u"g_gender") + op.drop_column("imapaccount", u"given_name") + op.drop_column("imapaccount", u"g_locale") def downgrade(): diff --git a/migrations/versions/041_add_sync_status_columns_to_foldersync.py b/migrations/versions/041_add_sync_status_columns_to_foldersync.py index ccf4578dc..c85fc2af3 100644 --- a/migrations/versions/041_add_sync_status_columns_to_foldersync.py +++ 
b/migrations/versions/041_add_sync_status_columns_to_foldersync.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '159609404baf' -down_revision = '4085dd542739' +revision = "159609404baf" +down_revision = "4085dd542739" from alembic import op import sqlalchemy as sa @@ -17,28 +17,33 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) from inbox.sqlalchemy_ext.util import JSON, MutableDict + Base = declarative_base() Base.metadata.reflect(engine) - op.add_column('foldersync', - sa.Column('_sync_status', MutableDict.as_mutable(JSON()), - nullable=True)) + op.add_column( + "foldersync", + sa.Column("_sync_status", MutableDict.as_mutable(JSON()), nullable=True), + ) - if 'easfoldersync' in Base.metadata.tables: - op.add_column('easfoldersync', - sa.Column('_sync_status', MutableDict.as_mutable(JSON()), - nullable=True)) + if "easfoldersync" in Base.metadata.tables: + op.add_column( + "easfoldersync", + sa.Column("_sync_status", MutableDict.as_mutable(JSON()), nullable=True), + ) def downgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) - op.drop_column('foldersync', '_sync_status') + op.drop_column("foldersync", "_sync_status") - if 'easfoldersync' in Base.metadata.tables: - op.drop_column('easfoldersync', '_sync_status') + if "easfoldersync" in Base.metadata.tables: + op.drop_column("easfoldersync", "_sync_status") diff --git a/migrations/versions/042_simplify_tags_schema.py b/migrations/versions/042_simplify_tags_schema.py index be134fd3b..2780e0973 100644 --- a/migrations/versions/042_simplify_tags_schema.py +++ b/migrations/versions/042_simplify_tags_schema.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '459dbc29648' -down_revision = '159609404baf' +revision = "459dbc29648" +down_revision = "159609404baf" from alembic import op import sqlalchemy as sa @@ -16,10 +16,16 @@ def upgrade(): - op.drop_column('tag', u'user_mutable') + op.drop_column("tag", u"user_mutable") def downgrade(): - op.add_column('tag', sa.Column(u'user_mutable', - mysql.TINYINT(display_width=1), - server_default='1', nullable=False)) + op.add_column( + "tag", + sa.Column( + u"user_mutable", + mysql.TINYINT(display_width=1), + server_default="1", + nullable=False, + ), + ) diff --git a/migrations/versions/043_columns_for_sync_running_stopped_killed.py b/migrations/versions/043_columns_for_sync_running_stopped_killed.py index 2f29fbcaf..ffb20b372 100644 --- a/migrations/versions/043_columns_for_sync_running_stopped_killed.py +++ b/migrations/versions/043_columns_for_sync_running_stopped_killed.py @@ -7,25 +7,23 @@ """ # revision identifiers, used by Alembic. 
-revision = '5a136610b50b' -down_revision = '459dbc29648' +revision = "5a136610b50b" +down_revision = "459dbc29648" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('account', - sa.Column('sync_state', - sa.Enum('running', 'stopped', 'killed'), - nullable=True)) - op.add_column('account', - sa.Column('sync_start_time', sa.DateTime(), nullable=True)) - op.add_column('account', - sa.Column('sync_end_time', sa.DateTime(), nullable=True)) + op.add_column( + "account", + sa.Column("sync_state", sa.Enum("running", "stopped", "killed"), nullable=True), + ) + op.add_column("account", sa.Column("sync_start_time", sa.DateTime(), nullable=True)) + op.add_column("account", sa.Column("sync_end_time", sa.DateTime(), nullable=True)) def downgrade(): - op.drop_column('account', 'sync_state') - op.drop_column('account', 'sync_start_time') - op.drop_column('account', 'sync_end_time') + op.drop_column("account", "sync_state") + op.drop_column("account", "sync_start_time") + op.drop_column("account", "sync_end_time") diff --git a/migrations/versions/044_update_drafts_schema.py b/migrations/versions/044_update_drafts_schema.py index 3551feb3c..2e1b5c1ee 100644 --- a/migrations/versions/044_update_drafts_schema.py +++ b/migrations/versions/044_update_drafts_schema.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '247cd689758c' -down_revision = '5a136610b50b' +revision = "247cd689758c" +down_revision = "5a136610b50b" from alembic import op import sqlalchemy as sa @@ -16,45 +16,60 @@ def upgrade(): - op.add_column('spoolmessage', - sa.Column('is_reply', sa.Boolean(), - server_default=sa.sql.expression.false(), - nullable=False)) + op.add_column( + "spoolmessage", + sa.Column( + "is_reply", + sa.Boolean(), + server_default=sa.sql.expression.false(), + nullable=False, + ), + ) # Drop draft_copied_from and replyto_thread_id foreign key constraints. 
- op.drop_constraint('spoolmessage_ibfk_4', 'spoolmessage', - type_='foreignkey') - op.drop_constraint('spoolmessage_ibfk_5', 'spoolmessage', - type_='foreignkey') - op.drop_column('spoolmessage', u'draft_copied_from') - op.drop_column('spoolmessage', u'replyto_thread_id') - op.drop_table(u'draftthread') + op.drop_constraint("spoolmessage_ibfk_4", "spoolmessage", type_="foreignkey") + op.drop_constraint("spoolmessage_ibfk_5", "spoolmessage", type_="foreignkey") + op.drop_column("spoolmessage", u"draft_copied_from") + op.drop_column("spoolmessage", u"replyto_thread_id") + op.drop_table(u"draftthread") def downgrade(): - op.add_column('spoolmessage', sa.Column(u'replyto_thread_id', - mysql.INTEGER(display_width=11), - nullable=True)) - op.add_column('spoolmessage', sa.Column(u'draft_copied_from', - mysql.INTEGER(display_width=11), - nullable=True)) - op.drop_column('spoolmessage', 'is_reply') + op.add_column( + "spoolmessage", + sa.Column(u"replyto_thread_id", mysql.INTEGER(display_width=11), nullable=True), + ) + op.add_column( + "spoolmessage", + sa.Column(u"draft_copied_from", mysql.INTEGER(display_width=11), nullable=True), + ) + op.drop_column("spoolmessage", "is_reply") op.create_table( - u'draftthread', - sa.Column(u'created_at', mysql.DATETIME(), nullable=False), - sa.Column(u'updated_at', mysql.DATETIME(), nullable=False), - sa.Column(u'deleted_at', mysql.DATETIME(), nullable=True), - sa.Column(u'public_id', sa.BINARY(length=16), nullable=False), - sa.Column(u'id', mysql.INTEGER(display_width=11), nullable=False), - sa.Column(u'master_public_id', sa.BINARY(length=16), nullable=False), - sa.Column(u'thread_id', mysql.INTEGER(display_width=11), - autoincrement=False, nullable=False), - sa.Column(u'message_id', mysql.INTEGER(display_width=11), - autoincrement=False, nullable=False), - sa.ForeignKeyConstraint(['message_id'], [u'message.id'], - name=u'draftthread_ibfk_2'), - sa.ForeignKeyConstraint(['thread_id'], [u'thread.id'], - name=u'draftthread_ibfk_1'), - sa.PrimaryKeyConstraint(u'id'), - mysql_default_charset=u'utf8mb4', - mysql_engine=u'InnoDB' + u"draftthread", + sa.Column(u"created_at", mysql.DATETIME(), nullable=False), + sa.Column(u"updated_at", mysql.DATETIME(), nullable=False), + sa.Column(u"deleted_at", mysql.DATETIME(), nullable=True), + sa.Column(u"public_id", sa.BINARY(length=16), nullable=False), + sa.Column(u"id", mysql.INTEGER(display_width=11), nullable=False), + sa.Column(u"master_public_id", sa.BINARY(length=16), nullable=False), + sa.Column( + u"thread_id", + mysql.INTEGER(display_width=11), + autoincrement=False, + nullable=False, + ), + sa.Column( + u"message_id", + mysql.INTEGER(display_width=11), + autoincrement=False, + nullable=False, + ), + sa.ForeignKeyConstraint( + ["message_id"], [u"message.id"], name=u"draftthread_ibfk_2" + ), + sa.ForeignKeyConstraint( + ["thread_id"], [u"thread.id"], name=u"draftthread_ibfk_1" + ), + sa.PrimaryKeyConstraint(u"id"), + mysql_default_charset=u"utf8mb4", + mysql_engine=u"InnoDB", ) diff --git a/migrations/versions/045_new_password_storage.py b/migrations/versions/045_new_password_storage.py index f2e75e204..f3d9b0673 100644 --- a/migrations/versions/045_new_password_storage.py +++ b/migrations/versions/045_new_password_storage.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '7a117720554' -down_revision = '247cd689758c' +revision = "7a117720554" +down_revision = "247cd689758c" import os from alembic import op @@ -18,7 +18,7 @@ # this migration. 
# If you're running this migration and for some reason you had specified a # different key directory, you should change this accordingly. -KEY_DIR = '/var/lib/inboxapp/keys' +KEY_DIR = "/var/lib/inboxapp/keys" # Copied from deprecated inbox.util.cryptography module. @@ -32,17 +32,18 @@ def decrypt_aes(ciphertext, key): from Crypto.Cipher import AES def unpad(s): - return s[:-ord(s[-1])] + return s[: -ord(s[-1])] - iv = ciphertext[:AES.block_size] + iv = ciphertext[: AES.block_size] cipher = AES.new(key, AES.MODE_CBC, iv) - plaintext = unpad(cipher.decrypt(ciphertext))[AES.block_size:] + plaintext = unpad(cipher.decrypt(ciphertext))[AES.block_size :] return plaintext def upgrade(): from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) from inbox.util.file import mkdirp from hashlib import sha256 @@ -50,18 +51,18 @@ def upgrade(): OriginalBase = sa.ext.declarative.declarative_base() OriginalBase.metadata.reflect(engine) - if 'easaccount' in OriginalBase.metadata.tables: - op.add_column('easaccount', sa.Column('password', sa.String(256))) + if "easaccount" in OriginalBase.metadata.tables: + op.add_column("easaccount", sa.Column("password", sa.String(256))) # Reflect again to pick up added column Base = sa.ext.declarative.declarative_base() Base.metadata.reflect(engine) class Account(Base): - __table__ = Base.metadata.tables['account'] + __table__ = Base.metadata.tables["account"] class EASAccount(Account): - __table__ = Base.metadata.tables['easaccount'] + __table__ = Base.metadata.tables["easaccount"] @property def _keyfile(self, create_dir=True): @@ -70,12 +71,12 @@ def _keyfile(self, create_dir=True): assert KEY_DIR if create_dir: mkdirp(KEY_DIR) - key_filename = '{0}'.format(sha256(self.key).hexdigest()) + key_filename = "{0}".format(sha256(self.key).hexdigest()) return os.path.join(KEY_DIR, key_filename) def get_old_password(self): if self.password_aes is not None: - with open(self._keyfile, 'r') as f: + with open(self._keyfile, "r") as f: key = f.read() key = self.key + key @@ -87,9 +88,9 @@ def get_old_password(self): db_session.add(account) db_session.commit() - op.drop_column('account', 'password_aes') - op.drop_column('account', 'key') + op.drop_column("account", "password_aes") + op.drop_column("account", "key") def downgrade(): - raise Exception('No rolling back') + raise Exception("No rolling back") diff --git a/migrations/versions/046_yahoo.py b/migrations/versions/046_yahoo.py index 6ac6387a7..b0096ff18 100644 --- a/migrations/versions/046_yahoo.py +++ b/migrations/versions/046_yahoo.py @@ -7,22 +7,22 @@ """ # revision identifiers, used by Alembic. 
-revision = '38d78543f8be' -down_revision = '7a117720554' +revision = "38d78543f8be" +down_revision = "7a117720554" from alembic import op import sqlalchemy as sa def upgrade(): - op.create_table('yahooaccount', - sa.Column('id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['id'], [u'imapaccount.id'], - ondelete='CASCADE'), - sa.Column('password', sa.String(256)), - sa.PrimaryKeyConstraint('id') - ) + op.create_table( + "yahooaccount", + sa.Column("id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["id"], [u"imapaccount.id"], ondelete="CASCADE"), + sa.Column("password", sa.String(256)), + sa.PrimaryKeyConstraint("id"), + ) def downgrade(): - op.drop_table('yahooaccount') + op.drop_table("yahooaccount") diff --git a/migrations/versions/047_store_more_on_threads.py b/migrations/versions/047_store_more_on_threads.py index 2e05cc0b5..0b975896c 100644 --- a/migrations/versions/047_store_more_on_threads.py +++ b/migrations/versions/047_store_more_on_threads.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '161b88c17615' -down_revision = '38d78543f8be' +revision = "161b88c17615" +down_revision = "38d78543f8be" import itertools from alembic import op @@ -17,41 +17,46 @@ def upgrade(): from inbox.sqlalchemy_ext.util import JSON - op.add_column('thread', sa.Column('participants', JSON, - nullable=True)) - op.add_column('thread', sa.Column('message_public_ids', JSON, - nullable=True)) - op.add_column('thread', sa.Column('snippet', sa.String(191), - nullable=True)) + + op.add_column("thread", sa.Column("participants", JSON, nullable=True)) + op.add_column("thread", sa.Column("message_public_ids", JSON, nullable=True)) + op.add_column("thread", sa.Column("snippet", sa.String(191), nullable=True)) from inbox.models.session import session_scope from inbox.models import Thread with session_scope(versioned=False) as db_session: - num_threads, = db_session.query(sa.func.max(Thread.id)).one() + (num_threads,) = db_session.query(sa.func.max(Thread.id)).one() if num_threads is None: # There aren't actually any threads to update. 
return for pointer in range(0, num_threads + 1, 1000): print pointer for thread in db_session.query(Thread).filter( - Thread.id >= pointer, - Thread.id < pointer + 1000): + Thread.id >= pointer, Thread.id < pointer + 1000 + ): message = thread.messages[-1] thread.snippet = thread.messages[-1].snippet participant_set = set() for message in thread.messages: - participant_set.update({tuple(p) for p in itertools.chain( - message.from_addr, message.to_addr, message.cc_addr, - message.bcc_addr)}) + participant_set.update( + { + tuple(p) + for p in itertools.chain( + message.from_addr, + message.to_addr, + message.cc_addr, + message.bcc_addr, + ) + } + ) thread.participants = list(participant_set) - thread.message_public_ids = [m.public_id for m in - thread.messages] + thread.message_public_ids = [m.public_id for m in thread.messages] db_session.add(thread) db_session.commit() def downgrade(): - op.drop_column('thread', 'participants') - op.drop_column('thread', 'message_public_ids') - op.drop_column('thread', 'snippet') + op.drop_column("thread", "participants") + op.drop_column("thread", "message_public_ids") + op.drop_column("thread", "snippet") diff --git a/migrations/versions/048_remove_storage_of_access_token.py b/migrations/versions/048_remove_storage_of_access_token.py index 9f3dbf5d0..ebd52d804 100644 --- a/migrations/versions/048_remove_storage_of_access_token.py +++ b/migrations/versions/048_remove_storage_of_access_token.py @@ -7,23 +7,24 @@ """ # revision identifiers, used by Alembic. -revision = '4e44216e9830' -down_revision = '161b88c17615' +revision = "4e44216e9830" +down_revision = "161b88c17615" from alembic import op import sqlalchemy as sa def upgrade(): - op.drop_column('gmailaccount', 'access_token') - op.drop_column('gmailaccount', 'expires_in') - op.drop_column('gmailaccount', 'token_type') + op.drop_column("gmailaccount", "access_token") + op.drop_column("gmailaccount", "expires_in") + op.drop_column("gmailaccount", "token_type") def downgrade(): - op.add_column('gmailaccount', sa.Column('access_token', - sa.String(length=512), nullable=True)) - op.add_column('gmailaccount', sa.Column('expires_in', - sa.Integer(), nullable=True)) - op.add_column('gmailaccount', sa.Column('token_type', - sa.String(length=64), nullable=True)) + op.add_column( + "gmailaccount", sa.Column("access_token", sa.String(length=512), nullable=True) + ) + op.add_column("gmailaccount", sa.Column("expires_in", sa.Integer(), nullable=True)) + op.add_column( + "gmailaccount", sa.Column("token_type", sa.String(length=64), nullable=True) + ) diff --git a/migrations/versions/049_store_less_on_threads_after_all.py b/migrations/versions/049_store_less_on_threads_after_all.py index c6a6104d2..cf0d6f708 100644 --- a/migrations/versions/049_store_less_on_threads_after_all.py +++ b/migrations/versions/049_store_less_on_threads_after_all.py @@ -7,16 +7,16 @@ """ # revision identifiers, used by Alembic. 
-revision = '1b751e8d9cac' -down_revision = '4e44216e9830' +revision = "1b751e8d9cac" +down_revision = "4e44216e9830" from alembic import op def upgrade(): - op.drop_column('thread', 'participants') - op.drop_column('thread', 'message_public_ids') + op.drop_column("thread", "participants") + op.drop_column("thread", "message_public_ids") def downgrade(): - raise Exception('would not recreate data') + raise Exception("would not recreate data") diff --git a/migrations/versions/050_imap_table_cleanups.py b/migrations/versions/050_imap_table_cleanups.py index 47083f4bd..c25cffdd9 100644 --- a/migrations/versions/050_imap_table_cleanups.py +++ b/migrations/versions/050_imap_table_cleanups.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '29217fad3f46' -down_revision = '1b751e8d9cac' +revision = "29217fad3f46" +down_revision = "1b751e8d9cac" from alembic import op import sqlalchemy as sa @@ -19,140 +19,192 @@ def upgrade(): from inbox.models.folder import Folder from inbox.sqlalchemy_ext.util import JSON from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) # foldersync => imapfoldersyncstatus # note that renaming a table does in fact migrate constraints + indexes too - op.rename_table('foldersync', 'imapfoldersyncstatus') + op.rename_table("foldersync", "imapfoldersyncstatus") - op.alter_column('imapfoldersyncstatus', '_sync_status', - existing_type=JSON(), nullable=True, - new_column_name='_metrics') + op.alter_column( + "imapfoldersyncstatus", + "_sync_status", + existing_type=JSON(), + nullable=True, + new_column_name="_metrics", + ) - op.add_column('imapfoldersyncstatus', - sa.Column('folder_id', sa.Integer(), nullable=False)) + op.add_column( + "imapfoldersyncstatus", sa.Column("folder_id", sa.Integer(), nullable=False) + ) # uidvalidity => imapfolderinfo - op.rename_table('uidvalidity', 'imapfolderinfo') - op.alter_column('imapfolderinfo', 'uid_validity', - existing_type=sa.Integer(), nullable=False, - new_column_name='uidvalidity') - op.alter_column('imapfolderinfo', 'highestmodseq', - existing_type=sa.Integer(), nullable=True) - - op.drop_constraint('imapfolderinfo_ibfk_1', - 'imapfolderinfo', type_='foreignkey') - op.alter_column('imapfolderinfo', 'imapaccount_id', - existing_type=sa.Integer(), nullable=False, - new_column_name='account_id') - op.create_foreign_key('imapfolderinfo_ibfk_1', - 'imapfolderinfo', 'imapaccount', - ['account_id'], ['id']) - - op.add_column('imapfolderinfo', - sa.Column('folder_id', sa.Integer(), nullable=False)) + op.rename_table("uidvalidity", "imapfolderinfo") + op.alter_column( + "imapfolderinfo", + "uid_validity", + existing_type=sa.Integer(), + nullable=False, + new_column_name="uidvalidity", + ) + op.alter_column( + "imapfolderinfo", "highestmodseq", existing_type=sa.Integer(), nullable=True + ) + + op.drop_constraint("imapfolderinfo_ibfk_1", "imapfolderinfo", type_="foreignkey") + op.alter_column( + "imapfolderinfo", + "imapaccount_id", + existing_type=sa.Integer(), + nullable=False, + new_column_name="account_id", + ) + op.create_foreign_key( + "imapfolderinfo_ibfk_1", "imapfolderinfo", "imapaccount", ["account_id"], ["id"] + ) + + op.add_column( + "imapfolderinfo", sa.Column("folder_id", sa.Integer(), nullable=False) + ) # imapuid - op.drop_constraint('imapuid_ibfk_1', 'imapuid', type_='foreignkey') - op.alter_column('imapuid', 'imapaccount_id', - existing_type=sa.Integer(), nullable=False, - new_column_name='account_id') - op.create_foreign_key('imapuid_ibfk_1', - 'imapuid', 
'imapaccount', ['account_id'], ['id']) + op.drop_constraint("imapuid_ibfk_1", "imapuid", type_="foreignkey") + op.alter_column( + "imapuid", + "imapaccount_id", + existing_type=sa.Integer(), + nullable=False, + new_column_name="account_id", + ) + op.create_foreign_key( + "imapuid_ibfk_1", "imapuid", "imapaccount", ["account_id"], ["id"] + ) # migrate data and add new constraints Base = sa.ext.declarative.declarative_base() Base.metadata.reflect(engine) - if 'easfoldersync' in Base.metadata.tables: - op.rename_table('easfoldersync', 'easfoldersyncstatus') - op.add_column('easfoldersyncstatus', - sa.Column('folder_id', sa.Integer(), nullable=False)) - op.alter_column('easfoldersyncstatus', '_sync_status', - existing_type=JSON(), nullable=True, - new_column_name='_metrics') + if "easfoldersync" in Base.metadata.tables: + op.rename_table("easfoldersync", "easfoldersyncstatus") + op.add_column( + "easfoldersyncstatus", sa.Column("folder_id", sa.Integer(), nullable=False) + ) + op.alter_column( + "easfoldersyncstatus", + "_sync_status", + existing_type=JSON(), + nullable=True, + new_column_name="_metrics", + ) Base.metadata.reflect(engine) class EASFolderSyncStatus(Base): - __table__ = Base.metadata.tables['easfoldersyncstatus'] + __table__ = Base.metadata.tables["easfoldersyncstatus"] class ImapFolderSyncStatus(Base): - __table__ = Base.metadata.tables['imapfoldersyncstatus'] + __table__ = Base.metadata.tables["imapfoldersyncstatus"] class ImapFolderInfo(Base): - __table__ = Base.metadata.tables['imapfolderinfo'] - - with session_scope(versioned=False) \ - as db_session: - folder_id_for = dict([((account_id, name.lower()), id_) - for id_, account_id, name in - db_session.query(Folder.id, Folder.account_id, - Folder.name)]) + __table__ = Base.metadata.tables["imapfolderinfo"] + + with session_scope(versioned=False) as db_session: + folder_id_for = dict( + [ + ((account_id, name.lower()), id_) + for id_, account_id, name in db_session.query( + Folder.id, Folder.account_id, Folder.name + ) + ] + ) for status in db_session.query(ImapFolderSyncStatus): print "migrating", status.folder_name status.folder_id = folder_id_for[ - (status.account_id, status.folder_name.lower())] + (status.account_id, status.folder_name.lower()) + ] db_session.commit() - if 'easfoldersyncstatus' in Base.metadata.tables: + if "easfoldersyncstatus" in Base.metadata.tables: for status in db_session.query(EASFolderSyncStatus): print "migrating", status.folder_name folder_id = folder_id_for.get( - (status.account_id, status.folder_name.lower())) + (status.account_id, status.folder_name.lower()) + ) if folder_id is not None: status.folder_id = folder_id else: # EAS folder rows *may* not exist if have no messages - folder = Folder(account_id=status.account_id, name=status.folder_name) + folder = Folder( + account_id=status.account_id, name=status.folder_name + ) db_session.add(folder) db_session.commit() status.folder_id = folder.id db_session.commit() # some weird alembic bug? 
need to drop and recreate this FK - op.drop_constraint('easfoldersyncstatus_ibfk_1', - 'easfoldersyncstatus', type_='foreignkey') - op.drop_column('easfoldersyncstatus', 'folder_name') - op.create_foreign_key('easfoldersyncstatus_ibfk_1', - 'easfoldersyncstatus', - 'easaccount', ['account_id'], ['id']) - op.create_foreign_key('easfoldersyncstatus_ibfk_2', - 'easfoldersyncstatus', 'folder', - ['folder_id'], ['id']) - op.create_unique_constraint('account_id', 'easfoldersyncstatus', - ['account_id', 'folder_id']) + op.drop_constraint( + "easfoldersyncstatus_ibfk_1", "easfoldersyncstatus", type_="foreignkey" + ) + op.drop_column("easfoldersyncstatus", "folder_name") + op.create_foreign_key( + "easfoldersyncstatus_ibfk_1", + "easfoldersyncstatus", + "easaccount", + ["account_id"], + ["id"], + ) + op.create_foreign_key( + "easfoldersyncstatus_ibfk_2", + "easfoldersyncstatus", + "folder", + ["folder_id"], + ["id"], + ) + op.create_unique_constraint( + "account_id", "easfoldersyncstatus", ["account_id", "folder_id"] + ) # some weird alembic bug? need to drop and recreate this FK - op.drop_constraint('imapfoldersyncstatus_ibfk_1', 'imapfoldersyncstatus', - type_='foreignkey') - op.drop_constraint('account_id', 'imapfoldersyncstatus', type_='unique') - op.drop_column('imapfoldersyncstatus', 'folder_name') - op.create_foreign_key('imapfoldersyncstatus_ibfk_1', - 'imapfoldersyncstatus', - 'imapaccount', ['account_id'], ['id']) - op.create_foreign_key('imapfoldersyncstatus_ibfk_2', - 'imapfoldersyncstatus', 'folder', - ['folder_id'], ['id']) - op.create_unique_constraint('account_id', 'imapfoldersyncstatus', - ['account_id', 'folder_id']) - - with session_scope(versioned=False) \ - as db_session: + op.drop_constraint( + "imapfoldersyncstatus_ibfk_1", "imapfoldersyncstatus", type_="foreignkey" + ) + op.drop_constraint("account_id", "imapfoldersyncstatus", type_="unique") + op.drop_column("imapfoldersyncstatus", "folder_name") + op.create_foreign_key( + "imapfoldersyncstatus_ibfk_1", + "imapfoldersyncstatus", + "imapaccount", + ["account_id"], + ["id"], + ) + op.create_foreign_key( + "imapfoldersyncstatus_ibfk_2", + "imapfoldersyncstatus", + "folder", + ["folder_id"], + ["id"], + ) + op.create_unique_constraint( + "account_id", "imapfoldersyncstatus", ["account_id", "folder_id"] + ) + + with session_scope(versioned=False) as db_session: for info in db_session.query(ImapFolderInfo): print "migrating", info.folder_name - info.folder_id = folder_id_for[ - (info.account_id, info.folder_name.lower())] + info.folder_id = folder_id_for[(info.account_id, info.folder_name.lower())] db_session.commit() # some weird alembic bug? 
need to drop and recreate this FK - op.drop_constraint('imapfolderinfo_ibfk_1', 'imapfolderinfo', - type_='foreignkey') - op.drop_constraint('imapaccount_id', 'imapfolderinfo', type_='unique') - op.drop_column('imapfolderinfo', 'folder_name') - op.create_foreign_key('imapfolderinfo_ibfk_1', 'imapfolderinfo', - 'imapaccount', ['account_id'], ['id']) - op.create_foreign_key('imapfolderinfo_ibfk_2', 'imapfolderinfo', 'folder', - ['folder_id'], ['id']) - op.create_unique_constraint('imapaccount_id', 'imapfolderinfo', - ['account_id', 'folder_id']) + op.drop_constraint("imapfolderinfo_ibfk_1", "imapfolderinfo", type_="foreignkey") + op.drop_constraint("imapaccount_id", "imapfolderinfo", type_="unique") + op.drop_column("imapfolderinfo", "folder_name") + op.create_foreign_key( + "imapfolderinfo_ibfk_1", "imapfolderinfo", "imapaccount", ["account_id"], ["id"] + ) + op.create_foreign_key( + "imapfolderinfo_ibfk_2", "imapfolderinfo", "folder", ["folder_id"], ["id"] + ) + op.create_unique_constraint( + "imapaccount_id", "imapfolderinfo", ["account_id", "folder_id"] + ) def downgrade(): diff --git a/migrations/versions/051_store_secrets_in_local_vault.py b/migrations/versions/051_store_secrets_in_local_vault.py index ff0697435..13d1228a5 100644 --- a/migrations/versions/051_store_secrets_in_local_vault.py +++ b/migrations/versions/051_store_secrets_in_local_vault.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '1925c535a52d' -down_revision = '29217fad3f46' +revision = "1925c535a52d" +down_revision = "29217fad3f46" from alembic import op import sqlalchemy as sa @@ -19,41 +19,47 @@ def upgrade(): from inbox.models.session import session_scope from sqlalchemy.ext.declarative import declarative_base from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) op.create_table( - 'secret', - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.Column('updated_at', sa.DateTime(), nullable=True), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('acl_id', sa.Integer(), nullable=False), - sa.Column('type', sa.Integer(), nullable=False), - sa.Column('secret', sa.String(length=512), nullable=True), - sa.PrimaryKeyConstraint('id')) - op.add_column('gmailaccount', sa.Column('refresh_token_id', - sa.Integer(), nullable=True)) + "secret", + sa.Column("created_at", sa.DateTime(), nullable=True), + sa.Column("updated_at", sa.DateTime(), nullable=True), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("acl_id", sa.Integer(), nullable=False), + sa.Column("type", sa.Integer(), nullable=False), + sa.Column("secret", sa.String(length=512), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + op.add_column( + "gmailaccount", sa.Column("refresh_token_id", sa.Integer(), nullable=True) + ) Base = declarative_base() Base.metadata.reflect(engine) class Account(Base): - __table__ = Base.metadata.tables['account'] + __table__ = Base.metadata.tables["account"] class ImapAccount(Base): - __table__ = Base.metadata.tables['imapaccount'] + __table__ = Base.metadata.tables["imapaccount"] class GmailAccount(Base): - __table__ = Base.metadata.tables['gmailaccount'] + __table__ = Base.metadata.tables["gmailaccount"] class Secret(Base): - __table__ = Base.metadata.tables['secret'] + __table__ = Base.metadata.tables["secret"] - with session_scope(versioned=False) \ - as db_session: + with session_scope(versioned=False) as db_session: for 
acct in db_session.query(GmailAccount): - secret = Secret(acl_id=0, type=0, secret=acct.refresh_token, - created_at=datetime.utcnow(), - updated_at=datetime.utcnow()) + secret = Secret( + acl_id=0, + type=0, + secret=acct.refresh_token, + created_at=datetime.utcnow(), + updated_at=datetime.utcnow(), + ) db_session.add(secret) db_session.commit() @@ -61,45 +67,55 @@ class Secret(Base): db_session.add(acct) db_session.commit() - op.alter_column('secret', 'created_at', existing_type=sa.DateTime(), - existing_nullable=True, nullable=False) - op.alter_column('secret', 'updated_at', existing_type=sa.DateTime(), - existing_nullable=True, nullable=False) + op.alter_column( + "secret", + "created_at", + existing_type=sa.DateTime(), + existing_nullable=True, + nullable=False, + ) + op.alter_column( + "secret", + "updated_at", + existing_type=sa.DateTime(), + existing_nullable=True, + nullable=False, + ) - op.drop_column('gmailaccount', 'refresh_token') + op.drop_column("gmailaccount", "refresh_token") def downgrade(): from inbox.models.session import session_scope from sqlalchemy.ext.declarative import declarative_base from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) class Account(Base): - __table__ = Base.metadata.tables['account'] + __table__ = Base.metadata.tables["account"] class ImapAccount(Base): - __table__ = Base.metadata.tables['imapaccount'] + __table__ = Base.metadata.tables["imapaccount"] class GmailAccount(Base): - __table__ = Base.metadata.tables['gmailaccount'] + __table__ = Base.metadata.tables["gmailaccount"] class Secret(Base): - __table__ = Base.metadata.tables['secret'] + __table__ = Base.metadata.tables["secret"] - op.add_column('gmailaccount', sa.Column('refresh_token', - sa.String(length=512), nullable=True)) + op.add_column( + "gmailaccount", sa.Column("refresh_token", sa.String(length=512), nullable=True) + ) - with session_scope(versioned=False) \ - as db_session: + with session_scope(versioned=False) as db_session: for acct in db_session.query(GmailAccount): - secret = db_session.query(Secret) \ - .filter_by(id=acct.refresh_token_id).one() + secret = db_session.query(Secret).filter_by(id=acct.refresh_token_id).one() acct.refresh_token = secret.secret db_session.add(acct) db_session.commit() - op.drop_column('gmailaccount', 'refresh_token_id') - op.drop_table('secret') + op.drop_column("gmailaccount", "refresh_token_id") + op.drop_table("secret") diff --git a/migrations/versions/052_store_google_client_id_and_secret_on_.py b/migrations/versions/052_store_google_client_id_and_secret_on_.py index 608d1f84a..54a01abb6 100644 --- a/migrations/versions/052_store_google_client_id_and_secret_on_.py +++ b/migrations/versions/052_store_google_client_id_and_secret_on_.py @@ -7,21 +7,22 @@ """ # revision identifiers, used by Alembic. 
-revision = '358d0320397f' -down_revision = '1925c535a52d' +revision = "358d0320397f" +down_revision = "1925c535a52d" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('gmailaccount', sa.Column('client_id', sa.String(length=256), - nullable=True)) - op.add_column('gmailaccount', sa.Column('client_secret', - sa.String(length=256), - nullable=True)) + op.add_column( + "gmailaccount", sa.Column("client_id", sa.String(length=256), nullable=True) + ) + op.add_column( + "gmailaccount", sa.Column("client_secret", sa.String(length=256), nullable=True) + ) def downgrade(): - op.drop_column('gmailaccount', 'client_secret') - op.drop_column('gmailaccount', 'client_id') + op.drop_column("gmailaccount", "client_secret") + op.drop_column("gmailaccount", "client_id") diff --git a/migrations/versions/053_canonicalize_addresses.py b/migrations/versions/053_canonicalize_addresses.py index 290aae9fe..28a8401da 100644 --- a/migrations/versions/053_canonicalize_addresses.py +++ b/migrations/versions/053_canonicalize_addresses.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '3795b2a97af1' -down_revision = '358d0320397f' +revision = "3795b2a97af1" +down_revision = "358d0320397f" from alembic import op import sqlalchemy as sa @@ -16,29 +16,47 @@ def upgrade(): - op.add_column('account', sa.Column('_canonicalized_address', - sa.String(length=191), nullable=True)) - op.add_column('account', sa.Column('_raw_address', sa.String(length=191), - nullable=True)) - op.create_index('ix_account__canonicalized_address', 'account', - ['_canonicalized_address'], unique=False) - op.create_index('ix_account__raw_address', 'account', ['_raw_address'], - unique=False) - - op.add_column('contact', sa.Column('_canonicalized_address', - sa.String(length=191), nullable=True)) - op.add_column('contact', sa.Column('_raw_address', sa.String(length=191), - nullable=True)) - op.create_index('ix_contact__canonicalized_address', 'contact', - ['_canonicalized_address'], unique=False) - op.create_index('ix_contact__raw_address', 'contact', ['_raw_address'], - unique=False) + op.add_column( + "account", + sa.Column("_canonicalized_address", sa.String(length=191), nullable=True), + ) + op.add_column( + "account", sa.Column("_raw_address", sa.String(length=191), nullable=True) + ) + op.create_index( + "ix_account__canonicalized_address", + "account", + ["_canonicalized_address"], + unique=False, + ) + op.create_index( + "ix_account__raw_address", "account", ["_raw_address"], unique=False + ) + + op.add_column( + "contact", + sa.Column("_canonicalized_address", sa.String(length=191), nullable=True), + ) + op.add_column( + "contact", sa.Column("_raw_address", sa.String(length=191), nullable=True) + ) + op.create_index( + "ix_contact__canonicalized_address", + "contact", + ["_canonicalized_address"], + unique=False, + ) + op.create_index( + "ix_contact__raw_address", "contact", ["_raw_address"], unique=False + ) from flanker.addresslib import address from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) from inbox.models.session import session_scope from sqlalchemy.ext.declarative import declarative_base + Base = declarative_base() Base.metadata.reflect(engine) @@ -48,63 +66,65 @@ def canonicalize_address(addr): if not isinstance(parsed_address, address.EmailAddress): return addr local_part = parsed_address.mailbox - if parsed_address.hostname in ('gmail.com', 'googlemail.com'): - local_part = local_part.replace('.', '') - return '@'.join((local_part, 
parsed_address.hostname)) + if parsed_address.hostname in ("gmail.com", "googlemail.com"): + local_part = local_part.replace(".", "") + return "@".join((local_part, parsed_address.hostname)) class Account(Base): - __table__ = Base.metadata.tables['account'] + __table__ = Base.metadata.tables["account"] class Contact(Base): - __table__ = Base.metadata.tables['contact'] + __table__ = Base.metadata.tables["contact"] - with session_scope(versioned=False) \ - as db_session: + with session_scope(versioned=False) as db_session: for acct in db_session.query(Account): acct._raw_address = acct.email_address - acct._canonicalized_address = canonicalize_address( - acct.email_address) + acct._canonicalized_address = canonicalize_address(acct.email_address) db_session.commit() for contact in db_session.query(Contact): if contact.email_address is not None: contact._raw_address = contact.email_address contact._canonicalized_address = canonicalize_address( - contact.email_address) + contact.email_address + ) db_session.commit() - op.drop_index('ix_account_email_address', table_name='account') - op.drop_index('ix_contact_email_address', table_name='contact') - op.drop_column('account', 'email_address') - op.drop_column('contact', 'email_address') + op.drop_index("ix_account_email_address", table_name="account") + op.drop_index("ix_contact_email_address", table_name="contact") + op.drop_column("account", "email_address") + op.drop_column("contact", "email_address") def downgrade(): - op.add_column('account', sa.Column('email_address', - mysql.VARCHAR(length=191), - nullable=True)) - op.add_column('contact', sa.Column('email_address', - mysql.VARCHAR(length=191), - nullable=True)) - op.create_index('ix_account_email_address', 'account', ['email_address'], - unique=False) - op.create_index('ix_contact_email_address', 'contact', ['email_address'], - unique=False) + op.add_column( + "account", sa.Column("email_address", mysql.VARCHAR(length=191), nullable=True) + ) + op.add_column( + "contact", sa.Column("email_address", mysql.VARCHAR(length=191), nullable=True) + ) + op.create_index( + "ix_account_email_address", "account", ["email_address"], unique=False + ) + op.create_index( + "ix_contact_email_address", "contact", ["email_address"], unique=False + ) from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) from inbox.models.session import session_scope from sqlalchemy.ext.declarative import declarative_base + Base = declarative_base() Base.metadata.reflect(engine) class Account(Base): - __table__ = Base.metadata.tables['account'] + __table__ = Base.metadata.tables["account"] class Contact(Base): - __table__ = Base.metadata.tables['contact'] + __table__ = Base.metadata.tables["contact"] - with session_scope(versioned=False) \ - as db_session: + with session_scope(versioned=False) as db_session: for acct in db_session.query(Account): acct.email_address = acct._raw_address db_session.commit() @@ -112,7 +132,7 @@ class Contact(Base): contact.email_address = contact._raw_address db_session.commit() - op.drop_index('ix_account__raw_address', table_name='account') - op.drop_index('ix_account__canonicalized_address', table_name='account') - op.drop_column('account', '_raw_address') - op.drop_column('account', '_canonicalized_address') + op.drop_index("ix_account__raw_address", table_name="account") + op.drop_index("ix_account__canonicalized_address", table_name="account") + op.drop_column("account", "_raw_address") + op.drop_column("account", "_canonicalized_address") diff --git 
a/migrations/versions/054_dont_specially_store_mailing_list_.py b/migrations/versions/054_dont_specially_store_mailing_list_.py index a96d0e43d..3a53e189a 100644 --- a/migrations/versions/054_dont_specially_store_mailing_list_.py +++ b/migrations/versions/054_dont_specially_store_mailing_list_.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '5143154fb1a2' -down_revision = '3795b2a97af1' +revision = "5143154fb1a2" +down_revision = "3795b2a97af1" from alembic import op import sqlalchemy as sa @@ -16,14 +16,16 @@ def upgrade(): - op.drop_column('message', 'mailing_list_headers') - op.drop_column('thread', 'mailing_list_headers') + op.drop_column("message", "mailing_list_headers") + op.drop_column("thread", "mailing_list_headers") def downgrade(): # downgrade method provided for convenience, but we won't get the data # back. Didn't need it anyway... - op.add_column('thread', sa.Column('mailing_list_headers', mysql.TEXT(), - nullable=True)) - op.add_column('message', sa.Column('mailing_list_headers', mysql.TEXT(), - nullable=True)) + op.add_column( + "thread", sa.Column("mailing_list_headers", mysql.TEXT(), nullable=True) + ) + op.add_column( + "message", sa.Column("mailing_list_headers", mysql.TEXT(), nullable=True) + ) diff --git a/migrations/versions/055_add_account_liveness.py b/migrations/versions/055_add_account_liveness.py index 2437b4976..4053e959b 100644 --- a/migrations/versions/055_add_account_liveness.py +++ b/migrations/versions/055_add_account_liveness.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '4b4674f1a726' -down_revision = '5143154fb1a2' +revision = "4b4674f1a726" +down_revision = "5143154fb1a2" from alembic import op import sqlalchemy as sa @@ -16,9 +16,9 @@ def upgrade(): op.add_column( - 'account', - sa.Column('state', sa.Enum('live', 'down', 'invalid'), nullable=True)) + "account", sa.Column("state", sa.Enum("live", "down", "invalid"), nullable=True) + ) def downgrade(): - op.drop_column('account', 'state') + op.drop_column("account", "state") diff --git a/migrations/versions/056_message_unique_constraint.py b/migrations/versions/056_message_unique_constraint.py index 14c9c71cf..f8144e2a8 100644 --- a/migrations/versions/056_message_unique_constraint.py +++ b/migrations/versions/056_message_unique_constraint.py @@ -7,47 +7,74 @@ """ # revision identifiers, used by Alembic. 
-revision = '4b4c5579c083' -down_revision = '4b4674f1a726' +revision = "4b4c5579c083" +down_revision = "4b4674f1a726" from alembic import op from sqlalchemy import func def upgrade(): - op.drop_constraint('messagecontactassociation_ibfk_1', - 'messagecontactassociation', type_='foreignkey') - op.drop_constraint('messagecontactassociation_ibfk_2', - 'messagecontactassociation', type_='foreignkey') - op.create_foreign_key('messagecontactassociation_ibfk_1', - 'messagecontactassociation', 'contact', - ['contact_id'], ['id'], ondelete='CASCADE') - op.create_foreign_key('messagecontactassociation_ibfk_2', - 'messagecontactassociation', 'message', - ['message_id'], ['id'], ondelete='CASCADE') - op.drop_constraint('imapuid_ibfk_2', 'imapuid', type_='foreignkey') - op.create_foreign_key('imapuid_ibfk_2', 'imapuid', 'message', - ['message_id'], ['id'], ondelete='CASCADE') + op.drop_constraint( + "messagecontactassociation_ibfk_1", + "messagecontactassociation", + type_="foreignkey", + ) + op.drop_constraint( + "messagecontactassociation_ibfk_2", + "messagecontactassociation", + type_="foreignkey", + ) + op.create_foreign_key( + "messagecontactassociation_ibfk_1", + "messagecontactassociation", + "contact", + ["contact_id"], + ["id"], + ondelete="CASCADE", + ) + op.create_foreign_key( + "messagecontactassociation_ibfk_2", + "messagecontactassociation", + "message", + ["message_id"], + ["id"], + ondelete="CASCADE", + ) + op.drop_constraint("imapuid_ibfk_2", "imapuid", type_="foreignkey") + op.create_foreign_key( + "imapuid_ibfk_2", + "imapuid", + "message", + ["message_id"], + ["id"], + ondelete="CASCADE", + ) from inbox.models import Message from inbox.models.session import session_scope - with session_scope(versioned=False) \ - as db_session: - groups = db_session.query( - Message.id, Message.thread_id, Message.g_msgid)\ - .filter(~Message.g_msgid.is_(None))\ - .group_by(Message.thread_id, Message.g_msgid).having( - func.count(Message.id) > 1).all() + with session_scope(versioned=False) as db_session: + groups = ( + db_session.query(Message.id, Message.thread_id, Message.g_msgid) + .filter(~Message.g_msgid.is_(None)) + .group_by(Message.thread_id, Message.g_msgid) + .having(func.count(Message.id) > 1) + .all() + ) for message_id, thread_id, g_msgid in groups: print "deleting duplicates of ({}, {}), saving {}".format( - thread_id, g_msgid, message_id) + thread_id, g_msgid, message_id + ) db_session.query(Message).filter( Message.thread_id == thread_id, Message.g_msgid == g_msgid, - Message.id != message_id).delete() + Message.id != message_id, + ).delete() - op.execute('ALTER TABLE message ADD UNIQUE INDEX ix_message_thread_id_g_msgid (thread_id, g_msgid)') + op.execute( + "ALTER TABLE message ADD UNIQUE INDEX ix_message_thread_id_g_msgid (thread_id, g_msgid)" + ) def downgrade(): diff --git a/migrations/versions/057_consolidate_account_sync_status_columns.py b/migrations/versions/057_consolidate_account_sync_status_columns.py index 43d42383c..55447ab9e 100644 --- a/migrations/versions/057_consolidate_account_sync_status_columns.py +++ b/migrations/versions/057_consolidate_account_sync_status_columns.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '4f57260602c9' -down_revision = '4b4c5579c083' +revision = "4f57260602c9" +down_revision = "4b4c5579c083" from alembic import op import sqlalchemy as sa @@ -18,30 +18,36 @@ def upgrade(): from inbox.sqlalchemy_ext.util import JSON, MutableDict from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) from inbox.models.session import session_scope from sqlalchemy.ext.declarative import declarative_base - op.add_column('account', - sa.Column('_sync_status', MutableDict.as_mutable(JSON()), - default={}, nullable=True)) + + op.add_column( + "account", + sa.Column( + "_sync_status", MutableDict.as_mutable(JSON()), default={}, nullable=True + ), + ) Base = declarative_base() Base.metadata.reflect(engine) class Account(Base): - __table__ = Base.metadata.tables['account'] + __table__ = Base.metadata.tables["account"] - with session_scope(versioned=False) \ - as db_session: + with session_scope(versioned=False) as db_session: for acct in db_session.query(Account): - d = dict(sync_start_time=str(acct.sync_start_time), - sync_end_time=str(acct.sync_end_time)) + d = dict( + sync_start_time=str(acct.sync_start_time), + sync_end_time=str(acct.sync_end_time), + ) acct._sync_status = json_util.dumps(d) db_session.commit() - op.drop_column('account', 'sync_start_time') - op.drop_column('account', 'sync_end_time') + op.drop_column("account", "sync_start_time") + op.drop_column("account", "sync_end_time") def downgrade(): diff --git a/migrations/versions/058_enforce_length_limit_of_255_on_message_.py b/migrations/versions/058_enforce_length_limit_of_255_on_message_.py index 63265bc0a..e42824b31 100644 --- a/migrations/versions/058_enforce_length_limit_of_255_on_message_.py +++ b/migrations/versions/058_enforce_length_limit_of_255_on_message_.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '4af5952e8a5b' -down_revision = '4f57260602c9' +revision = "4af5952e8a5b" +down_revision = "4f57260602c9" from alembic import op import sqlalchemy as sa @@ -25,23 +25,27 @@ def truncate_subject(obj): def upgrade(): from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) from sqlalchemy.ext.declarative import declarative_base + Base = declarative_base() Base.metadata.reflect(engine) class Message(Base): - __table__ = Base.metadata.tables['message'] + __table__ = Base.metadata.tables["message"] class Thread(Base): - __table__ = Base.metadata.tables['thread'] + __table__ = Base.metadata.tables["thread"] - with session_scope(versioned=False) \ - as db_session: + with session_scope(versioned=False) as db_session: count = 0 - for msg in db_session.query(Message).options( - sa.orm.load_only('subject')).yield_per(500): + for msg in ( + db_session.query(Message) + .options(sa.orm.load_only("subject")) + .yield_per(500) + ): truncate_subject(msg) count += 1 if count > 500: @@ -49,8 +53,9 @@ class Thread(Base): count = 0 db_session.commit() - for thread in db_session.query(Thread).options( - sa.orm.load_only('subject')).yield_per(500): + for thread in ( + db_session.query(Thread).options(sa.orm.load_only("subject")).yield_per(500) + ): truncate_subject(thread) count += 1 if count > 500: @@ -58,10 +63,8 @@ class Thread(Base): count = 0 db_session.commit() - op.alter_column('message', 'subject', - type_=sa.String(255), existing_nullable=True) - op.alter_column('thread', 'subject', - type_=sa.String(255), existing_nullable=True) + op.alter_column("message", "subject", type_=sa.String(255), existing_nullable=True) + op.alter_column("thread", "subject", type_=sa.String(255), existing_nullable=True) def downgrade(): diff --git a/migrations/versions/059_add_action_log.py b/migrations/versions/059_add_action_log.py index e8da3bfa1..bd31db934 100644 --- a/migrations/versions/059_add_action_log.py +++ b/migrations/versions/059_add_action_log.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '15dfc756a1b0' -down_revision = '4af5952e8a5b' +revision = "15dfc756a1b0" +down_revision = "4af5952e8a5b" from alembic import op import sqlalchemy as sa @@ -16,33 +16,36 @@ def upgrade(): op.create_table( - 'actionlog', - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('namespace_id', sa.Integer(), nullable=False), - sa.Column('action', sa.Text(length=40), nullable=False), - sa.Column('record_id', sa.Integer(), nullable=False), - sa.Column('table_name', sa.Text(length=40), nullable=False), - sa.ForeignKeyConstraint(['namespace_id'], ['namespace.id'], ), - sa.PrimaryKeyConstraint('id') + "actionlog", + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("namespace_id", sa.Integer(), nullable=False), + sa.Column("action", sa.Text(length=40), nullable=False), + sa.Column("record_id", sa.Integer(), nullable=False), + sa.Column("table_name", sa.Text(length=40), nullable=False), + sa.ForeignKeyConstraint(["namespace_id"], ["namespace.id"],), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + "ix_actionlog_created_at", "actionlog", ["created_at"], unique=False + ) + op.create_index( + "ix_actionlog_deleted_at", "actionlog", ["deleted_at"], unique=False + ) + op.create_index( + "ix_actionlog_namespace_id", "actionlog", ["namespace_id"], unique=False + ) + op.create_index( + "ix_actionlog_updated_at", "actionlog", ["updated_at"], unique=False ) - op.create_index('ix_actionlog_created_at', 'actionlog', ['created_at'], - unique=False) - op.create_index('ix_actionlog_deleted_at', 'actionlog', ['deleted_at'], - unique=False) - op.create_index('ix_actionlog_namespace_id', 'actionlog', ['namespace_id'], - unique=False) - op.create_index('ix_actionlog_updated_at', 'actionlog', ['updated_at'], - unique=False) def downgrade(): - op.drop_constraint('actionlog_ibfk_1', 'actionlog', - type_='foreignkey') - op.drop_index('ix_actionlog_updated_at', table_name='actionlog') - op.drop_index('ix_actionlog_namespace_id', table_name='actionlog') - op.drop_index('ix_actionlog_deleted_at', table_name='actionlog') - op.drop_index('ix_actionlog_created_at', table_name='actionlog') - op.drop_table('actionlog') + op.drop_constraint("actionlog_ibfk_1", "actionlog", type_="foreignkey") + op.drop_index("ix_actionlog_updated_at", table_name="actionlog") + op.drop_index("ix_actionlog_namespace_id", table_name="actionlog") + op.drop_index("ix_actionlog_deleted_at", table_name="actionlog") + op.drop_index("ix_actionlog_created_at", table_name="actionlog") + op.drop_table("actionlog") diff --git a/migrations/versions/060_cascade_folder_deletes_to_easuid.py b/migrations/versions/060_cascade_folder_deletes_to_easuid.py index 5b2ff77a6..7b75c9456 100644 --- a/migrations/versions/060_cascade_folder_deletes_to_easuid.py +++ b/migrations/versions/060_cascade_folder_deletes_to_easuid.py @@ -7,27 +7,33 @@ """ # revision identifiers, used by Alembic. 
-revision = '2a748760ac63' -down_revision = '15dfc756a1b0' +revision = "2a748760ac63" +down_revision = "15dfc756a1b0" from alembic import op def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) from sqlalchemy.ext.declarative import declarative_base Base = declarative_base() Base.metadata.reflect(engine) - if 'easfoldersyncstatus' in Base.metadata.tables: - op.drop_constraint('easuid_ibfk_3', 'easuid', type_='foreignkey') - op.create_foreign_key('easuid_ibfk_3', 'easuid', 'folder', - ['folder_id'], ['id'], ondelete='CASCADE') + if "easfoldersyncstatus" in Base.metadata.tables: + op.drop_constraint("easuid_ibfk_3", "easuid", type_="foreignkey") + op.create_foreign_key( + "easuid_ibfk_3", + "easuid", + "folder", + ["folder_id"], + ["id"], + ondelete="CASCADE", + ) def downgrade(): - op.drop_constraint('easuid_ibfk_3', 'easuid', type_='foreignkey') - op.create_foreign_key('easuid_ibfk_3', 'easuid', 'folder', - ['folder_id'], ['id']) + op.drop_constraint("easuid_ibfk_3", "easuid", type_="foreignkey") + op.create_foreign_key("easuid_ibfk_3", "easuid", "folder", ["folder_id"], ["id"]) diff --git a/migrations/versions/061_remove_easfoldersyncstatus_folder_rows_.py b/migrations/versions/061_remove_easfoldersyncstatus_folder_rows_.py index c228ffd8a..2726ceb0f 100644 --- a/migrations/versions/061_remove_easfoldersyncstatus_folder_rows_.py +++ b/migrations/versions/061_remove_easfoldersyncstatus_folder_rows_.py @@ -8,35 +8,42 @@ """ # revision identifiers, used by Alembic. -revision = 'bb4f204f192' -down_revision = '2a748760ac63' +revision = "bb4f204f192" +down_revision = "2a748760ac63" def upgrade(): - if 'easfoldersyncstatus' in Base.metadata.tables: + if "easfoldersyncstatus" in Base.metadata.tables: from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) from inbox.models.session import session_scope from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm.exc import NoResultFound + Base = declarative_base() Base.metadata.reflect(engine) from inbox.models.backends.eas import EASFolderSyncStatus from inbox.models import Folder from inbox.util.eas.constants import SKIP_FOLDERS - with session_scope(versioned=False) as \ - db_session: - statuses = db_session.query(EASFolderSyncStatus).filter( - EASFolderSyncStatus.eas_folder_type.in_(SKIP_FOLDERS)).all() + with session_scope(versioned=False) as db_session: + statuses = ( + db_session.query(EASFolderSyncStatus) + .filter(EASFolderSyncStatus.eas_folder_type.in_(SKIP_FOLDERS)) + .all() + ) for s in statuses: db_session.delete(s) db_session.delete(s.folder) try: - for status in db_session.query(EASFolderSyncStatus)\ - .join(Folder).filter( - Folder.name == 'RecipientInfo').all(): + for status in ( + db_session.query(EASFolderSyncStatus) + .join(Folder) + .filter(Folder.name == "RecipientInfo") + .all() + ): db_session.delete(status) db_session.delete(status.folder) except NoResultFound: @@ -46,4 +53,4 @@ def upgrade(): def downgrade(): - raise Exception('Nope, not needed.') + raise Exception("Nope, not needed.") diff --git a/migrations/versions/062_up_max_length_of_message_message_id_header.py b/migrations/versions/062_up_max_length_of_message_message_id_header.py index 1d22a455b..e5a5b83d6 100644 --- a/migrations/versions/062_up_max_length_of_message_message_id_header.py +++ b/migrations/versions/062_up_max_length_of_message_message_id_header.py @@ -11,18 +11,20 @@ """ # revision identifiers, used by Alembic. 
-revision = '4c03aaa1fa47' -down_revision = 'bb4f204f192' +revision = "4c03aaa1fa47" +down_revision = "bb4f204f192" from alembic import op import sqlalchemy as sa def upgrade(): - op.alter_column('message', 'message_id_header', - type_=sa.String(998), existing_nullable=True) + op.alter_column( + "message", "message_id_header", type_=sa.String(998), existing_nullable=True + ) def downgrade(): - op.alter_column('message', 'message_id_header', - type_=sa.String(225), existing_nullable=True) + op.alter_column( + "message", "message_id_header", type_=sa.String(225), existing_nullable=True + ) diff --git a/migrations/versions/063_drop_misc_keyval_column_on_parts.py b/migrations/versions/063_drop_misc_keyval_column_on_parts.py index 204f879cf..41b63a9d8 100644 --- a/migrations/versions/063_drop_misc_keyval_column_on_parts.py +++ b/migrations/versions/063_drop_misc_keyval_column_on_parts.py @@ -7,14 +7,14 @@ """ # revision identifiers, used by Alembic. -revision = '4fd3fcd46a3b' -down_revision = '4c03aaa1fa47' +revision = "4fd3fcd46a3b" +down_revision = "4c03aaa1fa47" from alembic import op def upgrade(): - op.drop_column('part', 'misc_keyval') + op.drop_column("part", "misc_keyval") def downgrade(): diff --git a/migrations/versions/064_make_address_fields_non_null.py b/migrations/versions/064_make_address_fields_non_null.py index 6db843d52..8bc7eca61 100644 --- a/migrations/versions/064_make_address_fields_non_null.py +++ b/migrations/versions/064_make_address_fields_non_null.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '2d05e116bdb7' -down_revision = '4fd3fcd46a3b' +revision = "2d05e116bdb7" +down_revision = "4fd3fcd46a3b" from alembic import op from sqlalchemy import func, or_ @@ -17,49 +17,49 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) from inbox.models.session import session_scope from sqlalchemy.ext.declarative import declarative_base + Base = declarative_base() Base.metadata.reflect(engine) class Message(Base): - __table__ = Base.metadata.tables['message'] + __table__ = Base.metadata.tables["message"] - with session_scope(versioned=False) \ - as db_session: - null_field_count = db_session.query(func.count(Message.id)). 
\ - filter(or_(Message.from_addr.is_(None), - Message.to_addr.is_(None), - Message.cc_addr.is_(None), - Message.bcc_addr.is_(None))).scalar() - print 'messages to migrate:', null_field_count + with session_scope(versioned=False) as db_session: + null_field_count = ( + db_session.query(func.count(Message.id)) + .filter( + or_( + Message.from_addr.is_(None), + Message.to_addr.is_(None), + Message.cc_addr.is_(None), + Message.bcc_addr.is_(None), + ) + ) + .scalar() + ) + print "messages to migrate:", null_field_count if int(null_field_count): for message in db_session.query(Message): - for attr in ('to_addr', 'from_addr', 'cc_addr', 'bcc_addr'): + for attr in ("to_addr", "from_addr", "cc_addr", "bcc_addr"): if getattr(message, attr) is None: setattr(message, attr, []) - print '.', + print ".", db_session.commit() - print 'making addrs non-nullable' + print "making addrs non-nullable" - op.alter_column('message', 'bcc_addr', existing_type=mysql.TEXT(), - nullable=False) - op.alter_column('message', 'cc_addr', existing_type=mysql.TEXT(), - nullable=False) - op.alter_column('message', 'from_addr', existing_type=mysql.TEXT(), - nullable=False) - op.alter_column('message', 'to_addr', existing_type=mysql.TEXT(), - nullable=False) + op.alter_column("message", "bcc_addr", existing_type=mysql.TEXT(), nullable=False) + op.alter_column("message", "cc_addr", existing_type=mysql.TEXT(), nullable=False) + op.alter_column("message", "from_addr", existing_type=mysql.TEXT(), nullable=False) + op.alter_column("message", "to_addr", existing_type=mysql.TEXT(), nullable=False) def downgrade(): - op.alter_column('message', 'to_addr', existing_type=mysql.TEXT(), - nullable=True) - op.alter_column('message', 'from_addr', existing_type=mysql.TEXT(), - nullable=True) - op.alter_column('message', 'cc_addr', existing_type=mysql.TEXT(), - nullable=True) - op.alter_column('message', 'bcc_addr', existing_type=mysql.TEXT(), - nullable=True) + op.alter_column("message", "to_addr", existing_type=mysql.TEXT(), nullable=True) + op.alter_column("message", "from_addr", existing_type=mysql.TEXT(), nullable=True) + op.alter_column("message", "cc_addr", existing_type=mysql.TEXT(), nullable=True) + op.alter_column("message", "bcc_addr", existing_type=mysql.TEXT(), nullable=True) diff --git a/migrations/versions/065_add_multi_column_transaction_index.py b/migrations/versions/065_add_multi_column_transaction_index.py index 2053d4ff3..3e80baefc 100644 --- a/migrations/versions/065_add_multi_column_transaction_index.py +++ b/migrations/versions/065_add_multi_column_transaction_index.py @@ -7,17 +7,20 @@ """ # revision identifiers, used by Alembic. -revision = '2e6120c97485' -down_revision = '2d05e116bdb7' +revision = "2e6120c97485" +down_revision = "2d05e116bdb7" from alembic import op def upgrade(): - op.create_index('namespace_id_deleted_at', - 'transaction', ['namespace_id', 'deleted_at'], - unique=False) + op.create_index( + "namespace_id_deleted_at", + "transaction", + ["namespace_id", "deleted_at"], + unique=False, + ) def downgrade(): - op.drop_index('namespace_id_deleted_at', table_name='transaction') + op.drop_index("namespace_id_deleted_at", table_name="transaction") diff --git a/migrations/versions/066_kill_spoolmessage.py b/migrations/versions/066_kill_spoolmessage.py index efb5bfd66..ddf732de3 100644 --- a/migrations/versions/066_kill_spoolmessage.py +++ b/migrations/versions/066_kill_spoolmessage.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '4f3a1f6eaee3' -down_revision = '2e6120c97485' +revision = "4f3a1f6eaee3" +down_revision = "2e6120c97485" from alembic import op import sqlalchemy as sa @@ -18,45 +18,53 @@ def upgrade(): from sqlalchemy.ext.declarative import declarative_base from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - op.add_column('message', - sa.Column('is_created', sa.Boolean, - server_default=sa.sql.expression.false(), - nullable=False)) - op.add_column('message', - sa.Column('is_sent', sa.Boolean, - server_default=sa.sql.expression.false(), - nullable=False)) - op.add_column('message', - sa.Column('state', - sa.Enum('draft', 'sending', 'sending failed', - 'sent'))) - op.add_column('message', - sa.Column('is_reply', sa.Boolean())) - op.add_column('message', - sa.Column('resolved_message_id', sa.Integer(), - nullable=True)) - op.create_foreign_key('message_ibfk_2', - 'message', 'message', - ['resolved_message_id'], ['id']) - - op.add_column('message', - sa.Column('parent_draft_id', sa.Integer(), nullable=True)) - op.create_foreign_key('message_ibfk_3', - 'message', 'message', - ['parent_draft_id'], ['id']) + op.add_column( + "message", + sa.Column( + "is_created", + sa.Boolean, + server_default=sa.sql.expression.false(), + nullable=False, + ), + ) + op.add_column( + "message", + sa.Column( + "is_sent", + sa.Boolean, + server_default=sa.sql.expression.false(), + nullable=False, + ), + ) + op.add_column( + "message", + sa.Column("state", sa.Enum("draft", "sending", "sending failed", "sent")), + ) + op.add_column("message", sa.Column("is_reply", sa.Boolean())) + op.add_column( + "message", sa.Column("resolved_message_id", sa.Integer(), nullable=True) + ) + op.create_foreign_key( + "message_ibfk_2", "message", "message", ["resolved_message_id"], ["id"] + ) + + op.add_column("message", sa.Column("parent_draft_id", sa.Integer(), nullable=True)) + op.create_foreign_key( + "message_ibfk_3", "message", "message", ["parent_draft_id"], ["id"] + ) Base = declarative_base() Base.metadata.reflect(engine) class Message(Base): - __table__ = Base.metadata.tables['message'] + __table__ = Base.metadata.tables["message"] class SpoolMessage(Base): - __table__ = Base.metadata.tables['spoolmessage'] + __table__ = Base.metadata.tables["spoolmessage"] - with session_scope(versioned=False) \ - as db_session: + with session_scope(versioned=False) as db_session: for sm in db_session.query(SpoolMessage).yield_per(250): m = db_session.query(Message).get(sm.id) @@ -69,8 +77,8 @@ class SpoolMessage(Base): db_session.commit() - op.drop_table('spoolmessage') + op.drop_table("spoolmessage") def downgrade(): - raise Exception('No going back.') + raise Exception("No going back.") diff --git a/migrations/versions/067_add_executed_status_to_action_log.py b/migrations/versions/067_add_executed_status_to_action_log.py index d06d934e6..76149b49c 100644 --- a/migrations/versions/067_add_executed_status_to_action_log.py +++ b/migrations/versions/067_add_executed_status_to_action_log.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '322c2800c401' -down_revision = '4f3a1f6eaee3' +revision = "322c2800c401" +down_revision = "4f3a1f6eaee3" from alembic import op import sqlalchemy as sa @@ -16,31 +16,39 @@ def upgrade(): # Purge any existing entries. 
- op.drop_table('actionlog') + op.drop_table("actionlog") op.create_table( - 'actionlog', - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('namespace_id', sa.Integer(), nullable=False), - sa.Column('action', sa.Text(length=40), nullable=False), - sa.Column('record_id', sa.Integer(), nullable=False), - sa.Column('table_name', sa.Text(length=40), nullable=False), - sa.Column('executed', sa.Boolean(), nullable=False, - server_default=sa.sql.expression.false()), - sa.ForeignKeyConstraint(['namespace_id'], ['namespace.id'], ), - sa.PrimaryKeyConstraint('id') + "actionlog", + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("namespace_id", sa.Integer(), nullable=False), + sa.Column("action", sa.Text(length=40), nullable=False), + sa.Column("record_id", sa.Integer(), nullable=False), + sa.Column("table_name", sa.Text(length=40), nullable=False), + sa.Column( + "executed", + sa.Boolean(), + nullable=False, + server_default=sa.sql.expression.false(), + ), + sa.ForeignKeyConstraint(["namespace_id"], ["namespace.id"],), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + "ix_actionlog_created_at", "actionlog", ["created_at"], unique=False + ) + op.create_index( + "ix_actionlog_deleted_at", "actionlog", ["deleted_at"], unique=False + ) + op.create_index( + "ix_actionlog_namespace_id", "actionlog", ["namespace_id"], unique=False + ) + op.create_index( + "ix_actionlog_updated_at", "actionlog", ["updated_at"], unique=False ) - op.create_index('ix_actionlog_created_at', 'actionlog', ['created_at'], - unique=False) - op.create_index('ix_actionlog_deleted_at', 'actionlog', ['deleted_at'], - unique=False) - op.create_index('ix_actionlog_namespace_id', 'actionlog', ['namespace_id'], - unique=False) - op.create_index('ix_actionlog_updated_at', 'actionlog', ['updated_at'], - unique=False) def downgrade(): - op.drop_column('actionlog', 'executed') + op.drop_column("actionlog", "executed") diff --git a/migrations/versions/068_outlook.py b/migrations/versions/068_outlook.py index 696cf0373..991bb05ab 100644 --- a/migrations/versions/068_outlook.py +++ b/migrations/versions/068_outlook.py @@ -7,40 +7,34 @@ """ # revision identifiers, used by Alembic. 
-revision = '1ceff61ec112' -down_revision = '322c2800c401' +revision = "1ceff61ec112" +down_revision = "322c2800c401" from alembic import op import sqlalchemy as sa def upgrade(): - op.create_table('outlookaccount', - sa.Column('id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['id'], [u'imapaccount.id'], - ondelete='CASCADE'), - sa.Column('refresh_token_id', - sa.Integer(), nullable=True), - sa.Column('scope', sa.String(length=512), nullable=True), - sa.Column('locale', sa.String(length=8), nullable=True), - sa.Column('client_id', sa.String(length=256), - nullable=True), - sa.Column('client_secret', sa.String(length=256), - nullable=True), - sa.Column('o_id', sa.String(length=32), nullable=True), - sa.Column('o_id_token', sa.String(length=1024), - nullable=True), - sa.Column('link', sa.String(length=256), nullable=True), - sa.Column('name', sa.String(length=256), nullable=True), - sa.Column('gender', sa.String(length=16), nullable=True), - sa.Column('family_name', sa.String(length=256), - nullable=True), - sa.Column('given_name', sa.String(length=256), - nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.alter_column('secret', 'secret', type_=sa.String(length=2048)) + op.create_table( + "outlookaccount", + sa.Column("id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["id"], [u"imapaccount.id"], ondelete="CASCADE"), + sa.Column("refresh_token_id", sa.Integer(), nullable=True), + sa.Column("scope", sa.String(length=512), nullable=True), + sa.Column("locale", sa.String(length=8), nullable=True), + sa.Column("client_id", sa.String(length=256), nullable=True), + sa.Column("client_secret", sa.String(length=256), nullable=True), + sa.Column("o_id", sa.String(length=32), nullable=True), + sa.Column("o_id_token", sa.String(length=1024), nullable=True), + sa.Column("link", sa.String(length=256), nullable=True), + sa.Column("name", sa.String(length=256), nullable=True), + sa.Column("gender", sa.String(length=16), nullable=True), + sa.Column("family_name", sa.String(length=256), nullable=True), + sa.Column("given_name", sa.String(length=256), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + op.alter_column("secret", "secret", type_=sa.String(length=2048)) def downgrade(): - op.drop_table('outlookaccount') + op.drop_table("outlookaccount") diff --git a/migrations/versions/069_aol.py b/migrations/versions/069_aol.py index b6f0cc556..84d38a7d8 100644 --- a/migrations/versions/069_aol.py +++ b/migrations/versions/069_aol.py @@ -7,22 +7,22 @@ """ # revision identifiers, used by Alembic. 
-revision = '479b3b84a73e' -down_revision = '1ceff61ec112' +revision = "479b3b84a73e" +down_revision = "1ceff61ec112" from alembic import op import sqlalchemy as sa def upgrade(): - op.create_table('aolaccount', - sa.Column('id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['id'], [u'imapaccount.id'], - ondelete='CASCADE'), - sa.Column('password', sa.String(256)), - sa.PrimaryKeyConstraint('id') - ) + op.create_table( + "aolaccount", + sa.Column("id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["id"], [u"imapaccount.id"], ondelete="CASCADE"), + sa.Column("password", sa.String(256)), + sa.PrimaryKeyConstraint("id"), + ) def downgrade(): - op.drop_table('aolaccount') + op.drop_table("aolaccount") diff --git a/migrations/versions/070_fix_folder_easfoldersyncstatus_unique_constraints.py b/migrations/versions/070_fix_folder_easfoldersyncstatus_unique_constraints.py index 903a8f33b..beaf2f34e 100644 --- a/migrations/versions/070_fix_folder_easfoldersyncstatus_unique_constraints.py +++ b/migrations/versions/070_fix_folder_easfoldersyncstatus_unique_constraints.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '2525c5245cc2' -down_revision = '479b3b84a73e' +revision = "2525c5245cc2" +down_revision = "479b3b84a73e" from alembic import op import sqlalchemy as sa @@ -16,26 +16,25 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = sa.ext.declarative.declarative_base() Base.metadata.reflect(engine) - op.drop_constraint('folder_fk1', 'folder', type_='foreignkey') - op.drop_constraint('account_id', 'folder', type_='unique') + op.drop_constraint("folder_fk1", "folder", type_="foreignkey") + op.drop_constraint("account_id", "folder", type_="unique") - op.create_foreign_key('folder_fk1', - 'folder', 'account', - ['account_id'], ['id']) - op.create_unique_constraint('account_id', - 'folder', - ['account_id', 'name', 'canonical_name']) + op.create_foreign_key("folder_fk1", "folder", "account", ["account_id"], ["id"]) + op.create_unique_constraint( + "account_id", "folder", ["account_id", "name", "canonical_name"] + ) - if 'easfoldersyncstatus' in Base.metadata.tables: - op.create_unique_constraint('account_id_2', - 'easfoldersyncstatus', - ['account_id', 'eas_folder_id']) + if "easfoldersyncstatus" in Base.metadata.tables: + op.create_unique_constraint( + "account_id_2", "easfoldersyncstatus", ["account_id", "eas_folder_id"] + ) def downgrade(): - raise Exception('Unsupported, going back will break things.') + raise Exception("Unsupported, going back will break things.") diff --git a/migrations/versions/071_more_sync_states.py b/migrations/versions/071_more_sync_states.py index 3f5f9c82b..89f92462b 100644 --- a/migrations/versions/071_more_sync_states.py +++ b/migrations/versions/071_more_sync_states.py @@ -7,26 +7,28 @@ """ # revision identifiers, used by Alembic. 
-revision = '3bb5d61c895c' -down_revision = '2525c5245cc2' +revision = "3bb5d61c895c" +down_revision = "2525c5245cc2" from alembic import op import sqlalchemy as sa def upgrade(): - op.alter_column('account', 'sync_state', - existing_type=sa.Enum('running', 'stopped', - 'killed'), - type_=sa.Enum('running', 'stopped', - 'killed', 'invalid', - 'connerror'), - existing_nullable=True) + op.alter_column( + "account", + "sync_state", + existing_type=sa.Enum("running", "stopped", "killed"), + type_=sa.Enum("running", "stopped", "killed", "invalid", "connerror"), + existing_nullable=True, + ) def downgrade(): - op.alter_column('account', 'sync_state', - type_=sa.Enum('running', 'stopped', 'killed'), - existing_type=sa.Enum('running', 'stopped', 'killed', - 'invalid', 'connerror'), - existing_nullable=True) + op.alter_column( + "account", + "sync_state", + type_=sa.Enum("running", "stopped", "killed"), + existing_type=sa.Enum("running", "stopped", "killed", "invalid", "connerror"), + existing_nullable=True, + ) diff --git a/migrations/versions/072_recompute_snippets.py b/migrations/versions/072_recompute_snippets.py index a7368aef3..184aec819 100644 --- a/migrations/versions/072_recompute_snippets.py +++ b/migrations/versions/072_recompute_snippets.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '4e93522b5b62' -down_revision = '3bb5d61c895c' +revision = "4e93522b5b62" +down_revision = "3bb5d61c895c" from sqlalchemy.ext.declarative import declarative_base @@ -31,6 +31,7 @@ def upgrade(): from inbox.models.session import session_scope from inbox.util.html import strip_tags from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) @@ -38,19 +39,17 @@ def upgrade(): SNIPPET_LENGTH = 191 class Message(Base): - __table__ = Base.metadata.tables['message'] + __table__ = Base.metadata.tables["message"] def calculate_html_snippet(msg, text): - text = text.replace('
<br>', ' ').replace('<br/>', ' '). \
-            replace('<br />', ' ')
+        text = text.replace("<br>", " ").replace("<br/>", " ").replace("<br />
", " ") text = strip_tags(text) calculate_plaintext_snippet(msg, text) def calculate_plaintext_snippet(msg, text): - msg.snippet = ' '.join(text.split())[:SNIPPET_LENGTH] + msg.snippet = " ".join(text.split())[:SNIPPET_LENGTH] - with session_scope(versioned=False)\ - as db_session: + with session_scope(versioned=False) as db_session: for message in page_query(db_session.query(Message)): if not message.decode_error: calculate_html_snippet(message, message.sanitized_body) diff --git a/migrations/versions/073_generic_providers.py b/migrations/versions/073_generic_providers.py index e87c42191..2673b011b 100644 --- a/migrations/versions/073_generic_providers.py +++ b/migrations/versions/073_generic_providers.py @@ -10,8 +10,8 @@ """ # revision identifiers, used by Alembic. -revision = '43cd2de5ad85' -down_revision = '4e93522b5b62' +revision = "43cd2de5ad85" +down_revision = "4e93522b5b62" from alembic import op import sqlalchemy as sa @@ -22,63 +22,67 @@ def upgrade(): from inbox.models.session import session_scope from sqlalchemy.ext.declarative import declarative_base from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - op.create_table('genericaccount', - sa.Column('id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['id'], [u'imapaccount.id'], - ondelete='CASCADE'), - sa.Column('password_id', - sa.Integer(), nullable=True), - sa.Column('provider', - sa.String(length=64), nullable=False), - sa.PrimaryKeyConstraint('id') - ) + op.create_table( + "genericaccount", + sa.Column("id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["id"], [u"imapaccount.id"], ondelete="CASCADE"), + sa.Column("password_id", sa.Integer(), nullable=True), + sa.Column("provider", sa.String(length=64), nullable=False), + sa.PrimaryKeyConstraint("id"), + ) Base = declarative_base() Base.metadata.reflect(engine) class Account(Base): - __table__ = Base.metadata.tables['account'] + __table__ = Base.metadata.tables["account"] class ImapAccount(Base): - __table__ = Base.metadata.tables['imapaccount'] + __table__ = Base.metadata.tables["imapaccount"] class YahooAccount(Base): - __table__ = Base.metadata.tables['yahooaccount'] + __table__ = Base.metadata.tables["yahooaccount"] class AOLAccount(Base): - __table__ = Base.metadata.tables['aolaccount'] + __table__ = Base.metadata.tables["aolaccount"] class GenericAccount(Base): - __table__ = Base.metadata.tables['genericaccount'] + __table__ = Base.metadata.tables["genericaccount"] class Secret(Base): - __table__ = Base.metadata.tables['secret'] + __table__ = Base.metadata.tables["secret"] - with session_scope(versioned=False) \ - as db_session: + with session_scope(versioned=False) as db_session: for acct in db_session.query(YahooAccount): - secret = Secret(acl_id=0, type=0, secret=acct.password, - created_at=datetime.utcnow(), - updated_at=datetime.utcnow()) + secret = Secret( + acl_id=0, + type=0, + secret=acct.password, + created_at=datetime.utcnow(), + updated_at=datetime.utcnow(), + ) db_session.add(secret) db_session.commit() - new_acct = GenericAccount(id=acct.id, - provider='yahoo') + new_acct = GenericAccount(id=acct.id, provider="yahoo") new_acct.password_id = secret.id db_session.add(new_acct) for acct in db_session.query(AOLAccount): - secret = Secret(acl_id=0, type=0, secret=acct.password, - created_at=datetime.utcnow(), - updated_at=datetime.utcnow()) + secret = Secret( + acl_id=0, + type=0, + secret=acct.password, + created_at=datetime.utcnow(), + updated_at=datetime.utcnow(), + ) db_session.add(secret) 
db_session.commit() - new_acct = GenericAccount(id=acct.id, - provider='aol') + new_acct = GenericAccount(id=acct.id, provider="aol") new_acct.password_id = secret.id db_session.add(new_acct) @@ -88,62 +92,61 @@ class Secret(Base): # don't cascade the delete engine.execute("drop table aolaccount") engine.execute("drop table yahooaccount") - op.drop_column('imapaccount', 'imap_host') + op.drop_column("imapaccount", "imap_host") def downgrade(): from inbox.models.session import session_scope from sqlalchemy.ext.declarative import declarative_base from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - op.create_table('aolaccount', - sa.Column('id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['id'], [u'imapaccount.id'], - ondelete='CASCADE'), - sa.Column('password', sa.String(256)), - sa.PrimaryKeyConstraint('id') - ) - - op.create_table('yahooaccount', - sa.Column('id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['id'], [u'imapaccount.id'], - ondelete='CASCADE'), - sa.Column('password', sa.String(256)), - sa.PrimaryKeyConstraint('id') - ) + op.create_table( + "aolaccount", + sa.Column("id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["id"], [u"imapaccount.id"], ondelete="CASCADE"), + sa.Column("password", sa.String(256)), + sa.PrimaryKeyConstraint("id"), + ) + + op.create_table( + "yahooaccount", + sa.Column("id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["id"], [u"imapaccount.id"], ondelete="CASCADE"), + sa.Column("password", sa.String(256)), + sa.PrimaryKeyConstraint("id"), + ) Base = declarative_base() Base.metadata.reflect(engine) class Account(Base): - __table__ = Base.metadata.tables['account'] + __table__ = Base.metadata.tables["account"] class ImapAccount(Base): - __table__ = Base.metadata.tables['imapaccount'] + __table__ = Base.metadata.tables["imapaccount"] class YahooAccount(Base): - __table__ = Base.metadata.tables['yahooaccount'] + __table__ = Base.metadata.tables["yahooaccount"] class AOLAccount(Base): - __table__ = Base.metadata.tables['aolaccount'] + __table__ = Base.metadata.tables["aolaccount"] class GenericAccount(Base): - __table__ = Base.metadata.tables['genericaccount'] + __table__ = Base.metadata.tables["genericaccount"] - with session_scope(versioned=False) \ - as db_session: + with session_scope(versioned=False) as db_session: for acct in db_session.query(GenericAccount): - secret = db_session.query(Secret) \ - .filter_by(id=acct.password_id).one() + secret = db_session.query(Secret).filter_by(id=acct.password_id).one() - if acct.provider == 'yahoo': - new_acct = YahooAccount(namespace=acct.namespace, - password=secret.secret) + if acct.provider == "yahoo": + new_acct = YahooAccount( + namespace=acct.namespace, password=secret.secret + ) db_session.add(new_acct) - elif acct.provider == 'aol': - new_acct = AOLAccount(namespace=acct.namespace, - password=secret.secret) + elif acct.provider == "aol": + new_acct = AOLAccount(namespace=acct.namespace, password=secret.secret) db_session.add(new_acct) db_session.commit() - engine.execute('drop table genericaccount') + engine.execute("drop table genericaccount") diff --git a/migrations/versions/074_add_eas_thrid_index.py b/migrations/versions/074_add_eas_thrid_index.py index 114e70ea6..49c5ba787 100644 --- a/migrations/versions/074_add_eas_thrid_index.py +++ b/migrations/versions/074_add_eas_thrid_index.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '3c02d8204335' -down_revision = '43cd2de5ad85' +revision = "3c02d8204335" +down_revision = "43cd2de5ad85" from alembic import op import sqlalchemy as sa @@ -16,20 +16,27 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine() Base = sa.ext.declarative.declarative_base() Base.metadata.reflect(engine) - if 'easthread' in Base.metadata.tables: - op.create_index('ix_easthread_eas_thrid', 'easthread', ['eas_thrid'], - unique=False, mysql_length=256) + if "easthread" in Base.metadata.tables: + op.create_index( + "ix_easthread_eas_thrid", + "easthread", + ["eas_thrid"], + unique=False, + mysql_length=256, + ) def downgrade(): from inbox.ignition import main_engine + engine = main_engine() Base = sa.ext.declarative.declarative_base() Base.metadata.reflect(engine) - if 'easthread' in Base.metadata.tables: - op.drop_index('ix_easthread_eas_thrid', table_name='easthread') + if "easthread" in Base.metadata.tables: + op.drop_index("ix_easthread_eas_thrid", table_name="easthread") diff --git a/migrations/versions/075_drop_contacts_search_signals.py b/migrations/versions/075_drop_contacts_search_signals.py index 7713f409f..7aa525942 100644 --- a/migrations/versions/075_drop_contacts_search_signals.py +++ b/migrations/versions/075_drop_contacts_search_signals.py @@ -7,16 +7,16 @@ """ # revision identifiers, used by Alembic. -revision = '1763103db266' -down_revision = '3c02d8204335' +revision = "1763103db266" +down_revision = "3c02d8204335" from alembic import op def upgrade(): - op.drop_table('searchsignal') - op.drop_table('searchtoken') + op.drop_table("searchsignal") + op.drop_table("searchtoken") def downgrade(): - raise Exception('No rolling back') + raise Exception("No rolling back") diff --git a/migrations/versions/076_add_thread_order_column.py b/migrations/versions/076_add_thread_order_column.py index c97d0b7b1..d6bbc3340 100644 --- a/migrations/versions/076_add_thread_order_column.py +++ b/migrations/versions/076_add_thread_order_column.py @@ -7,17 +7,16 @@ """ # revision identifiers, used by Alembic. -revision = '3de3979f94bd' -down_revision = '1763103db266' +revision = "3de3979f94bd" +down_revision = "1763103db266" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column("message", sa.Column('thread_order', sa.Integer, - nullable=False)) + op.add_column("message", sa.Column("thread_order", sa.Integer, nullable=False)) def downgrade(): - op.drop_column('message', 'thread_order') + op.drop_column("message", "thread_order") diff --git a/migrations/versions/077_add_supports_condstore_column_to_.py b/migrations/versions/077_add_supports_condstore_column_to_.py index 184bfe5a7..fb253ecf4 100644 --- a/migrations/versions/077_add_supports_condstore_column_to_.py +++ b/migrations/versions/077_add_supports_condstore_column_to_.py @@ -7,17 +7,18 @@ """ # revision identifiers, used by Alembic. 
-revision = '3c74cbe7882e' -down_revision = '3de3979f94bd' +revision = "3c74cbe7882e" +down_revision = "3de3979f94bd" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('genericaccount', sa.Column('supports_condstore', - sa.Boolean(), nullable=True)) + op.add_column( + "genericaccount", sa.Column("supports_condstore", sa.Boolean(), nullable=True) + ) def downgrade(): - op.drop_column('genericaccount', 'supports_condstore') + op.drop_column("genericaccount", "supports_condstore") diff --git a/migrations/versions/078_events.py b/migrations/versions/078_events.py index 48e53ea04..0a1fa1bf1 100644 --- a/migrations/versions/078_events.py +++ b/migrations/versions/078_events.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '1c2253a0e997' -down_revision = '3c74cbe7882e' +revision = "1c2253a0e997" +down_revision = "3c74cbe7882e" from alembic import op import sqlalchemy as sa @@ -16,36 +16,36 @@ def upgrade(): op.create_table( - 'event', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('uid', sa.String(length=64), nullable=False), - sa.Column('provider_name', sa.String(length=64), nullable=False), - sa.Column('public_id', sa.BINARY(length=16), nullable=False), - sa.Column('raw_data', sa.Text(), nullable=False), - sa.Column('account_id', sa.Integer(), nullable=False), - sa.Column('subject', sa.String(length=255), nullable=True), - sa.Column('body', sa.Text(), nullable=True), - sa.Column('location', sa.String(length=255), nullable=True), - sa.Column('busy', sa.Boolean(), nullable=False), - sa.Column('locked', sa.Boolean(), nullable=False), - sa.Column('reminders', sa.String(length=255), nullable=True), - sa.Column('recurrence', sa.String(length=255), nullable=True), - sa.Column('start', sa.DateTime(), nullable=False), - sa.Column('end', sa.DateTime(), nullable=True), - sa.Column('all_day', sa.Boolean(), nullable=False), - sa.Column('time_zone', sa.Integer(), nullable=False), - sa.Column('source', sa.Enum('remote', 'local'), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.Column('updated_at', sa.DateTime(), nullable=True), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.ForeignKeyConstraint(['account_id'], ['account.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id'), + "event", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("uid", sa.String(length=64), nullable=False), + sa.Column("provider_name", sa.String(length=64), nullable=False), + sa.Column("public_id", sa.BINARY(length=16), nullable=False), + sa.Column("raw_data", sa.Text(), nullable=False), + sa.Column("account_id", sa.Integer(), nullable=False), + sa.Column("subject", sa.String(length=255), nullable=True), + sa.Column("body", sa.Text(), nullable=True), + sa.Column("location", sa.String(length=255), nullable=True), + sa.Column("busy", sa.Boolean(), nullable=False), + sa.Column("locked", sa.Boolean(), nullable=False), + sa.Column("reminders", sa.String(length=255), nullable=True), + sa.Column("recurrence", sa.String(length=255), nullable=True), + sa.Column("start", sa.DateTime(), nullable=False), + sa.Column("end", sa.DateTime(), nullable=True), + sa.Column("all_day", sa.Boolean(), nullable=False), + sa.Column("time_zone", sa.Integer(), nullable=False), + sa.Column("source", sa.Enum("remote", "local"), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=True), + sa.Column("updated_at", sa.DateTime(), nullable=True), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + 
sa.ForeignKeyConstraint(["account_id"], ["account.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.add_column( + "account", sa.Column("last_synced_events", sa.DateTime(), nullable=True) ) - op.add_column('account', sa.Column('last_synced_events', sa.DateTime(), - nullable=True)) def downgrade(): - op.drop_table('event') - op.drop_column('account', 'last_synced_events') + op.drop_table("event") + op.drop_column("account", "last_synced_events") diff --git a/migrations/versions/079_events_longer_uids.py b/migrations/versions/079_events_longer_uids.py index 49dea73ea..7a76c424f 100644 --- a/migrations/versions/079_events_longer_uids.py +++ b/migrations/versions/079_events_longer_uids.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '5901bf556d83' -down_revision = '1c2253a0e997' +revision = "5901bf556d83" +down_revision = "1c2253a0e997" from alembic import op import sqlalchemy as sa @@ -17,6 +17,7 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) @@ -25,18 +26,18 @@ def upgrade(): # only drop the uid constraint if it exists (created with creat_db # vs a migration). inspector = sa.inspect(engine) - if 'uid' in [c['name'] for c in inspector.get_unique_constraints('event')]: - op.drop_constraint('uid', 'event', type_='unique') + if "uid" in [c["name"] for c in inspector.get_unique_constraints("event")]: + op.drop_constraint("uid", "event", type_="unique") - op.create_unique_constraint('uuid', 'event', ['uid', 'source', - 'account_id', 'provider_name']) - op.alter_column('event', 'uid', - type_=sa.String(767, collation='ascii_general_ci')) + op.create_unique_constraint( + "uuid", "event", ["uid", "source", "account_id", "provider_name"] + ) + op.alter_column("event", "uid", type_=sa.String(767, collation="ascii_general_ci")) def downgrade(): - op.drop_constraint('uuid', 'event', type_='unique') - op.alter_column('event', 'uid', - type_=sa.String(64)) - op.create_unique_constraint('uid', 'event', ['uid', 'source', - 'account_id', 'provider_name']) + op.drop_constraint("uuid", "event", type_="unique") + op.alter_column("event", "uid", type_=sa.String(64)) + op.create_unique_constraint( + "uid", "event", ["uid", "source", "account_id", "provider_name"] + ) diff --git a/migrations/versions/080_longer_event_summaries.py b/migrations/versions/080_longer_event_summaries.py index 50092bca1..1acbe5197 100644 --- a/migrations/versions/080_longer_event_summaries.py +++ b/migrations/versions/080_longer_event_summaries.py @@ -7,16 +7,16 @@ """ # revision identifiers, used by Alembic. -revision = '4e3e8abea884' -down_revision = '5901bf556d83' +revision = "4e3e8abea884" +down_revision = "5901bf556d83" from alembic import op import sqlalchemy as sa def upgrade(): - op.alter_column('event', 'subject', type_=sa.String(1024)) + op.alter_column("event", "subject", type_=sa.String(1024)) def downgrade(): - op.alter_column('event', 'subject', type_=sa.String(255)) + op.alter_column("event", "subject", type_=sa.String(255)) diff --git a/migrations/versions/081_move_imapfolder_highestmodseq_to_bigint.py b/migrations/versions/081_move_imapfolder_highestmodseq_to_bigint.py index 1c0223e34..6c0b6d249 100644 --- a/migrations/versions/081_move_imapfolder_highestmodseq_to_bigint.py +++ b/migrations/versions/081_move_imapfolder_highestmodseq_to_bigint.py @@ -7,23 +7,31 @@ """ # revision identifiers, used by Alembic. 
-revision = '1bc2536b8bc6' -down_revision = '4e3e8abea884' +revision = "1bc2536b8bc6" +down_revision = "4e3e8abea884" from alembic import op import sqlalchemy as sa def upgrade(): - op.alter_column('imapfolderinfo', 'highestmodseq', - type_=sa.BigInteger, existing_type=sa.Integer, - existing_server_default=sa.sql.expression.null(), - existing_nullable=True) - - op.alter_column('imapfolderinfo', 'uidvalidity', - type_=sa.BigInteger, existing_type=sa.Integer, - existing_server_default=sa.sql.expression.null(), - existing_nullable=True) + op.alter_column( + "imapfolderinfo", + "highestmodseq", + type_=sa.BigInteger, + existing_type=sa.Integer, + existing_server_default=sa.sql.expression.null(), + existing_nullable=True, + ) + + op.alter_column( + "imapfolderinfo", + "uidvalidity", + type_=sa.BigInteger, + existing_type=sa.Integer, + existing_server_default=sa.sql.expression.null(), + existing_nullable=True, + ) def downgrade(): diff --git a/migrations/versions/082_event_participants.py b/migrations/versions/082_event_participants.py index cbf2e90de..acd9e85d9 100644 --- a/migrations/versions/082_event_participants.py +++ b/migrations/versions/082_event_participants.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '1322d3787305' -down_revision = '1bc2536b8bc6' +revision = "1322d3787305" +down_revision = "1bc2536b8bc6" from alembic import op import sqlalchemy as sa @@ -16,25 +16,28 @@ def upgrade(): op.create_table( - 'eventparticipant', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('public_id', sa.BINARY(length=16), nullable=False), - sa.Column('event_id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=True), - sa.Column('_raw_address', sa.String(length=191), nullable=True), - sa.Column('_canonicalized_address', sa.String(length=191), - nullable=True), - sa.Column('status', sa.Enum('yes', 'no', 'maybe', 'awaiting'), - default='awaiting', nullable=False), - sa.Column('notes', sa.Text(), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.Column('updated_at', sa.DateTime(), nullable=True), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.ForeignKeyConstraint(['event_id'], ['event.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('_raw_address', 'event_id', name='uid')) + "eventparticipant", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("public_id", sa.BINARY(length=16), nullable=False), + sa.Column("event_id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(length=255), nullable=True), + sa.Column("_raw_address", sa.String(length=191), nullable=True), + sa.Column("_canonicalized_address", sa.String(length=191), nullable=True), + sa.Column( + "status", + sa.Enum("yes", "no", "maybe", "awaiting"), + default="awaiting", + nullable=False, + ), + sa.Column("notes", sa.Text(), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=True), + sa.Column("updated_at", sa.DateTime(), nullable=True), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(["event_id"], ["event.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("_raw_address", "event_id", name="uid"), + ) def downgrade(): - op.drop_table('eventparticipant') + op.drop_table("eventparticipant") diff --git a/migrations/versions/083_calendars_event_owners.py b/migrations/versions/083_calendars_event_owners.py index bc87834a2..d7118010f 100644 --- a/migrations/versions/083_calendars_event_owners.py +++ 
b/migrations/versions/083_calendars_event_owners.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '10a1129fe685' -down_revision = '1322d3787305' +revision = "10a1129fe685" +down_revision = "1322d3787305" from alembic import op import sqlalchemy as sa @@ -17,87 +17,82 @@ def upgrade(): # remove old events that didn't match foreign key constraints on calendars - event = table('event') + event = table("event") op.execute(event.delete()) op.create_table( - 'calendar', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('public_id', sa.BINARY(length=16), nullable=False), - sa.Column('account_id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=128), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.Column('updated_at', sa.DateTime(), nullable=True), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('notes', sa.Text(), nullable=True), - sa.Column('uid', sa.String(767, collation='ascii_general_ci'), - nullable=False), - sa.Column('read_only', sa.Boolean(), nullable=False, - default=False), - sa.ForeignKeyConstraint(['account_id'], ['account.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('account_id', 'name', name='uuid') + "calendar", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("public_id", sa.BINARY(length=16), nullable=False), + sa.Column("account_id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(length=128), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=True), + sa.Column("updated_at", sa.DateTime(), nullable=True), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("notes", sa.Text(), nullable=True), + sa.Column("uid", sa.String(767, collation="ascii_general_ci"), nullable=False), + sa.Column("read_only", sa.Boolean(), nullable=False, default=False), + sa.ForeignKeyConstraint(["account_id"], ["account.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("account_id", "name", name="uuid"), ) - op.add_column('account', sa.Column('default_calendar_id', sa.Integer(), - nullable=True)) + op.add_column( + "account", sa.Column("default_calendar_id", sa.Integer(), nullable=True) + ) - op.create_foreign_key('account_ibfk_10', - 'account', 'calendar', - ['default_calendar_id'], ['id']) - op.add_column('event', sa.Column('calendar_id', sa.Integer(), - nullable=False)) + op.create_foreign_key( + "account_ibfk_10", "account", "calendar", ["default_calendar_id"], ["id"] + ) + op.add_column("event", sa.Column("calendar_id", sa.Integer(), nullable=False)) - op.create_foreign_key('event_ibfk_2', - 'event', 'calendar', - ['calendar_id'], ['id']) + op.create_foreign_key("event_ibfk_2", "event", "calendar", ["calendar_id"], ["id"]) - op.add_column('event', sa.Column('owner', sa.String(length=255), - nullable=True)) + op.add_column("event", sa.Column("owner", sa.String(length=255), nullable=True)) - op.add_column('event', sa.Column('is_owner', sa.Boolean(), - default=True, - nullable=False)) + op.add_column( + "event", sa.Column("is_owner", sa.Boolean(), default=True, nullable=False) + ) - op.add_column('eventparticipant', sa.Column('guests', sa.Integer(), - default=0, - nullable=False)) + op.add_column( + "eventparticipant", sa.Column("guests", sa.Integer(), default=0, nullable=False) + ) - op.alter_column('eventparticipant', 'status', - existing_type=sa.Enum('yes', 'no', 'maybe', 'awaiting'), - type_=sa.Enum('yes', 'no', 'maybe', 'noreply'), - existing_nullable=False) + 
op.alter_column( + "eventparticipant", + "status", + existing_type=sa.Enum("yes", "no", "maybe", "awaiting"), + type_=sa.Enum("yes", "no", "maybe", "noreply"), + existing_nullable=False, + ) - op.drop_column('event', 'locked') - op.drop_column('event', 'time_zone') - op.add_column('event', sa.Column('start_date', sa.Date(), - nullable=True)) - op.add_column('event', sa.Column('end_date', sa.Date(), - nullable=True)) - op.add_column('event', sa.Column('read_only', sa.Boolean(), - nullable=False, default=False)) + op.drop_column("event", "locked") + op.drop_column("event", "time_zone") + op.add_column("event", sa.Column("start_date", sa.Date(), nullable=True)) + op.add_column("event", sa.Column("end_date", sa.Date(), nullable=True)) + op.add_column( + "event", sa.Column("read_only", sa.Boolean(), nullable=False, default=False) + ) def downgrade(): - op.alter_column('eventparticipant', 'status', - existing_type=sa.Enum('yes', 'no', 'maybe', 'noreply'), - type_=sa.Enum('yes', 'no', 'maybe', 'awaiting'), - existing_nullable=False) - op.drop_column('event', 'read_only') - op.add_column('event', sa.Column('locked', sa.Boolean(), - nullable=False)) - op.add_column('event', sa.Column('time_zone', sa.Integer(), - nullable=False)) - op.drop_constraint('default_calendar_ibfk_1', 'account', - type_='foreignkey') - op.drop_constraint('event_ibfk_2', 'event', - type_='foreignkey') - op.drop_table('calendar') - op.drop_column('event', 'calendar_id') - op.drop_column('event', 'start_date') - op.drop_column('event', 'end_date') - op.drop_column('event', 'owner') - op.drop_column('event', 'is_owner') - op.drop_column('account', 'default_calendar_id') - op.drop_column('eventparticipant', 'guests') + op.alter_column( + "eventparticipant", + "status", + existing_type=sa.Enum("yes", "no", "maybe", "noreply"), + type_=sa.Enum("yes", "no", "maybe", "awaiting"), + existing_nullable=False, + ) + op.drop_column("event", "read_only") + op.add_column("event", sa.Column("locked", sa.Boolean(), nullable=False)) + op.add_column("event", sa.Column("time_zone", sa.Integer(), nullable=False)) + op.drop_constraint("default_calendar_ibfk_1", "account", type_="foreignkey") + op.drop_constraint("event_ibfk_2", "event", type_="foreignkey") + op.drop_table("calendar") + op.drop_column("event", "calendar_id") + op.drop_column("event", "start_date") + op.drop_column("event", "end_date") + op.drop_column("event", "owner") + op.drop_column("event", "is_owner") + op.drop_column("account", "default_calendar_id") + op.drop_column("eventparticipant", "guests") diff --git a/migrations/versions/084_mutable_drafts.py b/migrations/versions/084_mutable_drafts.py index d93cb05d9..84523fe19 100644 --- a/migrations/versions/084_mutable_drafts.py +++ b/migrations/versions/084_mutable_drafts.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '10db12da2005' -down_revision = '10a1129fe685' +revision = "10db12da2005" +down_revision = "10a1129fe685" from alembic import op from sqlalchemy.sql import text @@ -18,39 +18,57 @@ def upgrade(): from inbox.sqlalchemy_ext.util import JSON - op.add_column('actionlog', - sa.Column('extra_args', JSON(), nullable=True)) + + op.add_column("actionlog", sa.Column("extra_args", JSON(), nullable=True)) conn = op.get_bind() - conn.execute(text(""" + conn.execute( + text( + """ ALTER TABLE message ADD COLUMN version BINARY(16), DROP FOREIGN KEY message_ibfk_3 - """)) + """ + ) + ) - parent_drafts_ids = [id_ for id_, in conn.execute(text( - """ + parent_drafts_ids = [ + id_ + for id_, in conn.execute( + text( + """ SELECT message.parent_draft_id from message WHERE message.is_created = 1 AND message.is_draft = 1 AND message.parent_draft_id IS NOT NULL - """))] + """ + ) + ) + ] print parent_drafts_ids if parent_drafts_ids: # delete old parent drafts - conn.execute(text(""" + conn.execute( + text( + """ DELETE FROM message WHERE message.is_created = 1 AND message.is_draft = 1 - AND message.id IN :parent_drafts_ids"""), - parent_drafts_ids=parent_drafts_ids) + AND message.id IN :parent_drafts_ids""" + ), + parent_drafts_ids=parent_drafts_ids, + ) - conn.execute(text(""" + conn.execute( + text( + """ UPDATE message SET message.version = message.public_id WHERE message.is_created = 1 AND message.is_draft = 1 - """)) + """ + ) + ) - op.drop_column('message', 'parent_draft_id') + op.drop_column("message", "parent_draft_id") def downgrade(): - raise Exception('No.') + raise Exception("No.") diff --git a/migrations/versions/085_add_attachment_tag.py b/migrations/versions/085_add_attachment_tag.py index c9913239d..b719554fc 100644 --- a/migrations/versions/085_add_attachment_tag.py +++ b/migrations/versions/085_add_attachment_tag.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '294200d809c8' -down_revision = '10db12da2005' +revision = "294200d809c8" +down_revision = "10db12da2005" from alembic import op from sqlalchemy.sql import text @@ -17,6 +17,7 @@ def upgrade(): from inbox.models.session import session_scope from inbox.models import Namespace + with session_scope() as db_session: # Create the attachment tag print "creating canonical tags..." 
@@ -27,13 +28,26 @@ def upgrade(): conn = op.get_bind() tag_id_for_namespace = dict( - [(namespace_id, tag_id) for namespace_id, tag_id in conn.execute( - text("SELECT namespace_id, id FROM tag WHERE name = 'attachment'"))]) + [ + (namespace_id, tag_id) + for namespace_id, tag_id in conn.execute( + text("SELECT namespace_id, id FROM tag WHERE name = 'attachment'") + ) + ] + ) print "have attachment tag for", len(tag_id_for_namespace), "namespaces" - existing_tagitems = set([thread_id for thread_id, in conn.execute(text( - "SELECT distinct(thread_id) FROM tagitem WHERE tag_id IN :tag_ids"), - tag_ids=set(tag_id_for_namespace.values()))]) + existing_tagitems = set( + [ + thread_id + for thread_id, in conn.execute( + text( + "SELECT distinct(thread_id) FROM tagitem WHERE tag_id IN :tag_ids" + ), + tag_ids=set(tag_id_for_namespace.values()), + ) + ] + ) q = """SELECT distinct(thread.id), namespace_id FROM thread INNER JOIN message ON thread.id = message.thread_id @@ -41,22 +55,28 @@ def upgrade(): WHERE part.content_disposition IS NOT NULL """ if existing_tagitems: - print "skipping", len(existing_tagitems), \ - "threads which already have the tag attachment" + print "skipping", len( + existing_tagitems + ), "threads which already have the tag attachment" q += " AND thread.id NOT IN :existing_tagitems" q += " ORDER BY thread.id ASC" - for thread_id, namespace_id in \ - conn.execute(text(q), existing_tagitems=existing_tagitems): + for thread_id, namespace_id in conn.execute( + text(q), existing_tagitems=existing_tagitems + ): print thread_id # We could bulk insert, but don't bother. - conn.execute(text( - """ + conn.execute( + text( + """ INSERT INTO tagitem (created_at, updated_at, thread_id, tag_id) VALUES (UTC_TIMESTAMP(), UTC_TIMESTAMP(), :thread_id, :tag_id) - """), - thread_id=thread_id, tag_id=tag_id_for_namespace[namespace_id]) + """ + ), + thread_id=thread_id, + tag_id=tag_id_for_namespace[namespace_id], + ) def downgrade(): diff --git a/migrations/versions/086_event_date_times.py b/migrations/versions/086_event_date_times.py index 065910d56..623bb0d67 100644 --- a/migrations/versions/086_event_date_times.py +++ b/migrations/versions/086_event_date_times.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '1ac03cab7a24' -down_revision = '294200d809c8' +revision = "1ac03cab7a24" +down_revision = "294200d809c8" from alembic import op import sqlalchemy as sa @@ -17,6 +17,7 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) @@ -25,9 +26,9 @@ def upgrade(): # only drop the uid constraint if it exists (created with creat_db # vs a migration). inspector = sa.inspect(engine) - if 'start_date' in [c['name'] for c in inspector.get_columns('event')]: - op.drop_column('event', 'start_date') - op.drop_column('event', 'end_date') + if "start_date" in [c["name"] for c in inspector.get_columns("event")]: + op.drop_column("event", "start_date") + op.drop_column("event", "end_date") def downgrade(): diff --git a/migrations/versions/087_fix_account_foreign_keys.py b/migrations/versions/087_fix_account_foreign_keys.py index 197eda232..43df8cad0 100644 --- a/migrations/versions/087_fix_account_foreign_keys.py +++ b/migrations/versions/087_fix_account_foreign_keys.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '565c7325c51d' -down_revision = '1ac03cab7a24' +revision = "565c7325c51d" +down_revision = "1ac03cab7a24" from alembic import op import sqlalchemy as sa @@ -16,15 +16,17 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) inspector = sa.inspect(engine) - if 'default_calendar_ibfk_1' in [k['name'] for k in - inspector.get_foreign_keys('account')]: - op.drop_constraint('default_calendar_ibfk_1', 'account', - type_='foreignkey') - op.create_foreign_key('account_ibfk_10', 'account', 'calendar', - ['default_calendar_id'], ['id']) + if "default_calendar_ibfk_1" in [ + k["name"] for k in inspector.get_foreign_keys("account") + ]: + op.drop_constraint("default_calendar_ibfk_1", "account", type_="foreignkey") + op.create_foreign_key( + "account_ibfk_10", "account", "calendar", ["default_calendar_id"], ["id"] + ) def downgrade(): - raise Exception('No rolling back') + raise Exception("No rolling back") diff --git a/migrations/versions/088_calendar_descriptions.py b/migrations/versions/088_calendar_descriptions.py index 7cd246c0f..14e7f3ce6 100644 --- a/migrations/versions/088_calendar_descriptions.py +++ b/migrations/versions/088_calendar_descriptions.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '24e9afe91349' -down_revision = '565c7325c51d' +revision = "24e9afe91349" +down_revision = "565c7325c51d" from alembic import op import sqlalchemy as sa @@ -18,21 +18,35 @@ def upgrade(): from inbox.models.session import session_scope from sqlalchemy.ext.declarative import declarative_base from inbox.ignition import main_engine - engine = main_engine(pool_size=1, max_overflow=0) - - op.alter_column('calendar', 'notes', new_column_name='description', - existing_type=sa.Text(), - existing_nullable=True) - op.add_column('calendar', sa.Column('provider_name', - sa.String(length=64), nullable=False)) - op.alter_column('event', 'subject', new_column_name='title', - existing_type=sa.String(1024), - existing_nullable=True) + engine = main_engine(pool_size=1, max_overflow=0) - op.alter_column('event', 'body', new_column_name='description', - existing_type=sa.Text(), - existing_nullable=True) + op.alter_column( + "calendar", + "notes", + new_column_name="description", + existing_type=sa.Text(), + existing_nullable=True, + ) + op.add_column( + "calendar", sa.Column("provider_name", sa.String(length=64), nullable=False) + ) + + op.alter_column( + "event", + "subject", + new_column_name="title", + existing_type=sa.String(1024), + existing_nullable=True, + ) + + op.alter_column( + "event", + "body", + new_column_name="description", + existing_type=sa.Text(), + existing_nullable=True, + ) # We're changing the structure of the calendar name so that # the provider can be split out from the name as it was previously @@ -47,16 +61,15 @@ def upgrade(): Base.metadata.reflect(engine) class Calendar(Base): - __table__ = Base.metadata.tables['calendar'] + __table__ = Base.metadata.tables["calendar"] class Event(Base): - __table__ = Base.metadata.tables['event'] + __table__ = Base.metadata.tables["event"] - with session_scope(versioned=False) \ - as db_session: + with session_scope(versioned=False) as db_session: for calendar in db_session.query(Calendar): - if calendar.name and '-' in calendar.name: - provider_name, name = calendar.name.split('-') + if calendar.name and "-" in calendar.name: + provider_name, name = calendar.name.split("-") calendar.provider_name = provider_name calendar.name = name calendar.read_only = True @@ -64,28 
+77,35 @@ class Event(Base): event.read_only = True db_session.commit() - op.drop_constraint('calendar_ibfk_1', 'calendar', type_='foreignkey') - op.drop_constraint('uuid', 'calendar', type_='unique') + op.drop_constraint("calendar_ibfk_1", "calendar", type_="foreignkey") + op.drop_constraint("uuid", "calendar", type_="unique") - op.create_unique_constraint('uuid', 'calendar', ['name', 'provider_name', - 'account_id']) + op.create_unique_constraint( + "uuid", "calendar", ["name", "provider_name", "account_id"] + ) - op.create_foreign_key(None, "calendar", "account", ["account_id"], ["id"], - ondelete='CASCADE') + op.create_foreign_key( + None, "calendar", "account", ["account_id"], ["id"], ondelete="CASCADE" + ) - op.drop_constraint('event_ibfk_2', 'event', type_='foreignkey') - op.create_foreign_key('event_ibfk_2', 'event', 'calendar', ['calendar_id'], - ['id'], ondelete='CASCADE') + op.drop_constraint("event_ibfk_2", "event", type_="foreignkey") + op.create_foreign_key( + "event_ibfk_2", "event", "calendar", ["calendar_id"], ["id"], ondelete="CASCADE" + ) def downgrade(): - op.alter_column('calendar', 'description', new_column_name='notes', - existing_type=sa.Text(), - existing_nullable=True) - op.drop_column('calendar', 'provider_name') - - op.drop_constraint('calendar_ibfk_1', 'calendar', type_='foreignkey') - op.drop_constraint('uuid', 'calendar', type_='unique') - - op.create_unique_constraint('uuid', 'calendar', ['name', 'account_id']) + op.alter_column( + "calendar", + "description", + new_column_name="notes", + existing_type=sa.Text(), + existing_nullable=True, + ) + op.drop_column("calendar", "provider_name") + + op.drop_constraint("calendar_ibfk_1", "calendar", type_="foreignkey") + op.drop_constraint("uuid", "calendar", type_="unique") + + op.create_unique_constraint("uuid", "calendar", ["name", "account_id"]) op.create_foreign_key(None, "calendar", "account", ["account_id"], ["id"]) diff --git a/migrations/versions/089_revert_encryption.py b/migrations/versions/089_revert_encryption.py index dbbf97f89..a6058a397 100644 --- a/migrations/versions/089_revert_encryption.py +++ b/migrations/versions/089_revert_encryption.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '2c577a8a01b7' -down_revision = '24e9afe91349' +revision = "2c577a8a01b7" +down_revision = "24e9afe91349" from alembic import op @@ -17,17 +17,20 @@ def upgrade(): # Block table - op.drop_column('block', 'encryption_scheme') + op.drop_column("block", "encryption_scheme") # Secret table - op.add_column('secret', sa.Column('acl_id', sa.Integer(), nullable=False)) + op.add_column("secret", sa.Column("acl_id", sa.Integer(), nullable=False)) - op.alter_column('secret', 'type', type_=sa.Integer(), - existing_server_default=None, - existing_nullable=False) + op.alter_column( + "secret", + "type", + type_=sa.Integer(), + existing_server_default=None, + existing_nullable=False, + ) - op.add_column('secret', - sa.Column('secret', sa.String(length=512), nullable=True)) + op.add_column("secret", sa.Column("secret", sa.String(length=512), nullable=True)) import nacl.secret import nacl.utils @@ -39,23 +42,24 @@ def upgrade(): Base = sa.ext.declarative.declarative_base() Base.metadata.reflect(engine) - key = config.get_required('SECRET_ENCRYPTION_KEY') + key = config.get_required("SECRET_ENCRYPTION_KEY") class Secret(Base): - __table__ = Base.metadata.tables['secret'] + __table__ = Base.metadata.tables["secret"] - with session_scope(versioned=False) as \ - db_session: - secrets = db_session.query(Secret).filter( - Secret.encryption_scheme == 1, - Secret._secret.isnot(None)).order_by(Secret.id).all() + with session_scope(versioned=False) as db_session: + secrets = ( + db_session.query(Secret) + .filter(Secret.encryption_scheme == 1, Secret._secret.isnot(None)) + .order_by(Secret.id) + .all() + ) for s in secrets: encrypted = s._secret s.secret = nacl.secret.SecretBox( - key=key, - encoder=nacl.encoding.HexEncoder + key=key, encoder=nacl.encoding.HexEncoder ).decrypt(encrypted) # Picked arbitrarily @@ -66,8 +70,8 @@ class Secret(Base): db_session.commit() - op.drop_column('secret', '_secret') - op.drop_column('secret', 'encryption_scheme') + op.drop_column("secret", "_secret") + op.drop_column("secret", "encryption_scheme") def downgrade(): diff --git a/migrations/versions/090_parts_block_ids.py b/migrations/versions/090_parts_block_ids.py index 2db69147d..f8674753c 100644 --- a/migrations/versions/090_parts_block_ids.py +++ b/migrations/versions/090_parts_block_ids.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '2b89164aa9cd' -down_revision = '2c577a8a01b7' +revision = "2b89164aa9cd" +down_revision = "2c577a8a01b7" from datetime import datetime from alembic import op @@ -20,40 +20,46 @@ def upgrade(): conn = op.get_bind() # Create a new block_id table to make parts be relational # Add audit timestamps as Parts will no longer inherit from blocks - conn.execute(text(""" + conn.execute( + text( + """ ALTER TABLE part ADD COLUMN block_id INTEGER, ADD COLUMN created_at DATETIME, ADD COLUMN updated_at DATETIME, ADD COLUMN deleted_at DATETIME - """)) + """ + ) + ) - conn.execute(text( - "UPDATE part SET block_id=part.id, created_at=:now, updated_at=:now"), - now=datetime.utcnow()) + conn.execute( + text("UPDATE part SET block_id=part.id, created_at=:now, updated_at=:now"), + now=datetime.utcnow(), + ) - conn.execute(text(""" + conn.execute( + text( + """ ALTER TABLE part DROP FOREIGN KEY part_ibfk_1, MODIFY block_id INTEGER NOT NULL, MODIFY created_at DATETIME NOT NULL, MODIFY updated_at DATETIME NOT NULL, MODIFY id INTEGER NULL AUTO_INCREMENT - """)) + """ + ) + ) # can't batch this with other alterations while maintaining foreig key name - op.create_foreign_key('part_ibfk_1', 'part', 'block', ['block_id'], ['id']) + op.create_foreign_key("part_ibfk_1", "part", "block", ["block_id"], ["id"]) def downgrade(): - table_name = 'part' - op.drop_constraint('part_ibfk_1', table_name, - type_='foreignkey') - op.drop_column(table_name, 'block_id') - op.create_foreign_key('part_ibfk_1', table_name, 'block', - ['id'], ['id']) - op.drop_column(table_name, 'created_at') - op.drop_column(table_name, 'deleted_at') - op.drop_column(table_name, 'updated_at') - op.alter_column(table_name, 'id', existing_type=sa.Integer, - autoincrement=False) + table_name = "part" + op.drop_constraint("part_ibfk_1", table_name, type_="foreignkey") + op.drop_column(table_name, "block_id") + op.create_foreign_key("part_ibfk_1", table_name, "block", ["id"], ["id"]) + op.drop_column(table_name, "created_at") + op.drop_column(table_name, "deleted_at") + op.drop_column(table_name, "updated_at") + op.alter_column(table_name, "id", existing_type=sa.Integer, autoincrement=False) diff --git a/migrations/versions/091_remove_webhooks.py b/migrations/versions/091_remove_webhooks.py index 51e3d2855..eeafcb8bd 100644 --- a/migrations/versions/091_remove_webhooks.py +++ b/migrations/versions/091_remove_webhooks.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '4b07b67498e1' -down_revision = '2b89164aa9cd' +revision = "4b07b67498e1" +down_revision = "2b89164aa9cd" from alembic import op import sqlalchemy as sa @@ -16,67 +16,112 @@ def upgrade(): - op.drop_table('webhook') - op.drop_table('lens') + op.drop_table("webhook") + op.drop_table("lens") def downgrade(): op.create_table( - 'lens', - sa.Column('public_id', sa.BINARY(length=16), nullable=False), - sa.Column('created_at', mysql.DATETIME(), nullable=False), - sa.Column('updated_at', mysql.DATETIME(), nullable=False), - sa.Column('deleted_at', mysql.DATETIME(), nullable=True), - sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), - sa.Column('namespace_id', mysql.INTEGER(display_width=11), - autoincrement=False, nullable=False), - sa.Column('subject', mysql.VARCHAR(length=255), nullable=True), - sa.Column('thread_public_id', sa.BINARY(length=16), nullable=True), - sa.Column('started_before', mysql.DATETIME(), nullable=True), - sa.Column('started_after', mysql.DATETIME(), nullable=True), - sa.Column('last_message_before', mysql.DATETIME(), nullable=True), - sa.Column('last_message_after', mysql.DATETIME(), nullable=True), - sa.Column('any_email', mysql.VARCHAR(length=255), nullable=True), - sa.Column('to_addr', mysql.VARCHAR(length=255), nullable=True), - sa.Column('from_addr', mysql.VARCHAR(length=255), nullable=True), - sa.Column('cc_addr', mysql.VARCHAR(length=255), nullable=True), - sa.Column('bcc_addr', mysql.VARCHAR(length=255), nullable=True), - sa.Column('filename', mysql.VARCHAR(length=255), nullable=True), - sa.Column('tag', mysql.VARCHAR(length=255), nullable=True), - sa.ForeignKeyConstraint(['namespace_id'], [u'namespace.id'], - name=u'lens_ibfk_1', ondelete=u'CASCADE'), - sa.PrimaryKeyConstraint('id'), - mysql_default_charset=u'utf8mb4', - mysql_engine=u'InnoDB' + "lens", + sa.Column("public_id", sa.BINARY(length=16), nullable=False), + sa.Column("created_at", mysql.DATETIME(), nullable=False), + sa.Column("updated_at", mysql.DATETIME(), nullable=False), + sa.Column("deleted_at", mysql.DATETIME(), nullable=True), + sa.Column("id", mysql.INTEGER(display_width=11), nullable=False), + sa.Column( + "namespace_id", + mysql.INTEGER(display_width=11), + autoincrement=False, + nullable=False, + ), + sa.Column("subject", mysql.VARCHAR(length=255), nullable=True), + sa.Column("thread_public_id", sa.BINARY(length=16), nullable=True), + sa.Column("started_before", mysql.DATETIME(), nullable=True), + sa.Column("started_after", mysql.DATETIME(), nullable=True), + sa.Column("last_message_before", mysql.DATETIME(), nullable=True), + sa.Column("last_message_after", mysql.DATETIME(), nullable=True), + sa.Column("any_email", mysql.VARCHAR(length=255), nullable=True), + sa.Column("to_addr", mysql.VARCHAR(length=255), nullable=True), + sa.Column("from_addr", mysql.VARCHAR(length=255), nullable=True), + sa.Column("cc_addr", mysql.VARCHAR(length=255), nullable=True), + sa.Column("bcc_addr", mysql.VARCHAR(length=255), nullable=True), + sa.Column("filename", mysql.VARCHAR(length=255), nullable=True), + sa.Column("tag", mysql.VARCHAR(length=255), nullable=True), + sa.ForeignKeyConstraint( + ["namespace_id"], + [u"namespace.id"], + name=u"lens_ibfk_1", + ondelete=u"CASCADE", + ), + sa.PrimaryKeyConstraint("id"), + mysql_default_charset=u"utf8mb4", + mysql_engine=u"InnoDB", ) op.create_table( - 'webhook', - sa.Column('public_id', sa.BINARY(length=16), nullable=False), - sa.Column('created_at', mysql.DATETIME(), nullable=False), - sa.Column('updated_at', mysql.DATETIME(), 
nullable=False), - sa.Column('deleted_at', mysql.DATETIME(), nullable=True), - sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), - sa.Column('namespace_id', mysql.INTEGER(display_width=11), - autoincrement=False, nullable=False), - sa.Column('lens_id', mysql.INTEGER(display_width=11), - autoincrement=False, nullable=False), - sa.Column('callback_url', mysql.TEXT(), nullable=False), - sa.Column('failure_notify_url', mysql.TEXT(), nullable=True), - sa.Column('include_body', mysql.TINYINT(display_width=1), - autoincrement=False, nullable=False), - sa.Column('max_retries', mysql.INTEGER(display_width=11), - server_default='3', autoincrement=False, nullable=False), - sa.Column('retry_interval', mysql.INTEGER(display_width=11), - server_default='60', autoincrement=False, nullable=False), - sa.Column('active', mysql.TINYINT(display_width=1), server_default='1', - autoincrement=False, nullable=False), - sa.Column('min_processed_id', mysql.INTEGER(display_width=11), - server_default='0', autoincrement=False, nullable=False), - sa.ForeignKeyConstraint(['lens_id'], [u'lens.id'], - name=u'webhook_ibfk_2', ondelete=u'CASCADE'), - sa.ForeignKeyConstraint(['namespace_id'], [u'namespace.id'], - name=u'webhook_ibfk_1', ondelete=u'CASCADE'), - sa.PrimaryKeyConstraint('id'), - mysql_default_charset=u'utf8mb4', - mysql_engine=u'InnoDB' + "webhook", + sa.Column("public_id", sa.BINARY(length=16), nullable=False), + sa.Column("created_at", mysql.DATETIME(), nullable=False), + sa.Column("updated_at", mysql.DATETIME(), nullable=False), + sa.Column("deleted_at", mysql.DATETIME(), nullable=True), + sa.Column("id", mysql.INTEGER(display_width=11), nullable=False), + sa.Column( + "namespace_id", + mysql.INTEGER(display_width=11), + autoincrement=False, + nullable=False, + ), + sa.Column( + "lens_id", + mysql.INTEGER(display_width=11), + autoincrement=False, + nullable=False, + ), + sa.Column("callback_url", mysql.TEXT(), nullable=False), + sa.Column("failure_notify_url", mysql.TEXT(), nullable=True), + sa.Column( + "include_body", + mysql.TINYINT(display_width=1), + autoincrement=False, + nullable=False, + ), + sa.Column( + "max_retries", + mysql.INTEGER(display_width=11), + server_default="3", + autoincrement=False, + nullable=False, + ), + sa.Column( + "retry_interval", + mysql.INTEGER(display_width=11), + server_default="60", + autoincrement=False, + nullable=False, + ), + sa.Column( + "active", + mysql.TINYINT(display_width=1), + server_default="1", + autoincrement=False, + nullable=False, + ), + sa.Column( + "min_processed_id", + mysql.INTEGER(display_width=11), + server_default="0", + autoincrement=False, + nullable=False, + ), + sa.ForeignKeyConstraint( + ["lens_id"], [u"lens.id"], name=u"webhook_ibfk_2", ondelete=u"CASCADE" + ), + sa.ForeignKeyConstraint( + ["namespace_id"], + [u"namespace.id"], + name=u"webhook_ibfk_1", + ondelete=u"CASCADE", + ), + sa.PrimaryKeyConstraint("id"), + mysql_default_charset=u"utf8mb4", + mysql_engine=u"InnoDB", ) diff --git a/migrations/versions/092_fix_outlookaccount_typo.py b/migrations/versions/092_fix_outlookaccount_typo.py index e093f04d3..3670b905f 100644 --- a/migrations/versions/092_fix_outlookaccount_typo.py +++ b/migrations/versions/092_fix_outlookaccount_typo.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '63dc7f205da' -down_revision = '4b07b67498e1' +revision = "63dc7f205da" +down_revision = "4b07b67498e1" from alembic import op from sqlalchemy.sql import text @@ -16,13 +16,21 @@ def upgrade(): conn = op.get_bind() - conn.execute(text(""" + conn.execute( + text( + """ UPDATE account SET TYPE='outlookaccount' WHERE type='outlookccount'; - """)) + """ + ) + ) def downgrade(): conn = op.get_bind() - conn.execute(text(""" + conn.execute( + text( + """ UPDATE account SET TYPE='outlookccount' WHERE type='outlookaccount'; - """)) + """ + ) + ) diff --git a/migrations/versions/093_add_folder_identifier.py b/migrations/versions/093_add_folder_identifier.py index e612d76b0..724109de1 100644 --- a/migrations/versions/093_add_folder_identifier.py +++ b/migrations/versions/093_add_folder_identifier.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '159607944f52' -down_revision = '63dc7f205da' +revision = "159607944f52" +down_revision = "63dc7f205da" from alembic import op from sqlalchemy.sql import text @@ -19,20 +19,29 @@ def upgrade(): conn = op.get_bind() - conn.execute(text(""" + conn.execute( + text( + """ ALTER TABLE folder DROP INDEX account_id, ADD COLUMN identifier VARCHAR(:len) NULL, - ADD CONSTRAINT account_id UNIQUE (account_id, name, canonical_name, identifier)"""), len=MAX_INDEXABLE_LENGTH) + ADD CONSTRAINT account_id UNIQUE (account_id, name, canonical_name, identifier)""" + ), + len=MAX_INDEXABLE_LENGTH, + ) # For eas accounts - # set identifier=canonical_name # then canonical_name=NULL (so non-Inbox canonical tags will be set correctly henceforth) - conn.execute(text(""" + conn.execute( + text( + """ UPDATE folder INNER JOIN account ON folder.account_id=account.id SET folder.identifier=folder.canonical_name, folder.canonical_name=NULL WHERE account.type='easaccount' - """)) + """ + ) + ) # Set Inbox-canonical tags: inbox, sent, drafts, trash, archive too q = """ diff --git a/migrations/versions/094_eas_passwords.py b/migrations/versions/094_eas_passwords.py index 24800279c..046404937 100644 --- a/migrations/versions/094_eas_passwords.py +++ b/migrations/versions/094_eas_passwords.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '427812c1e849' -down_revision = '159607944f52' +revision = "427812c1e849" +down_revision = "159607944f52" from datetime import datetime from alembic import op @@ -17,31 +17,33 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) # Do nothing if the affected table isn't present. - if not engine.has_table('easaccount'): + if not engine.has_table("easaccount"): return # Do not define foreign key constraint here; that's done for all account # tables in the next migration. 
- op.add_column('easaccount', sa.Column('password_id', sa.Integer(), - sa.ForeignKey('secret.id'))) + op.add_column( + "easaccount", sa.Column("password_id", sa.Integer(), sa.ForeignKey("secret.id")) + ) Base = sa.ext.declarative.declarative_base() Base.metadata.reflect(engine) from inbox.models.session import session_scope class EASAccount(Base): - __table__ = Base.metadata.tables['easaccount'] + __table__ = Base.metadata.tables["easaccount"] secret = sa.orm.relationship( - 'Secret', primaryjoin='EASAccount.password_id == Secret.id') + "Secret", primaryjoin="EASAccount.password_id == Secret.id" + ) class Secret(Base): - __table__ = Base.metadata.tables['secret'] + __table__ = Base.metadata.tables["secret"] - with session_scope(versioned=False) as \ - db_session: + with session_scope(versioned=False) as db_session: accounts = db_session.query(EASAccount).all() - print '# EAS accounts: ', len(accounts) + print "# EAS accounts: ", len(accounts) for account in accounts: secret = Secret() @@ -56,15 +58,16 @@ class Secret(Base): db_session.commit() - op.alter_column('easaccount', 'password_id', - existing_type=sa.Integer(), - nullable=False) + op.alter_column( + "easaccount", "password_id", existing_type=sa.Integer(), nullable=False + ) def downgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easaccount'): + if not engine.has_table("easaccount"): return - op.drop_constraint('easaccount_ibfk_2', 'easaccount', type_='foreignkey') - op.drop_column('easaccount', 'password_id') + op.drop_constraint("easaccount_ibfk_2", "easaccount", type_="foreignkey") + op.drop_column("easaccount", "password_id") diff --git a/migrations/versions/095_secret_storage.py b/migrations/versions/095_secret_storage.py index c093fc5d7..84893cd4a 100644 --- a/migrations/versions/095_secret_storage.py +++ b/migrations/versions/095_secret_storage.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '1683790906cf' -down_revision = '427812c1e849' +revision = "1683790906cf" +down_revision = "427812c1e849" from alembic import op import sqlalchemy as sa @@ -17,26 +17,40 @@ def upgrade(): # SECRETS TABLE: # Can just drop this, was't really used before - op.drop_column('secret', 'acl_id') - - op.alter_column('secret', 'type', type_=sa.Enum('password', 'token'), - existing_server_default=None, - existing_nullable=False) - - op.add_column('secret', sa.Column('encryption_scheme', sa.Integer(), - server_default='0', nullable=False)) - op.add_column('secret', sa.Column('_secret', sa.BLOB(), - nullable=False)) + op.drop_column("secret", "acl_id") + + op.alter_column( + "secret", + "type", + type_=sa.Enum("password", "token"), + existing_server_default=None, + existing_nullable=False, + ) + + op.add_column( + "secret", + sa.Column( + "encryption_scheme", sa.Integer(), server_default="0", nullable=False + ), + ) + op.add_column("secret", sa.Column("_secret", sa.BLOB(), nullable=False)) # Account tables: # Don't need to change column types for password_id, refresh_token_id; # only add foreign key indices. 
- op.create_foreign_key('genericaccount_ibfk_2', 'genericaccount', 'secret', - ['password_id'], ['id']) - op.create_foreign_key('gmailaccount_ibfk_2', 'gmailaccount', 'secret', - ['refresh_token_id'], ['id']) - op.create_foreign_key('outlookaccount_ibfk_2', 'outlookaccount', 'secret', - ['refresh_token_id'], ['id']) + op.create_foreign_key( + "genericaccount_ibfk_2", "genericaccount", "secret", ["password_id"], ["id"] + ) + op.create_foreign_key( + "gmailaccount_ibfk_2", "gmailaccount", "secret", ["refresh_token_id"], ["id"] + ) + op.create_foreign_key( + "outlookaccount_ibfk_2", + "outlookaccount", + "secret", + ["refresh_token_id"], + ["id"], + ) def downgrade(): diff --git a/migrations/versions/096_migrate_secret_data.py b/migrations/versions/096_migrate_secret_data.py index 154ff5df4..618924848 100644 --- a/migrations/versions/096_migrate_secret_data.py +++ b/migrations/versions/096_migrate_secret_data.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '38c29430efeb' -down_revision = '1683790906cf' +revision = "38c29430efeb" +down_revision = "1683790906cf" import sqlalchemy as sa @@ -25,40 +25,47 @@ def upgrade(): Base.metadata.reflect(engine) class Secret(Base): - __table__ = Base.metadata.tables['secret'] + __table__ = Base.metadata.tables["secret"] class GenericAccount(Base): - __table__ = Base.metadata.tables['genericaccount'] + __table__ = Base.metadata.tables["genericaccount"] - with session_scope(versioned=False) as \ - db_session: - secrets = db_session.query(Secret).filter( - Secret.secret.isnot(None)).all() + with session_scope(versioned=False) as db_session: + secrets = db_session.query(Secret).filter(Secret.secret.isnot(None)).all() # Join on the genericaccount and optionally easaccount tables to # determine which secrets should have type 'password'. 
- generic_query = db_session.query(Secret.id).join( - GenericAccount).filter(Secret.id == GenericAccount.password_id) + generic_query = ( + db_session.query(Secret.id) + .join(GenericAccount) + .filter(Secret.id == GenericAccount.password_id) + ) password_secrets = [id_ for id_, in generic_query] - if engine.has_table('easaccount'): + if engine.has_table("easaccount"): + class EASAccount(Base): - __table__ = Base.metadata.tables['easaccount'] + __table__ = Base.metadata.tables["easaccount"] - eas_query = db_session.query(Secret.id).join( - EASAccount).filter(Secret.id == EASAccount.password_id) + eas_query = ( + db_session.query(Secret.id) + .join(EASAccount) + .filter(Secret.id == EASAccount.password_id) + ) password_secrets.extend([id_ for id_, in eas_query]) for s in secrets: - plain = s.secret.encode('utf-8') if isinstance(s.secret, unicode) \ - else s.secret - if config.get_required('ENCRYPT_SECRETS'): + plain = ( + s.secret.encode("utf-8") if isinstance(s.secret, unicode) else s.secret + ) + if config.get_required("ENCRYPT_SECRETS"): s._secret = nacl.secret.SecretBox( - key=config.get_required('SECRET_ENCRYPTION_KEY'), - encoder=nacl.encoding.HexEncoder + key=config.get_required("SECRET_ENCRYPTION_KEY"), + encoder=nacl.encoding.HexEncoder, ).encrypt( plaintext=plain, - nonce=nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE)) + nonce=nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE), + ) # 1 is EncryptionScheme.SECRETBOX_WITH_STATIC_KEY s.encryption_scheme = 1 @@ -66,9 +73,9 @@ class EASAccount(Base): s._secret = plain if s.id in password_secrets: - s.type = 'password' + s.type = "password" else: - s.type = 'token' + s.type = "token" db_session.add(s) diff --git a/migrations/versions/097_secrets_endgame.py b/migrations/versions/097_secrets_endgame.py index c0031eca7..fdf77e6f0 100644 --- a/migrations/versions/097_secrets_endgame.py +++ b/migrations/versions/097_secrets_endgame.py @@ -7,18 +7,19 @@ """ # revision identifiers, used by Alembic. -revision = '248ec24a39f' -down_revision = '38c29430efeb' +revision = "248ec24a39f" +down_revision = "38c29430efeb" from alembic import op def upgrade(): from inbox.ignition import main_engine + engine = main_engine() - if engine.has_table('easaccount'): - op.drop_column('easaccount', 'password') - op.drop_column('secret', 'secret') + if engine.has_table("easaccount"): + op.drop_column("easaccount", "password") + op.drop_column("secret", "secret") def downgrade(): diff --git a/migrations/versions/098_add_throttling_support.py b/migrations/versions/098_add_throttling_support.py index 4f6a23d29..20980f26e 100644 --- a/migrations/versions/098_add_throttling_support.py +++ b/migrations/versions/098_add_throttling_support.py @@ -7,17 +7,19 @@ """ # revision identifiers, used by Alembic. 
-revision = '40b533a6f3e1' -down_revision = '248ec24a39f' +revision = "40b533a6f3e1" +down_revision = "248ec24a39f" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('account', sa.Column('throttled', sa.Boolean(), - server_default='0', nullable=True)) + op.add_column( + "account", + sa.Column("throttled", sa.Boolean(), server_default="0", nullable=True), + ) def downgrade(): - op.drop_column('account', 'throttled') + op.drop_column("account", "throttled") diff --git a/migrations/versions/099_add_namespace_id_to_message.py b/migrations/versions/099_add_namespace_id_to_message.py index 7456faf03..6f599af0e 100644 --- a/migrations/versions/099_add_namespace_id_to_message.py +++ b/migrations/versions/099_add_namespace_id_to_message.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = 'e27104acb25' -down_revision = '40b533a6f3e1' +revision = "e27104acb25" +down_revision = "40b533a6f3e1" from alembic import op from sqlalchemy.sql import text @@ -16,17 +16,25 @@ def upgrade(): conn = op.get_bind() - conn.execute(text(''' + conn.execute( + text( + """ ALTER TABLE message ADD COLUMN namespace_id integer, ADD FOREIGN KEY(namespace_id) REFERENCES namespace (id) - ''')) + """ + ) + ) - conn.execute(text(''' + conn.execute( + text( + """ UPDATE message INNER JOIN thread ON message.thread_id=thread.id - SET message.namespace_id = thread.namespace_id''')) + SET message.namespace_id = thread.namespace_id""" + ) + ) def downgrade(): - op.drop_constraint('message_ibfk_3', 'message', type_='foreignkey') - op.drop_column('message', 'namespace_id') + op.drop_constraint("message_ibfk_3", "message", type_="foreignkey") + op.drop_column("message", "namespace_id") diff --git a/migrations/versions/100_make_message_namespace_id_nonnull.py b/migrations/versions/100_make_message_namespace_id_nonnull.py index bef331958..ecb1ab88e 100644 --- a/migrations/versions/100_make_message_namespace_id_nonnull.py +++ b/migrations/versions/100_make_message_namespace_id_nonnull.py @@ -7,18 +7,20 @@ """ # revision identifiers, used by Alembic. -revision = '5a68ac0e3e9' -down_revision = 'e27104acb25' +revision = "5a68ac0e3e9" +down_revision = "e27104acb25" from alembic import op import sqlalchemy as sa def upgrade(): - op.alter_column('message', 'namespace_id', existing_type=sa.Integer(), - nullable=False) + op.alter_column( + "message", "namespace_id", existing_type=sa.Integer(), nullable=False + ) def downgrade(): - op.alter_column('message', 'namespace_id', existing_type=sa.Integer(), - nullable=True) + op.alter_column( + "message", "namespace_id", existing_type=sa.Integer(), nullable=True + ) diff --git a/migrations/versions/101_add_namespace_to_contacts.py b/migrations/versions/101_add_namespace_to_contacts.py index 04d06e9bc..da697de14 100644 --- a/migrations/versions/101_add_namespace_to_contacts.py +++ b/migrations/versions/101_add_namespace_to_contacts.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '3bb01fcc755e' -down_revision = '5a68ac0e3e9' +revision = "3bb01fcc755e" +down_revision = "5a68ac0e3e9" from alembic import op from sqlalchemy.sql import text @@ -16,23 +16,35 @@ def upgrade(): conn = op.get_bind() - conn.execute(text(''' + conn.execute( + text( + """ ALTER TABLE contact ADD COLUMN namespace_id INTEGER, ADD FOREIGN KEY(namespace_id) REFERENCES namespace (id) - ''')) + """ + ) + ) - conn.execute(text(''' + conn.execute( + text( + """ UPDATE contact JOIN namespace ON contact.account_id=namespace.account_id - SET contact.namespace_id=namespace.id''')) + SET contact.namespace_id=namespace.id""" + ) + ) - conn.execute(text(''' + conn.execute( + text( + """ ALTER TABLE contact DROP INDEX uid, DROP FOREIGN KEY contact_ibfk_1, ADD CONSTRAINT uid UNIQUE (uid, source, namespace_id, provider_name), DROP COLUMN account_id - ''')) + """ + ) + ) def downgrade(): diff --git a/migrations/versions/102_add_namespace_to_events.py b/migrations/versions/102_add_namespace_to_events.py index d33bbd0bd..b6d5d8011 100644 --- a/migrations/versions/102_add_namespace_to_events.py +++ b/migrations/versions/102_add_namespace_to_events.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '4d10bc835f44' -down_revision = '3bb01fcc755e' +revision = "4d10bc835f44" +down_revision = "3bb01fcc755e" from alembic import op from sqlalchemy.sql import text @@ -16,23 +16,35 @@ def upgrade(): conn = op.get_bind() - conn.execute(text(''' + conn.execute( + text( + """ ALTER TABLE event ADD COLUMN namespace_id INTEGER, ADD FOREIGN KEY(namespace_id) REFERENCES namespace (id) - ''')) + """ + ) + ) - conn.execute(text(''' + conn.execute( + text( + """ UPDATE event JOIN namespace ON event.account_id=namespace.account_id - SET event.namespace_id=namespace.id''')) + SET event.namespace_id=namespace.id""" + ) + ) - conn.execute(text(''' + conn.execute( + text( + """ ALTER TABLE event DROP INDEX uuid, DROP FOREIGN KEY event_ibfk_1, ADD CONSTRAINT uuid UNIQUE (uid, source, namespace_id, provider_name), DROP COLUMN account_id - ''')) + """ + ) + ) def downgrade(): diff --git a/migrations/versions/103_add_namespace_to_calendars.py b/migrations/versions/103_add_namespace_to_calendars.py index b0972c8b5..879101b29 100644 --- a/migrations/versions/103_add_namespace_to_calendars.py +++ b/migrations/versions/103_add_namespace_to_calendars.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '4015edc83ba' -down_revision = '4d10bc835f44' +revision = "4015edc83ba" +down_revision = "4d10bc835f44" from alembic import op import sqlalchemy as sa @@ -16,19 +16,26 @@ def upgrade(): - op.add_column('calendar', sa.Column('namespace_id', sa.Integer(), - sa.ForeignKey('namespace.id'))) + op.add_column( + "calendar", + sa.Column("namespace_id", sa.Integer(), sa.ForeignKey("namespace.id")), + ) conn = op.get_bind() - conn.execute(text(''' + conn.execute( + text( + """ UPDATE calendar JOIN namespace ON calendar.account_id=namespace.account_id - SET calendar.namespace_id=namespace.id''')) - - op.drop_constraint('calendar_ibfk_1', 'calendar', type_='foreignkey') - op.drop_constraint(u'uuid', 'calendar', type_='unique') - op.create_unique_constraint('uuid', 'calendar', ['namespace_id', - 'provider_name', 'name']) - op.drop_column('calendar', 'account_id') + SET calendar.namespace_id=namespace.id""" + ) + ) + + op.drop_constraint("calendar_ibfk_1", "calendar", type_="foreignkey") + op.drop_constraint(u"uuid", "calendar", type_="unique") + op.create_unique_constraint( + "uuid", "calendar", ["namespace_id", "provider_name", "name"] + ) + op.drop_column("calendar", "account_id") def downgrade(): diff --git a/migrations/versions/104_add_message_inbox_uid_index.py b/migrations/versions/104_add_message_inbox_uid_index.py index a84b7d56f..944aa2bba 100644 --- a/migrations/versions/104_add_message_inbox_uid_index.py +++ b/migrations/versions/104_add_message_inbox_uid_index.py @@ -7,16 +7,15 @@ """ # revision identifiers, used by Alembic. -revision = '569b9d365295' -down_revision = '4015edc83ba' +revision = "569b9d365295" +down_revision = "4015edc83ba" from alembic import op def upgrade(): - op.create_index('ix_message_inbox_uid', 'message', ['inbox_uid'], - unique=False) + op.create_index("ix_message_inbox_uid", "message", ["inbox_uid"], unique=False) def downgrade(): - op.drop_index('ix_message_inbox_uid', table_name='message') + op.drop_index("ix_message_inbox_uid", table_name="message") diff --git a/migrations/versions/105_add_subject_indexes.py b/migrations/versions/105_add_subject_indexes.py index e76f73428..38794e15e 100644 --- a/migrations/versions/105_add_subject_indexes.py +++ b/migrations/versions/105_add_subject_indexes.py @@ -7,17 +7,17 @@ """ # revision identifiers, used by Alembic. -revision = '37cd05edd433' -down_revision = '569b9d365295' +revision = "37cd05edd433" +down_revision = "569b9d365295" from alembic import op def upgrade(): - op.create_index('ix_message_subject', 'message', ['subject'], unique=False) - op.create_index('ix_thread_subject', 'thread', ['subject'], unique=False) + op.create_index("ix_message_subject", "message", ["subject"], unique=False) + op.create_index("ix_thread_subject", "thread", ["subject"], unique=False) def downgrade(): - op.drop_index('ix_thread_subject', table_name='thread') - op.drop_index('ix_message_subject', table_name='message') + op.drop_index("ix_thread_subject", table_name="thread") + op.drop_index("ix_message_subject", table_name="message") diff --git a/migrations/versions/106_add_more_indexes.py b/migrations/versions/106_add_more_indexes.py index 962dfea1c..c1519ca8c 100644 --- a/migrations/versions/106_add_more_indexes.py +++ b/migrations/versions/106_add_more_indexes.py @@ -7,22 +7,21 @@ """ # revision identifiers, used by Alembic. 
-revision = '118b3cdd0185' -down_revision = '37cd05edd433' +revision = "118b3cdd0185" +down_revision = "37cd05edd433" from alembic import op def upgrade(): - op.create_index('ix_thread_recentdate', 'thread', ['recentdate'], - unique=False) - op.create_index('ix_thread_subjectdate', 'thread', ['subjectdate'], - unique=False) - op.create_index('ix_message_received_date', 'message', ['received_date'], - unique=False) + op.create_index("ix_thread_recentdate", "thread", ["recentdate"], unique=False) + op.create_index("ix_thread_subjectdate", "thread", ["subjectdate"], unique=False) + op.create_index( + "ix_message_received_date", "message", ["received_date"], unique=False + ) def downgrade(): - op.drop_index('ix_thread_subjectdate', table_name='thread') - op.drop_index('ix_thread_recentdate', table_name='thread') - op.drop_index('ix_message_received_date', table_name='message') + op.drop_index("ix_thread_subjectdate", table_name="thread") + op.drop_index("ix_thread_recentdate", table_name="thread") + op.drop_index("ix_message_received_date", table_name="message") diff --git a/migrations/versions/107_drop_eas_state.py b/migrations/versions/107_drop_eas_state.py index fdf4a4e07..17de413a6 100644 --- a/migrations/versions/107_drop_eas_state.py +++ b/migrations/versions/107_drop_eas_state.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '3cea90bfcdea' -down_revision = '118b3cdd0185' +revision = "3cea90bfcdea" +down_revision = "118b3cdd0185" from alembic import op import sqlalchemy as sa @@ -16,12 +16,13 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine() Base = sa.ext.declarative.declarative_base() Base.metadata.reflect(engine) - if 'easaccount' in Base.metadata.tables: - op.drop_column('easaccount', 'eas_state') + if "easaccount" in Base.metadata.tables: + op.drop_column("easaccount", "eas_state") def downgrade(): diff --git a/migrations/versions/108_easaccount_username.py b/migrations/versions/108_easaccount_username.py index edebbc0a3..a2ca1802c 100644 --- a/migrations/versions/108_easaccount_username.py +++ b/migrations/versions/108_easaccount_username.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '2f97277cd86d' -down_revision = '3cea90bfcdea' +revision = "2f97277cd86d" +down_revision = "3cea90bfcdea" from alembic import op import sqlalchemy as sa @@ -16,31 +16,31 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine() - if not engine.has_table('easaccount'): + if not engine.has_table("easaccount"): return # We allow nullable=True because we don't have usernames for existing accounts. # Furthermore, we won't always get a username. 
from inbox.models.constants import MAX_INDEXABLE_LENGTH - op.add_column('easaccount', - sa.Column('username', sa.String(255), nullable=True)) + op.add_column("easaccount", sa.Column("username", sa.String(255), nullable=True)) - op.add_column('easaccount', - sa.Column('eas_auth', sa.String(MAX_INDEXABLE_LENGTH), - nullable=True)) + op.add_column( + "easaccount", + sa.Column("eas_auth", sa.String(MAX_INDEXABLE_LENGTH), nullable=True), + ) Base = sa.ext.declarative.declarative_base() Base.metadata.reflect(engine) from inbox.models.session import session_scope class EASAccount(Base): - __table__ = Base.metadata.tables['easaccount'] + __table__ = Base.metadata.tables["easaccount"] - with session_scope(versioned=False) as \ - db_session: + with session_scope(versioned=False) as db_session: accts = db_session.query(EASAccount).all() for a in accts: @@ -49,14 +49,19 @@ class EASAccount(Base): db_session.commit() - op.alter_column('easaccount', 'eas_auth', nullable=False, - existing_type=sa.String(MAX_INDEXABLE_LENGTH)) + op.alter_column( + "easaccount", + "eas_auth", + nullable=False, + existing_type=sa.String(MAX_INDEXABLE_LENGTH), + ) def downgrade(): from inbox.ignition import main_engine + engine = main_engine() - if engine.has_table('easaccount'): - op.drop_column('easaccount', 'username') - op.drop_column('easaccount', 'eas_auth') + if engine.has_table("easaccount"): + op.drop_column("easaccount", "username") + op.drop_column("easaccount", "eas_auth") diff --git a/migrations/versions/109_add_retries_column_to_the_actionlog.py b/migrations/versions/109_add_retries_column_to_the_actionlog.py index b46fe77a2..3ce991b86 100644 --- a/migrations/versions/109_add_retries_column_to_the_actionlog.py +++ b/migrations/versions/109_add_retries_column_to_the_actionlog.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '5709063bff01' -down_revision = '2f97277cd86d' +revision = "5709063bff01" +down_revision = "2f97277cd86d" from alembic import op import sqlalchemy as sa @@ -16,17 +16,25 @@ def upgrade(): - op.add_column('actionlog', - sa.Column('retries', sa.Integer, nullable=False, - server_default='0')) - op.add_column('actionlog', - sa.Column('status', sa.Enum('pending', 'successful', 'failed'), - server_default='pending')) + op.add_column( + "actionlog", + sa.Column("retries", sa.Integer, nullable=False, server_default="0"), + ) + op.add_column( + "actionlog", + sa.Column( + "status", + sa.Enum("pending", "successful", "failed"), + server_default="pending", + ), + ) conn = op.get_bind() - conn.execute(text("UPDATE actionlog SET status='successful' WHERE executed is TRUE")) + conn.execute( + text("UPDATE actionlog SET status='successful' WHERE executed is TRUE") + ) - op.drop_column('actionlog', u'executed') + op.drop_column("actionlog", u"executed") def downgrade(): diff --git a/migrations/versions/110_add_thread_index.py b/migrations/versions/110_add_thread_index.py index 6804db62b..513061d7f 100644 --- a/migrations/versions/110_add_thread_index.py +++ b/migrations/versions/110_add_thread_index.py @@ -7,17 +7,20 @@ """ # revision identifiers, used by Alembic. 
-revision = '4011b943a24d' -down_revision = '5709063bff01' +revision = "4011b943a24d" +down_revision = "5709063bff01" from alembic import op def upgrade(): - op.create_index('ix_thread_namespace_id_recentdate_deleted_at', 'thread', - ['namespace_id', 'recentdate', 'deleted_at'], unique=False) + op.create_index( + "ix_thread_namespace_id_recentdate_deleted_at", + "thread", + ["namespace_id", "recentdate", "deleted_at"], + unique=False, + ) def downgrade(): - op.drop_index('ix_thread_namespace_id_recentdate_deleted_at', - table_name='thread') + op.drop_index("ix_thread_namespace_id_recentdate_deleted_at", table_name="thread") diff --git a/migrations/versions/111_add_account_name_column.py b/migrations/versions/111_add_account_name_column.py index 3a82ddb88..77a026012 100644 --- a/migrations/versions/111_add_account_name_column.py +++ b/migrations/versions/111_add_account_name_column.py @@ -7,25 +7,31 @@ """ # revision identifiers, used by Alembic. -revision = '22d076f48b88' -down_revision = '4011b943a24d' +revision = "22d076f48b88" +down_revision = "4011b943a24d" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('account', sa.Column('name', sa.String(length=256), - server_default='', nullable=False)) + op.add_column( + "account", + sa.Column("name", sa.String(length=256), server_default="", nullable=False), + ) conn = op.get_bind() - conn.execute(''' + conn.execute( + """ UPDATE account JOIN gmailaccount ON account.id=gmailaccount.id - SET account.name=gmailaccount.name''') - conn.execute(''' + SET account.name=gmailaccount.name""" + ) + conn.execute( + """ UPDATE account JOIN outlookaccount ON account.id=outlookaccount.id - SET account.name=outlookaccount.name''') - op.drop_column('gmailaccount', 'name') - op.drop_column('outlookaccount', 'name') + SET account.name=outlookaccount.name""" + ) + op.drop_column("gmailaccount", "name") + op.drop_column("outlookaccount", "name") def downgrade(): diff --git a/migrations/versions/112_imap_delete_cascades.py b/migrations/versions/112_imap_delete_cascades.py index 9227b0db2..bb7ce1aae 100644 --- a/migrations/versions/112_imap_delete_cascades.py +++ b/migrations/versions/112_imap_delete_cascades.py @@ -7,24 +7,28 @@ """ # revision identifiers, used by Alembic. -revision = '26911668870a' -down_revision = '22d076f48b88' +revision = "26911668870a" +down_revision = "22d076f48b88" from alembic import op def upgrade(): conn = op.get_bind() - conn.execute(''' + conn.execute( + """ ALTER TABLE imapfolderinfo DROP FOREIGN KEY imapfolderinfo_ibfk_2, ADD FOREIGN KEY (folder_id) REFERENCES folder (id) ON DELETE - CASCADE''') + CASCADE""" + ) - conn.execute(''' + conn.execute( + """ ALTER TABLE imapfoldersyncstatus DROP FOREIGN KEY imapfoldersyncstatus_ibfk_2, ADD FOREIGN KEY (folder_id) REFERENCES folder (id) ON DELETE - CASCADE''') + CASCADE""" + ) def downgrade(): diff --git a/migrations/versions/113_add_custom_imap_overrides.py b/migrations/versions/113_add_custom_imap_overrides.py index 45d725b1b..b22cea0a1 100644 --- a/migrations/versions/113_add_custom_imap_overrides.py +++ b/migrations/versions/113_add_custom_imap_overrides.py @@ -7,30 +7,38 @@ """ # revision identifiers, used by Alembic. 
-revision = '26bfb2e45c47' -down_revision = '26911668870a' +revision = "26bfb2e45c47" +down_revision = "26911668870a" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('imapaccount', sa.Column('_imap_server_host', - sa.String(length=255), - nullable=True)) - op.add_column('imapaccount', sa.Column('_imap_server_port', sa.Integer(), - server_default='993', - nullable=False)) - op.add_column('imapaccount', sa.Column('_smtp_server_host', - sa.String(length=255), - nullable=True)) - op.add_column('imapaccount', sa.Column('_smtp_server_port', sa.Integer(), - server_default='587', - nullable=False)) + op.add_column( + "imapaccount", + sa.Column("_imap_server_host", sa.String(length=255), nullable=True), + ) + op.add_column( + "imapaccount", + sa.Column( + "_imap_server_port", sa.Integer(), server_default="993", nullable=False + ), + ) + op.add_column( + "imapaccount", + sa.Column("_smtp_server_host", sa.String(length=255), nullable=True), + ) + op.add_column( + "imapaccount", + sa.Column( + "_smtp_server_port", sa.Integer(), server_default="587", nullable=False + ), + ) def downgrade(): - op.drop_column('imapaccount', '_smtp_server_port') - op.drop_column('imapaccount', '_smtp_server_host') - op.drop_column('imapaccount', '_imap_server_port') - op.drop_column('imapaccount', '_imap_server_host') + op.drop_column("imapaccount", "_smtp_server_port") + op.drop_column("imapaccount", "_smtp_server_host") + op.drop_column("imapaccount", "_imap_server_port") + op.drop_column("imapaccount", "_imap_server_host") diff --git a/migrations/versions/114_eas_twodevices_pledge.py b/migrations/versions/114_eas_twodevices_pledge.py index 81712ff6c..49c15a18a 100644 --- a/migrations/versions/114_eas_twodevices_pledge.py +++ b/migrations/versions/114_eas_twodevices_pledge.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = 'ad7b856bcc0' -down_revision = '26bfb2e45c47' +revision = "ad7b856bcc0" +down_revision = "26bfb2e45c47" from alembic import op import sqlalchemy as sa @@ -16,72 +16,113 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine() - if not engine.has_table('easaccount'): + if not engine.has_table("easaccount"): return - op.create_table('easdevice', - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.Column('updated_at', sa.DateTime(), nullable=True), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('filtered', sa.Boolean(), nullable=False), - sa.Column('eas_device_id', sa.String(length=32), - nullable=False), - sa.Column('eas_device_type', sa.String(length=32), - nullable=False), - sa.Column('eas_policy_key', sa.String(length=64), - nullable=True), - sa.Column('eas_sync_key', sa.String(length=64), - server_default='0', nullable=False), - sa.PrimaryKeyConstraint('id')) - op.create_index('ix_easdevice_created_at', 'easdevice', ['created_at'], - unique=False) - op.create_index('ix_easdevice_updated_at', 'easdevice', ['updated_at'], - unique=False) - op.create_index('ix_easdevice_deleted_at', 'easdevice', ['deleted_at'], - unique=False) - op.create_index('ix_easdevice_eas_device_id', 'easdevice', - ['eas_device_id'], unique=False) + op.create_table( + "easdevice", + sa.Column("created_at", sa.DateTime(), nullable=True), + sa.Column("updated_at", sa.DateTime(), nullable=True), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("filtered", sa.Boolean(), nullable=False), + sa.Column("eas_device_id", sa.String(length=32), nullable=False), + sa.Column("eas_device_type", sa.String(length=32), nullable=False), + sa.Column("eas_policy_key", sa.String(length=64), nullable=True), + sa.Column( + "eas_sync_key", sa.String(length=64), server_default="0", nullable=False + ), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + "ix_easdevice_created_at", "easdevice", ["created_at"], unique=False + ) + op.create_index( + "ix_easdevice_updated_at", "easdevice", ["updated_at"], unique=False + ) + op.create_index( + "ix_easdevice_deleted_at", "easdevice", ["deleted_at"], unique=False + ) + op.create_index( + "ix_easdevice_eas_device_id", "easdevice", ["eas_device_id"], unique=False + ) - op.add_column('easaccount', sa.Column('primary_device_id', sa.Integer(), - sa.ForeignKey('easdevice.id'), - nullable=True)) - op.add_column('easaccount', sa.Column('secondary_device_id', sa.Integer(), - sa.ForeignKey('easdevice.id'), - nullable=True)) + op.add_column( + "easaccount", + sa.Column( + "primary_device_id", + sa.Integer(), + sa.ForeignKey("easdevice.id"), + nullable=True, + ), + ) + op.add_column( + "easaccount", + sa.Column( + "secondary_device_id", + sa.Integer(), + sa.ForeignKey("easdevice.id"), + nullable=True, + ), + ) - op.add_column('easfoldersyncstatus', sa.Column( - 'device_id', sa.Integer(), sa.ForeignKey( - 'easdevice.id', ondelete='CASCADE'), nullable=True)) - op.drop_constraint('account_id', 'easfoldersyncstatus', - type_='unique') - op.create_unique_constraint(None, 'easfoldersyncstatus', - ['account_id', 'device_id', 'folder_id']) - op.drop_constraint('easfoldersyncstatus_ibfk_1', 'easfoldersyncstatus', - type_='foreignkey') - op.drop_constraint('easfoldersyncstatus_ibfk_2', 'easfoldersyncstatus', - type_='foreignkey') - op.drop_constraint('account_id_2', 'easfoldersyncstatus', - type_='unique') - 
op.create_foreign_key('easfoldersyncstatus_ibfk_1', 'easfoldersyncstatus', - 'easaccount', ['account_id'], ['id']) - op.create_foreign_key('easfoldersyncstatus_ibfk_2', 'easfoldersyncstatus', - 'folder', ['folder_id'], ['id']) - op.create_unique_constraint(None, 'easfoldersyncstatus', - ['account_id', 'device_id', 'eas_folder_id']) + op.add_column( + "easfoldersyncstatus", + sa.Column( + "device_id", + sa.Integer(), + sa.ForeignKey("easdevice.id", ondelete="CASCADE"), + nullable=True, + ), + ) + op.drop_constraint("account_id", "easfoldersyncstatus", type_="unique") + op.create_unique_constraint( + None, "easfoldersyncstatus", ["account_id", "device_id", "folder_id"] + ) + op.drop_constraint( + "easfoldersyncstatus_ibfk_1", "easfoldersyncstatus", type_="foreignkey" + ) + op.drop_constraint( + "easfoldersyncstatus_ibfk_2", "easfoldersyncstatus", type_="foreignkey" + ) + op.drop_constraint("account_id_2", "easfoldersyncstatus", type_="unique") + op.create_foreign_key( + "easfoldersyncstatus_ibfk_1", + "easfoldersyncstatus", + "easaccount", + ["account_id"], + ["id"], + ) + op.create_foreign_key( + "easfoldersyncstatus_ibfk_2", + "easfoldersyncstatus", + "folder", + ["folder_id"], + ["id"], + ) + op.create_unique_constraint( + None, "easfoldersyncstatus", ["account_id", "device_id", "eas_folder_id"] + ) op.add_column( - 'easuid', sa.Column('device_id', sa.Integer(), sa.ForeignKey( - 'easdevice.id', ondelete='CASCADE'), nullable=True)) - op.drop_constraint('easuid_ibfk_3', 'easuid', type_='foreignkey') - op.drop_constraint('folder_id', 'easuid', type_='unique') - op.create_foreign_key('easuid_ibfk_3', 'easuid', 'folder', - ['folder_id'], ['id']) + "easuid", + sa.Column( + "device_id", + sa.Integer(), + sa.ForeignKey("easdevice.id", ondelete="CASCADE"), + nullable=True, + ), + ) + op.drop_constraint("easuid_ibfk_3", "easuid", type_="foreignkey") + op.drop_constraint("folder_id", "easuid", type_="unique") + op.create_foreign_key("easuid_ibfk_3", "easuid", "folder", ["folder_id"], ["id"]) op.create_unique_constraint( - None, 'easuid', ['folder_id', 'msg_uid', 'easaccount_id', 'device_id']) + None, "easuid", ["folder_id", "msg_uid", "easaccount_id", "device_id"] + ) def downgrade(): - raise Exception('!') + raise Exception("!") diff --git a/migrations/versions/115_eas_twodevices_turn.py b/migrations/versions/115_eas_twodevices_turn.py index 8c92a401d..991a1ee45 100644 --- a/migrations/versions/115_eas_twodevices_turn.py +++ b/migrations/versions/115_eas_twodevices_turn.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '17dc9c049f8b' -down_revision = 'ad7b856bcc0' +revision = "17dc9c049f8b" +down_revision = "ad7b856bcc0" from datetime import datetime @@ -19,9 +19,10 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine() - if not engine.has_table('easaccount'): + if not engine.has_table("easaccount"): return from inbox.models.session import session_scope @@ -30,39 +31,48 @@ def upgrade(): Base.metadata.reflect(engine) class EASAccount(Base): - __table__ = Base.metadata.tables['easaccount'] + __table__ = Base.metadata.tables["easaccount"] primary_device = sa.orm.relationship( - 'EASDevice', primaryjoin='and_(EASAccount.primary_device_id == EASDevice.id, ' - 'EASDevice.deleted_at.is_(None))', uselist=False) + "EASDevice", + primaryjoin="and_(EASAccount.primary_device_id == EASDevice.id, " + "EASDevice.deleted_at.is_(None))", + uselist=False, + ) secondary_device = sa.orm.relationship( - 'EASDevice', primaryjoin='and_(EASAccount.secondary_device_id == EASDevice.id, ' - 'EASDevice.deleted_at.is_(None))', uselist=False) + "EASDevice", + primaryjoin="and_(EASAccount.secondary_device_id == EASDevice.id, " + "EASDevice.deleted_at.is_(None))", + uselist=False, + ) class EASDevice(Base): - __table__ = Base.metadata.tables['easdevice'] + __table__ = Base.metadata.tables["easdevice"] - with session_scope(versioned=False) as \ - db_session: + with session_scope(versioned=False) as db_session: accts = db_session.query(EASAccount).all() for a in accts: # Set both to filtered=False, //needed// for correct deploy. - primary = EASDevice(created_at=datetime.utcnow(), - updated_at=datetime.utcnow(), - filtered=False, - eas_device_id=a._eas_device_id, - eas_device_type=a._eas_device_type, - eas_policy_key=a.eas_policy_key, - eas_sync_key=a.eas_account_sync_key) - - secondary = EASDevice(created_at=datetime.utcnow(), - updated_at=datetime.utcnow(), - filtered=False, - eas_device_id=a._eas_device_id, - eas_device_type=a._eas_device_type, - eas_policy_key=a.eas_policy_key, - eas_sync_key=a.eas_account_sync_key) + primary = EASDevice( + created_at=datetime.utcnow(), + updated_at=datetime.utcnow(), + filtered=False, + eas_device_id=a._eas_device_id, + eas_device_type=a._eas_device_type, + eas_policy_key=a.eas_policy_key, + eas_sync_key=a.eas_account_sync_key, + ) + + secondary = EASDevice( + created_at=datetime.utcnow(), + updated_at=datetime.utcnow(), + filtered=False, + eas_device_id=a._eas_device_id, + eas_device_type=a._eas_device_type, + eas_policy_key=a.eas_policy_key, + eas_sync_key=a.eas_account_sync_key, + ) a.primary_device = primary a.secondary_device = secondary @@ -74,24 +84,39 @@ class EASDevice(Base): conn = op.get_bind() acct_device_map = dict( - (id_, device_id) for id_, device_id in conn.execute(text( - """SELECT id, secondary_device_id from easaccount"""))) + (id_, device_id) + for id_, device_id in conn.execute( + text("""SELECT id, secondary_device_id from easaccount""") + ) + ) - print 'acct_device_map: ', acct_device_map + print "acct_device_map: ", acct_device_map for acct_id, device_id in acct_device_map.iteritems(): - conn.execute(text(""" + conn.execute( + text( + """ UPDATE easfoldersyncstatus SET device_id=:device_id WHERE account_id=:acct_id - """), device_id=device_id, acct_id=acct_id) - - conn.execute(text(""" + """ + ), + device_id=device_id, + acct_id=acct_id, + ) + + conn.execute( + text( + """ UPDATE easuid SET device_id=:device_id WHERE easaccount_id=:acct_id - """), device_id=device_id, acct_id=acct_id) + """ + ), + device_id=device_id, + acct_id=acct_id, 
+ ) def downgrade(): - raise Exception('!') + raise Exception("!") diff --git a/migrations/versions/116_eas_twodevices_prestige.py b/migrations/versions/116_eas_twodevices_prestige.py index 1bb586de4..528fee5cb 100644 --- a/migrations/versions/116_eas_twodevices_prestige.py +++ b/migrations/versions/116_eas_twodevices_prestige.py @@ -7,32 +7,36 @@ """ # revision identifiers, used by Alembic. -revision = '10ecf4841ac3' -down_revision = '17dc9c049f8b' +revision = "10ecf4841ac3" +down_revision = "17dc9c049f8b" from alembic import op import sqlalchemy as sa def upgrade(): - op.alter_column('easdevice', 'created_at', - existing_type=sa.DateTime(), nullable=False) - op.alter_column('easdevice', 'updated_at', - existing_type=sa.DateTime(), nullable=False) - op.alter_column('easaccount', 'primary_device_id', - existing_type=sa.Integer(), nullable=False) - op.alter_column('easaccount', 'secondary_device_id', - existing_type=sa.Integer(), nullable=False) - op.alter_column('easfoldersyncstatus', 'device_id', - existing_type=sa.Integer(), nullable=False) - op.alter_column('easuid', 'device_id', - existing_type=sa.Integer(), nullable=False) - - op.drop_column('easaccount', '_eas_device_type') - op.drop_column('easaccount', '_eas_device_id') - op.drop_column('easaccount', 'eas_policy_key') - op.drop_column('easaccount', 'eas_account_sync_key') + op.alter_column( + "easdevice", "created_at", existing_type=sa.DateTime(), nullable=False + ) + op.alter_column( + "easdevice", "updated_at", existing_type=sa.DateTime(), nullable=False + ) + op.alter_column( + "easaccount", "primary_device_id", existing_type=sa.Integer(), nullable=False + ) + op.alter_column( + "easaccount", "secondary_device_id", existing_type=sa.Integer(), nullable=False + ) + op.alter_column( + "easfoldersyncstatus", "device_id", existing_type=sa.Integer(), nullable=False + ) + op.alter_column("easuid", "device_id", existing_type=sa.Integer(), nullable=False) + + op.drop_column("easaccount", "_eas_device_type") + op.drop_column("easaccount", "_eas_device_id") + op.drop_column("easaccount", "eas_policy_key") + op.drop_column("easaccount", "eas_account_sync_key") def downgrade(): - raise Exception('!') + raise Exception("!") diff --git a/migrations/versions/117_fix_easuid_delete_cascades.py b/migrations/versions/117_fix_easuid_delete_cascades.py index 4df0392c0..89bf007b1 100644 --- a/migrations/versions/117_fix_easuid_delete_cascades.py +++ b/migrations/versions/117_fix_easuid_delete_cascades.py @@ -7,21 +7,23 @@ """ # revision identifiers, used by Alembic. 
-revision = '420bf3422c4f' -down_revision = '10ecf4841ac3' +revision = "420bf3422c4f" +down_revision = "10ecf4841ac3" from alembic import op def upgrade(): - op.drop_constraint('easuid_ibfk_2', 'easuid', type_='foreignkey') - op.create_foreign_key('easuid_ibfk_2', 'easuid', 'message', - ['message_id'], ['id'], ondelete='cascade') + op.drop_constraint("easuid_ibfk_2", "easuid", type_="foreignkey") + op.create_foreign_key( + "easuid_ibfk_2", "easuid", "message", ["message_id"], ["id"], ondelete="cascade" + ) - op.drop_constraint('easuid_ibfk_3', 'easuid', type_='foreignkey') - op.create_foreign_key('easuid_ibfk_3', 'easuid', 'folder', - ['folder_id'], ['id'], ondelete='cascade') + op.drop_constraint("easuid_ibfk_3", "easuid", type_="foreignkey") + op.create_foreign_key( + "easuid_ibfk_3", "easuid", "folder", ["folder_id"], ["id"], ondelete="cascade" + ) def downgrade(): - raise Exception('Nope.') + raise Exception("Nope.") diff --git a/migrations/versions/118_store_label_information_per_uid.py b/migrations/versions/118_store_label_information_per_uid.py index 6ec088aa5..797336950 100644 --- a/migrations/versions/118_store_label_information_per_uid.py +++ b/migrations/versions/118_store_label_information_per_uid.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '4634999269' -down_revision = '420bf3422c4f' +revision = "4634999269" +down_revision = "420bf3422c4f" from alembic import op import sqlalchemy as sa @@ -18,12 +18,11 @@ def upgrade(): - op.add_column('imapuid', sa.Column('g_labels', JSON(), - nullable=True)) + op.add_column("imapuid", sa.Column("g_labels", JSON(), nullable=True)) conn = op.get_bind() conn.execute(text("UPDATE imapuid SET g_labels = '[]'")) def downgrade(): - op.drop_column('imapuid', 'g_labels') + op.drop_column("imapuid", "g_labels") diff --git a/migrations/versions/119_store_full_message_body.py b/migrations/versions/119_store_full_message_body.py index bba0c6afd..50805bfdb 100644 --- a/migrations/versions/119_store_full_message_body.py +++ b/migrations/versions/119_store_full_message_body.py @@ -7,20 +7,20 @@ """ # revision identifiers, used by Alembic. -revision = '58732bb5d14b' -down_revision = '4634999269' +revision = "58732bb5d14b" +down_revision = "4634999269" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('message', sa.Column('full_body_id', - sa.Integer, nullable=True)) - op.create_foreign_key("full_body_id_fk", "message", "block", - ["full_body_id"], ["id"]) + op.add_column("message", sa.Column("full_body_id", sa.Integer, nullable=True)) + op.create_foreign_key( + "full_body_id_fk", "message", "block", ["full_body_id"], ["id"] + ) def downgrade(): - op.drop_constraint('full_body_id_fk', 'message', type_='foreignkey') - op.drop_column('message', 'full_body_id') + op.drop_constraint("full_body_id_fk", "message", type_="foreignkey") + op.drop_column("message", "full_body_id") diff --git a/migrations/versions/120_simplify_transaction_log.py b/migrations/versions/120_simplify_transaction_log.py index ecaea26da..6cb23827c 100644 --- a/migrations/versions/120_simplify_transaction_log.py +++ b/migrations/versions/120_simplify_transaction_log.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '8c2406df6f8' -down_revision = '58732bb5d14b' +revision = "8c2406df6f8" +down_revision = "58732bb5d14b" from alembic import op from sqlalchemy.sql import text @@ -16,14 +16,18 @@ def upgrade(): conn = op.get_bind() - conn.execute(text(''' + conn.execute( + text( + """ ALTER TABLE transaction CHANGE public_snapshot snapshot LONGTEXT, CHANGE table_name object_type VARCHAR(20), DROP COLUMN private_snapshot, DROP COLUMN delta, ADD INDEX `ix_transaction_object_public_id` (`object_public_id`) - ''')) + """ + ) + ) def downgrade(): diff --git a/migrations/versions/121_add_searchindexcursor.py b/migrations/versions/121_add_searchindexcursor.py index 274fc136a..4caca9f38 100644 --- a/migrations/versions/121_add_searchindexcursor.py +++ b/migrations/versions/121_add_searchindexcursor.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '526eefc1d600' -down_revision = '8c2406df6f8' +revision = "526eefc1d600" +down_revision = "8c2406df6f8" from alembic import op import sqlalchemy as sa @@ -16,27 +16,44 @@ def upgrade(): op.create_table( - 'searchindexcursor', - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('transaction_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['transaction_id'], ['transaction.id'], ), - sa.PrimaryKeyConstraint('id') + "searchindexcursor", + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("transaction_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["transaction_id"], ["transaction.id"],), + sa.PrimaryKeyConstraint("id"), ) - op.create_index('ix_searchindexcursor_created_at', 'searchindexcursor', - ['created_at'], unique=False) - op.create_index('ix_searchindexcursor_deleted_at', 'searchindexcursor', - ['deleted_at'], unique=False) - op.create_index('ix_searchindexcursor_updated_at', 'searchindexcursor', - ['updated_at'], unique=False) - op.create_index('ix_searchindexcursor_transaction_id', 'searchindexcursor', - ['transaction_id'], unique=False) + op.create_index( + "ix_searchindexcursor_created_at", + "searchindexcursor", + ["created_at"], + unique=False, + ) + op.create_index( + "ix_searchindexcursor_deleted_at", + "searchindexcursor", + ["deleted_at"], + unique=False, + ) + op.create_index( + "ix_searchindexcursor_updated_at", + "searchindexcursor", + ["updated_at"], + unique=False, + ) + op.create_index( + "ix_searchindexcursor_transaction_id", + "searchindexcursor", + ["transaction_id"], + unique=False, + ) def downgrade(): - op.drop_constraint('searchindexcursor_ibfk_1', 'searchindexcursor', - type_='foreignkey') - op.drop_table('searchindexcursor') + op.drop_constraint( + "searchindexcursor_ibfk_1", "searchindexcursor", type_="foreignkey" + ) + op.drop_table("searchindexcursor") diff --git a/migrations/versions/122_add_easeventuid.py b/migrations/versions/122_add_easeventuid.py index 59c554c6e..15236890b 100644 --- a/migrations/versions/122_add_easeventuid.py +++ b/migrations/versions/122_add_easeventuid.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '476c5185121b' -down_revision = '526eefc1d600' +revision = "476c5185121b" +down_revision = "526eefc1d600" from alembic import op import sqlalchemy as sa @@ -16,45 +16,48 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine() - if not engine.has_table('easaccount'): + if not engine.has_table("easaccount"): return op.create_table( - 'easeventuid', - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('easaccount_id', sa.Integer(), nullable=False), - sa.Column('event_id', sa.Integer(), nullable=False), - sa.Column('folder_id', sa.Integer(), nullable=False), - sa.Column('fld_uid', sa.Integer(), nullable=False), - sa.Column('msg_uid', sa.Integer(), nullable=False), - - sa.PrimaryKeyConstraint('id'), - sa.ForeignKeyConstraint(['easaccount_id'], ['easaccount.id'], - ondelete='CASCADE'), - sa.ForeignKeyConstraint(['event_id'], ['event.id'], - ondelete='CASCADE'), - sa.ForeignKeyConstraint(['folder_id'], ['folder.id'], - ondelete='CASCADE') + "easeventuid", + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("easaccount_id", sa.Integer(), nullable=False), + sa.Column("event_id", sa.Integer(), nullable=False), + sa.Column("folder_id", sa.Integer(), nullable=False), + sa.Column("fld_uid", sa.Integer(), nullable=False), + sa.Column("msg_uid", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.ForeignKeyConstraint( + ["easaccount_id"], ["easaccount.id"], ondelete="CASCADE" + ), + sa.ForeignKeyConstraint(["event_id"], ["event.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["folder_id"], ["folder.id"], ondelete="CASCADE"), ) - op.create_index('ix_easeventuid_created_at', 'easeventuid', - ['created_at'], unique=False) - op.create_index('ix_easeventuid_updated_at', 'easeventuid', - ['updated_at'], unique=False) - op.create_index('ix_easeventuid_deleted_at', 'easeventuid', - ['deleted_at'], unique=False) + op.create_index( + "ix_easeventuid_created_at", "easeventuid", ["created_at"], unique=False + ) + op.create_index( + "ix_easeventuid_updated_at", "easeventuid", ["updated_at"], unique=False + ) + op.create_index( + "ix_easeventuid_deleted_at", "easeventuid", ["deleted_at"], unique=False + ) - op.create_unique_constraint('uq_folder_id', 'easeventuid', - ['folder_id', 'msg_uid', 'easaccount_id']) + op.create_unique_constraint( + "uq_folder_id", "easeventuid", ["folder_id", "msg_uid", "easaccount_id"] + ) def downgrade(): - op.drop_constraint('easeventuid_ibfk_1', 'easeventuid', type_='foreignkey') - op.drop_constraint('easeventuid_ibfk_2', 'easeventuid', type_='foreignkey') - op.drop_constraint('easeventuid_ibfk_3', 'easeventuid', type_='foreignkey') - op.drop_table('easeventuid') + op.drop_constraint("easeventuid_ibfk_1", "easeventuid", type_="foreignkey") + op.drop_constraint("easeventuid_ibfk_2", "easeventuid", type_="foreignkey") + op.drop_constraint("easeventuid_ibfk_3", "easeventuid", type_="foreignkey") + op.drop_table("easeventuid") diff --git a/migrations/versions/123_remove_gmail_inbox_syncs.py b/migrations/versions/123_remove_gmail_inbox_syncs.py index 3c9ed974e..ff3e36eaf 100644 --- a/migrations/versions/123_remove_gmail_inbox_syncs.py +++ 
b/migrations/versions/123_remove_gmail_inbox_syncs.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '3c743bd31ee2' -down_revision = '476c5185121b' +revision = "3c743bd31ee2" +down_revision = "476c5185121b" def upgrade(): @@ -19,9 +19,9 @@ def upgrade(): from inbox.models.session import session_scope from inbox.heartbeat.config import STATUS_DATABASE, get_redis_client from inbox.heartbeat.status import HeartbeatStatusKey + redis_client = get_redis_client(STATUS_DATABASE) - with session_scope(versioned=False) as \ - db_session: + with session_scope(versioned=False) as db_session: for account in db_session.query(GmailAccount): if account.inbox_folder is None: # May be the case for accounts that we can't sync, e.g. due to @@ -29,12 +29,14 @@ def upgrade(): continue q = db_session.query(ImapFolderSyncStatus).filter( ImapFolderSyncStatus.account_id == account.id, - ImapFolderSyncStatus.folder_id == account.inbox_folder.id) + ImapFolderSyncStatus.folder_id == account.inbox_folder.id, + ) q.delete() q = db_session.query(ImapUid).filter( ImapUid.account_id == account.id, - ImapUid.folder_id == account.inbox_folder.id) + ImapUid.folder_id == account.inbox_folder.id, + ) q.delete() db_session.commit() diff --git a/migrations/versions/124_remove_soft_deleted_objects.py b/migrations/versions/124_remove_soft_deleted_objects.py index d3996a3ae..47584acb9 100644 --- a/migrations/versions/124_remove_soft_deleted_objects.py +++ b/migrations/versions/124_remove_soft_deleted_objects.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '40ad73aa49df' -down_revision = '3c743bd31ee2' +revision = "40ad73aa49df" +down_revision = "3c743bd31ee2" from alembic import op @@ -16,8 +16,8 @@ def upgrade(): conn = op.get_bind() # In practice we only have messages and events with deleted_at set. - conn.execute('DELETE FROM message WHERE deleted_at IS NOT NULL;') - conn.execute('DELETE FROM event WHERE deleted_at IS NOT NULL;') + conn.execute("DELETE FROM message WHERE deleted_at IS NOT NULL;") + conn.execute("DELETE FROM event WHERE deleted_at IS NOT NULL;") def downgrade(): diff --git a/migrations/versions/125_refactor_participants_table.py b/migrations/versions/125_refactor_participants_table.py index f74d4452e..dc7c54467 100644 --- a/migrations/versions/125_refactor_participants_table.py +++ b/migrations/versions/125_refactor_participants_table.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '955792afd00' -down_revision = '40ad73aa49df' +revision = "955792afd00" +down_revision = "40ad73aa49df" from alembic import op import sqlalchemy as sa @@ -17,9 +17,9 @@ def upgrade(): from inbox.sqlalchemy_ext.util import JSON - op.add_column('event', - sa.Column('participants_by_email', JSON(), nullable=False)) - op.drop_table('eventparticipant') + + op.add_column("event", sa.Column("participants_by_email", JSON(), nullable=False)) + op.drop_table("eventparticipant") conn = op.get_bind() conn.execute(text("UPDATE event SET participants_by_email='{}'")) diff --git a/migrations/versions/126_add_account_sync_contacts_events.py b/migrations/versions/126_add_account_sync_contacts_events.py index dc73a7dd8..cd9050d39 100644 --- a/migrations/versions/126_add_account_sync_contacts_events.py +++ b/migrations/versions/126_add_account_sync_contacts_events.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '262436681c4' -down_revision = '955792afd00' +revision = "262436681c4" +down_revision = "955792afd00" from alembic import op @@ -16,36 +16,34 @@ def upgrade(): - op.add_column('account', sa.Column('sync_contacts', - sa.Boolean, - nullable=False, - default=False)) - op.add_column('account', sa.Column('sync_events', - sa.Boolean, - nullable=False, - default=False)) + op.add_column( + "account", sa.Column("sync_contacts", sa.Boolean, nullable=False, default=False) + ) + op.add_column( + "account", sa.Column("sync_events", sa.Boolean, nullable=False, default=False) + ) connection = op.get_bind() connection.execute( sa.sql.text( - ''' + """ update account join gmailaccount on account.id = gmailaccount.id set account.sync_contacts = 1 where gmailaccount.scope like "%https://www.google.com/m8/feeds%" - ''' + """ ) ) connection.execute( sa.sql.text( - ''' + """ update account join gmailaccount on account.id = gmailaccount.id set account.sync_events = 1 where gmailaccount.scope like "%https://www.googleapis.com/auth/calendar%" - ''' + """ ) ) def downgrade(): - op.drop_column('account', 'sync_contacts') - op.drop_column('account', 'sync_events') + op.drop_column("account", "sync_contacts") + op.drop_column("account", "sync_events") diff --git a/migrations/versions/127_remove_easeventuid.py b/migrations/versions/127_remove_easeventuid.py index 9487e8720..f48ae9180 100644 --- a/migrations/versions/127_remove_easeventuid.py +++ b/migrations/versions/127_remove_easeventuid.py @@ -7,25 +7,26 @@ """ # revision identifiers, used by Alembic. -revision = '581e91bd7141' -down_revision = '262436681c4' +revision = "581e91bd7141" +down_revision = "262436681c4" from alembic import op def upgrade(): from inbox.ignition import main_engine + engine = main_engine() - if not engine.has_table('easeventuid'): + if not engine.has_table("easeventuid"): return - op.drop_constraint('easeventuid_ibfk_1', 'easeventuid', type_='foreignkey') - op.drop_constraint('easeventuid_ibfk_2', 'easeventuid', type_='foreignkey') - op.drop_constraint('easeventuid_ibfk_3', 'easeventuid', type_='foreignkey') + op.drop_constraint("easeventuid_ibfk_1", "easeventuid", type_="foreignkey") + op.drop_constraint("easeventuid_ibfk_2", "easeventuid", type_="foreignkey") + op.drop_constraint("easeventuid_ibfk_3", "easeventuid", type_="foreignkey") - op.drop_table('easeventuid') + op.drop_table("easeventuid") def downgrade(): - raise Exception('No going back.') + raise Exception("No going back.") diff --git a/migrations/versions/128_fix_cascades.py b/migrations/versions/128_fix_cascades.py index 8ed6119aa..10fb78529 100644 --- a/migrations/versions/128_fix_cascades.py +++ b/migrations/versions/128_fix_cascades.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '284227d72f51' -down_revision = '581e91bd7141' +revision = "284227d72f51" +down_revision = "581e91bd7141" from alembic import op @@ -16,22 +16,22 @@ def upgrade(): connection = op.get_bind() connection.execute( - ''' + """ ALTER TABLE actionlog DROP FOREIGN KEY actionlog_ibfk_1; ALTER TABLE actionlog ADD CONSTRAINT actionlog_ibfk_1 FOREIGN KEY (namespace_id) REFERENCES namespace(id) ON DELETE CASCADE; ALTER TABLE easfoldersyncstatus DROP FOREIGN KEY easfoldersyncstatus_ibfk_3; ALTER TABLE easfoldersyncstatus ADD CONSTRAINT easfoldersyncstatus_ibfk_3 FOREIGN KEY (folder_id) REFERENCES folder(id) ON DELETE CASCADE; - ''' + """ ) def downgrade(): connection = op.get_bind() connection.execute( - ''' + """ ALTER TABLE actionlog DROP FOREIGN KEY actionlog_ibfk_1; ALTER TABLE actionlog ADD CONSTRAINT actionlog_ibfk_1 FOREIGN KEY (namespace_id) REFERENCES namespace(id); ALTER TABLE easfoldersyncstatus DROP FOREIGN KEY easfoldersyncstatus_ibfk_3; ALTER TABLE easfoldersyncstatus ADD CONSTRAINT easfoldersyncstatus_ibfk_3 FOREIGN KEY (folder_id) REFERENCES folder(id); - ''' + """ ) diff --git a/migrations/versions/129_make_folder_name_case_sensitive.py b/migrations/versions/129_make_folder_name_case_sensitive.py index 422aeda82..3542b070f 100644 --- a/migrations/versions/129_make_folder_name_case_sensitive.py +++ b/migrations/versions/129_make_folder_name_case_sensitive.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '5349c1a03fde' -down_revision = '284227d72f51' +revision = "5349c1a03fde" +down_revision = "284227d72f51" from alembic import op @@ -16,16 +16,16 @@ def upgrade(): connection = op.get_bind() connection.execute( - ''' + """ ALTER TABLE folder CHANGE name name varchar(191) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin DEFAULT NULL; - ''' + """ ) def downgrade(): connection = op.get_bind() connection.execute( - ''' + """ ALTER TABLE folder CHANGE name name varchar(191) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL; - ''' + """ ) diff --git a/migrations/versions/130_add_message_index.py b/migrations/versions/130_add_message_index.py index c68ca7f81..14ea12133 100644 --- a/migrations/versions/130_add_message_index.py +++ b/migrations/versions/130_add_message_index.py @@ -7,18 +7,20 @@ """ # revision identifiers, used by Alembic. -revision = '2b288dc444f' -down_revision = '5349c1a03fde' +revision = "2b288dc444f" +down_revision = "5349c1a03fde" from alembic import op def upgrade(): - op.create_index('ix_message_ns_id_is_draft_received_date', 'message', - ['namespace_id', 'is_draft', 'received_date'], - unique=False) + op.create_index( + "ix_message_ns_id_is_draft_received_date", + "message", + ["namespace_id", "is_draft", "received_date"], + unique=False, + ) def downgrade(): - op.drop_index('ix_message_ns_id_is_draft_received_date', - table_name='message') + op.drop_index("ix_message_ns_id_is_draft_received_date", table_name="message") diff --git a/migrations/versions/131_update_transaction_indices.py b/migrations/versions/131_update_transaction_indices.py index eb1ce9720..c206d3b41 100644 --- a/migrations/versions/131_update_transaction_indices.py +++ b/migrations/versions/131_update_transaction_indices.py @@ -7,16 +7,20 @@ """ # revision identifiers, used by Alembic. 
-revision = '3bb4a941639c' -down_revision = '2b288dc444f' +revision = "3bb4a941639c" +down_revision = "2b288dc444f" from alembic import op def upgrade(): - op.create_index('namespace_id_created_at', 'transaction', - ['namespace_id', 'created_at'], unique=False) + op.create_index( + "namespace_id_created_at", + "transaction", + ["namespace_id", "created_at"], + unique=False, + ) def downgrade(): - op.drop_index('namespace_id_created_at', table_name='transaction') + op.drop_index("namespace_id_created_at", table_name="transaction") diff --git a/migrations/versions/132_add_cascade_delete_part_block_id.py b/migrations/versions/132_add_cascade_delete_part_block_id.py index 3019cea56..759064172 100644 --- a/migrations/versions/132_add_cascade_delete_part_block_id.py +++ b/migrations/versions/132_add_cascade_delete_part_block_id.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '1d93c9f9f506' -down_revision = '3bb4a941639c' +revision = "1d93c9f9f506" +down_revision = "3bb4a941639c" from alembic import op @@ -16,10 +16,10 @@ def upgrade(): connection = op.get_bind() connection.execute( - ''' + """ ALTER TABLE part DROP FOREIGN KEY part_ibfk_1; ALTER TABLE part ADD CONSTRAINT part_ibfk_1 FOREIGN KEY (block_id) REFERENCES block(id) ON DELETE CASCADE; - ''' + """ ) diff --git a/migrations/versions/133_add_unique_account_constraint.py b/migrations/versions/133_add_unique_account_constraint.py index 4a20e89d2..d1a59262c 100644 --- a/migrations/versions/133_add_unique_account_constraint.py +++ b/migrations/versions/133_add_unique_account_constraint.py @@ -7,16 +7,17 @@ """ # revision identifiers, used by Alembic. -revision = '13faec74da45' -down_revision = '1d93c9f9f506' +revision = "13faec74da45" +down_revision = "1d93c9f9f506" from alembic import op def upgrade(): - op.create_unique_constraint('unique_account_address', 'account', - ['_canonicalized_address']) + op.create_unique_constraint( + "unique_account_address", "account", ["_canonicalized_address"] + ) def downgrade(): - op.drop_constraint('unique_account_address', 'account', type_='unique') + op.drop_constraint("unique_account_address", "account", type_="unique") diff --git a/migrations/versions/134_add_message_index.py b/migrations/versions/134_add_message_index.py index eb13b0739..2b33ea62f 100644 --- a/migrations/versions/134_add_message_index.py +++ b/migrations/versions/134_add_message_index.py @@ -7,16 +7,20 @@ """ # revision identifiers, used by Alembic. -revision = '4270a032b943' -down_revision = '13faec74da45' +revision = "4270a032b943" +down_revision = "13faec74da45" from alembic import op def upgrade(): - op.create_index('ix_message_namespace_id_deleted_at', 'message', - ['namespace_id', 'deleted_at'], unique=False) + op.create_index( + "ix_message_namespace_id_deleted_at", + "message", + ["namespace_id", "deleted_at"], + unique=False, + ) def downgrade(): - op.drop_index('ix_message_namespace_id_deleted_at', table_name='message') + op.drop_index("ix_message_namespace_id_deleted_at", table_name="message") diff --git a/migrations/versions/135_add_thread_tag_index_to_tagitem.py b/migrations/versions/135_add_thread_tag_index_to_tagitem.py index ea84ee550..a8b1d396d 100644 --- a/migrations/versions/135_add_thread_tag_index_to_tagitem.py +++ b/migrations/versions/135_add_thread_tag_index_to_tagitem.py @@ -7,15 +7,15 @@ """ # revision identifiers, used by Alembic. 
-revision = '4ee8aab06ee' -down_revision = '4270a032b943' +revision = "4ee8aab06ee" +down_revision = "4270a032b943" from alembic import op def upgrade(): - op.create_index('tag_thread_ids', 'tagitem', ['thread_id', 'tag_id']) + op.create_index("tag_thread_ids", "tagitem", ["thread_id", "tag_id"]) def downgrade(): - op.drop_index('tag_thread_ids', table_name='tagitem') + op.drop_index("tag_thread_ids", table_name="tagitem") diff --git a/migrations/versions/136_add_actionlog_index.py b/migrations/versions/136_add_actionlog_index.py index 24fdcd577..a61a17f15 100644 --- a/migrations/versions/136_add_actionlog_index.py +++ b/migrations/versions/136_add_actionlog_index.py @@ -7,16 +7,17 @@ """ # revision identifiers, used by Alembic. -revision = '39fa82d3168e' -down_revision = '4ee8aab06ee' +revision = "39fa82d3168e" +down_revision = "4ee8aab06ee" from alembic import op def upgrade(): - op.create_index('ix_actionlog_status_retries', 'actionlog', - ['status', 'retries'], unique=False) + op.create_index( + "ix_actionlog_status_retries", "actionlog", ["status", "retries"], unique=False + ) def downgrade(): - op.drop_index('ix_actionlog_status_retries', table_name='actionlog') + op.drop_index("ix_actionlog_status_retries", table_name="actionlog") diff --git a/migrations/versions/137_add_versions.py b/migrations/versions/137_add_versions.py index 24bb9050c..eeebc9267 100644 --- a/migrations/versions/137_add_versions.py +++ b/migrations/versions/137_add_versions.py @@ -7,22 +7,25 @@ """ # revision identifiers, used by Alembic. -revision = '1f746c93e8fd' -down_revision = '39fa82d3168e' +revision = "1f746c93e8fd" +down_revision = "39fa82d3168e" from alembic import op import sqlalchemy as sa def upgrade(): - op.drop_column('message', 'version') - op.add_column('message', sa.Column('version', sa.Integer(), - server_default='0', nullable=False)) - op.add_column('thread', sa.Column('version', sa.Integer(), - server_default='0', nullable=True)) + op.drop_column("message", "version") + op.add_column( + "message", + sa.Column("version", sa.Integer(), server_default="0", nullable=False), + ) + op.add_column( + "thread", sa.Column("version", sa.Integer(), server_default="0", nullable=True) + ) def downgrade(): - op.drop_column('message', 'version') - op.add_column('message', sa.Column('version', sa.BINARY(), nullable=True)) - op.drop_column('thread', 'version') + op.drop_column("message", "version") + op.add_column("message", sa.Column("version", sa.BINARY(), nullable=True)) + op.drop_column("thread", "version") diff --git a/migrations/versions/138_add_participants_column.py b/migrations/versions/138_add_participants_column.py index 4aa1e8554..61e672e21 100644 --- a/migrations/versions/138_add_participants_column.py +++ b/migrations/versions/138_add_participants_column.py @@ -9,8 +9,8 @@ from sqlalchemy.sql import text # revision identifiers, used by Alembic. -revision = '5305d4ae30b4' -down_revision = '1f746c93e8fd' +revision = "5305d4ae30b4" +down_revision = "1f746c93e8fd" def upgrade(): diff --git a/migrations/versions/139_add_ns_index_to_contact_and_event.py b/migrations/versions/139_add_ns_index_to_contact_and_event.py index 4ab036544..c1752f4da 100644 --- a/migrations/versions/139_add_ns_index_to_contact_and_event.py +++ b/migrations/versions/139_add_ns_index_to_contact_and_event.py @@ -7,22 +7,26 @@ """ # revision identifiers, used by Alembic. 
-revision = '1fd7b3e0b662' -down_revision = '5305d4ae30b4' +revision = "1fd7b3e0b662" +down_revision = "5305d4ae30b4" from alembic import op def upgrade(): op.create_index( - 'ix_contact_ns_uid_provider_name', - 'contact', - ['namespace_id', 'uid', 'provider_name'], unique=False) + "ix_contact_ns_uid_provider_name", + "contact", + ["namespace_id", "uid", "provider_name"], + unique=False, + ) op.create_index( - 'ix_event_ns_uid_provider_name', - 'event', - ['namespace_id', 'uid', 'provider_name'], unique=False) + "ix_event_ns_uid_provider_name", + "event", + ["namespace_id", "uid", "provider_name"], + unique=False, + ) def downgrade(): diff --git a/migrations/versions/140_relax_participants_by_email_constraint.py b/migrations/versions/140_relax_participants_by_email_constraint.py index 9b96c36db..4b5a042d5 100644 --- a/migrations/versions/140_relax_participants_by_email_constraint.py +++ b/migrations/versions/140_relax_participants_by_email_constraint.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '3f01a3f1b4cc' -down_revision = '1fd7b3e0b662' +revision = "3f01a3f1b4cc" +down_revision = "1fd7b3e0b662" from alembic import op from sqlalchemy.sql import text diff --git a/migrations/versions/141_remote_remote_contacts.py b/migrations/versions/141_remote_remote_contacts.py index e4abdef05..3dd55fb0e 100644 --- a/migrations/versions/141_remote_remote_contacts.py +++ b/migrations/versions/141_remote_remote_contacts.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '3ab34bc85c8d' -down_revision = '3f01a3f1b4cc' +revision = "3ab34bc85c8d" +down_revision = "3f01a3f1b4cc" from alembic import op from sqlalchemy.ext.declarative import declarative_base @@ -17,19 +17,20 @@ def upgrade(): from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) class Contact_Old(Base): - __table__ = Base.metadata.tables['contact'] + __table__ = Base.metadata.tables["contact"] # Delete the "remote" contacts. This is just a server cache for comparing # any changes, now handled by the previous "local" contacts with session_scope() as db_session: - db_session.query(Contact_Old).filter_by(source='remote').delete() + db_session.query(Contact_Old).filter_by(source="remote").delete() - op.drop_column('contact', 'source') + op.drop_column("contact", "source") def downgrade(): diff --git a/migrations/versions/142_add_sync_run_bit.py b/migrations/versions/142_add_sync_run_bit.py index 23e5ffd70..ee5fc5040 100644 --- a/migrations/versions/142_add_sync_run_bit.py +++ b/migrations/versions/142_add_sync_run_bit.py @@ -7,17 +7,19 @@ """ # revision identifiers, used by Alembic. 
-revision = '2d8a350b4885' -down_revision = '3ab34bc85c8d' +revision = "2d8a350b4885" +down_revision = "3ab34bc85c8d" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('account', sa.Column('sync_should_run', sa.Boolean(), - server_default='1', nullable=True)) + op.add_column( + "account", + sa.Column("sync_should_run", sa.Boolean(), server_default="1", nullable=True), + ) def downgrade(): - op.drop_column('account', 'sync_should_run') + op.drop_column("account", "sync_should_run") diff --git a/migrations/versions/143_add_reply_to_message_id.py b/migrations/versions/143_add_reply_to_message_id.py index 909ba3814..9c4e69370 100644 --- a/migrations/versions/143_add_reply_to_message_id.py +++ b/migrations/versions/143_add_reply_to_message_id.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '1d7a72222b7c' -down_revision = '2d8a350b4885' +revision = "1d7a72222b7c" +down_revision = "2d8a350b4885" from alembic import op @@ -21,23 +21,35 @@ def upgrade(): # The constraint name might be `message_ibfk_2` or `message_ibfk_3` or # whatever, so figure out which it is first. constraint_name = conn.execute( - '''SELECT constraint_name FROM information_schema.key_column_usage + """SELECT constraint_name FROM information_schema.key_column_usage WHERE table_name='message' AND referenced_table_name='message' - AND constraint_schema=DATABASE()''').fetchone()[0] - conn.execute('ALTER TABLE message DROP FOREIGN KEY {}'.format(constraint_name)) - conn.execute('ALTER TABLE message CHANGE resolved_message_id reply_to_message_id INT(11)') - conn.execute('ALTER TABLE message ADD CONSTRAINT {} FOREIGN KEY (reply_to_message_id) REFERENCES message(id)'. - format(constraint_name)) + AND constraint_schema=DATABASE()""" + ).fetchone()[0] + conn.execute("ALTER TABLE message DROP FOREIGN KEY {}".format(constraint_name)) + conn.execute( + "ALTER TABLE message CHANGE resolved_message_id reply_to_message_id INT(11)" + ) + conn.execute( + "ALTER TABLE message ADD CONSTRAINT {} FOREIGN KEY (reply_to_message_id) REFERENCES message(id)".format( + constraint_name + ) + ) def downgrade(): conn = op.get_bind() constraint_name = conn.execute( - '''SELECT constraint_name FROM information_schema.key_column_usage + """SELECT constraint_name FROM information_schema.key_column_usage WHERE table_name='message' AND referenced_table_name='message' - AND constraint_schema=DATABASE()''').fetchone()[0] - conn.execute('ALTER TABLE message DROP FOREIGN KEY {}'.format(constraint_name)) - conn.execute('ALTER TABLE message DROP FOREIGN KEY message_ibfk_3') - conn.execute('ALTER TABLE message CHANGE reply_to_message_id resolved_message_id INT(11)') - conn.execute('ALTER TABLE message ADD CONSTRAINT {} FOREIGN KEY (resolved_message_id) REFERENCES message(id)'. 
- format(constraint_name)) + AND constraint_schema=DATABASE()""" + ).fetchone()[0] + conn.execute("ALTER TABLE message DROP FOREIGN KEY {}".format(constraint_name)) + conn.execute("ALTER TABLE message DROP FOREIGN KEY message_ibfk_3") + conn.execute( + "ALTER TABLE message CHANGE reply_to_message_id resolved_message_id INT(11)" + ) + conn.execute( + "ALTER TABLE message ADD CONSTRAINT {} FOREIGN KEY (resolved_message_id) REFERENCES message(id)".format( + constraint_name + ) + ) diff --git a/migrations/versions/144_update_calendar_index.py b/migrations/versions/144_update_calendar_index.py index 2791fd4cb..679ea0138 100644 --- a/migrations/versions/144_update_calendar_index.py +++ b/migrations/versions/144_update_calendar_index.py @@ -7,27 +7,32 @@ """ # revision identifiers, used by Alembic. -revision = '1c73ca99c03b' -down_revision = '1d7a72222b7c' +revision = "1c73ca99c03b" +down_revision = "1d7a72222b7c" from alembic import op def upgrade(): - op.drop_constraint('calendar_ibfk_1', 'calendar', type_='foreignkey') - op.drop_constraint('uuid', 'calendar', type_='unique') - op.create_index('uuid', 'calendar', - ['namespace_id', 'provider_name', 'name', 'uid'], unique=True) - op.create_foreign_key('calendar_ibfk_1', - 'calendar', 'namespace', - ['namespace_id'], ['id']) + op.drop_constraint("calendar_ibfk_1", "calendar", type_="foreignkey") + op.drop_constraint("uuid", "calendar", type_="unique") + op.create_index( + "uuid", + "calendar", + ["namespace_id", "provider_name", "name", "uid"], + unique=True, + ) + op.create_foreign_key( + "calendar_ibfk_1", "calendar", "namespace", ["namespace_id"], ["id"] + ) def downgrade(): - op.drop_constraint('calendar_ibfk_1', 'calendar', type_='foreignkey') - op.drop_constraint('uuid', 'calendar', type_='unique') - op.create_index('uuid', 'calendar', - ['namespace_id', 'provider_name', 'name'], unique=True) - op.create_foreign_key('calendar_ibfk_1', - 'calendar', 'namespace', - ['namespace_id'], ['id']) + op.drop_constraint("calendar_ibfk_1", "calendar", type_="foreignkey") + op.drop_constraint("uuid", "calendar", type_="unique") + op.create_index( + "uuid", "calendar", ["namespace_id", "provider_name", "name"], unique=True + ) + op.create_foreign_key( + "calendar_ibfk_1", "calendar", "namespace", ["namespace_id"], ["id"] + ) diff --git a/migrations/versions/145_drop_event_constraint.py b/migrations/versions/145_drop_event_constraint.py index 89677198b..85d9e16ae 100644 --- a/migrations/versions/145_drop_event_constraint.py +++ b/migrations/versions/145_drop_event_constraint.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '557378226d9f' -down_revision = '1c73ca99c03b' +revision = "557378226d9f" +down_revision = "1c73ca99c03b" from alembic import op @@ -16,13 +16,15 @@ def upgrade(): conn = op.get_bind() conn.execute( - '''ALTER TABLE event DROP INDEX uuid, + """ALTER TABLE event DROP INDEX uuid, DROP INDEX ix_event_ns_uid_provider_name, - ADD INDEX ix_event_ns_uid_calendar_id (namespace_id, uid, calendar_id);''') + ADD INDEX ix_event_ns_uid_calendar_id (namespace_id, uid, calendar_id);""" + ) conn.execute( - '''ALTER TABLE calendar DROP INDEX uuid, - ADD UNIQUE INDEX uuid (namespace_id, uid, name);''') + """ALTER TABLE calendar DROP INDEX uuid, + ADD UNIQUE INDEX uuid (namespace_id, uid, name);""" + ) def downgrade(): diff --git a/migrations/versions/146_update_google_calendar_uids.py b/migrations/versions/146_update_google_calendar_uids.py index 1ec2b9e27..1ef9ce62e 100644 --- a/migrations/versions/146_update_google_calendar_uids.py +++ b/migrations/versions/146_update_google_calendar_uids.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = 'c77a90d524' -down_revision = '557378226d9f' +revision = "c77a90d524" +down_revision = "557378226d9f" from alembic import op @@ -16,9 +16,10 @@ def upgrade(): conn = op.get_bind() conn.execute( - '''UPDATE calendar JOIN namespace ON calendar.namespace_id=namespace.id + """UPDATE calendar JOIN namespace ON calendar.namespace_id=namespace.id JOIN gmailaccount ON namespace.account_id=gmailaccount.id SET - calendar.uid=calendar.name''') + calendar.uid=calendar.name""" + ) def downgrade(): diff --git a/migrations/versions/147_add_cleaned_subject.py b/migrations/versions/147_add_cleaned_subject.py index 444233302..ced6844f1 100644 --- a/migrations/versions/147_add_cleaned_subject.py +++ b/migrations/versions/147_add_cleaned_subject.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '486c7fa5b533' -down_revision = 'c77a90d524' +revision = "486c7fa5b533" +down_revision = "c77a90d524" from alembic import op import sqlalchemy as sa @@ -19,15 +19,21 @@ def upgrade(): conn = op.get_bind() conn.execute(text("set @@lock_wait_timeout = 20;")) - op.add_column('thread', sa.Column('_cleaned_subject', - sa.String(length=255), nullable=True)) - op.create_index('ix_cleaned_subject', 'thread', ['_cleaned_subject'], - unique=False, mysql_length=191) + op.add_column( + "thread", sa.Column("_cleaned_subject", sa.String(length=255), nullable=True) + ) + op.create_index( + "ix_cleaned_subject", + "thread", + ["_cleaned_subject"], + unique=False, + mysql_length=191, + ) def downgrade(): conn = op.get_bind() conn.execute(text("set @@lock_wait_timeout = 20;")) - op.drop_index('ix_cleaned_subject', table_name='thread') - op.drop_column('thread', '_cleaned_subject') + op.drop_index("ix_cleaned_subject", table_name="thread") + op.drop_column("thread", "_cleaned_subject") diff --git a/migrations/versions/148_add_last_modified_column_for_events.py b/migrations/versions/148_add_last_modified_column_for_events.py index 872aba190..0efbca182 100644 --- a/migrations/versions/148_add_last_modified_column_for_events.py +++ b/migrations/versions/148_add_last_modified_column_for_events.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '54dcea22a268' -down_revision = '486c7fa5b533' +revision = "54dcea22a268" +down_revision = "486c7fa5b533" from alembic import op from sqlalchemy.sql import text diff --git a/migrations/versions/149_add_emailed_events_calendar.py b/migrations/versions/149_add_emailed_events_calendar.py index 5116efd75..f95e73816 100644 --- a/migrations/versions/149_add_emailed_events_calendar.py +++ b/migrations/versions/149_add_emailed_events_calendar.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '2493281d621' -down_revision = '54dcea22a268' +revision = "2493281d621" +down_revision = "54dcea22a268" from alembic import op from sqlalchemy.sql import text @@ -22,10 +22,18 @@ def upgrade(): # Orphan all the default calendars. We need to do it because we are going to # replace them with "Email events" calendars. conn.execute(text("UPDATE account SET default_calendar_id = NULL")) - conn.execute(text("ALTER TABLE account CHANGE default_calendar_id emailed_events_calendar_id INTEGER")) - conn.execute(text("ALTER TABLE account ADD CONSTRAINT emailed_events_fk " - "FOREIGN KEY (emailed_events_calendar_id) REFERENCES " - "calendar(id) ON DELETE SET NULL")) + conn.execute( + text( + "ALTER TABLE account CHANGE default_calendar_id emailed_events_calendar_id INTEGER" + ) + ) + conn.execute( + text( + "ALTER TABLE account ADD CONSTRAINT emailed_events_fk " + "FOREIGN KEY (emailed_events_calendar_id) REFERENCES " + "calendar(id) ON DELETE SET NULL" + ) + ) def downgrade(): @@ -33,7 +41,15 @@ def downgrade(): conn.execute(text("SET FOREIGN_KEY_CHECKS=0;")) conn.execute(text("set @@lock_wait_timeout = 20;")) conn.execute(text("ALTER TABLE account DROP FOREIGN KEY emailed_events_fk")) - conn.execute(text("ALTER TABLE account CHANGE COLUMN emailed_events_calendar_id default_calendar_id INTEGER")) - conn.execute(text("ALTER TABLE account ADD CONSTRAINT account_ibfk_10 " - "FOREIGN KEY (default_calendar_id) REFERENCES " - "calendar(id) ON DELETE SET NULL")) + conn.execute( + text( + "ALTER TABLE account CHANGE COLUMN emailed_events_calendar_id default_calendar_id INTEGER" + ) + ) + conn.execute( + text( + "ALTER TABLE account ADD CONSTRAINT account_ibfk_10 " + "FOREIGN KEY (default_calendar_id) REFERENCES " + "calendar(id) ON DELETE SET NULL" + ) + ) diff --git a/migrations/versions/150_add_polymorphic_events.py b/migrations/versions/150_add_polymorphic_events.py index 7f1c88d16..d35f11e6b 100644 --- a/migrations/versions/150_add_polymorphic_events.py +++ b/migrations/versions/150_add_polymorphic_events.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '1de526a15c5d' -down_revision = '2493281d621' +revision = "1de526a15c5d" +down_revision = "2493281d621" import json import ast @@ -19,46 +19,50 @@ def upgrade(): op.create_table( - 'recurringeventoverride', - sa.Column('id', sa.Integer(), nullable=False), + "recurringeventoverride", + sa.Column("id", sa.Integer(), nullable=False), # These have to be nullable so we can do the type conversion - sa.Column('master_event_id', sa.Integer(), nullable=True), - sa.Column('master_event_uid', sa.String( - length=767, collation='ascii_general_ci'), nullable=True), - sa.Column('original_start_time', sa.DateTime(), nullable=True), - sa.Column('cancelled', sa.Boolean(), default=False), - sa.ForeignKeyConstraint(['id'], ['event.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['master_event_id'], ['event.id'], ), - sa.PrimaryKeyConstraint('id') + sa.Column("master_event_id", sa.Integer(), nullable=True), + sa.Column( + "master_event_uid", + sa.String(length=767, collation="ascii_general_ci"), + nullable=True, + ), + sa.Column("original_start_time", sa.DateTime(), nullable=True), + sa.Column("cancelled", sa.Boolean(), default=False), + sa.ForeignKeyConstraint(["id"], ["event.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["master_event_id"], ["event.id"],), + sa.PrimaryKeyConstraint("id"), ) op.create_table( - 'recurringevent', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('rrule', sa.String(length=255), nullable=True), - sa.Column('exdate', sa.Text(), nullable=True), - sa.Column('until', sa.DateTime(), nullable=True), - sa.Column('start_timezone', sa.String(35), nullable=True), - sa.ForeignKeyConstraint(['id'], ['event.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') + "recurringevent", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("rrule", sa.String(length=255), nullable=True), + sa.Column("exdate", sa.Text(), nullable=True), + sa.Column("until", sa.DateTime(), nullable=True), + sa.Column("start_timezone", sa.String(35), nullable=True), + sa.ForeignKeyConstraint(["id"], ["event.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), ) - op.add_column(u'event', sa.Column('type', sa.String(length=30), - nullable=True)) - op.create_index('ix_recurringeventoverride_master_event_uid', - 'recurringeventoverride', ['master_event_uid'], - unique=False) - op.alter_column(u'event', 'recurrence', type_=sa.Text()) + op.add_column(u"event", sa.Column("type", sa.String(length=30), nullable=True)) + op.create_index( + "ix_recurringeventoverride_master_event_uid", + "recurringeventoverride", + ["master_event_uid"], + unique=False, + ) + op.alter_column(u"event", "recurrence", type_=sa.Text()) def downgrade(): - op.drop_column(u'event', 'type') - op.drop_table('recurringevent') - op.drop_table('recurringeventoverride') + op.drop_column(u"event", "type") + op.drop_table("recurringevent") + op.drop_table("recurringeventoverride") def populate(): # Populate new classes from the existing data - from inbox.models.event import (Event, RecurringEvent, - RecurringEventOverride) + from inbox.models.event import Event, RecurringEvent, RecurringEventOverride from inbox.models.session import session_scope from inbox.events.util import parse_datetime from inbox.events.recurring import link_events @@ -66,8 +70,7 @@ def populate(): with session_scope() as db: # Redo recurrence rule population, since we extended the column length print "Repopulating max-length recurrences...", - for e in db.query(Event).filter( - sa.func.length(Event.recurrence) > 250): + for e in 
db.query(Event).filter(sa.func.length(Event.recurrence) > 250): try: raw_data = json.loads(e.raw_data) except: @@ -76,7 +79,7 @@ def populate(): except: print "Could not load raw data for event {}".format(e.id) continue - e.recurrence = raw_data['recurrence'] + e.recurrence = raw_data["recurrence"] db.commit() print "done." @@ -110,17 +113,17 @@ def populate(): except: print "Could not load raw data for event {}".format(e.id) continue - rec_uid = raw_data.get('recurringEventId') + rec_uid = raw_data.get("recurringEventId") if rec_uid: e.master_event_uid = rec_uid - ost = raw_data.get('originalStartTime') + ost = raw_data.get("originalStartTime") if ost: # this is a dictionary with one value start_time = ost.values().pop() e.original_start_time = parse_datetime(start_time) # attempt to get the ID for the event, if we can, and # set the relationship appropriately - if raw_data.get('status') == 'cancelled': + if raw_data.get("status") == "cancelled": e.cancelled = True link_events(db, e) c += 1 @@ -161,7 +164,7 @@ def populate(): except: print "Could not load raw data for event {}".format(r.id) continue - r.start_timezone = raw_data['start'].get('timeZone') + r.start_timezone = raw_data["start"].get("timeZone") # find any un-found overrides that didn't have masters earlier link_events(db, r) db.add(r) diff --git a/migrations/versions/151_remove_message_thread_order.py b/migrations/versions/151_remove_message_thread_order.py index b31a52dea..ab0ff86a2 100644 --- a/migrations/versions/151_remove_message_thread_order.py +++ b/migrations/versions/151_remove_message_thread_order.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '2f3c8fa3fc3a' -down_revision = '1de526a15c5d' +revision = "2f3c8fa3fc3a" +down_revision = "1de526a15c5d" from alembic import op import sqlalchemy as sa @@ -16,10 +16,11 @@ def upgrade(): - op.drop_column('message', 'thread_order') + op.drop_column("message", "thread_order") def downgrade(): - op.add_column('message', sa.Column('thread_order', - mysql.INTEGER(display_width=11), - nullable=False)) + op.add_column( + "message", + sa.Column("thread_order", mysql.INTEGER(display_width=11), nullable=False), + ) diff --git a/migrations/versions/152_add_message_id_to_event.py b/migrations/versions/152_add_message_id_to_event.py index 282c59d63..e51d2310c 100644 --- a/migrations/versions/152_add_message_id_to_event.py +++ b/migrations/versions/152_add_message_id_to_event.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '211e93aff1e1' -down_revision = '2f3c8fa3fc3a' +revision = "211e93aff1e1" +down_revision = "2f3c8fa3fc3a" from alembic import op from sqlalchemy.sql import text @@ -18,8 +18,12 @@ def upgrade(): conn = op.get_bind() conn.execute(text("SET FOREIGN_KEY_CHECKS=0;")) conn.execute(text("ALTER TABLE event ADD COLUMN message_id int(11) DEFAULT NULL")) - conn.execute(text("ALTER TABLE event ADD CONSTRAINT message_ifbk FOREIGN KEY " - "(`message_id`) REFERENCES `message` (`id`) ON DELETE CASCADE")) + conn.execute( + text( + "ALTER TABLE event ADD CONSTRAINT message_ifbk FOREIGN KEY " + "(`message_id`) REFERENCES `message` (`id`) ON DELETE CASCADE" + ) + ) def downgrade(): diff --git a/migrations/versions/153_revert_account_unique_constraint.py b/migrations/versions/153_revert_account_unique_constraint.py index 6ec6d4882..aa94c0625 100644 --- a/migrations/versions/153_revert_account_unique_constraint.py +++ b/migrations/versions/153_revert_account_unique_constraint.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '4032709362da' -down_revision = '211e93aff1e1' +revision = "4032709362da" +down_revision = "211e93aff1e1" from alembic import op @@ -18,13 +18,19 @@ def upgrade(): index_name = conn.execute( '''SELECT index_name FROM information_schema.statistics WHERE table_name="account" AND non_unique=0 AND - column_name="_canonicalized_address"''').fetchone()[0] - op.drop_constraint(index_name, 'account', type_='unique') - if index_name == 'ix_account__canonicalized_address': - op.create_index('ix_account__canonicalized_address', 'account', - ['_canonicalized_address'], unique=False) + column_name="_canonicalized_address"''' + ).fetchone()[0] + op.drop_constraint(index_name, "account", type_="unique") + if index_name == "ix_account__canonicalized_address": + op.create_index( + "ix_account__canonicalized_address", + "account", + ["_canonicalized_address"], + unique=False, + ) def downgrade(): - op.create_unique_constraint('unique_account_address', 'account', - ['_canonicalized_address']) + op.create_unique_constraint( + "unique_account_address", "account", ["_canonicalized_address"] + ) diff --git a/migrations/versions/154_add_message_indices.py b/migrations/versions/154_add_message_indices.py index 860da4c4e..cea923cc1 100644 --- a/migrations/versions/154_add_message_indices.py +++ b/migrations/versions/154_add_message_indices.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '1f06c15ae796' -down_revision = '4032709362da' +revision = "1f06c15ae796" +down_revision = "4032709362da" from alembic import op @@ -18,20 +18,24 @@ def upgrade(): data_sha256_index_exists = conn.execute( '''SELECT COUNT(*) FROM information_schema.statistics WHERE table_name="message" AND - column_name="data_sha256"''').fetchone()[0] + column_name="data_sha256"''' + ).fetchone()[0] if not data_sha256_index_exists: conn.execute( - '''ALTER TABLE message - ADD INDEX `ix_message_data_sha256` (`data_sha256`(191))''') + """ALTER TABLE message + ADD INDEX `ix_message_data_sha256` (`data_sha256`(191))""" + ) received_date_index_exists = conn.execute( '''SELECT COUNT(*) FROM information_schema.statistics WHERE table_name="message" AND - column_name="received_date"''').fetchone()[0] + column_name="received_date"''' + ).fetchone()[0] if not received_date_index_exists: conn.execute( - '''ALTER TABLE message - ADD INDEX `ix_message_received_date` (`received_date`)''') + """ALTER TABLE message + ADD INDEX `ix_message_received_date` (`received_date`)""" + ) def downgrade(): diff --git a/migrations/versions/155_add_status_column.py b/migrations/versions/155_add_status_column.py index ca9fd79ac..e1de20093 100644 --- a/migrations/versions/155_add_status_column.py +++ b/migrations/versions/155_add_status_column.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '7de8a6ce8cd' -down_revision = '1f06c15ae796' +revision = "7de8a6ce8cd" +down_revision = "1f06c15ae796" from alembic import op from sqlalchemy.sql import text @@ -17,12 +17,20 @@ def upgrade(): conn = op.get_bind() conn.execute(text("set @@lock_wait_timeout = 20;")) - conn.execute(text("ALTER TABLE event ADD COLUMN `status` " - "enum('tentative','confirmed','cancelled') " - "DEFAULT 'confirmed'")) - conn.execute(text("UPDATE event JOIN recurringeventoverride ON " - "event.id = recurringeventoverride.id SET status = 'cancelled' " - "where recurringeventoverride.cancelled IS TRUE;")) + conn.execute( + text( + "ALTER TABLE event ADD COLUMN `status` " + "enum('tentative','confirmed','cancelled') " + "DEFAULT 'confirmed'" + ) + ) + conn.execute( + text( + "UPDATE event JOIN recurringeventoverride ON " + "event.id = recurringeventoverride.id SET status = 'cancelled' " + "where recurringeventoverride.cancelled IS TRUE;" + ) + ) def downgrade(): diff --git a/migrations/versions/156_drop_cancelled_column.py b/migrations/versions/156_drop_cancelled_column.py index 3444ac76e..e5854b0fc 100644 --- a/migrations/versions/156_drop_cancelled_column.py +++ b/migrations/versions/156_drop_cancelled_column.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '3c7f059a68ba' -down_revision = '7de8a6ce8cd' +revision = "3c7f059a68ba" +down_revision = "7de8a6ce8cd" from alembic import op from sqlalchemy.sql import text @@ -16,12 +16,12 @@ def upgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE recurringeventoverride " - "DROP COLUMN cancelled;")) + conn.execute(text("ALTER TABLE recurringeventoverride " "DROP COLUMN cancelled;")) def downgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE recurringeventoverride " - "ADD COLUMN cancelled tinyint(1);")) + conn.execute( + text("ALTER TABLE recurringeventoverride " "ADD COLUMN cancelled tinyint(1);") + ) print "\nNote that you'll have to reset calendar syncs." 
diff --git a/migrations/versions/157_update_eas_schema.py b/migrations/versions/157_update_eas_schema.py index fa6aaf7ef..339a5961b 100644 --- a/migrations/versions/157_update_eas_schema.py +++ b/migrations/versions/157_update_eas_schema.py @@ -7,65 +7,70 @@ """ # revision identifiers, used by Alembic. -revision = '18064f5205dd' -down_revision = '3c7f059a68ba' +revision = "18064f5205dd" +down_revision = "3c7f059a68ba" from alembic import op def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) # Do nothing if the affected table isn't present. - if not engine.has_table('easaccount'): + if not engine.has_table("easaccount"): return conn = op.get_bind() # Drop folder_id foreign key constraint from easfoldersyncstatus table folder_fks = conn.execute( - '''SELECT constraint_name FROM information_schema.key_column_usage + """SELECT constraint_name FROM information_schema.key_column_usage WHERE table_name='easfoldersyncstatus' AND referenced_table_name='folder' - AND constraint_schema=DATABASE()''').fetchall() - for folder_fk, in folder_fks: + AND constraint_schema=DATABASE()""" + ).fetchall() + for (folder_fk,) in folder_fks: conn.execute( - 'ALTER TABLE easfoldersyncstatus DROP FOREIGN KEY {}'.format( - folder_fk)) + "ALTER TABLE easfoldersyncstatus DROP FOREIGN KEY {}".format(folder_fk) + ) # Drop folder_id foreign key constraint from easuid table folder_fks = conn.execute( - '''SELECT constraint_name FROM information_schema.key_column_usage + """SELECT constraint_name FROM information_schema.key_column_usage WHERE table_name='easuid' AND referenced_table_name='folder' - AND constraint_schema=DATABASE()''').fetchall() - for folder_fk, in folder_fks: - conn.execute( - 'ALTER TABLE easuid DROP FOREIGN KEY {}'.format( - folder_fk)) + AND constraint_schema=DATABASE()""" + ).fetchall() + for (folder_fk,) in folder_fks: + conn.execute("ALTER TABLE easuid DROP FOREIGN KEY {}".format(folder_fk)) # Add new index on easuid table conn.execute( - '''ALTER TABLE easuid ADD UNIQUE INDEX easaccount_id - (easaccount_id, device_id, fld_uid, msg_uid)''') + """ALTER TABLE easuid ADD UNIQUE INDEX easaccount_id + (easaccount_id, device_id, fld_uid, msg_uid)""" + ) # Drop deprecated indices - conn.execute('''ALTER TABLE easuid DROP INDEX folder_id, - DROP INDEX easuid_easaccount_id_folder_id''') conn.execute( - '''ALTER TABLE easfoldersyncstatus DROP INDEX account_id''') + """ALTER TABLE easuid DROP INDEX folder_id, + DROP INDEX easuid_easaccount_id_folder_id""" + ) + conn.execute("""ALTER TABLE easfoldersyncstatus DROP INDEX account_id""") # Make folder_id columns nullable so that we don't have to populate them. 
conn.execute( - '''ALTER TABLE easfoldersyncstatus CHANGE folder_id folder_id int(11) - DEFAULT NULL''') + """ALTER TABLE easfoldersyncstatus CHANGE folder_id folder_id int(11) + DEFAULT NULL""" + ) conn.execute( - '''ALTER TABLE easuid CHANGE folder_id folder_id int(11) - DEFAULT NULL''') + """ALTER TABLE easuid CHANGE folder_id folder_id int(11) + DEFAULT NULL""" + ) # Add references to folder syncs for canonical folders conn.execute( - '''ALTER TABLE easdevice + """ALTER TABLE easdevice ADD COLUMN archive_foldersync_id int(11) DEFAULT NULL, ADD COLUMN inbox_foldersync_id int(11) DEFAULT NULL, ADD COLUMN sent_foldersync_id int(11) DEFAULT NULL, @@ -77,18 +82,21 @@ def upgrade(): ADD FOREIGN KEY sent_foldersync_ibfk (sent_foldersync_id) REFERENCES easfoldersyncstatus (id) ON DELETE SET NULL, ADD FOREIGN KEY trash_foldersync_ibfk (trash_foldersync_id) - REFERENCES easfoldersyncstatus (id) ON DELETE SET NULL''') + REFERENCES easfoldersyncstatus (id) ON DELETE SET NULL""" + ) # Add name, canonical_name columns conn.execute( - '''ALTER TABLE easfoldersyncstatus + """ALTER TABLE easfoldersyncstatus ADD COLUMN name varchar(191) DEFAULT NULL, - ADD COLUMN canonical_name varchar(191) DEFAULT NULL''') + ADD COLUMN canonical_name varchar(191) DEFAULT NULL""" + ) # Set server-side default for deprecated is_draft column conn.execute( - '''ALTER TABLE easuid CHANGE is_draft is_draft tinyint(1) default 0 - NOT NULL''') + """ALTER TABLE easuid CHANGE is_draft is_draft tinyint(1) default 0 + NOT NULL""" + ) def downgrade(): diff --git a/migrations/versions/158_update_eas_schema_part_2.py b/migrations/versions/158_update_eas_schema_part_2.py index 0da09e8e3..db8fca4f0 100644 --- a/migrations/versions/158_update_eas_schema_part_2.py +++ b/migrations/versions/158_update_eas_schema_part_2.py @@ -7,49 +7,55 @@ """ # revision identifiers, used by Alembic. -revision = '5aa3f27457c' -down_revision = '18064f5205dd' +revision = "5aa3f27457c" +down_revision = "18064f5205dd" from alembic import op def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) # Do nothing if the affected table isn't present. - if not engine.has_table('easaccount'): + if not engine.has_table("easaccount"): return conn = op.get_bind() # Populate new easfoldersyncstatus columns. This should be run offline # (stop-the-world). conn.execute( - '''UPDATE easfoldersyncstatus JOIN folder ON + """UPDATE easfoldersyncstatus JOIN folder ON easfoldersyncstatus.folder_id=folder.id SET easfoldersyncstatus.name=folder.name, - easfoldersyncstatus.canonical_name=folder.canonical_name''') + easfoldersyncstatus.canonical_name=folder.canonical_name""" + ) # Populate references to canonical foldersyncs. 
conn.execute( - '''UPDATE easdevice JOIN easfoldersyncstatus ON + """UPDATE easdevice JOIN easfoldersyncstatus ON easdevice.id=easfoldersyncstatus.device_id AND easfoldersyncstatus.eas_folder_type='2' - SET easdevice.inbox_foldersync_id=easfoldersyncstatus.id''') + SET easdevice.inbox_foldersync_id=easfoldersyncstatus.id""" + ) conn.execute( - '''UPDATE easdevice JOIN easfoldersyncstatus ON + """UPDATE easdevice JOIN easfoldersyncstatus ON easdevice.id=easfoldersyncstatus.device_id AND easfoldersyncstatus.eas_folder_type='4' - SET easdevice.trash_foldersync_id=easfoldersyncstatus.id''') + SET easdevice.trash_foldersync_id=easfoldersyncstatus.id""" + ) conn.execute( - '''UPDATE easdevice JOIN easfoldersyncstatus ON + """UPDATE easdevice JOIN easfoldersyncstatus ON easdevice.id=easfoldersyncstatus.device_id AND easfoldersyncstatus.eas_folder_type='5' - SET easdevice.sent_foldersync_id=easfoldersyncstatus.id''') + SET easdevice.sent_foldersync_id=easfoldersyncstatus.id""" + ) conn.execute( - '''UPDATE easdevice JOIN easfoldersyncstatus ON + """UPDATE easdevice JOIN easfoldersyncstatus ON easdevice.id=easfoldersyncstatus.device_id AND easfoldersyncstatus.canonical_name='archive' - SET easdevice.archive_foldersync_id=easfoldersyncstatus.id''') + SET easdevice.archive_foldersync_id=easfoldersyncstatus.id""" + ) def downgrade(): diff --git a/migrations/versions/159_update_eas_schema_part_3.py b/migrations/versions/159_update_eas_schema_part_3.py index 895646b37..8fc2d96ba 100644 --- a/migrations/versions/159_update_eas_schema_part_3.py +++ b/migrations/versions/159_update_eas_schema_part_3.py @@ -7,24 +7,27 @@ """ # revision identifiers, used by Alembic. -revision = '4e6eedda36af' -down_revision = '5aa3f27457c' +revision = "4e6eedda36af" +down_revision = "5aa3f27457c" from alembic import op def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) # Do nothing if the affected table isn't present. - if not engine.has_table('easaccount'): + if not engine.has_table("easaccount"): return conn = op.get_bind() - conn.execute('''ALTER TABLE easfoldersyncstatus - CHANGE name name varchar(191) NOT NULL''') - conn.execute('''ALTER TABLE easuid DROP COLUMN folder_id''') - conn.execute('''ALTER TABLE easfoldersyncstatus DROP COLUMN folder_id''') + conn.execute( + """ALTER TABLE easfoldersyncstatus + CHANGE name name varchar(191) NOT NULL""" + ) + conn.execute("""ALTER TABLE easuid DROP COLUMN folder_id""") + conn.execute("""ALTER TABLE easfoldersyncstatus DROP COLUMN folder_id""") def downgrade(): diff --git a/migrations/versions/160_split_actionlog.py b/migrations/versions/160_split_actionlog.py index b7dc4fc4c..b18f49f43 100644 --- a/migrations/versions/160_split_actionlog.py +++ b/migrations/versions/160_split_actionlog.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '182f2b40fa36' -down_revision = '4e6eedda36af' +revision = "182f2b40fa36" +down_revision = "4e6eedda36af" from alembic import op import sqlalchemy as sa @@ -18,48 +18,57 @@ def upgrade(): from inbox.ignition import main_engine - op.add_column('actionlog', sa.Column('type', sa.String(16))) + op.add_column("actionlog", sa.Column("type", sa.String(16))) # Update action_log entries from inbox.models import Namespace, Account, ActionLog from inbox.models.session import session_scope with session_scope() as db_session: - q = db_session.query(ActionLog).join(Namespace).join(Account).\ - filter(ActionLog.status == 'pending', - Account.discriminator != 'easaccount').\ - options(contains_eager(ActionLog.namespace, Namespace.account)) - - print 'Updating {} action_log entries'.format(q.count()) + q = ( + db_session.query(ActionLog) + .join(Namespace) + .join(Account) + .filter( + ActionLog.status == "pending", Account.discriminator != "easaccount" + ) + .options(contains_eager(ActionLog.namespace, Namespace.account)) + ) + + print "Updating {} action_log entries".format(q.count()) for a in q.all(): - a.type = 'actionlog' + a.type = "actionlog" db_session.commit() engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easaccount'): + if not engine.has_table("easaccount"): return - op.create_table('easactionlog', - sa.Column('id', sa.Integer()), - sa.Column('secondary_status', - sa.Enum('pending', 'successful', 'failed'), - server_default='pending'), - sa.Column('secondary_retries', sa.Integer(), - nullable=False, server_default='0'), - sa.PrimaryKeyConstraint('id'), - sa.ForeignKeyConstraint(['id'], ['actionlog.id'], - ondelete='CASCADE')) + op.create_table( + "easactionlog", + sa.Column("id", sa.Integer()), + sa.Column( + "secondary_status", + sa.Enum("pending", "successful", "failed"), + server_default="pending", + ), + sa.Column( + "secondary_retries", sa.Integer(), nullable=False, server_default="0" + ), + sa.PrimaryKeyConstraint("id"), + sa.ForeignKeyConstraint(["id"], ["actionlog.id"], ondelete="CASCADE"), + ) def downgrade(): from inbox.ignition import main_engine - op.drop_column('actionlog', 'type') + op.drop_column("actionlog", "type") engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easaccount'): + if not engine.has_table("easaccount"): return - op.drop_table('easactionlog') + op.drop_table("easactionlog") diff --git a/migrations/versions/161_update_eas_schema_part_3_for_prod.py b/migrations/versions/161_update_eas_schema_part_3_for_prod.py index a60199513..221955f17 100644 --- a/migrations/versions/161_update_eas_schema_part_3_for_prod.py +++ b/migrations/versions/161_update_eas_schema_part_3_for_prod.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '365071c47fa7' -down_revision = '182f2b40fa36' +revision = "365071c47fa7" +down_revision = "182f2b40fa36" from alembic import op from sqlalchemy.schema import MetaData @@ -16,28 +16,31 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) # Check affected tables present - if not engine.has_table('easaccount'): + if not engine.has_table("easaccount"): return meta = MetaData() meta.reflect(bind=engine) - easuid = meta.tables['easuid'] + easuid = meta.tables["easuid"] # Check this migration hasn't run before - if 'folder_id' not in [c.name for c in easuid.columns]: - print 'This migration has been run, skipping.' 
+ if "folder_id" not in [c.name for c in easuid.columns]: + print "This migration has been run, skipping." return - print 'Running migration' + print "Running migration" conn = op.get_bind() - conn.execute('''ALTER TABLE easfoldersyncstatus - CHANGE name name varchar(191) NOT NULL''') - conn.execute('''ALTER TABLE easuid DROP COLUMN folder_id''') - conn.execute('''ALTER TABLE easfoldersyncstatus DROP COLUMN folder_id''') + conn.execute( + """ALTER TABLE easfoldersyncstatus + CHANGE name name varchar(191) NOT NULL""" + ) + conn.execute("""ALTER TABLE easuid DROP COLUMN folder_id""") + conn.execute("""ALTER TABLE easfoldersyncstatus DROP COLUMN folder_id""") def downgrade(): diff --git a/migrations/versions/162_update_folder_unique_constraint.py b/migrations/versions/162_update_folder_unique_constraint.py index 0a75430ef..e42ac0e5a 100644 --- a/migrations/versions/162_update_folder_unique_constraint.py +++ b/migrations/versions/162_update_folder_unique_constraint.py @@ -7,20 +7,19 @@ """ # revision identifiers, used by Alembic. -revision = '2235895f313b' -down_revision = '365071c47fa7' +revision = "2235895f313b" +down_revision = "365071c47fa7" from alembic import op def upgrade(): - op.create_unique_constraint('account_id_2', 'folder', - ['account_id', 'name']) - op.drop_constraint(u'account_id', 'folder', type_='unique') + op.create_unique_constraint("account_id_2", "folder", ["account_id", "name"]) + op.drop_constraint(u"account_id", "folder", type_="unique") def downgrade(): - op.create_unique_constraint(u'account_id', 'folder', - ['account_id', 'name', 'canonical_name', - 'identifier']) - op.drop_constraint('account_id_2', 'folder', type_='unique') + op.create_unique_constraint( + u"account_id", "folder", ["account_id", "name", "canonical_name", "identifier"] + ) + op.drop_constraint("account_id_2", "folder", type_="unique") diff --git a/migrations/versions/163_drop_transaction_snapshot.py b/migrations/versions/163_drop_transaction_snapshot.py index e23b28022..005b88ee0 100644 --- a/migrations/versions/163_drop_transaction_snapshot.py +++ b/migrations/versions/163_drop_transaction_snapshot.py @@ -7,14 +7,14 @@ """ # revision identifiers, used by Alembic. 
-revision = '457164360472' -down_revision = '2235895f313b' +revision = "457164360472" +down_revision = "2235895f313b" from alembic import op def upgrade(): - op.drop_column(u'transaction', 'snapshot') + op.drop_column(u"transaction", "snapshot") def downgrade(): diff --git a/migrations/versions/164_add_decode_error_index.py b/migrations/versions/164_add_decode_error_index.py index 6c31a93de..86707ad47 100644 --- a/migrations/versions/164_add_decode_error_index.py +++ b/migrations/versions/164_add_decode_error_index.py @@ -1,15 +1,16 @@ """add message.decode_error index """ -revision = '17dcbd7754e0' -down_revision = '457164360472' +revision = "17dcbd7754e0" +down_revision = "457164360472" from alembic import op def upgrade(): - op.create_index('ix_message_decode_error', 'message', - ['decode_error'], unique=False) + op.create_index( + "ix_message_decode_error", "message", ["decode_error"], unique=False + ) def downgrade(): - op.drop_index('ix_message_decode_error', table_name='message') + op.drop_index("ix_message_decode_error", table_name="message") diff --git a/migrations/versions/165_add_compacted_body.py b/migrations/versions/165_add_compacted_body.py index b2ce8ae84..306553729 100644 --- a/migrations/versions/165_add_compacted_body.py +++ b/migrations/versions/165_add_compacted_body.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '29698176aa8d' -down_revision = '17dcbd7754e0' +revision = "29698176aa8d" +down_revision = "17dcbd7754e0" from alembic import op import sqlalchemy as sa @@ -16,9 +16,10 @@ def upgrade(): - op.add_column(u'message', sa.Column('_compacted_body', mysql.LONGBLOB(), - nullable=True)) + op.add_column( + u"message", sa.Column("_compacted_body", mysql.LONGBLOB(), nullable=True) + ) def downgrade(): - op.drop_column(u'message', '_compacted_body') + op.drop_column(u"message", "_compacted_body") diff --git a/migrations/versions/166_migrate_body_format.py b/migrations/versions/166_migrate_body_format.py index c2a4a4f1c..b2c550d0d 100644 --- a/migrations/versions/166_migrate_body_format.py +++ b/migrations/versions/166_migrate_body_format.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '3d4f5741e1d7' -down_revision = '29698176aa8d' +revision = "3d4f5741e1d7" +down_revision = "29698176aa8d" import sqlalchemy as sa from sqlalchemy.ext.declarative import declarative_base @@ -22,25 +22,29 @@ def upgrade(): from inbox.ignition import main_engine from inbox.models.session import session_scope from inbox.security.blobstorage import encode_blob + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) class Message(Base): - __table__ = Base.metadata.tables['message'] + __table__ = Base.metadata.tables["message"] with session_scope(versioned=False) as db_session: - max_id, = db_session.query(sa.func.max(Message.id)).one() + (max_id,) = db_session.query(sa.func.max(Message.id)).one() if max_id is None: max_id = 0 for i in range(0, max_id, CHUNK_SIZE): - messages = db_session.query(Message). \ - filter(Message.id > i, Message.id <= i + CHUNK_SIZE). 
\ - options(load_only('_compacted_body', 'sanitized_body')) + messages = ( + db_session.query(Message) + .filter(Message.id > i, Message.id <= i + CHUNK_SIZE) + .options(load_only("_compacted_body", "sanitized_body")) + ) for message in messages: if message._compacted_body is None: message._compacted_body = encode_blob( - message.sanitized_body.encode('utf-8')) + message.sanitized_body.encode("utf-8") + ) db_session.commit() diff --git a/migrations/versions/167_create_index_for_querying_messages_by_.py b/migrations/versions/167_create_index_for_querying_messages_by_.py index 49dd781e5..987a99af3 100644 --- a/migrations/versions/167_create_index_for_querying_messages_by_.py +++ b/migrations/versions/167_create_index_for_querying_messages_by_.py @@ -7,16 +7,20 @@ """ # revision identifiers, used by Alembic. -revision = '576f5310e8fc' -down_revision = '3d4f5741e1d7' +revision = "576f5310e8fc" +down_revision = "3d4f5741e1d7" from alembic import op def upgrade(): - op.create_index('ix_message_namespace_id_is_created', 'message', - ['namespace_id', 'is_created'], unique=False) + op.create_index( + "ix_message_namespace_id_is_created", + "message", + ["namespace_id", "is_created"], + unique=False, + ) def downgrade(): - op.drop_index('ix_message_namespace_id_is_created', table_name='message') + op.drop_index("ix_message_namespace_id_is_created", table_name="message") diff --git a/migrations/versions/168_drop_message_sanitized_body.py b/migrations/versions/168_drop_message_sanitized_body.py index d95cc3f71..5267bd545 100644 --- a/migrations/versions/168_drop_message_sanitized_body.py +++ b/migrations/versions/168_drop_message_sanitized_body.py @@ -7,14 +7,14 @@ """ # revision identifiers, used by Alembic. -revision = '1740b45aa815' -down_revision = '576f5310e8fc' +revision = "1740b45aa815" +down_revision = "576f5310e8fc" from alembic import op def upgrade(): - op.drop_column('message', 'sanitized_body') + op.drop_column("message", "sanitized_body") def downgrade(): diff --git a/migrations/versions/169_update_easuid_schema.py b/migrations/versions/169_update_easuid_schema.py index 0f83f2f3b..582ee2bba 100644 --- a/migrations/versions/169_update_easuid_schema.py +++ b/migrations/versions/169_update_easuid_schema.py @@ -7,26 +7,29 @@ """ # revision identifiers, used by Alembic. -revision = '281b07fa75bb' -down_revision = '1740b45aa815' +revision = "281b07fa75bb" +down_revision = "1740b45aa815" from alembic import op def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easuid'): + if not engine.has_table("easuid"): return conn = op.get_bind() - conn.execute('''ALTER TABLE easuid + conn.execute( + """ALTER TABLE easuid ADD COLUMN server_id VARCHAR(64) DEFAULT NULL, ADD COLUMN easfoldersyncstatus_id INT(11) DEFAULT NULL, ADD INDEX easfoldersyncstatus_id (easfoldersyncstatus_id), ADD CONSTRAINT easuid_ibfk_4 FOREIGN KEY (easfoldersyncstatus_id) REFERENCES easfoldersyncstatus (id) ON DELETE CASCADE, ADD INDEX ix_easuid_server_id (server_id) - ''') + """ + ) def downgrade(): diff --git a/migrations/versions/170_update_easuid_schema_2.py b/migrations/versions/170_update_easuid_schema_2.py index 5611f5a86..b1b9d1cc3 100644 --- a/migrations/versions/170_update_easuid_schema_2.py +++ b/migrations/versions/170_update_easuid_schema_2.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '3ee78a8b1ac6' -down_revision = '281b07fa75bb' +revision = "3ee78a8b1ac6" +down_revision = "281b07fa75bb" import sqlalchemy as sa @@ -17,30 +17,38 @@ def upgrade(): from sqlalchemy.ext.declarative import declarative_base from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easuid'): + if not engine.has_table("easuid"): return Base = declarative_base() Base.metadata.reflect(engine) class EASUid(Base): - __table__ = Base.metadata.tables['easuid'] + __table__ = Base.metadata.tables["easuid"] class EASFolderSyncStatus(Base): - __table__ = Base.metadata.tables['easfoldersyncstatus'] + __table__ = Base.metadata.tables["easfoldersyncstatus"] with session_scope(versioned=False) as db_session: max_easuid = db_session.query(sa.func.max(EASUid.id)).scalar() if max_easuid is None: return while True: - results = db_session.query(EASUid, EASFolderSyncStatus). \ - join(EASFolderSyncStatus, sa.and_( - EASUid.fld_uid == EASFolderSyncStatus.eas_folder_id, - EASUid.device_id == EASFolderSyncStatus.device_id, - EASUid.easaccount_id == EASFolderSyncStatus.account_id, - EASUid.easfoldersyncstatus_id.is_(None))). \ - limit(1000).all() + results = ( + db_session.query(EASUid, EASFolderSyncStatus) + .join( + EASFolderSyncStatus, + sa.and_( + EASUid.fld_uid == EASFolderSyncStatus.eas_folder_id, + EASUid.device_id == EASFolderSyncStatus.device_id, + EASUid.easaccount_id == EASFolderSyncStatus.account_id, + EASUid.easfoldersyncstatus_id.is_(None), + ), + ) + .limit(1000) + .all() + ) if not results: return for easuid, easfoldersyncstatus in results: diff --git a/migrations/versions/171_update_easuid_schema_3.py b/migrations/versions/171_update_easuid_schema_3.py index c77587c6d..c1b8dfd22 100644 --- a/migrations/versions/171_update_easuid_schema_3.py +++ b/migrations/versions/171_update_easuid_schema_3.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '584356bf23a3' -down_revision = '3ee78a8b1ac6' +revision = "584356bf23a3" +down_revision = "3ee78a8b1ac6" import sqlalchemy as sa @@ -17,22 +17,24 @@ def upgrade(): from sqlalchemy.ext.declarative import declarative_base from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easuid'): + if not engine.has_table("easuid"): return Base = declarative_base() Base.metadata.reflect(engine) class EASUid(Base): - __table__ = Base.metadata.tables['easuid'] + __table__ = Base.metadata.tables["easuid"] with session_scope(versioned=False) as db_session: # STOPSHIP(emfree): determine if we need to batch this update on large # databases. db_session.query(EASUid).update( - {'server_id': sa.func.concat(EASUid.fld_uid, ':', EASUid.msg_uid)}, - synchronize_session=False) + {"server_id": sa.func.concat(EASUid.fld_uid, ":", EASUid.msg_uid)}, + synchronize_session=False, + ) pass diff --git a/migrations/versions/172_update_easuid_schema_4.py b/migrations/versions/172_update_easuid_schema_4.py index a82cbb003..26df86418 100644 --- a/migrations/versions/172_update_easuid_schema_4.py +++ b/migrations/versions/172_update_easuid_schema_4.py @@ -7,23 +7,26 @@ """ # revision identifiers, used by Alembic. 
-revision = 'd0427f9f3d1' -down_revision = '584356bf23a3' +revision = "d0427f9f3d1" +down_revision = "584356bf23a3" from alembic import op def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easuid'): + if not engine.has_table("easuid"): return conn = op.get_bind() - conn.execute('''ALTER TABLE easuid + conn.execute( + """ALTER TABLE easuid CHANGE COLUMN msg_uid msg_uid INT(11) DEFAULT NULL, CHANGE COLUMN fld_uid fld_uid INT(11) DEFAULT NULL, - ADD UNIQUE INDEX easaccount_id_2 (easaccount_id, device_id, easfoldersyncstatus_id, server_id)''') + ADD UNIQUE INDEX easaccount_id_2 (easaccount_id, device_id, easfoldersyncstatus_id, server_id)""" + ) def downgrade(): diff --git a/migrations/versions/173_add_owner2.py b/migrations/versions/173_add_owner2.py index 29d7f813b..045ecec99 100644 --- a/migrations/versions/173_add_owner2.py +++ b/migrations/versions/173_add_owner2.py @@ -8,8 +8,8 @@ """ # revision identifiers, used by Alembic. -revision = 'fd32a69381a' -down_revision = 'd0427f9f3d1' +revision = "fd32a69381a" +down_revision = "d0427f9f3d1" from alembic import op from sqlalchemy.sql import text @@ -18,8 +18,9 @@ def upgrade(): conn = op.get_bind() conn.execute(text("set @@lock_wait_timeout = 20;")) - conn.execute(text("ALTER TABLE event " - "ADD COLUMN owner2 varchar(1024) DEFAULT NULL")) + conn.execute( + text("ALTER TABLE event " "ADD COLUMN owner2 varchar(1024) DEFAULT NULL") + ) def downgrade(): diff --git a/migrations/versions/174_backfill_owner2.py b/migrations/versions/174_backfill_owner2.py index f114e44bf..70458d543 100644 --- a/migrations/versions/174_backfill_owner2.py +++ b/migrations/versions/174_backfill_owner2.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '4ef055945390' -down_revision = 'fd32a69381a' +revision = "4ef055945390" +down_revision = "fd32a69381a" from alembic import op from sqlalchemy.sql import text diff --git a/migrations/versions/175_fix_recurring_override_cascade.py b/migrations/versions/175_fix_recurring_override_cascade.py index 966f649ea..62f61e4ed 100644 --- a/migrations/versions/175_fix_recurring_override_cascade.py +++ b/migrations/versions/175_fix_recurring_override_cascade.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '6e5b154d917' -down_revision = '4ef055945390' +revision = "6e5b154d917" +down_revision = "4ef055945390" from alembic import op from sqlalchemy.sql import text @@ -18,30 +18,47 @@ def upgrade(): conn = op.get_bind() # Check this migration is needed fk_name, fk_delete = conn.execute( - '''SELECT constraint_name, delete_rule FROM + """SELECT constraint_name, delete_rule FROM information_schema.referential_constraints WHERE constraint_schema=DATABASE() AND table_name='recurringeventoverride' AND constraint_name='recurringeventoverride_ibfk_2' - ''').fetchone() + """ + ).fetchone() - if fk_delete == 'CASCADE': - print 'Checked fk: {}. This migration is not needed, skipping.'.format(fk_name) + if fk_delete == "CASCADE": + print "Checked fk: {}. 
This migration is not needed, skipping.".format(fk_name) return conn.execute(text("set @@lock_wait_timeout = 20;")) conn.execute(text("SET FOREIGN_KEY_CHECKS=0;")) - conn.execute(text("ALTER TABLE recurringeventoverride DROP FOREIGN KEY " - "`recurringeventoverride_ibfk_2`")) - conn.execute(text("ALTER TABLE recurringeventoverride ADD CONSTRAINT recurringeventoverride_ibfk_2" - " FOREIGN KEY (`master_event_id`) REFERENCES `event` (`id`) ON DELETE CASCADE")) + conn.execute( + text( + "ALTER TABLE recurringeventoverride DROP FOREIGN KEY " + "`recurringeventoverride_ibfk_2`" + ) + ) + conn.execute( + text( + "ALTER TABLE recurringeventoverride ADD CONSTRAINT recurringeventoverride_ibfk_2" + " FOREIGN KEY (`master_event_id`) REFERENCES `event` (`id`) ON DELETE CASCADE" + ) + ) def downgrade(): conn = op.get_bind() conn.execute(text("set @@lock_wait_timeout = 20;")) conn.execute(text("SET FOREIGN_KEY_CHECKS=0;")) - conn.execute(text("ALTER TABLE recurringeventoverride DROP FOREIGN KEY " - "`recurringeventoverride_ibfk_2`")) - conn.execute(text("ALTER TABLE recurringeventoverride ADD CONSTRAINT recurringeventoverride_ibfk_2" - " FOREIGN KEY (`master_event_id`) REFERENCES `event` (`id`)")) + conn.execute( + text( + "ALTER TABLE recurringeventoverride DROP FOREIGN KEY " + "`recurringeventoverride_ibfk_2`" + ) + ) + conn.execute( + text( + "ALTER TABLE recurringeventoverride ADD CONSTRAINT recurringeventoverride_ibfk_2" + " FOREIGN KEY (`master_event_id`) REFERENCES `event` (`id`)" + ) + ) diff --git a/migrations/versions/176_add_run_state_folderstatus.py b/migrations/versions/176_add_run_state_folderstatus.py index b598d1e35..a9228d9aa 100644 --- a/migrations/versions/176_add_run_state_folderstatus.py +++ b/migrations/versions/176_add_run_state_folderstatus.py @@ -7,18 +7,24 @@ """ # revision identifiers, used by Alembic. -revision = '48a1991e5dbd' -down_revision = '6e5b154d917' +revision = "48a1991e5dbd" +down_revision = "6e5b154d917" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('imapfoldersyncstatus', sa.Column('sync_should_run', - sa.Boolean(), server_default=sa.sql.expression.true(), - nullable=False)) + op.add_column( + "imapfoldersyncstatus", + sa.Column( + "sync_should_run", + sa.Boolean(), + server_default=sa.sql.expression.true(), + nullable=False, + ), + ) def downgrade(): - op.drop_column('imapfoldersyncstatus', 'sync_should_run') + op.drop_column("imapfoldersyncstatus", "sync_should_run") diff --git a/migrations/versions/177_add_run_state_eas_folderstatus.py b/migrations/versions/177_add_run_state_eas_folderstatus.py index 28bdcd4ce..bbcade754 100644 --- a/migrations/versions/177_add_run_state_eas_folderstatus.py +++ b/migrations/versions/177_add_run_state_eas_folderstatus.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '2b9dd6f7593a' -down_revision = '48a1991e5dbd' +revision = "2b9dd6f7593a" +down_revision = "48a1991e5dbd" from alembic import op import sqlalchemy as sa @@ -16,17 +16,25 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easfoldersyncstatus'): + if not engine.has_table("easfoldersyncstatus"): return - op.add_column('easfoldersyncstatus', sa.Column('sync_should_run', - sa.Boolean(), server_default=sa.sql.expression.true(), - nullable=False)) + op.add_column( + "easfoldersyncstatus", + sa.Column( + "sync_should_run", + sa.Boolean(), + server_default=sa.sql.expression.true(), + nullable=False, + ), + ) def downgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easfoldersyncstatus'): + if not engine.has_table("easfoldersyncstatus"): return - op.drop_column('easfoldersyncstatus', 'sync_should_run') + op.drop_column("easfoldersyncstatus", "sync_should_run") diff --git a/migrations/versions/178_add_reply_to_messagecontactassociation.py b/migrations/versions/178_add_reply_to_messagecontactassociation.py index 6ec900201..da50a17f3 100644 --- a/migrations/versions/178_add_reply_to_messagecontactassociation.py +++ b/migrations/versions/178_add_reply_to_messagecontactassociation.py @@ -7,24 +7,28 @@ """ # revision identifiers, used by Alembic. -revision = '41f957b595fc' -down_revision = '2b9dd6f7593a' +revision = "41f957b595fc" +down_revision = "2b9dd6f7593a" from alembic import op import sqlalchemy as sa def upgrade(): - op.alter_column('messagecontactassociation', 'field', - existing_type=sa.Enum('from_addr', 'to_addr', - 'cc_addr', 'bcc_addr'), - type_=sa.Enum('from_addr', 'to_addr', 'cc_addr', - 'bcc_addr', 'reply_to')) + op.alter_column( + "messagecontactassociation", + "field", + existing_type=sa.Enum("from_addr", "to_addr", "cc_addr", "bcc_addr"), + type_=sa.Enum("from_addr", "to_addr", "cc_addr", "bcc_addr", "reply_to"), + ) def downgrade(): - op.alter_column('messagecontactassociation', 'field', - existing_type=sa.Enum('from_addr', 'to_addr', 'cc_addr', - 'bcc_addr', 'reply_to'), - type_=sa.Enum('from_addr', 'to_addr', 'cc_addr', - 'bcc_addr')) + op.alter_column( + "messagecontactassociation", + "field", + existing_type=sa.Enum( + "from_addr", "to_addr", "cc_addr", "bcc_addr", "reply_to" + ), + type_=sa.Enum("from_addr", "to_addr", "cc_addr", "bcc_addr"), + ) diff --git a/migrations/versions/179_longer_event_descriptions.py b/migrations/versions/179_longer_event_descriptions.py index 3d8057bfd..e5067f2d5 100644 --- a/migrations/versions/179_longer_event_descriptions.py +++ b/migrations/versions/179_longer_event_descriptions.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '56500282e024' -down_revision = '41f957b595fc' +revision = "56500282e024" +down_revision = "41f957b595fc" from alembic import op import sqlalchemy as sa @@ -16,9 +16,8 @@ def upgrade(): - op.add_column('event', sa.Column('_description', mysql.LONGTEXT(), - nullable=True)) + op.add_column("event", sa.Column("_description", mysql.LONGTEXT(), nullable=True)) def downgrade(): - op.drop_column('event', '_description') + op.drop_column("event", "_description") diff --git a/migrations/versions/180_migrate_event_descriptions.py b/migrations/versions/180_migrate_event_descriptions.py index 6a1514f20..8207d861f 100644 --- a/migrations/versions/180_migrate_event_descriptions.py +++ b/migrations/versions/180_migrate_event_descriptions.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = 'ea9dc8742ee' -down_revision = '56500282e024' +revision = "ea9dc8742ee" +down_revision = "56500282e024" from alembic import op @@ -17,9 +17,10 @@ def upgrade(): conn = op.get_bind() while True: res = conn.execute( - 'UPDATE event SET _description=description ' - 'WHERE _description IS NULL AND description IS NOT NULL LIMIT 100000') - print 'Updated {} rows'.format(res.rowcount) + "UPDATE event SET _description=description " + "WHERE _description IS NULL AND description IS NOT NULL LIMIT 100000" + ) + print "Updated {} rows".format(res.rowcount) if res.rowcount == 0: return diff --git a/migrations/versions/181_drop_short_event_descriptions.py b/migrations/versions/181_drop_short_event_descriptions.py index 0dea36be6..293579dd0 100644 --- a/migrations/versions/181_drop_short_event_descriptions.py +++ b/migrations/versions/181_drop_short_event_descriptions.py @@ -7,14 +7,14 @@ """ # revision identifiers, used by Alembic. -revision = '10da2e0bc3bb' -down_revision = 'ea9dc8742ee' +revision = "10da2e0bc3bb" +down_revision = "ea9dc8742ee" from alembic import op def upgrade(): - op.drop_column('event', 'description') + op.drop_column("event", "description") def downgrade(): diff --git a/migrations/versions/182_add_data_processing_cache_table.py b/migrations/versions/182_add_data_processing_cache_table.py index 1ea3f0bdb..a0f9231b1 100644 --- a/migrations/versions/182_add_data_processing_cache_table.py +++ b/migrations/versions/182_add_data_processing_cache_table.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '3857f395fb1d' -down_revision = '10da2e0bc3bb' +revision = "3857f395fb1d" +down_revision = "10da2e0bc3bb" from alembic import op import sqlalchemy as sa @@ -17,23 +17,23 @@ def upgrade(): op.create_table( - 'dataprocessingcache', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('namespace_id', sa.Integer(), nullable=False), - sa.Column('contact_rankings', mysql.MEDIUMBLOB(), nullable=True), - sa.Column('contact_rankings_last_updated', sa.DateTime(), - nullable=True), - sa.Column('contact_groups', mysql.MEDIUMBLOB(), nullable=True), - sa.Column('contact_groups_last_updated', sa.DateTime(), nullable=True), - sa.ForeignKeyConstraint(['namespace_id'], [u'namespace.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('namespace_id') + "dataprocessingcache", + sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("namespace_id", sa.Integer(), nullable=False), + sa.Column("contact_rankings", mysql.MEDIUMBLOB(), nullable=True), + sa.Column("contact_rankings_last_updated", sa.DateTime(), nullable=True), + sa.Column("contact_groups", mysql.MEDIUMBLOB(), nullable=True), + sa.Column("contact_groups_last_updated", sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint( + ["namespace_id"], [u"namespace.id"], ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("namespace_id"), ) def downgrade(): - op.drop_table('dataprocessingcache') + op.drop_table("dataprocessingcache") diff --git a/migrations/versions/183_change_event_sync_timestamp.py b/migrations/versions/183_change_event_sync_timestamp.py index f66dc6a8a..7bf6915ff 100644 --- a/migrations/versions/183_change_event_sync_timestamp.py +++ b/migrations/versions/183_change_event_sync_timestamp.py @@ -7,24 +7,25 @@ """ # revision identifiers, used by Alembic. -revision = '3a58d466f61d' -down_revision = '3857f395fb1d' +revision = "3a58d466f61d" +down_revision = "3857f395fb1d" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('calendar', sa.Column('last_synced', sa.DateTime(), - nullable=True)) + op.add_column("calendar", sa.Column("last_synced", sa.DateTime(), nullable=True)) conn = op.get_bind() - conn.execute('''UPDATE calendar + conn.execute( + """UPDATE calendar JOIN namespace ON calendar.namespace_id=namespace.id JOIN account ON namespace.account_id=account.id JOIN gmailaccount ON account.id=gmailaccount.id SET calendar.last_synced=account.last_synced_events - WHERE account.emailed_events_calendar_id != calendar.id''') + WHERE account.emailed_events_calendar_id != calendar.id""" + ) def downgrade(): - op.drop_column('calendar', 'last_synced') + op.drop_column("calendar", "last_synced") diff --git a/migrations/versions/184_create_gmail_auth_credentials_table.py b/migrations/versions/184_create_gmail_auth_credentials_table.py index 37d6457a4..07f3bd6c9 100644 --- a/migrations/versions/184_create_gmail_auth_credentials_table.py +++ b/migrations/versions/184_create_gmail_auth_credentials_table.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '2ac4e3c4e049' -down_revision = '3a58d466f61d' +revision = "2ac4e3c4e049" +down_revision = "3a58d466f61d" from alembic import op import sqlalchemy as sa @@ -17,29 +17,33 @@ def upgrade(): op.create_table( - 'gmailauthcredentials', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('gmailaccount_id', sa.Integer(), nullable=False), - sa.Column('refresh_token_id', sa.Integer(), nullable=False), - sa.Column('scopes', mysql.VARCHAR(length=512), nullable=False), - sa.Column('g_id_token', mysql.VARCHAR(length=1024), nullable=False), - sa.Column('client_id', mysql.VARCHAR(length=256), nullable=False), - sa.Column('client_secret', mysql.VARCHAR(length=256), nullable=False), - sa.Column('is_valid', sa.Boolean(), - nullable=False, server_default=sa.sql.expression.true()), + "gmailauthcredentials", + sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("gmailaccount_id", sa.Integer(), nullable=False), + sa.Column("refresh_token_id", sa.Integer(), nullable=False), + sa.Column("scopes", mysql.VARCHAR(length=512), nullable=False), + sa.Column("g_id_token", mysql.VARCHAR(length=1024), nullable=False), + sa.Column("client_id", mysql.VARCHAR(length=256), nullable=False), + sa.Column("client_secret", mysql.VARCHAR(length=256), nullable=False), + sa.Column( + "is_valid", + sa.Boolean(), + nullable=False, + server_default=sa.sql.expression.true(), + ), sa.ForeignKeyConstraint( - ['gmailaccount_id'], [u'gmailaccount.id'], ondelete='CASCADE' + ["gmailaccount_id"], [u"gmailaccount.id"], ondelete="CASCADE" ), sa.ForeignKeyConstraint( - ['refresh_token_id'], [u'secret.id'], ondelete='CASCADE' + ["refresh_token_id"], [u"secret.id"], ondelete="CASCADE" ), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('refresh_token_id'), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("refresh_token_id"), ) def downgrade(): - op.drop_table('gmailauthcredentials') + op.drop_table("gmailauthcredentials") diff --git a/migrations/versions/185_backfill_gmail_auth_credentials_table.py b/migrations/versions/185_backfill_gmail_auth_credentials_table.py index 55ce17149..f66492413 100644 --- a/migrations/versions/185_backfill_gmail_auth_credentials_table.py +++ b/migrations/versions/185_backfill_gmail_auth_credentials_table.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '14692efd261b' -down_revision = '2ac4e3c4e049' +revision = "14692efd261b" +down_revision = "2ac4e3c4e049" def upgrade(): @@ -18,6 +18,7 @@ def upgrade(): from inbox.config import config from inbox.models.session import session_scope from inbox.ignition import main_engine + engine = main_engine() now = datetime.datetime.now() @@ -25,29 +26,36 @@ def upgrade(): Base.metadata.reflect(engine) class GmailAccount(Base): - __table__ = Base.metadata.tables['gmailaccount'] + __table__ = Base.metadata.tables["gmailaccount"] class Secret(Base): - __table__ = Base.metadata.tables['secret'] + __table__ = Base.metadata.tables["secret"] class GmailAuthCredentials(Base): - __table__ = Base.metadata.tables['gmailauthcredentials'] + __table__ = Base.metadata.tables["gmailauthcredentials"] secret = relationship(Secret) with session_scope(versioned=False) as db_session: - for acc, sec in db_session.query(GmailAccount, Secret) \ - .filter(GmailAccount.refresh_token_id == Secret.id, - GmailAccount.scope != None, - GmailAccount.g_id_token != None) \ - .all(): # noqa: E711 + for acc, sec in ( + db_session.query(GmailAccount, Secret) + .filter( + GmailAccount.refresh_token_id == Secret.id, + GmailAccount.scope != None, + GmailAccount.g_id_token != None, + ) + .all() + ): # noqa: E711 # Create a new GmailAuthCredentials entry if # we don't have one already - if db_session.query(GmailAuthCredentials, Secret) \ - .filter(GmailAuthCredentials.gmailaccount_id == acc.id) \ - .filter(Secret._secret == sec._secret) \ - .count() == 0: + if ( + db_session.query(GmailAuthCredentials, Secret) + .filter(GmailAuthCredentials.gmailaccount_id == acc.id) + .filter(Secret._secret == sec._secret) + .count() + == 0 + ): # Create a new secret new_sec = Secret() @@ -66,13 +74,13 @@ class GmailAuthCredentials(Base): auth_creds.updated_at = now auth_creds.secret = new_sec - auth_creds.client_id = \ - (acc.client_id or - config.get_required('GOOGLE_OAUTH_CLIENT_ID')) + auth_creds.client_id = acc.client_id or config.get_required( + "GOOGLE_OAUTH_CLIENT_ID" + ) - auth_creds.client_secret = \ - (acc.client_secret or - config.get_required('GOOGLE_OAUTH_CLIENT_SECRET')) + auth_creds.client_secret = acc.client_secret or config.get_required( + "GOOGLE_OAUTH_CLIENT_SECRET" + ) db_session.add(auth_creds) db_session.add(new_sec) diff --git a/migrations/versions/186_new_tables_for_folders_overhaul.py b/migrations/versions/186_new_tables_for_folders_overhaul.py index 59ed22739..c2bf9d1ba 100644 --- a/migrations/versions/186_new_tables_for_folders_overhaul.py +++ b/migrations/versions/186_new_tables_for_folders_overhaul.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '23e204cd1d91' -down_revision = '14692efd261b' +revision = "23e204cd1d91" +down_revision = "14692efd261b" from alembic import op import sqlalchemy as sa @@ -16,117 +16,132 @@ def upgrade(): op.create_table( - 'category', - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('public_id', sa.BINARY(16), nullable=False), - sa.Column('namespace_id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=191, collation='utf8mb4_bin'), - nullable=True), - sa.Column('display_name', sa.String(length=191), nullable=False), - sa.Column('type_', sa.Enum('folder', 'label'), nullable=False), - sa.ForeignKeyConstraint(['namespace_id'], ['namespace.id'], - name='category_fk1', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('namespace_id', 'name', 'display_name'), - sa.UniqueConstraint('namespace_id', 'public_id') + "category", + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("public_id", sa.BINARY(16), nullable=False), + sa.Column("namespace_id", sa.Integer(), nullable=False), + sa.Column( + "name", sa.String(length=191, collation="utf8mb4_bin"), nullable=True + ), + sa.Column("display_name", sa.String(length=191), nullable=False), + sa.Column("type_", sa.Enum("folder", "label"), nullable=False), + sa.ForeignKeyConstraint( + ["namespace_id"], ["namespace.id"], name="category_fk1", ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("namespace_id", "name", "display_name"), + sa.UniqueConstraint("namespace_id", "public_id"), ) - op.create_index('ix_category_public_id', 'category', ['public_id'], - unique=False) - op.create_index('ix_category_created_at', 'category', ['created_at'], - unique=False) - op.create_index('ix_category_deleted_at', 'category', ['deleted_at'], - unique=False) - op.create_index('ix_category_updated_at', 'category', ['updated_at'], - unique=False) + op.create_index("ix_category_public_id", "category", ["public_id"], unique=False) + op.create_index("ix_category_created_at", "category", ["created_at"], unique=False) + op.create_index("ix_category_deleted_at", "category", ["deleted_at"], unique=False) + op.create_index("ix_category_updated_at", "category", ["updated_at"], unique=False) op.create_table( - 'label', - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('account_id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=191, collation='utf8mb4_bin'), - nullable=False), - sa.Column('canonical_name', sa.String(length=191), nullable=True), - sa.Column('category_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['account_id'], ['account.id'], - name='label_fk1', ondelete='CASCADE'), - sa.ForeignKeyConstraint(['category_id'], [u'category.id'], ), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('account_id', 'name') + "label", + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), 
nullable=False), + sa.Column("account_id", sa.Integer(), nullable=False), + sa.Column( + "name", sa.String(length=191, collation="utf8mb4_bin"), nullable=False + ), + sa.Column("canonical_name", sa.String(length=191), nullable=True), + sa.Column("category_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint( + ["account_id"], ["account.id"], name="label_fk1", ondelete="CASCADE" + ), + sa.ForeignKeyConstraint(["category_id"], [u"category.id"],), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("account_id", "name"), ) - op.create_index('ix_label_created_at', 'label', ['created_at'], - unique=False) - op.create_index('ix_label_deleted_at', 'label', ['deleted_at'], - unique=False) - op.create_index('ix_label_updated_at', 'label', ['updated_at'], - unique=False) + op.create_index("ix_label_created_at", "label", ["created_at"], unique=False) + op.create_index("ix_label_deleted_at", "label", ["deleted_at"], unique=False) + op.create_index("ix_label_updated_at", "label", ["updated_at"], unique=False) op.create_table( - 'messagecategory', - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('message_id', sa.Integer(), nullable=False), - sa.Column('category_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['category_id'], [u'category.id'], - ondelete='CASCADE'), - sa.ForeignKeyConstraint(['message_id'], [u'message.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') + "messagecategory", + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("message_id", sa.Integer(), nullable=False), + sa.Column("category_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["category_id"], [u"category.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["message_id"], [u"message.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + "ix_messagecategory_created_at", "messagecategory", ["created_at"], unique=False + ) + op.create_index( + "ix_messagecategory_deleted_at", "messagecategory", ["deleted_at"], unique=False + ) + op.create_index( + "ix_messagecategory_updated_at", "messagecategory", ["updated_at"], unique=False + ) + op.create_index( + "message_category_ids", + "messagecategory", + ["message_id", "category_id"], + unique=False, ) - op.create_index('ix_messagecategory_created_at', 'messagecategory', - ['created_at'], unique=False) - op.create_index('ix_messagecategory_deleted_at', 'messagecategory', - ['deleted_at'], unique=False) - op.create_index('ix_messagecategory_updated_at', 'messagecategory', - ['updated_at'], unique=False) - op.create_index('message_category_ids', 'messagecategory', - ['message_id', 'category_id'], unique=False) op.create_table( - 'labelitem', - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('imapuid_id', sa.Integer(), nullable=False), - sa.Column('label_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['imapuid_id'], [u'imapuid.id'], - ondelete='CASCADE'), - sa.ForeignKeyConstraint(['label_id'], [u'label.id'], - ondelete='CASCADE'), - 
sa.PrimaryKeyConstraint('id') + "labelitem", + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("imapuid_id", sa.Integer(), nullable=False), + sa.Column("label_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["imapuid_id"], [u"imapuid.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["label_id"], [u"label.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + "imapuid_label_ids", "labelitem", ["imapuid_id", "label_id"], unique=False + ) + op.create_index( + "ix_labelitem_created_at", "labelitem", ["created_at"], unique=False + ) + op.create_index( + "ix_labelitem_deleted_at", "labelitem", ["deleted_at"], unique=False + ) + op.create_index( + "ix_labelitem_updated_at", "labelitem", ["updated_at"], unique=False + ) + op.add_column("folder", sa.Column("category_id", sa.Integer(), nullable=True)) + op.create_foreign_key( + "folder_ibfk_2", "folder", "category", ["category_id"], ["id"] ) - op.create_index('imapuid_label_ids', 'labelitem', - ['imapuid_id', 'label_id'], unique=False) - op.create_index('ix_labelitem_created_at', 'labelitem', ['created_at'], - unique=False) - op.create_index('ix_labelitem_deleted_at', 'labelitem', ['deleted_at'], - unique=False) - op.create_index('ix_labelitem_updated_at', 'labelitem', ['updated_at'], - unique=False) - op.add_column('folder', - sa.Column('category_id', sa.Integer(), nullable=True)) - op.create_foreign_key('folder_ibfk_2', 'folder', - 'category', ['category_id'], ['id']) from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if engine.has_table('easfoldersyncstatus'): - op.add_column('easfoldersyncstatus', - sa.Column('category_id', sa.Integer(), nullable=True)) - op.create_foreign_key('easfoldersyncstatus_ibfk_3', - 'easfoldersyncstatus', 'category', - ['category_id'], ['id']) + if engine.has_table("easfoldersyncstatus"): + op.add_column( + "easfoldersyncstatus", sa.Column("category_id", sa.Integer(), nullable=True) + ) + op.create_foreign_key( + "easfoldersyncstatus_ibfk_3", + "easfoldersyncstatus", + "category", + ["category_id"], + ["id"], + ) - op.add_column('message', - sa.Column('is_starred', sa.Boolean(), - server_default=sa.sql.expression.false(), - nullable=False)) + op.add_column( + "message", + sa.Column( + "is_starred", + sa.Boolean(), + server_default=sa.sql.expression.false(), + nullable=False, + ), + ) def downgrade(): - raise Exception('Aw hell no') + raise Exception("Aw hell no") diff --git a/migrations/versions/187_migrate_data_for_folders_overhaul.py b/migrations/versions/187_migrate_data_for_folders_overhaul.py index f67c24f2e..a4f35ac44 100644 --- a/migrations/versions/187_migrate_data_for_folders_overhaul.py +++ b/migrations/versions/187_migrate_data_for_folders_overhaul.py @@ -7,38 +7,33 @@ """ # revision identifiers, used by Alembic. 
-revision = '334b33f18b4f' -down_revision = '23e204cd1d91' +revision = "334b33f18b4f" +down_revision = "23e204cd1d91" from sqlalchemy import asc from sqlalchemy.orm import joinedload, subqueryload, load_only from inbox.config import config from nylas.logging import configure_logging, get_logger -configure_logging(config.get('LOGLEVEL')) + +configure_logging(config.get("LOGLEVEL")) log = get_logger() def populate_labels(uid, account, db_session): from inbox.models import Label - existing_labels = { - (l.name, l.canonical_name): l for l in account.labels - } - uid.is_draft = '\\Draft' in uid.g_labels - uid.is_starred = '\\Starred' in uid.g_labels - - category_map = { - '\\Inbox': 'inbox', - '\\Important': 'important', - '\\Sent': 'sent' - } + + existing_labels = {(l.name, l.canonical_name): l for l in account.labels} + uid.is_draft = "\\Draft" in uid.g_labels + uid.is_starred = "\\Starred" in uid.g_labels + + category_map = {"\\Inbox": "inbox", "\\Important": "important", "\\Sent": "sent"} remote_labels = set() for label_string in uid.g_labels: - if label_string in ('\\Draft', '\\Starred'): + if label_string in ("\\Draft", "\\Starred"): continue elif label_string in category_map: - remote_labels.add((category_map[label_string], - category_map[label_string])) + remote_labels.add((category_map[label_string], category_map[label_string])) else: remote_labels.add((label_string, None)) @@ -53,28 +48,35 @@ def populate_labels(uid, account, db_session): def set_labels_for_imapuids(account, db_session): from inbox.models.backends.imap import ImapUid - uids = db_session.query(ImapUid).filter( - ImapUid.account_id == account.id).options( - subqueryload(ImapUid.labelitems).joinedload('label')) + + uids = ( + db_session.query(ImapUid) + .filter(ImapUid.account_id == account.id) + .options(subqueryload(ImapUid.labelitems).joinedload("label")) + ) for uid in uids: populate_labels(uid, account, db_session) - log.info('Updated UID labels', account_id=account.id, uid=uid.id) + log.info("Updated UID labels", account_id=account.id, uid=uid.id) def create_categories_for_folders(account, db_session): from inbox.models import Folder, Category - for folder in db_session.query(Folder).filter( - Folder.account_id == account.id): + + for folder in db_session.query(Folder).filter(Folder.account_id == account.id): cat = Category.find_or_create( - db_session, namespace_id=account.namespace.id, - name=folder.canonical_name, display_name=folder.name, - type_='folder') + db_session, + namespace_id=account.namespace.id, + name=folder.canonical_name, + display_name=folder.name, + type_="folder", + ) folder.category = cat db_session.commit() def create_categories_for_easfoldersyncstatuses(account, db_session): from inbox.mailsync.backends.eas.base.foldersync import save_categories + save_categories(db_session, account, account.primary_device_id) db_session.commit() save_categories(db_session, account, account.secondary_device_id) @@ -83,13 +85,14 @@ def create_categories_for_easfoldersyncstatuses(account, db_session): def migrate_account_metadata(account_id): from inbox.models.session import session_scope from inbox.models import Account + with session_scope(versioned=False) as db_session: account = db_session.query(Account).get(account_id) - if account.discriminator == 'easaccount': + if account.discriminator == "easaccount": create_categories_for_easfoldersyncstatuses(account, db_session) else: create_categories_for_folders(account, db_session) - if account.discriminator == 'gmailaccount': + if account.discriminator == 
"gmailaccount": set_labels_for_imapuids(account, db_session) db_session.commit() @@ -102,25 +105,35 @@ def migrate_messages(account_id): engine = main_engine(pool_size=1, max_overflow=0) with session_scope(versioned=False) as db_session: - namespace = db_session.query(Namespace).filter_by( - account_id=account_id).one() + namespace = db_session.query(Namespace).filter_by(account_id=account_id).one() offset = 0 while True: - if engine.has_table('easuid'): + if engine.has_table("easuid"): additional_options = [subqueryload(Message.easuids)] else: additional_options = [] - messages = db_session.query(Message). \ - filter(Message.namespace_id == namespace.id). \ - options(load_only(Message.id, Message.is_read, - Message.is_starred, Message.is_draft), - joinedload(Message.namespace).load_only('id'), - subqueryload(Message.imapuids), - subqueryload(Message.messagecategories), - *additional_options). \ - with_hint(Message, 'USE INDEX (ix_message_namespace_id)'). \ - order_by(asc(Message.id)).limit(1000).offset(offset).all() + messages = ( + db_session.query(Message) + .filter(Message.namespace_id == namespace.id) + .options( + load_only( + Message.id, + Message.is_read, + Message.is_starred, + Message.is_draft, + ), + joinedload(Message.namespace).load_only("id"), + subqueryload(Message.imapuids), + subqueryload(Message.messagecategories), + *additional_options + ) + .with_hint(Message, "USE INDEX (ix_message_namespace_id)") + .order_by(asc(Message.id)) + .limit(1000) + .offset(offset) + .all() + ) if not messages: return for message in messages: @@ -129,8 +142,9 @@ def migrate_messages(account_id): except IndexError: # Can happen for messages without a folder. pass - log.info('Updated message', namespace_id=namespace.id, - message_id=message.id) + log.info( + "Updated message", namespace_id=namespace.id, message_id=message.id + ) db_session.commit() offset += 1000 @@ -143,6 +157,7 @@ def migrate_account(account_id): def upgrade(): from inbox.models.session import session_scope from inbox.models import Account + with session_scope() as db_session: account_ids = [id_ for id_, in db_session.query(Account.id)] diff --git a/migrations/versions/188_create_sequence_number_column.py b/migrations/versions/188_create_sequence_number_column.py index f39540bf4..ddb2f5c92 100644 --- a/migrations/versions/188_create_sequence_number_column.py +++ b/migrations/versions/188_create_sequence_number_column.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '606447e78e7' -down_revision = '334b33f18b4f' +revision = "606447e78e7" +down_revision = "334b33f18b4f" from alembic import op from sqlalchemy.sql import text @@ -17,11 +17,16 @@ def upgrade(): conn = op.get_bind() conn.execute(text("set @@lock_wait_timeout = 20;")) - conn.execute(text("ALTER TABLE event ADD COLUMN sequence_number int(11) DEFAULT '0', " - "ALGORITHM=inplace,LOCK=none")) - conn.execute(text("UPDATE event SET sequence_number='0' " - "WHERE sequence_number is NULL")) + conn.execute( + text( + "ALTER TABLE event ADD COLUMN sequence_number int(11) DEFAULT '0', " + "ALGORITHM=inplace,LOCK=none" + ) + ) + conn.execute( + text("UPDATE event SET sequence_number='0' " "WHERE sequence_number is NULL") + ) def downgrade(): - op.drop_column('event', 'sequence_number') + op.drop_column("event", "sequence_number") diff --git a/migrations/versions/189_add_initial_sync_start_end_column.py b/migrations/versions/189_add_initial_sync_start_end_column.py index 5aaaf982b..d3a616e4c 100644 --- a/migrations/versions/189_add_initial_sync_start_end_column.py +++ b/migrations/versions/189_add_initial_sync_start_end_column.py @@ -7,20 +7,20 @@ """ # revision identifiers, used by Alembic. -revision = '3b093f2d7419' -down_revision = '606447e78e7' +revision = "3b093f2d7419" +down_revision = "606447e78e7" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('folder', sa.Column('initial_sync_end', sa.DateTime(), - nullable=True)) - op.add_column('folder', sa.Column('initial_sync_start', sa.DateTime(), - nullable=True)) + op.add_column("folder", sa.Column("initial_sync_end", sa.DateTime(), nullable=True)) + op.add_column( + "folder", sa.Column("initial_sync_start", sa.DateTime(), nullable=True) + ) def downgrade(): - op.drop_column('folder', 'initial_sync_start') - op.drop_column('folder', 'initial_sync_end') + op.drop_column("folder", "initial_sync_start") + op.drop_column("folder", "initial_sync_end") diff --git a/migrations/versions/190_eas_add_device_retirement.py b/migrations/versions/190_eas_add_device_retirement.py index f1db5cfe0..802b705b4 100644 --- a/migrations/versions/190_eas_add_device_retirement.py +++ b/migrations/versions/190_eas_add_device_retirement.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '246a6bf050bc' -down_revision = '3b093f2d7419' +revision = "246a6bf050bc" +down_revision = "3b093f2d7419" from alembic import op import sqlalchemy as sa @@ -16,18 +16,25 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easdevice'): + if not engine.has_table("easdevice"): return - op.add_column('easdevice', - sa.Column('retired', sa.Boolean(), - server_default=sa.sql.expression.false(), - nullable=False)) + op.add_column( + "easdevice", + sa.Column( + "retired", + sa.Boolean(), + server_default=sa.sql.expression.false(), + nullable=False, + ), + ) def downgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easdevice'): + if not engine.has_table("easdevice"): return - op.drop_column('easdevice', 'retired') + op.drop_column("easdevice", "retired") diff --git a/migrations/versions/191_add_new_events_and_calendars_flags.py b/migrations/versions/191_add_new_events_and_calendars_flags.py index ba83720dc..d66bdb8ca 100644 --- a/migrations/versions/191_add_new_events_and_calendars_flags.py +++ b/migrations/versions/191_add_new_events_and_calendars_flags.py @@ -7,41 +7,30 @@ """ # revision identifiers, used by Alembic. -revision = '47aec237051e' -down_revision = '246a6bf050bc' +revision = "47aec237051e" +down_revision = "246a6bf050bc" from alembic import op import sqlalchemy as sa def upgrade(): + op.add_column("gmailaccount", sa.Column("last_calendar_list_sync", sa.DateTime())) op.add_column( - 'gmailaccount', - sa.Column('last_calendar_list_sync', sa.DateTime()) + "gmailaccount", sa.Column("gpush_calendar_list_last_ping", sa.DateTime()) ) op.add_column( - 'gmailaccount', - sa.Column('gpush_calendar_list_last_ping', sa.DateTime()) - ) - op.add_column( - 'gmailaccount', - sa.Column('gpush_calendar_list_expiration', sa.DateTime()) + "gmailaccount", sa.Column("gpush_calendar_list_expiration", sa.DateTime()) ) - op.add_column( - 'calendar', - sa.Column('gpush_last_ping', sa.DateTime()) - ) - op.add_column( - 'calendar', - sa.Column('gpush_expiration', sa.DateTime()) - ) + op.add_column("calendar", sa.Column("gpush_last_ping", sa.DateTime())) + op.add_column("calendar", sa.Column("gpush_expiration", sa.DateTime())) def downgrade(): - op.drop_column('gmailaccount', 'last_calendar_list_sync') - op.drop_column('gmailaccount', 'gpush_calendar_list_last_ping') - op.drop_column('gmailaccount', 'gpush_calendar_list_expiration') + op.drop_column("gmailaccount", "last_calendar_list_sync") + op.drop_column("gmailaccount", "gpush_calendar_list_last_ping") + op.drop_column("gmailaccount", "gpush_calendar_list_expiration") - op.drop_column('calendar', 'gpush_last_ping') - op.drop_column('calendar', 'gpush_expiration') + op.drop_column("calendar", "gpush_last_ping") + op.drop_column("calendar", "gpush_expiration") diff --git a/migrations/versions/192_add_receivedrecentdate_column_to_threads.py b/migrations/versions/192_add_receivedrecentdate_column_to_threads.py index c8328bb51..8eb75cd29 100644 --- a/migrations/versions/192_add_receivedrecentdate_column_to_threads.py +++ b/migrations/versions/192_add_receivedrecentdate_column_to_threads.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '2758cefad87d' -down_revision = '47aec237051e' +revision = "2758cefad87d" +down_revision = "47aec237051e" from alembic import op import sqlalchemy as sa @@ -16,22 +16,32 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('thread'): + if not engine.has_table("thread"): return - op.add_column('thread', - sa.Column('receivedrecentdate', sa.DATETIME(), - server_default=sa.sql.null(), - nullable=True)) - op.create_index('ix_thread_namespace_id_receivedrecentdate', 'thread', - ['namespace_id', 'receivedrecentdate'], unique=False) + op.add_column( + "thread", + sa.Column( + "receivedrecentdate", + sa.DATETIME(), + server_default=sa.sql.null(), + nullable=True, + ), + ) + op.create_index( + "ix_thread_namespace_id_receivedrecentdate", + "thread", + ["namespace_id", "receivedrecentdate"], + unique=False, + ) def downgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('thread'): + if not engine.has_table("thread"): return - op.drop_column('thread', 'receivedrecentdate') - op.drop_index('ix_thread_namespace_id_receivedrecentdate', - table_name='thread') + op.drop_column("thread", "receivedrecentdate") + op.drop_index("ix_thread_namespace_id_receivedrecentdate", table_name="thread") diff --git a/migrations/versions/193_calculate_receivedrecentdate_for_threads.py b/migrations/versions/193_calculate_receivedrecentdate_for_threads.py index 09fff4531..3a44d96f5 100644 --- a/migrations/versions/193_calculate_receivedrecentdate_for_threads.py +++ b/migrations/versions/193_calculate_receivedrecentdate_for_threads.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '691fa97024d' -down_revision = '2758cefad87d' +revision = "691fa97024d" +down_revision = "2758cefad87d" # solution from http://stackoverflow.com/a/1217947 @@ -33,10 +33,15 @@ def upgrade(): with session_scope(versioned=False) as db_session: for thread in page_query(db_session.query(Thread)): - last_message = db_session.query(Message). \ - filter(Message.thread_id == thread.id, - not_(Message.categories.any(name="sent"))). \ - order_by(desc(Message.received_date)).first() + last_message = ( + db_session.query(Message) + .filter( + Message.thread_id == thread.id, + not_(Message.categories.any(name="sent")), + ) + .order_by(desc(Message.received_date)) + .first() + ) if last_message: thread.receivedrecentdate = last_message.received_date diff --git a/migrations/versions/194_extend_eas_folder_id.py b/migrations/versions/194_extend_eas_folder_id.py index a399a0355..93ba969d9 100644 --- a/migrations/versions/194_extend_eas_folder_id.py +++ b/migrations/versions/194_extend_eas_folder_id.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '69e93aef3e9' -down_revision = '691fa97024d' +revision = "69e93aef3e9" +down_revision = "691fa97024d" from alembic import op from sqlalchemy.sql import text @@ -20,13 +20,18 @@ def upgrade(): conn.execute(text("set @@lock_wait_timeout = 20;")) from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) - if 'easfoldersyncstatus' in Base.metadata.tables: - conn.execute(text("ALTER TABLE easfoldersyncstatus MODIFY eas_folder_id VARCHAR(191)," - " MODIFY eas_parent_id VARCHAR(191)")) + if "easfoldersyncstatus" in Base.metadata.tables: + conn.execute( + text( + "ALTER TABLE easfoldersyncstatus MODIFY eas_folder_id VARCHAR(191)," + " MODIFY eas_parent_id VARCHAR(191)" + ) + ) def downgrade(): @@ -34,10 +39,15 @@ def downgrade(): conn.execute(text("set @@lock_wait_timeout = 20;")) from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) - if 'easfoldersyncstatus' in Base.metadata.tables: - conn.execute(text("ALTER TABLE easfoldersyncstatus MODIFY eas_folder_id VARCHAR(64)," - " MODIFY eas_parent_id VARCHAR(64)")) + if "easfoldersyncstatus" in Base.metadata.tables: + conn.execute( + text( + "ALTER TABLE easfoldersyncstatus MODIFY eas_folder_id VARCHAR(64)," + " MODIFY eas_parent_id VARCHAR(64)" + ) + ) diff --git a/migrations/versions/195_remove_receivedrecentdate_column.py b/migrations/versions/195_remove_receivedrecentdate_column.py index bd31ee94c..4f7a1c21a 100644 --- a/migrations/versions/195_remove_receivedrecentdate_column.py +++ b/migrations/versions/195_remove_receivedrecentdate_column.py @@ -7,23 +7,31 @@ """ # revision identifiers, used by Alembic. -revision = '51ad0922ad8e' -down_revision = '69e93aef3e9' +revision = "51ad0922ad8e" +down_revision = "69e93aef3e9" from alembic import op import sqlalchemy as sa def upgrade(): - op.drop_column('thread', 'receivedrecentdate') - op.drop_index('ix_thread_namespace_id_receivedrecentdate', - table_name='thread') + op.drop_column("thread", "receivedrecentdate") + op.drop_index("ix_thread_namespace_id_receivedrecentdate", table_name="thread") def downgrade(): - op.add_column('thread', - sa.Column('receivedrecentdate', sa.DATETIME(), - server_default=sa.sql.null(), - nullable=True)) - op.create_index('ix_thread_namespace_id_receivedrecentdate', 'thread', - ['namespace_id', 'receivedrecentdate'], unique=False) + op.add_column( + "thread", + sa.Column( + "receivedrecentdate", + sa.DATETIME(), + server_default=sa.sql.null(), + nullable=True, + ), + ) + op.create_index( + "ix_thread_namespace_id_receivedrecentdate", + "thread", + ["namespace_id", "receivedrecentdate"], + unique=False, + ) diff --git a/migrations/versions/196_create_outlook_account_column.py b/migrations/versions/196_create_outlook_account_column.py index e6472efe7..23ee266c6 100644 --- a/migrations/versions/196_create_outlook_account_column.py +++ b/migrations/versions/196_create_outlook_account_column.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '4fa0540482f8' -down_revision = '51ad0922ad8e' +revision = "4fa0540482f8" +down_revision = "51ad0922ad8e" from alembic import op from sqlalchemy.sql import text @@ -20,11 +20,12 @@ def upgrade(): conn.execute(text("set @@lock_wait_timeout = 20;")) from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) - if 'easaccount' in Base.metadata.tables: + if "easaccount" in Base.metadata.tables: conn.execute(text("ALTER TABLE easaccount ADD COLUMN outlook_account BOOL;")) @@ -33,9 +34,10 @@ def downgrade(): conn.execute(text("set @@lock_wait_timeout = 20;")) from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) Base = declarative_base() Base.metadata.reflect(engine) - if 'easaccount' in Base.metadata.tables: + if "easaccount" in Base.metadata.tables: conn.execute(text("ALTER TABLE easaccount DROP COLUMN outlook_account;")) diff --git a/migrations/versions/197_add_message_categories_change_counter.py b/migrations/versions/197_add_message_categories_change_counter.py index 5b62bf615..57888c8d4 100644 --- a/migrations/versions/197_add_message_categories_change_counter.py +++ b/migrations/versions/197_add_message_categories_change_counter.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '3cf51fb0e76a' -down_revision = '4fa0540482f8' +revision = "3cf51fb0e76a" +down_revision = "4fa0540482f8" from alembic import op import sqlalchemy as sa @@ -16,19 +16,33 @@ def upgrade(): op.alter_column( - 'message', 'state', - type_=sa.Enum('draft', 'sending', 'sending failed', 'sent', - 'actions_pending', 'actions_committed'), - existing_type=sa.Enum('draft', 'sending', 'sending failed', 'sent')) + "message", + "state", + type_=sa.Enum( + "draft", + "sending", + "sending failed", + "sent", + "actions_pending", + "actions_committed", + ), + existing_type=sa.Enum("draft", "sending", "sending failed", "sent"), + ) from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easdevice'): + if not engine.has_table("easdevice"): return - op.add_column('easdevice', - sa.Column('active', sa.Boolean(), - server_default=sa.sql.expression.false(), - nullable=False)) + op.add_column( + "easdevice", + sa.Column( + "active", + sa.Boolean(), + server_default=sa.sql.expression.false(), + nullable=False, + ), + ) def downgrade(): diff --git a/migrations/versions/198_eas_foldersyncstatus_startstop_columns.py b/migrations/versions/198_eas_foldersyncstatus_startstop_columns.py index fb4bbea41..9cbf18291 100644 --- a/migrations/versions/198_eas_foldersyncstatus_startstop_columns.py +++ b/migrations/versions/198_eas_foldersyncstatus_startstop_columns.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '301d22aa96b8' -down_revision = '3cf51fb0e76a' +revision = "301d22aa96b8" +down_revision = "3cf51fb0e76a" from alembic import op import sqlalchemy as sa @@ -16,21 +16,25 @@ def upgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easfoldersyncstatus'): + if not engine.has_table("easfoldersyncstatus"): return - op.add_column('easfoldersyncstatus', sa.Column('initial_sync_end', - sa.DateTime(), - nullable=True)) - op.add_column('easfoldersyncstatus', sa.Column('initial_sync_start', - sa.DateTime(), - nullable=True)) + op.add_column( + "easfoldersyncstatus", + sa.Column("initial_sync_end", sa.DateTime(), nullable=True), + ) + op.add_column( + "easfoldersyncstatus", + sa.Column("initial_sync_start", sa.DateTime(), nullable=True), + ) def downgrade(): from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easfoldersyncstatus'): + if not engine.has_table("easfoldersyncstatus"): return - op.drop_column('easfoldersyncstatus', 'initial_sync_start') - op.drop_column('easfoldersyncstatus', 'initial_sync_end') + op.drop_column("easfoldersyncstatus", "initial_sync_start") + op.drop_column("easfoldersyncstatus", "initial_sync_end") diff --git a/migrations/versions/199_save_imap_uidnext.py b/migrations/versions/199_save_imap_uidnext.py index f224dff8b..39bf6e1d1 100644 --- a/migrations/versions/199_save_imap_uidnext.py +++ b/migrations/versions/199_save_imap_uidnext.py @@ -7,17 +7,16 @@ """ # revision identifiers, used by Alembic. -revision = '3583211a4838' -down_revision = '301d22aa96b8' +revision = "3583211a4838" +down_revision = "301d22aa96b8" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('imapfolderinfo', sa.Column('uidnext', sa.Integer(), - nullable=True)) + op.add_column("imapfolderinfo", sa.Column("uidnext", sa.Integer(), nullable=True)) def downgrade(): - op.drop_column('imapfolderinfo', 'uidnext') + op.drop_column("imapfolderinfo", "uidnext") diff --git a/migrations/versions/200_update_imapfolderinfo.py b/migrations/versions/200_update_imapfolderinfo.py index cb9c0035c..c7e50a2aa 100644 --- a/migrations/versions/200_update_imapfolderinfo.py +++ b/migrations/versions/200_update_imapfolderinfo.py @@ -7,17 +7,18 @@ """ # revision identifiers, used by Alembic. -revision = 'dbf45fac873' -down_revision = '3583211a4838' +revision = "dbf45fac873" +down_revision = "3583211a4838" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('imapfolderinfo', sa.Column('last_slow_refresh', - sa.DateTime(), nullable=True)) + op.add_column( + "imapfolderinfo", sa.Column("last_slow_refresh", sa.DateTime(), nullable=True) + ) def downgrade(): - op.drop_column('imapfolderinfo', 'last_slow_refresh') + op.drop_column("imapfolderinfo", "last_slow_refresh") diff --git a/migrations/versions/201_add_sync_email_bit_to_account.py b/migrations/versions/201_add_sync_email_bit_to_account.py index 880365d92..9b6701701 100644 --- a/migrations/versions/201_add_sync_email_bit_to_account.py +++ b/migrations/versions/201_add_sync_email_bit_to_account.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '527bbdc2b0fa' -down_revision = 'dbf45fac873' +revision = "527bbdc2b0fa" +down_revision = "dbf45fac873" from alembic import op import sqlalchemy as sa @@ -16,11 +16,15 @@ def upgrade(): op.add_column( - 'account', - sa.Column('sync_email', sa.Boolean(), nullable=False, - server_default=sa.sql.expression.true()) + "account", + sa.Column( + "sync_email", + sa.Boolean(), + nullable=False, + server_default=sa.sql.expression.true(), + ), ) def downgrade(): - op.drop_column('account', 'sync_email') + op.drop_column("account", "sync_email") diff --git a/migrations/versions/202_drop_sync_raw_data_column.py b/migrations/versions/202_drop_sync_raw_data_column.py index 1ff0df688..892a28ade 100644 --- a/migrations/versions/202_drop_sync_raw_data_column.py +++ b/migrations/versions/202_drop_sync_raw_data_column.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '2e515548043b' -down_revision = '527bbdc2b0fa' +revision = "2e515548043b" +down_revision = "527bbdc2b0fa" from alembic import op import sqlalchemy as sa @@ -16,10 +16,16 @@ def upgrade(): - op.drop_column('account', 'save_raw_messages') + op.drop_column("account", "save_raw_messages") def downgrade(): - op.add_column('account', sa.Column('save_raw_messages', - mysql.TINYINT(display_width=1), - server_default='1', nullable=True)) + op.add_column( + "account", + sa.Column( + "save_raw_messages", + mysql.TINYINT(display_width=1), + server_default="1", + nullable=True, + ), + ) diff --git a/migrations/versions/203_deleted_at_constraint.py b/migrations/versions/203_deleted_at_constraint.py index de7b8093c..e52447295 100644 --- a/migrations/versions/203_deleted_at_constraint.py +++ b/migrations/versions/203_deleted_at_constraint.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '420ccbea2c5e' -down_revision = '2e515548043b' +revision = "420ccbea2c5e" +down_revision = "2e515548043b" from alembic import op from sqlalchemy.sql import text @@ -17,34 +17,44 @@ def upgrade(): conn = op.get_bind() conn.execute(text("set @@foreign_key_checks = 0;")) - op.drop_constraint(u'namespace_id', 'category', type_='unique') - op.create_unique_constraint(u'namespace_id', 'category', - ['namespace_id', 'name', 'display_name', - 'deleted_at']) + op.drop_constraint(u"namespace_id", "category", type_="unique") + op.create_unique_constraint( + u"namespace_id", + "category", + ["namespace_id", "name", "display_name", "deleted_at"], + ) from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easfoldersyncstatus'): + if not engine.has_table("easfoldersyncstatus"): return - op.drop_constraint(u'account_id_2', 'easfoldersyncstatus', type_='unique') - op.create_unique_constraint(u'account_id_2', 'easfoldersyncstatus', - ['account_id', 'device_id', 'eas_folder_id', - 'deleted_at']) + op.drop_constraint(u"account_id_2", "easfoldersyncstatus", type_="unique") + op.create_unique_constraint( + u"account_id_2", + "easfoldersyncstatus", + ["account_id", "device_id", "eas_folder_id", "deleted_at"], + ) conn.execute(text("set @@foreign_key_checks = 1;")) def downgrade(): conn = op.get_bind() conn.execute(text("set @@foreign_key_checks = 0;")) - op.drop_constraint(u'namespace_id', 'category', type_='unique') - op.create_unique_constraint(u'namespace_id', 'category', - ['namespace_id', 'name', 'display_name']) + op.drop_constraint(u"namespace_id", "category", type_="unique") + op.create_unique_constraint( + u"namespace_id", "category", ["namespace_id", "name", "display_name"] + ) from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easfoldersyncstatus'): + if not engine.has_table("easfoldersyncstatus"): return - op.drop_constraint(u'account_id_2', 'easfoldersyncstatus', type_='unique') - op.create_unique_constraint(u'account_id_2', 'easfoldersyncstatus', - ['account_id', 'device_id', 'eas_folder_id']) + op.drop_constraint(u"account_id_2", "easfoldersyncstatus", type_="unique") + op.create_unique_constraint( + u"account_id_2", + "easfoldersyncstatus", + ["account_id", "device_id", "eas_folder_id"], + ) conn.execute(text("set @@foreign_key_checks = 1;")) diff --git a/migrations/versions/204_remove_deleted_at_constraint.py b/migrations/versions/204_remove_deleted_at_constraint.py index 00f70e445..7a3460943 100644 --- a/migrations/versions/204_remove_deleted_at_constraint.py +++ b/migrations/versions/204_remove_deleted_at_constraint.py @@ -8,8 +8,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '583e083d4512' -down_revision = '420ccbea2c5e' +revision = "583e083d4512" +down_revision = "420ccbea2c5e" from alembic import op from sqlalchemy.sql import text @@ -18,34 +18,44 @@ def upgrade(): conn = op.get_bind() conn.execute(text("set @@foreign_key_checks = 0;")) - op.drop_constraint(u'namespace_id', 'category', type_='unique') - op.create_unique_constraint(u'namespace_id', 'category', - ['namespace_id', 'name', 'display_name']) + op.drop_constraint(u"namespace_id", "category", type_="unique") + op.create_unique_constraint( + u"namespace_id", "category", ["namespace_id", "name", "display_name"] + ) from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easfoldersyncstatus'): + if not engine.has_table("easfoldersyncstatus"): return - op.drop_constraint(u'account_id_2', 'easfoldersyncstatus', type_='unique') - op.create_unique_constraint(u'account_id_2', 'easfoldersyncstatus', - ['account_id', 'device_id', 'eas_folder_id']) + op.drop_constraint(u"account_id_2", "easfoldersyncstatus", type_="unique") + op.create_unique_constraint( + u"account_id_2", + "easfoldersyncstatus", + ["account_id", "device_id", "eas_folder_id"], + ) conn.execute(text("set @@foreign_key_checks = 1;")) def downgrade(): conn = op.get_bind() conn.execute(text("set @@foreign_key_checks = 0;")) - op.drop_constraint(u'namespace_id', 'category', type_='unique') - op.create_unique_constraint(u'namespace_id', 'category', - ['namespace_id', 'name', 'display_name', - 'deleted_at']) + op.drop_constraint(u"namespace_id", "category", type_="unique") + op.create_unique_constraint( + u"namespace_id", + "category", + ["namespace_id", "name", "display_name", "deleted_at"], + ) from inbox.ignition import main_engine + engine = main_engine(pool_size=1, max_overflow=0) - if not engine.has_table('easfoldersyncstatus'): + if not engine.has_table("easfoldersyncstatus"): return - op.drop_constraint(u'account_id_2', 'easfoldersyncstatus', type_='unique') - op.create_unique_constraint(u'account_id_2', 'easfoldersyncstatus', - ['account_id', 'device_id', 'eas_folder_id', - 'deleted_at']) + op.drop_constraint(u"account_id_2", "easfoldersyncstatus", type_="unique") + op.create_unique_constraint( + u"account_id_2", + "easfoldersyncstatus", + ["account_id", "device_id", "eas_folder_id", "deleted_at"], + ) conn.execute(text("set @@foreign_key_checks = 1;")) diff --git a/migrations/versions/205_fix_categories_cascade.py b/migrations/versions/205_fix_categories_cascade.py index a507bb3e1..98e738b28 100644 --- a/migrations/versions/205_fix_categories_cascade.py +++ b/migrations/versions/205_fix_categories_cascade.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '302d9f6b22f3' -down_revision = '583e083d4512' +revision = "302d9f6b22f3" +down_revision = "583e083d4512" from alembic import op from sqlalchemy.sql import text @@ -20,12 +20,20 @@ def upgrade(): conn.execute(text("SET FOREIGN_KEY_CHECKS=0;")) conn.execute(text("ALTER TABLE folder DROP FOREIGN KEY folder_ibfk_1")) - conn.execute(text("ALTER TABLE folder ADD CONSTRAINT folder_ibfk_1 FOREIGN KEY " - "(`category_id`) REFERENCES `category` (`id`) ON DELETE CASCADE")) + conn.execute( + text( + "ALTER TABLE folder ADD CONSTRAINT folder_ibfk_1 FOREIGN KEY " + "(`category_id`) REFERENCES `category` (`id`) ON DELETE CASCADE" + ) + ) conn.execute(text("ALTER TABLE label DROP FOREIGN KEY label_ibfk_1")) - conn.execute(text("ALTER TABLE label ADD CONSTRAINT label_ibfk_1 FOREIGN KEY " - "(`category_id`) REFERENCES `category` (`id`) ON DELETE CASCADE")) + conn.execute( + text( + "ALTER TABLE label ADD CONSTRAINT label_ibfk_1 FOREIGN KEY " + "(`category_id`) REFERENCES `category` (`id`) ON DELETE CASCADE" + ) + ) def downgrade(): @@ -34,9 +42,17 @@ def downgrade(): conn.execute(text("SET FOREIGN_KEY_CHECKS=0;")) conn.execute(text("ALTER TABLE folder DROP FOREIGN KEY folder_ibfk_1")) - conn.execute(text("ALTER TABLE folder ADD CONSTRAINT folder_ibfk_1 FOREIGN KEY " - "(`category_id`) REFERENCES `category` (`id`)")) + conn.execute( + text( + "ALTER TABLE folder ADD CONSTRAINT folder_ibfk_1 FOREIGN KEY " + "(`category_id`) REFERENCES `category` (`id`)" + ) + ) conn.execute(text("ALTER TABLE label DROP FOREIGN KEY label_ibfk_1")) - conn.execute(text("ALTER TABLE label ADD CONSTRAINT label_ibfk_1 FOREIGN KEY " - "(`category_id`) REFERENCES `category` (`id`)")) + conn.execute( + text( + "ALTER TABLE label ADD CONSTRAINT label_ibfk_1 FOREIGN KEY " + "(`category_id`) REFERENCES `category` (`id`)" + ) + ) diff --git a/migrations/versions/206_add_phone_numbers_table.py b/migrations/versions/206_add_phone_numbers_table.py index 26cdf02ac..774cfd946 100644 --- a/migrations/versions/206_add_phone_numbers_table.py +++ b/migrations/versions/206_add_phone_numbers_table.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = 'gu8eqpm6f2x1n0fg' -down_revision = '302d9f6b22f3' +revision = "gu8eqpm6f2x1n0fg" +down_revision = "302d9f6b22f3" from alembic import op import sqlalchemy as sa @@ -17,28 +17,30 @@ def upgrade(): op.create_table( - 'phonenumber', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('contact_id', sa.Integer(), nullable=False), - sa.Column('type', mysql.VARCHAR(length=64), nullable=True), - sa.Column('number', mysql.VARCHAR(length=64), nullable=False), - sa.ForeignKeyConstraint( - ['contact_id'], [u'contact.id'], ondelete='CASCADE' - ), - sa.PrimaryKeyConstraint('id'), + "phonenumber", + sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("contact_id", sa.Integer(), nullable=False), + sa.Column("type", mysql.VARCHAR(length=64), nullable=True), + sa.Column("number", mysql.VARCHAR(length=64), nullable=False), + sa.ForeignKeyConstraint(["contact_id"], [u"contact.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + "ix_phonenumber_created_at", "phonenumber", ["created_at"], unique=False + ) + op.create_index( + "ix_phonenumber_updated_at", "phonenumber", ["updated_at"], unique=False + ) + op.create_index( + "ix_phonenumber_contact_id", "phonenumber", ["contact_id"], unique=False + ) + op.create_index( + "ix_phonenumber_deleted_at", "phonenumber", ["deleted_at"], unique=False ) - op.create_index('ix_phonenumber_created_at', - 'phonenumber', ['created_at'], unique=False) - op.create_index('ix_phonenumber_updated_at', - 'phonenumber', ['updated_at'], unique=False) - op.create_index('ix_phonenumber_contact_id', - 'phonenumber', ['contact_id'], unique=False) - op.create_index('ix_phonenumber_deleted_at', - 'phonenumber', ['deleted_at'], unique=False) def downgrade(): - op.drop_table('phonenumber') + op.drop_table("phonenumber") diff --git a/migrations/versions/207_add_contact_search_index_service_cursor_.py b/migrations/versions/207_add_contact_search_index_service_cursor_.py index ec21724c0..2497961d8 100644 --- a/migrations/versions/207_add_contact_search_index_service_cursor_.py +++ b/migrations/versions/207_add_contact_search_index_service_cursor_.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '4b225df49747' -down_revision = 'gu8eqpm6f2x1n0fg' +revision = "4b225df49747" +down_revision = "gu8eqpm6f2x1n0fg" from alembic import op import sqlalchemy as sa @@ -16,37 +16,61 @@ def upgrade(): - op.create_table('contactsearchindexcursor', - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('transaction_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['transaction_id'], - [u'transaction.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('ix_contactsearchindexcursor_created_at', - 'contactsearchindexcursor', ['created_at'], unique=False) - op.create_index('ix_contactsearchindexcursor_deleted_at', - 'contactsearchindexcursor', ['deleted_at'], unique=False) - op.create_index('ix_contactsearchindexcursor_transaction_id', - 'contactsearchindexcursor', ['transaction_id'], unique=False) - op.create_index('ix_contactsearchindexcursor_updated_at', - 'contactsearchindexcursor', ['updated_at'], unique=False) - op.drop_table('searchindexcursor') + op.create_table( + "contactsearchindexcursor", + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("transaction_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["transaction_id"], [u"transaction.id"],), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + "ix_contactsearchindexcursor_created_at", + "contactsearchindexcursor", + ["created_at"], + unique=False, + ) + op.create_index( + "ix_contactsearchindexcursor_deleted_at", + "contactsearchindexcursor", + ["deleted_at"], + unique=False, + ) + op.create_index( + "ix_contactsearchindexcursor_transaction_id", + "contactsearchindexcursor", + ["transaction_id"], + unique=False, + ) + op.create_index( + "ix_contactsearchindexcursor_updated_at", + "contactsearchindexcursor", + ["updated_at"], + unique=False, + ) + op.drop_table("searchindexcursor") def downgrade(): - op.create_table('searchindexcursor', - sa.Column('created_at', mysql.DATETIME(), nullable=False), - sa.Column('updated_at', mysql.DATETIME(), nullable=False), - sa.Column('deleted_at', mysql.DATETIME(), nullable=True), - sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), - sa.Column('transaction_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['transaction_id'], [u'transaction.id'], name=u'searchindexcursor_ibfk_1'), - sa.PrimaryKeyConstraint('id'), - mysql_default_charset=u'utf8mb4', - mysql_engine=u'InnoDB' - ) - op.drop_table('contactsearchindexcursor') + op.create_table( + "searchindexcursor", + sa.Column("created_at", mysql.DATETIME(), nullable=False), + sa.Column("updated_at", mysql.DATETIME(), nullable=False), + sa.Column("deleted_at", mysql.DATETIME(), nullable=True), + sa.Column("id", mysql.INTEGER(display_width=11), nullable=False), + sa.Column( + "transaction_id", + mysql.INTEGER(display_width=11), + autoincrement=False, + nullable=True, + ), + sa.ForeignKeyConstraint( + ["transaction_id"], [u"transaction.id"], name=u"searchindexcursor_ibfk_1" + ), + sa.PrimaryKeyConstraint("id"), + mysql_default_charset=u"utf8mb4", + mysql_engine=u"InnoDB", + ) + op.drop_table("contactsearchindexcursor") diff --git a/migrations/versions/208_drop_easuid_uniqueconstraint.py 
b/migrations/versions/208_drop_easuid_uniqueconstraint.py index 776166af8..f071ece19 100644 --- a/migrations/versions/208_drop_easuid_uniqueconstraint.py +++ b/migrations/versions/208_drop_easuid_uniqueconstraint.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '1962d17d1c0a' -down_revision = '4b225df49747' +revision = "1962d17d1c0a" +down_revision = "4b225df49747" from alembic import op from sqlalchemy.sql import text @@ -16,11 +16,11 @@ def upgrade(): conn = op.get_bind() - if not conn.engine.has_table('easuid'): + if not conn.engine.has_table("easuid"): return - conn.execute(text('set @@lock_wait_timeout = 20;')) - conn.execute(text('set @@foreign_key_checks = 0;')) - op.drop_constraint(u'easaccount_id_2', 'easuid', type_='unique') + conn.execute(text("set @@lock_wait_timeout = 20;")) + conn.execute(text("set @@foreign_key_checks = 0;")) + op.drop_constraint(u"easaccount_id_2", "easuid", type_="unique") def downgrade(): diff --git a/migrations/versions/209_recreate_easuid_index.py b/migrations/versions/209_recreate_easuid_index.py index 692331676..6f96ca6e7 100644 --- a/migrations/versions/209_recreate_easuid_index.py +++ b/migrations/versions/209_recreate_easuid_index.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '3618838f5bc6' -down_revision = '1962d17d1c0a' +revision = "3618838f5bc6" +down_revision = "1962d17d1c0a" from alembic import op from sqlalchemy.sql import text @@ -16,13 +16,15 @@ def upgrade(): conn = op.get_bind() - if not conn.engine.has_table('easuid'): + if not conn.engine.has_table("easuid"): return - conn.execute(text('set @@lock_wait_timeout = 20;')) - conn.execute(text('set @@foreign_key_checks = 0;')) - op.create_index(u'ix_easaccount_id_2', 'easuid', - ['easaccount_id', 'device_id', 'easfoldersyncstatus_id', - 'server_id']) + conn.execute(text("set @@lock_wait_timeout = 20;")) + conn.execute(text("set @@foreign_key_checks = 0;")) + op.create_index( + u"ix_easaccount_id_2", + "easuid", + ["easaccount_id", "device_id", "easfoldersyncstatus_id", "server_id"], + ) def downgrade(): diff --git a/migrations/versions/210_drop_message_full_body_id_fk.py b/migrations/versions/210_drop_message_full_body_id_fk.py index 6904bb691..6c19a8ac8 100644 --- a/migrations/versions/210_drop_message_full_body_id_fk.py +++ b/migrations/versions/210_drop_message_full_body_id_fk.py @@ -7,14 +7,14 @@ """ # revision identifiers, used by Alembic. -revision = '3613ca83ea40' -down_revision = '3618838f5bc6' +revision = "3613ca83ea40" +down_revision = "3618838f5bc6" from alembic import op def upgrade(): - op.drop_constraint(u'full_body_id_fk', 'message', type_='foreignkey') + op.drop_constraint(u"full_body_id_fk", "message", type_="foreignkey") def downgrade(): diff --git a/migrations/versions/211_drop_message_full_body_id.py b/migrations/versions/211_drop_message_full_body_id.py index 5d92147d7..00f468a56 100644 --- a/migrations/versions/211_drop_message_full_body_id.py +++ b/migrations/versions/211_drop_message_full_body_id.py @@ -7,14 +7,14 @@ """ # revision identifiers, used by Alembic. 
-revision = '31aae1ecb374' -down_revision = '3613ca83ea40' +revision = "31aae1ecb374" +down_revision = "3613ca83ea40" from alembic import op def upgrade(): - op.drop_column('message', 'full_body_id') + op.drop_column("message", "full_body_id") def downgrade(): diff --git a/migrations/versions/212_add_columns_for_smtp_imap_specific_auth.py b/migrations/versions/212_add_columns_for_smtp_imap_specific_auth.py index 8756384cf..45d2fd3f6 100644 --- a/migrations/versions/212_add_columns_for_smtp_imap_specific_auth.py +++ b/migrations/versions/212_add_columns_for_smtp_imap_specific_auth.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '501f6b2fef28' -down_revision = '31aae1ecb374' +revision = "501f6b2fef28" +down_revision = "31aae1ecb374" from alembic import op, context from sqlalchemy.sql import text @@ -20,27 +20,35 @@ def upgrade(): conn.execute(text("set @@foreign_key_checks = 0;")) # Add new columns + ForeignKey constraints. - shard_id = int(context.get_x_argument(as_dictionary=True).get('shard_id')) + shard_id = int(context.get_x_argument(as_dictionary=True).get("shard_id")) if shard_id == 0: - conn.execute(text("ALTER TABLE genericaccount " - "ADD COLUMN imap_username CHAR(255) DEFAULT NULL, " - "ADD COLUMN smtp_username CHAR(255) DEFAULT NULL, " - "ADD COLUMN imap_password_id INT(11), " - "ADD COLUMN smtp_password_id INT(11), " - "ADD CONSTRAINT imap_password_id_ifbk FOREIGN KEY " - "(`imap_password_id`) REFERENCES `secret` (`id`), " - "ADD CONSTRAINT smtp_password_id_ifbk FOREIGN KEY " - "(`smtp_password_id`) REFERENCES `secret` (`id`);")) + conn.execute( + text( + "ALTER TABLE genericaccount " + "ADD COLUMN imap_username CHAR(255) DEFAULT NULL, " + "ADD COLUMN smtp_username CHAR(255) DEFAULT NULL, " + "ADD COLUMN imap_password_id INT(11), " + "ADD COLUMN smtp_password_id INT(11), " + "ADD CONSTRAINT imap_password_id_ifbk FOREIGN KEY " + "(`imap_password_id`) REFERENCES `secret` (`id`), " + "ADD CONSTRAINT smtp_password_id_ifbk FOREIGN KEY " + "(`smtp_password_id`) REFERENCES `secret` (`id`);" + ) + ) else: - conn.execute(text("ALTER TABLE genericaccount " - "ADD COLUMN imap_username CHAR(255) DEFAULT NULL, " - "ADD COLUMN smtp_username CHAR(255) DEFAULT NULL, " - "ADD COLUMN imap_password_id BIGINT(20), " - "ADD COLUMN smtp_password_id BIGINT(20), " - "ADD CONSTRAINT imap_password_id_ifbk FOREIGN KEY " - "(`imap_password_id`) REFERENCES `secret` (`id`), " - "ADD CONSTRAINT smtp_password_id_ifbk FOREIGN KEY " - "(`smtp_password_id`) REFERENCES `secret` (`id`);")) + conn.execute( + text( + "ALTER TABLE genericaccount " + "ADD COLUMN imap_username CHAR(255) DEFAULT NULL, " + "ADD COLUMN smtp_username CHAR(255) DEFAULT NULL, " + "ADD COLUMN imap_password_id BIGINT(20), " + "ADD COLUMN smtp_password_id BIGINT(20), " + "ADD CONSTRAINT imap_password_id_ifbk FOREIGN KEY " + "(`imap_password_id`) REFERENCES `secret` (`id`), " + "ADD CONSTRAINT smtp_password_id_ifbk FOREIGN KEY " + "(`smtp_password_id`) REFERENCES `secret` (`id`);" + ) + ) def downgrade(): diff --git a/migrations/versions/213_add_metadata_table.py b/migrations/versions/213_add_metadata_table.py index 84fdb2e97..3f675e457 100644 --- a/migrations/versions/213_add_metadata_table.py +++ b/migrations/versions/213_add_metadata_table.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = 'bc1119471fe' -down_revision = '501f6b2fef28' +revision = "bc1119471fe" +down_revision = "501f6b2fef28" from alembic import op, context import sqlalchemy as sa @@ -17,56 +17,70 @@ def upgrade(): from inbox.sqlalchemy_ext.util import JSON - shard_id = int(context.get_x_argument(as_dictionary=True).get('shard_id')) + shard_id = int(context.get_x_argument(as_dictionary=True).get("shard_id")) namespace_id_type = sa.Integer() if shard_id == 0 else sa.BigInteger() op.create_table( - 'metadata', - sa.Column('public_id', sa.BINARY(length=16), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('id', sa.BigInteger(), nullable=False), - sa.Column('app_id', sa.Integer(), nullable=True), - sa.Column('app_client_id', sa.BINARY(length=16), nullable=False), - sa.Column('app_type', sa.String(length=20), nullable=False), - sa.Column('namespace_id', namespace_id_type, nullable=False), - sa.Column('object_public_id', sa.String(length=191), nullable=False), - sa.Column('object_type', sa.String(length=20), nullable=False), - sa.Column('object_id', sa.BigInteger(), nullable=False), - sa.Column('value', JSON(), nullable=True), - sa.Column('version', sa.Integer(), server_default='0', nullable=True), - sa.ForeignKeyConstraint(['namespace_id'], [u'namespace.id'], - ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') + "metadata", + sa.Column("public_id", sa.BINARY(length=16), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("id", sa.BigInteger(), nullable=False), + sa.Column("app_id", sa.Integer(), nullable=True), + sa.Column("app_client_id", sa.BINARY(length=16), nullable=False), + sa.Column("app_type", sa.String(length=20), nullable=False), + sa.Column("namespace_id", namespace_id_type, nullable=False), + sa.Column("object_public_id", sa.String(length=191), nullable=False), + sa.Column("object_type", sa.String(length=20), nullable=False), + sa.Column("object_id", sa.BigInteger(), nullable=False), + sa.Column("value", JSON(), nullable=True), + sa.Column("version", sa.Integer(), server_default="0", nullable=True), + sa.ForeignKeyConstraint( + ["namespace_id"], [u"namespace.id"], ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + op.f("ix_metadata_created_at"), "metadata", ["created_at"], unique=False + ) + op.create_index( + op.f("ix_metadata_deleted_at"), "metadata", ["deleted_at"], unique=False + ) + op.create_index( + op.f("ix_metadata_object_id"), "metadata", ["object_id"], unique=False + ) + op.create_index( + op.f("ix_metadata_object_public_id"), + "metadata", + ["object_public_id"], + unique=False, + ) + op.create_index( + op.f("ix_metadata_public_id"), "metadata", ["public_id"], unique=False + ) + op.create_index( + op.f("ix_metadata_updated_at"), "metadata", ["updated_at"], unique=False + ) + op.create_index( + "ix_obj_public_id_app_id", + "metadata", + ["object_public_id", "app_id"], + unique=True, ) - op.create_index(op.f('ix_metadata_created_at'), 'metadata', - ['created_at'], unique=False) - op.create_index(op.f('ix_metadata_deleted_at'), 'metadata', - ['deleted_at'], unique=False) - op.create_index(op.f('ix_metadata_object_id'), 'metadata', - ['object_id'], unique=False) - op.create_index(op.f('ix_metadata_object_public_id'), 'metadata', - ['object_public_id'], 
unique=False) - op.create_index(op.f('ix_metadata_public_id'), 'metadata', - ['public_id'], unique=False) - op.create_index(op.f('ix_metadata_updated_at'), 'metadata', - ['updated_at'], unique=False) - op.create_index('ix_obj_public_id_app_id', 'metadata', - ['object_public_id', 'app_id'], unique=True) conn = op.get_bind() increment = (shard_id << 48) + 1 - conn.execute('ALTER TABLE metadata AUTO_INCREMENT={}'.format(increment)) + conn.execute("ALTER TABLE metadata AUTO_INCREMENT={}".format(increment)) def downgrade(): - op.drop_index('ix_obj_public_id_app_id', table_name='metadata') - op.drop_index(op.f('ix_metadata_updated_at'), table_name='metadata') - op.drop_index(op.f('ix_metadata_public_id'), table_name='metadata') - op.drop_index(op.f('ix_metadata_object_public_id'), table_name='metadata') - op.drop_index(op.f('ix_metadata_object_id'), table_name='metadata') - op.drop_index(op.f('ix_metadata_deleted_at'), table_name='metadata') - op.drop_index(op.f('ix_metadata_created_at'), table_name='metadata') - op.drop_index(op.f('ix_metadata_app_public_id'), table_name='metadata') - op.drop_table('metadata') + op.drop_index("ix_obj_public_id_app_id", table_name="metadata") + op.drop_index(op.f("ix_metadata_updated_at"), table_name="metadata") + op.drop_index(op.f("ix_metadata_public_id"), table_name="metadata") + op.drop_index(op.f("ix_metadata_object_public_id"), table_name="metadata") + op.drop_index(op.f("ix_metadata_object_id"), table_name="metadata") + op.drop_index(op.f("ix_metadata_deleted_at"), table_name="metadata") + op.drop_index(op.f("ix_metadata_created_at"), table_name="metadata") + op.drop_index(op.f("ix_metadata_app_public_id"), table_name="metadata") + op.drop_table("metadata") diff --git a/migrations/versions/214_introduce_accounttransaction.py b/migrations/versions/214_introduce_accounttransaction.py index 302cc2309..19cca17d6 100644 --- a/migrations/versions/214_introduce_accounttransaction.py +++ b/migrations/versions/214_introduce_accounttransaction.py @@ -7,51 +7,75 @@ """ # revision identifiers, used by Alembic. 
-revision = '4b83e064dd49' -down_revision = 'bc1119471fe' +revision = "4b83e064dd49" +down_revision = "bc1119471fe" from alembic import op, context import sqlalchemy as sa def upgrade(): - shard_id = int(context.config.get_main_option('shard_id')) + shard_id = int(context.config.get_main_option("shard_id")) namespace_id_type = sa.Integer() if shard_id == 0 else sa.BigInteger() - op.create_table('accounttransaction', - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('id', sa.BigInteger(), nullable=False), - sa.Column('public_id', sa.BINARY(length=16), nullable=False), - sa.Column('namespace_id', namespace_id_type, nullable=True), - sa.Column('object_type', sa.String(20), nullable=False), - sa.Column('record_id', sa.BigInteger(), nullable=False), - sa.Column('object_public_id', sa.String(191), nullable=False), - sa.Column('command', sa.Enum('insert', 'update', 'delete'), - nullable=False), - sa.PrimaryKeyConstraint('id'), - sa.ForeignKeyConstraint(['namespace_id'], - [u'namespace.id'],) - ) - op.create_index('ix_accounttransaction_created_at', - 'accounttransaction', ['created_at'], unique=False) - op.create_index('ix_accounttransaction_updated_at', - 'accounttransaction', ['updated_at'], unique=False) - op.create_index('ix_accounttransaction_deleted_at', - 'accounttransaction', ['deleted_at'], unique=False) - op.create_index('ix_accounttransaction_table_name', - 'accounttransaction', ['object_type'], unique=False) - op.create_index('ix_accounttransaction_command', - 'accounttransaction', ['command'], unique=False) - op.create_index('ix_accounttransaction_object_type_record_id', - 'accounttransaction', ['object_type', 'record_id'], unique=False) - op.create_index('ix_accounttransaction_namespace_id_created_at', - 'accounttransaction', ['namespace_id', 'created_at'], unique=False) + op.create_table( + "accounttransaction", + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("id", sa.BigInteger(), nullable=False), + sa.Column("public_id", sa.BINARY(length=16), nullable=False), + sa.Column("namespace_id", namespace_id_type, nullable=True), + sa.Column("object_type", sa.String(20), nullable=False), + sa.Column("record_id", sa.BigInteger(), nullable=False), + sa.Column("object_public_id", sa.String(191), nullable=False), + sa.Column("command", sa.Enum("insert", "update", "delete"), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.ForeignKeyConstraint(["namespace_id"], [u"namespace.id"],), + ) + op.create_index( + "ix_accounttransaction_created_at", + "accounttransaction", + ["created_at"], + unique=False, + ) + op.create_index( + "ix_accounttransaction_updated_at", + "accounttransaction", + ["updated_at"], + unique=False, + ) + op.create_index( + "ix_accounttransaction_deleted_at", + "accounttransaction", + ["deleted_at"], + unique=False, + ) + op.create_index( + "ix_accounttransaction_table_name", + "accounttransaction", + ["object_type"], + unique=False, + ) + op.create_index( + "ix_accounttransaction_command", "accounttransaction", ["command"], unique=False + ) + op.create_index( + "ix_accounttransaction_object_type_record_id", + "accounttransaction", + ["object_type", "record_id"], + unique=False, + ) + op.create_index( + "ix_accounttransaction_namespace_id_created_at", + "accounttransaction", + ["namespace_id", 
"created_at"], + unique=False, + ) conn = op.get_bind() increment = (shard_id << 48) + 1 - conn.execute('ALTER TABLE accounttransaction AUTO_INCREMENT={}'.format(increment)) + conn.execute("ALTER TABLE accounttransaction AUTO_INCREMENT={}".format(increment)) def downgrade(): diff --git a/migrations/versions/215_add_actionlog_status_type_index.py b/migrations/versions/215_add_actionlog_status_type_index.py index d8370719c..c5965ac1e 100644 --- a/migrations/versions/215_add_actionlog_status_type_index.py +++ b/migrations/versions/215_add_actionlog_status_type_index.py @@ -7,16 +7,15 @@ """ # revision identifiers, used by Alembic. -revision = '4bfecbcc7dbd' -down_revision = '4b83e064dd49' +revision = "4bfecbcc7dbd" +down_revision = "4b83e064dd49" from alembic import op def upgrade(): - op.create_index('idx_status_type', 'actionlog', ['status', 'type'], - unique=False) + op.create_index("idx_status_type", "actionlog", ["status", "type"], unique=False) def downgrade(): - op.drop_index('idx_status_type', table_name='actionlog') + op.drop_index("idx_status_type", table_name="actionlog") diff --git a/migrations/versions/216_add_folder_separator_column_for_generic_.py b/migrations/versions/216_add_folder_separator_column_for_generic_.py index 53e0dd154..92d1e3181 100644 --- a/migrations/versions/216_add_folder_separator_column_for_generic_.py +++ b/migrations/versions/216_add_folder_separator_column_for_generic_.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '4f8e995d1dba' -down_revision = '4bfecbcc7dbd' +revision = "4f8e995d1dba" +down_revision = "4bfecbcc7dbd" from alembic import op from sqlalchemy.sql import text @@ -17,8 +17,12 @@ def upgrade(): conn = op.get_bind() conn.execute(text("set @@lock_wait_timeout = 20;")) - conn.execute(text("ALTER TABLE genericaccount ADD COLUMN folder_separator varchar(16)")) - conn.execute(text("ALTER TABLE genericaccount ADD COLUMN folder_prefix varchar(191)")) + conn.execute( + text("ALTER TABLE genericaccount ADD COLUMN folder_separator varchar(16)") + ) + conn.execute( + text("ALTER TABLE genericaccount ADD COLUMN folder_prefix varchar(191)") + ) def downgrade(): diff --git a/migrations/versions/217_add_genericaccount_ssl_required.py b/migrations/versions/217_add_genericaccount_ssl_required.py index d6cfaa4c3..c9456073b 100644 --- a/migrations/versions/217_add_genericaccount_ssl_required.py +++ b/migrations/versions/217_add_genericaccount_ssl_required.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '3d8b5977eaa8' -down_revision = '4f8e995d1dba' +revision = "3d8b5977eaa8" +down_revision = "4f8e995d1dba" from alembic import op from sqlalchemy.sql import text @@ -19,8 +19,7 @@ def upgrade(): conn.execute(text("set @@lock_wait_timeout = 20;")) conn.execute(text("set @@foreign_key_checks = 0;")) - conn.execute(text("ALTER TABLE genericaccount " - "ADD COLUMN ssl_required BOOLEAN;")) + conn.execute(text("ALTER TABLE genericaccount " "ADD COLUMN ssl_required BOOLEAN;")) def downgrade(): diff --git a/migrations/versions/218_modify_metadata_indexes.py b/migrations/versions/218_modify_metadata_indexes.py index 73479481b..a9f0763aa 100644 --- a/migrations/versions/218_modify_metadata_indexes.py +++ b/migrations/versions/218_modify_metadata_indexes.py @@ -7,17 +7,19 @@ """ # revision identifiers, used by Alembic. 
-revision = '3b1cc8580fc2' -down_revision = '3d8b5977eaa8' +revision = "3b1cc8580fc2" +down_revision = "3d8b5977eaa8" from alembic import op def upgrade(): - op.create_index('ix_namespace_id_app_id', 'metadata', ['namespace_id', 'app_id'], unique=False) - op.drop_index('ix_metadata_object_id', table_name='metadata') + op.create_index( + "ix_namespace_id_app_id", "metadata", ["namespace_id", "app_id"], unique=False + ) + op.drop_index("ix_metadata_object_id", table_name="metadata") def downgrade(): - op.create_index('ix_metadata_object_id', 'metadata', ['object_id'], unique=False) - op.drop_index('ix_namespace_id_app_id', table_name='metadata') + op.create_index("ix_metadata_object_id", "metadata", ["object_id"], unique=False) + op.drop_index("ix_namespace_id_app_id", table_name="metadata") diff --git a/migrations/versions/219_accounttransaction_namespace_id_cascade.py b/migrations/versions/219_accounttransaction_namespace_id_cascade.py index 4e74e51e3..b83d68092 100644 --- a/migrations/versions/219_accounttransaction_namespace_id_cascade.py +++ b/migrations/versions/219_accounttransaction_namespace_id_cascade.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '2b2205db4964' -down_revision = '3b1cc8580fc2' +revision = "2b2205db4964" +down_revision = "3b1cc8580fc2" from alembic import op from sqlalchemy.sql import text @@ -19,9 +19,17 @@ def upgrade(): conn.execute(text("set @@lock_wait_timeout = 20;")) conn.execute(text("SET FOREIGN_KEY_CHECKS=0;")) - conn.execute(text("ALTER TABLE accounttransaction DROP FOREIGN KEY accounttransaction_ibfk_1")) - conn.execute(text("ALTER TABLE accounttransaction ADD CONSTRAINT accounttransaction_ibfk_1 FOREIGN KEY " - "(`namespace_id`) REFERENCES `namespace` (`id`) ON DELETE CASCADE")) + conn.execute( + text( + "ALTER TABLE accounttransaction DROP FOREIGN KEY accounttransaction_ibfk_1" + ) + ) + conn.execute( + text( + "ALTER TABLE accounttransaction ADD CONSTRAINT accounttransaction_ibfk_1 FOREIGN KEY " + "(`namespace_id`) REFERENCES `namespace` (`id`) ON DELETE CASCADE" + ) + ) def downgrade(): @@ -29,6 +37,14 @@ def downgrade(): conn.execute(text("set @@lock_wait_timeout = 20;")) conn.execute(text("SET FOREIGN_KEY_CHECKS=0;")) - conn.execute(text("ALTER TABLE accounttransaction DROP FOREIGN KEY accounttransaction_ibfk_1")) - conn.execute(text("ALTER TABLE accounttransaction ADD CONSTRAINT accounttransaction_ibfk_1 FOREIGN KEY " - "(`namespace_id`) REFERENCES `namespace` (`id`)")) + conn.execute( + text( + "ALTER TABLE accounttransaction DROP FOREIGN KEY accounttransaction_ibfk_1" + ) + ) + conn.execute( + text( + "ALTER TABLE accounttransaction ADD CONSTRAINT accounttransaction_ibfk_1 FOREIGN KEY " + "(`namespace_id`) REFERENCES `namespace` (`id`)" + ) + ) diff --git a/migrations/versions/220_folder_separators_again.py b/migrations/versions/220_folder_separators_again.py index 1e1638aa8..1a55acaf6 100644 --- a/migrations/versions/220_folder_separators_again.py +++ b/migrations/versions/220_folder_separators_again.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '59e1cc690da9' -down_revision = '2b2205db4964' +revision = "59e1cc690da9" +down_revision = "2b2205db4964" from alembic import op from sqlalchemy.sql import text @@ -19,22 +19,34 @@ def upgrade(): conn.execute(text("set @@lock_wait_timeout = 20;")) # Check if the folder_separator column is defined or not. 
- res = conn.execute(text("SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE " - "TABLE_NAME = 'genericaccount' AND COLUMN_NAME " - "= 'folder_separator' AND TABLE_SCHEMA = DATABASE()")) + res = conn.execute( + text( + "SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE " + "TABLE_NAME = 'genericaccount' AND COLUMN_NAME " + "= 'folder_separator' AND TABLE_SCHEMA = DATABASE()" + ) + ) if res.fetchall() == []: # Execute migration only if the field isn't defined yet. - conn.execute(text("ALTER TABLE genericaccount ADD COLUMN folder_separator varchar(16)")) - - res = conn.execute(text("SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE " - "TABLE_NAME = 'genericaccount' AND COLUMN_NAME " - "= 'folder_prefix' AND TABLE_SCHEMA = DATABASE()")) + conn.execute( + text("ALTER TABLE genericaccount ADD COLUMN folder_separator varchar(16)") + ) + + res = conn.execute( + text( + "SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE " + "TABLE_NAME = 'genericaccount' AND COLUMN_NAME " + "= 'folder_prefix' AND TABLE_SCHEMA = DATABASE()" + ) + ) # Check if the folder_prefix column is defined or not. if res.fetchall() == []: # Execute migration only if the field isn't defined yet. - conn.execute(text("ALTER TABLE genericaccount ADD COLUMN folder_prefix varchar(191)")) + conn.execute( + text("ALTER TABLE genericaccount ADD COLUMN folder_prefix varchar(191)") + ) def downgrade(): diff --git a/migrations/versions/221_fix_category_column_defaults.py b/migrations/versions/221_fix_category_column_defaults.py index 4caf20092..81d905719 100644 --- a/migrations/versions/221_fix_category_column_defaults.py +++ b/migrations/versions/221_fix_category_column_defaults.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '516024977fc5' -down_revision = '59e1cc690da9' +revision = "516024977fc5" +down_revision = "59e1cc690da9" from alembic import op from sqlalchemy.sql import text @@ -18,18 +18,30 @@ def upgrade(): conn = op.get_bind() conn.execute(text("set @@lock_wait_timeout = 20;")) - conn.execute(text("ALTER TABLE category " - "MODIFY COLUMN name VARCHAR(191) NOT NULL DEFAULT '', " - "MODIFY COLUMN deleted_at DATETIME NOT NULL DEFAULT '1970-01-01 00:00:00'")) - - conn.execute(text("ALTER TABLE folder " - "MODIFY COLUMN name VARCHAR(191) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL, " - "MODIFY COLUMN canonical_name VARCHAR(191) NOT NULL DEFAULT '', " - "DROP INDEX account_id, " - "ADD CONSTRAINT UNIQUE account_id (account_id, name, canonical_name)")) - - conn.execute(text("ALTER TABLE label " - "MODIFY COLUMN canonical_name VARCHAR(191) NOT NULL DEFAULT ''")) + conn.execute( + text( + "ALTER TABLE category " + "MODIFY COLUMN name VARCHAR(191) NOT NULL DEFAULT '', " + "MODIFY COLUMN deleted_at DATETIME NOT NULL DEFAULT '1970-01-01 00:00:00'" + ) + ) + + conn.execute( + text( + "ALTER TABLE folder " + "MODIFY COLUMN name VARCHAR(191) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL, " + "MODIFY COLUMN canonical_name VARCHAR(191) NOT NULL DEFAULT '', " + "DROP INDEX account_id, " + "ADD CONSTRAINT UNIQUE account_id (account_id, name, canonical_name)" + ) + ) + + conn.execute( + text( + "ALTER TABLE label " + "MODIFY COLUMN canonical_name VARCHAR(191) NOT NULL DEFAULT ''" + ) + ) def downgrade(): diff --git a/migrations/versions/222_remove_unused_transaction_indices.py b/migrations/versions/222_remove_unused_transaction_indices.py index bc037d941..f4f25066e 100644 --- a/migrations/versions/222_remove_unused_transaction_indices.py +++ b/migrations/versions/222_remove_unused_transaction_indices.py @@ -7,19 
+7,24 @@ """ # revision identifiers, used by Alembic. -revision = '361972a1de3e' -down_revision = '516024977fc5' +revision = "361972a1de3e" +down_revision = "516024977fc5" from alembic import op def upgrade(): - op.drop_index('ix_transaction_table_name', table_name='transaction') - op.drop_index('namespace_id_deleted_at', table_name='transaction') + op.drop_index("ix_transaction_table_name", table_name="transaction") + op.drop_index("namespace_id_deleted_at", table_name="transaction") def downgrade(): - op.create_index('namespace_id_deleted_at', 'transaction', - ['namespace_id', 'deleted_at'], unique=False) - op.create_index('ix_transaction_table_name', 'transaction', - ['object_type'], unique=False) + op.create_index( + "namespace_id_deleted_at", + "transaction", + ["namespace_id", "deleted_at"], + unique=False, + ) + op.create_index( + "ix_transaction_table_name", "transaction", ["object_type"], unique=False + ) diff --git a/migrations/versions/223_time_mixins_fix.py b/migrations/versions/223_time_mixins_fix.py index c068df32e..a324831ea 100644 --- a/migrations/versions/223_time_mixins_fix.py +++ b/migrations/versions/223_time_mixins_fix.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '539ce0291298' -down_revision = '361972a1de3e' +revision = "539ce0291298" +down_revision = "361972a1de3e" from alembic import op import sqlalchemy as sa @@ -18,76 +18,143 @@ def upgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE accounttransaction " - " MODIFY COLUMN updated_at DATETIME NOT NULL DEFAULT '1970-01-01 00:00:00'")) - conn.execute(text("ALTER TABLE messagecategory" - " MODIFY COLUMN updated_at DATETIME NOT NULL DEFAULT '1970-01-01 00:00:00'")) - conn.execute(text("ALTER TABLE messagecontactassociation" - " MODIFY COLUMN updated_at DATETIME NOT NULL DEFAULT '1970-01-01 00:00:00'")) - conn.execute(text("ALTER TABLE transaction" - " MODIFY COLUMN updated_at DATETIME NOT NULL DEFAULT '1970-01-01 00:00:00'")) + conn.execute( + text( + "ALTER TABLE accounttransaction " + " MODIFY COLUMN updated_at DATETIME NOT NULL DEFAULT '1970-01-01 00:00:00'" + ) + ) + conn.execute( + text( + "ALTER TABLE messagecategory" + " MODIFY COLUMN updated_at DATETIME NOT NULL DEFAULT '1970-01-01 00:00:00'" + ) + ) + conn.execute( + text( + "ALTER TABLE messagecontactassociation" + " MODIFY COLUMN updated_at DATETIME NOT NULL DEFAULT '1970-01-01 00:00:00'" + ) + ) + conn.execute( + text( + "ALTER TABLE transaction" + " MODIFY COLUMN updated_at DATETIME NOT NULL DEFAULT '1970-01-01 00:00:00'" + ) + ) - conn.execute(text("ALTER TABLE accounttransaction DROP updated_at," - "DROP deleted_at")) - conn.execute(text("ALTER TABLE messagecategory DROP updated_at," - "DROP deleted_at")) - conn.execute(text("ALTER TABLE messagecontactassociation DROP updated_at," - "DROP deleted_at")) - conn.execute(text("ALTER TABLE thread DROP deleted_at, DROP INDEX" - " ix_thread_namespace_id_recentdate_deleted_at")) - conn.execute(text("ALTER TABLE transaction DROP deleted_at," - "DROP updated_at")) - if conn.engine.has_table('easdevice'): + conn.execute( + text("ALTER TABLE accounttransaction DROP updated_at," "DROP deleted_at") + ) + conn.execute(text("ALTER TABLE messagecategory DROP updated_at," "DROP deleted_at")) + conn.execute( + text("ALTER TABLE messagecontactassociation DROP updated_at," "DROP deleted_at") + ) + conn.execute( + text( + "ALTER TABLE thread DROP deleted_at, DROP INDEX" + " ix_thread_namespace_id_recentdate_deleted_at" + ) + ) + conn.execute(text("ALTER TABLE transaction DROP 
deleted_at," "DROP updated_at")) + if conn.engine.has_table("easdevice"): # Run EAS specific migrations - conn.execute(text("ALTER TABLE easdevice" - " MODIFY COLUMN updated_at DATETIME NOT NULL DEFAULT '1970-01-01 00:00:00'")) - conn.execute(text("ALTER TABLE easdevice DROP deleted_at," - "DROP updated_at")) + conn.execute( + text( + "ALTER TABLE easdevice" + " MODIFY COLUMN updated_at DATETIME NOT NULL DEFAULT '1970-01-01 00:00:00'" + ) + ) + conn.execute(text("ALTER TABLE easdevice DROP deleted_at," "DROP updated_at")) def downgrade(): conn = op.get_bind() - op.add_column('transaction', sa.Column('updated_at', mysql.DATETIME(), - nullable=False)) - op.add_column('transaction', sa.Column('deleted_at', mysql.DATETIME(), - nullable=True)) - op.create_index('ix_transaction_updated_at', 'transaction', ['updated_at'], - unique=False) - op.create_index('ix_transaction_deleted_at', 'transaction', ['deleted_at'], - unique=False) + op.add_column( + "transaction", sa.Column("updated_at", mysql.DATETIME(), nullable=False) + ) + op.add_column( + "transaction", sa.Column("deleted_at", mysql.DATETIME(), nullable=True) + ) + op.create_index( + "ix_transaction_updated_at", "transaction", ["updated_at"], unique=False + ) + op.create_index( + "ix_transaction_deleted_at", "transaction", ["deleted_at"], unique=False + ) - op.add_column('thread', sa.Column('deleted_at', mysql.DATETIME(), - nullable=True)) - op.create_index('ix_thread_deleted_at', 'thread', ['deleted_at'], - unique=False) - op.create_index('ix_thread_namespace_id_recentdate_deleted_at', 'thread', - ['namespace_id', 'recentdate', 'deleted_at'], unique=False) + op.add_column("thread", sa.Column("deleted_at", mysql.DATETIME(), nullable=True)) + op.create_index("ix_thread_deleted_at", "thread", ["deleted_at"], unique=False) + op.create_index( + "ix_thread_namespace_id_recentdate_deleted_at", + "thread", + ["namespace_id", "recentdate", "deleted_at"], + unique=False, + ) - op.add_column('messagecontactassociation', sa.Column('updated_at', - mysql.DATETIME(), nullable=False)) - op.add_column('messagecontactassociation', sa.Column('deleted_at', - mysql.DATETIME(), nullable=True)) - op.create_index('ix_messagecontactassociation_updated_at', - 'messagecontactassociation', ['updated_at'], unique=False) - op.create_index('ix_messagecontactassociation_deleted_at', - 'messagecontactassociation', ['deleted_at'], unique=False) + op.add_column( + "messagecontactassociation", + sa.Column("updated_at", mysql.DATETIME(), nullable=False), + ) + op.add_column( + "messagecontactassociation", + sa.Column("deleted_at", mysql.DATETIME(), nullable=True), + ) + op.create_index( + "ix_messagecontactassociation_updated_at", + "messagecontactassociation", + ["updated_at"], + unique=False, + ) + op.create_index( + "ix_messagecontactassociation_deleted_at", + "messagecontactassociation", + ["deleted_at"], + unique=False, + ) - op.add_column('messagecategory', sa.Column('updated_at', mysql.DATETIME(), - nullable=False)) - op.add_column('messagecategory', sa.Column('deleted_at', mysql.DATETIME(), - nullable=True)) - op.create_index('ix_messagecategory_updated_at', 'messagecategory', - ['updated_at'], unique=False) - op.create_index('ix_messagecategory_deleted_at', 'messagecategory', - ['deleted_at'], unique=False) + op.add_column( + "messagecategory", sa.Column("updated_at", mysql.DATETIME(), nullable=False) + ) + op.add_column( + "messagecategory", sa.Column("deleted_at", mysql.DATETIME(), nullable=True) + ) + op.create_index( + "ix_messagecategory_updated_at", 
"messagecategory", ["updated_at"], unique=False + ) + op.create_index( + "ix_messagecategory_deleted_at", "messagecategory", ["deleted_at"], unique=False + ) - op.add_column('accounttransaction', sa.Column('updated_at', mysql.DATETIME(), nullable=False)) - op.add_column('accounttransaction', sa.Column('deleted_at', mysql.DATETIME(), nullable=True)) - op.create_index('ix_accounttransaction_updated_at', 'accounttransaction', ['updated_at'], unique=False) - op.create_index('ix_accounttransaction_deleted_at', 'accounttransaction', ['deleted_at'], unique=False) + op.add_column( + "accounttransaction", sa.Column("updated_at", mysql.DATETIME(), nullable=False) + ) + op.add_column( + "accounttransaction", sa.Column("deleted_at", mysql.DATETIME(), nullable=True) + ) + op.create_index( + "ix_accounttransaction_updated_at", + "accounttransaction", + ["updated_at"], + unique=False, + ) + op.create_index( + "ix_accounttransaction_deleted_at", + "accounttransaction", + ["deleted_at"], + unique=False, + ) - if conn.engine.has_table('easdevice'): - op.add_column('easdevice', sa.Column('updated_at', mysql.DATETIME(), nullable=False)) - op.add_column('easdevice', sa.Column('deleted_at', mysql.DATETIME(), nullable=True)) - op.create_index('ix_easdevice_updated_at', 'easdevice', ['updated_at'], unique=False) - op.create_index('ix_easdevice_deleted_at', 'easdevice', ['deleted_at'], unique=False) + if conn.engine.has_table("easdevice"): + op.add_column( + "easdevice", sa.Column("updated_at", mysql.DATETIME(), nullable=False) + ) + op.add_column( + "easdevice", sa.Column("deleted_at", mysql.DATETIME(), nullable=True) + ) + op.create_index( + "ix_easdevice_updated_at", "easdevice", ["updated_at"], unique=False + ) + op.create_index( + "ix_easdevice_deleted_at", "easdevice", ["deleted_at"], unique=False + ) diff --git a/migrations/versions/224_namespace_id_idx_transaction.py b/migrations/versions/224_namespace_id_idx_transaction.py index fde1fb2f1..fde408fe2 100644 --- a/migrations/versions/224_namespace_id_idx_transaction.py +++ b/migrations/versions/224_namespace_id_idx_transaction.py @@ -7,16 +7,15 @@ """ # revision identifiers, used by Alembic. -revision = '29a1f2ef5653' -down_revision = '539ce0291298' +revision = "29a1f2ef5653" +down_revision = "539ce0291298" from alembic import op def upgrade(): - op.create_index('idx_namespace', 'transaction', ['namespace_id'], - unique=False) + op.create_index("idx_namespace", "transaction", ["namespace_id"], unique=False) def downgrade(): - op.drop_index('idx_namespace', table_name='transaction') + op.drop_index("idx_namespace", table_name="transaction") diff --git a/migrations/versions/225_drop_messagecategory_foreign_keys.py b/migrations/versions/225_drop_messagecategory_foreign_keys.py index c4c442b93..8fc57a194 100644 --- a/migrations/versions/225_drop_messagecategory_foreign_keys.py +++ b/migrations/versions/225_drop_messagecategory_foreign_keys.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '25129e0316d4' -down_revision = '29a1f2ef5653' +revision = "25129e0316d4" +down_revision = "29a1f2ef5653" from alembic import op from sqlalchemy.sql import text @@ -16,19 +16,29 @@ def upgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE messagecategory" - " DROP FOREIGN KEY messagecategory_ibfk_1")) + conn.execute( + text("ALTER TABLE messagecategory" " DROP FOREIGN KEY messagecategory_ibfk_1") + ) - conn.execute(text("ALTER TABLE messagecategory" - " DROP FOREIGN KEY messagecategory_ibfk_2")) + conn.execute( + text("ALTER TABLE messagecategory" " DROP FOREIGN KEY messagecategory_ibfk_2") + ) def downgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE messagecategory " - "ADD CONSTRAINT messagecategory_ibfk_2 FOREIGN KEY " - "(category_id) REFERENCES category(id)")) - - conn.execute(text("ALTER TABLE messagecategory " - "ADD CONSTRAINT messagecategory_ibfk_1 FOREIGN KEY " - "(message_id) REFERENCES message(id)")) + conn.execute( + text( + "ALTER TABLE messagecategory " + "ADD CONSTRAINT messagecategory_ibfk_2 FOREIGN KEY " + "(category_id) REFERENCES category(id)" + ) + ) + + conn.execute( + text( + "ALTER TABLE messagecategory " + "ADD CONSTRAINT messagecategory_ibfk_1 FOREIGN KEY " + "(message_id) REFERENCES message(id)" + ) + ) diff --git a/migrations/versions/226_add_queryable_value_column_to_metadata.py b/migrations/versions/226_add_queryable_value_column_to_metadata.py index e5c77968a..f607dad73 100644 --- a/migrations/versions/226_add_queryable_value_column_to_metadata.py +++ b/migrations/versions/226_add_queryable_value_column_to_metadata.py @@ -7,20 +7,25 @@ """ # revision identifiers, used by Alembic. -revision = '2dbf6da0775b' -down_revision = '25129e0316d4' +revision = "2dbf6da0775b" +down_revision = "25129e0316d4" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('metadata', sa.Column('queryable_value', sa.BigInteger(), - nullable=True)) - op.create_index(op.f('ix_metadata_queryable_value'), 'metadata', - ['queryable_value'], unique=False) + op.add_column( + "metadata", sa.Column("queryable_value", sa.BigInteger(), nullable=True) + ) + op.create_index( + op.f("ix_metadata_queryable_value"), + "metadata", + ["queryable_value"], + unique=False, + ) def downgrade(): - op.drop_index(op.f('ix_metadata_queryable_value'), table_name='metadata') - op.drop_column('metadata', 'queryable_value') + op.drop_index(op.f("ix_metadata_queryable_value"), table_name="metadata") + op.drop_column("metadata", "queryable_value") diff --git a/migrations/versions/227_remove_message_foreignkeys.py b/migrations/versions/227_remove_message_foreignkeys.py index 8276f70eb..89d22502b 100644 --- a/migrations/versions/227_remove_message_foreignkeys.py +++ b/migrations/versions/227_remove_message_foreignkeys.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '17b147c1d53c' -down_revision = '2dbf6da0775b' +revision = "17b147c1d53c" +down_revision = "2dbf6da0775b" from alembic import op from sqlalchemy.sql import text @@ -16,22 +16,31 @@ def upgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE message" - " DROP FOREIGN KEY message_ibfk_1")) - conn.execute(text("ALTER TABLE message" - " DROP FOREIGN KEY message_ibfk_2")) - conn.execute(text("ALTER TABLE message" - " DROP FOREIGN KEY message_ibfk_3")) + conn.execute(text("ALTER TABLE message" " DROP FOREIGN KEY message_ibfk_1")) + conn.execute(text("ALTER TABLE message" " DROP FOREIGN KEY message_ibfk_2")) + conn.execute(text("ALTER TABLE message" " DROP FOREIGN KEY message_ibfk_3")) def downgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE message" - "ADD CONSTRAINT message_ibfk_3 FOREIGN KEY " - "(reply_to_message_id) REFERENCES message(id)")) - conn.execute(text("ALTER TABLE message" - "ADD CONSTRAINT message_ibfk_2 FOREIGN KEY " - "(thread_id) REFERENCES thread(id)")) - conn.execute(text("ALTER TABLE message" - "ADD CONSTRAINT message_ibfk_1 FOREIGN KEY " - "(namespace_id) REFERENCES namespace(id)")) + conn.execute( + text( + "ALTER TABLE message" + "ADD CONSTRAINT message_ibfk_3 FOREIGN KEY " + "(reply_to_message_id) REFERENCES message(id)" + ) + ) + conn.execute( + text( + "ALTER TABLE message" + "ADD CONSTRAINT message_ibfk_2 FOREIGN KEY " + "(thread_id) REFERENCES thread(id)" + ) + ) + conn.execute( + text( + "ALTER TABLE message" + "ADD CONSTRAINT message_ibfk_1 FOREIGN KEY " + "(namespace_id) REFERENCES namespace(id)" + ) + ) diff --git a/migrations/versions/228_increase_gmailaccount_token_length.py b/migrations/versions/228_increase_gmailaccount_token_length.py index 1dc13ea52..7e648a80b 100644 --- a/migrations/versions/228_increase_gmailaccount_token_length.py +++ b/migrations/versions/228_increase_gmailaccount_token_length.py @@ -7,20 +7,20 @@ """ # revision identifiers, used by Alembic. -revision = '3df39f4fbdec' -down_revision = '17b147c1d53c' +revision = "3df39f4fbdec" +down_revision = "17b147c1d53c" from alembic import op import sqlalchemy as sa def upgrade(): - op.alter_column('gmailaccount', 'g_id_token', type_=sa.String(length=2048)) - op.alter_column('gmailauthcredentials', 'g_id_token', type_=sa.String(length=2048)) + op.alter_column("gmailaccount", "g_id_token", type_=sa.String(length=2048)) + op.alter_column("gmailauthcredentials", "g_id_token", type_=sa.String(length=2048)) pass def downgrade(): - op.alter_column('gmailaccount', 'g_id_token', type_=sa.String(length=1024)) - op.alter_column('gmailauthcredentials', 'g_id_token', type_=sa.String(length=1024)) + op.alter_column("gmailaccount", "g_id_token", type_=sa.String(length=1024)) + op.alter_column("gmailauthcredentials", "g_id_token", type_=sa.String(length=1024)) pass diff --git a/migrations/versions/229_drop_transaction_foreign_keys.py b/migrations/versions/229_drop_transaction_foreign_keys.py index fe5e78664..548d4b7c4 100644 --- a/migrations/versions/229_drop_transaction_foreign_keys.py +++ b/migrations/versions/229_drop_transaction_foreign_keys.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '23ff7f0b506d' -down_revision = '3df39f4fbdec' +revision = "23ff7f0b506d" +down_revision = "3df39f4fbdec" from alembic import op from sqlalchemy.sql import text @@ -16,17 +16,28 @@ def upgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE transaction" - " DROP FOREIGN KEY transaction_ibfk_1")) - conn.execute(text("ALTER TABLE accounttransaction" - " DROP FOREIGN KEY accounttransaction_ibfk_1")) + conn.execute(text("ALTER TABLE transaction" " DROP FOREIGN KEY transaction_ibfk_1")) + conn.execute( + text( + "ALTER TABLE accounttransaction" + " DROP FOREIGN KEY accounttransaction_ibfk_1" + ) + ) def downgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE accounttransaction" - "ADD CONSTRAINT accounttransaction_ibfk_1 FOREIGN KEY " - "(namespace_id) REFERENCES namespace(id)")) - conn.execute(text("ALTER TABLE transaction" - "ADD CONSTRAINT transaction_ibfk_1 FOREIGN KEY " - "(namespace_id) REFERENCES namespace(id)")) + conn.execute( + text( + "ALTER TABLE accounttransaction" + "ADD CONSTRAINT accounttransaction_ibfk_1 FOREIGN KEY " + "(namespace_id) REFERENCES namespace(id)" + ) + ) + conn.execute( + text( + "ALTER TABLE transaction" + "ADD CONSTRAINT transaction_ibfk_1 FOREIGN KEY " + "(namespace_id) REFERENCES namespace(id)" + ) + ) diff --git a/migrations/versions/230_drop_block_foreign_keys.py b/migrations/versions/230_drop_block_foreign_keys.py index c71df138f..ec8db2b08 100644 --- a/migrations/versions/230_drop_block_foreign_keys.py +++ b/migrations/versions/230_drop_block_foreign_keys.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '4265dc58eec6' -down_revision = '23ff7f0b506d' +revision = "4265dc58eec6" +down_revision = "23ff7f0b506d" from alembic import op from sqlalchemy.sql import text @@ -16,22 +16,31 @@ def upgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE part" - " DROP FOREIGN KEY part_ibfk_1")) - conn.execute(text("ALTER TABLE part" - " DROP FOREIGN KEY part_ibfk_2")) - conn.execute(text("ALTER TABLE block" - " DROP FOREIGN KEY block_ibfk_1")) + conn.execute(text("ALTER TABLE part" " DROP FOREIGN KEY part_ibfk_1")) + conn.execute(text("ALTER TABLE part" " DROP FOREIGN KEY part_ibfk_2")) + conn.execute(text("ALTER TABLE block" " DROP FOREIGN KEY block_ibfk_1")) def downgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE block " - "ADD CONSTRAINT block_ibfk_1 FOREIGN KEY " - "(namespace_id) REFERENCES namespace(id)")) - conn.execute(text("ALTER TABLE part " - "ADD CONSTRAINT part_ibfk_2 FOREIGN KEY " - "(message_id) REFERENCES message(id)")) - conn.execute(text("ALTER TABLE part " - "ADD CONSTRAINT part_ibfk_1 FOREIGN KEY " - "(block_id) REFERENCES block(id)")) + conn.execute( + text( + "ALTER TABLE block " + "ADD CONSTRAINT block_ibfk_1 FOREIGN KEY " + "(namespace_id) REFERENCES namespace(id)" + ) + ) + conn.execute( + text( + "ALTER TABLE part " + "ADD CONSTRAINT part_ibfk_2 FOREIGN KEY " + "(message_id) REFERENCES message(id)" + ) + ) + conn.execute( + text( + "ALTER TABLE part " + "ADD CONSTRAINT part_ibfk_1 FOREIGN KEY " + "(block_id) REFERENCES block(id)" + ) + ) diff --git a/migrations/versions/231_drop_contact_foreign_keys.py b/migrations/versions/231_drop_contact_foreign_keys.py index e54ee6237..1b1770076 100644 --- a/migrations/versions/231_drop_contact_foreign_keys.py +++ b/migrations/versions/231_drop_contact_foreign_keys.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = 'c48fc8dea1b' -down_revision = '4265dc58eec6' +revision = "c48fc8dea1b" +down_revision = "4265dc58eec6" from alembic import op from sqlalchemy.sql import text @@ -16,26 +16,40 @@ def upgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE contact" - " DROP FOREIGN KEY contact_ibfk_1")) + conn.execute(text("ALTER TABLE contact" " DROP FOREIGN KEY contact_ibfk_1")) - conn.execute(text("ALTER TABLE phonenumber" - " DROP FOREIGN KEY phonenumber_ibfk_1")) + conn.execute(text("ALTER TABLE phonenumber" " DROP FOREIGN KEY phonenumber_ibfk_1")) - conn.execute(text("ALTER TABLE messagecontactassociation" - " DROP FOREIGN KEY messagecontactassociation_ibfk_1")) + conn.execute( + text( + "ALTER TABLE messagecontactassociation" + " DROP FOREIGN KEY messagecontactassociation_ibfk_1" + ) + ) def downgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE contact" - " ADD CONSTRAINT contact_ibfk_1 FOREIGN KEY" - " (namespace_id) REFERENCES namespace(id)")) - - conn.execute(text("ALTER TABLE phonenumber" - " ADD CONSTRAINT phonenumber_ibfk_1 FOREIGN KEY" - " (contact_id) REFERENCES contact(id)")) - - conn.execute(text("ALTER TABLE messagecontactassociation" - " ADD CONSTRAINT messagecontactassociation_ibfk_1" - " FOREIGN KEY (contact_id) REFERENCES contact(id)")) + conn.execute( + text( + "ALTER TABLE contact" + " ADD CONSTRAINT contact_ibfk_1 FOREIGN KEY" + " (namespace_id) REFERENCES namespace(id)" + ) + ) + + conn.execute( + text( + "ALTER TABLE phonenumber" + " ADD CONSTRAINT phonenumber_ibfk_1 FOREIGN KEY" + " (contact_id) REFERENCES contact(id)" + ) + ) + + conn.execute( + text( + "ALTER TABLE messagecontactassociation" + " ADD CONSTRAINT messagecontactassociation_ibfk_1" + " FOREIGN KEY (contact_id) REFERENCES contact(id)" + ) + ) diff --git a/migrations/versions/232_add_thread_deleted_at.py b/migrations/versions/232_add_thread_deleted_at.py index ee14185d5..dc430409b 100644 --- a/migrations/versions/232_add_thread_deleted_at.py +++ b/migrations/versions/232_add_thread_deleted_at.py @@ -7,20 +7,23 @@ """ # revision identifiers, used by Alembic. -revision = '4a44b06cd53b' -down_revision = 'c48fc8dea1b' +revision = "4a44b06cd53b" +down_revision = "c48fc8dea1b" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('thread', sa.Column('deleted_at', sa.DateTime(), - nullable=True)) - op.create_index('ix_thread_namespace_id_deleted_at', 'thread', - ['namespace_id', 'deleted_at'], unique=False) + op.add_column("thread", sa.Column("deleted_at", sa.DateTime(), nullable=True)) + op.create_index( + "ix_thread_namespace_id_deleted_at", + "thread", + ["namespace_id", "deleted_at"], + unique=False, + ) def downgrade(): - op.drop_index('ix_thread_namespace_id_deleted_at', table_name='thread') - op.drop_column('thread', 'deleted_at') + op.drop_index("ix_thread_namespace_id_deleted_at", table_name="thread") + op.drop_column("thread", "deleted_at") diff --git a/migrations/versions/233_revert_drop_block_foreign_keys.py b/migrations/versions/233_revert_drop_block_foreign_keys.py index 09b9145a6..cf2c823af 100644 --- a/migrations/versions/233_revert_drop_block_foreign_keys.py +++ b/migrations/versions/233_revert_drop_block_foreign_keys.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '569ebe8e383d' -down_revision = '4a44b06cd53b' +revision = "569ebe8e383d" +down_revision = "4a44b06cd53b" from alembic import op from sqlalchemy.sql import text @@ -16,22 +16,31 @@ def upgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE block " - "ADD CONSTRAINT block_ibfk_1 FOREIGN KEY " - "(namespace_id) REFERENCES namespace(id)")) - conn.execute(text("ALTER TABLE part " - "ADD CONSTRAINT part_ibfk_2 FOREIGN KEY " - "(message_id) REFERENCES message(id)")) - conn.execute(text("ALTER TABLE part " - "ADD CONSTRAINT part_ibfk_1 FOREIGN KEY " - "(block_id) REFERENCES block(id)")) + conn.execute( + text( + "ALTER TABLE block " + "ADD CONSTRAINT block_ibfk_1 FOREIGN KEY " + "(namespace_id) REFERENCES namespace(id)" + ) + ) + conn.execute( + text( + "ALTER TABLE part " + "ADD CONSTRAINT part_ibfk_2 FOREIGN KEY " + "(message_id) REFERENCES message(id)" + ) + ) + conn.execute( + text( + "ALTER TABLE part " + "ADD CONSTRAINT part_ibfk_1 FOREIGN KEY " + "(block_id) REFERENCES block(id)" + ) + ) def downgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE part" - " DROP FOREIGN KEY part_ibfk_1")) - conn.execute(text("ALTER TABLE part" - " DROP FOREIGN KEY part_ibfk_2")) - conn.execute(text("ALTER TABLE block" - " DROP FOREIGN KEY block_ibfk_1")) + conn.execute(text("ALTER TABLE part" " DROP FOREIGN KEY part_ibfk_1")) + conn.execute(text("ALTER TABLE part" " DROP FOREIGN KEY part_ibfk_2")) + conn.execute(text("ALTER TABLE block" " DROP FOREIGN KEY block_ibfk_1")) diff --git a/migrations/versions/234_change_contact_uid_collation.py b/migrations/versions/234_change_contact_uid_collation.py index 6122c98ef..2b23b9f2b 100644 --- a/migrations/versions/234_change_contact_uid_collation.py +++ b/migrations/versions/234_change_contact_uid_collation.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '53e6a7446c45' -down_revision = '569ebe8e383d' +revision = "53e6a7446c45" +down_revision = "569ebe8e383d" from alembic import op from sqlalchemy.sql import text @@ -16,9 +16,15 @@ def upgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE contact MODIFY uid varchar(64) NOT NULL COLLATE utf8mb4_bin")) + conn.execute( + text("ALTER TABLE contact MODIFY uid varchar(64) NOT NULL COLLATE utf8mb4_bin") + ) def downgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE contact MODIFY uid varchar(64) NOT NULL COLLATE utf8mb4_general_ci")) + conn.execute( + text( + "ALTER TABLE contact MODIFY uid varchar(64) NOT NULL COLLATE utf8mb4_general_ci" + ) + ) diff --git a/migrations/versions/235_change_imapfolderinfo_column.py b/migrations/versions/235_change_imapfolderinfo_column.py index bf62079bb..d33c5cb3b 100644 --- a/migrations/versions/235_change_imapfolderinfo_column.py +++ b/migrations/versions/235_change_imapfolderinfo_column.py @@ -7,22 +7,30 @@ """ # revision identifiers, used by Alembic. 
-revision = '34815f9e639c' -down_revision = '53e6a7446c45' +revision = "34815f9e639c" +down_revision = "53e6a7446c45" from alembic import op import sqlalchemy as sa def upgrade(): - op.alter_column('imapfolderinfo', 'uidnext', - type_=sa.BigInteger, existing_type=sa.Integer, - existing_server_default=sa.sql.expression.null(), - existing_nullable=True) + op.alter_column( + "imapfolderinfo", + "uidnext", + type_=sa.BigInteger, + existing_type=sa.Integer, + existing_server_default=sa.sql.expression.null(), + existing_nullable=True, + ) def downgrade(): - op.alter_column('imapfolderinfo', 'uidnext', - type_=sa.Integer, existing_type=sa.BigInteger, - existing_server_default=sa.sql.expression.null(), - existing_nullable=True) + op.alter_column( + "imapfolderinfo", + "uidnext", + type_=sa.Integer, + existing_type=sa.BigInteger, + existing_server_default=sa.sql.expression.null(), + existing_nullable=True, + ) diff --git a/migrations/versions/236_add_desired_sync_host.py b/migrations/versions/236_add_desired_sync_host.py index 26806003c..b1dae0f04 100644 --- a/migrations/versions/236_add_desired_sync_host.py +++ b/migrations/versions/236_add_desired_sync_host.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '3eb4f30c8ed3' -down_revision = '34815f9e639c' +revision = "3eb4f30c8ed3" +down_revision = "34815f9e639c" from alembic import op from sqlalchemy.sql import text diff --git a/migrations/versions/237_add_new_contacts_index.py b/migrations/versions/237_add_new_contacts_index.py index f03bda3c5..17f685a2b 100644 --- a/migrations/versions/237_add_new_contacts_index.py +++ b/migrations/versions/237_add_new_contacts_index.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '780b1dabd51' -down_revision = '3eb4f30c8ed3' +revision = "780b1dabd51" +down_revision = "3eb4f30c8ed3" from alembic import op from sqlalchemy.sql import text @@ -16,11 +16,14 @@ def upgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE contact" - " ADD INDEX idx_namespace_created(namespace_id, created_at)")) + conn.execute( + text( + "ALTER TABLE contact" + " ADD INDEX idx_namespace_created(namespace_id, created_at)" + ) + ) def downgrade(): conn = op.get_bind() - conn.execute(text("ALTER TABLE contact" - " DROP INDEX idx_namespace_created")) + conn.execute(text("ALTER TABLE contact" " DROP INDEX idx_namespace_created")) diff --git a/migrations/versions/238_add_message_id_header_idx.py b/migrations/versions/238_add_message_id_header_idx.py index 2fb5a5fa3..93b3c697e 100644 --- a/migrations/versions/238_add_message_id_header_idx.py +++ b/migrations/versions/238_add_message_id_header_idx.py @@ -7,18 +7,23 @@ """ # revision identifiers, used by Alembic. 
-revision = '1b0b4e6fdf96' -down_revision = '780b1dabd51' +revision = "1b0b4e6fdf96" +down_revision = "780b1dabd51" from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import mysql + def upgrade(): - op.create_index('ix_message_message_id_header_namespace_id', 'message', - ['message_id_header', 'namespace_id'], unique=False, - mysql_length={'message_id_header': 191}) + op.create_index( + "ix_message_message_id_header_namespace_id", + "message", + ["message_id_header", "namespace_id"], + unique=False, + mysql_length={"message_id_header": 191}, + ) def downgrade(): - op.drop_index('ix_message_message_id_header_namespace_id', table_name='message') + op.drop_index("ix_message_message_id_header_namespace_id", table_name="message") diff --git a/migrations/versions/239_server_default_created_at.py b/migrations/versions/239_server_default_created_at.py index 991514ae6..00a083af1 100644 --- a/migrations/versions/239_server_default_created_at.py +++ b/migrations/versions/239_server_default_created_at.py @@ -7,21 +7,63 @@ """ # revision identifiers, used by Alembic. -revision = '1dfc65e583bf' -down_revision = '1b0b4e6fdf96' +revision = "1dfc65e583bf" +down_revision = "1b0b4e6fdf96" from alembic import op from sqlalchemy.sql import text # SELECT table_name FROM information_schema.columns WHERE table_schema='inbox' AND column_name='created_at' -TABLES = ['account', 'accounttransaction', 'actionlog', 'block', 'calendar', 'category', 'contact', 'contactsearchindexcursor', 'dataprocessingcache', 'event', 'folder', 'gmailauthcredentials', 'imapfolderinfo', 'imapfoldersyncstatus', 'imapuid', 'label', 'labelitem', 'message', 'messagecategory', 'messagecontactassociation', 'metadata', 'namespace', 'part', 'phonenumber', 'secret', 'thread', 'transaction'] +TABLES = [ + "account", + "accounttransaction", + "actionlog", + "block", + "calendar", + "category", + "contact", + "contactsearchindexcursor", + "dataprocessingcache", + "event", + "folder", + "gmailauthcredentials", + "imapfolderinfo", + "imapfoldersyncstatus", + "imapuid", + "label", + "labelitem", + "message", + "messagecategory", + "messagecontactassociation", + "metadata", + "namespace", + "part", + "phonenumber", + "secret", + "thread", + "transaction", +] + def upgrade(): conn = op.get_bind() for table in TABLES: - conn.execute(text('ALTER TABLE `{}` MODIFY COLUMN `created_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP'.format(table))) + conn.execute( + text( + "ALTER TABLE `{}` MODIFY COLUMN `created_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP".format( + table + ) + ) + ) + def downgrade(): conn = op.get_bind() for table in TABLES: - conn.execute(text('ALTER TABLE `{}` MODIFY COLUMN `created_at` DATETIME NOT NULL'.format(table))) + conn.execute( + text( + "ALTER TABLE `{}` MODIFY COLUMN `created_at` DATETIME NOT NULL".format( + table + ) + ) + ) diff --git a/migrations/versions/240_create_missing_indexes.py b/migrations/versions/240_create_missing_indexes.py index 074c0bdfd..a9ca3575b 100644 --- a/migrations/versions/240_create_missing_indexes.py +++ b/migrations/versions/240_create_missing_indexes.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '36ff8677e77' -down_revision = '1dfc65e583bf' +revision = "36ff8677e77" +down_revision = "1dfc65e583bf" from alembic import op import sqlalchemy as sa @@ -16,54 +16,105 @@ def upgrade(): # Thread table - op.create_index('ix_namespace_id__cleaned_subject', 'thread', ['namespace_id', '_cleaned_subject'], unique=False, mysql_length={'_cleaned_subject': 80}) - op.drop_index('ix_cleaned_subject', table_name='thread') - op.drop_index('ix_thread_namespace_id', table_name='thread') - - op.drop_index('ix_thread_subject', 'thread') - op.create_index('ix_thread_subject', 'thread', ['subject'], unique=False, mysql_length=80) + op.create_index( + "ix_namespace_id__cleaned_subject", + "thread", + ["namespace_id", "_cleaned_subject"], + unique=False, + mysql_length={"_cleaned_subject": 80}, + ) + op.drop_index("ix_cleaned_subject", table_name="thread") + op.drop_index("ix_thread_namespace_id", table_name="thread") + + op.drop_index("ix_thread_subject", "thread") + op.create_index( + "ix_thread_subject", "thread", ["subject"], unique=False, mysql_length=80 + ) # Message table - op.create_index('ix_message_thread_id', 'message', ['thread_id'], unique=False) - op.drop_index('ix_message_namespace_id_message_id_header_subject', table_name='message') + op.create_index("ix_message_thread_id", "message", ["thread_id"], unique=False) + op.drop_index( + "ix_message_namespace_id_message_id_header_subject", table_name="message" + ) - op.drop_index('ix_message_subject', 'message') - op.create_index('ix_message_subject', 'message', ['subject'], unique=False, mysql_length=80) + op.drop_index("ix_message_subject", "message") + op.create_index( + "ix_message_subject", "message", ["subject"], unique=False, mysql_length=80 + ) conn = op.get_bind() - conn.execute('ALTER TABLE `message` CHANGE `data_sha256` `data_sha256` VARCHAR(64) CHARACTER SET ascii NULL DEFAULT NULL') - - op.drop_index('ix_message_message_id_header_namespace_id', table_name='message') - op.create_index('ix_message_message_id_header_namespace_id', 'message', - ['message_id_header', 'namespace_id'], unique=False, - mysql_length={'message_id_header': 80}) - - op.create_index('ix_message_reply_to_message_id', 'message', ['reply_to_message_id'], unique=False) + conn.execute( + "ALTER TABLE `message` CHANGE `data_sha256` `data_sha256` VARCHAR(64) CHARACTER SET ascii NULL DEFAULT NULL" + ) + + op.drop_index("ix_message_message_id_header_namespace_id", table_name="message") + op.create_index( + "ix_message_message_id_header_namespace_id", + "message", + ["message_id_header", "namespace_id"], + unique=False, + mysql_length={"message_id_header": 80}, + ) + + op.create_index( + "ix_message_reply_to_message_id", + "message", + ["reply_to_message_id"], + unique=False, + ) def downgrade(): # Thread table - op.create_index('ix_thread_namespace_id', 'thread', ['namespace_id'], unique=False) - op.create_index('ix_cleaned_subject', 'thread', ['_cleaned_subject'], unique=False, mysql_length={'_cleaned_subject': 191}) - op.drop_index('ix_namespace_id__cleaned_subject', table_name='thread') - - op.drop_index('ix_thread_subject', 'thread') - op.create_index('ix_thread_subject', 'thread', ['subject'], unique=False, mysql_length=191) + op.create_index("ix_thread_namespace_id", "thread", ["namespace_id"], unique=False) + op.create_index( + "ix_cleaned_subject", + "thread", + ["_cleaned_subject"], + unique=False, + mysql_length={"_cleaned_subject": 191}, + ) + op.drop_index("ix_namespace_id__cleaned_subject", table_name="thread") + + op.drop_index("ix_thread_subject", 
"thread") + op.create_index( + "ix_thread_subject", "thread", ["subject"], unique=False, mysql_length=191 + ) # Message table - op.create_index('ix_message_namespace_id_message_id_header_subject', 'message', ['namespace_id', 'subject', 'message_id_header'], unique=False, mysql_length={'subject': 191, 'message_id_header': 191}) - op.drop_index('ix_message_thread_id', table_name='message') - op.drop_index('ix_message_subject', 'message') - op.create_index('ix_message_subject', 'message', ['subject'], unique=False, mysql_length=191) - - op.drop_index('ix_message_data_sha256', 'message') + op.create_index( + "ix_message_namespace_id_message_id_header_subject", + "message", + ["namespace_id", "subject", "message_id_header"], + unique=False, + mysql_length={"subject": 191, "message_id_header": 191}, + ) + op.drop_index("ix_message_thread_id", table_name="message") + op.drop_index("ix_message_subject", "message") + op.create_index( + "ix_message_subject", "message", ["subject"], unique=False, mysql_length=191 + ) + + op.drop_index("ix_message_data_sha256", "message") conn = op.get_bind() - conn.execute('ALTER TABLE `message` CHANGE `data_sha256` `data_sha256` VARCHAR(255) NULL DEFAULT NULL;') - op.create_index('ix_message_data_sha256', 'message', ['data_sha256'], unique=False, mysql_length=191) - - op.drop_index('ix_message_message_id_header_namespace_id', table_name='message') - op.create_index('ix_message_message_id_header_namespace_id', 'message', - ['message_id_header', 'namespace_id'], unique=False, - mysql_length={'message_id_header': 191}) - - op.drop_index('ix_message_reply_to_message_id', table_name='message') + conn.execute( + "ALTER TABLE `message` CHANGE `data_sha256` `data_sha256` VARCHAR(255) NULL DEFAULT NULL;" + ) + op.create_index( + "ix_message_data_sha256", + "message", + ["data_sha256"], + unique=False, + mysql_length=191, + ) + + op.drop_index("ix_message_message_id_header_namespace_id", table_name="message") + op.create_index( + "ix_message_message_id_header_namespace_id", + "message", + ["message_id_header", "namespace_id"], + unique=False, + mysql_length={"message_id_header": 191}, + ) + + op.drop_index("ix_message_reply_to_message_id", table_name="message") diff --git a/migrations/versions/241_create_messagecategory_index.py b/migrations/versions/241_create_messagecategory_index.py index 9ea66f4b5..ed34c369e 100644 --- a/migrations/versions/241_create_messagecategory_index.py +++ b/migrations/versions/241_create_messagecategory_index.py @@ -7,17 +7,21 @@ """ # revision identifiers, used by Alembic. -revision = '407abeb7398f' -down_revision = '36ff8677e77' +revision = "407abeb7398f" +down_revision = "36ff8677e77" from alembic import op import sqlalchemy as sa def upgrade(): - op.create_index('ix_messagecategory_category_id', 'messagecategory', - ['category_id'], unique=False) + op.create_index( + "ix_messagecategory_category_id", + "messagecategory", + ["category_id"], + unique=False, + ) def downgrade(): - op.drop_index('ix_messagecategory_category_id', table_name='messagecategory') + op.drop_index("ix_messagecategory_category_id", table_name="messagecategory") diff --git a/migrations/versions/242_fix_indexes.py b/migrations/versions/242_fix_indexes.py index a54e268ec..b1f89ff26 100644 --- a/migrations/versions/242_fix_indexes.py +++ b/migrations/versions/242_fix_indexes.py @@ -7,20 +7,30 @@ """ # revision identifiers, used by Alembic. 
-revision = '2197bc4a7df5' -down_revision = '407abeb7398f' +revision = "2197bc4a7df5" +down_revision = "407abeb7398f" from alembic import op import sqlalchemy as sa def upgrade(): - op.create_index('ix_messagecontactassociation_contact_id', 'messagecontactassociation', ['contact_id'], unique=False) - op.drop_index('ix_transaction_namespace_id', table_name='transaction') - op.drop_index('idx_namespace', table_name='transaction') + op.create_index( + "ix_messagecontactassociation_contact_id", + "messagecontactassociation", + ["contact_id"], + unique=False, + ) + op.drop_index("ix_transaction_namespace_id", table_name="transaction") + op.drop_index("idx_namespace", table_name="transaction") def downgrade(): - op.drop_index('ix_messagecontactassociation_contact_id', table_name='messagecontactassociation') - op.create_index('ix_transaction_namespace_id', 'transaction', ['namespace_id'], unique=False) - op.create_index('idx_namespace', 'transaction', ['namespace_id'], unique=False) + op.drop_index( + "ix_messagecontactassociation_contact_id", + table_name="messagecontactassociation", + ) + op.create_index( + "ix_transaction_namespace_id", "transaction", ["namespace_id"], unique=False + ) + op.create_index("idx_namespace", "transaction", ["namespace_id"], unique=False) diff --git a/migrations/versions/243_fix_action_log_indexes.py b/migrations/versions/243_fix_action_log_indexes.py index c8273e618..b405caf8d 100644 --- a/migrations/versions/243_fix_action_log_indexes.py +++ b/migrations/versions/243_fix_action_log_indexes.py @@ -7,23 +7,29 @@ """ # revision identifiers, used by Alembic. -revision = '2c47d9226de6' -down_revision = '2197bc4a7df5' +revision = "2c47d9226de6" +down_revision = "2197bc4a7df5" from alembic import op import sqlalchemy as sa def upgrade(): - op.create_index('ix_actionlog_status_namespace_id_record_id', 'actionlog', - ['status', 'namespace_id', 'record_id'], unique=False) - op.drop_index('idx_actionlog_status_type', table_name='actionlog') - op.drop_index('ix_actionlog_status_retries', table_name='actionlog') + op.create_index( + "ix_actionlog_status_namespace_id_record_id", + "actionlog", + ["status", "namespace_id", "record_id"], + unique=False, + ) + op.drop_index("idx_actionlog_status_type", table_name="actionlog") + op.drop_index("ix_actionlog_status_retries", table_name="actionlog") def downgrade(): - op.create_index('idx_actionlog_status_type', 'actionlog', - ['status', 'type'], unique=False) - op.create_index('ix_actionlog_status_retries', 'actionlog', - ['status', 'retries'], unique=False) - op.drop_index('ix_actionlog_status_namespace_id_record_id', table_name='actionlog') + op.create_index( + "idx_actionlog_status_type", "actionlog", ["status", "type"], unique=False + ) + op.create_index( + "ix_actionlog_status_retries", "actionlog", ["status", "retries"], unique=False + ) + op.drop_index("ix_actionlog_status_namespace_id_record_id", table_name="actionlog") diff --git a/migrations/versions/244_cursor_index.py b/migrations/versions/244_cursor_index.py index 973b79f9d..628394899 100644 --- a/migrations/versions/244_cursor_index.py +++ b/migrations/versions/244_cursor_index.py @@ -7,17 +7,22 @@ """ # revision identifiers, used by Alembic. 
-revision = '2c67046c548d' -down_revision = '2c47d9226de6' +revision = "2c67046c548d" +down_revision = "2c47d9226de6" from alembic import op def upgrade(): - op.create_index('ix_transaction_namespace_id_object_type_id', 'transaction', - ['namespace_id', 'object_type', 'id'], unique=False) + op.create_index( + "ix_transaction_namespace_id_object_type_id", + "transaction", + ["namespace_id", "object_type", "id"], + unique=False, + ) def downgrade(): - op.drop_index('ix_transaction_namespace_id_object_type_id', - table_name='transaction') + op.drop_index( + "ix_transaction_namespace_id_object_type_id", table_name="transaction" + ) diff --git a/migrations/versions/245_cascade_secrets.py b/migrations/versions/245_cascade_secrets.py index 13aadaf4f..c675f63b1 100644 --- a/migrations/versions/245_cascade_secrets.py +++ b/migrations/versions/245_cascade_secrets.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '1449eededf1' -down_revision = '2c67046c548d' +revision = "1449eededf1" +down_revision = "2c67046c548d" from alembic import op import sqlalchemy as sa @@ -16,19 +16,23 @@ def upgrade(): conn = op.get_bind() - conn.execute(''' + conn.execute( + """ ALTER TABLE `genericaccount` DROP FOREIGN KEY `genericaccount_ibfk_2`; ALTER TABLE `genericaccount` ADD CONSTRAINT `genericaccount_ibfk_2` FOREIGN KEY (`imap_password_id`) REFERENCES `secret` (`id`) ON DELETE CASCADE; ALTER TABLE `genericaccount` DROP FOREIGN KEY `genericaccount_ibfk_3`; ALTER TABLE `genericaccount` ADD CONSTRAINT `genericaccount_ibfk_3` FOREIGN KEY (`smtp_password_id`) REFERENCES `secret` (`id`) ON DELETE CASCADE; - ''') + """ + ) def downgrade(): conn = op.get_bind() - conn.execute(''' + conn.execute( + """ ALTER TABLE `genericaccount` DROP FOREIGN KEY `genericaccount_ibfk_2`; ALTER TABLE `genericaccount` ADD CONSTRAINT `genericaccount_ibfk_2` FOREIGN KEY (`imap_password_id`) REFERENCES `secret` (`id`); ALTER TABLE `genericaccount` DROP FOREIGN KEY `genericaccount_ibfk_3`; ALTER TABLE `genericaccount` ADD CONSTRAINT `genericaccount_ibfk_3` FOREIGN KEY (`smtp_password_id`) REFERENCES `secret` (`id`); - ''') + """ + ) diff --git a/migrations/versions/246_create_message_actionlog_indexes.py b/migrations/versions/246_create_message_actionlog_indexes.py index 30748fa0b..7e4eb1e2f 100644 --- a/migrations/versions/246_create_message_actionlog_indexes.py +++ b/migrations/versions/246_create_message_actionlog_indexes.py @@ -7,22 +7,28 @@ """ # revision identifiers, used by Alembic. 
-revision = '69c4b13c806' -down_revision = '1449eededf1' +revision = "69c4b13c806" +down_revision = "1449eededf1" from alembic import op import sqlalchemy as sa def upgrade(): - op.create_index('ix_message_namespace_id_received_date', 'message', - ['namespace_id', 'received_date']) + op.create_index( + "ix_message_namespace_id_received_date", + "message", + ["namespace_id", "received_date"], + ) - op.create_index('ix_actionlog_namespace_id_status_type', 'actionlog', - ['namespace_id', 'status', 'type']) + op.create_index( + "ix_actionlog_namespace_id_status_type", + "actionlog", + ["namespace_id", "status", "type"], + ) def downgrade(): - op.drop_index('ix_message_namespace_id_received_date', table_name='message') + op.drop_index("ix_message_namespace_id_received_date", table_name="message") - op.drop_index('ix_actionlog_namespace_id_status_type', table_name='actionlog') + op.drop_index("ix_actionlog_namespace_id_status_type", table_name="actionlog") diff --git a/migrations/versions/247_add_event_visibility.py b/migrations/versions/247_add_event_visibility.py index 9487a7a4d..60aa50d0f 100644 --- a/migrations/versions/247_add_event_visibility.py +++ b/migrations/versions/247_add_event_visibility.py @@ -7,16 +7,18 @@ """ # revision identifiers, used by Alembic. -revision = '53b532fda984' -down_revision = '69c4b13c806' +revision = "53b532fda984" +down_revision = "69c4b13c806" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('event', sa.Column('visibility', sa.Enum('private', 'public'), nullable=True)) + op.add_column( + "event", sa.Column("visibility", sa.Enum("private", "public"), nullable=True) + ) def downgrade(): - op.drop_column('event', 'visibility') + op.drop_column("event", "visibility") diff --git a/migrations/versions/248_event_contact_association.py b/migrations/versions/248_event_contact_association.py index ec7b3ffda..058b74aeb 100644 --- a/migrations/versions/248_event_contact_association.py +++ b/migrations/versions/248_event_contact_association.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '203ae9bf0ddd' -down_revision = '53b532fda984' +revision = "203ae9bf0ddd" +down_revision = "53b532fda984" from alembic import op import sqlalchemy as sa @@ -16,22 +16,38 @@ def upgrade(): op.create_table( - 'eventcontactassociation', - sa.Column('created_at', sa.DateTime(), nullable=False, - server_default=sa.text(u'now()')), - sa.Column('id', sa.BigInteger(), nullable=False, autoincrement=True), - sa.Column('contact_id', sa.BigInteger(), nullable=False), - sa.Column('event_id', sa.BigInteger(), nullable=False), - sa.Column('field', - sa.Enum('participant', 'title', 'description', 'owner'), - nullable=True), - sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ), - sa.ForeignKeyConstraint(['event_id'], ['event.id'], ), - sa.PrimaryKeyConstraint('id', 'contact_id', 'event_id'), + "eventcontactassociation", + sa.Column( + "created_at", + sa.DateTime(), + nullable=False, + server_default=sa.text(u"now()"), + ), + sa.Column("id", sa.BigInteger(), nullable=False, autoincrement=True), + sa.Column("contact_id", sa.BigInteger(), nullable=False), + sa.Column("event_id", sa.BigInteger(), nullable=False), + sa.Column( + "field", + sa.Enum("participant", "title", "description", "owner"), + nullable=True, + ), + sa.ForeignKeyConstraint(["contact_id"], ["contact.id"],), + sa.ForeignKeyConstraint(["event_id"], ["event.id"],), + sa.PrimaryKeyConstraint("id", "contact_id", "event_id"), + ) + op.create_index( + "ix_eventcontactassociation_created_at", + "eventcontactassociation", + ["created_at"], + unique=False, + ) + op.create_index( + "ix_eventcontactassociation_contact_id", + "eventcontactassociation", + ["contact_id"], + unique=False, ) - op.create_index('ix_eventcontactassociation_created_at', 'eventcontactassociation', ['created_at'], unique=False) - op.create_index('ix_eventcontactassociation_contact_id', 'eventcontactassociation', ['contact_id'], unique=False) def downgrade(): - op.drop_table('eventcontactassociation') + op.drop_table("eventcontactassociation") diff --git a/migrations/versions/249_fix_contact_association_constraints.py b/migrations/versions/249_fix_contact_association_constraints.py index bb01632f4..07a379e02 100644 --- a/migrations/versions/249_fix_contact_association_constraints.py +++ b/migrations/versions/249_fix_contact_association_constraints.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '36ce9c8635ef' -down_revision = '203ae9bf0ddd' +revision = "36ce9c8635ef" +down_revision = "203ae9bf0ddd" from alembic import op import sqlalchemy as sa @@ -16,13 +16,25 @@ def upgrade(): connection = op.get_bind() - connection.execute('ALTER TABLE `eventcontactassociation` DROP FOREIGN KEY `eventcontactassociation_ibfk_2`') - connection.execute('ALTER TABLE `eventcontactassociation` ADD CONSTRAINT `eventcontactassociation_ibfk_2` FOREIGN KEY (`event_id`) REFERENCES `event` (`id`) ON DELETE CASCADE') - connection.execute('ALTER TABLE `messagecontactassociation` ADD CONSTRAINT `messagecontactassociation_ibfk_2` FOREIGN KEY (`contact_id`) REFERENCES `contact` (`id`)') + connection.execute( + "ALTER TABLE `eventcontactassociation` DROP FOREIGN KEY `eventcontactassociation_ibfk_2`" + ) + connection.execute( + "ALTER TABLE `eventcontactassociation` ADD CONSTRAINT `eventcontactassociation_ibfk_2` FOREIGN KEY (`event_id`) REFERENCES `event` (`id`) ON DELETE CASCADE" + ) + connection.execute( + "ALTER TABLE `messagecontactassociation` ADD CONSTRAINT `messagecontactassociation_ibfk_2` FOREIGN KEY (`contact_id`) REFERENCES `contact` (`id`)" + ) def downgrade(): connection = op.get_bind() - connection.execute('ALTER TABLE `eventcontactassociation` DROP FOREIGN KEY `eventcontactassociation_ibfk_2`') - connection.execute('ALTER TABLE `eventcontactassociation` ADD CONSTRAINT `eventcontactassociation_ibfk_2` FOREIGN KEY (`event_id`) REFERENCES `event` (`id`)') - connection.execute('ALTER TABLE `messagecontactassociation` DROP FOREIGN KEY `messagecontactassociation_ibfk_2`') + connection.execute( + "ALTER TABLE `eventcontactassociation` DROP FOREIGN KEY `eventcontactassociation_ibfk_2`" + ) + connection.execute( + "ALTER TABLE `eventcontactassociation` ADD CONSTRAINT `eventcontactassociation_ibfk_2` FOREIGN KEY (`event_id`) REFERENCES `event` (`id`)" + ) + connection.execute( + "ALTER TABLE `messagecontactassociation` DROP FOREIGN KEY `messagecontactassociation_ibfk_2`" + ) diff --git a/setup.py b/setup.py index 376ba239f..ed0147f56 100644 --- a/setup.py +++ b/setup.py @@ -7,9 +7,7 @@ name="inbox-sync", version="17.3.8", # Release Mar 8, 2017 packages=find_packages(), - install_requires=[], - include_package_data=True, package_data={ # "inbox-sync": ["alembic.ini"], @@ -18,49 +16,53 @@ # And include any *.msg files found in the 'hello' package, too: # 'hello': ['*.msg'], }, - data_files=[("sync-engine-test-config", glob.glob("etc/*test*")), - ("alembic-inbox-sync", ["alembic.ini"]), - ("alembic-inbox-sync/migrations", - filter(os.path.isfile, glob.glob("migrations/*"))), - ("alembic-inbox-sync/migrations/versions", - filter(os.path.isfile, glob.glob("migrations/versions/*"))) - ], - - scripts=['bin/inbox-start', - 'bin/inbox-console', - 'bin/inbox-auth', - 'bin/delete-account-data', - 'bin/delete-marked-accounts', - 'bin/create-db', - 'bin/create-test-db', - 'bin/verify-db', - 'bin/migrate-db', - 'bin/stamp-db', - 'bin/inbox-api', - 'bin/get-id', - 'bin/get-object', - 'bin/set-throttled', - 'bin/syncback-service', - 'bin/contact-search-service', - 'bin/contact-search-backfill', - 'bin/contact-search-delete-index', - 'bin/backfix-generic-imap-separators.py', - 'bin/backfix-duplicate-categories.py', - 'bin/correct-autoincrements', - 'bin/update-categories', - 'bin/detect-missing-sync-host', - 'bin/purge-transaction-log', - 'bin/mysql-prompt', - 'bin/unschedule-account-syncs', - 'bin/syncback-stats', - 'bin/set-desired-host', - 'bin/get-accounts-for-host', - 'bin/deferred-migration-service', - 
'bin/balance-fleet', - 'bin/get-account-loads', - 'bin/restart-forgotten-accounts', - ], - + data_files=[ + ("sync-engine-test-config", glob.glob("etc/*test*")), + ("alembic-inbox-sync", ["alembic.ini"]), + ( + "alembic-inbox-sync/migrations", + filter(os.path.isfile, glob.glob("migrations/*")), + ), + ( + "alembic-inbox-sync/migrations/versions", + filter(os.path.isfile, glob.glob("migrations/versions/*")), + ), + ], + scripts=[ + "bin/inbox-start", + "bin/inbox-console", + "bin/inbox-auth", + "bin/delete-account-data", + "bin/delete-marked-accounts", + "bin/create-db", + "bin/create-test-db", + "bin/verify-db", + "bin/migrate-db", + "bin/stamp-db", + "bin/inbox-api", + "bin/get-id", + "bin/get-object", + "bin/set-throttled", + "bin/syncback-service", + "bin/contact-search-service", + "bin/contact-search-backfill", + "bin/contact-search-delete-index", + "bin/backfix-generic-imap-separators.py", + "bin/backfix-duplicate-categories.py", + "bin/correct-autoincrements", + "bin/update-categories", + "bin/detect-missing-sync-host", + "bin/purge-transaction-log", + "bin/mysql-prompt", + "bin/unschedule-account-syncs", + "bin/syncback-stats", + "bin/set-desired-host", + "bin/get-accounts-for-host", + "bin/deferred-migration-service", + "bin/balance-fleet", + "bin/get-account-loads", + "bin/restart-forgotten-accounts", + ], # See: # https://pythonhosted.org/setuptools/setuptools.html#dynamic-discovery-of-services-and-plugins # https://pythonhosted.org/setuptools/pkg_resources.html#entry-points