Route level logging (#121)
* C4-14 add some route level logging

* fix sys import

* bump version

* use deco instead

* C4-14 add context so deco will work in some scenarios

* add context to dlq_to_primary

* unskip deco test

* fix schema routes as well

* up travis python to 3.6

* revert 3.6

* C4-15 address Kent's points

* C4-14 augment test, add more log statements to esstorage
willronchetti authored Jan 30, 2020
1 parent fe75644 commit 424d20f
Showing 20 changed files with 216 additions and 92 deletions.
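
The recurring change across these files is a `@debug_log` decorator, imported from snovault's `util` module and stacked directly under each `@view_config`. The decorator's implementation is not part of this diff, so the following is only a rough sketch of what a route-level logging decorator of this shape might look like: the body, log format, and logger setup here are assumptions, while the `debug_log` name, its `.util` import, and the `(context, request)` view signature it relies on come from the diff itself.

import logging
from functools import wraps

log = logging.getLogger(__name__)


def debug_log(func):
    """Sketch of a view-logging decorator; not the actual snovault.util implementation."""
    @wraps(func)
    def wrapper(context, request):
        # Assumes the two-argument (context, request) view signature, which is why
        # the routed views in this commit gain an explicit `context` parameter.
        log.info('VIEW %s %s handled by %s', request.method, request.path, func.__name__)
        return func(context, request)
    return wrapper

Stacked under `@view_config`, a decorator like this would emit one INFO line per request for every registered route without touching the view bodies.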
1 change: 1 addition & 0 deletions pytest.ini
@@ -1,4 +1,5 @@
[pytest]
log_cli_level = INFO
addopts =
--pyargs snovault.tests
--instafail
2 changes: 1 addition & 1 deletion src/snovault/_version.py
@@ -1,4 +1,4 @@
"""Version information."""

# The following line *must* be the last in the module, exactly as formatted:
__version__ = "1.3.7"
__version__ = "1.3.8"
9 changes: 7 additions & 2 deletions src/snovault/aggregated_items.py
@@ -1,7 +1,11 @@
import sys

from pyramid.httpexceptions import HTTPForbidden
from pyramid.view import view_config

from .calculated import calculated_property
from .resources import Item
from pyramid.view import view_config
from pyramid.httpexceptions import HTTPForbidden
from .util import debug_log


def includeme(config):
@@ -11,6 +15,7 @@ def includeme(config):

@view_config(context=Item, permission='view', request_method='GET',
name='aggregated-items')
@debug_log
def item_view_aggregated_items(context, request):
"""
View config for aggregated_items. If the current model does not have
23 changes: 16 additions & 7 deletions src/snovault/attachment.py
@@ -1,7 +1,15 @@
import mimetypes
import uuid
from base64 import b64decode
from hashlib import md5
from io import BytesIO
from mimetypes import guess_type
from urllib.parse import (
quote,
unquote,
)

import magic
from PIL import Image
from pyramid.httpexceptions import (
HTTPNotFound,
@@ -10,23 +18,23 @@
from pyramid.response import Response
from pyramid.traversal import find_root
from pyramid.view import view_config
from urllib.parse import (
quote,
unquote,
)
from structlog import getLogger

from snovault import (
BLOBS,
Item,
)
from .util import debug_log
from .validation import ValidationFailure
import magic
import mimetypes
import uuid


def includeme(config):
config.scan(__name__)


log = getLogger(__name__)


class ItemWithAttachment(Item):
"""
Item base class with attachment blob.
@@ -242,6 +250,7 @@ def _update(self, properties, sheets=None):

@view_config(name='download', context=ItemWithAttachment, request_method='GET',
permission='view', subpath_segments=2)
@debug_log
def download(context, request):
prop_name, filename = request.subpath
try:
16 changes: 11 additions & 5 deletions src/snovault/batchupgrade.py
@@ -7,27 +7,32 @@
"""
import itertools
import logging
import transaction
from copy import deepcopy

import transaction
from pyramid.traversal import find_resource
from pyramid.view import view_config
from structlog import getLogger

from snovault import (
CONNECTION,
STORAGE,
UPGRADER,
)
from pyramid.view import view_config
from pyramid.traversal import find_resource
from .schema_utils import validate
from .util import debug_log

logger = getLogger(__name__)
EPILOG = __doc__
logger = logging.getLogger(__name__)


def includeme(config):
config.add_route('batch_upgrade', '/batch_upgrade')
config.scan(__name__)




def batched(iterable, n=1):
l = len(iterable)
for ndx in range(0, l, n):
@@ -94,6 +99,7 @@ def update_item(storage, context):


@view_config(route_name='batch_upgrade', request_method='POST', permission='import_items')
@debug_log
def batch_upgrade(request):
request.datastore = 'database'
transaction.get().setExtendedInfo('upgrade', True)
27 changes: 16 additions & 11 deletions src/snovault/crud_views.py
@@ -1,26 +1,28 @@
from past.builtins import basestring
from pyramid.settings import asbool
from pyramid.traversal import (
find_resource,
)
from pyramid.view import view_config
import sys
from uuid import (
UUID,
uuid4,
)

import transaction
from pyramid.settings import asbool
from pyramid.view import view_config
from structlog import get_logger

from .calculated import calculated_property
from .interfaces import (
COLLECTIONS,
CONNECTION,
STORAGE,
Created,
BeforeModified,
AfterModified,
)
from .invalidation import add_to_indexing_queue
from .resources import (
Collection,
Item,
)
from .calculated import calculated_property
from .util import debug_log
from .validation import ValidationFailure
from .validators import (
no_validate_item_content_patch,
@@ -31,10 +33,7 @@
validate_item_content_put,
validate_item_content_in_place
)
from .invalidation import add_to_indexing_queue
import transaction

from structlog import get_logger
log = get_logger(__name__)


@@ -147,6 +146,7 @@ def render_item(request, context, render, return_uri_also=False):
@view_config(context=Collection, permission='add_unvalidated', request_method='POST',
validators=[no_validate_item_content_post],
request_param=['validate=false'])
@debug_log
def collection_add(context, request, render=None):
'''Endpoint for adding a new Item.'''
check_only = asbool(request.params.get('check_only', False))
@@ -185,6 +185,7 @@ def collection_add(context, request, render=None):
@view_config(context=Item, permission='index', request_method='GET',
validators=[validate_item_content_in_place],
request_param=['check_only=true'])
@debug_log
def item_edit(context, request, render=None):
'''
Endpoint for editing an existing Item.
@@ -225,6 +226,7 @@ def item_edit(context, request, render=None):

@view_config(context=Item, permission='view', request_method='GET',
name='links')
@debug_log
def get_linking_items(context, request, render=None):
"""
Utilize find_uuids_linked_to_item function in PickStorage to find
@@ -245,6 +247,7 @@ def get_linking_items(context, request, render=None):


@view_config(context=Item, permission='edit', request_method='DELETE')
@debug_log
def item_delete_full(context, request, render=None):
"""
DELETE method that either sets the status of an item to deleted (base
@@ -297,8 +300,10 @@ def item_delete_full(context, request, render=None):
}



@view_config(context=Item, permission='view', request_method='GET',
name='validation-errors')
@debug_log
def item_view_validation_errors(context, request):
"""
View config for validation_errors. If the current model does not have
7 changes: 6 additions & 1 deletion src/snovault/elasticsearch/cached_views.py
@@ -1,17 +1,21 @@
""" Cached views used when model was pulled from elasticsearch.
"""

from itertools import chain
import sys

from pyramid.httpexceptions import HTTPForbidden
from pyramid.view import view_config

from .interfaces import ICachedItem
from ..util import debug_log


def includeme(config):
config.scan(__name__)


@view_config(context=ICachedItem, request_method='GET', name='embedded')
@debug_log
def cached_view_embedded(context, request):
source = context.model.source
allowed = set(source['principals_allowed']['view'])
@@ -45,6 +49,7 @@ def filter_embedded(embedded, effective_principals):


@view_config(context=ICachedItem, request_method='GET', name='object')
@debug_log
def cached_view_object(context, request):
source = context.model.source
allowed = set(source['principals_allowed']['view'])
3 changes: 3 additions & 0 deletions src/snovault/elasticsearch/esstorage.py
@@ -330,6 +330,7 @@ def purge_uuid(self, rid, index_name, item_type=None, registry=None):
mirror_env = registry.settings['mirror.env.name']
use_aws_auth = registry.settings.get('elasticsearch.aws_auth')
mirror_health = ff_utils.get_health_page(ff_env=mirror_env)
log.info('PURGE: attempting to purge %s from mirror storage %s' % (rid, mirror_env))
# if we could not get mirror health, bail here
if 'error' in mirror_health:
log.error('PURGE: Tried to purge %s from mirror storage but couldn\'t get health page. Is staging up?' % rid)
@@ -348,6 +349,8 @@ def purge_uuid(self, rid, index_name, item_type=None, registry=None):
log.error('PURGE: Couldn\'t find %s in mirrored ElasticSearch (%s). Continuing.' % (rid, mirror_env))
except Exception as exc:
log.error('PURGE: Cannot delete %s in mirrored ElasticSearch (%s). Error: %s Continuing.' % (item_type, mirror_env, str(exc)))
else:
log.info('PURGE: Did not find a mirror env. Continuing.')

def __iter__(self, *item_types):
query = {'query': {
31 changes: 18 additions & 13 deletions src/snovault/elasticsearch/indexer.py
@@ -1,27 +1,31 @@
import copy
import datetime
import json
import time
import sys
from timeit import default_timer as timer

import structlog
from elasticsearch.exceptions import (
ConflictError,
ConnectionError,
TransportError,
)
from ..embed import MissingIndexItemException
from pyramid.view import view_config
from urllib3.exceptions import ReadTimeoutError
from .interfaces import (
ELASTIC_SEARCH,
INDEXER,
INDEXER_QUEUE
)

from snovault import (
DBSESSION,
STORAGE
)
from .indexer_utils import get_namespaced_index, find_uuids_for_indexing
import datetime
import structlog
import time
import copy
import json
from timeit import default_timer as timer
from .interfaces import (
ELASTIC_SEARCH,
INDEXER,
INDEXER_QUEUE
)
from ..embed import MissingIndexItemException
from ..util import debug_log

log = structlog.getLogger(__name__)

@@ -63,7 +67,8 @@ def check_sid(sid, max_sid):


@view_config(route_name='index', request_method='POST', permission="index")
def index(request):
@debug_log
def index(context, request):
# Setting request.datastore here only works because routed views are not traversed.
request.datastore = 'database'
record = request.json.get('record', False) # if True, make a record in es
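Note the signature change from `def index(request)` to `def index(context, request)`. Pyramid accepts both one-argument (`request`) and two-argument (`context`, `request`) view callables; standardizing the routed views on the two-argument form, per the "add context so deco will work in some scenarios" note above, presumably lets one decorator wrap routed and traversal views alike. A hypothetical side-by-side with placeholder bodies:

# Pyramid accepts either view-callable signature; the routed views in this commit
# move to the two-argument form so one decorator can assume a single convention.
def index_before(request):            # one-argument routed-view style (old)
    ...


def index_after(context, request):    # two-argument style shared with traversal views (new)
    ...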
26 changes: 16 additions & 10 deletions src/snovault/elasticsearch/indexer_queue.py
@@ -1,21 +1,24 @@
### Class to manage the items for indexing
# First round will use a standard SQS queue from AWS without Elasticache.

import boto3
import datetime
import json
import math
import structlog
import socket
import sys
import time
import datetime
from collections import OrderedDict

import boto3
import structlog
from pyramid.view import view_config
from pyramid.decorator import reify
from .interfaces import INDEXER_QUEUE, INDEXER_QUEUE_MIRROR

from .indexer_utils import get_uuids_for_types
from collections import OrderedDict
from .interfaces import INDEXER_QUEUE, INDEXER_QUEUE_MIRROR
from ..util import debug_log

log = structlog.getLogger(__name__)


def includeme(config):
config.add_route('queue_indexing', '/queue_indexing')
config.add_route('indexing_status', '/indexing_status')
@@ -37,7 +40,8 @@ def includeme(config):


@view_config(route_name='queue_indexing', request_method='POST', permission="index")
def queue_indexing(request):
@debug_log
def queue_indexing(context, request):
"""
Endpoint to queue items for indexing. Takes a POST request with index
privileges which should contain either a list of uuids under "uuids" key
@@ -95,7 +99,8 @@ def queue_indexing(request):


@view_config(route_name='indexing_status', request_method='GET')
def indexing_status(request):
@debug_log
def indexing_status(context, request):
"""
Endpoint to check what is currently on the queue. Uses GET requests
"""
@@ -115,7 +120,8 @@ def indexing_status(request):


@view_config(route_name='dlq_to_primary', request_method='GET', permission='index')
def dlq_to_primary(request):
@debug_log
def dlq_to_primary(context, request):
"""
Endpoint to move all uuids on the DLQ to the primary queue
"""
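Based on the queue_indexing docstring above, `/queue_indexing` accepts a POST whose body carries a list of uuids under a "uuids" key, from a client with index privileges. A hypothetical client call; the server URL and credentials are placeholders, and the exact auth scheme is not shown in this diff.

import requests

resp = requests.post(
    'http://localhost:6543/queue_indexing',                    # placeholder server URL
    json={'uuids': ['11111111-1111-1111-1111-111111111111']},  # payload shape taken from the docstring
    auth=('access-key-id', 'access-key-secret'),                # placeholder credentials
)
print(resp.json())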
(diffs for the remaining 10 changed files are not shown)

