diff --git a/config/dpkg/changelog b/config/dpkg/changelog
index f46e515ac5..2c1542a276 100644
--- a/config/dpkg/changelog
+++ b/config/dpkg/changelog
@@ -2,4 +2,4 @@ python-plaso (1.3.1-1) unstable; urgency=low
 
   * Auto-generated
 
- -- Log2Timeline Fri, 04 Dec 2015 04:03:55 +0100
+ -- Log2Timeline Fri, 04 Dec 2015 15:29:30 +0100
diff --git a/plaso/engine/processing_status.py b/plaso/engine/processing_status.py
index 86872d372e..59369e0858 100644
--- a/plaso/engine/processing_status.py
+++ b/plaso/engine/processing_status.py
@@ -405,7 +405,9 @@ def WorkersRunning(self):
         continue
       if (extraction_worker_status.number_of_events_delta > 0 or
           extraction_worker_status.consumed_number_of_path_specs_delta > 0 or
-          extraction_worker_status.produced_number_of_path_specs_delta > 0):
+          extraction_worker_status.produced_number_of_path_specs_delta > 0 or
+          (extraction_worker_status.status ==
+           definitions.PROCESSING_STATUS_HASHING)):
         return True
 
     return False
diff --git a/plaso/engine/worker.py b/plaso/engine/worker.py
index b563ae2405..40b5862bbb 100644
--- a/plaso/engine/worker.py
+++ b/plaso/engine/worker.py
@@ -185,6 +185,8 @@ def _HashDataStream(self, file_entry, data_stream_name=u''):
     logging.debug(u'[HashDataStream] hashing file: {0:s}'.format(
         self._current_display_name))
 
+    self._status = definitions.PROCESSING_STATUS_HASHING
+
     file_object = file_entry.GetFileObject(data_stream_name=data_stream_name)
     if not file_object:
       return
@@ -439,6 +441,7 @@ def _ProcessDataStream(self, file_entry, data_stream_name=u''):
     if not parser_name_list:
       parser_name_list = self._non_sigscan_parser_names
 
+    self._status = definitions.PROCESSING_STATUS_PARSING
     for parser_name in parser_name_list:
       parser_object = self._parser_objects.get(parser_name, None)
       if not parser_object:
diff --git a/plaso/lib/definitions.py b/plaso/lib/definitions.py
index 1be4d0bf88..b4dd7e7031 100644
--- a/plaso/lib/definitions.py
+++ b/plaso/lib/definitions.py
@@ -14,8 +14,10 @@
 PROCESSING_STATUS_COMPLETED = u'completed'
 PROCESSING_STATUS_ERROR = u'error'
+PROCESSING_STATUS_HASHING = u'hashing'
 PROCESSING_STATUS_INITIALIZED = u'initialized'
 PROCESSING_STATUS_KILLED = u'killed'
+PROCESSING_STATUS_PARSING = u'parsing'
 PROCESSING_STATUS_RUNNING = u'running'
 
 RESERVED_VARIABLE_NAMES = frozenset([
diff --git a/plaso/multi_processing/multi_process.py b/plaso/multi_processing/multi_process.py
index 0a35fd1260..0e67ba722b 100644
--- a/plaso/multi_processing/multi_process.py
+++ b/plaso/multi_processing/multi_process.py
@@ -521,7 +521,9 @@ def _CheckProcessStatus(self, pid):
     if status_indicator not in [
         definitions.PROCESSING_STATUS_COMPLETED,
         definitions.PROCESSING_STATUS_INITIALIZED,
-        definitions.PROCESSING_STATUS_RUNNING]:
+        definitions.PROCESSING_STATUS_RUNNING,
+        definitions.PROCESSING_STATUS_PARSING,
+        definitions.PROCESSING_STATUS_HASHING]:
 
       logging.error(
           (u'Process {0:s} (PID: {1:d}) is not functioning correctly. '
diff --git a/tools/log2timeline.py b/tools/log2timeline.py
index a6de67bcc5..61c879c63f 100755
--- a/tools/log2timeline.py
+++ b/tools/log2timeline.py
@@ -134,7 +134,9 @@ def _FormatStatusTableRow(
       display_name: the display name of the file last processed.
     """
     if (number_of_events_delta == 0 and
-        status == definitions.PROCESSING_STATUS_RUNNING):
+        status in [definitions.PROCESSING_STATUS_RUNNING,
+                   definitions.PROCESSING_STATUS_HASHING,
+                   definitions.PROCESSING_STATUS_PARSING]):
       status = process_status
 
     # This check makes sure the columns are tab aligned.
@@ -316,7 +318,9 @@ def _PrintStatusUpdateStream(self, processing_status):
           extraction_worker_status.pid,
           extraction_worker_status.number_of_events,
           extraction_worker_status.display_name,
-          status == definitions.PROCESSING_STATUS_RUNNING,
+          status in [definitions.PROCESSING_STATUS_RUNNING,
+                     definitions.PROCESSING_STATUS_HASHING,
+                     definitions.PROCESSING_STATUS_PARSING],
           extraction_worker_status.process_status))
 
   def AddExperimentalOptions(self, argument_group):
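In short, the patch introduces two new worker status values, u'hashing' and u'parsing', and updates the status checks so that a worker reporting either value is still treated as active, the same way u'running' is. The snippet below is a minimal, hypothetical illustration of that membership check; the names ACTIVE_STATUSES and IsWorkerActive are illustrative only and are not part of plaso.

# Hypothetical illustration only; not part of the patch above.
PROCESSING_STATUS_HASHING = u'hashing'
PROCESSING_STATUS_PARSING = u'parsing'
PROCESSING_STATUS_RUNNING = u'running'

# Status values that mean the worker is still doing work.
ACTIVE_STATUSES = frozenset([
    PROCESSING_STATUS_HASHING,
    PROCESSING_STATUS_PARSING,
    PROCESSING_STATUS_RUNNING])


def IsWorkerActive(status):
  """Returns True if the status value indicates an active worker."""
  return status in ACTIVE_STATUSES


print(IsWorkerActive(u'hashing'))    # True
print(IsWorkerActive(u'completed'))  # False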