Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Disable checks for optional directories that are not created by the Linux scanner (TKT-1145) #122

Closed
wants to merge 3 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions CHANGES
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
1.19.7
------
- Removed validation of optional directories in endpoint scans

1.19.6
------
- Add InvalidUrlError
Expand Down
2 changes: 1 addition & 1 deletion intezer_sdk/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = '1.19.6'
__version__ = '1.19.7'
18 changes: 10 additions & 8 deletions intezer_sdk/endpoint_analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,10 +126,6 @@ def _send_analyze_to_api(self, **additional_parameters) -> str:
raise ValueError('Scan directory does not exist')
if not os.path.isdir(self._files_dir):
raise ValueError('Files directory does not exist')
if not os.path.isdir(self._fileless_dir):
raise ValueError('Fileless directory does not exist')
if not os.path.isdir(self._memory_modules_dir):
raise ValueError('Memory modules directory does not exist')

self._scan_id, self.analysis_id = self._create_scan()

Expand Down Expand Up @@ -212,7 +208,7 @@ def _send_loaded_modules_info(self):
def _send_files_info_and_upload_required(self):
logger.info(f'Endpoint analysis: {self.analysis_id}, uploading files info and uploading required files')
with concurrent.futures.ThreadPoolExecutor() as executor:
for files_info_file in glob.glob(os.path.join(self._offline_scan_directory, 'files_info_*.json')):
for files_info_file in glob.glob(os.path.join(self._offline_scan_directory, 'files_info*.json')):

logger.debug(f'Endpoint analysis: {self.analysis_id}, uploading {files_info_file}')
with open(files_info_file, encoding='utf-8') as f:
Expand All @@ -233,21 +229,27 @@ def _send_files_info_and_upload_required(self):

def _send_module_differences(self):
logger.info(f'Endpoint analysis: {self.analysis_id}, uploading file module differences info')
with open(os.path.join(self._offline_scan_directory, 'file_module_differences.json'), encoding='utf-8') as f:
path = os.path.join(self._offline_scan_directory, 'file_module_differences.json')
if not os.path.isfile(path):
return
with open(path, encoding='utf-8') as f:
file_module_differences = json.load(f)
self._scan_api.send_file_module_differences(file_module_differences)

def _send_injected_modules_info(self):
logger.info(f'Endpoint analysis: {self.analysis_id}, uploading injected modules info')
with open(os.path.join(self._offline_scan_directory, 'injected_modules_info.json'), encoding='utf-8') as f:
path = os.path.join(self._offline_scan_directory, 'injected_modules_info.json')
if not os.path.isfile(path):
return
with open(path, encoding='utf-8') as f:
injected_modules_info = json.load(f)
self._scan_api.send_injected_modules_info(injected_modules_info)

def _send_memory_module_dump_info_and_upload_required(self):
logger.info(f'Endpoint analysis: {self.analysis_id}, uploading memory module dump info')
with concurrent.futures.ThreadPoolExecutor() as executor:
for memory_module_dump_info_file in glob.glob(os.path.join(self._offline_scan_directory,
'memory_module_dump_info_*.json')):
'memory_module_dump_info*.json')):

logger.debug(f'Endpoint analysis: {self.analysis_id}, uploading {memory_module_dump_info_file}')
with open(memory_module_dump_info_file, encoding='utf-8') as f:
Expand Down