From 5f8c8e0c4a62d65909b4ddf1c6b5bc1b74287394 Mon Sep 17 00:00:00 2001
From: Evgeniy Naydanov
Date: Sun, 9 Jan 2022 14:13:42 +0000
Subject: [PATCH] fix(pylint): remove unspecified-encoding warnings

Pass an explicit encoding="utf-8" to the open(), Path.read_text() and
Path.write_text() calls that pylint flags with unspecified-encoding
(W1514), so file I/O no longer depends on the locale's preferred
encoding, and stop disabling that check in the pre-commit pylint hook.
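For illustration only (not part of the change): a minimal sketch of why the
default is unsafe. The temporary-file name and contents below are made up
for the demo.

    import locale
    import tempfile

    # Without an explicit encoding, open() falls back to the locale's
    # preferred encoding, which differs between, e.g., a UTF-8 desktop
    # and a POSIX-locale CI host.
    print(locale.getpreferredencoding(False))

    with tempfile.NamedTemporaryFile(mode="w", encoding="utf-8",
                                     suffix=".yaml", delete=False) as tmp:
        tmp.write("cluster_name: test\n")

    # Locale-dependent read -- what pylint's unspecified-encoding flags:
    with open(tmp.name) as fobj:  # pylint: disable=unspecified-encoding
        print(fobj.read())

    # Deterministic read -- the pattern applied throughout this patch:
    with open(tmp.name, encoding="utf-8") as fobj:
        print(fobj.read())

The hook can be re-run over the whole tree with
`pre-commit run pylint --all-files` to confirm no warnings remain.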
---
 .pre-commit-config.yaml                        |  2 +-
 cdc_replication_test.py                        |  8 ++++----
 custom_cs_test.py                              |  2 +-
 grow_cluster_test.py                           |  2 +-
 jepsen_test.py                                 |  2 +-
 longevity_test.py                              |  6 +++---
 performance_regression_test.py                 |  6 +++---
 performance_regression_user_profiles_test.py   |  4 ++--
 sct.py                                         |  4 ++--
 sdcm/cluster.py                                | 16 ++++++++--------
 sdcm/cluster_aws.py                            |  2 +-
 sdcm/collectd.py                               |  8 ++++----
 sdcm/coredump.py                               |  2 +-
 sdcm/db_log_reader.py                          |  2 +-
 sdcm/gemini_thread.py                          |  2 +-
 sdcm/logcollector.py                           |  4 ++--
 sdcm/mgmt/common.py                            |  4 ++--
 sdcm/microbenchmarking.py                      |  2 +-
 sdcm/monitorstack/__init__.py                  |  6 +++---
 sdcm/remote/base.py                            |  4 ++--
 sdcm/remote/remote_file.py                     |  4 ++--
 sdcm/results_analyze/__init__.py               |  6 +++---
 sdcm/sct_events/base.py                        |  2 +-
 sdcm/send_email.py                             |  4 ++--
 sdcm/stress_thread.py                          |  2 +-
 sdcm/test_config.py                            |  4 ++--
 sdcm/tester.py                                 |  8 ++++----
 sdcm/utils/common.py                           | 12 ++++++------
 sdcm/utils/compaction_ops.py                   |  2 +-
 sdcm/utils/decorators.py                       |  4 ++--
 sdcm/utils/docker_utils.py                     |  2 +-
 sdcm/utils/file.py                             |  2 +-
 sdcm/utils/k8s.py                              | 10 +++++-----
 sdcm/utils/profiler.py                         |  4 ++--
 sdcm/utils/threads_and_processes_alive.py      |  4 ++--
 sdcm/utils/version_utils.py                    |  2 +-
 stop_compaction_test.py                        |  2 +-
 unit_tests/lib/data_pickle.py                  |  6 +++---
 unit_tests/test_events.py                      |  2 +-
 unit_tests/test_hydra_sh.py                    |  2 +-
 unit_tests/test_prometheus.py                  |  3 ++-
 unit_tests/test_remoter.py                     |  4 ++--
 unit_tests/test_sct_events_system.py           |  3 ++-
 unit_tests/test_scylla_yaml.py                 |  2 +-
 unit_tests/test_scylla_yaml_builders.py        |  2 +-
 unit_tests/test_tester.py                      |  2 +-
 utils/build_system/create_test_release_jobs.py |  4 ++--
 utils/mocks/aws_mock.py                        |  6 +++---
 48 files changed, 100 insertions(+), 98 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 509ccdd461..c49a2e2f44 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -34,7 +34,7 @@ repos:
       - id: pylint
         name: pylint
-        entry: pylint -j 2 -d consider-using-f-string,unspecified-encoding
+        entry: pylint -j 2 -d consider-using-f-string
        language: system
         exclude: ^docker/alternator-dns/.*$
         types: [python]

diff --git a/cdc_replication_test.py b/cdc_replication_test.py
index 055a869f05..77744856d7 100644
--- a/cdc_replication_test.py
+++ b/cdc_replication_test.py
@@ -46,7 +46,7 @@ def mode_str(mode: Mode) -> str:
 
 
 def print_file_to_stdout(path: str) -> None:
-    with open(path, "r") as file:
+    with open(path, encoding="utf-8") as file:
         shutil.copyfileobj(file, sys.stdout)
 
 
@@ -58,7 +58,7 @@ def write_cql_result(res, path: str):
     :param path: path to file
     :type path: str
     """
-    with open(path, 'w') as file:
+    with open(path, 'w', encoding="utf-8") as file:
         for row in res:
             file.write(str(row) + '\n')
         file.flush()
@@ -300,7 +300,7 @@ def test_replication(self, is_gemini_test: bool, mode: Mode) -> None:
         migrate_log_path = None
         migrate_ok = True
         if mode == Mode.PREIMAGE:
-            with open(replicator_log_path) as file:
+            with open(replicator_log_path, encoding="utf-8") as file:
                 self.consistency_ok = not 'Inconsistency detected.\n' in (line for line in file)
         else:
             migrate_log_path = os.path.join(self.logdir, 'scylla-migrate.log')
@@ -332,7 +332,7 @@ def check_consistency(self, migrate_log_dst_path: str, compare_timestamps: bool
             migrate_ok = res.ok
         if not migrate_ok:
             self.log.error('scylla-migrate command returned status {}'.format(res.exit_status))
-        with open(migrate_log_dst_path) as file:
+        with open(migrate_log_dst_path, encoding="utf-8") as file:
             consistency_ok = 'Consistency check OK.\n' in (line for line in file)
         return (migrate_ok, consistency_ok)

diff --git a/custom_cs_test.py b/custom_cs_test.py
index e187e45109..6314351f72 100644
--- a/custom_cs_test.py
+++ b/custom_cs_test.py
@@ -31,7 +31,7 @@ def test_write_mode(self):
         Run cassandra-stress with params defined in data_dir/scylla.yaml
         """
         cs_custom_config = get_data_dir_path('cassandra-stress-custom.yaml')
-        with open(cs_custom_config, 'r') as cs_custom_config_file:
+        with open(cs_custom_config, encoding="utf-8") as cs_custom_config_file:
             self.log.info('Using custom cassandra-stress config:')
             self.log.info(cs_custom_config_file.read())
         for node in self.loaders.nodes:

diff --git a/grow_cluster_test.py b/grow_cluster_test.py
index 860dd1307e..28b1aa2a78 100644
--- a/grow_cluster_test.py
+++ b/grow_cluster_test.py
@@ -37,7 +37,7 @@ def __init__(self, *args, **kwargs):
 
     def get_stress_cmd_profile(self):
         cs_custom_config = get_data_dir_path('cassandra-stress-custom-mixed-narrow-wide-row.yaml')
-        with open(cs_custom_config, 'r') as cs_custom_config_file:
+        with open(cs_custom_config, encoding="utf-8") as cs_custom_config_file:
             self.log.info('Using custom cassandra-stress config:')
             self.log.info(cs_custom_config_file.read())
         for node in self.loaders.nodes:

diff --git a/jepsen_test.py b/jepsen_test.py
index 102070001c..4efa84c308 100644
--- a/jepsen_test.py
+++ b/jepsen_test.py
@@ -86,7 +86,7 @@ def save_jepsen_report(self):
             sleep {JEPSEN_WEB_SERVER_START_DELAY}
         """), verbose=True)
 
-        with open(os.path.join(self.logdir, "jepsen_report.html"), "wt") as jepsen_report:
+        with open(os.path.join(self.logdir, "jepsen_report.html"), "wt", encoding="utf-8") as jepsen_report:
             jepsen_report.write(requests.get(url).text)
         self.log.info("Report has been saved to %s", jepsen_report.name)

diff --git a/longevity_test.py b/longevity_test.py
index 23b9d72177..93e04aeb61 100644
--- a/longevity_test.py
+++ b/longevity_test.py
@@ -234,7 +234,7 @@ def test_custom_time(self):
             assert os.path.exists(cs_profile), 'File not found: {}'.format(cs_profile)
             self.log.debug('Run stress test with user profile {}, duration {}'.format(cs_profile, cs_duration))
             profile_dst = os.path.join('/tmp', os.path.basename(cs_profile))
-            with open(cs_profile) as pconf:
+            with open(cs_profile, encoding="utf-8") as pconf:
                 cont = pconf.readlines()
             user_profile_table_count = self.params.get(  # pylint: disable=invalid-name
                 'user_profile_table_count')
@@ -497,7 +497,7 @@ def _pre_create_templated_user_schema(self, batch_start=None, batch_end=None):
         cs_user_profiles = self.params.get('cs_user_profiles')
         # read user-profile
         for profile_file in cs_user_profiles:
-            with open(profile_file) as fobj:
+            with open(profile_file, encoding="utf-8") as fobj:
                 profile_yaml = yaml.safe_load(fobj)
             keyspace_definition = profile_yaml['keyspace_definition']
             keyspace_name = profile_yaml['keyspace']
@@ -582,7 +582,7 @@ def create_templated_user_stress_params(self, idx, cs_profile):  # pylint: disab
         params_list = []
         cs_duration = self.params.get('cs_duration')
 
-        with open(cs_profile) as pconf:
+        with open(cs_profile, encoding="utf-8") as pconf:
            cont = pconf.readlines()
             pconf.seek(0)
             template = string.Template(pconf.read())

diff --git a/performance_regression_test.py b/performance_regression_test.py
index 32103eca42..c5627b6c07 100644
--- a/performance_regression_test.py
+++ b/performance_regression_test.py
@@ -119,7 +119,7 @@ def display_results(self, results, test_name=''):
                 self.display_single_result(single_result)
                 test_xml += self.get_test_xml(single_result, test_name=test_name)
 
-            with open(os.path.join(self.logdir, 'jenkins_perf_PerfPublisher.xml'), 'w') as pref_file:
+            with open(os.path.join(self.logdir, 'jenkins_perf_PerfPublisher.xml'), 'w', encoding="utf-8") as pref_file:
                 content = """%s""" % (test_name, test_xml)
                 pref_file.write(content)
         except Exception as ex:  # pylint: disable=broad-except
@@ -152,7 +152,7 @@ def _get_total_ops(self):
     @staticmethod
     def _clean_email_data():
         email_data_path = 'email_data.json'
-        with open(email_data_path, 'w'):
+        with open(email_data_path, 'w', encoding="utf-8"):
             pass
 
     def preload_data(self):
@@ -556,7 +556,7 @@ def get_mv_name(user_profile):
 
         # Get materialized view name from user profile
-        with open(user_profile) as fobj:
+        with open(user_profile, encoding="utf-8") as fobj:
             user_profile_yaml = yaml.safe_load(fobj)
 
         mv_name = ''

diff --git a/performance_regression_user_profiles_test.py b/performance_regression_user_profiles_test.py
index b2126e8de7..f6693fe5fc 100644
--- a/performance_regression_user_profiles_test.py
+++ b/performance_regression_user_profiles_test.py
@@ -28,7 +28,7 @@ def __init__(self, *args, **kwargs):
         self.create_stats = False
 
     def _clean_keyspace(self, cs_profile):  # pylint: disable=invalid-name
-        with open(cs_profile) as fdr:
+        with open(cs_profile, encoding="utf-8") as fdr:
             key_space = [line.split(':')[-1].strip() for line in fdr.readlines() if line.startswith('keyspace:')]
         if key_space:
             self.log.debug('Drop keyspace {}'.format(key_space[0]))
@@ -47,7 +47,7 @@ def test_user_profiles(self):
             assert os.path.exists(cs_profile), 'File not found: {}'.format(cs_profile)
             self.log.debug('Run stress test with user profile {}, duration {}'.format(cs_profile, duration))
             profile_dst = os.path.join('/tmp', os.path.basename(cs_profile))
-            with open(cs_profile) as pconf:
+            with open(cs_profile, encoding="utf-8") as pconf:
                 cont = pconf.readlines()
             for cmd in [line.lstrip('#').strip() for line in cont if line.find('cassandra-stress') > 0]:
                 stress_cmd = (cmd.format(profile_dst, duration))

diff --git a/sct.py b/sct.py
index bbc56361e6..d4889f27fa 100755
--- a/sct.py
+++ b/sct.py
@@ -1053,7 +1053,7 @@ def send_email(test_id=None, test_status=None, start_time=None, started_by=None,
         start_time = format_timestamp(int(start_time))
     testrun_dir = get_testrun_dir(test_id=test_id, base_dir=logdir)
     if testrun_dir:
-        with open(os.path.join(testrun_dir, 'test_id'), 'r', encoding='utf-8') as file:
+        with open(os.path.join(testrun_dir, 'test_id'), encoding='utf-8') as file:
            test_id = file.read().strip()
         email_results_file = os.path.join(testrun_dir, "email_data.json")
         if not os.path.exists(email_results_file):
@@ -1286,7 +1286,7 @@ def create_runner_instance(cloud_provider, region, availability_zone, instance_t
     remoter = sct_runner.get_remoter(host=runner_public_ip, connect_timeout=120)
     if remoter.run("true", timeout=100, verbose=False, ignore_status=True).ok:
         LOGGER.info("Successfully connected the SCT Runner. Public IP: %s", runner_public_ip)
-        with sct_runner_ip_path.open("w") as sct_runner_ip_file:
+        with sct_runner_ip_path.open(mode="w", encoding="utf-8") as sct_runner_ip_file:
             sct_runner_ip_file.write(runner_public_ip)
     else:
Exiting...", runner_public_ip) diff --git a/sdcm/cluster.py b/sdcm/cluster.py index 1fac404f6b..6356eb1131 100644 --- a/sdcm/cluster.py +++ b/sdcm/cluster.py @@ -621,7 +621,7 @@ def extract_seeds_from_scylla_yaml(self): yaml_dst_path = os.path.join(tempfile.mkdtemp(prefix='sct'), 'scylla.yaml') wait.wait_for(func=self.remoter.receive_files, step=10, text='Waiting for copying scylla.yaml', timeout=300, throw_exc=True, src=self.add_install_prefix(SCYLLA_YAML_PATH), dst=yaml_dst_path) - with open(yaml_dst_path, 'r') as yaml_stream: + with open(yaml_dst_path, encoding="utf-8") as yaml_stream: try: conf_dict = yaml.safe_load(yaml_stream) except Exception: @@ -1380,7 +1380,7 @@ def mark_log(self): """ if not os.path.exists(self.system_log): return 0 - with open(self.system_log) as log_file: + with open(self.system_log, encoding="utf-8") as log_file: log_file.seek(0, os.SEEK_END) return log_file.tell() @@ -3155,13 +3155,13 @@ def get_db_auth(self): def write_node_public_ip_file(self): public_ip_file_path = os.path.join(self.logdir, 'public_ips') - with open(public_ip_file_path, 'w') as public_ip_file: + with open(public_ip_file_path, 'w', encoding="utf-8") as public_ip_file: public_ip_file.write("%s" % "\n".join(self.get_node_public_ips())) public_ip_file.write("\n") def write_node_private_ip_file(self): private_ip_file_path = os.path.join(self.logdir, 'private_ips') - with open(private_ip_file_path, 'w') as private_ip_file: + with open(private_ip_file_path, 'w', encoding="utf-8") as private_ip_file: private_ip_file.write("%s" % "\n".join(self.get_node_private_ips())) private_ip_file.write("\n") @@ -4775,13 +4775,13 @@ def sct_dashboard_json_file(self): @staticmethod def sct_dashboard_json_file_content_update(update_params: dict, json_file: str): # Read json data to the string - with open(json_file, 'r') as file: + with open(json_file, encoding="utf-8") as file: json_data = file.read() for param, value in update_params.items(): json_data = json_data.replace(param, value) - with open(json_file, 'w') as file: + with open(json_file, 'w', encoding="utf-8") as file: json.dump(json.loads(json_data), file, indent=2) def node_setup(self, node, **kwargs): # pylint: disable=unused-argument @@ -4959,7 +4959,7 @@ def configure_scylla_monitoring(self, node, sct_metrics=True, alert_manager=True local_template = os.path.join(temp_dir, template_fn) node.remoter.receive_files(src=prometheus_yaml_template, dst=local_template_tmp) - with open(local_template_tmp) as output_file: + with open(local_template_tmp, encoding="utf-8") as output_file: templ_yaml = yaml.safe_load(output_file) self.log.debug("Configs %s" % templ_yaml) loader_targets_list = ["[%s]:9103" % getattr(node, self.DB_NODES_IP_ADDRESS) @@ -4998,7 +4998,7 @@ def remove_sct_metrics(metric): if self.sct_ip_port: scrape_configs.append(dict(job_name="sct_metrics", honor_labels=True, static_configs=[dict(targets=[self.sct_ip_port])])) - with open(local_template, "w") as output_file: + with open(local_template, "w", encoding="utf-8") as output_file: yaml.safe_dump(templ_yaml, output_file, default_flow_style=False) # to remove tag !!python/unicode node.remoter.send_files(src=local_template, dst=prometheus_yaml_template, delete_dst=True) diff --git a/sdcm/cluster_aws.py b/sdcm/cluster_aws.py index 21afd2f618..634de37372 100644 --- a/sdcm/cluster_aws.py +++ b/sdcm/cluster_aws.py @@ -912,7 +912,7 @@ def get_seed_nodes(self): yaml_dst_path = os.path.join(tempfile.mkdtemp(prefix='sct-cassandra'), 'cassandra.yaml') 
         node.remoter.receive_files(src='/etc/cassandra/cassandra.yaml', dst=yaml_dst_path)
-        with open(yaml_dst_path, 'r') as yaml_stream:
+        with open(yaml_dst_path, encoding="utf-8") as yaml_stream:
             conf_dict = yaml.safe_load(yaml_stream)
             try:
                 return conf_dict['seed_provider'][0]['parameters'][0]['seeds'].split(',')

diff --git a/sdcm/collectd.py b/sdcm/collectd.py
index 9fe2ab5032..4430a60541 100644
--- a/sdcm/collectd.py
+++ b/sdcm/collectd.py
@@ -47,7 +47,7 @@ def _setup_collectd(self):
         tmp_path_exporter = os.path.join(tmp_dir_exporter, 'scylla.conf')
         tmp_path_remote = "/tmp/scylla-collectd.conf"
 
-        with open(tmp_path_exporter, 'w') as tmp_cfg_prom:  # deepcode ignore BinaryWrite~open: automatically converted to utf8
+        with open(tmp_path_exporter, 'w', encoding="utf-8") as tmp_cfg_prom:
             tmp_cfg_prom.write(self._collectd_cfg)  # pylint: disable=no-member
         try:
             self.node.remoter.send_files(src=tmp_path_exporter, dst=tmp_path_remote)
@@ -348,7 +348,7 @@ def collectd_exporter_setup(self):
         tmp_path_exporter = os.path.join(tmp_dir_exporter, 'collectd_exporter.conf')
         tmp_path_remote = '/tmp/collectd_exporter.conf'
         system_path_remote = '/etc/init/collectd_exporter.conf'
-        with open(tmp_path_exporter, 'w') as tmp_cfg_prom:
+        with open(tmp_path_exporter, 'w', encoding="utf-8") as tmp_cfg_prom:
             tmp_cfg_prom.write(service_file)
         try:
             self.node.remoter.send_files(src=tmp_path_exporter, dst=tmp_path_remote)
@@ -423,7 +423,7 @@ def collectd_exporter_setup(self):
         tmp_path_exporter = os.path.join(tmp_dir_exporter, 'collectd-exporter.service')
         tmp_path_remote = '/tmp/collectd-exporter.service'
         system_path_remote = '/etc/systemd/system/collectd-exporter.service'
-        with open(tmp_path_exporter, 'w') as tmp_cfg_prom:  # deepcode ignore BinaryWrite~open: automatically converted to utf8
+        with open(tmp_path_exporter, 'w', encoding="utf-8") as tmp_cfg_prom:
            tmp_cfg_prom.write(systemd_unit)
         try:
             self.node.remoter.send_files(src=tmp_path_exporter, dst=tmp_path_remote)
@@ -452,7 +452,7 @@ def collectd_exporter_service_setup(self):
         tmp_path_exporter = os.path.join(tmp_dir_exporter, 'collectd_exporter.conf')
         tmp_path_remote = '/tmp/collectd_exporter.conf'
         system_path_remote = '/etc/init/collectd_exporter.conf'
-        with open(tmp_path_exporter, 'w') as tmp_cfg_prom:  # deepcode ignore BinaryWrite~open: automatically converted to utf8
+        with open(tmp_path_exporter, 'w', encoding="utf-8") as tmp_cfg_prom:
             tmp_cfg_prom.write(service_file)
         try:
             self.node.remoter.send_files(src=tmp_path_exporter, dst=tmp_path_remote)

diff --git a/sdcm/coredump.py b/sdcm/coredump.py
index 4b5f87f30e..dc32439f66 100644
--- a/sdcm/coredump.py
+++ b/sdcm/coredump.py
@@ -268,7 +268,7 @@ def log_coredump(self, core_info: CoreDumpInfo):
         if not core_info.coredump_info:
             return
         log_file = os.path.join(self.node.logdir, 'coredump.log')
-        with open(log_file, 'a') as log_file_obj:
+        with open(log_file, 'a', encoding="utf-8") as log_file_obj:
             log_file_obj.write(core_info.coredump_info)
         for line in core_info.coredump_info.splitlines():
             self.log.error(line)

diff --git a/sdcm/db_log_reader.py b/sdcm/db_log_reader.py
index 3dc4852994..bfb61ef6fb 100644
--- a/sdcm/db_log_reader.py
+++ b/sdcm/db_log_reader.py
@@ -86,7 +86,7 @@ def _read_and_publish_events(self) -> None:
         if not os.path.exists(self._system_log):
             return
 
-        with open(self._system_log, 'r') as db_file:
+        with open(self._system_log, encoding="utf-8") as db_file:
             if self._last_log_position:
                 db_file.seek(self._last_log_position)
             for index, line in enumerate(db_file, start=self._last_line_no + 1):
diff --git a/sdcm/gemini_thread.py b/sdcm/gemini_thread.py
index ffa20e16c1..6d4c1ef148 100644
--- a/sdcm/gemini_thread.py
+++ b/sdcm/gemini_thread.py
@@ -151,7 +151,7 @@ def get_gemini_results(self):
             local_gemini_result_file = os.path.join(node.logdir, os.path.basename(result_file))
             node.remoter.receive_files(src=result_file, dst=local_gemini_result_file)
-            with open(local_gemini_result_file) as local_file:
+            with open(local_gemini_result_file, encoding="utf-8") as local_file:
                 content = local_file.read()
             res = self._parse_gemini_summary_json(content)
             if res:

diff --git a/sdcm/logcollector.py b/sdcm/logcollector.py
index 0b895f7f1a..a1a096916e 100644
--- a/sdcm/logcollector.py
+++ b/sdcm/logcollector.py
@@ -582,7 +582,7 @@ def collect(self, node, local_dst, remote_dst=None, local_search_path=None):
         os.makedirs(local_dst, exist_ok=True)
         snapshots = self.get_grafana_snapshot(node)
         snapshots_file = os.path.join(local_dst, "grafana_snapshots")
-        with open(snapshots_file, "w") as f:  # pylint: disable=invalid-name
+        with open(snapshots_file, "w", encoding="utf-8") as f:  # pylint: disable=invalid-name
             for snapshot in snapshots:
                 f.write(snapshot + '\n')
 
@@ -1268,7 +1268,7 @@ def create_base_storage_dir(self, test_dir=None):
         self.storage_dir = os.path.join(self.sct_result_dir, log_dir, 'collected_logs')
         os.makedirs(self.storage_dir, exist_ok=True)
         if not os.path.exists(os.path.join(os.path.dirname(self.storage_dir), "test_id")):
-            with open(os.path.join(os.path.dirname(self.storage_dir), "test_id"), "w") as f:  # pylint: disable=invalid-name
+            with open(os.path.join(os.path.dirname(self.storage_dir), "test_id"), "w", encoding="utf-8") as f:  # pylint: disable=invalid-name
                f.write(self.test_id)

diff --git a/sdcm/mgmt/common.py b/sdcm/mgmt/common.py
index 3770eb7300..b42771d620 100644
--- a/sdcm/mgmt/common.py
+++ b/sdcm/mgmt/common.py
@@ -46,7 +46,7 @@ def duration_to_timedelta(duration_string):
 
 
 def get_manager_repo_from_defaults(manager_version_name, distro):
-    with open("defaults/manager_versions.yaml", 'r') as mgmt_config:
+    with open("defaults/manager_versions.yaml", encoding="utf-8") as mgmt_config:
         manager_repos_by_version_dict = yaml.safe_load(mgmt_config)["manager_repos_by_version"]
 
     version_specific_repos = manager_repos_by_version_dict.get(manager_version_name, None)
@@ -61,7 +61,7 @@ def get_manager_repo_from_defaults(manager_version_name, distro):
 
 
 def get_manager_scylla_backend(scylla_backend_version_name, distro):
-    with open("defaults/manager_versions.yaml", 'r') as mgmt_config:
+    with open("defaults/manager_versions.yaml", encoding="utf-8") as mgmt_config:
         scylla_backend_repos_by_version_dict = yaml.safe_load(mgmt_config)["scylla_backend_repo_by_version"]
 
     version_specific_repos = scylla_backend_repos_by_version_dict.get(scylla_backend_version_name, None)

diff --git a/sdcm/microbenchmarking.py b/sdcm/microbenchmarking.py
index e25eaaf858..5f842bc9f5 100755
--- a/sdcm/microbenchmarking.py
+++ b/sdcm/microbenchmarking.py
@@ -338,7 +338,7 @@ def get_results(self, results_path, update_db):
                 test_args = os.path.splitext(new_filename)[0]
                 test_type = dirname + "_" + test_args
                 json_path = os.path.join(dirname, dataset_name, filename)
-                with open(json_path, 'r') as json_file:
+                with open(json_path, encoding="utf-8") as json_file:
                     self.log.info("Reading: %s", json_path)
                     datastore = json.load(json_file)
                 datastore.update({'hostname': self.hostname,

diff --git a/sdcm/monitorstack/__init__.py b/sdcm/monitorstack/__init__.py
index aa5375df15..28c2d3b946 100644
--- a/sdcm/monitorstack/__init__.py
+++ b/sdcm/monitorstack/__init__.py
@@ -247,7 +247,7 @@ def get_monitoring_stack_dir(base_dir):
 
 def get_monitoring_stack_scylla_version(monitoring_stack_dir):
     try:
-        with open(os.path.join(monitoring_stack_dir, 'monitor_version'), 'r') as f:  # pylint: disable=invalid-name
+        with open(os.path.join(monitoring_stack_dir, 'monitor_version'), encoding="utf-8") as f:  # pylint: disable=invalid-name
             versions = f.read().strip()
         monitoring_version, scylla_version = versions.split(':')
         return monitoring_version, scylla_version
@@ -291,7 +291,7 @@ def restore_sct_dashboards(monitoring_dockers_dir, scylla_version):
                                       sct_dashboard_file_name)
     dashboard_url = f'http://localhost:{GRAFANA_DOCKER_PORT}/api/dashboards/db'
 
-    with open(sct_dashboard_file, "r") as f:  # pylint: disable=invalid-name
+    with open(sct_dashboard_file, encoding="utf-8") as f:  # pylint: disable=invalid-name
         dashboard_config = json.load(f)
         # NOTE: remove value from the 'dashboard.id' field to avoid following error:
         #
@@ -329,7 +329,7 @@ def restore_annotations_data(monitoring_stack_dir):
         LOGGER.info('There is no annotations file.Skip loading annotations')
         return False
     try:
-        with open(annotations_file, "r") as f:  # pylint: disable=invalid-name
+        with open(annotations_file, encoding="utf-8") as f:  # pylint: disable=invalid-name
             annotations = json.load(f)
 
         annotations_url = f"http://localhost:{GRAFANA_DOCKER_PORT}/api/annotations"

diff --git a/sdcm/remote/base.py b/sdcm/remote/base.py
index 431dd704f1..cff1baf02e 100644
--- a/sdcm/remote/base.py
+++ b/sdcm/remote/base.py
@@ -237,14 +237,14 @@ def __init__(self, log_file: str):
 
     def submit(self, stream: str) -> list:
         stream_buffer = stream[self.len:]
-        with open(self.log_file, "a+") as log_file:
+        with open(self.log_file, "a+", encoding="utf-8") as log_file:
             log_file.write(stream_buffer)
         self.len = len(stream)
         return []
 
     def submit_line(self, line: str):
-        with open(self.log_file, "a+") as log_file:
+        with open(self.log_file, "a+", encoding="utf-8") as log_file:
             log_file.write(line)

diff --git a/sdcm/remote/remote_file.py b/sdcm/remote/remote_file.py
index 0aba90380d..8b29bb398f 100644
--- a/sdcm/remote/remote_file.py
+++ b/sdcm/remote/remote_file.py
@@ -47,13 +47,13 @@ def remote_file(remoter, remote_path, serializer=StringIO.getvalue, deserializer
                   throw_exc=True,
                   src=remote_path,
                   dst=local_tempfile)
-    with open(local_tempfile, "r") as fobj:
+    with open(local_tempfile, encoding="utf-8") as fobj:
         parsed_data = deserializer(fobj)
 
     yield parsed_data
 
    content = serializer(parsed_data)
-    with open(local_tempfile, "w") as fobj:
+    with open(local_tempfile, "w", encoding="utf-8") as fobj:
         fobj.write(content)
     LOGGER.debug("New content of `%s':\n%s", remote_path, content)

diff --git a/sdcm/results_analyze/__init__.py b/sdcm/results_analyze/__init__.py
index 9be9ddc7e8..dcd16ad9c4 100644
--- a/sdcm/results_analyze/__init__.py
+++ b/sdcm/results_analyze/__init__.py
@@ -152,7 +152,7 @@ def render_to_html(self, results, html_file_path="", template=None):
         html = template.render(results)
         self.log.info("Results has been rendered to html")
         if html_file_path:
-            with open(html_file_path, "w") as html_file:
+            with open(html_file_path, "w", encoding="utf-8") as html_file:
                 html_file.write(html)
             self.log.info("HTML report saved to '%s'.", html_file_path)
         return html
@@ -179,7 +179,7 @@ def gen_kibana_dashboard_url(self, dashboard_path=""):
     def save_email_data_file(self, subject, email_data, file_path='email_data.json'):
         if os.path.exists(file_path):
             try:
-                with open(file_path, 'r') as file:
encoding="utf-8") as file: data = file.read().strip() file_content = json.loads(data or '{}') except EnvironmentError as err: @@ -188,7 +188,7 @@ def save_email_data_file(self, subject, email_data, file_path='email_data.json') file_content = {} file_content[subject] = email_data.copy() try: - with open(file_path, 'w') as file: + with open(file_path, 'w', encoding="utf-8") as file: json.dump(file_content, file) except EnvironmentError as err: self.log.error('Failed to write %s to file %s with error %s', file_content, file_path, err) diff --git a/sdcm/sct_events/base.py b/sdcm/sct_events/base.py index 2806e15e8b..c9a40b4c5d 100644 --- a/sdcm/sct_events/base.py +++ b/sdcm/sct_events/base.py @@ -46,7 +46,7 @@ class SctEventTypesRegistry(Dict[str, Type["SctEvent"]]): # pylint: disable=too-few-public-methods def __init__(self, severities_conf: str = DEFAULT_SEVERITIES): super().__init__() - with open(severities_conf) as fobj: + with open(severities_conf, encoding="utf-8") as fobj: self.max_severities = {event_t: Severity[sev] for event_t, sev in yaml.safe_load(fobj).items()} self.limit_rules = [] diff --git a/sdcm/send_email.py b/sdcm/send_email.py index 0084662dfb..eab1661d32 100644 --- a/sdcm/send_email.py +++ b/sdcm/send_email.py @@ -654,7 +654,7 @@ def read_email_data_from_file(filename): email_data = None if os.path.exists(filename): try: - with open(filename, "r") as file: + with open(filename, encoding="utf-8") as file: data = file.read().strip() email_data = json.loads(data or '{}') except Exception as details: # pylint: disable=broad-except @@ -673,7 +673,7 @@ def save_email_data_to_file(email_data, filepath): """ try: if email_data: - with open(filepath, "w") as json_file: + with open(filepath, "w", encoding="utf-8") as json_file: json.dump(email_data, json_file) except Exception as details: # pylint: disable=broad-except LOGGER.warning("Error during collecting data for email %s", details) diff --git a/sdcm/stress_thread.py b/sdcm/stress_thread.py index 592eecb465..c3b9878250 100644 --- a/sdcm/stress_thread.py +++ b/sdcm/stress_thread.py @@ -157,7 +157,7 @@ def _run_stress(self, node, loader_idx, cpu_idx, keyspace_idx): # pylint: disab stress_cmd = self.create_stress_cmd(node, loader_idx, keyspace_idx) if self.profile: - with open(self.profile) as profile_file: + with open(self.profile, encoding="utf-8") as profile_file: LOGGER.info('Profile content:\n%s', profile_file.read()) node.remoter.send_files(self.profile, os.path.join('/tmp', os.path.basename(self.profile)), delete_dst=True) diff --git a/sdcm/test_config.py b/sdcm/test_config.py index 3921519178..55b6f4d550 100644 --- a/sdcm/test_config.py +++ b/sdcm/test_config.py @@ -66,7 +66,7 @@ def set_test_id_only(cls, test_id) -> bool: def set_test_id(cls, test_id): if cls.set_test_id_only(test_id): test_id_file_path = os.path.join(cls.logdir(), "test_id") - with open(test_id_file_path, "w") as test_id_file: + with open(test_id_file_path, "w", encoding="utf-8") as test_id_file: test_id_file.write(str(test_id)) @classmethod @@ -107,7 +107,7 @@ def logdir(cls) -> str: def latency_results_file(cls): if not cls._latency_results_file_path: cls._latency_results_file_path = os.path.join(cls._logdir, cls._latency_results_file_name) - with open(cls._latency_results_file_path, 'w'): + with open(cls._latency_results_file_path, 'w', encoding="utf-8"): pass return cls._latency_results_file_path diff --git a/sdcm/tester.py b/sdcm/tester.py index 0e3f810fd6..e188e3773b 100644 --- a/sdcm/tester.py +++ b/sdcm/tester.py @@ -461,7 +461,7 @@ def 
@@ -461,7 +461,7 @@ def _move_kubectl_config(self):
         os.environ['KUBECONFIG'] = self.kubectl_config_path
         if not os.path.exists(self.kubectl_config_path):
             os.makedirs(os.path.dirname(self.kubectl_config_path), exist_ok=True)
-            with open(self.kubectl_config_path, 'w') as kube_config_file:
+            with open(self.kubectl_config_path, 'w', encoding="utf-8") as kube_config_file:
                 kube_config_file.write('')
                 kube_config_file.flush()
         os.chmod(os.path.dirname(self.kubectl_config_path), mode=secure_mode)
@@ -2112,7 +2112,7 @@ def collect_partitions_info(self, table_name, primary_key_column, save_into_file
         # Collect data about partitions' rows amount.
         partitions = {}
         partitions_stats_file = os.path.join(self.logdir, save_into_file_name)
-        with open(partitions_stats_file, 'a') as stats_file:
+        with open(partitions_stats_file, 'a', encoding="utf-8") as stats_file:
             for i in pk_list:
                 self.log.debug("Next PK: {}".format(i))
                 count_partition_keys_cmd = f'select count(*) from {table_name} where {primary_key_column} = {i}'
@@ -2582,13 +2582,13 @@ def check_latency_during_ops(self):
                               'email_recipients'),
                           events=get_events_grouped_by_category(
                               _registry=self.events_processes_registry))
-        with open(self.latency_results_file, 'r') as file:
+        with open(self.latency_results_file, encoding="utf-8") as file:
             latency_results = json.load(file)
         self.log.debug('latency_results were loaded from file %s and its result is %s',
                        self.latency_results_file, latency_results)
         if latency_results and self.create_stats:
             latency_results = calculate_latency(latency_results)
-            with open(self.latency_results_file, 'w') as file:
+            with open(self.latency_results_file, 'w', encoding="utf-8") as file:
                 json.dump(latency_results, file)
             self.log.debug('collected latency values are: %s', latency_results)
             self.update({"latency_during_ops": latency_results})

diff --git a/sdcm/utils/common.py b/sdcm/utils/common.py
index 66c25c5990..31f29c5772 100644
--- a/sdcm/utils/common.py
+++ b/sdcm/utils/common.py
@@ -128,7 +128,7 @@ def get_profile_content(stress_cmd):
     elif not os.path.exists(cs_profile):
         raise FileNotFoundError('User profile file {} not found'.format(cs_profile))
 
-    with open(cs_profile, 'r') as yaml_stream:
+    with open(cs_profile, encoding="utf-8") as yaml_stream:
         profile = yaml.safe_load(yaml_stream)
     return cs_profile, profile
 
@@ -503,7 +503,7 @@ def clean_cloud_resources(tags_dict, dry_run=False):
 
 
 def docker_current_container_id() -> Optional[str]:
-    with open("/proc/1/cgroup") as cgroup:
+    with open("/proc/1/cgroup", encoding="utf-8") as cgroup:
         for line in cgroup:
             match = DOCKER_CGROUP_RE.search(line)
             if match:
@@ -1206,7 +1206,7 @@ def __init__(self, filename, thread_obj):
         self.thread_obj = thread_obj
 
     def __iter__(self):
-        with open(self.filename, 'r') as input_file:
+        with open(self.filename, encoding="utf-8") as input_file:
             line = ''
             while not self.thread_obj.stopped():
                 poller = select.poll()  # pylint: disable=no-member
@@ -1861,11 +1861,11 @@ def get_testrun_status(test_id=None, logdir=None, only_critical=False):
     error_log = os.path.join(testrun_dir, 'events_log/error.log')
 
     if os.path.exists(critical_log):
-        with open(critical_log) as file:
+        with open(critical_log, encoding="utf-8") as file:
             status = file.readlines()
 
     if not only_critical and os.path.exists(error_log):
-        with open(error_log) as file:
+        with open(error_log, encoding="utf-8") as file:
             status += file.readlines()
 
     return status
@@ -2064,7 +2064,7 @@ def get_docker_stress_image_name(tool_name=None):
     if not tool_name:
         return None
     base_path = os.path.dirname(os.path.dirname((os.path.dirname(__file__))))
with open(os.path.join(base_path, "docker", tool_name, "image"), "r") as image_file: + with open(os.path.join(base_path, "docker", tool_name, "image"), encoding="utf-8") as image_file: result = image_file.read() return result.strip() diff --git a/sdcm/utils/compaction_ops.py b/sdcm/utils/compaction_ops.py index cee49f211b..7ee9063269 100644 --- a/sdcm/utils/compaction_ops.py +++ b/sdcm/utils/compaction_ops.py @@ -90,7 +90,7 @@ def stop_on_user_compaction_logged(node: BaseNode, watch_for: str, timeout: int, stop_func: Callable, mark: Optional[int] = None): LOGGER.info("Starting to watch for user compaction logged...") start_time = time.time() - with open(node.system_log, "r") as log_file: + with open(node.system_log, encoding="utf-8") as log_file: if mark: log_file.seek(mark) diff --git a/sdcm/utils/decorators.py b/sdcm/utils/decorators.py index 0801714d66..e0ecd90fc4 100644 --- a/sdcm/utils/decorators.py +++ b/sdcm/utils/decorators.py @@ -184,7 +184,7 @@ def wrapped(*args, **kwargs): # pylint: disable=too-many-branches, too-many-loc if not os.path.exists(latency_results_file_path): latency_results = {} else: - with open(latency_results_file_path, 'r') as file: + with open(latency_results_file_path, encoding="utf-8") as file: data = file.read().strip() latency_results = json.loads(data or '{}') @@ -200,7 +200,7 @@ def wrapped(*args, **kwargs): # pylint: disable=too-many-branches, too-many-loc else: latency_results[func.__name__]['cycles'].append(result) - with open(latency_results_file_path, 'w') as file: + with open(latency_results_file_path, 'w', encoding="utf-8") as file: json.dump(latency_results, file) return res diff --git a/sdcm/utils/docker_utils.py b/sdcm/utils/docker_utils.py index b19e755e3b..90459d36be 100644 --- a/sdcm/utils/docker_utils.py +++ b/sdcm/utils/docker_utils.py @@ -411,7 +411,7 @@ def unpause_container(cls, instance: object, name: str) -> None: def running_in_docker(): path = '/proc/self/cgroup' - with open(path) as cgroup: + with open(path, encoding="utf-8") as cgroup: return ( os.path.exists('/.dockerenv') or os.path.isfile(path) and any('docker' in line for line in cgroup) diff --git a/sdcm/utils/file.py b/sdcm/utils/file.py index f0b72993e0..cba7073b67 100644 --- a/sdcm/utils/file.py +++ b/sdcm/utils/file.py @@ -63,7 +63,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): def _open(self) -> TextIO: kwargs = {attr_name: getattr(self, attr_name) for attr_name in ['mode', 'buffering', 'encoding', 'errors', 'closefd'] if getattr(self, attr_name, None) is not None} - return open(self.path, **kwargs) # pylint: disable=consider-using-with + return open(self.path, **kwargs) # pylint: disable=consider-using-with,unspecified-encoding def move_to(self, pos) -> 'File': self._io.seek(pos) diff --git a/sdcm/utils/k8s.py b/sdcm/utils/k8s.py index 87eda7582c..6c4c840024 100644 --- a/sdcm/utils/k8s.py +++ b/sdcm/utils/k8s.py @@ -329,7 +329,7 @@ def apply_file(cls, kluster, config_path, namespace=None, # pylint: disable=too if envsubst: data = LOCALRUNNER.run(f'{environ_str}envsubst<{config_path}', verbose=False).stdout else: - with open(config_path, 'r') as config_file_stream: + with open(config_path, encoding="utf-8") as config_file_stream: data = config_file_stream.read() file_content = yaml.safe_load_all(data) @@ -425,7 +425,7 @@ def patch_kube_config(cls, static_token_path, kube_config_path: str = None) -> N kube_config_path = os.path.expanduser(os.environ.get('KUBECONFIG', '~/.kube/config')) LOGGER.debug("Patch %s to use file token %s", kube_config_path, static_token_path) 
-        with open(kube_config_path) as kube_config:
+        with open(kube_config_path, encoding="utf-8") as kube_config:
             data = yaml.safe_load(kube_config)
         auth_type, user_config = KubernetesOps.get_kubectl_auth_config_for_first_user(data)
@@ -433,7 +433,7 @@ def patch_kube_config(cls, static_token_path, kube_config_path: str = None) -> N
             raise RuntimeError("Unable to find user configuration in ~/.kube/config")
         KubernetesOps.patch_kubectl_auth_config(user_config, auth_type, "cat", [static_token_path])
-        with open(kube_config_path, "w") as kube_config:
+        with open(kube_config_path, "w", encoding="utf-8") as kube_config:
             yaml.safe_dump(data, kube_config)
 
         LOGGER.debug('Patched kubectl config at %s with static kubectl token from %s',
                      kube_config_path, static_token_path)
@@ -574,12 +574,12 @@ def _clean_up_token_in_temporary_location(self):
             LOGGER.debug('Failed to cleanup temporary token: %s', exc)
 
     def _check_token_validity_in_temporary_location(self):
-        with open(self._temporary_token_path, 'r') as gcloud_config_file:
+        with open(self._temporary_token_path, encoding="utf-8") as gcloud_config_file:
             json.load(gcloud_config_file)
 
     def _get_token_and_save_to_temporary_location(self):
         token = self.get_token()
-        with open(self._temporary_token_path, 'w+') as gcloud_config_file:
+        with open(self._temporary_token_path, 'w+', encoding="utf-8") as gcloud_config_file:
             gcloud_config_file.write(token)
             gcloud_config_file.flush()

diff --git a/sdcm/utils/profiler.py b/sdcm/utils/profiler.py
index 12fc1f8220..25481bf24a 100644
--- a/sdcm/utils/profiler.py
+++ b/sdcm/utils/profiler.py
@@ -415,11 +415,11 @@ def dump_stats(self):
             if os.path.exists(group_dir):
                 shutil.rmtree(group_dir, ignore_errors=True)
             os.makedirs(group_dir, exist_ok=True)
-            with open(os.path.join(group_dir, 'stats.txt'), 'w') as stat_file:
+            with open(os.path.join(group_dir, 'stats.txt'), 'w', encoding="utf-8") as stat_file:
                 self._dump_text_stats(*stat_holders, dst=stat_file)
             with open(os.path.join(group_dir, 'stats.bin'), 'wb') as stat_file:
                 self._dump_stats(*stat_holders, dst=stat_file, binary=True)
-        with open(os.path.join(self._target_dir, 'stats.txt'), 'w') as stat_file:
+        with open(os.path.join(self._target_dir, 'stats.txt'), 'w', encoding="utf-8") as stat_file:
             self._dump_text_stats(*total_stats, dst=stat_file)
         with open(os.path.join(self._target_dir, 'stats.bin'), 'wb') as stat_file:
             self._dump_stats(*total_stats, dst=stat_file, binary=True)

diff --git a/sdcm/utils/threads_and_processes_alive.py b/sdcm/utils/threads_and_processes_alive.py
index 7ad0ced22f..3721ef54bd 100644
--- a/sdcm/utils/threads_and_processes_alive.py
+++ b/sdcm/utils/threads_and_processes_alive.py
@@ -38,7 +38,7 @@ def gather_live_threads_and_dump_to_file(dump_file_path: str) -> bool:
         return False
     source_modules = []
     result = False
-    with open(dump_file_path, 'a') as log_file:
+    with open(dump_file_path, 'a', encoding="utf-8") as log_file:
         for thread in threading.enumerate():
             if thread is threading.current_thread():
                 continue
@@ -71,7 +71,7 @@ def gather_live_processes_and_dump_to_file(dump_file_path: str) -> bool:
     if not multiprocessing.active_children():
         return False
     source_modules = []
-    with open(dump_file_path, 'a') as log_file:
+    with open(dump_file_path, 'a', encoding="utf-8") as log_file:
         for proc in multiprocessing.active_children():
             source = ''
             module = 'Unknown'

diff --git a/sdcm/utils/version_utils.py b/sdcm/utils/version_utils.py
index 4933d75632..7051b82e38 100644
--- a/sdcm/utils/version_utils.py
+++ b/sdcm/utils/version_utils.py
@@ -216,7 +216,7 @@ def get_gemini_version(output: str):
 
 def get_node_supported_sstable_versions(node_system_log) -> List[str]:
     output = []
-    with open(node_system_log) as file:
+    with open(node_system_log, encoding="utf-8") as file:
         for line in file.readlines():
             if match := SSTABLE_FORMAT_VERSION_REGEX.search(line):
                 output.append(match.group(1).lower())

diff --git a/stop_compaction_test.py b/stop_compaction_test.py
index e55fd9e646..2ba050603a 100644
--- a/stop_compaction_test.py
+++ b/stop_compaction_test.py
@@ -238,7 +238,7 @@ def _stop_compaction_base_test_scenario(self,
 
    def _grep_log_and_assert(self, node: BaseNode):
         found_grepped_expression = False
-        with open(node.system_log, "r") as logfile:
+        with open(node.system_log, encoding="utf-8") as logfile:
             pattern = re.compile(self.GREP_PATTERN)
             for line in logfile.readlines():
                 if pattern.search(line):

diff --git a/unit_tests/lib/data_pickle.py b/unit_tests/lib/data_pickle.py
index 9774323987..f40ca7c7d3 100644
--- a/unit_tests/lib/data_pickle.py
+++ b/unit_tests/lib/data_pickle.py
@@ -199,17 +199,17 @@ def _from_data_list(cls, obj: list) -> list:
 
     @classmethod
     def load_from_file(cls, filepath):
-        with open(filepath, 'r') as file:
+        with open(filepath, encoding="utf-8") as file:
             return cls.from_data(json.load(file))
 
     @classmethod
     def load_data_from_file(cls, filepath):
-        with open(filepath, 'r') as file:
+        with open(filepath, encoding="utf-8") as file:
             return json.load(file)
 
     @classmethod
     def save_to_file(cls, filepath, data):
-        with open(filepath, 'w') as file:
+        with open(filepath, 'w', encoding="utf-8") as file:
             return json.dump(cls.to_data(data), file)
 
     _init_by_type = {

diff --git a/unit_tests/test_events.py b/unit_tests/test_events.py
index 272ff55f0c..faeb00a017 100644
--- a/unit_tests/test_events.py
+++ b/unit_tests/test_events.py
@@ -52,7 +52,7 @@ def tearDownClass(cls) -> None:
     @classmethod
     def get_event_log_file(cls, name: str) -> str:
         if (log_file := Path(cls.temp_dir, "events_log", name)).exists():
-            return log_file.read_text()
+            return log_file.read_text(encoding="utf-8")
         return ""
 
     @timeout(timeout=10, sleep_time=0.05)

diff --git a/unit_tests/test_hydra_sh.py b/unit_tests/test_hydra_sh.py
index 4fea48810d..91c7ac441e 100644
--- a/unit_tests/test_hydra_sh.py
+++ b/unit_tests/test_hydra_sh.py
@@ -22,7 +22,7 @@ def __init__(self, home_dir: str, aws_creds: bool, gce_creds: bool):
     @staticmethod
     def _touch_file(file_path: str):
         if not os.path.exists(file_path):
-            with open(file_path, 'w') as token_file:
+            with open(file_path, 'w', encoding="utf-8") as token_file:
                 token_file.write(' ')
                 token_file.flush()

diff --git a/unit_tests/test_prometheus.py b/unit_tests/test_prometheus.py
index 8c270f278f..ba14b3eef0 100644
--- a/unit_tests/test_prometheus.py
+++ b/unit_tests/test_prometheus.py
@@ -52,7 +52,8 @@ def wait_till_alert_manager_up(self):
 class PrometheusAlertManagerTest(unittest.TestCase):
     def test_alert_manager_listener_artificial_run(self):
         with open(os.path.join(os.path.dirname(__file__),
-                               'test_data/test_prometheus/test_alert_manager_listener_artificial_run.yaml')) as file:
+                               'test_data/test_prometheus/test_alert_manager_listener_artificial_run.yaml'),
+                  encoding="utf-8") as file:
             test_data = json.load(file)
         listener = PrometheusAlertManagerListenerArtificialTest(artificial_alerts=test_data['post'])
         listener.start()

diff --git a/unit_tests/test_remoter.py b/unit_tests/test_remoter.py
index 5f2d316263..dac121b432 100644
--- a/unit_tests/test_remoter.py
+++ b/unit_tests/test_remoter.py
@@ -424,14 +424,14 @@ def run(self, cmd, *_, **__):
         return Result(stdout="", stderr="")
 
     def send_files(self, src: str, dst: str, *_, **__) -> bool:
-        with open(src) as fobj:
+        with open(src, encoding="utf-8") as fobj:
             self.sf_data = fobj.read()
         self.sf_src = src
         self.sf_dst = dst
         return True
 
     def receive_files(self, src: str, dst: str, *_, **__) -> bool:
-        with open(dst, "w"):
+        with open(dst, "w", encoding="utf-8"):
             pass
         self.rf_src = src
         self.rf_dst = dst

diff --git a/unit_tests/test_sct_events_system.py b/unit_tests/test_sct_events_system.py
index b8a41531e4..f8b4e53f0a 100644
--- a/unit_tests/test_sct_events_system.py
+++ b/unit_tests/test_sct_events_system.py
@@ -174,7 +174,8 @@ def test_instance_poweroff_event(self):
     def test_instance_status_events_patterns(self):
         cloned_events = []
-        with open(os.path.join(os.path.dirname(__file__), 'test_data/system_status_events.log'), 'r') as sct_log:
+        with open(os.path.join(os.path.dirname(__file__), 'test_data/system_status_events.log'),
+                  encoding="utf-8") as sct_log:
             for index, line in enumerate(sct_log.readlines()):
                 for pattern, event in INSTANCE_STATUS_EVENTS_PATTERNS:
                     match = pattern.search(line)

diff --git a/unit_tests/test_scylla_yaml.py b/unit_tests/test_scylla_yaml.py
index f73151292e..96c410c156 100644
--- a/unit_tests/test_scylla_yaml.py
+++ b/unit_tests/test_scylla_yaml.py
@@ -427,7 +427,7 @@ def test_update_with_scylla_yaml_object():
 def test_update_with_dict_object():
     yaml1 = ScyllaYaml(cluster_name='cluster1', redis_keyspace_replication_strategy='NetworkTopologyStrategy')
     test_config_file = Path(__file__).parent / 'test_data' / 'scylla_yaml_update.yaml'
-    with open(test_config_file, "r") as test_file:
+    with open(test_config_file, encoding="utf-8") as test_file:
         test_config_file_yaml = yaml.load(test_file)
     append_scylla_args_dict = yaml.load(test_config_file_yaml["append_scylla_yaml"])
     yaml1.update(append_scylla_args_dict)

diff --git a/unit_tests/test_scylla_yaml_builders.py b/unit_tests/test_scylla_yaml_builders.py
index 73ac39d215..c23ebd7566 100644
--- a/unit_tests/test_scylla_yaml_builders.py
+++ b/unit_tests/test_scylla_yaml_builders.py
@@ -590,6 +590,6 @@ def _run_test(self, config_path: str, expected_node_config: str):
         ) for config_name in os.listdir(BASE_FOLDER) if config_name.endswith('.yaml')
     ])
     def test_integration_node(self, config_path, result_path):
-        with open(result_path, 'r') as result_file:
+        with open(result_path, encoding="utf-8") as result_file:
             expected_node_config = result_file.read()
         self._run_test(config_path, expected_node_config=expected_node_config)

diff --git a/unit_tests/test_tester.py b/unit_tests/test_tester.py
index 74c72d2625..f5333292f0 100644
--- a/unit_tests/test_tester.py
+++ b/unit_tests/test_tester.py
@@ -150,7 +150,7 @@ def _get_unittest_final_event(self) -> TestResultEvent:
     def sct_log(self):
         if self._sct_log:
             return self._sct_log
-        with open(os.path.join(self.logdir, 'sct.log'), 'r') as log_file:
+        with open(os.path.join(self.logdir, 'sct.log'), encoding="utf-8") as log_file:
             output = log_file.read()
         self._sct_log = output
         return output

diff --git a/utils/build_system/create_test_release_jobs.py b/utils/build_system/create_test_release_jobs.py
index a1c45c5511..75d8c7bd64 100644
--- a/utils/build_system/create_test_release_jobs.py
+++ b/utils/build_system/create_test_release_jobs.py
@@ -19,8 +19,8 @@
 
 from sdcm.wait import wait_for
 
-DIR_TEMPLATE = Path(__file__).parent.joinpath("folder-template.xml").read_text()
-JOB_TEMPLATE = Path(__file__).parent.joinpath("template.xml").read_text()
Path(__file__).parent.joinpath("folder-template.xml").read_text(encoding="utf-8") +JOB_TEMPLATE = Path(__file__).parent.joinpath("template.xml").read_text(encoding="utf-8") LOGGER = logging.getLogger(__name__) diff --git a/utils/mocks/aws_mock.py b/utils/mocks/aws_mock.py index 81bb38e6f3..027d6fde5b 100644 --- a/utils/mocks/aws_mock.py +++ b/utils/mocks/aws_mock.py @@ -62,7 +62,7 @@ def aws_mock_container_run_args(self) -> dict: def run(self, force: bool = False) -> str: if not force and AWS_MOCK_IP_FILE.exists(): LOGGER.warning("%s found, don't run a new container and return AWS Mock IP from it", AWS_MOCK_IP_FILE) - return AWS_MOCK_IP_FILE.read_text() + return AWS_MOCK_IP_FILE.read_text(encoding="utf-8") container = ContainerManager.run_container(self, "aws_mock") res = container.exec_run(["bash", "-cxe", dedent("""\ @@ -78,7 +78,7 @@ def run(self, force: bool = False) -> str: raise DockerException(f"{container}: {res.output.decode('utf-8')}") aws_mock_ip = ContainerManager.get_ip_address(self, "aws_mock") - AWS_MOCK_IP_FILE.write_text(aws_mock_ip) + AWS_MOCK_IP_FILE.write_text(aws_mock_ip, encoding="utf-8") return aws_mock_ip @@ -102,7 +102,7 @@ def clean(test_id: str | None = None, if not AWS_MOCK_IP_FILE.exists(): LOGGER.info("No AWS Mock requested to clean") return - aws_mock_ip = AWS_MOCK_IP_FILE.read_text() + aws_mock_ip = AWS_MOCK_IP_FILE.read_text(encoding="utf-8") for container in containers: container.reload() if container.attrs["NetworkSettings"]["IPAddress"] == aws_mock_ip: