fix(pylint): remove unspecified-encoding warnings
enaydanov authored and Bentsi committed Jan 10, 2022
1 parent 7092536 commit 5f8c8e0
Showing 48 changed files with 100 additions and 98 deletions.
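The fix is the same across all touched files: every open() call that relied on the platform's locale-dependent default encoding now passes encoding="utf-8" explicitly, which is what pylint's unspecified-encoding check (W1514) asks for. A minimal sketch of the pattern, using a hypothetical path rather than code from the repository:

# Before: pylint reports W1514 (unspecified-encoding); the default encoding comes from the locale
with open("results.txt") as infile:    # "results.txt" is an illustrative path, not from this repo
    data = infile.read()

# After: the encoding is stated explicitly, so the warning goes away
with open("results.txt", encoding="utf-8") as infile:
    data = infile.read()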
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -34,7 +34,7 @@ repos:

- id: pylint
name: pylint
entry: pylint -j 2 -d consider-using-f-string,unspecified-encoding
entry: pylint -j 2 -d consider-using-f-string
language: system
exclude: ^docker/alternator-dns/.*$
types: [python]
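Dropping unspecified-encoding from the hook's -d (disable) list re-enables that check in the pre-commit pylint run, so the remaining files below add an explicit encoding to every open() call they touch, in read, write, and append modes alike. A sketch of the write-mode variants, again with illustrative paths only:

# Write and append calls get the same keyword, placed after the mode argument
with open("report.html", "w", encoding="utf-8") as report:      # illustrative path
    report.write("<html></html>\n")

with open("events.log", "a", encoding="utf-8") as log_file:      # illustrative path
    log_file.write("done\n")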
8 changes: 4 additions & 4 deletions cdc_replication_test.py
@@ -46,7 +46,7 @@ def mode_str(mode: Mode) -> str:


def print_file_to_stdout(path: str) -> None:
with open(path, "r") as file:
with open(path, encoding="utf-8") as file:
shutil.copyfileobj(file, sys.stdout)


@@ -58,7 +58,7 @@ def write_cql_result(res, path: str):
:param path: path to file
:type path: str
"""
with open(path, 'w') as file:
with open(path, 'w', encoding="utf-8") as file:
for row in res:
file.write(str(row) + '\n')
file.flush()
@@ -300,7 +300,7 @@ def test_replication(self, is_gemini_test: bool, mode: Mode) -> None:
migrate_log_path = None
migrate_ok = True
if mode == Mode.PREIMAGE:
with open(replicator_log_path) as file:
with open(replicator_log_path, encoding="utf-8") as file:
self.consistency_ok = not 'Inconsistency detected.\n' in (line for line in file)
else:
migrate_log_path = os.path.join(self.logdir, 'scylla-migrate.log')
@@ -332,7 +332,7 @@ def check_consistency(self, migrate_log_dst_path: str, compare_timestamps: bool
migrate_ok = res.ok
if not migrate_ok:
self.log.error('scylla-migrate command returned status {}'.format(res.exit_status))
with open(migrate_log_dst_path) as file:
with open(migrate_log_dst_path, encoding="utf-8") as file:
consistency_ok = 'Consistency check OK.\n' in (line for line in file)

return (migrate_ok, consistency_ok)
2 changes: 1 addition & 1 deletion custom_cs_test.py
@@ -31,7 +31,7 @@ def test_write_mode(self):
Run cassandra-stress with params defined in data_dir/scylla.yaml
"""
cs_custom_config = get_data_dir_path('cassandra-stress-custom.yaml')
with open(cs_custom_config, 'r') as cs_custom_config_file:
with open(cs_custom_config, encoding="utf-8") as cs_custom_config_file:
self.log.info('Using custom cassandra-stress config:')
self.log.info(cs_custom_config_file.read())
for node in self.loaders.nodes:
2 changes: 1 addition & 1 deletion grow_cluster_test.py
@@ -37,7 +37,7 @@ def __init__(self, *args, **kwargs):

def get_stress_cmd_profile(self):
cs_custom_config = get_data_dir_path('cassandra-stress-custom-mixed-narrow-wide-row.yaml')
with open(cs_custom_config, 'r') as cs_custom_config_file:
with open(cs_custom_config, encoding="utf-8") as cs_custom_config_file:
self.log.info('Using custom cassandra-stress config:')
self.log.info(cs_custom_config_file.read())
for node in self.loaders.nodes:
2 changes: 1 addition & 1 deletion jepsen_test.py
@@ -86,7 +86,7 @@ def save_jepsen_report(self):
sleep {JEPSEN_WEB_SERVER_START_DELAY}
"""), verbose=True)

with open(os.path.join(self.logdir, "jepsen_report.html"), "wt") as jepsen_report:
with open(os.path.join(self.logdir, "jepsen_report.html"), "wt", encoding="utf-8") as jepsen_report:
jepsen_report.write(requests.get(url).text)
self.log.info("Report has been saved to %s", jepsen_report.name)

6 changes: 3 additions & 3 deletions longevity_test.py
@@ -234,7 +234,7 @@ def test_custom_time(self):
assert os.path.exists(cs_profile), 'File not found: {}'.format(cs_profile)
self.log.debug('Run stress test with user profile {}, duration {}'.format(cs_profile, cs_duration))
profile_dst = os.path.join('/tmp', os.path.basename(cs_profile))
with open(cs_profile) as pconf:
with open(cs_profile, encoding="utf-8") as pconf:
cont = pconf.readlines()
user_profile_table_count = self.params.get( # pylint: disable=invalid-name
'user_profile_table_count')
@@ -497,7 +497,7 @@ def _pre_create_templated_user_schema(self, batch_start=None, batch_end=None):
cs_user_profiles = self.params.get('cs_user_profiles')
# read user-profile
for profile_file in cs_user_profiles:
with open(profile_file) as fobj:
with open(profile_file, encoding="utf-8") as fobj:
profile_yaml = yaml.safe_load(fobj)
keyspace_definition = profile_yaml['keyspace_definition']
keyspace_name = profile_yaml['keyspace']
@@ -582,7 +582,7 @@ def create_templated_user_stress_params(self, idx, cs_profile): # pylint: disab
params_list = []
cs_duration = self.params.get('cs_duration')

with open(cs_profile) as pconf:
with open(cs_profile, encoding="utf-8") as pconf:
cont = pconf.readlines()
pconf.seek(0)
template = string.Template(pconf.read())
6 changes: 3 additions & 3 deletions performance_regression_test.py
@@ -119,7 +119,7 @@ def display_results(self, results, test_name=''):
self.display_single_result(single_result)
test_xml += self.get_test_xml(single_result, test_name=test_name)

with open(os.path.join(self.logdir, 'jenkins_perf_PerfPublisher.xml'), 'w') as pref_file:
with open(os.path.join(self.logdir, 'jenkins_perf_PerfPublisher.xml'), 'w', encoding="utf-8") as pref_file:
content = """<report name="%s report" categ="none">%s</report>""" % (test_name, test_xml)
pref_file.write(content)
except Exception as ex: # pylint: disable=broad-except
@@ -152,7 +152,7 @@ def _get_total_ops(self):
@staticmethod
def _clean_email_data():
email_data_path = 'email_data.json'
with open(email_data_path, 'w'):
with open(email_data_path, 'w', encoding="utf-8"):
pass

def preload_data(self):
@@ -556,7 +556,7 @@ def get_mv_name(user_profile):

# Get materialized view name from user profile

with open(user_profile) as fobj:
with open(user_profile, encoding="utf-8") as fobj:
user_profile_yaml = yaml.safe_load(fobj)
mv_name = ''

4 changes: 2 additions & 2 deletions performance_regression_user_profiles_test.py
@@ -28,7 +28,7 @@ def __init__(self, *args, **kwargs):
self.create_stats = False

def _clean_keyspace(self, cs_profile): # pylint: disable=invalid-name
with open(cs_profile) as fdr:
with open(cs_profile, encoding="utf-8") as fdr:
key_space = [line.split(':')[-1].strip() for line in fdr.readlines() if line.startswith('keyspace:')]
if key_space:
self.log.debug('Drop keyspace {}'.format(key_space[0]))
@@ -47,7 +47,7 @@ def test_user_profiles(self):
assert os.path.exists(cs_profile), 'File not found: {}'.format(cs_profile)
self.log.debug('Run stress test with user profile {}, duration {}'.format(cs_profile, duration))
profile_dst = os.path.join('/tmp', os.path.basename(cs_profile))
with open(cs_profile) as pconf:
with open(cs_profile, encoding="utf-8") as pconf:
cont = pconf.readlines()
for cmd in [line.lstrip('#').strip() for line in cont if line.find('cassandra-stress') > 0]:
stress_cmd = (cmd.format(profile_dst, duration))
4 changes: 2 additions & 2 deletions sct.py
@@ -1053,7 +1053,7 @@ def send_email(test_id=None, test_status=None, start_time=None, started_by=None,
start_time = format_timestamp(int(start_time))
testrun_dir = get_testrun_dir(test_id=test_id, base_dir=logdir)
if testrun_dir:
with open(os.path.join(testrun_dir, 'test_id'), 'r', encoding='utf-8') as file:
with open(os.path.join(testrun_dir, 'test_id'), encoding='utf-8') as file:
test_id = file.read().strip()
email_results_file = os.path.join(testrun_dir, "email_data.json")
if not os.path.exists(email_results_file):
@@ -1286,7 +1286,7 @@ def create_runner_instance(cloud_provider, region, availability_zone, instance_t
remoter = sct_runner.get_remoter(host=runner_public_ip, connect_timeout=120)
if remoter.run("true", timeout=100, verbose=False, ignore_status=True).ok:
LOGGER.info("Successfully connected the SCT Runner. Public IP: %s", runner_public_ip)
with sct_runner_ip_path.open("w") as sct_runner_ip_file:
with sct_runner_ip_path.open(mode="w", encoding="utf-8") as sct_runner_ip_file:
sct_runner_ip_file.write(runner_public_ip)
else:
LOGGER.error("Unable to SSH to %s! Exiting...", runner_public_ip)
16 changes: 8 additions & 8 deletions sdcm/cluster.py
@@ -621,7 +621,7 @@ def extract_seeds_from_scylla_yaml(self):
yaml_dst_path = os.path.join(tempfile.mkdtemp(prefix='sct'), 'scylla.yaml')
wait.wait_for(func=self.remoter.receive_files, step=10, text='Waiting for copying scylla.yaml', timeout=300,
throw_exc=True, src=self.add_install_prefix(SCYLLA_YAML_PATH), dst=yaml_dst_path)
with open(yaml_dst_path, 'r') as yaml_stream:
with open(yaml_dst_path, encoding="utf-8") as yaml_stream:
try:
conf_dict = yaml.safe_load(yaml_stream)
except Exception:
@@ -1380,7 +1380,7 @@ def mark_log(self):
"""
if not os.path.exists(self.system_log):
return 0
with open(self.system_log) as log_file:
with open(self.system_log, encoding="utf-8") as log_file:
log_file.seek(0, os.SEEK_END)
return log_file.tell()

@@ -3155,13 +3155,13 @@ def get_db_auth(self):

def write_node_public_ip_file(self):
public_ip_file_path = os.path.join(self.logdir, 'public_ips')
with open(public_ip_file_path, 'w') as public_ip_file:
with open(public_ip_file_path, 'w', encoding="utf-8") as public_ip_file:
public_ip_file.write("%s" % "\n".join(self.get_node_public_ips()))
public_ip_file.write("\n")

def write_node_private_ip_file(self):
private_ip_file_path = os.path.join(self.logdir, 'private_ips')
with open(private_ip_file_path, 'w') as private_ip_file:
with open(private_ip_file_path, 'w', encoding="utf-8") as private_ip_file:
private_ip_file.write("%s" % "\n".join(self.get_node_private_ips()))
private_ip_file.write("\n")

@@ -4775,13 +4775,13 @@ def sct_dashboard_json_file(self):
@staticmethod
def sct_dashboard_json_file_content_update(update_params: dict, json_file: str):
# Read json data to the string
with open(json_file, 'r') as file:
with open(json_file, encoding="utf-8") as file:
json_data = file.read()

for param, value in update_params.items():
json_data = json_data.replace(param, value)

with open(json_file, 'w') as file:
with open(json_file, 'w', encoding="utf-8") as file:
json.dump(json.loads(json_data), file, indent=2)

def node_setup(self, node, **kwargs): # pylint: disable=unused-argument
@@ -4959,7 +4959,7 @@ def configure_scylla_monitoring(self, node, sct_metrics=True, alert_manager=True
local_template = os.path.join(temp_dir, template_fn)
node.remoter.receive_files(src=prometheus_yaml_template,
dst=local_template_tmp)
with open(local_template_tmp) as output_file:
with open(local_template_tmp, encoding="utf-8") as output_file:
templ_yaml = yaml.safe_load(output_file)
self.log.debug("Configs %s" % templ_yaml)
loader_targets_list = ["[%s]:9103" % getattr(node, self.DB_NODES_IP_ADDRESS)
@@ -4998,7 +4998,7 @@ def remove_sct_metrics(metric):
if self.sct_ip_port:
scrape_configs.append(dict(job_name="sct_metrics", honor_labels=True,
static_configs=[dict(targets=[self.sct_ip_port])]))
with open(local_template, "w") as output_file:
with open(local_template, "w", encoding="utf-8") as output_file:
yaml.safe_dump(templ_yaml, output_file, default_flow_style=False) # to remove tag !!python/unicode
node.remoter.send_files(src=local_template, dst=prometheus_yaml_template, delete_dst=True)

2 changes: 1 addition & 1 deletion sdcm/cluster_aws.py
@@ -912,7 +912,7 @@ def get_seed_nodes(self):
yaml_dst_path = os.path.join(tempfile.mkdtemp(prefix='sct-cassandra'), 'cassandra.yaml')
node.remoter.receive_files(src='/etc/cassandra/cassandra.yaml',
dst=yaml_dst_path)
with open(yaml_dst_path, 'r') as yaml_stream:
with open(yaml_dst_path, encoding="utf-8") as yaml_stream:
conf_dict = yaml.safe_load(yaml_stream)
try:
return conf_dict['seed_provider'][0]['parameters'][0]['seeds'].split(',')
8 changes: 4 additions & 4 deletions sdcm/collectd.py
@@ -47,7 +47,7 @@ def _setup_collectd(self):
tmp_path_exporter = os.path.join(tmp_dir_exporter, 'scylla.conf')
tmp_path_remote = "/tmp/scylla-collectd.conf"

with open(tmp_path_exporter, 'w') as tmp_cfg_prom: # deepcode ignore BinaryWrite~open: automatically converted to utf8
with open(tmp_path_exporter, 'w', encoding="utf-8") as tmp_cfg_prom:
tmp_cfg_prom.write(self._collectd_cfg) # pylint: disable=no-member
try:
self.node.remoter.send_files(src=tmp_path_exporter, dst=tmp_path_remote)
@@ -348,7 +348,7 @@ def collectd_exporter_setup(self):
tmp_path_exporter = os.path.join(tmp_dir_exporter, 'collectd_exporter.conf')
tmp_path_remote = '/tmp/collectd_exporter.conf'
system_path_remote = '/etc/init/collectd_exporter.conf'
with open(tmp_path_exporter, 'w') as tmp_cfg_prom:
with open(tmp_path_exporter, 'w', encoding="utf-8") as tmp_cfg_prom:
tmp_cfg_prom.write(service_file)
try:
self.node.remoter.send_files(src=tmp_path_exporter, dst=tmp_path_remote)
@@ -423,7 +423,7 @@ def collectd_exporter_setup(self):
tmp_path_exporter = os.path.join(tmp_dir_exporter, 'collectd-exporter.service')
tmp_path_remote = '/tmp/collectd-exporter.service'
system_path_remote = '/etc/systemd/system/collectd-exporter.service'
with open(tmp_path_exporter, 'w') as tmp_cfg_prom: # deepcode ignore BinaryWrite~open: automatically converted to utf8
with open(tmp_path_exporter, 'w', encoding="utf-8") as tmp_cfg_prom:
tmp_cfg_prom.write(systemd_unit)
try:
self.node.remoter.send_files(src=tmp_path_exporter, dst=tmp_path_remote)
@@ -452,7 +452,7 @@ def collectd_exporter_service_setup(self):
tmp_path_exporter = os.path.join(tmp_dir_exporter, 'collectd_exporter.conf')
tmp_path_remote = '/tmp/collectd_exporter.conf'
system_path_remote = '/etc/init/collectd_exporter.conf'
with open(tmp_path_exporter, 'w') as tmp_cfg_prom: # deepcode ignore BinaryWrite~open: automatically converted to utf8
with open(tmp_path_exporter, 'w', encoding="utf-8") as tmp_cfg_prom:
tmp_cfg_prom.write(service_file)
try:
self.node.remoter.send_files(src=tmp_path_exporter, dst=tmp_path_remote)
2 changes: 1 addition & 1 deletion sdcm/coredump.py
@@ -268,7 +268,7 @@ def log_coredump(self, core_info: CoreDumpInfo):
if not core_info.coredump_info:
return
log_file = os.path.join(self.node.logdir, 'coredump.log')
with open(log_file, 'a') as log_file_obj:
with open(log_file, 'a', encoding="utf-8") as log_file_obj:
log_file_obj.write(core_info.coredump_info)
for line in core_info.coredump_info.splitlines():
self.log.error(line)
2 changes: 1 addition & 1 deletion sdcm/db_log_reader.py
@@ -86,7 +86,7 @@ def _read_and_publish_events(self) -> None:
if not os.path.exists(self._system_log):
return

with open(self._system_log, 'r') as db_file:
with open(self._system_log, encoding="utf-8") as db_file:
if self._last_log_position:
db_file.seek(self._last_log_position)
for index, line in enumerate(db_file, start=self._last_line_no + 1):
2 changes: 1 addition & 1 deletion sdcm/gemini_thread.py
@@ -151,7 +151,7 @@ def get_gemini_results(self):

local_gemini_result_file = os.path.join(node.logdir, os.path.basename(result_file))
node.remoter.receive_files(src=result_file, dst=local_gemini_result_file)
with open(local_gemini_result_file) as local_file:
with open(local_gemini_result_file, encoding="utf-8") as local_file:
content = local_file.read()
res = self._parse_gemini_summary_json(content)
if res:
4 changes: 2 additions & 2 deletions sdcm/logcollector.py
@@ -582,7 +582,7 @@ def collect(self, node, local_dst, remote_dst=None, local_search_path=None):
os.makedirs(local_dst, exist_ok=True)
snapshots = self.get_grafana_snapshot(node)
snapshots_file = os.path.join(local_dst, "grafana_snapshots")
with open(snapshots_file, "w") as f: # pylint: disable=invalid-name
with open(snapshots_file, "w", encoding="utf-8") as f: # pylint: disable=invalid-name
for snapshot in snapshots:
f.write(snapshot + '\n')

@@ -1268,7 +1268,7 @@ def create_base_storage_dir(self, test_dir=None):
self.storage_dir = os.path.join(self.sct_result_dir, log_dir, 'collected_logs')
os.makedirs(self.storage_dir, exist_ok=True)
if not os.path.exists(os.path.join(os.path.dirname(self.storage_dir), "test_id")):
with open(os.path.join(os.path.dirname(self.storage_dir), "test_id"), "w") as f: # pylint: disable=invalid-name
with open(os.path.join(os.path.dirname(self.storage_dir), "test_id"), "w", encoding="utf-8") as f: # pylint: disable=invalid-name
f.write(self.test_id)


4 changes: 2 additions & 2 deletions sdcm/mgmt/common.py
@@ -46,7 +46,7 @@ def duration_to_timedelta(duration_string):


def get_manager_repo_from_defaults(manager_version_name, distro):
with open("defaults/manager_versions.yaml", 'r') as mgmt_config:
with open("defaults/manager_versions.yaml", encoding="utf-8") as mgmt_config:
manager_repos_by_version_dict = yaml.safe_load(mgmt_config)["manager_repos_by_version"]

version_specific_repos = manager_repos_by_version_dict.get(manager_version_name, None)
@@ -61,7 +61,7 @@ def get_manager_repo_from_defaults(manager_version_name, distro):


def get_manager_scylla_backend(scylla_backend_version_name, distro):
with open("defaults/manager_versions.yaml", 'r') as mgmt_config:
with open("defaults/manager_versions.yaml", encoding="utf-8") as mgmt_config:
scylla_backend_repos_by_version_dict = yaml.safe_load(mgmt_config)["scylla_backend_repo_by_version"]

version_specific_repos = scylla_backend_repos_by_version_dict.get(scylla_backend_version_name, None)
2 changes: 1 addition & 1 deletion sdcm/microbenchmarking.py
@@ -338,7 +338,7 @@ def get_results(self, results_path, update_db):
test_args = os.path.splitext(new_filename)[0]
test_type = dirname + "_" + test_args
json_path = os.path.join(dirname, dataset_name, filename)
with open(json_path, 'r') as json_file:
with open(json_path, encoding="utf-8") as json_file:
self.log.info("Reading: %s", json_path)
datastore = json.load(json_file)
datastore.update({'hostname': self.hostname,