Commit

Merge pull request #7 from Bugazelle/dev
[Feat] Release to v0.1.11
Bugazelle authored Jul 17, 2019
2 parents 771a02a + d3be59a commit 74059d2
Showing 5 changed files with 39 additions and 15 deletions.
README.md (4 changes: 2 additions & 2 deletions)
@@ -127,7 +127,7 @@ timestamp,url,response_time
 --tag_columns url \
 --field_columns response_time \
 --user admin \
---password test-automation-monitoring-2019 \
+--password admin \
 --server 127.0.0.1:8086 \
 --drop_database=True \
 --force_insert_even_csv_no_update True \
@@ -145,7 +145,7 @@ timestamp,url,response_time
 --tag_columns url \
 --field_columns response_time \
 --user admin \
---password test-automation-monitoring-2019 \
+--password admin \
 --server 127.0.0.1:8086 \
 --drop_database True \
 --force_insert_even_csv_no_update True \
src/ExportCsvToInflux/__version__.py (2 changes: 1 addition & 1 deletion)
@@ -1 +1 @@
-__version__ = '0.1.10'
+__version__ = '0.1.11'
src/ExportCsvToInflux/csv_object.py (17 changes: 13 additions & 4 deletions)
File mode changed: 100644 → 100755
@@ -203,15 +203,15 @@ def convert_csv_data_to_int_float(self, file_name):
                 value = row[key]
                 int_status = int_type[key]
                 if int_status is True:
-                    row[key] = int(value) if int_type[key] is True else value
+                    row[key] = int(float(value)) if int_type[key] is True else value
                 else:
                     row[key] = float(value) if float_type[key] is True else value
             yield row
             if has_header is False and i == 1:
                 for key in keys:
                     int_status = int_type[key]
                     if int_status is True:
-                        row[key] = int(key) if int_type[key] is True else key
+                        row[key] = int(float(key)) if int_type[key] is True else key
                     else:
                         row[key] = float(key) if float_type[key] is True else key
                 yield row
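
The switch from `int(value)` to `int(float(value))` is what lets integer-typed columns accept decimal-formatted cells such as `3.0`, which plain `int()` rejects. A minimal sketch of the difference (the helper name and sample values are illustrative, not from the repository):

```python
# int() fails on decimal strings; int(float()) does not.
def to_int(value):
    """Convert a csv cell detected as an integer, tolerating '3.0'-style strings (illustrative helper)."""
    return int(float(value))

print(int("3"))        # 3
try:
    int("3.0")         # raises ValueError: invalid literal for int() with base 10
except ValueError as error:
    print(error)
print(to_int("3.0"))   # 3
print(to_int("3"))     # 3
```

Note that `int(float(...))` truncates, so a genuinely fractional cell such as `3.7` would become `3`; that case is presumably what the separate float branch above is for.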
@@ -238,9 +238,18 @@ def add_columns_to_csv(self,
         message = 'Error: The data should be list type, the item should be dict. Or the json type as following' \
                   'for example: [{"new_header_1": ["new_value_1", "new_value_2", "new_value_3"]}, ' \
                   '{"new_header_2": ["new_value_1", "new_value_2", "new_value_3"]}]'
-        if data_type is not list and data_type is not str and data_type is not unicode:
+        try:
+            check_data_type = data_type is not list and data_type is not str and data_type is not unicode
+        except NameError:
+            check_data_type = data_type is not list and data_type is not str
+        if check_data_type:
             raise Exception(message)
-        if data_type is str or data_type is unicode:
+
+        try:
+            check_data_type = data_type is str or data_type is unicode
+        except NameError:
+            check_data_type = data_type is str
+        if check_data_type:
             try:
                 data = json.loads(data)
             except ValueError:
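
The new `try`/`except NameError` blocks are a Python 2/3 compatibility shim: `unicode` is a builtin type in Python 2 but undefined in Python 3, so evaluating `data_type is unicode` raises `NameError` there. A minimal standalone sketch of the pattern (the function below is illustrative, not part of the package):

```python
def is_string_like(data):
    """Return True for str (and, on Python 2, unicode) input."""
    data_type = type(data)
    try:
        # Python 2: both str and unicode count as string types.
        # On Python 3 the right-hand operand raises NameError when it is evaluated.
        return data_type is str or data_type is unicode  # noqa: F821
    except NameError:
        # Python 3: there is no unicode builtin, so str is the only string type.
        return data_type is str

print(is_string_like("abc"))    # True on both Python 2 and 3
print(is_string_like([1, 2]))   # False
```

Because `or` short-circuits, the `unicode` operand is only evaluated when the `str` check fails, which is exactly when the Python 3 fallback is needed.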
src/ExportCsvToInflux/exporter_object.py (27 changes: 21 additions & 6 deletions)
File mode changed: 100644 → 100755
@@ -88,8 +88,8 @@ def __check_match_and_filter(self,
         return status

     @staticmethod
-    def __validate_match_and_filter(csv_headers, check_columns):
-        """Private Function: validate_match_and_filter """
+    def __validate_columns(csv_headers, check_columns):
+        """Private Function: validate_columns """

         if check_columns:
             validate_check_columns = all(check_column in csv_headers for check_column in check_columns)
@@ -221,6 +221,25 @@ def export_csv_to_influx(self,
             csv_file_length = csv_object.get_csv_lines_count(csv_file_item)
             csv_file_md5 = csv_object.get_file_md5(csv_file_item)
             csv_headers = csv_object.get_csv_header(csv_file_item)
+
+            # Validate csv_headers
+            if not csv_headers:
+                print('Error: No header detected in the csv file {0}. Exporter stopping...'.format(csv_file_item))
+                continue
+
+            # Validate field_columns, tag_columns, match_columns, filter_columns
+            field_columns = self.__validate_columns(csv_headers, field_columns)
+            tag_columns = self.__validate_columns(csv_headers, tag_columns)
+            if not field_columns:
+                print('Error: The input --field_columns do not match the csv headers. '
+                      'Please check that the fields exist in the csv headers. Exporter stopping...')
+            if not tag_columns:
+                print('Error: The input --tag_columns do not match the csv headers. '
+                      'Please check that the fields exist in the csv headers. Exporter stopping...')
+            match_columns = self.__validate_columns(csv_headers, match_columns)
+            filter_columns = self.__validate_columns(csv_headers, filter_columns)
+
+            # Validate time_column
             with open(csv_file_item) as f:
                 csv_reader = csv.DictReader(f, delimiter=delimiter, lineterminator=lineterminator)
                 time_column_exists = True
@@ -233,10 +252,6 @@
                         time_column_exists = False
                         break

-            # Validate match_columns, filter_columns
-            match_columns = self.__validate_match_and_filter(csv_headers, match_columns)
-            filter_columns = self.__validate_match_and_filter(csv_headers, filter_columns)
-
             # Check the timestamp, and generate the csv with checksum
             new_csv_file = 'influx.csv'
             new_csv_file = os.path.join(current_dir, new_csv_file)
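
Taken together, these hunks rename `__validate_match_and_filter` to the more general `__validate_columns` and call it for `--field_columns` and `--tag_columns` as well, so missing columns are reported before the csv is re-read for the time column. Only the `all(check_column in csv_headers ...)` membership test is visible in the diff; the sketch below fills in the surrounding return behaviour as an assumption for illustration:

```python
def validate_columns(csv_headers, check_columns):
    """Return check_columns when every entry exists in csv_headers, else an empty list.
    Assumed behaviour; only the all(...) membership test appears in the diff."""
    if check_columns:
        if all(check_column in csv_headers for check_column in check_columns):
            return check_columns
        return []  # falsy result -> the exporter prints an error for that option
    return check_columns

csv_headers = ['timestamp', 'url', 'response_time']
print(validate_columns(csv_headers, ['url']))             # ['url']
print(validate_columns(csv_headers, ['no_such_column']))  # []
```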
中文说明.md (4 changes: 2 additions & 2 deletions)
@@ -128,7 +128,7 @@ timestamp,url,response_time
 --tag_columns url \
 --field_columns response_time \
 --user admin \
---password test-automation-monitoring-2019 \
+--password admin \
 --server 127.0.0.1:8086 \
 --drop_database True \
 --force_insert_even_csv_no_update True \
@@ -146,7 +146,7 @@ timestamp,url,response_time
 --tag_columns url \
 --field_columns response_time \
 --user admin \
---password test-automation-monitoring-2019 \
+--password admin \
 --server 127.0.0.1:8086 \
 --drop_database True \
 --force_insert_even_csv_no_update True \
