Merge pull request #455 from openxc/protobufmessageupdate
Protobufmessageupdate
GenoJAFord authored Jul 27, 2020
2 parents 8f23faf + f1ceff6 commit bf55d18
Showing 22 changed files with 828 additions and 167 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -20,3 +20,4 @@ examples/signals.cpp
.vagrant/
.firmware_options
src/.svn
.scannerwork/
6 changes: 0 additions & 6 deletions docs/_static/rtd.css
@@ -382,12 +382,6 @@ div.sphinxsidebar p a:hover {
border: 1px solid #888;
}

/* Tweak any link appearing in a heading */
div.sphinxsidebar h3 a {
}




/* OTHER STUFF ------------------------------------------------------------ */

2 changes: 1 addition & 1 deletion script/bootstrap/pip-requirements.txt
@@ -1,5 +1,5 @@
Fabric3==1.14.post1
ecdsa==0.13
ecdsa==0.13.3
prettyprint==0.1.5
pyparsing==2.2.0
openxc==2.0.0
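
The only requirements change here is the ecdsa pin, bumped from 0.13 to 0.13.3. A minimal sketch (not part of this diff) for confirming that the installed package matches the new pin; pkg_resources is assumed to be available, as it ships with setuptools:

    # Confirm the locally installed ecdsa release is at least the pinned 0.13.3.
    from pkg_resources import get_distribution, parse_version

    installed = get_distribution("ecdsa").version
    assert parse_version(installed) >= parse_version("0.13.3"), installed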
4 changes: 2 additions & 2 deletions script/functional_test.py
@@ -17,7 +17,7 @@

SOURCE = None

def setUpModule():
def set_up_module():
configure_logging()

# A bit of a hack to let us pass the product ID in at the command line, so
@@ -34,7 +34,7 @@ def setUpModule():
SOURCE = UsbVehicleInterface(payload_format="json", product_id=usb_product_id)
SOURCE.start()

def tearDownModule():
def tear_down_module():
SOURCE.set_payload_format("json")

class ViFunctionalTests(unittest.TestCase):
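
Note that unittest only runs module-level fixtures when they are named exactly setUpModule and tearDownModule, so after this rename the new set_up_module and tear_down_module functions would presumably need to be invoked some other way. A minimal, self-contained sketch (not part of this diff) of the naming convention unittest discovers on its own; the ExampleTests class is hypothetical:

    import unittest

    # unittest calls these exact names once before and once after all tests
    # defined in this module.
    def setUpModule():
        print("module-level setup runs here")

    def tearDownModule():
        print("module-level teardown runs here")

    class ExampleTests(unittest.TestCase):
        def test_noop(self):
            self.assertTrue(True)

    if __name__ == '__main__':
        unittest.main()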
36 changes: 17 additions & 19 deletions script/lcov_cobertura.py
@@ -77,24 +77,23 @@ def parse(self):
file_branches_covered = 0

for line in self.lcov_data.split('\n'):
if line.strip() == 'end_of_record':
if current_file is not None:
package_dict = coverage_data['packages'][package]
package_dict['lines-total'] += file_lines_total
package_dict['lines-covered'] += file_lines_covered
package_dict['branches-total'] += file_branches_total
package_dict['branches-covered'] += file_branches_covered
file_dict = package_dict['classes'][current_file]
file_dict['lines-total'] = file_lines_total
file_dict['lines-covered'] = file_lines_covered
file_dict['lines'] = dict(file_lines)
file_dict['methods'] = dict(file_methods)
file_dict['branches-total'] = file_branches_total
file_dict['branches-covered'] = file_branches_covered
coverage_data['summary']['lines-total'] += file_lines_total
coverage_data['summary']['lines-covered'] += file_lines_covered
coverage_data['summary']['branches-total'] += file_branches_total
coverage_data['summary']['branches-covered'] += file_branches_covered
if line.strip() == 'end_of_record' and current_file is not None:
package_dict = coverage_data['packages'][package]
package_dict['lines-total'] += file_lines_total
package_dict['lines-covered'] += file_lines_covered
package_dict['branches-total'] += file_branches_total
package_dict['branches-covered'] += file_branches_covered
file_dict = package_dict['classes'][current_file]
file_dict['lines-total'] = file_lines_total
file_dict['lines-covered'] = file_lines_covered
file_dict['lines'] = dict(file_lines)
file_dict['methods'] = dict(file_methods)
file_dict['branches-total'] = file_branches_total
file_dict['branches-covered'] = file_branches_covered
coverage_data['summary']['lines-total'] += file_lines_total
coverage_data['summary']['lines-covered'] += file_lines_covered
coverage_data['summary']['branches-total'] += file_branches_total
coverage_data['summary']['branches-covered'] += file_branches_covered

line_parts = line.split(':',1)
input_type = line_parts[0]
@@ -116,7 +115,6 @@ def parse(self):
'lines-covered': 0, 'branches-total': 0,
'branches-covered': 0
}
package = package
current_file = relative_file_name
file_lines_total = 0
file_lines_covered = 0
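
The refactor above folds the nested current_file check into the end_of_record condition without changing behaviour, since the inner block only ran when a file record was open. For context, a minimal sketch (not part of this diff) of the lcov record layout this parse loop consumes; the file path and counts are made up for illustration:

    # Hypothetical lcov fragment: each source-file record ends at end_of_record.
    lcov_lines = [
        "SF:src/example.c",   # source file path
        "DA:10,1",            # line 10 executed once
        "DA:11,0",            # line 11 never executed
        "BRDA:11,0,0,1",      # branch data: line, block, branch, times taken
        "end_of_record",
    ]
    for line in lcov_lines:
        input_type = line.split(':', 1)[0]   # same split the parser uses
        print(input_type)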
97 changes: 46 additions & 51 deletions script/make_trips.py
@@ -15,8 +15,7 @@ def __init__(self, firstTrip):
def next_dest(self, data_file):
newName = "Trip" + str(self.nextTrip).zfill(3) + "-" + data_file
self.nextTrip = self.nextTrip + 1
print "Starting ",
print newName
print(f"Starting {newName}")
return open(newName, "w")

def get_next_file(trace_file):
@@ -39,72 +38,68 @@ def get_next_file(trace_file):

return filename

def compile_trip(trace_file, tripNum):
dataFileValid = True
def read_trace_file(dataFileValid, destFileGen, destinationFile, trace_file, currentTraceFile):
lastTimeStamp = 0.0
currentTimeStamp = 0
destFileGen = DestFileCreator(tripNum)
errorCount = 0
lineCount = 0

for line in currentTraceFile:
try:
lineCount = lineCount + 1
timestamp, data = line.split(':', 1)
record = json.loads(data)
except ValueError:
sys.stderr.write("Skipping line: %s" % data)
print(" ")
errorCount = errorCount + 1
continue

if lastTimeStamp is not 0.0:
if (float(timestamp) - lastTimeStamp) > 600.00: # Time is in seconds
print(f"Found a gap of {float(timestamp) - lastTimeStamp} seconds. Creating new Trip file.")
destinationFile.close()
lastTimeStamp = 0.0
destinationFile = destFileGen.next_dest(trace_file)
elif (float(timestamp) - lastTimeStamp) > 1.00: # Time is in seconds
print(f"Momentary dropout of {float(timestamp) - lastTimeStamp} seconds. Ignoring.")
lastTimeStamp = float(timestamp)
destinationFile.write(line)

if dataFileValid is True:
currentTraceFile.close()
trace_file = get_next_file(trace_file)

percentBad = 100.0 * errorCount / lineCount
print(f"Parsed {lineCount} lines.")

print(f"Detected {errorCount} errors.")

print(f"{percentBad}% bad data")

def compile_trip(trace_file, tripNum):
dataFileValid = True
destFileGen = DestFileCreator(tripNum)


destinationFile = destFileGen.next_dest(trace_file)

while dataFileValid is True:
try:
currentTraceFile = open(trace_file, "r")
except IOError, e:
print e
except IOError as e:
print(e)
dataFileValid = False
destinationFile.close()
break
else:
print 'Opened %s' % trace_file

for line in currentTraceFile:
try:
lineCount = lineCount + 1
timestamp, data = line.split(':', 1)
record = json.loads(data)
except ValueError:
sys.stderr.write("Skipping line: %s" % data)
print " "
errorCount = errorCount + 1
continue

if lastTimeStamp is not 0.0:
if (float(timestamp) - lastTimeStamp) > 600.00: # Time is in seconds
print "Found a gap of ",
print (float(timestamp) - lastTimeStamp),
print " seconds. Creating new Trip file."
destinationFile.close()
lastTimeStamp = 0.0
destinationFile = destFileGen.next_dest(trace_file)
elif (float(timestamp) - lastTimeStamp) > 1.00: # Time is in seconds
print "Momentary dropout of ",
print (float(timestamp) - lastTimeStamp),
print " seconds. Ignoring."
lastTimeStamp = float(timestamp)
destinationFile.write(line)

if dataFileValid is True:
currentTraceFile.close()
trace_file = get_next_file(trace_file)

percentBad = 100.0 * errorCount / lineCount
print "Parsed",
print lineCount,
print "lines."

print "Detected",
print errorCount,
print "errors."

print percentBad,
print "% bad data."
print(f'Opened {trace_file}')
read_trace_file(dataFileValid, destFileGen, destinationFile, trace_file, currentTraceFile)


if __name__ == '__main__':
if len(sys.argv) is not 3:
print "Must provide the path to the first trace file in a trip and the trip number."
print("Must provide the path to the first trace file in a trip and the trip number.")
sys.exit(1)

compile_trip(sys.argv[1], int(sys.argv[2]))
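
The port above replaces the Python 2 print statements with f-strings and the old "except IOError, e" syntax with "except IOError as e", but it keeps identity comparisons against literals (lastTimeStamp is not 0.0, len(sys.argv) is not 3). Those compare object identity rather than value, and newer Python 3 releases emit a SyntaxWarning for them. A minimal sketch (not part of this diff) of the equality form that expresses the intended checks:

    import sys

    # "is not" checks object identity; for numeric literals compare by value.
    last_time_stamp = 0.0
    if last_time_stamp != 0.0:    # intended check, instead of: is not 0.0
        print("have a previous timestamp")

    if len(sys.argv) != 3:        # instead of: len(sys.argv) is not 3
        print("Must provide the path to the first trace file in a trip and the trip number.")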
7 changes: 7 additions & 0 deletions sonar-project.properties
@@ -0,0 +1,7 @@
sonar.projectKey=com.ford.VI-Firmware

# Uncomment the sonar.language line if you're using a local SonarQube instance
# You will need the community C++ plugin
# https://github.com/SonarOpenCommunity/sonar-cxx

# sonar.language=c++
