Merge "Use parallel subprocesses to classify warnings." am: 92396e1747

am: e3345c438b

Change-Id: I9eb0b8820f1fd5a11a7aa133e1f576d76e0e160d
This commit is contained in:
Chih-Hung Hsieh 2016-10-18 04:07:45 +00:00 committed by android-build-merger
commit 8391419e80

View file

@ -81,6 +81,7 @@ Use option --gencsv to output warning counts in CSV format.
# dump_csv(): # dump_csv():
import argparse import argparse
import multiprocessing
import os import os
import re import re
@ -99,6 +100,10 @@ parser.add_argument('--url',
parser.add_argument('--separator', parser.add_argument('--separator',
help='Separator between the end of a URL and the line ' help='Separator between the end of a URL and the line '
'number argument. e.g. #') 'number argument. e.g. #')
parser.add_argument('--processes',
type=int,
default=multiprocessing.cpu_count(),
help='Number of parallel processes to process warnings')
parser.add_argument(dest='buildlog', metavar='build.log', parser.add_argument(dest='buildlog', metavar='build.log',
help='Path to build.log file') help='Path to build.log file')
args = parser.parse_args() args = parser.parse_args()
@ -1706,7 +1711,8 @@ project_list = [
simple_project_pattern('frameworks/av/media/mtp'), simple_project_pattern('frameworks/av/media/mtp'),
simple_project_pattern('frameworks/av/media/ndk'), simple_project_pattern('frameworks/av/media/ndk'),
simple_project_pattern('frameworks/av/media/utils'), simple_project_pattern('frameworks/av/media/utils'),
project_name_and_pattern('frameworks/av/media/Other', 'frameworks/av/media'), project_name_and_pattern('frameworks/av/media/Other',
'frameworks/av/media'),
simple_project_pattern('frameworks/av/radio'), simple_project_pattern('frameworks/av/radio'),
simple_project_pattern('frameworks/av/services'), simple_project_pattern('frameworks/av/services'),
simple_project_pattern('frameworks/av/soundtrigger'), simple_project_pattern('frameworks/av/soundtrigger'),
@ -2065,22 +2071,13 @@ def find_project_index(line):
return -1 return -1
def classify_one_warning(line, results):
  """Classify one warning line against all compiled warning patterns.

  Appends [line, pattern_index, project_index] to results for the first
  pattern that matches, then stops; project_index is -1 when the line
  does not belong to any known project.
  """
  for i, w in enumerate(warn_patterns):
    for cpat in w['compiled_patterns']:
      if cpat.match(line):
        p = find_project_index(line)
        results.append([line, i, p])
        return
  # If we end up here, there was a problem parsing the log, probably
  # caused by 'make -j' mixing the output from concurrent compiles;
  # the unmatched line is deliberately dropped.
def classify_warnings(lines):
  """Classify a batch of warning lines.

  Returns a list of [line, pattern_index, project_index] triples, one per
  line that matched a known warning pattern.
  """
  collected = []
  for warning_line in lines:
    classify_one_warning(warning_line, collected)
  return collected
def parallel_classify_warnings(warning_lines):
  """Classify all warning lines with args.processes parallel processes.

  Splits warning_lines round-robin into one group per worker, classifies
  each group in a subprocess, then merges the results into the global
  warn_patterns / warning_messages / warning_records tables.
  """
  num_cpu = args.processes
  # Distribute the lines round-robin so each worker gets a similar load.
  groups = [[] for _ in range(num_cpu)]
  for i, line in enumerate(warning_lines):
    groups[i % num_cpu].append(line)
  pool = multiprocessing.Pool(num_cpu)
  try:
    group_results = pool.map(classify_warnings, groups)
  finally:
    # The pool was previously never closed, leaking worker processes;
    # reclaim them even if map() raises.
    pool.close()
    pool.join()
  for result in group_results:
    for line, pattern_idx, project_idx in result:
      pattern = warn_patterns[pattern_idx]
      pattern['members'].append(line)
      message_idx = len(warning_messages)
      warning_messages.append(line)
      warning_records.append([pattern_idx, project_idx, message_idx])
      pname = '???' if project_idx < 0 else project_names[project_idx]
      # Count warnings by project.
      pattern['projects'][pname] = pattern['projects'].get(pname, 0) + 1
def compile_patterns(): def compile_patterns():
"""Precompiling every pattern speeds up parsing by about 30x.""" """Precompiling every pattern speeds up parsing by about 30x."""
for i in warn_patterns: for i in warn_patterns:
@ -2156,13 +2185,11 @@ def parse_input_file():
warning_pattern = re.compile('^[^ ]*/[^ ]*: warning: .*') warning_pattern = re.compile('^[^ ]*/[^ ]*: warning: .*')
compile_patterns() compile_patterns()
# read the log file and classify all the warnings # Collect all warnings into the warning_lines set.
warning_lines = set() warning_lines = set()
for line in infile: for line in infile:
if warning_pattern.match(line): if warning_pattern.match(line):
line = normalize_warning_line(line) line = normalize_warning_line(line)
if line not in warning_lines:
classify_warning(line)
warning_lines.add(line) warning_lines.add(line)
elif line_counter < 50: elif line_counter < 50:
# save a little bit of time by only doing this for the first few lines # save a little bit of time by only doing this for the first few lines
@ -2176,6 +2203,7 @@ def parse_input_file():
m = re.search('(?<=^TARGET_BUILD_VARIANT=).*', line) m = re.search('(?<=^TARGET_BUILD_VARIANT=).*', line)
if m is not None: if m is not None:
target_variant = m.group(0) target_variant = m.group(0)
parallel_classify_warnings(warning_lines)
# Return s with escaped backslash and quotation characters. # Return s with escaped backslash and quotation characters.