Merge "Apply pylint to remaining scripts in hiddenapi"

This commit is contained in:
Treehugger Robot 2021-09-01 14:27:13 +00:00 committed by Gerrit Code Review
commit 0364846a73
3 changed files with 79 additions and 38 deletions

View file

@@ -13,8 +13,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
""" """Merge multiple CSV files, possibly with different columns.
Merge multiple CSV files, possibly with different columns.
""" """
import argparse import argparse
@@ -26,34 +25,52 @@ import operator
from zipfile import ZipFile from zipfile import ZipFile
args_parser = argparse.ArgumentParser(description='Merge given CSV files into a single one.') args_parser = argparse.ArgumentParser(
args_parser.add_argument('--header', help='Comma separated field names; ' description='Merge given CSV files into a single one.'
'if missing determines the header from input files.') )
args_parser.add_argument('--zip_input', help='Treat files as ZIP archives containing CSV files to merge.', args_parser.add_argument(
action="store_true") '--header',
args_parser.add_argument('--key_field', help='The name of the field by which the rows should be sorted. ' help='Comma separated field names; '
'Must be in the field names. ' 'if missing determines the header from input files.',
'Will be the first field in the output. ' )
'All input files must be sorted by that field.') args_parser.add_argument(
args_parser.add_argument('--output', help='Output file for merged CSV.', '--zip_input',
default='-', type=argparse.FileType('w')) help='Treat files as ZIP archives containing CSV files to merge.',
action="store_true",
)
args_parser.add_argument(
'--key_field',
help='The name of the field by which the rows should be sorted. '
'Must be in the field names. '
'Will be the first field in the output. '
'All input files must be sorted by that field.',
)
args_parser.add_argument(
'--output',
help='Output file for merged CSV.',
default='-',
type=argparse.FileType('w'),
)
args_parser.add_argument('files', nargs=argparse.REMAINDER) args_parser.add_argument('files', nargs=argparse.REMAINDER)
args = args_parser.parse_args() args = args_parser.parse_args()
def dict_reader(input): def dict_reader(csvfile):
return csv.DictReader(input, delimiter=',', quotechar='|') return csv.DictReader(csvfile, delimiter=',', quotechar='|')
csv_readers = [] csv_readers = []
if not(args.zip_input): if not args.zip_input:
for file in args.files: for file in args.files:
csv_readers.append(dict_reader(open(file, 'r'))) csv_readers.append(dict_reader(open(file, 'r')))
else: else:
for file in args.files: for file in args.files:
with ZipFile(file) as zip: with ZipFile(file) as zipfile:
for entry in zip.namelist(): for entry in zipfile.namelist():
if entry.endswith('.uau'): if entry.endswith('.uau'):
csv_readers.append(dict_reader(io.TextIOWrapper(zip.open(entry, 'r')))) csv_readers.append(
dict_reader(io.TextIOWrapper(zipfile.open(entry, 'r')))
)
if args.header: if args.header:
fieldnames = args.header.split(',') fieldnames = args.header.split(',')
@@ -73,8 +90,8 @@ if len(csv_readers) > 0:
keyField = args.key_field keyField = args.key_field
if keyField: if keyField:
assert keyField in fieldnames, ( assert keyField in fieldnames, (
"--key_field {} not found, must be one of {}\n").format( "--key_field {} not found, must be one of {}\n"
keyField, ",".join(fieldnames)) ).format(keyField, ",".join(fieldnames))
# Make the key field the first field in the output # Make the key field the first field in the output
keyFieldIndex = fieldnames.index(args.key_field) keyFieldIndex = fieldnames.index(args.key_field)
fieldnames.insert(0, fieldnames.pop(keyFieldIndex)) fieldnames.insert(0, fieldnames.pop(keyFieldIndex))
@@ -83,11 +100,17 @@ if len(csv_readers) > 0:
all_rows = heapq.merge(*csv_readers, key=operator.itemgetter(keyField)) all_rows = heapq.merge(*csv_readers, key=operator.itemgetter(keyField))
# Write all rows from the input files to the output: # Write all rows from the input files to the output:
writer = csv.DictWriter(args.output, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL, writer = csv.DictWriter(
dialect='unix', fieldnames=fieldnames) args.output,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL,
dialect='unix',
fieldnames=fieldnames,
)
writer.writeheader() writer.writeheader()
# Read all the rows from the input and write them to the output in the correct # Read all the rows from the input and write them to the output in the correct
# order: # order:
for row in all_rows: for row in all_rows:
writer.writerow(row) writer.writerow(row)

View file

@@ -13,22 +13,26 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
""" """Generate a set of signature patterns from the modular flags generated by a
Generate a set of signature patterns from the modular flags generated by a
bootclasspath_fragment that can be used to select a subset of monolithic flags bootclasspath_fragment that can be used to select a subset of monolithic flags
against which the modular flags can be compared. against which the modular flags can be compared.
""" """
import argparse import argparse
import csv import csv
import sys
def dict_reader(csvfile):
return csv.DictReader(
csvfile, delimiter=',', quotechar='|', fieldnames=['signature']
)
def dict_reader(input):
return csv.DictReader(input, delimiter=',', quotechar='|', fieldnames=['signature'])
def produce_patterns_from_file(file): def produce_patterns_from_file(file):
with open(file, 'r') as f: with open(file, 'r') as f:
return produce_patterns_from_stream(f) return produce_patterns_from_stream(f)
def produce_patterns_from_stream(stream): def produce_patterns_from_stream(stream):
# Read in all the signatures into a list and remove member names. # Read in all the signatures into a list and remove member names.
patterns = set() patterns = set()
@@ -38,18 +42,26 @@ def produce_patterns_from_stream(stream):
# Remove the class specific member signature # Remove the class specific member signature
pieces = text.split(";->") pieces = text.split(";->")
qualifiedClassName = pieces[0] qualifiedClassName = pieces[0]
# Remove inner class names as they cannot be separated from the containing outer class. # Remove inner class names as they cannot be separated
# from the containing outer class.
pieces = qualifiedClassName.split("$", maxsplit=1) pieces = qualifiedClassName.split("$", maxsplit=1)
pattern = pieces[0] pattern = pieces[0]
patterns.add(pattern) patterns.add(pattern)
patterns = list(patterns) patterns = list(patterns) #pylint: disable=redefined-variable-type
patterns.sort() patterns.sort()
return patterns return patterns
def main(args): def main(args):
args_parser = argparse.ArgumentParser(description='Generate a set of signature patterns that select a subset of monolithic hidden API files.') args_parser = argparse.ArgumentParser(
args_parser.add_argument('--flags', help='The stub flags file which contains an entry for every dex member') description='Generate a set of signature patterns '
'that select a subset of monolithic hidden API files.'
)
args_parser.add_argument(
'--flags',
help='The stub flags file which contains an entry for every dex member',
)
args_parser.add_argument('--output', help='Generated signature prefixes') args_parser.add_argument('--output', help='Generated signature prefixes')
args = args_parser.parse_args(args) args = args_parser.parse_args(args)
@@ -62,5 +74,6 @@ def main(args):
outputFile.write(pattern) outputFile.write(pattern)
outputFile.write("\n") outputFile.write("\n")
if __name__ == "__main__": if __name__ == "__main__":
main(sys.argv[1:]) main(sys.argv[1:])

View file

@@ -18,21 +18,25 @@
import io import io
import unittest import unittest
from signature_patterns import * from signature_patterns import * #pylint: disable=unused-wildcard-import,wildcard-import
class TestGeneratedPatterns(unittest.TestCase): class TestGeneratedPatterns(unittest.TestCase):
def produce_patterns_from_string(self, csvdata):
def produce_patterns_from_string(self, csv): with io.StringIO(csvdata) as f:
with io.StringIO(csv) as f:
return produce_patterns_from_stream(f) return produce_patterns_from_stream(f)
def test_generate(self): def test_generate(self):
patterns = self.produce_patterns_from_string(''' #pylint: disable=line-too-long
patterns = self.produce_patterns_from_string(
'''
Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V,blocked Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V,blocked
Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;,public-api Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;,public-api
Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api
Ljava/lang/Object;->toString()Ljava/lang/String;,blocked Ljava/lang/Object;->toString()Ljava/lang/String;,blocked
''') '''
)
#pylint: enable=line-too-long
expected = [ expected = [
"java/lang/Character", "java/lang/Character",
"java/lang/Object", "java/lang/Object",
@@ -40,5 +44,6 @@ Ljava/lang/Object;->toString()Ljava/lang/String;,blocked
] ]
self.assertEqual(expected, patterns) self.assertEqual(expected, patterns)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main(verbosity=2) unittest.main(verbosity=2)