Update for python3.
This fixes all of the problems with our kernel scripts, but not the clang python script problems.

I also removed the updateGitFiles function since that code was just silently failing anyway. I replaced all calls with updateFiles.

Test: Ran script using python2 to verify it still works.
Test: Ran script in python3 verifying that it starts to run.
Change-Id: I223a31a8324c59e6bc4067f48a6110361b3e26e8
parent b82fcdaf36
commit ac7ec11473
5 changed files with 28 additions and 46 deletions
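Most of the diff below is the mechanical half of a Python 2 to 3 port: print statements become print() calls, string-module helpers become str methods, and the bare octal mode literal gains the 0o prefix. The short sketch below illustrates that pattern in isolation; the values, the example regex, and the temporary directory are illustrative only and are not taken from the bionic scripts.

import os
import tempfile

# Example values only; in the real scripts these come from defaults.py and
# the command line.
path = "uapi/linux/fs.h"
kernel_dirs = ["linux", "asm", "asm-generic", "mtd"]

# Python 2 spellings removed by this change (shown here as comments):
#   print "cleaning %s" % path
#   r"^.*<((%s)/.*)>.*$" % string.join(kernel_dirs, "|")
#   os.mkdir(update_dir, 0755)

# Python 3 spellings used instead (these also run under Python 2.7):
print("cleaning %s" % path)                             # print is a function
pattern = r"^.*<((%s)/.*)>.*$" % "|".join(kernel_dirs)  # str.join replaces string.join
update_dir = os.path.join(tempfile.mkdtemp(), "uapi")   # scratch dir for this demo
os.mkdir(update_dir, 0o755)                             # octal literal needs the 0o prefix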
@@ -127,7 +127,7 @@ def cleanupFile(dst_file, src_file, rel_path, no_update = True):
 if __name__ == "__main__":
 
     def usage():
-        print """\
+        print("""\
     usage: %s [options] <header_path>
 
     options:
@@ -142,7 +142,7 @@ if __name__ == "__main__":
         -d<path>  specify path of cleaned kernel headers
 
     <header_path> must be in a subdirectory of 'original'
-    """ % os.path.basename(sys.argv[0])
+    """ % os.path.basename(sys.argv[0]))
         sys.exit(1)
 
     try:
@@ -211,9 +211,8 @@ if __name__ == "__main__":
         else:
             r = "added"
 
-        print "cleaning: %-*s -> %-*s (%s)" % (35, path, 35, path, r)
+        print("cleaning: %-*s -> %-*s (%s)" % (35, path, 35, path, r))
 
-    b.updateGitFiles()
+    b.updateFiles()
 
     sys.exit(0)

@@ -394,10 +394,10 @@ class CppExpr(object):
         self._index = 0
 
         if debugCppExpr:
-            print "CppExpr: trying to parse %s" % repr(tokens)
+            print("CppExpr: trying to parse %s" % repr(tokens))
         self.expr = self.parseExpression(0)
         if debugCppExpr:
-            print "CppExpr: got " + repr(self.expr)
+            print("CppExpr: got " + repr(self.expr))
         if self._index != self._num_tokens:
             self.throw(BadExpectedToken, "crap at end of input (%d != %d): %s"
                        % (self._index, self._num_tokens, repr(tokens)))
@@ -405,9 +405,9 @@ class CppExpr(object):
     def throw(self, exception, msg):
         if self._index < self._num_tokens:
             tok = self.tokens[self._index]
-            print "%d:%d: %s" % (tok.location.line, tok.location.column, msg)
+            print("%d:%d: %s" % (tok.location.line, tok.location.column, msg))
         else:
-            print "EOF: %s" % msg
+            print("EOF: %s" % msg)
         raise exception(msg)
 
     def expectId(self, id):
@@ -1179,11 +1179,11 @@ class BlockList(object):
 
     def dump(self):
         """Dump all the blocks in current BlockList."""
-        print '##### BEGIN #####'
+        print('##### BEGIN #####')
         for i, b in enumerate(self.blocks):
-            print '### BLOCK %d ###' % i
-            print b
-        print '##### END #####'
+            print('### BLOCK %d ###' % i)
+            print(b)
+        print('##### END #####')
 
     def optimizeIf01(self):
         """Remove the code between #if 0 .. #endif in a BlockList."""
@@ -1510,7 +1510,7 @@ class BlockParser(object):
         while i < len(tokens) and tokens[i].location in extent:
             t = tokens[i]
             if debugBlockParser:
-                print ' ' * 2, t.id, t.kind, t.cursor.kind
+                print(' ' * 2, t.id, t.kind, t.cursor.kind)
             if (detect_change and t.cursor.extent != extent and
                 t.cursor.kind == CursorKind.PREPROCESSING_DIRECTIVE):
                 break

@@ -4,7 +4,7 @@
 # list here the macros that you know are always defined/undefined when including
 # the kernel headers
 #
-import sys, cpp, re, os.path, string, time
+import sys, cpp, re, os.path, time
 from defaults import *
 
 verboseSearch = 0
@@ -56,7 +56,7 @@ class HeaderScanner:
     #    <mtd/*>
     #
     re_combined_str=\
-       r"^.*<((%s)/[\d\w_\+\.\-/]*)>.*$" % string.join(kernel_dirs,"|")
+       r"^.*<((%s)/[\d\w_\+\.\-/]*)>.*$" % "|".join(kernel_dirs)
 
     re_combined = re.compile(re_combined_str)
 
@@ -100,7 +100,7 @@ class HeaderScanner:
 
         if from_file:
             if verboseFind:
-                print "=== %s uses %s" % (from_file, header)
+                print("=== %s uses %s" % (from_file, header))
             self.headers[header].add(from_file)
 
     def parseFile(self, path, arch=None, kernel_root=None):
@@ -114,7 +114,7 @@ class HeaderScanner:
         try:
             f = open(path, "rt")
         except:
-            print "!!! can't read '%s'" % path
+            print("!!! can't read '%s'" % path)
             return
 
         hasIncludes = False
@@ -125,10 +125,10 @@ class HeaderScanner:
                 break
 
         if not hasIncludes:
-            if verboseSearch: print "::: " + path
+            if verboseSearch: print("::: " + path)
             return
 
-        if verboseSearch: print "*** " + path
+        if verboseSearch: print("*** " + path)
 
         list = cpp.BlockParser().parseFile(path)
         if list:
@@ -205,7 +205,6 @@ class KernelHeaderFinder:
 
         if len(kernel_root) > 0 and kernel_root[-1] != "/":
             kernel_root += "/"
-        #print "using kernel_root %s" % kernel_root
         self.archs = archs
         self.searched = set(headers)
         self.kernel_root = kernel_root
@@ -301,7 +300,7 @@ class ConfigParser:
         self.duplicates = False
 
     def parseLine(self,line):
-        line = string.strip(line)
+        line = line.strip(line)
 
         # skip empty and comment lines
         if len(line) == 0 or line[0] == "#":

@@ -1,11 +1,11 @@
 #!/usr/bin/env python3
 #
-import sys, cpp, kernel, glob, os, re, getopt, clean_header, subprocess, shutil
+import sys, cpp, kernel, glob, os, re, getopt, clean_header, shutil
 from defaults import *
 from utils import *
 
 def Usage():
-    print """\
+    print("""\
   usage: %(progname)s [kernel-original-path] [kernel-modified-path]
 
   this program is used to update all the auto-generated clean headers
@@ -21,14 +21,14 @@ def Usage():
 
   - the clean headers will be placed in 'bionic/libc/kernel/arch-<arch>/asm',
     'bionic/libc/kernel/android', etc..
-""" % { "progname" : os.path.basename(sys.argv[0]) }
+""" % { "progname" : os.path.basename(sys.argv[0]) })
     sys.exit(0)
 
 def ProcessFiles(updater, original_dir, modified_dir, src_rel_dir, update_rel_dir):
     # Delete the old headers before updating to the new headers.
     update_dir = os.path.join(get_kernel_dir(), update_rel_dir)
     shutil.rmtree(update_dir)
-    os.mkdir(update_dir, 0755)
+    os.mkdir(update_dir, 0o755)
 
     src_dir = os.path.normpath(os.path.join(original_dir, src_rel_dir))
     src_dir_len = len(src_dir) + 1
@@ -62,7 +62,7 @@ def ProcessFiles(updater, original_dir, modified_dir, src_rel_dir, update_rel_dir):
         else:
             state = "added"
         update_path = os.path.join(update_rel_dir, rel_path)
-        print "cleaning %s -> %s (%s)" % (src_str, update_path, state)
+        print("cleaning %s -> %s (%s)" % (src_str, update_path, state))
 
 
 # This lets us support regular system calls like __NR_write and also weird
@@ -149,9 +149,10 @@ ProcessFiles(updater, original_dir, modified_dir, "uapi", "uapi"),
 # Now process the special files.
 ProcessFiles(updater, original_dir, modified_dir, "scsi", os.path.join("android", "scsi", "scsi"))
 
-updater.updateGitFiles()
+# Copy all of the files.
+updater.updateFiles()
 
 # Now re-generate the <bits/glibc-syscalls.h> from the new uapi headers.
 updater = BatchFileUpdater()
 GenerateGlibcSyscallsHeader(updater)
-updater.updateGitFiles()
+updater.updateFiles()

@@ -1,6 +1,5 @@
 # common python utility routines for the Bionic tool scripts
 
-import commands
 import logging
 import os
 import string
@@ -146,19 +145,3 @@ class BatchFileUpdater:
 
         for dst in sorted(deletes):
             os.remove(dst)
-
-    def updateGitFiles(self):
-        adds, deletes, edits = self.getChanges()
-
-        if adds:
-            for dst in sorted(adds):
-                self._writeFile(dst)
-            commands.getoutput("git add " + " ".join(adds))
-
-        if deletes:
-            commands.getoutput("git rm " + " ".join(deletes))
-
-        if edits:
-            for dst in sorted(edits):
-                self._writeFile(dst)
-            commands.getoutput("git add " + " ".join(edits))
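With updateGitFiles deleted, every caller now goes through updateFiles. The body of updateFiles is not part of this diff; only what is most likely its tail appears as context above (removing deleted paths with os.remove). Judging from that tail, it writes changed files and unlinks deletions directly instead of shelling out to git through the removed Python 2 commands module. The following is a rough, self-contained sketch under that assumption; the class internals are illustrative stand-ins, not the actual bionic BatchFileUpdater.

import os

class BatchFileUpdaterSketch:
    """Illustrative stand-in for utils.BatchFileUpdater (update path only)."""

    def __init__(self):
        self.old_files = set()   # paths already on disk
        self.new_data = {}       # path -> desired contents

    def getChanges(self):
        # The real class compares old and new file contents; this sketch only
        # splits the tracked paths into added / deleted / edited sets.
        adds = set(self.new_data) - self.old_files
        deletes = self.old_files - set(self.new_data)
        edits = set(self.new_data) & self.old_files
        return adds, deletes, edits

    def _writeFile(self, dst):
        with open(dst, "w") as f:
            f.write(self.new_data[dst])

    def updateFiles(self):
        # Apply pending changes to the working tree directly; unlike the
        # deleted updateGitFiles, nothing here runs "git add" or "git rm".
        adds, deletes, edits = self.getChanges()
        for dst in sorted(adds) + sorted(edits):
            self._writeFile(dst)
        for dst in sorted(deletes):
            os.remove(dst)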