for (action, value) in self.results:
if action != ACTION_MOVE:
continue
if value + '\n' not in defconfig_lines:
log += color_text(self.options.color, COLOR_YELLOW,
"'%s' was removed by savedefconfig.\n" %
value)
return log
class DatabaseThread(threading.Thread):
"""This thread processes results from Slot threads.
It collects the data in the master config dictionary. There is only one
database thread, which serialises updates to the dictionary.
"""
def __init__(self, config_db, db_queue):
"""Set up a new result thread
Args:
builder: Builder which will be sent each result
"""
threading.Thread.__init__(self)
self.config_db = config_db
self.db_queue = db_queue
def run(self):
"""Called to start up the result thread.
We collect the next result job and pass it on to the build.
"""
while True:
defconfig, configs = self.db_queue.get()
self.config_db[defconfig] = configs
self.db_queue.task_done()
class Slot:
"""A slot to store a subprocess.
Each instance of this class handles one subprocess.
This class is useful to control multiple threads
for faster processing.
"""
def __init__(self, configs, options, progress, devnull, make_cmd,
reference_src_dir, db_queue):
"""Create a new process slot.
Arguments:
configs: A list of CONFIGs to move.
options: option flags.
progress: A progress indicator.
devnull: A file object of '/dev/null'.
make_cmd: command name of GNU Make.
reference_src_dir: Determine the true starting config state from this
source tree.
db_queue: output queue to write config info for the database
"""
self.options = options
self.progress = progress
self.build_dir = tempfile.mkdtemp()
self.devnull = devnull
self.make_cmd = (make_cmd, 'O=' + self.build_dir)
self.reference_src_dir = reference_src_dir
self.db_queue = db_queue
self.parser = KconfigParser(configs, options, self.build_dir)
self.state = STATE_IDLE
self.failed_boards = set()
self.suspicious_boards = set()
def __del__(self):
"""Delete the working directory
This function makes sure the temporary directory is cleaned away
even if Python suddenly dies due to error. It should be done in here
because it is guaranteed the destructor is always invoked when the
instance of the class gets unreferenced.
If the subprocess is still running, wait until it finishes.
"""
if self.state != STATE_IDLE:
while self.ps.poll() is None:
pass
shutil.rmtree(self.build_dir)
def add(self, defconfig):
"""Assign a new subprocess for defconfig and add it to the slot.
If the slot is vacant, create a new subprocess for processing the
given defconfig and add it to the slot. Just returns False if
the slot is occupied (i.e. the current subprocess is still running).
Arguments:
defconfig: defconfig name.
Returns:
Return True on success or False on failure
"""
if self.state != STATE_IDLE:
return False
self.defconfig = defconfig
self.log = ''
self.current_src_dir = self.reference_src_dir
self.do_defconfig()
return True
def poll(self):
"""Check the status of the subprocess and handle it as needed.
Returns True if the slot is vacant (i.e. in idle state).
If the configuration is successfully finished, assign a new
subprocess to build include/autoconf.mk.
If include/autoconf.mk is generated, invoke the parser to
parse the .config and the include/autoconf.mk, moving
config options to the .config as needed.
If the .config was updated, run "make savedefconfig" to sync
it, update the original defconfig, and then set the slot back
to the idle state.
Returns:
Return True if the slot is vacant (idle) after handling, False otherwise
"""
if self.state == STATE_IDLE:
return True
if self.ps.poll() is None:
return False
if self.ps.poll() != 0:
self.handle_error()
elif self.state == STATE_DEFCONFIG:
if self.reference_src_dir and not self.current_src_dir:
self.do_savedefconfig()
else:
self.do_autoconf()
elif self.state == STATE_AUTOCONF:
if self.current_src_dir:
self.current_src_dir = None
self.do_defconfig()
elif self.options.build_db:
self.do_build_db()
else:
self.do_savedefconfig()
elif self.state == STATE_SAVEDEFCONFIG:
self.update_defconfig()
else:
sys.exit("Internal Error. This should not happen.")
return self.state == STATE_IDLE
def handle_error(self):
"""Handle error cases."""
self.log += color_text(self.options.color, COLOR_LIGHT_RED,
"Failed to process.\n")
if self.options.verbose:
self.log += color_text(self.options.color, COLOR_LIGHT_CYAN,
self.ps.stderr.read())
self.finish(False)
def do_defconfig(self):
"""Run 'make <board>_defconfig' to create the .config file."""
cmd = list(self.make_cmd)
cmd.append(self.defconfig)
self.ps = subprocess.Popen(cmd, stdout=self.devnull,
stderr=subprocess.PIPE,
cwd=self.current_src_dir)
self.state = STATE_DEFCONFIG
def do_autoconf(self):
"""Run 'make AUTO_CONF_PATH'."""
self.cross_compile = self.parser.get_cross_compile()
if self.cross_compile is None:
self.log += color_text(self.options.color, COLOR_YELLOW,
"Compiler is missing. Do nothing.\n")
self.finish(False)
return
cmd = list(self.make_cmd)
if self.cross_compile:
cmd.append('CROSS_COMPILE=%s' % self.cross_compile)
cmd.append('KCONFIG_IGNORE_DUPLICATES=1')
self.ps = subprocess.Popen(cmd, stdout=self.devnull,
stderr=subprocess.PIPE,
cwd=self.current_src_dir)
self.state = STATE_AUTOCONF
def do_build_db(self):
"""Add the board to the database"""
configs = {}
with open(os.path.join(self.build_dir, AUTO_CONF_PATH)) as fd:
for line in fd.readlines():
if line.startswith('CONFIG'):
config, value = line.split('=', 1)
configs[config] = value.rstrip()
self.db_queue.put([self.defconfig, configs])
self.finish(True)
def do_savedefconfig(self):
"""Update the .config and run 'make savedefconfig'."""
(updated, suspicious, log) = self.parser.update_dotconfig()
if suspicious:
self.suspicious_boards.add(self.defconfig)
self.log += log
if not self.options.force_sync and not updated:
self.finish(True)
return
if updated:
self.log += color_text(self.options.color, COLOR_LIGHT_GREEN,
"Syncing by savedefconfig...\n")
else:
self.log += "Syncing by savedefconfig (forced by option)...\n"
cmd = list(self.make_cmd)
cmd.append('savedefconfig')
self.ps = subprocess.Popen(cmd, stdout=self.devnull,
stderr=subprocess.PIPE)
self.state = STATE_SAVEDEFCONFIG
def update_defconfig(self):
"""Update the input defconfig and go back to the idle state."""
log = self.parser.check_defconfig()
if log:
self.suspicious_boards.add(self.defconfig)
self.log += log
orig_defconfig = os.path.join('configs', self.defconfig)
new_defconfig = os.path.join(self.build_dir, 'defconfig')
updated = not filecmp.cmp(orig_defconfig, new_defconfig)
if updated:
self.log += color_text(self.options.color, COLOR_LIGHT_BLUE,
"defconfig was updated.\n")
if not self.options.dry_run and updated:
shutil.move(new_defconfig, orig_defconfig)
self.finish(True)
def finish(self, success):
"""Display log along with progress and go to the idle state.
Arguments:
success: Should be True when the defconfig was processed
successfully, or False when it fails.
"""
# output at least 30 characters to hide the "* defconfigs out of *".
log = self.defconfig.ljust(30) + '\n'
log += '\n'.join([ ' ' + s for s in self.log.split('\n') ])
# Some threads are running in parallel.
# Print log atomically to not mix up logs from different threads.
print >> (sys.stdout if success else sys.stderr), log
if not success:
if self.options.exit_on_error:
sys.exit("Exit on error.")
# If --exit-on-error flag is not set, skip this board and continue.
# Record the failed board.
self.failed_boards.add(self.defconfig)
self.progress.inc()
self.progress.show()
self.state = STATE_IDLE
def get_failed_boards(self):
"""Returns a set of failed boards (defconfigs) in this slot.
"""
return self.failed_boards
def get_suspicious_boards(self):
"""Returns a set of boards (defconfigs) with possible misconversion.
"""
return self.suspicious_boards - self.failed_boards
class Slots:
"""Controller of the array of subprocess slots."""
def __init__(self, configs, options, progress, reference_src_dir, db_queue):
"""Create a new slots controller.
Arguments:
configs: A list of CONFIGs to move.
options: option flags.
progress: A progress indicator.
reference_src_dir: Determine the true starting config state from this
source tree.
db_queue: output queue to write config info for the database
"""
self.options = options
self.slots = []
devnull = get_devnull()
make_cmd = get_make_cmd()
for i in range(options.jobs):
self.slots.append(Slot(configs, options, progress, devnull,
make_cmd, reference_src_dir, db_queue))
def add(self, defconfig):
"""Add a new subprocess if a vacant slot is found.
Arguments:
defconfig: defconfig name to be assigned to a vacant slot.
Returns:
Return True on success or False on failure
"""
for slot in self.slots:
if slot.add(defconfig):
return True
return False
def available(self):
"""Check if there is a vacant slot.
Returns:
Return True if at least one vacant slot is found, False otherwise.
"""
for slot in self.slots:
if slot.poll():
return True
return False
def empty(self):
"""Check if all slots are vacant.
Returns:
Return True if all the slots are vacant, False otherwise.
"""
ret = True
for slot in self.slots:
if not slot.poll():
ret = False
return ret
def show_failed_boards(self):
"""Display all of the failed boards (defconfigs)."""
boards = set()
output_file = 'moveconfig.failed'
for slot in self.slots:
boards |= slot.get_failed_boards()
if boards:
boards = '\n'.join(boards) + '\n'
msg = "The following boards were not processed due to error:\n"
msg += boards
msg += "(the list has been saved in %s)\n" % output_file
print >> sys.stderr, color_text(self.options.color, COLOR_LIGHT_RED,
msg)
with open(output_file, 'w') as f:
f.write(boards)
def show_suspicious_boards(self):
"""Display all boards (defconfigs) with possible misconversion."""
boards = set()
output_file = 'moveconfig.suspicious'
for slot in self.slots:
boards |= slot.get_suspicious_boards()
if boards:
boards = '\n'.join(boards) + '\n'
msg = "The following boards might have been converted incorrectly.\n"
msg += "It is highly recommended to check them manually:\n"
msg += boards
msg += "(the list has been saved in %s)\n" % output_file
print >> sys.stderr, color_text(self.options.color, COLOR_YELLOW,
msg)
with open(output_file, 'w') as f:
f.write(boards)
class ReferenceSource:
"""Reference source against which original configs should be parsed."""
def __init__(self, commit):
"""Create a reference source directory based on a specified commit.
Arguments:
commit: commit to git-clone
"""
self.src_dir = tempfile.mkdtemp()
print "Cloning git repo to a separate work directory..."
subprocess.check_output(['git', 'clone', os.getcwd(), '.'],
cwd=self.src_dir)
print "Checkout '%s' to build the original autoconf.mk." % \
subprocess.check_output(['git', 'rev-parse', '--short', commit]).strip()
subprocess.check_output(['git', 'checkout', commit],
stderr=subprocess.STDOUT, cwd=self.src_dir)
def __del__(self):
"""Delete the reference source directory
This function makes sure the temporary directory is cleaned away
even if Python suddenly dies due to error. It should be done in here
because it is guaranteed the destructor is always invoked when the
instance of the class gets unreferenced.
"""
shutil.rmtree(self.src_dir)
def get_dir(self):
"""Return the absolute path to the reference source directory."""
return self.src_dir
def move_config(configs, options, db_queue):
"""Move config options to defconfig files.
Arguments:
configs: A list of CONFIGs to move.
options: option flags
db_queue: output queue to write config info for the database
"""
if len(configs) == 0:
if options.force_sync:
print 'No CONFIG is specified. You are probably syncing defconfigs.',
elif options.build_db:
print 'Building %s database' % CONFIG_DATABASE
else:
print 'Neither CONFIG nor --force-sync is specified. Nothing will happen.',
else:
print 'Move ' + ', '.join(configs),
print '(jobs: %d)\n' % options.jobs
if options.git_ref:
reference_src = ReferenceSource(options.git_ref)
reference_src_dir = reference_src.get_dir()
else:
reference_src_dir = None
if options.defconfigs:
defconfigs = get_matched_defconfigs(options.defconfigs)
else:
defconfigs = get_all_defconfigs()
progress = Progress(len(defconfigs))
slots = Slots(configs, options, progress, reference_src_dir, db_queue)
# Main loop to process defconfig files:
# Add a new subprocess into a vacant slot.
# Sleep if there is no available slot.
for defconfig in defconfigs:
while not slots.add(defconfig):
while not slots.available():
# No available slot: sleep for a while
time.sleep(SLEEP_TIME)
# wait until all the subprocesses finish
while not slots.empty():
time.sleep(SLEEP_TIME)
slots.show_failed_boards()
slots.show_suspicious_boards()
def imply_config(config_list, find_superset=False):
"""Find CONFIG options which imply those in the list
Some CONFIG options can be implied by others and this can help to reduce
the size of the defconfig files. For example, CONFIG_X86 implies
CONFIG_CMD_IRQ, so we can put 'imply CMD_IRQ' under 'config X86' and
all x86 boards will have that option, avoiding adding CONFIG_CMD_IRQ to
each of the x86 defconfig files.
This function uses the moveconfig database to find such options. It
displays a list of things that could possibly imply those in the list.
The algorithm ignores any that start with CONFIG_TARGET since these
typically refer to only a few defconfigs (often one). It also does not
display a config with fewer than 5 defconfigs.
The algorithm works using sets. For each target config in config_list:
- Get the set 'defconfigs' which use that target config
- For each config (from a list of all configs):
- Get the set 'imply_defconfig' of defconfigs which use that config
- If 'imply_defconfig' contains anything not in 'defconfigs' then
this config does not imply the target config
Params:
config_list: List of CONFIG options to check (each a string)
find_superset: True to look for configs which are a superset of those
already found. So for example if CONFIG_EXYNOS5 implies an option,
but CONFIG_EXYNOS covers a larger set of defconfigs and also
implies that option, this will drop the former in favour of the
latter. In practice this option has not proved very useful.
Note the terminology:
config - a CONFIG_XXX option (a string, e.g. 'CONFIG_CMD_EEPROM')
defconfig - a defconfig file (a string, e.g. 'configs/snow_defconfig')
"""
# key is defconfig name, value is dict of (CONFIG_xxx, value)
config_db = {}
# Holds a dict containing the set of defconfigs that contain each config
# key is config, value is set of defconfigs using that config
defconfig_db = collections.defaultdict(set)
# Set of all config options we have seen
all_configs = set()
# Set of all defconfigs we have seen
all_defconfigs = set()
# Read in the database
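# The database file (as written by the --build-db pass in main()) is
# expected to look roughly like this for each defconfig:
#   <defconfig name>
#    CONFIG_AAA=<value>
#    CONFIG_BBB=<value>
#   <blank line separating entries>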
configs = {}
with open(CONFIG_DATABASE) as fd:
for line in fd.readlines():
line = line.rstrip()
if not line: # Separator between defconfigs
config_db[defconfig] = configs
all_defconfigs.add(defconfig)
configs = {}
elif line[0] == ' ': # CONFIG line
config, value = line.strip().split('=', 1)
configs[config] = value
defconfig_db[config].add(defconfig)
all_configs.add(config)
else: # New defconfig
defconfig = line
# Work through each target config option in turn, independently
for config in config_list:
defconfigs = defconfig_db.get(config)
if not defconfigs:
print '%s not found in any defconfig' % config
continue
# Get the set of defconfigs without this one (since a config cannot
# imply itself)
non_defconfigs = all_defconfigs - defconfigs
num_defconfigs = len(defconfigs)
print '%s found in %d/%d defconfigs' % (config, num_defconfigs,
len(all_defconfigs))
# This will hold the results: key=config, value=defconfigs containing it
imply_configs = {}
rest_configs = all_configs - set([config])
# Look at every possible config, except the target one
for imply_config in rest_configs:
if 'CONFIG_TARGET' in imply_config:
continue
# Find set of defconfigs that have this config
imply_defconfig = defconfig_db[imply_config]
# Get the intersection of this with defconfigs containing the
# target config
common_defconfigs = imply_defconfig & defconfigs
# Get the set of defconfigs containing this config which DO NOT
# also contain the target config. If this set is non-empty it means
# that this config affects other defconfigs as well as (possibly)
# the ones affected by the target config. This means it implies
# things we don't want to imply.
not_common_defconfigs = imply_defconfig & non_defconfigs
if not_common_defconfigs:
continue
# If there are common defconfigs, imply_config may be useful
if common_defconfigs:
skip = False
if find_superset:
for prev in imply_configs.keys():
prev_count = len(imply_configs[prev])
count = len(common_defconfigs)
if (prev_count > count and
(imply_configs[prev] & common_defconfigs ==
common_defconfigs)):
# skip imply_config because prev is a superset
skip = True
break
elif count > prev_count:
# delete prev because imply_config is a superset
del imply_configs[prev]
if not skip:
imply_configs[imply_config] = common_defconfigs
# Now we have a dict imply_configs of configs which imply each config
# The value of each dict item is the set of defconfigs containing that
# config. Rank them so that we print the configs that imply the largest
# number of defconfigs first.
ranked_configs = sorted(imply_configs,
key=lambda k: len(imply_configs[k]), reverse=True)
for config in ranked_configs:
num_common = len(imply_configs[config])
# Don't bother if there are fewer than 5 defconfigs affected.
if num_common < 5:
continue
missing = defconfigs - imply_configs[config]
missing_str = ', '.join(missing) if missing else 'all'
# The missing list is cleared here, so it is not shown in the output below.
missing_str = ''
print ' %d : %-30s%s' % (num_common, config.ljust(30),
missing_str)
def main():
try:
cpu_count = multiprocessing.cpu_count()
except NotImplementedError:
cpu_count = 1
parser = optparse.OptionParser()
# Add options here
parser.add_option('-b', '--build-db', action='store_true', default=False,
help='build a CONFIG database')
parser.add_option('-c', '--color', action='store_true', default=False,
help='display the log in color')
parser.add_option('-C', '--commit', action='store_true', default=False,
help='Create a git commit for the operation')
parser.add_option('-d', '--defconfigs', type='string',
help='a file containing a list of defconfigs to move, '
"one per line (for example 'snow_defconfig') "
"or '-' to read from stdin")
parser.add_option('-i', '--imply', action='store_true', default=False,
help='find options which imply others')
parser.add_option('-n', '--dry-run', action='store_true', default=False,
help='perform a trial run (show log with no changes)')
parser.add_option('-e', '--exit-on-error', action='store_true',
default=False,
help='exit immediately on any error')
parser.add_option('-s', '--force-sync', action='store_true', default=False,
help='force sync by savedefconfig')
parser.add_option('-S', '--spl', action='store_true', default=False,
help='parse config options defined for SPL build')
parser.add_option('-H', '--headers-only', dest='cleanup_headers_only',
action='store_true', default=False,
help='only cleanup the headers')
parser.add_option('-j', '--jobs', type='int', default=cpu_count,
help='the number of jobs to run simultaneously')
parser.add_option('-r', '--git-ref', type='string',
help='the git ref to clone for building the autoconf.mk')
parser.add_option('-y', '--yes', action='store_true', default=False,
help="respond 'yes' to any prompts")
parser.add_option('-v', '--verbose', action='store_true', default=False,
help='show any build errors as boards are built')
parser.usage += ' CONFIG ...'
(options, configs) = parser.parse_args()
if len(configs) == 0 and not any((options.force_sync, options.build_db,
options.imply)):
parser.print_usage()
sys.exit(1)
# prefix the option name with CONFIG_ if missing
configs = [ config if config.startswith('CONFIG_') else 'CONFIG_' + config
for config in configs ]
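# (e.g. a bare 'CMD_FOO' argument, hypothetical, becomes 'CONFIG_CMD_FOO')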
check_top_directory()
if options.imply:
imply_config(configs)
return
config_db = {}
db_queue = Queue.Queue()
t = DatabaseThread(config_db, db_queue)
t.setDaemon(True)
t.start()
if not options.cleanup_headers_only:
check_clean_directory()
update_cross_compile(options.color)
move_config(configs, options, db_queue)
db_queue.join()
if configs:
cleanup_headers(configs, options)
cleanup_extra_options(configs, options)
cleanup_whitelist(configs, options)
cleanup_readme(configs, options)
if options.commit:
subprocess.call(['git', 'add', '-u'])
if configs:
msg = 'Convert %s %sto Kconfig' % (configs[0],
'et al ' if len(configs) > 1 else '')
msg += ('\n\nThis converts the following to Kconfig:\n %s\n' %
'\n '.join(configs))
else:
msg = 'configs: Resync with savedefconfig'
msg += '\n\nResync all defconfig files using moveconfig.py'
subprocess.call(['git', 'commit', '-s', '-m', msg])
if options.build_db:
with open(CONFIG_DATABASE, 'w') as fd:
for defconfig, configs in config_db.iteritems():
print >>fd, '%s' % defconfig
for config in sorted(configs.keys()):
print >>fd, ' %s=%s' % (config, configs[config])
print >>fd
if __name__ == '__main__':
main()