class DatabaseThread(threading.Thread):
"""This thread processes results from Slot threads.
    It collects the data in the master config dictionary. There is only one
result thread, and this helps to serialise the build output.
"""
def __init__(self, config_db, db_queue):
"""Set up a new result thread
Args:
            config_db: Dictionary to add the results to, keyed by defconfig
            db_queue: Queue to read (defconfig, configs) results from
"""
threading.Thread.__init__(self)
self.config_db = config_db
        self.db_queue = db_queue
def run(self):
"""Called to start up the result thread.
        We collect the next result and store it in the config dictionary.
"""
while True:
defconfig, configs = self.db_queue.get()
self.config_db[defconfig] = configs
self.db_queue.task_done()
class Slot:
"""A slot to store a subprocess.
Each instance of this class handles one subprocess.
    This class is useful to control multiple processes
for faster processing.
"""
def __init__(self, toolchains, configs, options, progress, devnull,
make_cmd, reference_src_dir, db_queue):
"""Create a new process slot.
Arguments:
toolchains: Toolchains object containing toolchains.
configs: A list of CONFIGs to move.
options: option flags.
progress: A progress indicator.
devnull: A file object of '/dev/null'.
make_cmd: command name of GNU Make.
reference_src_dir: Determine the true starting config state from this
source tree.
db_queue: output queue to write config info for the database
"""
self.toolchains = toolchains
self.options = options
self.progress = progress
self.build_dir = tempfile.mkdtemp()
self.devnull = devnull
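        # Build out of tree ('make O=<tmpdir>') so that slots running in
        # parallel do not clobber each other's .config and build output.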
self.make_cmd = (make_cmd, 'O=' + self.build_dir)
self.reference_src_dir = reference_src_dir
self.db_queue = db_queue
self.parser = KconfigParser(configs, options, self.build_dir)
self.state = STATE_IDLE
self.failed_boards = set()
self.suspicious_boards = set()
def __del__(self):
"""Delete the working directory
This function makes sure the temporary directory is cleaned away
even if Python suddenly dies due to error. It should be done in here
because it is guaranteed the destructor is always invoked when the
instance of the class gets unreferenced.
If the subprocess is still running, wait until it finishes.
"""
if self.state != STATE_IDLE:
            while self.ps.poll() is None:
pass
shutil.rmtree(self.build_dir)
def add(self, defconfig):
"""Assign a new subprocess for defconfig and add it to the slot.
If the slot is vacant, create a new subprocess for processing the
given defconfig and add it to the slot. Just returns False if
the slot is occupied (i.e. the current subprocess is still running).
Arguments:
defconfig: defconfig name.
Returns:
Return True on success or False on failure
"""
if self.state != STATE_IDLE:
return False
self.defconfig = defconfig
self.log = ''
self.current_src_dir = self.reference_src_dir
self.do_defconfig()
return True
def poll(self):
"""Check the status of the subprocess and handle it as needed.
Returns True if the slot is vacant (i.e. in idle state).
If the configuration is successfully finished, assign a new
subprocess to build include/autoconf.mk.
If include/autoconf.mk is generated, invoke the parser to
parse the .config and the include/autoconf.mk, moving
config options to the .config as needed.
If the .config was updated, run "make savedefconfig" to sync
it, update the original defconfig, and then set the slot back
to the idle state.
Returns:
Return True if the subprocess is terminated, False otherwise
"""
if self.state == STATE_IDLE:
return True
        if self.ps.poll() is None:
return False
if self.ps.poll() != 0:
self.handle_error()
elif self.state == STATE_DEFCONFIG:
if self.reference_src_dir and not self.current_src_dir:
self.do_savedefconfig()
else:
self.do_autoconf()
elif self.state == STATE_AUTOCONF:
            if self.current_src_dir:
                self.current_src_dir = None
                self.do_defconfig()
elif self.options.build_db:
self.do_build_db()
else:
self.do_savedefconfig()
elif self.state == STATE_SAVEDEFCONFIG:
self.update_defconfig()
else:
sys.exit("Internal Error. This should not happen.")
        return self.state == STATE_IDLE
def handle_error(self):
"""Handle error cases."""
self.log += color_text(self.options.color, COLOR_LIGHT_RED,
"Failed to process.\n")
if self.options.verbose:
self.log += color_text(self.options.color, COLOR_LIGHT_CYAN,
self.ps.stderr.read())
self.finish(False)
def do_defconfig(self):
"""Run 'make <board>_defconfig' to create the .config file."""
cmd = list(self.make_cmd)
cmd.append(self.defconfig)
self.ps = subprocess.Popen(cmd, stdout=self.devnull,
stderr=subprocess.PIPE,
cwd=self.current_src_dir)
self.state = STATE_DEFCONFIG
def do_autoconf(self):
"""Run 'make AUTO_CONF_PATH'."""
arch = self.parser.get_arch()
try:
toolchain = self.toolchains.Select(arch)
except ValueError:
self.log += color_text(self.options.color, COLOR_YELLOW,
"Tool chain for '%s' is missing. Do nothing.\n % arch")
self.finish(False)
return
env = toolchain.MakeEnvironment(False)
cmd = list(self.make_cmd)
cmd.append('KCONFIG_IGNORE_DUPLICATES=1')
self.ps = subprocess.Popen(cmd, stdout=self.devnull, env=env,
stderr=subprocess.PIPE,
cwd=self.current_src_dir)
self.state = STATE_AUTOCONF
def do_build_db(self):
"""Add the board to the database"""
configs = {}
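        # Each relevant line of the generated config file looks like
        # 'CONFIG_FOO=y' or 'CONFIG_BAR="baz"'; split on the first '=' and
        # strip the trailing newline from the value.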
with open(os.path.join(self.build_dir, AUTO_CONF_PATH)) as fd:
for line in fd.readlines():
if line.startswith('CONFIG'):
config, value = line.split('=', 1)
configs[config] = value.rstrip()
self.db_queue.put([self.defconfig, configs])
self.finish(True)
def do_savedefconfig(self):
"""Update the .config and run 'make savedefconfig'."""
(updated, suspicious, log) = self.parser.update_dotconfig()
if suspicious:
self.suspicious_boards.add(self.defconfig)
self.log += log
if not self.options.force_sync and not updated:
self.finish(True)
return
if updated:
self.log += color_text(self.options.color, COLOR_LIGHT_GREEN,
"Syncing by savedefconfig...\n")
else:
self.log += "Syncing by savedefconfig (forced by option)...\n"
cmd = list(self.make_cmd)
cmd.append('savedefconfig')
self.ps = subprocess.Popen(cmd, stdout=self.devnull,
stderr=subprocess.PIPE)
self.state = STATE_SAVEDEFCONFIG
def update_defconfig(self):
"""Update the input defconfig and go back to the idle state."""
log = self.parser.check_defconfig()
if log:
self.suspicious_boards.add(self.defconfig)
self.log += log
orig_defconfig = os.path.join('configs', self.defconfig)
new_defconfig = os.path.join(self.build_dir, 'defconfig')
updated = not filecmp.cmp(orig_defconfig, new_defconfig)
if updated:
self.log += color_text(self.options.color, COLOR_LIGHT_BLUE,
"defconfig was updated.\n")
if not self.options.dry_run and updated:
shutil.move(new_defconfig, orig_defconfig)
self.finish(True)
def finish(self, success):
"""Display log along with progress and go to the idle state.
Arguments:
success: Should be True when the defconfig was processed
successfully, or False when it fails.
"""
# output at least 30 characters to hide the "* defconfigs out of *".
log = self.defconfig.ljust(30) + '\n'
log += '\n'.join([ ' ' + s for s in self.log.split('\n') ])
# Some threads are running in parallel.
# Print log atomically to not mix up logs from different threads.
print >> (sys.stdout if success else sys.stderr), log
if not success:
if self.options.exit_on_error:
sys.exit("Exit on error.")
# If --exit-on-error flag is not set, skip this board and continue.
# Record the failed board.
self.failed_boards.add(self.defconfig)
self.progress.inc()
self.progress.show()
self.state = STATE_IDLE
def get_failed_boards(self):
"""Returns a set of failed boards (defconfigs) in this slot.
"""
return self.failed_boards
def get_suspicious_boards(self):
"""Returns a set of boards (defconfigs) with possible misconversion.
"""
return self.suspicious_boards - self.failed_boards
class Slots:
"""Controller of the array of subprocess slots."""
def __init__(self, toolchains, configs, options, progress,
reference_src_dir, db_queue):
"""Create a new slots controller.
Arguments:
toolchains: Toolchains object containing toolchains.
configs: A list of CONFIGs to move.
options: option flags.
progress: A progress indicator.
reference_src_dir: Determine the true starting config state from this
source tree.
db_queue: output queue to write config info for the database
"""
self.options = options
self.slots = []
devnull = get_devnull()
make_cmd = get_make_cmd()
for i in range(options.jobs):
self.slots.append(Slot(toolchains, configs, options, progress,
devnull, make_cmd, reference_src_dir,
db_queue))
def add(self, defconfig):
"""Add a new subprocess if a vacant slot is found.
Arguments:
          defconfig: defconfig name to be put into a vacant slot.
Returns:
Return True on success or False on failure
"""
for slot in self.slots:
if slot.add(defconfig):
return True
return False
def available(self):
"""Check if there is a vacant slot.
Returns:
          Return True if at least one vacant slot is found, False otherwise.
"""
for slot in self.slots:
if slot.poll():
return True
return False
def empty(self):
"""Check if all slots are vacant.
Returns:
Return True if all the slots are vacant, False otherwise.
"""
ret = True
for slot in self.slots:
if not slot.poll():
ret = False
return ret
def show_failed_boards(self):
"""Display all of the failed boards (defconfigs)."""
boards = set()
output_file = 'moveconfig.failed'
for slot in self.slots:
boards |= slot.get_failed_boards()
if boards:
boards = '\n'.join(boards) + '\n'
msg = "The following boards were not processed due to error:\n"
msg += boards
msg += "(the list has been saved in %s)\n" % output_file
print >> sys.stderr, color_text(self.options.color, COLOR_LIGHT_RED,
msg)
with open(output_file, 'w') as f:
f.write(boards)
def show_suspicious_boards(self):
"""Display all boards (defconfigs) with possible misconversion."""
boards = set()
output_file = 'moveconfig.suspicious'
for slot in self.slots:
boards |= slot.get_suspicious_boards()
if boards:
boards = '\n'.join(boards) + '\n'
msg = "The following boards might have been converted incorrectly.\n"
msg += "It is highly recommended to check them manually:\n"
msg += boards
msg += "(the list has been saved in %s)\n" % output_file
print >> sys.stderr, color_text(self.options.color, COLOR_YELLOW,
msg)
with open(output_file, 'w') as f:
f.write(boards)
class ReferenceSource:
"""Reference source against which original configs should be parsed."""
def __init__(self, commit):
"""Create a reference source directory based on a specified commit.
Arguments:
commit: commit to git-clone
"""
self.src_dir = tempfile.mkdtemp()
print "Cloning git repo to a separate work directory..."
subprocess.check_output(['git', 'clone', os.getcwd(), '.'],
cwd=self.src_dir)
print "Checkout '%s' to build the original autoconf.mk." % \
subprocess.check_output(['git', 'rev-parse', '--short', commit]).strip()
subprocess.check_output(['git', 'checkout', commit],
stderr=subprocess.STDOUT, cwd=self.src_dir)
def __del__(self):
"""Delete the reference source directory
This function makes sure the temporary directory is cleaned away
even if Python suddenly dies due to error. It should be done in here
because it is guaranteed the destructor is always invoked when the
instance of the class gets unreferenced.
"""
shutil.rmtree(self.src_dir)
def get_dir(self):
"""Return the absolute path to the reference source directory."""
return self.src_dir
def move_config(toolchains, configs, options, db_queue):
"""Move config options to defconfig files.
    Arguments:
      toolchains: Toolchains object containing toolchains
      configs: A list of CONFIGs to move.
      options: option flags
      db_queue: output queue to write config info for the database
"""
if len(configs) == 0:
if options.force_sync:
print 'No CONFIG is specified. You are probably syncing defconfigs.',
elif options.build_db:
print 'Building %s database' % CONFIG_DATABASE
else:
print 'Neither CONFIG nor --force-sync is specified. Nothing will happen.',
else:
print 'Move ' + ', '.join(configs),
print '(jobs: %d)\n' % options.jobs
    if options.git_ref:
        reference_src = ReferenceSource(options.git_ref)
        reference_src_dir = reference_src.get_dir()
else:
reference_src_dir = None
if options.defconfigs:
defconfigs = get_matched_defconfigs(options.defconfigs)
    else:
        defconfigs = get_all_defconfigs()
progress = Progress(len(defconfigs))
slots = Slots(toolchains, configs, options, progress, reference_src_dir,
db_queue)
# Main loop to process defconfig files:
# Add a new subprocess into a vacant slot.
# Sleep if there is no available slot.
for defconfig in defconfigs:
while not slots.add(defconfig):
while not slots.available():
# No available slot: sleep for a while
time.sleep(SLEEP_TIME)
# wait until all the subprocesses finish
while not slots.empty():
time.sleep(SLEEP_TIME)
slots.show_failed_boards()
slots.show_suspicious_boards()
def find_kconfig_rules(kconf, config, imply_config):
"""Check whether a config has a 'select' or 'imply' keyword
Args:
kconf: Kconfig.Config object
config: Name of config to check (without CONFIG_ prefix)
imply_config: Implying config (without CONFIG_ prefix) which may or
            may not have an 'imply' for 'config'
Returns:
        Symbol object for 'imply_config' if it selects 'config', else None
"""
sym = kconf.get_symbol(imply_config)
if sym:
for sel in sym.get_selected_symbols():
if sel.get_name() == config:
return sym
return None
def check_imply_rule(kconf, config, imply_config):
"""Check if we can add an 'imply' option
This finds imply_config in the Kconfig and looks to see if it is possible
to add an 'imply' for 'config' to that part of the Kconfig.
Args:
kconf: Kconfig.Config object
config: Name of config to check (without CONFIG_ prefix)
imply_config: Implying config (without CONFIG_ prefix) which may or
            may not have an 'imply' for 'config'
Returns:
tuple:
filename of Kconfig file containing imply_config, or None if none
line number within the Kconfig file, or 0 if none
message indicating the result
"""
sym = kconf.get_symbol(imply_config)
if not sym:
        return None, 0, 'cannot find sym'
locs = sym.get_def_locations()
if len(locs) != 1:
        return None, 0, '%d locations' % len(locs)
fname, linenum = locs[0]
cwd = os.getcwd()
if cwd and fname.startswith(cwd):
fname = fname[len(cwd) + 1:]
file_line = ' at %s:%d' % (fname, linenum)
with open(fname) as fd:
data = fd.read().splitlines()
if data[linenum - 1] != 'config %s' % imply_config:
return None, 0, 'bad sym format %s%s' % (data[linenum], file_line)
return fname, linenum, 'adding%s' % file_line
def add_imply_rule(config, fname, linenum):
"""Add a new 'imply' option to a Kconfig
Args:
config: config option to add an imply for (without CONFIG_ prefix)
fname: Kconfig filename to update
linenum: Line number to place the 'imply' before
Returns:
Message indicating the result
"""
file_line = ' at %s:%d' % (fname, linenum)
data = open(fname).read().splitlines()
linenum -= 1
for offset, line in enumerate(data[linenum:]):
if line.strip().startswith('help') or not line:
data.insert(linenum + offset, '\timply %s' % config)
with open(fname, 'w') as fd:
fd.write('\n'.join(data) + '\n')
return 'added%s' % file_line
    return 'could not insert%s' % file_line
(IMPLY_MIN_2, IMPLY_TARGET, IMPLY_CMD, IMPLY_NON_ARCH_BOARD) = (
1, 2, 4, 8)
IMPLY_FLAGS = {
'min2': [IMPLY_MIN_2, 'Show options which imply >2 boards (normally >5)'],
'target': [IMPLY_TARGET, 'Allow CONFIG_TARGET_... options to imply'],
'cmd': [IMPLY_CMD, 'Allow CONFIG_CMD_... to imply'],
'non-arch-board': [
IMPLY_NON_ARCH_BOARD,
        'Allow Kconfig options outside arch/ and board/ to imply'],
}
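# For example, '-I cmd,target' ORs IMPLY_CMD and IMPLY_TARGET into
# imply_flags, while '-I all' enables every flag (see the option handling in
# main()).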
def do_imply_config(config_list, add_imply, imply_flags, skip_added,
check_kconfig=True, find_superset=False):
"""Find CONFIG options which imply those in the list
Some CONFIG options can be implied by others and this can help to reduce
the size of the defconfig files. For example, CONFIG_X86 implies
CONFIG_CMD_IRQ, so we can put 'imply CMD_IRQ' under 'config X86' and
all x86 boards will have that option, avoiding adding CONFIG_CMD_IRQ to
each of the x86 defconfig files.
This function uses the moveconfig database to find such options. It
displays a list of things that could possibly imply those in the list.
The algorithm ignores any that start with CONFIG_TARGET since these
typically refer to only a few defconfigs (often one). It also does not
    display a config with fewer than 5 defconfigs.
The algorithm works using sets. For each target config in config_list:
- Get the set 'defconfigs' which use that target config
- For each config (from a list of all configs):
            - Get the set 'imply_defconfig' of defconfigs which use that config
            - If imply_defconfigs contains anything not in defconfigs then
              this config does not imply the target config
Params:
config_list: List of CONFIG options to check (each a string)
add_imply: Automatically add an 'imply' for each config.
imply_flags: Flags which control which implying configs are allowed
(IMPLY_...)
skip_added: Don't show options which already have an imply added.
check_kconfig: Check if implied symbols already have an 'imply' or
'select' for the target config, and show this information if so.
find_superset: True to look for configs which are a superset of those
already found. So for example if CONFIG_EXYNOS5 implies an option,
but CONFIG_EXYNOS covers a larger set of defconfigs and also
implies that option, this will drop the former in favour of the
            latter. In practice this option has not proved very useful.
    Note the terminology:
        config - a CONFIG_XXX option (a string, e.g. 'CONFIG_CMD_EEPROM')
defconfig - a defconfig file (a string, e.g. 'configs/snow_defconfig')
"""
kconf = KconfigScanner().conf if check_kconfig else None
if add_imply and add_imply != 'all':
add_imply = add_imply.split()
# key is defconfig name, value is dict of (CONFIG_xxx, value)
config_db = {}
# Holds a dict containing the set of defconfigs that contain each config
# key is config, value is set of defconfigs using that config
defconfig_db = collections.defaultdict(set)
# Set of all config options we have seen
all_configs = set()
# Set of all defconfigs we have seen
all_defconfigs = set()
# Read in the database
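    # The database file written by main() groups its lines by defconfig: the
    # defconfig name on its own line, one ' CONFIG_xxx=value' line per option
    # (note the leading space), then a blank line as separator.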
configs = {}
with open(CONFIG_DATABASE) as fd:
for line in fd.readlines():
line = line.rstrip()
if not line: # Separator between defconfigs
config_db[defconfig] = configs
all_defconfigs.add(defconfig)
configs = {}
elif line[0] == ' ': # CONFIG line
config, value = line.strip().split('=', 1)
configs[config] = value
defconfig_db[config].add(defconfig)
all_configs.add(config)
else: # New defconfig
defconfig = line
    # Work through each target config option in turn, independently
for config in config_list:
defconfigs = defconfig_db.get(config)
if not defconfigs:
print '%s not found in any defconfig' % config
continue
# Get the set of defconfigs without this one (since a config cannot
# imply itself)
non_defconfigs = all_defconfigs - defconfigs
num_defconfigs = len(defconfigs)
print '%s found in %d/%d defconfigs' % (config, num_defconfigs,
                                                len(all_defconfigs))
# This will hold the results: key=config, value=defconfigs containing it
imply_configs = {}
rest_configs = all_configs - set([config])
# Look at every possible config, except the target one
for imply_config in rest_configs:
            if 'ERRATUM' in imply_config:
                continue
if not (imply_flags & IMPLY_CMD):
if 'CONFIG_CMD' in imply_config:
continue
if not (imply_flags & IMPLY_TARGET):
if 'CONFIG_TARGET' in imply_config:
continue
# Find set of defconfigs that have this config
imply_defconfig = defconfig_db[imply_config]
# Get the intersection of this with defconfigs containing the
# target config
common_defconfigs = imply_defconfig & defconfigs
# Get the set of defconfigs containing this config which DO NOT
            # also contain the target config. If this set is non-empty it means
# that this config affects other defconfigs as well as (possibly)
# the ones affected by the target config. This means it implies
# things we don't want to imply.
not_common_defconfigs = imply_defconfig & non_defconfigs
if not_common_defconfigs:
continue
# If there are common defconfigs, imply_config may be useful
if common_defconfigs:
skip = False
if find_superset:
for prev in imply_configs.keys():
prev_count = len(imply_configs[prev])
count = len(common_defconfigs)
if (prev_count > count and
(imply_configs[prev] & common_defconfigs ==
common_defconfigs)):
# skip imply_config because prev is a superset
skip = True
break
elif count > prev_count:
# delete prev because imply_config is a superset
del imply_configs[prev]
if not skip:
imply_configs[imply_config] = common_defconfigs
# Now we have a dict imply_configs of configs which imply each config
# The value of each dict item is the set of defconfigs containing that
# config. Rank them so that we print the configs that imply the largest
# number of defconfigs first.
ranked_iconfigs = sorted(imply_configs,
key=lambda k: len(imply_configs[k]), reverse=True)
kconfig_info = ''
cwd = os.getcwd()
add_list = collections.defaultdict(list)
for iconfig in ranked_iconfigs:
num_common = len(imply_configs[iconfig])
# Don't bother if there are less than 5 defconfigs affected.
            if num_common < (2 if imply_flags & IMPLY_MIN_2 else 5):
                continue
            missing = defconfigs - imply_configs[iconfig]
            missing_str = ', '.join(missing) if missing else 'all'
            missing_str = ''
show = True
if kconf:
sym = find_kconfig_rules(kconf, config[CONFIG_LEN:],
iconfig[CONFIG_LEN:])
kconfig_info = ''
if sym:
locs = sym.get_def_locations()
if len(locs) == 1:
fname, linenum = locs[0]
if cwd and fname.startswith(cwd):
fname = fname[len(cwd) + 1:]
kconfig_info = '%s:%d' % (fname, linenum)
if skip_added:
show = False
else:
sym = kconf.get_symbol(iconfig[CONFIG_LEN:])
fname = ''
if sym:
locs = sym.get_def_locations()
if len(locs) == 1:
fname, linenum = locs[0]
if cwd and fname.startswith(cwd):
fname = fname[len(cwd) + 1:]
in_arch_board = not sym or (fname.startswith('arch') or
fname.startswith('board'))
if (not in_arch_board and
not (imply_flags & IMPLY_NON_ARCH_BOARD)):
continue
if add_imply and (add_imply == 'all' or
iconfig in add_imply):
fname, linenum, kconfig_info = (check_imply_rule(kconf,
config[CONFIG_LEN:], iconfig[CONFIG_LEN:]))
if fname:
add_list[fname].append(linenum)
if show and kconfig_info != 'skip':
print '%5d : %-30s%-25s %s' % (num_common, iconfig.ljust(30),
kconfig_info, missing_str)
# Having collected a list of things to add, now we add them. We process
# each file from the largest line number to the smallest so that
# earlier additions do not affect our line numbers. E.g. if we added an
# imply at line 20 it would change the position of each line after
# that.
for fname, linenums in add_list.iteritems():
for linenum in sorted(linenums, reverse=True):
add_imply_rule(config[CONFIG_LEN:], fname, linenum)
def main():
try:
cpu_count = multiprocessing.cpu_count()
except NotImplementedError:
cpu_count = 1
parser = optparse.OptionParser()
# Add options here
parser.add_option('-a', '--add-imply', type='string', default='',
help='comma-separated list of CONFIG options to add '
"an 'imply' statement to for the CONFIG in -i")
parser.add_option('-A', '--skip-added', action='store_true', default=False,
help="don't show options which are already marked as "
'implying others')
parser.add_option('-b', '--build-db', action='store_true', default=False,
help='build a CONFIG database')
parser.add_option('-c', '--color', action='store_true', default=False,
help='display the log in color')
parser.add_option('-C', '--commit', action='store_true', default=False,
help='Create a git commit for the operation')
parser.add_option('-d', '--defconfigs', type='string',
help='a file containing a list of defconfigs to move, '
"one per line (for example 'snow_defconfig') "
"or '-' to read from stdin")
parser.add_option('-i', '--imply', action='store_true', default=False,
help='find options which imply others')
parser.add_option('-I', '--imply-flags', type='string', default='',
help="control the -i option ('help' for help")
parser.add_option('-n', '--dry-run', action='store_true', default=False,
help='perform a trial run (show log with no changes)')
parser.add_option('-e', '--exit-on-error', action='store_true',
default=False,
help='exit immediately on any error')
parser.add_option('-s', '--force-sync', action='store_true', default=False,
help='force sync by savedefconfig')
parser.add_option('-S', '--spl', action='store_true', default=False,
help='parse config options defined for SPL build')
parser.add_option('-H', '--headers-only', dest='cleanup_headers_only',
action='store_true', default=False,
help='only cleanup the headers')
parser.add_option('-j', '--jobs', type='int', default=cpu_count,
help='the number of jobs to run simultaneously')
parser.add_option('-r', '--git-ref', type='string',
help='the git ref to clone for building the autoconf.mk')
parser.add_option('-y', '--yes', action='store_true', default=False,
help="respond 'yes' to any prompts")
parser.add_option('-v', '--verbose', action='store_true', default=False,
help='show any build errors as boards are built')
parser.usage += ' CONFIG ...'
(options, configs) = parser.parse_args()
if len(configs) == 0 and not any((options.force_sync, options.build_db,
options.imply)):
parser.print_usage()
sys.exit(1)
# prefix the option name with CONFIG_ if missing
configs = [ config if config.startswith('CONFIG_') else 'CONFIG_' + config
for config in configs ]
check_top_directory()
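    # When -i/--imply is given, turn the -I/--imply-flags string into a
    # bitmask: for example 'cmd,target' selects IMPLY_CMD | IMPLY_TARGET and
    # 'all' selects every flag.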
    if options.imply:
        imply_flags = 0
        if options.imply_flags == 'all':
            imply_flags = -1
        elif options.imply_flags:
            for flag in options.imply_flags.split(','):
                bad = flag not in IMPLY_FLAGS
                if bad:
                    print "Invalid flag '%s'" % flag
                if flag == 'help' or bad:
                    print "Imply flags: (separate with ',')"
                    for name, info in IMPLY_FLAGS.iteritems():
                        print ' %-15s: %s' % (name, info[1])
                    parser.print_usage()
                    sys.exit(1)
                imply_flags |= IMPLY_FLAGS[flag][0]
        do_imply_config(configs, options.add_imply, imply_flags,
                        options.skip_added)
        return
config_db = {}
db_queue = Queue.Queue()
t = DatabaseThread(config_db, db_queue)
t.setDaemon(True)
t.start()
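    # The daemon DatabaseThread drains db_queue into config_db; the database
    # file itself is only written at the end of main() when --build-db is
    # given.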
if not options.cleanup_headers_only:
check_clean_directory()
bsettings.Setup('')
toolchains = toolchain.Toolchains()
toolchains.GetSettings()
toolchains.Scan(verbose=False)
move_config(toolchains, configs, options, db_queue)
if configs:
cleanup_headers(configs, options)
cleanup_extra_options(configs, options)
cleanup_whitelist(configs, options)
cleanup_readme(configs, options)
if options.commit:
subprocess.call(['git', 'add', '-u'])
if configs:
msg = 'Convert %s %sto Kconfig' % (configs[0],
'et al ' if len(configs) > 1 else '')
msg += ('\n\nThis converts the following to Kconfig:\n %s\n' %
'\n '.join(configs))
else:
msg = 'configs: Resync with savedefconfig'
            msg += '\n\nResync all defconfig files using moveconfig.py'
subprocess.call(['git', 'commit', '-s', '-m', msg])
if options.build_db:
with open(CONFIG_DATABASE, 'w') as fd:
for defconfig, configs in config_db.iteritems():
fd.write('%s\n' % defconfig)
for config in sorted(configs.keys()):
fd.write(' %s=%s\n' % (config, configs[config]))
fd.write('\n')
if __name__ == '__main__':
main()