summaryrefslogtreecommitdiffstats
path: root/import-layers/yocto-poky/bitbake/lib/bb
diff options
context:
space:
mode:
Diffstat (limited to 'import-layers/yocto-poky/bitbake/lib/bb')
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/COW.py44
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/__init__.py12
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/build.py194
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/cache.py404
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/checksum.py15
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/codeparser.py42
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/command.py2
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/cooker.py600
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py84
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/daemonize.py4
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/data.py10
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/data_smart.py11
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/event.py101
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/exceptions.py4
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py152
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py13
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py6
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py17
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py118
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py25
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py23
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py28
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py4
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py71
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py10
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py269
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py15
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py8
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py6
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py15
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py99
-rwxr-xr-ximport-layers/yocto-poky/bitbake/lib/bb/main.py288
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/monitordisk.py4
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/msg.py12
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py62
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py2
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py16
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/persist_data.py6
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/process.py36
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/progress.py276
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/providers.py7
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/pysh/builtin.py2
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/pysh/interp.py4
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/pysh/pyshlex.py2
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/pysh/pyshyacc.py4
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/runqueue.py1577
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/server/process.py16
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/server/xmlrpc.py86
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/siggen.py66
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/taskdata.py414
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/tests/codeparser.py12
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/tests/cow.py88
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/tests/data.py12
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py58
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/tests/parse.py19
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py46
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py743
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/__init__.py17
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/__init__.py0
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py44
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py70
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py219
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py172
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py298
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/propertydialog.py437
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py122
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hobcolor.py38
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hobwidget.py904
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/persistenttooltip.py186
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/progress.py23
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/progressbar.py59
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/puccho.glade606
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/runningbuild.py551
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/utils.py34
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/depexp.py183
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/goggle.py121
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/knotty.py207
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/ncurses.py8
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/toasterui.py87
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/uievent.py6
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/ui/uihelper.py17
-rw-r--r--import-layers/yocto-poky/bitbake/lib/bb/utils.py102
82 files changed, 4256 insertions, 6519 deletions
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/COW.py b/import-layers/yocto-poky/bitbake/lib/bb/COW.py
index 6917ec378..77a05cfe3 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/COW.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/COW.py
@@ -23,19 +23,17 @@
# Assign a file to __warn__ to get warnings about slow operations.
#
-from __future__ import print_function
+
import copy
import types
ImmutableTypes = (
- types.NoneType,
bool,
complex,
float,
int,
- long,
tuple,
frozenset,
- basestring
+ str
)
MUTABLE = "__mutable__"
@@ -61,7 +59,7 @@ class COWDictMeta(COWMeta):
__call__ = cow
def __setitem__(cls, key, value):
- if not isinstance(value, ImmutableTypes):
+ if value is not None and not isinstance(value, ImmutableTypes):
if not isinstance(value, COWMeta):
cls.__hasmutable__ = True
key += MUTABLE
@@ -116,7 +114,7 @@ class COWDictMeta(COWMeta):
cls.__setitem__(key, cls.__marker__)
def __revertitem__(cls, key):
- if not cls.__dict__.has_key(key):
+ if key not in cls.__dict__:
key += MUTABLE
delattr(cls, key)
@@ -183,7 +181,7 @@ class COWSetMeta(COWDictMeta):
COWDictMeta.__delitem__(cls, repr(hash(value)))
def __in__(cls, value):
- return COWDictMeta.has_key(repr(hash(value)))
+ return repr(hash(value)) in COWDictMeta
def iterkeys(cls):
raise TypeError("sets don't have keys")
@@ -192,12 +190,10 @@ class COWSetMeta(COWDictMeta):
raise TypeError("sets don't have 'items'")
# These are the actual classes you use!
-class COWDictBase(object):
- __metaclass__ = COWDictMeta
+class COWDictBase(object, metaclass = COWDictMeta):
__count__ = 0
-class COWSetBase(object):
- __metaclass__ = COWSetMeta
+class COWSetBase(object, metaclass = COWSetMeta):
__count__ = 0
if __name__ == "__main__":
@@ -217,11 +213,11 @@ if __name__ == "__main__":
print()
print("a", a)
- for x in a.iteritems():
+ for x in a.items():
print(x)
print("--")
print("b", b)
- for x in b.iteritems():
+ for x in b.items():
print(x)
print()
@@ -229,11 +225,11 @@ if __name__ == "__main__":
b['a'] = 'c'
print("a", a)
- for x in a.iteritems():
+ for x in a.items():
print(x)
print("--")
print("b", b)
- for x in b.iteritems():
+ for x in b.items():
print(x)
print()
@@ -248,22 +244,22 @@ if __name__ == "__main__":
a['set'].add("o2")
print("a", a)
- for x in a['set'].itervalues():
+ for x in a['set'].values():
print(x)
print("--")
print("b", b)
- for x in b['set'].itervalues():
+ for x in b['set'].values():
print(x)
print()
b['set'].add('o3')
print("a", a)
- for x in a['set'].itervalues():
+ for x in a['set'].values():
print(x)
print("--")
print("b", b)
- for x in b['set'].itervalues():
+ for x in b['set'].values():
print(x)
print()
@@ -273,7 +269,7 @@ if __name__ == "__main__":
a['set2'].add("o2")
print("a", a)
- for x in a.iteritems():
+ for x in a.items():
print(x)
print("--")
print("b", b)
@@ -287,13 +283,13 @@ if __name__ == "__main__":
except KeyError:
print("Yay! deleted key raises error")
- if b.has_key('b'):
+ if 'b' in b:
print("Boo!")
else:
print("Yay - has_key with delete works!")
print("a", a)
- for x in a.iteritems():
+ for x in a.items():
print(x)
print("--")
print("b", b)
@@ -304,7 +300,7 @@ if __name__ == "__main__":
b.__revertitem__('b')
print("a", a)
- for x in a.iteritems():
+ for x in a.items():
print(x)
print("--")
print("b", b)
@@ -314,7 +310,7 @@ if __name__ == "__main__":
b.__revertitem__('dict')
print("a", a)
- for x in a.iteritems():
+ for x in a.items():
print(x)
print("--")
print("b", b)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/__init__.py b/import-layers/yocto-poky/bitbake/lib/bb/__init__.py
index 502ad839e..f019d4831 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/__init__.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/__init__.py
@@ -21,11 +21,11 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-__version__ = "1.30.0"
+__version__ = "1.32.0"
import sys
-if sys.version_info < (2, 7, 3):
- raise RuntimeError("Sorry, python 2.7.3 or later is required for this version of bitbake")
+if sys.version_info < (3, 4, 0):
+ raise RuntimeError("Sorry, python 3.4.0 or later is required for this version of bitbake")
class BBHandledException(Exception):
@@ -84,8 +84,8 @@ def plain(*args):
mainlogger.plain(''.join(args))
def debug(lvl, *args):
- if isinstance(lvl, basestring):
- mainlogger.warn("Passed invalid debug level '%s' to bb.debug", lvl)
+ if isinstance(lvl, str):
+ mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl)
args = (lvl,) + args
lvl = 1
mainlogger.debug(lvl, ''.join(args))
@@ -94,7 +94,7 @@ def note(*args):
mainlogger.info(''.join(args))
def warn(*args):
- mainlogger.warn(''.join(args))
+ mainlogger.warning(''.join(args))
def error(*args, **kwargs):
mainlogger.error(''.join(args), extra=kwargs)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/build.py b/import-layers/yocto-poky/bitbake/lib/bb/build.py
index db5072cb4..c4c8aeb64 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/build.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/build.py
@@ -35,8 +35,8 @@ import stat
import bb
import bb.msg
import bb.process
-from contextlib import nested
-from bb import event, utils
+import bb.progress
+from bb import data, event, utils
bblogger = logging.getLogger('BitBake')
logger = logging.getLogger('BitBake.Build')
@@ -61,8 +61,13 @@ def reset_cache():
# in all namespaces, hence we add them to __builtins__.
# If we do not do this and use the exec globals, they will
# not be available to subfunctions.
-__builtins__['bb'] = bb
-__builtins__['os'] = os
+if hasattr(__builtins__, '__setitem__'):
+ builtins = __builtins__
+else:
+ builtins = __builtins__.__dict__
+
+builtins['bb'] = bb
+builtins['os'] = os
class FuncFailed(Exception):
def __init__(self, name = None, logfile = None):
@@ -133,6 +138,25 @@ class TaskInvalid(TaskBase):
super(TaskInvalid, self).__init__(task, None, metadata)
self._message = "No such task '%s'" % task
+class TaskProgress(event.Event):
+ """
+ Task made some progress that could be reported to the user, usually in
+ the form of a progress bar or similar.
+ NOTE: this class does not inherit from TaskBase since it doesn't need
+ to - it's fired within the task context itself, so we don't have any of
+ the context information that you do in the case of the other events.
+ The event PID can be used to determine which task it came from.
+ The progress value is normally 0-100, but can also be negative
+ indicating that progress has been made but we aren't able to determine
+ how much.
+ The rate is optional, this is simply an extra string to display to the
+ user if specified.
+ """
+ def __init__(self, progress, rate=None):
+ self.progress = progress
+ self.rate = rate
+ event.Event.__init__(self)
+
class LogTee(object):
def __init__(self, logger, outfile):
@@ -164,11 +188,10 @@ class LogTee(object):
def exec_func(func, d, dirs = None, pythonexception=False):
"""Execute a BB 'function'"""
- body = d.getVar(func, False)
- if not body:
- if body is None:
- logger.warn("Function %s doesn't exist", func)
- return
+ try:
+ oldcwd = os.getcwd()
+ except:
+ oldcwd = None
flags = d.getVarFlags(func)
cleandirs = flags.get('cleandirs')
@@ -187,8 +210,13 @@ def exec_func(func, d, dirs = None, pythonexception=False):
bb.utils.mkdirhier(adir)
adir = dirs[-1]
else:
- adir = d.getVar('B', True)
- bb.utils.mkdirhier(adir)
+ adir = None
+
+ body = d.getVar(func, False)
+ if not body:
+ if body is None:
+ logger.warning("Function %s doesn't exist", func)
+ return
ispython = flags.get('python')
@@ -233,6 +261,18 @@ def exec_func(func, d, dirs = None, pythonexception=False):
else:
exec_func_shell(func, d, runfile, cwd=adir)
+ try:
+ curcwd = os.getcwd()
+ except:
+ curcwd = None
+
+ if oldcwd and curcwd != oldcwd:
+ try:
+ bb.warn("Task %s changed cwd to %s" % (func, curcwd))
+ os.chdir(oldcwd)
+ except:
+ pass
+
_functionfmt = """
{function}(d)
"""
@@ -248,7 +288,8 @@ def exec_func_python(func, d, runfile, cwd=None, pythonexception=False):
if cwd:
try:
olddir = os.getcwd()
- except OSError:
+ except OSError as e:
+ bb.warn("%s: Cannot get cwd: %s" % (func, e))
olddir = None
os.chdir(cwd)
@@ -274,8 +315,8 @@ def exec_func_python(func, d, runfile, cwd=None, pythonexception=False):
if cwd and olddir:
try:
os.chdir(olddir)
- except OSError:
- pass
+ except OSError as e:
+ bb.warn("%s: Cannot restore cwd %s: %s" % (func, olddir, e))
def shell_trap_code():
return '''#!/bin/sh\n
@@ -323,7 +364,7 @@ trap '' 0
exit $ret
''')
- os.chmod(runfile, 0775)
+ os.chmod(runfile, 0o775)
cmd = runfile
if d.getVarFlag(func, 'fakeroot', False):
@@ -336,41 +377,64 @@ exit $ret
else:
logfile = sys.stdout
+ progress = d.getVarFlag(func, 'progress', True)
+ if progress:
+ if progress == 'percent':
+ # Use default regex
+ logfile = bb.progress.BasicProgressHandler(d, outfile=logfile)
+ elif progress.startswith('percent:'):
+ # Use specified regex
+ logfile = bb.progress.BasicProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
+ elif progress.startswith('outof:'):
+ # Use specified regex
+ logfile = bb.progress.OutOfProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
+ else:
+ bb.warn('%s: invalid task progress varflag value "%s", ignoring' % (func, progress))
+
+ fifobuffer = bytearray()
def readfifo(data):
- lines = data.split('\0')
- for line in lines:
- splitval = line.split(' ', 1)
- cmd = splitval[0]
- if len(splitval) > 1:
- value = splitval[1]
+ nonlocal fifobuffer
+ fifobuffer.extend(data)
+ while fifobuffer:
+ message, token, nextmsg = fifobuffer.partition(b"\00")
+ if token:
+ splitval = message.split(b' ', 1)
+ cmd = splitval[0].decode("utf-8")
+ if len(splitval) > 1:
+ value = splitval[1].decode("utf-8")
+ else:
+ value = ''
+ if cmd == 'bbplain':
+ bb.plain(value)
+ elif cmd == 'bbnote':
+ bb.note(value)
+ elif cmd == 'bbwarn':
+ bb.warn(value)
+ elif cmd == 'bberror':
+ bb.error(value)
+ elif cmd == 'bbfatal':
+ # The caller will call exit themselves, so bb.error() is
+ # what we want here rather than bb.fatal()
+ bb.error(value)
+ elif cmd == 'bbfatal_log':
+ bb.error(value, forcelog=True)
+ elif cmd == 'bbdebug':
+ splitval = value.split(' ', 1)
+ level = int(splitval[0])
+ value = splitval[1]
+ bb.debug(level, value)
+ else:
+ bb.warn("Unrecognised command '%s' on FIFO" % cmd)
+ fifobuffer = nextmsg
else:
- value = ''
- if cmd == 'bbplain':
- bb.plain(value)
- elif cmd == 'bbnote':
- bb.note(value)
- elif cmd == 'bbwarn':
- bb.warn(value)
- elif cmd == 'bberror':
- bb.error(value)
- elif cmd == 'bbfatal':
- # The caller will call exit themselves, so bb.error() is
- # what we want here rather than bb.fatal()
- bb.error(value)
- elif cmd == 'bbfatal_log':
- bb.error(value, forcelog=True)
- elif cmd == 'bbdebug':
- splitval = value.split(' ', 1)
- level = int(splitval[0])
- value = splitval[1]
- bb.debug(level, value)
+ break
tempdir = d.getVar('T', True)
fifopath = os.path.join(tempdir, 'fifo.%s' % os.getpid())
if os.path.exists(fifopath):
os.unlink(fifopath)
os.mkfifo(fifopath)
- with open(fifopath, 'r+') as fifo:
+ with open(fifopath, 'r+b', buffering=0) as fifo:
try:
bb.debug(2, "Executing shell function %s" % func)
@@ -501,21 +565,32 @@ def _exec_task(fn, task, d, quieterr):
flags = localdata.getVarFlags(task)
- event.fire(TaskStarted(task, logfn, flags, localdata), localdata)
try:
- for func in (prefuncs or '').split():
- exec_func(func, localdata)
- exec_func(task, localdata)
- for func in (postfuncs or '').split():
- exec_func(func, localdata)
- except FuncFailed as exc:
- if quieterr:
- event.fire(TaskFailedSilent(task, logfn, localdata), localdata)
- else:
- errprinted = errchk.triggered
+ try:
+ event.fire(TaskStarted(task, logfn, flags, localdata), localdata)
+ except (bb.BBHandledException, SystemExit):
+ return 1
+ except FuncFailed as exc:
logger.error(str(exc))
- event.fire(TaskFailed(task, logfn, localdata, errprinted), localdata)
- return 1
+ return 1
+
+ try:
+ for func in (prefuncs or '').split():
+ exec_func(func, localdata)
+ exec_func(task, localdata)
+ for func in (postfuncs or '').split():
+ exec_func(func, localdata)
+ except FuncFailed as exc:
+ if quieterr:
+ event.fire(TaskFailedSilent(task, logfn, localdata), localdata)
+ else:
+ errprinted = errchk.triggered
+ logger.error(str(exc))
+ event.fire(TaskFailed(task, logfn, localdata, errprinted), localdata)
+ return 1
+ except bb.BBHandledException:
+ event.fire(TaskFailed(task, logfn, localdata, True), localdata)
+ return 1
finally:
sys.stdout.flush()
sys.stderr.flush()
@@ -575,7 +650,7 @@ def exec_task(fn, task, d, profile = False):
event.fire(failedevent, d)
return 1
-def stamp_internal(taskname, d, file_name, baseonly=False):
+def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False):
"""
Internal stamp helper function
Makes sure the stamp directory exists
@@ -598,6 +673,8 @@ def stamp_internal(taskname, d, file_name, baseonly=False):
if baseonly:
return stamp
+ if noextra:
+ extrainfo = ""
if not stamp:
return
@@ -693,12 +770,12 @@ def write_taint(task, d, file_name = None):
with open(taintfn, 'w') as taintf:
taintf.write(str(uuid.uuid4()))
-def stampfile(taskname, d, file_name = None):
+def stampfile(taskname, d, file_name = None, noextra=False):
"""
Return the stamp for a given task
(d can be a data dict or dataCache)
"""
- return stamp_internal(taskname, d, file_name)
+ return stamp_internal(taskname, d, file_name, noextra=noextra)
def add_tasks(tasklist, d):
task_deps = d.getVar('_task_deps', False)
@@ -774,6 +851,7 @@ def deltask(task, d):
bbtasks = d.getVar('__BBTASKS', False) or []
if task in bbtasks:
bbtasks.remove(task)
+ d.delVarFlag(task, 'task')
d.setVar('__BBTASKS', bbtasks)
d.delVarFlag(task, 'deps')
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/cache.py b/import-layers/yocto-poky/bitbake/lib/bb/cache.py
index af5b9fbc6..dd9cfdfac 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/cache.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/cache.py
@@ -28,22 +28,16 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
import os
+import sys
import logging
+import pickle
from collections import defaultdict
import bb.utils
logger = logging.getLogger("BitBake.Cache")
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info("Importing cPickle failed. "
- "Falling back to a very slow implementation.")
-
-__cache_version__ = "149"
+__cache_version__ = "150"
def getCacheFile(path, filename, data_hash):
return os.path.join(path, filename + "." + data_hash)
@@ -80,7 +74,7 @@ class RecipeInfoCommon(object):
out_dict = dict((var, metadata.getVarFlag(var, flag, True))
for var in varlist)
if squash:
- return dict((k,v) for (k,v) in out_dict.iteritems() if v)
+ return dict((k,v) for (k,v) in out_dict.items() if v)
else:
return out_dict
@@ -240,7 +234,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
cachedata.universe_target.append(self.pn)
cachedata.hashfn[fn] = self.hashfilename
- for task, taskhash in self.basetaskhashes.iteritems():
+ for task, taskhash in self.basetaskhashes.items():
identifier = '%s.%s' % (fn, task)
cachedata.basetaskhash[identifier] = taskhash
@@ -250,14 +244,136 @@ class CoreRecipeInfo(RecipeInfoCommon):
cachedata.fakerootdirs[fn] = self.fakerootdirs
cachedata.extradepsfunc[fn] = self.extradepsfunc
+def virtualfn2realfn(virtualfn):
+ """
+ Convert a virtual file name to a real one + the associated subclass keyword
+ """
+ mc = ""
+ if virtualfn.startswith('multiconfig:'):
+ elems = virtualfn.split(':')
+ mc = elems[1]
+ virtualfn = ":".join(elems[2:])
+
+ fn = virtualfn
+ cls = ""
+ if virtualfn.startswith('virtual:'):
+ elems = virtualfn.split(':')
+ cls = ":".join(elems[1:-1])
+ fn = elems[-1]
+
+ return (fn, cls, mc)
+
+def realfn2virtual(realfn, cls, mc):
+ """
+ Convert a real filename + the associated subclass keyword to a virtual filename
+ """
+ if cls:
+ realfn = "virtual:" + cls + ":" + realfn
+ if mc:
+ realfn = "multiconfig:" + mc + ":" + realfn
+ return realfn
+
+def variant2virtual(realfn, variant):
+ """
+ Convert a real filename + the associated subclass keyword to a virtual filename
+ """
+ if variant == "":
+ return realfn
+ if variant.startswith("multiconfig:"):
+ elems = variant.split(":")
+ if elems[2]:
+ return "multiconfig:" + elems[1] + ":virtual:" + ":".join(elems[2:]) + ":" + realfn
+ return "multiconfig:" + elems[1] + ":" + realfn
+ return "virtual:" + variant + ":" + realfn
+
+def parse_recipe(bb_data, bbfile, appends, mc=''):
+ """
+ Parse a recipe
+ """
+
+ chdir_back = False
+
+ bb_data.setVar("__BBMULTICONFIG", mc)
+
+ # expand tmpdir to include this topdir
+ bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR', True) or "")
+ bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
+ oldpath = os.path.abspath(os.getcwd())
+ bb.parse.cached_mtime_noerror(bbfile_loc)
+
+ # The ConfHandler first looks if there is a TOPDIR and if not
+ # then it would call getcwd().
+ # Previously, we chdir()ed to bbfile_loc, called the handler
+ # and finally chdir()ed back, a couple of thousand times. We now
+ # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
+ if not bb_data.getVar('TOPDIR', False):
+ chdir_back = True
+ bb_data.setVar('TOPDIR', bbfile_loc)
+ try:
+ if appends:
+ bb_data.setVar('__BBAPPEND', " ".join(appends))
+ bb_data = bb.parse.handle(bbfile, bb_data)
+ if chdir_back:
+ os.chdir(oldpath)
+ return bb_data
+ except:
+ if chdir_back:
+ os.chdir(oldpath)
+ raise
+
+
+
+class NoCache(object):
+
+ def __init__(self, databuilder):
+ self.databuilder = databuilder
+ self.data = databuilder.data
+
+ def loadDataFull(self, virtualfn, appends):
+ """
+ Return a complete set of data for fn.
+ To do this, we need to parse the file.
+ """
+ logger.debug(1, "Parsing %s (full)" % virtualfn)
+ (fn, virtual, mc) = virtualfn2realfn(virtualfn)
+ bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
+ return bb_data[virtual]
+
+ def load_bbfile(self, bbfile, appends, virtonly = False):
+ """
+ Load and parse one .bb build file
+ Return the data and whether parsing resulted in the file being skipped
+ """
+
+ if virtonly:
+ (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
+ bb_data = self.databuilder.mcdata[mc].createCopy()
+ bb_data.setVar("__ONLYFINALISE", virtual or "default")
+ datastores = parse_recipe(bb_data, bbfile, appends, mc)
+ return datastores
+ bb_data = self.data.createCopy()
+ datastores = parse_recipe(bb_data, bbfile, appends)
-class Cache(object):
+ for mc in self.databuilder.mcdata:
+ if not mc:
+ continue
+ bb_data = self.databuilder.mcdata[mc].createCopy()
+ newstores = parse_recipe(bb_data, bbfile, appends, mc)
+ for ns in newstores:
+ datastores["multiconfig:%s:%s" % (mc, ns)] = newstores[ns]
+
+ return datastores
+
+class Cache(NoCache):
"""
BitBake Cache implementation
"""
- def __init__(self, data, data_hash, caches_array):
+ def __init__(self, databuilder, data_hash, caches_array):
+ super().__init__(databuilder)
+ data = databuilder.data
+
# Pass caches_array information into Cache Constructor
# It will be used later for deciding whether we
# need extra cache file dump/load support
@@ -266,7 +382,6 @@ class Cache(object):
self.clean = set()
self.checked = set()
self.depends_cache = {}
- self.data = None
self.data_fn = None
self.cacheclean = True
self.data_hash = data_hash
@@ -286,72 +401,74 @@ class Cache(object):
cache_ok = True
if self.caches_array:
for cache_class in self.caches_array:
- if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
- cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
- cache_ok = cache_ok and os.path.exists(cachefile)
- cache_class.init_cacheData(self)
+ cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+ cache_ok = cache_ok and os.path.exists(cachefile)
+ cache_class.init_cacheData(self)
if cache_ok:
self.load_cachefile()
elif os.path.isfile(self.cachefile):
logger.info("Out of date cache found, rebuilding...")
def load_cachefile(self):
- # Firstly, using core cache file information for
- # valid checking
- with open(self.cachefile, "rb") as cachefile:
- pickled = pickle.Unpickler(cachefile)
- try:
- cache_ver = pickled.load()
- bitbake_ver = pickled.load()
- except Exception:
- logger.info('Invalid cache, rebuilding...')
- return
-
- if cache_ver != __cache_version__:
- logger.info('Cache version mismatch, rebuilding...')
- return
- elif bitbake_ver != bb.__version__:
- logger.info('Bitbake version mismatch, rebuilding...')
- return
-
-
cachesize = 0
previous_progress = 0
previous_percent = 0
# Calculate the correct cachesize of all those cache files
for cache_class in self.caches_array:
- if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
- cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
- with open(cachefile, "rb") as cachefile:
- cachesize += os.fstat(cachefile.fileno()).st_size
+ cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+ with open(cachefile, "rb") as cachefile:
+ cachesize += os.fstat(cachefile.fileno()).st_size
bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
for cache_class in self.caches_array:
- if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
- cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
- with open(cachefile, "rb") as cachefile:
- pickled = pickle.Unpickler(cachefile)
- while cachefile:
- try:
- key = pickled.load()
- value = pickled.load()
- except Exception:
- break
- if self.depends_cache.has_key(key):
- self.depends_cache[key].append(value)
- else:
- self.depends_cache[key] = [value]
- # only fire events on even percentage boundaries
- current_progress = cachefile.tell() + previous_progress
- current_percent = 100 * current_progress / cachesize
- if current_percent > previous_percent:
- previous_percent = current_percent
- bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
- self.data)
-
- previous_progress += current_progress
+ cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+ with open(cachefile, "rb") as cachefile:
+ pickled = pickle.Unpickler(cachefile)
+ # Check cache version information
+ try:
+ cache_ver = pickled.load()
+ bitbake_ver = pickled.load()
+ except Exception:
+ logger.info('Invalid cache, rebuilding...')
+ return
+
+ if cache_ver != __cache_version__:
+ logger.info('Cache version mismatch, rebuilding...')
+ return
+ elif bitbake_ver != bb.__version__:
+ logger.info('Bitbake version mismatch, rebuilding...')
+ return
+
+ # Load the rest of the cache file
+ current_progress = 0
+ while cachefile:
+ try:
+ key = pickled.load()
+ value = pickled.load()
+ except Exception:
+ break
+ if not isinstance(key, str):
+ bb.warn("%s from extras cache is not a string?" % key)
+ break
+ if not isinstance(value, RecipeInfoCommon):
+ bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
+ break
+
+ if key in self.depends_cache:
+ self.depends_cache[key].append(value)
+ else:
+ self.depends_cache[key] = [value]
+ # only fire events on even percentage boundaries
+ current_progress = cachefile.tell() + previous_progress
+ current_percent = 100 * current_progress / cachesize
+ if current_percent > previous_percent:
+ previous_percent = current_percent
+ bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
+ self.data)
+
+ previous_progress += current_progress
# Note: depends cache number is corresponding to the parsing file numbers.
# The same file has several caches, still regarded as one item in the cache
@@ -359,69 +476,33 @@ class Cache(object):
len(self.depends_cache)),
self.data)
-
- @staticmethod
- def virtualfn2realfn(virtualfn):
- """
- Convert a virtual file name to a real one + the associated subclass keyword
- """
-
- fn = virtualfn
- cls = ""
- if virtualfn.startswith('virtual:'):
- elems = virtualfn.split(':')
- cls = ":".join(elems[1:-1])
- fn = elems[-1]
- return (fn, cls)
-
- @staticmethod
- def realfn2virtual(realfn, cls):
- """
- Convert a real filename + the associated subclass keyword to a virtual filename
- """
- if cls == "":
- return realfn
- return "virtual:" + cls + ":" + realfn
-
- @classmethod
- def loadDataFull(cls, virtualfn, appends, cfgData):
- """
- Return a complete set of data for fn.
- To do this, we need to parse the file.
- """
-
- (fn, virtual) = cls.virtualfn2realfn(virtualfn)
-
- logger.debug(1, "Parsing %s (full)", fn)
-
- cfgData.setVar("__ONLYFINALISE", virtual or "default")
- bb_data = cls.load_bbfile(fn, appends, cfgData)
- return bb_data[virtual]
-
- @classmethod
- def parse(cls, filename, appends, configdata, caches_array):
+ def parse(self, filename, appends):
"""Parse the specified filename, returning the recipe information"""
+ logger.debug(1, "Parsing %s", filename)
infos = []
- datastores = cls.load_bbfile(filename, appends, configdata)
+ datastores = self.load_bbfile(filename, appends)
depends = []
- for variant, data in sorted(datastores.iteritems(),
+ variants = []
+ # Process the "real" fn last so we can store variants list
+ for variant, data in sorted(datastores.items(),
key=lambda i: i[0],
reverse=True):
- virtualfn = cls.realfn2virtual(filename, variant)
+ virtualfn = variant2virtual(filename, variant)
+ variants.append(variant)
depends = depends + (data.getVar("__depends", False) or [])
if depends and not variant:
data.setVar("__depends", depends)
-
+ if virtualfn == filename:
+ data.setVar("__VARIANTS", " ".join(variants))
info_array = []
- for cache_class in caches_array:
- if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
- info = cache_class(filename, data)
- info_array.append(info)
+ for cache_class in self.caches_array:
+ info = cache_class(filename, data)
+ info_array.append(info)
infos.append((virtualfn, info_array))
return infos
- def load(self, filename, appends, configdata):
+ def load(self, filename, appends):
"""Obtain the recipe information for the specified filename,
using cached values if available, otherwise parsing.
@@ -435,21 +516,20 @@ class Cache(object):
# info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
info_array = self.depends_cache[filename]
for variant in info_array[0].variants:
- virtualfn = self.realfn2virtual(filename, variant)
+ virtualfn = variant2virtual(filename, variant)
infos.append((virtualfn, self.depends_cache[virtualfn]))
else:
- logger.debug(1, "Parsing %s", filename)
return self.parse(filename, appends, configdata, self.caches_array)
return cached, infos
- def loadData(self, fn, appends, cfgData, cacheData):
+ def loadData(self, fn, appends, cacheData):
"""Load the recipe info for the specified filename,
parsing and adding to the cache if necessary, and adding
the recipe information to the supplied CacheData instance."""
skipped, virtuals = 0, 0
- cached, infos = self.load(fn, appends, cfgData)
+ cached, infos = self.load(fn, appends)
for virtualfn, info_array in infos:
if info_array[0].skipped:
logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
@@ -557,16 +637,19 @@ class Cache(object):
invalid = False
for cls in info_array[0].variants:
- virtualfn = self.realfn2virtual(fn, cls)
+ virtualfn = variant2virtual(fn, cls)
self.clean.add(virtualfn)
if virtualfn not in self.depends_cache:
logger.debug(2, "Cache: %s is not cached", virtualfn)
invalid = True
+ elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
+ logger.debug(2, "Cache: Extra caches missing for %s?" % virtualfn)
+ invalid = True
# If any one of the variants is not present, mark as invalid for all
if invalid:
for cls in info_array[0].variants:
- virtualfn = self.realfn2virtual(fn, cls)
+ virtualfn = variant2virtual(fn, cls)
if virtualfn in self.clean:
logger.debug(2, "Cache: Removing %s from cache", virtualfn)
self.clean.remove(virtualfn)
@@ -603,30 +686,19 @@ class Cache(object):
logger.debug(2, "Cache is clean, not saving.")
return
- file_dict = {}
- pickler_dict = {}
for cache_class in self.caches_array:
- if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
- cache_class_name = cache_class.__name__
- cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
- file_dict[cache_class_name] = open(cachefile, "wb")
- pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL)
-
- pickler_dict['CoreRecipeInfo'].dump(__cache_version__)
- pickler_dict['CoreRecipeInfo'].dump(bb.__version__)
-
- try:
- for key, info_array in self.depends_cache.iteritems():
- for info in info_array:
- if isinstance(info, RecipeInfoCommon):
- cache_class_name = info.__class__.__name__
- pickler_dict[cache_class_name].dump(key)
- pickler_dict[cache_class_name].dump(info)
- finally:
- for cache_class in self.caches_array:
- if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
- cache_class_name = cache_class.__name__
- file_dict[cache_class_name].close()
+ cache_class_name = cache_class.__name__
+ cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+ with open(cachefile, "wb") as f:
+ p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
+ p.dump(__cache_version__)
+ p.dump(bb.__version__)
+
+ for key, info_array in self.depends_cache.items():
+ for info in info_array:
+ if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
+ p.dump(key)
+ p.dump(info)
del self.depends_cache
@@ -654,50 +726,13 @@ class Cache(object):
Save data we need into the cache
"""
- realfn = self.virtualfn2realfn(file_name)[0]
+ realfn = virtualfn2realfn(file_name)[0]
info_array = []
for cache_class in self.caches_array:
- if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
- info_array.append(cache_class(realfn, data))
+ info_array.append(cache_class(realfn, data))
self.add_info(file_name, info_array, cacheData, parsed)
- @staticmethod
- def load_bbfile(bbfile, appends, config):
- """
- Load and parse one .bb build file
- Return the data and whether parsing resulted in the file being skipped
- """
- chdir_back = False
-
- from bb import parse
-
- # expand tmpdir to include this topdir
- config.setVar('TMPDIR', config.getVar('TMPDIR', True) or "")
- bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
- oldpath = os.path.abspath(os.getcwd())
- parse.cached_mtime_noerror(bbfile_loc)
- bb_data = config.createCopy()
- # The ConfHandler first looks if there is a TOPDIR and if not
- # then it would call getcwd().
- # Previously, we chdir()ed to bbfile_loc, called the handler
- # and finally chdir()ed back, a couple of thousand times. We now
- # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
- if not bb_data.getVar('TOPDIR', False):
- chdir_back = True
- bb_data.setVar('TOPDIR', bbfile_loc)
- try:
- if appends:
- bb_data.setVar('__BBAPPEND', " ".join(appends))
- bb_data = parse.handle(bbfile, bb_data)
- if chdir_back:
- os.chdir(oldpath)
- return bb_data
- except:
- if chdir_back:
- os.chdir(oldpath)
- raise
-
def init(cooker):
"""
@@ -727,8 +762,9 @@ class CacheData(object):
def __init__(self, caches_array):
self.caches_array = caches_array
for cache_class in self.caches_array:
- if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
- cache_class.init_cacheData(self)
+ if not issubclass(cache_class, RecipeInfoCommon):
+ bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % cache_class)
+ cache_class.init_cacheData(self)
# Direct cache variables
self.task_queues = {}
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/checksum.py b/import-layers/yocto-poky/bitbake/lib/bb/checksum.py
index 2ec964d73..84289208f 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/checksum.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/checksum.py
@@ -19,20 +19,13 @@ import glob
import operator
import os
import stat
+import pickle
import bb.utils
import logging
from bb.cache import MultiProcessCache
logger = logging.getLogger("BitBake.Cache")
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info("Importing cPickle failed. "
- "Falling back to a very slow implementation.")
-
-
# mtime cache (non-persistent)
# based upon the assumption that files do not change during bitbake run
class FileMtimeCache(object):
@@ -127,13 +120,15 @@ class FileChecksumCache(MultiProcessCache):
checksums.extend(checksum_dir(f))
else:
checksum = checksum_file(f)
- checksums.append((f, checksum))
+ if checksum:
+ checksums.append((f, checksum))
elif os.path.isdir(pth):
if not os.path.islink(pth):
checksums.extend(checksum_dir(pth))
else:
checksum = checksum_file(pth)
- checksums.append((pth, checksum))
+ if checksum:
+ checksums.append((pth, checksum))
checksums.sort(key=operator.itemgetter(1))
return checksums
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/codeparser.py b/import-layers/yocto-poky/bitbake/lib/bb/codeparser.py
index 3ee4d5622..25938d658 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/codeparser.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/codeparser.py
@@ -1,21 +1,20 @@
import ast
+import sys
import codegen
import logging
+import pickle
+import bb.pysh as pysh
import os.path
import bb.utils, bb.data
+import hashlib
from itertools import chain
-from pysh import pyshyacc, pyshlex, sherrors
+from bb.pysh import pyshyacc, pyshlex, sherrors
from bb.cache import MultiProcessCache
-
logger = logging.getLogger('BitBake.CodeParser')
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
-
+def bbhash(s):
+ return hashlib.md5(s.encode("utf-8")).hexdigest()
def check_indent(codestr):
"""If the code is indented, add a top level piece of code to 'remove' the indentation"""
@@ -68,11 +67,12 @@ class SetCache(object):
new = []
for i in items:
- new.append(intern(i))
+ new.append(sys.intern(i))
s = frozenset(new)
- if hash(s) in self.setcache:
- return self.setcache[hash(s)]
- self.setcache[hash(s)] = s
+ h = hash(s)
+ if h in self.setcache:
+ return self.setcache[h]
+ self.setcache[h] = s
return s
codecache = SetCache()
@@ -117,7 +117,7 @@ class shellCacheLine(object):
class CodeParserCache(MultiProcessCache):
cache_file_name = "bb_codeparser.dat"
- CACHE_VERSION = 7
+ CACHE_VERSION = 8
def __init__(self):
MultiProcessCache.__init__(self)
@@ -191,6 +191,7 @@ class BufferedLogger(Logger):
class PythonParser():
getvars = (".getVar", ".appendVar", ".prependVar")
+ getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag")
containsfuncs = ("bb.utils.contains", "base_contains", "bb.utils.contains_any")
execfuncs = ("bb.build.exec_func", "bb.build.exec_task")
@@ -210,15 +211,20 @@ class PythonParser():
def visit_Call(self, node):
name = self.called_node_name(node.func)
- if name and name.endswith(self.getvars) or name in self.containsfuncs:
+ if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs):
if isinstance(node.args[0], ast.Str):
varname = node.args[0].s
if name in self.containsfuncs and isinstance(node.args[1], ast.Str):
if varname not in self.contains:
self.contains[varname] = set()
self.contains[varname].add(node.args[1].s)
- else:
- self.references.add(node.args[0].s)
+ elif name.endswith(self.getvarflags):
+ if isinstance(node.args[1], ast.Str):
+ self.references.add('%s[%s]' % (varname, node.args[1].s))
+ else:
+ self.warn(node.func, node.args[1])
+ else:
+ self.references.add(varname)
else:
self.warn(node.func, node.args[0])
elif name and name.endswith(".expand"):
@@ -268,7 +274,7 @@ class PythonParser():
if not node or not node.strip():
return
- h = hash(str(node))
+ h = bbhash(str(node))
if h in codeparsercache.pythoncache:
self.references = set(codeparsercache.pythoncache[h].refs)
@@ -313,7 +319,7 @@ class ShellParser():
commands it executes.
"""
- h = hash(str(value))
+ h = bbhash(str(value))
if h in codeparsercache.shellcache:
self.execs = set(codeparsercache.shellcache[h].execs)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/command.py b/import-layers/yocto-poky/bitbake/lib/bb/command.py
index 0559ffc07..caa3e4d45 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/command.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/command.py
@@ -110,7 +110,7 @@ class Command:
return False
except SystemExit as exc:
arg = exc.args[0]
- if isinstance(arg, basestring):
+ if isinstance(arg, str):
self.finishAsyncCommand(arg)
else:
self.finishAsyncCommand("Exited with %s" % arg)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/cooker.py b/import-layers/yocto-poky/bitbake/lib/bb/cooker.py
index 9b565fc37..42831e277 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/cooker.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/cooker.py
@@ -22,7 +22,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-from __future__ import print_function
+
import sys, os, glob, os.path, re, time
import atexit
import itertools
@@ -30,18 +30,21 @@ import logging
import multiprocessing
import sre_constants
import threading
-from cStringIO import StringIO
+from io import StringIO, UnsupportedOperation
from contextlib import closing
from functools import wraps
-from collections import defaultdict
+from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
-import Queue
+import queue
import signal
import subprocess
import errno
import prserv.serv
import pyinotify
+import json
+import pickle
+import codecs
logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
@@ -65,7 +68,7 @@ class CollectionError(bb.BBHandledException):
"""
class state:
- initial, parsing, running, shutdown, forceshutdown, stopped, error = range(7)
+ initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))
@classmethod
def get_name(cls, code):
@@ -93,7 +96,7 @@ class SkippedPackage:
class CookerFeatures(object):
- _feature_list = [HOB_EXTRA_CACHES, SEND_DEPENDS_TREE, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(4)
+ _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))
def __init__(self):
self._features=set()
@@ -110,9 +113,49 @@ class CookerFeatures(object):
def __iter__(self):
return self._features.__iter__()
- def next(self):
- return self._features.next()
+ def __next__(self):
+ return next(self._features)
+
+
+class EventWriter:
+ def __init__(self, cooker, eventfile):
+ self.file_inited = None
+ self.cooker = cooker
+ self.eventfile = eventfile
+ self.event_queue = []
+
+ def write_event(self, event):
+ with open(self.eventfile, "a") as f:
+ try:
+ str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
+ f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
+ "vars": str_event}))
+ except Exception as err:
+ import traceback
+ print(err, traceback.format_exc())
+
+ def send(self, event):
+ if self.file_inited:
+ # we have the file, just write the event
+ self.write_event(event)
+ else:
+ # init on bb.event.BuildStarted
+ name = "%s.%s" % (event.__module__, event.__class__.__name__)
+ if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
+ with open(self.eventfile, "w") as f:
+ f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
+ self.file_inited = True
+
+ # write pending events
+ for evt in self.event_queue:
+ self.write_event(evt)
+
+ # also write the current event
+ self.write_event(event)
+ else:
+ # queue all events until the file is inited
+ self.event_queue.append(event)
#============================================================================#
# BBCooker
@@ -123,7 +166,7 @@ class BBCooker:
"""
def __init__(self, configuration, featureSet=None):
- self.recipecache = None
+ self.recipecaches = None
self.skiplist = {}
self.featureset = CookerFeatures()
if featureSet:
@@ -151,6 +194,13 @@ class BBCooker:
self.initConfigurationData()
+ # we log all events to a file if so directed
+ if self.configuration.writeeventlog:
+ # register the log file writer as UI Handler
+ writer = EventWriter(self, self.configuration.writeeventlog)
+ EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
+ bb.event.register_UIHhandler(EventLogWriteHandler(writer))
+
self.inotify_modified_files = []
def _process_inotify_updates(server, notifier_list, abort):
@@ -180,14 +230,17 @@ class BBCooker:
pass
# TOSTOP must not be set or our children will hang when they output
- fd = sys.stdout.fileno()
- if os.isatty(fd):
- import termios
- tcattr = termios.tcgetattr(fd)
- if tcattr[3] & termios.TOSTOP:
- buildlog.info("The terminal had the TOSTOP bit set, clearing...")
- tcattr[3] = tcattr[3] & ~termios.TOSTOP
- termios.tcsetattr(fd, termios.TCSANOW, tcattr)
+ try:
+ fd = sys.stdout.fileno()
+ if os.isatty(fd):
+ import termios
+ tcattr = termios.tcgetattr(fd)
+ if tcattr[3] & termios.TOSTOP:
+ buildlog.info("The terminal had the TOSTOP bit set, clearing...")
+ tcattr[3] = tcattr[3] & ~termios.TOSTOP
+ termios.tcsetattr(fd, termios.TCSANOW, tcattr)
+ except UnsupportedOperation:
+ pass
self.command = bb.command.Command(self)
self.state = state.initial
@@ -301,74 +354,6 @@ class BBCooker:
if consolelog:
self.data.setVar("BB_CONSOLELOG", consolelog)
- # we log all events to a file if so directed
- if self.configuration.writeeventlog:
- import json, pickle
- DEFAULT_EVENTFILE = self.configuration.writeeventlog
- class EventLogWriteHandler():
-
- class EventWriter():
- def __init__(self, cooker):
- self.file_inited = None
- self.cooker = cooker
- self.event_queue = []
-
- def init_file(self):
- try:
- # delete the old log
- os.remove(DEFAULT_EVENTFILE)
- except:
- pass
-
- # write current configuration data
- with open(DEFAULT_EVENTFILE, "w") as f:
- f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
-
- def write_event(self, event):
- with open(DEFAULT_EVENTFILE, "a") as f:
- try:
- f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) }))
- except Exception as e:
- import traceback
- print(e, traceback.format_exc(e))
-
-
- def send(self, event):
- event_class = event.__module__ + "." + event.__class__.__name__
-
- # init on bb.event.BuildStarted
- if self.file_inited is None:
- if event_class == "bb.event.BuildStarted":
- self.init_file()
- self.file_inited = True
-
- # write pending events
- for e in self.event_queue:
- self.write_event(e)
-
- # also write the current event
- self.write_event(event)
-
- else:
- # queue all events until the file is inited
- self.event_queue.append(event)
-
- else:
- # we have the file, just write the event
- self.write_event(event)
-
- # set our handler's event processor
- event = EventWriter(self) # self is the cooker here
-
-
- # set up cooker features for this mock UI handler
-
- # we need to write the dependency tree in the log
- self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE)
- # register the log file writer as UI Handler
- bb.event.register_UIHhandler(EventLogWriteHandler())
-
-
#
# Copy of the data store which has been expanded.
# Used for firing events and accessing variables where expansion needs to be accounted for
@@ -539,11 +524,14 @@ class BBCooker:
nice = int(nice) - curnice
buildlog.verbose("Renice to %s " % os.nice(nice))
- if self.recipecache:
- del self.recipecache
- self.recipecache = bb.cache.CacheData(self.caches_array)
+ if self.recipecaches:
+ del self.recipecaches
+ self.multiconfigs = self.databuilder.mcdata.keys()
+ self.recipecaches = {}
+ for mc in self.multiconfigs:
+ self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
- self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )
+ self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS", True))
def updateConfigOpts(self, options, environment):
clean = True
@@ -587,8 +575,8 @@ class BBCooker:
def showVersions(self):
- pkg_pn = self.recipecache.pkg_pn
- (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecache, pkg_pn)
+ pkg_pn = self.recipecaches[''].pkg_pn
+ (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecaches[''], pkg_pn)
logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
@@ -619,25 +607,25 @@ class BBCooker:
# this showEnvironment() code path doesn't use the cache
self.parseConfiguration()
- fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
+ fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
fn = self.matchFile(fn)
- fn = bb.cache.Cache.realfn2virtual(fn, cls)
+ fn = bb.cache.realfn2virtual(fn, cls, mc)
elif len(pkgs_to_build) == 1:
ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
if pkgs_to_build[0] in set(ignore.split()):
bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
- taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)
+ taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)
- targetid = taskdata.getbuild_id(pkgs_to_build[0])
- fnid = taskdata.build_targets[targetid][0]
- fn = taskdata.fn_index[fnid]
+ mc = runlist[0][0]
+ fn = runlist[0][3]
else:
envdata = self.data
if fn:
try:
- envdata = bb.cache.Cache.loadDataFull(fn, self.collection.get_file_appends(fn), self.data)
+ bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
+ envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
except Exception as e:
parselog.exception("Unable to read %s", fn)
raise
@@ -656,7 +644,7 @@ class BBCooker:
# emit the metadata which isnt valid shell
data.expandKeys(envdata)
for e in envdata.keys():
- if data.getVarFlag( e, 'python', envdata ):
+ if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
@@ -670,30 +658,44 @@ class BBCooker:
if task is None:
task = self.configuration.cmd
- fulltargetlist = self.checkPackages(pkgs_to_build)
+ fulltargetlist = self.checkPackages(pkgs_to_build, task)
+ taskdata = {}
+ localdata = {}
- localdata = data.createCopy(self.data)
- bb.data.update_data(localdata)
- bb.data.expandKeys(localdata)
- taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
+ for mc in self.multiconfigs:
+ taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
+ localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
+ bb.data.update_data(localdata[mc])
+ bb.data.expandKeys(localdata[mc])
current = 0
runlist = []
for k in fulltargetlist:
+ mc = ""
+ if k.startswith("multiconfig:"):
+ mc = k.split(":")[1]
+ k = ":".join(k.split(":")[2:])
ktask = task
if ":do_" in k:
k2 = k.split(":do_")
k = k2[0]
ktask = k2[1]
- taskdata.add_provider(localdata, self.recipecache, k)
+ taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
current += 1
if not ktask.startswith("do_"):
ktask = "do_%s" % ktask
- runlist.append([k, ktask])
+ if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
+ # e.g. in ASSUME_PROVIDED
+ continue
+ fn = taskdata[mc].build_targets[k][0]
+ runlist.append([mc, k, ktask, fn])
bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
- taskdata.add_unresolved(localdata, self.recipecache)
+
+ for mc in self.multiconfigs:
+ taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
+
bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
- return taskdata, runlist, fulltargetlist
+ return taskdata, runlist
def prepareTreeData(self, pkgs_to_build, task):
"""
@@ -702,7 +704,7 @@ class BBCooker:
# We set abort to False here to prevent unbuildable targets raising
# an exception when we're just generating data
- taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
+ taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
return runlist, taskdata
@@ -714,13 +716,18 @@ class BBCooker:
information.
"""
runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
- rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
+ rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
rq.rqdata.prepare()
return self.buildDependTree(rq, taskdata)
+ @staticmethod
+ def add_mc_prefix(mc, pn):
+ if mc:
+ return "multiconfig:%s.%s" % (mc, pn)
+ return pn
def buildDependTree(self, rq, taskdata):
- seen_fnids = []
+ seen_fns = []
depend_tree = {}
depend_tree["depends"] = {}
depend_tree["tdepends"] = {}
@@ -730,25 +737,26 @@ class BBCooker:
depend_tree["rdepends-pkg"] = {}
depend_tree["rrecs-pkg"] = {}
depend_tree['providermap'] = {}
- depend_tree["layer-priorities"] = self.recipecache.bbfile_config_priorities
-
- for name, fn in taskdata.get_providermap().iteritems():
- pn = self.recipecache.pkg_fn[fn]
- if name != pn:
- version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
- depend_tree['providermap'][name] = (pn, version)
-
- for task in xrange(len(rq.rqdata.runq_fnid)):
- taskname = rq.rqdata.runq_task[task]
- fnid = rq.rqdata.runq_fnid[task]
- fn = taskdata.fn_index[fnid]
- pn = self.recipecache.pkg_fn[fn]
- version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
+ depend_tree["layer-priorities"] = self.bbfile_config_priorities
+
+ for mc in taskdata:
+ for name, fn in list(taskdata[mc].get_providermap().items()):
+ pn = self.recipecaches[mc].pkg_fn[fn]
+ pn = self.add_mc_prefix(mc, pn)
+ if name != pn:
+ version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
+ depend_tree['providermap'][name] = (pn, version)
+
+ for tid in rq.rqdata.runtaskentries:
+ (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
+ pn = self.recipecaches[mc].pkg_fn[taskfn]
+ pn = self.add_mc_prefix(mc, pn)
+ version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
if pn not in depend_tree["pn"]:
depend_tree["pn"][pn] = {}
- depend_tree["pn"][pn]["filename"] = fn
+ depend_tree["pn"][pn]["filename"] = taskfn
depend_tree["pn"][pn]["version"] = version
- depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)
+ depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
# if we have extra caches, list all attributes they bring in
extra_info = []
@@ -759,36 +767,36 @@ class BBCooker:
# for all attributes stored, add them to the dependency tree
for ei in extra_info:
- depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]
+ depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
- for dep in rq.rqdata.runq_depends[task]:
- depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
- deppn = self.recipecache.pkg_fn[depfn]
- dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
+ for dep in rq.rqdata.runtaskentries[tid].depends:
+ (depmc, depfn, deptaskname, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
+ deppn = self.recipecaches[mc].pkg_fn[deptaskfn]
+ dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
if not dotname in depend_tree["tdepends"]:
depend_tree["tdepends"][dotname] = []
- depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
- if fnid not in seen_fnids:
- seen_fnids.append(fnid)
+ depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
+ if taskfn not in seen_fns:
+ seen_fns.append(taskfn)
packages = []
depend_tree["depends"][pn] = []
- for dep in taskdata.depids[fnid]:
- depend_tree["depends"][pn].append(taskdata.build_names_index[dep])
+ for dep in taskdata[mc].depids[taskfn]:
+ depend_tree["depends"][pn].append(dep)
depend_tree["rdepends-pn"][pn] = []
- for rdep in taskdata.rdepids[fnid]:
- depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])
+ for rdep in taskdata[mc].rdepids[taskfn]:
+ depend_tree["rdepends-pn"][pn].append(rdep)
- rdepends = self.recipecache.rundeps[fn]
+ rdepends = self.recipecaches[mc].rundeps[taskfn]
for package in rdepends:
depend_tree["rdepends-pkg"][package] = []
for rdepend in rdepends[package]:
depend_tree["rdepends-pkg"][package].append(rdepend)
packages.append(package)
- rrecs = self.recipecache.runrecs[fn]
+ rrecs = self.recipecaches[mc].runrecs[taskfn]
for package in rrecs:
depend_tree["rrecs-pkg"][package] = []
for rdepend in rrecs[package]:
@@ -800,7 +808,7 @@ class BBCooker:
if package not in depend_tree["packages"]:
depend_tree["packages"][package] = {}
depend_tree["packages"][package]["pn"] = pn
- depend_tree["packages"][package]["filename"] = fn
+ depend_tree["packages"][package]["filename"] = taskfn
depend_tree["packages"][package]["version"] = version
return depend_tree
@@ -811,12 +819,8 @@ class BBCooker:
Create a dependency tree of pkgs_to_build, returning the data.
"""
_, taskdata = self.prepareTreeData(pkgs_to_build, task)
- tasks_fnid = []
- if len(taskdata.tasks_name) != 0:
- for task in xrange(len(taskdata.tasks_name)):
- tasks_fnid.append(taskdata.tasks_fnid[task])
- seen_fnids = []
+ seen_fns = []
depend_tree = {}
depend_tree["depends"] = {}
depend_tree["pn"] = {}
@@ -831,51 +835,53 @@ class BBCooker:
cachefields = getattr(cache_class, 'cachefields', [])
extra_info = extra_info + cachefields
- for task in xrange(len(tasks_fnid)):
- fnid = tasks_fnid[task]
- fn = taskdata.fn_index[fnid]
- pn = self.recipecache.pkg_fn[fn]
+ tids = []
+ for mc in taskdata:
+ for tid in taskdata[mc].taskentries:
+ tids.append(tid)
+
+ for tid in tids:
+ (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
+
+ pn = self.recipecaches[mc].pkg_fn[taskfn]
+ pn = self.add_mc_prefix(mc, pn)
if pn not in depend_tree["pn"]:
depend_tree["pn"][pn] = {}
- depend_tree["pn"][pn]["filename"] = fn
- version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
+ depend_tree["pn"][pn]["filename"] = taskfn
+ version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
depend_tree["pn"][pn]["version"] = version
- rdepends = self.recipecache.rundeps[fn]
- rrecs = self.recipecache.runrecs[fn]
- depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)
+ rdepends = self.recipecaches[mc].rundeps[taskfn]
+ rrecs = self.recipecaches[mc].runrecs[taskfn]
+ depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
# for all extra attributes stored, add them to the dependency tree
for ei in extra_info:
- depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]
+ depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
- if fnid not in seen_fnids:
- seen_fnids.append(fnid)
+ if taskfn not in seen_fns:
+ seen_fns.append(taskfn)
depend_tree["depends"][pn] = []
- for dep in taskdata.depids[fnid]:
- item = taskdata.build_names_index[dep]
+ for item in taskdata[mc].depids[taskfn]:
pn_provider = ""
- targetid = taskdata.getbuild_id(item)
- if targetid in taskdata.build_targets and taskdata.build_targets[targetid]:
- id = taskdata.build_targets[targetid][0]
- fn_provider = taskdata.fn_index[id]
- pn_provider = self.recipecache.pkg_fn[fn_provider]
+ if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
+ fn_provider = taskdata[mc].build_targets[dep][0]
+ pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
else:
pn_provider = item
+ pn_provider = self.add_mc_prefix(mc, pn_provider)
depend_tree["depends"][pn].append(pn_provider)
depend_tree["rdepends-pn"][pn] = []
- for rdep in taskdata.rdepids[fnid]:
- item = taskdata.run_names_index[rdep]
+ for rdep in taskdata[mc].rdepids[taskfn]:
pn_rprovider = ""
- targetid = taskdata.getrun_id(item)
- if targetid in taskdata.run_targets and taskdata.run_targets[targetid]:
- id = taskdata.run_targets[targetid][0]
- fn_rprovider = taskdata.fn_index[id]
- pn_rprovider = self.recipecache.pkg_fn[fn_rprovider]
+ if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
+ fn_rprovider = taskdata[mc].run_targets[rdep][0]
+ pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
else:
- pn_rprovider = item
+ pn_rprovider = rdep
+ pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
depend_tree["rdepends-pn"][pn].append(pn_rprovider)
depend_tree["rdepends-pkg"].update(rdepends)
@@ -900,8 +906,8 @@ class BBCooker:
depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
# Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
- depends_file = file('pn-depends.dot', 'w' )
- buildlist_file = file('pn-buildlist', 'w' )
+ depends_file = open('pn-depends.dot', 'w' )
+ buildlist_file = open('pn-buildlist', 'w' )
print("digraph depends {", file=depends_file)
for pn in depgraph["pn"]:
fn = depgraph["pn"][pn]["filename"]
@@ -917,9 +923,10 @@ class BBCooker:
for rdepend in depgraph["rdepends-pn"][pn]:
print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
print("}", file=depends_file)
+ depends_file.close()
logger.info("PN dependencies saved to 'pn-depends.dot'")
- depends_file = file('package-depends.dot', 'w' )
+ depends_file = open('package-depends.dot', 'w' )
print("digraph depends {", file=depends_file)
for package in depgraph["packages"]:
pn = depgraph["packages"][package]["pn"]
@@ -938,9 +945,10 @@ class BBCooker:
for rdepend in depgraph["rrecs-pkg"][package]:
print('"%s" -> "%s" [style=dotted]' % (package, rdepend), file=depends_file)
print("}", file=depends_file)
+ depends_file.close()
logger.info("Package dependencies saved to 'package-depends.dot'")
- tdepends_file = file('task-depends.dot', 'w' )
+ tdepends_file = open('task-depends.dot', 'w' )
print("digraph depends {", file=tdepends_file)
for task in depgraph["tdepends"]:
(pn, taskname) = task.rsplit(".", 1)
@@ -950,13 +958,14 @@ class BBCooker:
for dep in depgraph["tdepends"][task]:
print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
print("}", file=tdepends_file)
+ tdepends_file.close()
logger.info("Task dependencies saved to 'task-depends.dot'")
def show_appends_with_no_recipes(self):
# Determine which bbappends haven't been applied
# First get list of recipes, including skipped
- recipefns = self.recipecache.pkg_fn.keys()
+ recipefns = list(self.recipecaches[''].pkg_fn.keys())
recipefns.extend(self.skiplist.keys())
# Work out list of bbappends that have been applied
@@ -980,20 +989,21 @@ class BBCooker:
def handlePrefProviders(self):
- localdata = data.createCopy(self.data)
- bb.data.update_data(localdata)
- bb.data.expandKeys(localdata)
+ for mc in self.multiconfigs:
+ localdata = data.createCopy(self.databuilder.mcdata[mc])
+ bb.data.update_data(localdata)
+ bb.data.expandKeys(localdata)
- # Handle PREFERRED_PROVIDERS
- for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
- try:
- (providee, provider) = p.split(':')
- except:
- providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
- continue
- if providee in self.recipecache.preferred and self.recipecache.preferred[providee] != provider:
- providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecache.preferred[providee])
- self.recipecache.preferred[providee] = provider
+ # Handle PREFERRED_PROVIDERS
+ for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
+ try:
+ (providee, provider) = p.split(':')
+ except:
+ providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
+ continue
+ if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
+ providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
+ self.recipecaches[mc].preferred[providee] = provider
def findCoreBaseFiles(self, subdir, configfile):
corebase = self.data.getVar('COREBASE', True) or ""
@@ -1088,10 +1098,10 @@ class BBCooker:
"""
pkg_list = []
- for pfn in self.recipecache.pkg_fn:
- inherits = self.recipecache.inherits.get(pfn, None)
+ for pfn in self.recipecaches[''].pkg_fn:
+ inherits = self.recipecaches[''].inherits.get(pfn, None)
if inherits and klass in inherits:
- pkg_list.append(self.recipecache.pkg_fn[pfn])
+ pkg_list.append(self.recipecaches[''].pkg_fn[pfn])
return pkg_list
@@ -1124,16 +1134,18 @@ class BBCooker:
shell.start( self )
- def handleCollections( self, collections ):
+ def handleCollections(self, collections):
"""Handle collections"""
errors = False
- self.recipecache.bbfile_config_priorities = []
+ self.bbfile_config_priorities = []
if collections:
collection_priorities = {}
collection_depends = {}
collection_list = collections.split()
min_prio = 0
for c in collection_list:
+ bb.debug(1,'Processing %s in collection list' % (c))
+
# Get collection priority if defined explicitly
priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
if priority:
@@ -1152,10 +1164,10 @@ class BBCooker:
deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
if deps:
try:
- deplist = bb.utils.explode_dep_versions2(deps)
+ depDict = bb.utils.explode_dep_versions2(deps)
except bb.utils.VersionStringException as vse:
bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
- for dep, oplist in deplist.iteritems():
+ for dep, oplist in list(depDict.items()):
if dep in collection_list:
for opstr in oplist:
layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
@@ -1174,10 +1186,39 @@ class BBCooker:
else:
parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
errors = True
- collection_depends[c] = deplist.keys()
+ collection_depends[c] = list(depDict.keys())
else:
collection_depends[c] = []
+ # Check recommends and store information for priority calculation
+ recs = self.data.getVar("LAYERRECOMMENDS_%s" % c, True)
+ if recs:
+ try:
+ recDict = bb.utils.explode_dep_versions2(recs)
+ except bb.utils.VersionStringException as vse:
+ bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
+ for rec, oplist in list(recDict.items()):
+ if rec in collection_list:
+ if oplist:
+ opstr = oplist[0]
+ layerver = self.data.getVar("LAYERVERSION_%s" % rec, True)
+ if layerver:
+ (op, recver) = opstr.split()
+ try:
+ res = bb.utils.vercmp_string_op(layerver, recver, op)
+ except bb.utils.VersionStringException as vse:
+ bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
+ if not res:
+ parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
+ continue
+ else:
+ parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
+ continue
+ parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
+ collection_depends[c].append(rec)
+ else:
+ parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
+
# Recursively work out collection priorities based on dependencies
def calc_layer_priority(collection):
if not collection_priorities[collection]:
@@ -1205,7 +1246,7 @@ class BBCooker:
parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
errors = True
continue
- self.recipecache.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
+ self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
if errors:
# We've already printed the actual error(s)
raise CollectionError("Errors during parsing layer configuration")
@@ -1228,7 +1269,7 @@ class BBCooker:
if bf.startswith("/") or bf.startswith("../"):
bf = os.path.abspath(bf)
- self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
+ self.collection = CookerCollectFiles(self.bbfile_config_priorities)
filelist, masked = self.collection.collect_bbfiles(self.data, self.expanded_data)
try:
os.stat(bf)
@@ -1264,6 +1305,7 @@ class BBCooker:
"""
Build the file matching regexp buildfile
"""
+ bb.event.fire(bb.event.BuildInit(), self.expanded_data)
# Too many people use -b because they think it's how you normally
# specify a target to be built, so show a warning
@@ -1277,17 +1319,17 @@ class BBCooker:
if (task == None):
task = self.configuration.cmd
- fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
+ fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
fn = self.matchFile(fn)
self.buildSetVars()
- infos = bb.cache.Cache.parse(fn, self.collection.get_file_appends(fn), \
- self.data,
- self.caches_array)
+ bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
+
+ infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
infos = dict(infos)
- fn = bb.cache.Cache.realfn2virtual(fn, cls)
+ fn = bb.cache.realfn2virtual(fn, cls, mc)
try:
info_array = infos[fn]
except KeyError:
@@ -1296,29 +1338,30 @@ class BBCooker:
if info_array[0].skipped:
bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
- self.recipecache.add_from_recipeinfo(fn, info_array)
+ self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
# Tweak some variables
item = info_array[0].pn
- self.recipecache.ignored_dependencies = set()
- self.recipecache.bbfile_priority[fn] = 1
+ self.recipecaches[mc].ignored_dependencies = set()
+ self.recipecaches[mc].bbfile_priority[fn] = 1
# Remove external dependencies
- self.recipecache.task_deps[fn]['depends'] = {}
- self.recipecache.deps[fn] = []
- self.recipecache.rundeps[fn] = []
- self.recipecache.runrecs[fn] = []
+ self.recipecaches[mc].task_deps[fn]['depends'] = {}
+ self.recipecaches[mc].deps[fn] = []
+ self.recipecaches[mc].rundeps[fn] = []
+ self.recipecaches[mc].runrecs[fn] = []
# Invalidate task for target if force mode active
if self.configuration.force:
logger.verbose("Invalidate task %s, %s", task, fn)
if not task.startswith("do_"):
task = "do_%s" % task
- bb.parse.siggen.invalidate_task(task, self.recipecache, fn)
+ bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
# Setup taskdata structure
- taskdata = bb.taskdata.TaskData(self.configuration.abort)
- taskdata.add_provider(self.data, self.recipecache, item)
+ taskdata = {}
+ taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
+ taskdata[mc].add_provider(self.data, self.recipecaches[mc], item)
buildname = self.data.getVar("BUILDNAME", True)
bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)
@@ -1326,9 +1369,9 @@ class BBCooker:
# Execute the runqueue
if not task.startswith("do_"):
task = "do_%s" % task
- runlist = [[item, task]]
+ runlist = [[mc, item, task, fn]]
- rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
+ rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
def buildFileIdle(server, rq, abort):
@@ -1353,7 +1396,7 @@ class BBCooker:
return False
if not retval:
- bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures, interrupted), self.expanded_data)
+ bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.expanded_data)
self.command.finishAsyncCommand(msg)
return False
if retval is True:
@@ -1389,7 +1432,7 @@ class BBCooker:
return False
if not retval:
- bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures, interrupted), self.data)
+ bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.data)
self.command.finishAsyncCommand(msg)
return False
if retval is True:
@@ -1406,23 +1449,24 @@ class BBCooker:
if not task.startswith("do_"):
task = "do_%s" % task
- taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)
+ packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
+
+ bb.event.fire(bb.event.BuildInit(packages), self.expanded_data)
+
+ taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
buildname = self.data.getVar("BUILDNAME", False)
# make targets to always look as <target>:do_<task>
ntargets = []
- for target in fulltargetlist:
- if ":" in target:
- if ":do_" not in target:
- target = "%s:do_%s" % tuple(target.split(":", 1))
- else:
- target = "%s:%s" % (target, task)
- ntargets.append(target)
+ for target in runlist:
+ if target[0]:
+ ntargets.append("multiconfig:%s:%s:%s" % (target[0], target[1], target[2]))
+ ntargets.append("%s:%s" % (target[1], target[2]))
bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.data)
- rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
+ rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
if 'universe' in targets:
rq.rqdata.warn_multi_bb = True
@@ -1537,13 +1581,14 @@ class BBCooker:
if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
bb.event.fire(bb.event.SanityCheck(False), self.data)
- ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
- self.recipecache.ignored_dependencies = set(ignore.split())
+ for mc in self.multiconfigs:
+ ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED", True) or ""
+ self.recipecaches[mc].ignored_dependencies = set(ignore.split())
- for dep in self.configuration.extra_assume_provided:
- self.recipecache.ignored_dependencies.add(dep)
+ for dep in self.configuration.extra_assume_provided:
+ self.recipecaches[mc].ignored_dependencies.add(dep)
- self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
+ self.collection = CookerCollectFiles(self.bbfile_config_priorities)
(filelist, masked) = self.collection.collect_bbfiles(self.data, self.expanded_data)
self.parser = CookerParser(self, filelist, masked)
@@ -1557,18 +1602,20 @@ class BBCooker:
raise bb.BBHandledException()
self.show_appends_with_no_recipes()
self.handlePrefProviders()
- self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn, self.data)
+ for mc in self.multiconfigs:
+ self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
self.state = state.running
# Send an event listing all stamps reachable after parsing
# which the metadata may use to clean up stale data
- event = bb.event.ReachableStamps(self.recipecache.stamp)
- bb.event.fire(event, self.expanded_data)
+ for mc in self.multiconfigs:
+ event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
+ bb.event.fire(event, self.databuilder.mcdata[mc])
return None
return True
- def checkPackages(self, pkgs_to_build):
+ def checkPackages(self, pkgs_to_build, task=None):
# Return a copy, don't modify the original
pkgs_to_build = pkgs_to_build[:]
@@ -1579,26 +1626,29 @@ class BBCooker:
ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
for pkg in pkgs_to_build:
if pkg in ignore:
- parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
+ parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
if 'world' in pkgs_to_build:
- bb.providers.buildWorldTargetList(self.recipecache)
pkgs_to_build.remove('world')
- for t in self.recipecache.world_target:
- pkgs_to_build.append(t)
+ for mc in self.multiconfigs:
+ bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
+ for t in self.recipecaches[mc].world_target:
+ if mc:
+ t = "multiconfig:" + mc + ":" + t
+ pkgs_to_build.append(t)
if 'universe' in pkgs_to_build:
- parselog.warn("The \"universe\" target is only intended for testing and may produce errors.")
+ parselog.warning("The \"universe\" target is only intended for testing and may produce errors.")
parselog.debug(1, "collating packages for \"universe\"")
pkgs_to_build.remove('universe')
- for t in self.recipecache.universe_target:
- pkgs_to_build.append(t)
+ for mc in self.multiconfigs:
+ for t in self.recipecaches[mc].universe_target:
+ if mc:
+ t = "multiconfig:" + mc + ":" + t
+ pkgs_to_build.append(t)
return pkgs_to_build
-
-
-
def pre_serve(self):
# Empty the environment. The environment will be populated as
# necessary from the data store.
@@ -1847,7 +1897,7 @@ class CookerCollectFiles(object):
# Calculate priorities for each file
matched = set()
for p in pkgfns:
- realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
+ realfn, cls, mc = bb.cache.virtualfn2realfn(p)
priorities[p] = self.calc_bbfile_priority(realfn, matched)
# Don't show the warning if the BBFILE_PATTERN did match .bbappend files
@@ -1870,7 +1920,7 @@ class CookerCollectFiles(object):
for collection, pattern, regex, _ in self.bbfile_config_priorities:
if regex in unmatched:
if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
- collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
+ collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
return priorities
@@ -1891,7 +1941,7 @@ class Feeder(multiprocessing.Process):
while True:
try:
quit = self.quit.get_nowait()
- except Queue.Empty:
+ except queue.Empty:
pass
else:
if quit == 'cancel':
@@ -1905,7 +1955,7 @@ class Feeder(multiprocessing.Process):
try:
self.to_parsers.put(job, timeout=0.5)
- except Queue.Full:
+ except queue.Full:
self.jobs.insert(0, job)
continue
@@ -1945,7 +1995,7 @@ class Parser(multiprocessing.Process):
while True:
try:
self.quit.get_nowait()
- except Queue.Empty:
+ except queue.Empty:
pass
else:
self.results.cancel_join_thread()
@@ -1956,7 +2006,7 @@ class Parser(multiprocessing.Process):
else:
try:
job = self.jobs.get(timeout=0.25)
- except Queue.Empty:
+ except queue.Empty:
continue
if job is None:
@@ -1965,10 +2015,10 @@ class Parser(multiprocessing.Process):
try:
self.results.put(result, timeout=0.25)
- except Queue.Full:
+ except queue.Full:
pending.append(result)
- def parse(self, filename, appends, caches_array):
+ def parse(self, filename, appends):
try:
# Record the filename we're parsing into any events generated
def parse_filter(self, record):
@@ -1981,7 +2031,7 @@ class Parser(multiprocessing.Process):
bb.event.set_class_handlers(self.handlers.copy())
bb.event.LogHandler.filter = parse_filter
- return True, bb.cache.Cache.parse(filename, appends, self.cfg, caches_array)
+ return True, self.bb_cache.parse(filename, appends)
except Exception as exc:
tb = sys.exc_info()[2]
exc.recipe = filename
@@ -1999,6 +2049,7 @@ class CookerParser(object):
self.cooker = cooker
self.cfgdata = cooker.data
self.cfghash = cooker.data_hash
+ self.cfgbuilder = cooker.databuilder
# Accounting statistics
self.parsed = 0
@@ -2013,17 +2064,17 @@ class CookerParser(object):
self.current = 0
self.process_names = []
- self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
+ self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
self.fromcache = []
self.willparse = []
for filename in self.filelist:
appends = self.cooker.collection.get_file_appends(filename)
if not self.bb_cache.cacheValid(filename, appends):
- self.willparse.append((filename, appends, cooker.caches_array))
+ self.willparse.append((filename, appends))
else:
self.fromcache.append((filename, appends))
self.toparse = self.total - len(self.fromcache)
- self.progress_chunk = max(self.toparse / 100, 1)
+ self.progress_chunk = int(max(self.toparse / 100, 1))
self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
multiprocessing.cpu_count()), len(self.willparse))
@@ -2037,7 +2088,7 @@ class CookerParser(object):
if self.toparse:
bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
def init():
- Parser.cfg = self.cfgdata
+ Parser.bb_cache = self.bb_cache
bb.utils.set_process_name(multiprocessing.current_process().name)
multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
@@ -2108,7 +2159,7 @@ class CookerParser(object):
def load_cached(self):
for filename, appends in self.fromcache:
- cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
+ cached, infos = self.bb_cache.load(filename, appends)
yield not cached, infos
def parse_generator(self):
@@ -2118,7 +2169,7 @@ class CookerParser(object):
try:
result = self.result_queue.get(timeout=0.25)
- except Queue.Empty:
+ except queue.Empty:
pass
else:
value = result[1]
@@ -2131,7 +2182,7 @@ class CookerParser(object):
result = []
parsed = None
try:
- parsed, result = self.results.next()
+ parsed, result = next(self.results)
except StopIteration:
self.shutdown()
return False
@@ -2153,15 +2204,18 @@ class CookerParser(object):
return False
except bb.data_smart.ExpansionError as exc:
self.error += 1
- _, value, _ = sys.exc_info()
- logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc))
+ bbdir = os.path.dirname(__file__) + os.sep
+ etype, value, _ = sys.exc_info()
+ tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
+ logger.error('ExpansionError during parsing %s', value.recipe,
+ exc_info=(etype, value, tb))
self.shutdown(clean=False)
return False
except Exception as exc:
self.error += 1
etype, value, tb = sys.exc_info()
if hasattr(value, "recipe"):
- logger.error('Unable to parse %s', value.recipe,
+ logger.error('Unable to parse %s' % value.recipe,
exc_info=(etype, value, exc.traceback))
else:
# Most likely, an exception occurred during raising an exception
@@ -2184,13 +2238,13 @@ class CookerParser(object):
if info_array[0].skipped:
self.skipped += 1
self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
- self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecache,
+ (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
+ self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
parsed=parsed, watcher = self.cooker.add_filewatch)
return True
def reparse(self, filename):
- infos = self.bb_cache.parse(filename,
- self.cooker.collection.get_file_appends(filename),
- self.cfgdata, self.cooker.caches_array)
+ infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
for vfn, info_array in infos:
- self.cooker.recipecache.add_from_recipeinfo(vfn, info_array)
+ (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
+ self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py b/import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py
index 50259a9a0..b07c26643 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py
@@ -22,9 +22,11 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-import os, sys
-from functools import wraps
import logging
+import os
+import re
+import sys
+from functools import wraps
import bb
from bb import data
import bb.parse
@@ -192,7 +194,8 @@ def catch_parse_error(func):
fn, _, _, _ = traceback.extract_tb(tb, 1)[0]
if not fn.startswith(bbdir):
break
- parselog.critical("Unable to parse %s", fn, exc_info=(exc_class, exc, tb))
+ parselog.critical("Unable to parse %s" % fn, exc_info=(exc_class, exc, tb))
+ sys.exit(1)
except bb.parse.ParseError as exc:
parselog.critical(str(exc))
sys.exit(1)
@@ -234,9 +237,9 @@ class CookerDataBuilder(object):
bb.utils.set_context(bb.utils.clean_context())
bb.event.set_class_handlers(bb.event.clean_class_handlers())
- self.data = bb.data.init()
+ self.basedata = bb.data.init()
if self.tracking:
- self.data.enableTracking()
+ self.basedata.enableTracking()
# Keep a datastore of the initial environment variables and their
# values from when BitBake was launched to enable child processes
@@ -247,16 +250,49 @@ class CookerDataBuilder(object):
self.savedenv.setVar(k, cookercfg.env[k])
filtered_keys = bb.utils.approved_variables()
- bb.data.inheritFromOS(self.data, self.savedenv, filtered_keys)
- self.data.setVar("BB_ORIGENV", self.savedenv)
+ bb.data.inheritFromOS(self.basedata, self.savedenv, filtered_keys)
+ self.basedata.setVar("BB_ORIGENV", self.savedenv)
if worker:
- self.data.setVar("BB_WORKERCONTEXT", "1")
+ self.basedata.setVar("BB_WORKERCONTEXT", "1")
+
+ self.data = self.basedata
+ self.mcdata = {}
def parseBaseConfiguration(self):
try:
- self.parseConfigurationFiles(self.prefiles, self.postfiles)
- except SyntaxError:
+ bb.parse.init_parser(self.basedata)
+ self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)
+
+ if self.data.getVar("BB_WORKERCONTEXT", False) is None:
+ bb.fetch.fetcher_init(self.data)
+ bb.codeparser.parser_cache_init(self.data)
+
+ bb.event.fire(bb.event.ConfigParsed(), self.data)
+
+ reparse_cnt = 0
+ while self.data.getVar("BB_INVALIDCONF", False) is True:
+ if reparse_cnt > 20:
+ logger.error("Configuration has been re-parsed over 20 times, "
+ "breaking out of the loop...")
+ raise Exception("Too deep config re-parse loop. Check locations where "
+ "BB_INVALIDCONF is being set (ConfigParsed event handlers)")
+ self.data.setVar("BB_INVALIDCONF", False)
+ self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)
+ reparse_cnt += 1
+ bb.event.fire(bb.event.ConfigParsed(), self.data)
+
+ bb.parse.init_parser(self.data)
+ self.data_hash = self.data.get_hash()
+ self.mcdata[''] = self.data
+
+ multiconfig = (self.data.getVar("BBMULTICONFIG", True) or "").split()
+ for config in multiconfig:
+ mcdata = self.parseConfigurationFiles(['conf/multiconfig/%s.conf' % config] + self.prefiles, self.postfiles)
+ bb.event.fire(bb.event.ConfigParsed(), mcdata)
+ self.mcdata[config] = mcdata
+
+ except (SyntaxError, bb.BBHandledException):
raise bb.BBHandledException
except bb.data_smart.ExpansionError as e:
logger.error(str(e))
@@ -269,8 +305,7 @@ class CookerDataBuilder(object):
return findConfigFile("bblayers.conf", data)
def parseConfigurationFiles(self, prefiles, postfiles):
- data = self.data
- bb.parse.init_parser(data)
+ data = bb.data.createCopy(self.basedata)
# Parse files for loading *before* bitbake.conf and any includes
for f in prefiles:
@@ -289,15 +324,22 @@ class CookerDataBuilder(object):
data = bb.data.createCopy(data)
approved = bb.utils.approved_variables()
for layer in layers:
+ if not os.path.isdir(layer):
+ parselog.critical("Layer directory '%s' does not exist! "
+ "Please check BBLAYERS in %s" % (layer, layerconf))
+ sys.exit(1)
parselog.debug(2, "Adding layer %s", layer)
if 'HOME' in approved and '~' in layer:
layer = os.path.expanduser(layer)
if layer.endswith('/'):
layer = layer.rstrip('/')
data.setVar('LAYERDIR', layer)
+ data.setVar('LAYERDIR_RE', re.escape(layer))
data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
data.expandVarref('LAYERDIR')
+ data.expandVarref('LAYERDIR_RE')
+ data.delVar('LAYERDIR_RE')
data.delVar('LAYERDIR')
if not data.getVar("BBPATH", True):
@@ -323,23 +365,13 @@ class CookerDataBuilder(object):
# We register any handlers we've found so far here...
for var in data.getVar('__BBHANDLERS', False) or []:
handlerfn = data.getVarFlag(var, "filename", False)
+ if not handlerfn:
+ parselog.critical("Undefined event handler function '%s'" % var)
+ sys.exit(1)
handlerln = int(data.getVarFlag(var, "lineno", False))
bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask", True) or "").split(), handlerfn, handlerln)
- if data.getVar("BB_WORKERCONTEXT", False) is None:
- bb.fetch.fetcher_init(data)
- bb.codeparser.parser_cache_init(data)
- bb.event.fire(bb.event.ConfigParsed(), data)
-
- if data.getVar("BB_INVALIDCONF", False) is True:
- data.setVar("BB_INVALIDCONF", False)
- self.parseConfigurationFiles(self.prefiles, self.postfiles)
- return
-
- bb.parse.init_parser(data)
data.setVar('BBINCLUDED',bb.parse.get_file_depends(data))
- self.data = data
- self.data_hash = data.get_hash()
-
+ return data
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/daemonize.py b/import-layers/yocto-poky/bitbake/lib/bb/daemonize.py
index 346a61858..ab4a95462 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/daemonize.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/daemonize.py
@@ -178,8 +178,8 @@ def createDaemon(function, logfile):
# os.dup2(0, 2) # standard error (2)
- si = file('/dev/null', 'r')
- so = file(logfile, 'w')
+ si = open('/dev/null', 'r')
+ so = open(logfile, 'w')
se = so
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/data.py b/import-layers/yocto-poky/bitbake/lib/bb/data.py
index dbc6dea68..c1f27cd0c 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/data.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/data.py
@@ -182,12 +182,12 @@ def inheritFromOS(d, savedenv, permitted):
def emit_var(var, o=sys.__stdout__, d = init(), all=False):
"""Emit a variable to be sourced by a shell."""
- if d.getVarFlag(var, "python", False):
+ func = d.getVarFlag(var, "func", False)
+ if d.getVarFlag(var, 'python', False) and func:
return False
export = d.getVarFlag(var, "export", False)
unexport = d.getVarFlag(var, "unexport", False)
- func = d.getVarFlag(var, "func", False)
if not all and not export and not unexport and not func:
return False
@@ -339,7 +339,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
deps |= parser.references
deps = deps | (keys & parser.execs)
return deps, value
- varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "vardepvalueexclude", "postfuncs", "prefuncs", "lineno", "filename"]) or {}
+ varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "exports", "postfuncs", "prefuncs", "lineno", "filename"]) or {}
vardeps = varflags.get("vardeps")
value = d.getVar(key, False)
@@ -364,7 +364,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
if varflags.get("python"):
parser = bb.codeparser.PythonParser(key, logger)
if value and "\t" in value:
- logger.warn("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
+ logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
deps = deps | parser.references
deps = deps | (keys & parser.execs)
@@ -383,6 +383,8 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
deps = deps | set(varflags["prefuncs"].split())
if "postfuncs" in varflags:
deps = deps | set(varflags["postfuncs"].split())
+ if "exports" in varflags:
+ deps = deps | set(varflags["exports"].split())
else:
parser = d.expandWithRefs(value, key)
deps |= parser.references
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/data_smart.py b/import-layers/yocto-poky/bitbake/lib/bb/data_smart.py
index fa1e79427..f100446dc 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/data_smart.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/data_smart.py
@@ -135,7 +135,7 @@ class VariableParse:
self.contains[k] = parser.contains[k].copy()
else:
self.contains[k].update(parser.contains[k])
- value = utils.better_eval(codeobj, DataContext(self.d))
+ value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d})
return str(value)
@@ -372,7 +372,7 @@ class DataSmart(MutableMapping):
def expandWithRefs(self, s, varname):
- if not isinstance(s, basestring): # sanity check
+ if not isinstance(s, str): # sanity check
return VariableParse(varname, self, s)
if varname and varname in self.expand_cache:
@@ -397,8 +397,7 @@ class DataSmart(MutableMapping):
except bb.parse.SkipRecipe:
raise
except Exception as exc:
- exc_class, exc, tb = sys.exc_info()
- raise ExpansionError, ExpansionError(varname, s, exc), tb
+ raise ExpansionError(varname, s, exc) from exc
varparse.value = s
@@ -917,7 +916,7 @@ class DataSmart(MutableMapping):
yield k
def __len__(self):
- return len(frozenset(self))
+ return len(frozenset(iter(self)))
def __getitem__(self, item):
value = self.getVar(item, False)
@@ -966,4 +965,4 @@ class DataSmart(MutableMapping):
data.update({i:value})
data_str = str([(k, data[k]) for k in sorted(data.keys())])
- return hashlib.md5(data_str).hexdigest()
+ return hashlib.md5(data_str.encode("utf-8")).hexdigest()
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/event.py b/import-layers/yocto-poky/bitbake/lib/bb/event.py
index 5ffe89eae..6f1cb101f 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/event.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/event.py
@@ -24,14 +24,13 @@ BitBake build tools.
import os, sys
import warnings
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
+import pickle
import logging
import atexit
import traceback
import ast
+import threading
+
import bb.utils
import bb.compat
import bb.exceptions
@@ -71,12 +70,27 @@ _event_handler_map = {}
_catchall_handlers = {}
_eventfilter = None
_uiready = False
+_thread_lock = threading.Lock()
+_thread_lock_enabled = False
+
+if hasattr(__builtins__, '__setitem__'):
+ builtins = __builtins__
+else:
+ builtins = __builtins__.__dict__
+
+def enable_threadlock():
+ global _thread_lock_enabled
+ _thread_lock_enabled = True
+
+def disable_threadlock():
+ global _thread_lock_enabled
+ _thread_lock_enabled = False
def execute_handler(name, handler, event, d):
event.data = d
addedd = False
- if 'd' not in __builtins__:
- __builtins__['d'] = d
+ if 'd' not in builtins:
+ builtins['d'] = d
addedd = True
try:
ret = handler(event)
@@ -94,7 +108,7 @@ def execute_handler(name, handler, event, d):
finally:
del event.data
if addedd:
- del __builtins__['d']
+ del builtins['d']
def fire_class_handlers(event, d):
if isinstance(event, logging.LogRecord):
@@ -102,7 +116,7 @@ def fire_class_handlers(event, d):
eid = str(event.__class__)[8:-2]
evt_hmap = _event_handler_map.get(eid, {})
- for name, handler in _handlers.iteritems():
+ for name, handler in list(_handlers.items()):
if name in _catchall_handlers or name in evt_hmap:
if _eventfilter:
if not _eventfilter(name, handler, event, d):
@@ -117,31 +131,44 @@ def print_ui_queue():
logger = logging.getLogger("BitBake")
if not _uiready:
from bb.msg import BBLogFormatter
- console = logging.StreamHandler(sys.stdout)
- console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s"))
- logger.handlers = [console]
+ stdout = logging.StreamHandler(sys.stdout)
+ stderr = logging.StreamHandler(sys.stderr)
+ formatter = BBLogFormatter("%(levelname)s: %(message)s")
+ stdout.setFormatter(formatter)
+ stderr.setFormatter(formatter)
# First check to see if we have any proper messages
msgprint = False
- for event in ui_queue:
+ for event in ui_queue[:]:
if isinstance(event, logging.LogRecord):
if event.levelno > logging.DEBUG:
+ if event.levelno >= logging.WARNING:
+ logger.addHandler(stderr)
+ else:
+ logger.addHandler(stdout)
logger.handle(event)
msgprint = True
if msgprint:
return
# Nope, so just print all of the messages we have (including debug messages)
- for event in ui_queue:
+ logger.addHandler(stdout)
+ for event in ui_queue[:]:
if isinstance(event, logging.LogRecord):
logger.handle(event)
def fire_ui_handlers(event, d):
+ global _thread_lock
+ global _thread_lock_enabled
+
if not _uiready:
# No UI handlers registered yet, queue up the messages
ui_queue.append(event)
return
+ if _thread_lock_enabled:
+ _thread_lock.acquire()
+
errors = []
for h in _ui_handlers:
#print "Sending event %s" % event
@@ -160,6 +187,9 @@ def fire_ui_handlers(event, d):
for h in errors:
del _ui_handlers[h]
+ if _thread_lock_enabled:
+ _thread_lock.release()
+
def fire(event, d):
"""Fire off an Event"""
@@ -187,7 +217,7 @@ def register(name, handler, mask=None, filename=None, lineno=None):
if handler is not None:
# handle string containing python code
- if isinstance(handler, basestring):
+ if isinstance(handler, str):
tmp = "def %s(e):\n%s" % (name, handler)
try:
code = bb.methodpool.compile_cache(tmp)
@@ -225,6 +255,13 @@ def remove(name, handler):
"""Remove an Event handler"""
_handlers.pop(name)
+def get_handlers():
+ return _handlers
+
+def set_handlers(handlers):
+ global _handlers
+ _handlers = handlers
+
def set_eventfilter(func):
global _eventfilter
_eventfilter = func
@@ -373,7 +410,11 @@ class BuildBase(Event):
-
+class BuildInit(BuildBase):
+ """buildFile or buildTargets was invoked"""
+ def __init__(self, p=[]):
+ name = None
+ BuildBase.__init__(self, name, p)
class BuildStarted(BuildBase, OperationStarted):
"""bbmake build run started"""
@@ -605,8 +646,9 @@ class LogHandler(logging.Handler):
if hasattr(tb, 'tb_next'):
tb = list(bb.exceptions.extract_traceback(tb, context=3))
# Need to turn the value into something the logging system can pickle
- value = str(value)
record.bb_exc_info = (etype, value, tb)
+ record.bb_exc_formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
+ value = str(value)
record.exc_info = None
fire(record, None)
@@ -637,6 +679,33 @@ class MetadataEvent(Event):
self.type = eventtype
self._localdata = eventdata
+class ProcessStarted(Event):
+ """
+ Generic process started event (usually part of the initial startup)
+ where further progress events will be delivered
+ """
+ def __init__(self, processname, total):
+ Event.__init__(self)
+ self.processname = processname
+ self.total = total
+
+class ProcessProgress(Event):
+ """
+ Generic process progress event (usually part of the initial startup)
+ """
+ def __init__(self, processname, progress):
+ Event.__init__(self)
+ self.processname = processname
+ self.progress = progress
+
+class ProcessFinished(Event):
+ """
+ Generic process finished event (usually part of the initial startup)
+ """
+ def __init__(self, processname):
+ Event.__init__(self)
+ self.processname = processname
+
class SanityCheck(Event):
"""
Event to run sanity checks, either raise errors or generate events as return status.
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/exceptions.py b/import-layers/yocto-poky/bitbake/lib/bb/exceptions.py
index f182c8fd6..cd713439e 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/exceptions.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/exceptions.py
@@ -1,4 +1,4 @@
-from __future__ import absolute_import
+
import inspect
import traceback
import bb.namedtuple_with_abc
@@ -86,6 +86,6 @@ def format_exception(etype, value, tb, context=1, limit=None, formatter=None):
def to_string(exc):
if isinstance(exc, SystemExit):
- if not isinstance(exc.code, basestring):
+ if not isinstance(exc.code, str):
return 'Exited with "%d"' % exc.code
return str(exc)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
index 1fa67020c..cd7362c44 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
@@ -25,31 +25,26 @@ BitBake build tools.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-from __future__ import absolute_import
-from __future__ import print_function
import os, re
import signal
import logging
-import urllib
-import urlparse
+import urllib.request, urllib.parse, urllib.error
+if 'git' not in urllib.parse.uses_netloc:
+ urllib.parse.uses_netloc.append('git')
+import operator
+import collections
+import subprocess
+import pickle
import bb.persist_data, bb.utils
import bb.checksum
from bb import data
import bb.process
-import subprocess
__version__ = "2"
_checksum_cache = bb.checksum.FileChecksumCache()
logger = logging.getLogger("BitBake.Fetcher")
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info("Importing cPickle failed. "
- "Falling back to a very slow implementation.")
-
class BBFetchException(Exception):
"""Class all fetch exceptions inherit from"""
def __init__(self, message):
@@ -231,14 +226,14 @@ class URI(object):
# them are not quite RFC compliant.
uri, param_str = (uri.split(";", 1) + [None])[:2]
- urlp = urlparse.urlparse(uri)
+ urlp = urllib.parse.urlparse(uri)
self.scheme = urlp.scheme
reparse = 0
# Coerce urlparse to make URI scheme use netloc
- if not self.scheme in urlparse.uses_netloc:
- urlparse.uses_params.append(self.scheme)
+ if not self.scheme in urllib.parse.uses_netloc:
+ urllib.parse.uses_params.append(self.scheme)
reparse = 1
# Make urlparse happy(/ier) by converting local resources
@@ -249,7 +244,7 @@ class URI(object):
reparse = 1
if reparse:
- urlp = urlparse.urlparse(uri)
+ urlp = urllib.parse.urlparse(uri)
# Identify if the URI is relative or not
if urlp.scheme in self._relative_schemes and \
@@ -265,7 +260,7 @@ class URI(object):
if urlp.password:
self.userinfo += ':%s' % urlp.password
- self.path = urllib.unquote(urlp.path)
+ self.path = urllib.parse.unquote(urlp.path)
if param_str:
self.params = self._param_str_split(param_str, ";")
@@ -297,7 +292,7 @@ class URI(object):
if self.query else '')
def _param_str_split(self, string, elmdelim, kvdelim="="):
- ret = {}
+ ret = collections.OrderedDict()
for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
ret[k] = v
return ret
@@ -313,11 +308,11 @@ class URI(object):
@property
def path_quoted(self):
- return urllib.quote(self.path)
+ return urllib.parse.quote(self.path)
@path_quoted.setter
def path_quoted(self, path):
- self.path = urllib.unquote(path)
+ self.path = urllib.parse.unquote(path)
@property
def path(self):
@@ -390,7 +385,7 @@ def decodeurl(url):
user = ''
pswd = ''
- p = {}
+ p = collections.OrderedDict()
if parm:
for s in parm.split(';'):
if s:
@@ -399,7 +394,7 @@ def decodeurl(url):
s1, s2 = s.split('=')
p[s1] = s2
- return type, host, urllib.unquote(path), user, pswd, p
+ return type, host, urllib.parse.unquote(path), user, pswd, p
def encodeurl(decoded):
"""Encodes a URL from tokens (scheme, network location, path,
@@ -423,7 +418,7 @@ def encodeurl(decoded):
# Standardise path to ensure comparisons work
while '//' in path:
path = path.replace("//", "/")
- url += "%s" % urllib.quote(path)
+ url += "%s" % urllib.parse.quote(path)
if p:
for parm in p:
url += ";%s=%s" % (parm, p[parm])
@@ -586,12 +581,12 @@ def verify_checksum(ud, d, precomputed={}):
raise NoChecksumError('Missing SRC_URI checksum', ud.url)
# Log missing sums so user can more easily add them
- logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
- 'SRC_URI[%s] = "%s"',
- ud.localpath, ud.md5_name, md5data)
- logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
- 'SRC_URI[%s] = "%s"',
- ud.localpath, ud.sha256_name, sha256data)
+ logger.warning('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
+ 'SRC_URI[%s] = "%s"',
+ ud.localpath, ud.md5_name, md5data)
+ logger.warning('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
+ 'SRC_URI[%s] = "%s"',
+ ud.localpath, ud.sha256_name, sha256data)
# We want to alert the user if a checksum is defined in the recipe but
# it does not match.
@@ -659,9 +654,9 @@ def verify_donestamp(ud, d, origud=None):
# files to those containing the checksums.
if not isinstance(e, EOFError):
# Ignore errors, they aren't fatal
- logger.warn("Couldn't load checksums from donestamp %s: %s "
- "(msg: %s)" % (ud.donestamp, type(e).__name__,
- str(e)))
+ logger.warning("Couldn't load checksums from donestamp %s: %s "
+ "(msg: %s)" % (ud.donestamp, type(e).__name__,
+ str(e)))
try:
checksums = verify_checksum(ud, d, precomputed_checksums)
@@ -669,14 +664,14 @@ def verify_donestamp(ud, d, origud=None):
# as an upgrade path from the previous done stamp file format.
if checksums != precomputed_checksums:
with open(ud.donestamp, "wb") as cachefile:
- p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
+ p = pickle.Pickler(cachefile, 2)
p.dump(checksums)
return True
except ChecksumError as e:
# Checksums failed to verify, trigger re-download and remove the
# incorrect stamp file.
- logger.warn("Checksum mismatch for local file %s\n"
- "Cleaning and trying again." % ud.localpath)
+ logger.warning("Checksum mismatch for local file %s\n"
+ "Cleaning and trying again." % ud.localpath)
if os.path.exists(ud.localpath):
rename_bad_checksum(ud, e.checksum)
bb.utils.remove(ud.donestamp)
@@ -703,13 +698,13 @@ def update_stamp(ud, d):
checksums = verify_checksum(ud, d)
# Store the checksums for later re-verification against the recipe
with open(ud.donestamp, "wb") as cachefile:
- p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
+ p = pickle.Pickler(cachefile, 2)
p.dump(checksums)
except ChecksumError as e:
# Checksums failed to verify, trigger re-download and remove the
# incorrect stamp file.
- logger.warn("Checksum mismatch for local file %s\n"
- "Cleaning and trying again." % ud.localpath)
+ logger.warning("Checksum mismatch for local file %s\n"
+ "Cleaning and trying again." % ud.localpath)
if os.path.exists(ud.localpath):
rename_bad_checksum(ud, e.checksum)
bb.utils.remove(ud.donestamp)
@@ -766,6 +761,7 @@ def get_srcrev(d, method_name='sortable_revision'):
if not format:
raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
+ name_to_rev = {}
seenautoinc = False
for scm in scms:
ud = urldata[scm]
@@ -774,7 +770,16 @@ def get_srcrev(d, method_name='sortable_revision'):
seenautoinc = seenautoinc or autoinc
if len(rev) > 10:
rev = rev[:10]
- format = format.replace(name, rev)
+ name_to_rev[name] = rev
+ # Replace names by revisions in the SRCREV_FORMAT string. The approach used
+ # here can handle names being prefixes of other names and names appearing
+ # as substrings in revisions (in which case the name should not be
+ # expanded). The '|' regular expression operator tries matches from left to
+ # right, so we need to sort the names with the longest ones first.
+ names_descending_len = sorted(name_to_rev, key=len, reverse=True)
+ name_to_rev_re = "|".join(re.escape(name) for name in names_descending_len)
+ format = re.sub(name_to_rev_re, lambda match: name_to_rev[match.group(0)], format)
+
if seenautoinc:
format = "AUTOINC+" + format
@@ -784,7 +789,7 @@ def localpath(url, d):
fetcher = bb.fetch2.Fetch([url], d)
return fetcher.localpath(url)
-def runfetchcmd(cmd, d, quiet=False, cleanup=None):
+def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
"""
Run cmd returning the command output
Raise an error if interrupted or cmd fails
@@ -807,13 +812,16 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None):
'GIT_SSL_CAINFO',
'GIT_SMART_HTTP',
'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
- 'SOCKS5_USER', 'SOCKS5_PASSWD']
+ 'SOCKS5_USER', 'SOCKS5_PASSWD',
+ 'DBUS_SESSION_BUS_ADDRESS',
+ 'P4CONFIG']
if not cleanup:
cleanup = []
+ origenv = d.getVar("BB_ORIGENV", False)
for var in exportvars:
- val = d.getVar(var, True)
+ val = d.getVar(var, True) or (origenv and origenv.getVar(var, True))
if val:
cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
@@ -823,7 +831,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None):
error_message = ""
try:
- (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE)
+ (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
success = True
except bb.process.NotFoundError as e:
error_message = "Fetch command %s" % (e.command)
@@ -834,7 +842,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None):
output = "output:\n%s" % e.stderr
else:
output = "no output"
- error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output)
+ error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
except bb.process.CmdError as e:
error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
if not success:
@@ -937,8 +945,6 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
return found
return False
- os.chdir(ld.getVar("DL_DIR", True))
-
if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
ud.method.download(ud, ld)
if hasattr(ud.method,"build_mirror_data"):
@@ -982,8 +988,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
except bb.fetch2.BBFetchException as e:
if isinstance(e, ChecksumError):
- logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
- logger.warn(str(e))
+ logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
+ logger.warning(str(e))
if os.path.exists(ud.localpath):
rename_bad_checksum(ud, e.checksum)
elif isinstance(e, NoChecksumError):
@@ -1198,7 +1204,7 @@ class FetchData(object):
raise NonLocalMethod()
if self.parm.get("proto", None) and "protocol" not in self.parm:
- logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
+ logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
self.parm["protocol"] = self.parm.get("proto", None)
if hasattr(self.method, "urldata_init"):
@@ -1395,7 +1401,18 @@ class FetchMethod(object):
else:
cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
elif file.endswith('.deb') or file.endswith('.ipk'):
- cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file
+ output = subprocess.check_output('ar -t %s' % file, preexec_fn=subprocess_setup, shell=True)
+ datafile = None
+ if output:
+ for line in output.decode().splitlines():
+ if line.startswith('data.tar.'):
+ datafile = line
+ break
+ else:
+ raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
+ else:
+ raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
+ cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile)
elif file.endswith('.tar.7z'):
cmd = '7z x -so %s | tar xf - ' % file
elif file.endswith('.7z'):
@@ -1403,7 +1420,13 @@ class FetchMethod(object):
# If 'subdir' param exists, create a dir and use it as destination for unpack cmd
if 'subdir' in urldata.parm:
- unpackdir = '%s/%s' % (rootdir, urldata.parm.get('subdir'))
+ subdir = urldata.parm.get('subdir')
+ if os.path.isabs(subdir):
+ if not os.path.realpath(subdir).startswith(os.path.realpath(rootdir)):
+ raise UnpackError("subdir argument isn't a subdirectory of unpack root %s" % rootdir, urldata.url)
+ unpackdir = subdir
+ else:
+ unpackdir = os.path.join(rootdir, subdir)
bb.utils.mkdirhier(unpackdir)
else:
unpackdir = rootdir
@@ -1422,22 +1445,16 @@ class FetchMethod(object):
if urlpath.find("/") != -1:
destdir = urlpath.rsplit("/", 1)[0] + '/'
bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
- cmd = 'cp -fpPR %s %s' % (file, destdir)
+ cmd = 'cp -fpPRH %s %s' % (file, destdir)
if not cmd:
return
- # Change to unpackdir before executing command
- save_cwd = os.getcwd();
- os.chdir(unpackdir)
-
path = data.getVar('PATH', True)
if path:
cmd = "PATH=\"%s\" %s" % (path, cmd)
- bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
- ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
-
- os.chdir(save_cwd)
+ bb.note("Unpacking %s to %s/" % (file, unpackdir))
+ ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=unpackdir)
if ret != 0:
raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
@@ -1505,8 +1522,9 @@ class Fetch(object):
self.connection_cache = connection_cache
fn = d.getVar('FILE', True)
- if cache and fn and fn in urldata_cache:
- self.ud = urldata_cache[fn]
+ mc = d.getVar('__BBMULTICONFIG', True) or ""
+ if cache and fn and mc + fn in urldata_cache:
+ self.ud = urldata_cache[mc + fn]
for url in urls:
if url not in self.ud:
@@ -1518,7 +1536,7 @@ class Fetch(object):
pass
if fn and cache:
- urldata_cache[fn] = self.ud
+ urldata_cache[mc + fn] = self.ud
def localpath(self, url):
if url not in self.urls:
@@ -1572,8 +1590,6 @@ class Fetch(object):
if premirroronly:
self.d.setVar("BB_NO_NETWORK", "1")
- os.chdir(self.d.getVar("DL_DIR", True))
-
firsterr = None
verified_stamp = verify_donestamp(ud, self.d)
if not localpath and (not verified_stamp or m.need_update(ud, self.d)):
@@ -1594,14 +1610,14 @@ class Fetch(object):
except BBFetchException as e:
if isinstance(e, ChecksumError):
- logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
+ logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
logger.debug(1, str(e))
if os.path.exists(ud.localpath):
rename_bad_checksum(ud, e.checksum)
elif isinstance(e, NoChecksumError):
raise
else:
- logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
+ logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
logger.debug(1, str(e))
firsterr = e
# Remove any incomplete fetch
@@ -1734,7 +1750,7 @@ class FetchConnectionCache(object):
del self.cache[cn]
def close_connections(self):
- for cn in self.cache.keys():
+ for cn in list(self.cache.keys()):
self.cache[cn].close()
del self.cache[cn]
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py
index 03e9ac461..72264afb5 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py
@@ -88,28 +88,25 @@ class Bzr(FetchMethod):
bzrcmd = self._buildbzrcommand(ud, d, "update")
logger.debug(1, "BZR Update %s", ud.url)
bb.fetch2.check_network_access(d, bzrcmd, ud.url)
- os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
- runfetchcmd(bzrcmd, d)
+ runfetchcmd(bzrcmd, d, workdir=os.path.join(ud.pkgdir, os.path.basename(ud.path)))
else:
bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
bzrcmd = self._buildbzrcommand(ud, d, "fetch")
bb.fetch2.check_network_access(d, bzrcmd, ud.url)
logger.debug(1, "BZR Checkout %s", ud.url)
bb.utils.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
logger.debug(1, "Running %s", bzrcmd)
- runfetchcmd(bzrcmd, d)
-
- os.chdir(ud.pkgdir)
+ runfetchcmd(bzrcmd, d, workdir=ud.pkgdir)
scmdata = ud.parm.get("scmdata", "")
if scmdata == "keep":
tar_flags = ""
else:
- tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"
+ tar_flags = "--exclude='.bzr' --exclude='.bzrtags'"
# tar them up to a defined filename
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)),
+ d, cleanup=[ud.localpath], workdir=ud.pkgdir)
def supports_srcrev(self):
return True
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py
index ba83e7cb6..70e280a8d 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py
@@ -202,11 +202,10 @@ class ClearCase(FetchMethod):
def _remove_view(self, ud, d):
if os.path.exists(ud.viewdir):
- os.chdir(ud.ccasedir)
cmd = self._build_ccase_command(ud, 'rmview');
logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
bb.fetch2.check_network_access(d, cmd, ud.url)
- output = runfetchcmd(cmd, d)
+ output = runfetchcmd(cmd, d, workdir=ud.ccasedir)
logger.info("rmview output: %s", output)
def need_update(self, ud, d):
@@ -241,11 +240,10 @@ class ClearCase(FetchMethod):
raise e
# Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
- os.chdir(ud.viewdir)
cmd = self._build_ccase_command(ud, 'setcs');
logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
bb.fetch2.check_network_access(d, cmd, ud.url)
- output = runfetchcmd(cmd, d)
+ output = runfetchcmd(cmd, d, workdir=ud.viewdir)
logger.info("%s", output)
# Copy the configspec to the viewdir so we have it in our source tarball later
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py
index d27d96f68..5ff70ba92 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py
@@ -123,22 +123,23 @@ class Cvs(FetchMethod):
pkg = d.getVar('PN', True)
pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
moddir = os.path.join(pkgdir, localdir)
+ workdir = None
if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
logger.info("Update " + ud.url)
bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
# update sources there
- os.chdir(moddir)
+ workdir = moddir
cmd = cvsupdatecmd
else:
logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(pkgdir)
- os.chdir(pkgdir)
+ workdir = pkgdir
logger.debug(1, "Running %s", cvscmd)
bb.fetch2.check_network_access(d, cvscmd, ud.url)
cmd = cvscmd
- runfetchcmd(cmd, d, cleanup = [moddir])
+ runfetchcmd(cmd, d, cleanup=[moddir], workdir=workdir)
if not os.access(moddir, os.R_OK):
raise FetchError("Directory %s was not readable despite sucessful fetch?!" % moddir, ud.url)
@@ -147,18 +148,18 @@ class Cvs(FetchMethod):
if scmdata == "keep":
tar_flags = ""
else:
- tar_flags = "--exclude 'CVS'"
+ tar_flags = "--exclude='CVS'"
# tar them up to a defined filename
+ workdir = None
if 'fullpath' in ud.parm:
- os.chdir(pkgdir)
+ workdir = pkgdir
cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
else:
- os.chdir(moddir)
- os.chdir('..')
+ workdir = os.path.dirname(os.path.realpath(moddir))
cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))
- runfetchcmd(cmd, d, cleanup = [ud.localpath])
+ runfetchcmd(cmd, d, cleanup=[ud.localpath], workdir=workdir)
def clean(self, ud, d):
""" Clean CVS Files and tarballs """
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py
index 526668bc2..1bec60ab7 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py
@@ -49,6 +49,10 @@ Supported SRC_URI options are:
referring to commit which is valid in tag instead of branch.
The default is "0", set nobranch=1 if needed.
+- usehead
+ For local git:// urls to use the current branch HEAD as the revsion for use with
+ AUTOREV. Implies nobranch.
+
"""
#Copyright (C) 2005 Richard Purdie
@@ -71,11 +75,53 @@ import os
import re
import bb
import errno
+import bb.progress
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
+
+class GitProgressHandler(bb.progress.LineFilterProgressHandler):
+ """Extract progress information from git output"""
+ def __init__(self, d):
+ self._buffer = ''
+ self._count = 0
+ super(GitProgressHandler, self).__init__(d)
+ # Send an initial progress event so the bar gets shown
+ self._fire_progress(-1)
+
+ def write(self, string):
+ self._buffer += string
+ stages = ['Counting objects', 'Compressing objects', 'Receiving objects', 'Resolving deltas']
+ stage_weights = [0.2, 0.05, 0.5, 0.25]
+ stagenum = 0
+ for i, stage in reversed(list(enumerate(stages))):
+ if stage in self._buffer:
+ stagenum = i
+ self._buffer = ''
+ break
+ self._status = stages[stagenum]
+ percs = re.findall(r'(\d+)%', string)
+ if percs:
+ progress = int(round((int(percs[-1]) * stage_weights[stagenum]) + (sum(stage_weights[:stagenum]) * 100)))
+ rates = re.findall(r'([\d.]+ [a-zA-Z]*/s+)', string)
+ if rates:
+ rate = rates[-1]
+ else:
+ rate = None
+ self.update(progress, rate)
+ else:
+ if stagenum == 0:
+ percs = re.findall(r': (\d+)', string)
+ if percs:
+ count = int(percs[-1])
+ if count > self._count:
+ self._count = count
+ self._fire_progress(-count)
+ super(GitProgressHandler, self).write(string)
+
+
class Git(FetchMethod):
"""Class to fetch a module or modules from git repositories"""
def init(self, d):
@@ -111,6 +157,13 @@ class Git(FetchMethod):
ud.nobranch = ud.parm.get("nobranch","0") == "1"
+ # usehead implies nobranch
+ ud.usehead = ud.parm.get("usehead","0") == "1"
+ if ud.usehead:
+ if ud.proto != "file":
+ raise bb.fetch2.ParameterError("The usehead option is only for use with local ('protocol=file') git repositories", ud.url)
+ ud.nobranch = 1
+
# bareclone implies nocheckout
ud.bareclone = ud.parm.get("bareclone","0") == "1"
if ud.bareclone:
@@ -126,6 +179,9 @@ class Git(FetchMethod):
ud.branches[name] = branch
ud.unresolvedrev[name] = branch
+ if ud.usehead:
+ ud.unresolvedrev['default'] = 'HEAD'
+
ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git -c core.fsyncobjectfiles=0"
ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable
@@ -163,9 +219,8 @@ class Git(FetchMethod):
def need_update(self, ud, d):
if not os.path.exists(ud.clonedir):
return True
- os.chdir(ud.clonedir)
for name in ud.names:
- if not self._contains_ref(ud, d, name):
+ if not self._contains_ref(ud, d, name, ud.clonedir):
return True
if ud.write_tarballs and not os.path.exists(ud.fullmirror):
return True
@@ -186,8 +241,7 @@ class Git(FetchMethod):
# If the checkout doesn't exist and the mirror tarball does, extract it
if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
bb.utils.mkdirhier(ud.clonedir)
- os.chdir(ud.clonedir)
- runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
+ runfetchcmd("tar -xzf %s" % (ud.fullmirror), d, workdir=ud.clonedir)
repourl = self._get_repo_url(ud)
@@ -196,38 +250,38 @@ class Git(FetchMethod):
# We do this since git will use a "-l" option automatically for local urls where possible
if repourl.startswith("file://"):
repourl = repourl[7:]
- clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
+ clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, repourl, ud.clonedir)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, clone_cmd)
- runfetchcmd(clone_cmd, d)
+ progresshandler = GitProgressHandler(d)
+ runfetchcmd(clone_cmd, d, log=progresshandler)
- os.chdir(ud.clonedir)
# Update the checkout if needed
needupdate = False
for name in ud.names:
- if not self._contains_ref(ud, d, name):
+ if not self._contains_ref(ud, d, name, ud.clonedir):
needupdate = True
if needupdate:
try:
- runfetchcmd("%s remote rm origin" % ud.basecmd, d)
+ runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
except bb.fetch2.FetchError:
logger.debug(1, "No Origin")
- runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
- fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
+ runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
+ fetch_cmd = "LANG=C %s fetch -f --prune --progress %s refs/*:refs/*" % (ud.basecmd, repourl)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
- runfetchcmd(fetch_cmd, d)
- runfetchcmd("%s prune-packed" % ud.basecmd, d)
- runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
+ progresshandler = GitProgressHandler(d)
+ runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
+ runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
+ runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d, workdir=ud.clonedir)
try:
os.unlink(ud.fullmirror)
except OSError as exc:
if exc.errno != errno.ENOENT:
raise
- os.chdir(ud.clonedir)
for name in ud.names:
- if not self._contains_ref(ud, d, name):
+ if not self._contains_ref(ud, d, name, ud.clonedir):
raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
def build_mirror_data(self, ud, d):
@@ -237,10 +291,9 @@ class Git(FetchMethod):
if os.path.islink(ud.fullmirror):
os.unlink(ud.fullmirror)
- os.chdir(ud.clonedir)
logger.info("Creating tarball of git repository")
- runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
- runfetchcmd("touch %s.done" % (ud.fullmirror), d)
+ runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d, workdir=ud.clonedir)
+ runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=ud.clonedir)
def unpack(self, ud, destdir, d):
""" unpack the downloaded src to destdir"""
@@ -263,21 +316,21 @@ class Git(FetchMethod):
cloneflags += " --mirror"
runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, ud.clonedir, destdir), d)
- os.chdir(destdir)
repourl = self._get_repo_url(ud)
- runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d)
+ runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)
if not ud.nocheckout:
if subdir != "":
- runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
- runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
+ runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d,
+ workdir=destdir)
+ runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
elif not ud.nobranch:
branchname = ud.branches[ud.names[0]]
runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
- ud.revisions[ud.names[0]]), d)
+ ud.revisions[ud.names[0]]), d, workdir=destdir)
runfetchcmd("%s branch --set-upstream %s origin/%s" % (ud.basecmd, branchname, \
- branchname), d)
+ branchname), d, workdir=destdir)
else:
- runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)
+ runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir)
return True
@@ -291,7 +344,7 @@ class Git(FetchMethod):
def supports_srcrev(self):
return True
- def _contains_ref(self, ud, d, name):
+ def _contains_ref(self, ud, d, name, wd):
cmd = ""
if ud.nobranch:
cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
@@ -300,7 +353,7 @@ class Git(FetchMethod):
cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
ud.basecmd, ud.revisions[name], ud.branches[name])
try:
- output = runfetchcmd(cmd, d, quiet=True)
+ output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
except bb.fetch2.FetchError:
return False
if len(output.split()) > 1:
@@ -343,16 +396,17 @@ class Git(FetchMethod):
"""
output = self._lsremote(ud, d, "")
# Tags of the form ^{} may not work, need to fallback to other form
- if ud.unresolvedrev[name][:5] == "refs/":
+ if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead:
head = ud.unresolvedrev[name]
tag = ud.unresolvedrev[name]
else:
head = "refs/heads/%s" % ud.unresolvedrev[name]
tag = "refs/tags/%s" % ud.unresolvedrev[name]
for s in [head, tag + "^{}", tag]:
- for l in output.split('\n'):
- if s in l:
- return l.split()[0]
+ for l in output.strip().split('\n'):
+ sha1, ref = l.split()
+ if s == ref:
+ return sha1
raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
(ud.unresolvedrev[name], ud.host+ud.path))
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py
index 0f3789745..4937a1089 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py
@@ -34,43 +34,42 @@ class GitANNEX(Git):
"""
return ud.type in ['gitannex']
- def uses_annex(self, ud, d):
+ def uses_annex(self, ud, d, wd):
for name in ud.names:
try:
- runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True)
+ runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True, workdir=wd)
return True
except bb.fetch.FetchError:
pass
return False
- def update_annex(self, ud, d):
+ def update_annex(self, ud, d, wd):
try:
- runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True)
+ runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True, workdir=wd)
except bb.fetch.FetchError:
return False
- runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True)
+ runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True, workdir=wd)
return True
def download(self, ud, d):
Git.download(self, ud, d)
- os.chdir(ud.clonedir)
- annex = self.uses_annex(ud, d)
+ annex = self.uses_annex(ud, d, ud.clonedir)
if annex:
- self.update_annex(ud, d)
+ self.update_annex(ud, d, ud.clonedir)
def unpack(self, ud, destdir, d):
Git.unpack(self, ud, destdir, d)
- os.chdir(ud.destdir)
try:
- runfetchcmd("%s annex sync" % (ud.basecmd), d)
+ runfetchcmd("%s annex init" % (ud.basecmd), d, workdir=ud.destdir)
except bb.fetch.FetchError:
pass
- annex = self.uses_annex(ud, d)
+ annex = self.uses_annex(ud, d, ud.destdir)
if annex:
- runfetchcmd("%s annex get" % (ud.basecmd), d)
- runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True)
+ runfetchcmd("%s annex get" % (ud.basecmd), d, workdir=ud.destdir)
+ runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True, workdir=ud.destdir)
+
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py
index 752f1d3c1..661376204 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -43,10 +43,10 @@ class GitSM(Git):
"""
return ud.type in ['gitsm']
- def uses_submodules(self, ud, d):
+ def uses_submodules(self, ud, d, wd):
for name in ud.names:
try:
- runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True)
+ runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=wd)
return True
except bb.fetch.FetchError:
pass
@@ -107,28 +107,25 @@ class GitSM(Git):
os.mkdir(tmpclonedir)
os.rename(ud.clonedir, gitdir)
runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
- os.chdir(tmpclonedir)
- runfetchcmd(ud.basecmd + " reset --hard", d)
- runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
- runfetchcmd(ud.basecmd + " submodule update --init --recursive", d)
+ runfetchcmd(ud.basecmd + " reset --hard", d, workdir=tmpclonedir)
+ runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=tmpclonedir)
+ runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=tmpclonedir)
self._set_relative_paths(tmpclonedir)
- runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d)
+ runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d, workdir=tmpclonedir)
os.rename(gitdir, ud.clonedir,)
bb.utils.remove(tmpclonedir, True)
def download(self, ud, d):
Git.download(self, ud, d)
- os.chdir(ud.clonedir)
- submodules = self.uses_submodules(ud, d)
+ submodules = self.uses_submodules(ud, d, ud.clonedir)
if submodules:
self.update_submodules(ud, d)
def unpack(self, ud, destdir, d):
Git.unpack(self, ud, destdir, d)
- os.chdir(ud.destdir)
- submodules = self.uses_submodules(ud, d)
+ submodules = self.uses_submodules(ud, d, ud.destdir)
if submodules:
- runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
- runfetchcmd(ud.basecmd + " submodule update --init --recursive", d)
+ runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=ud.destdir)
+ runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=ud.destdir)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py
index 3b743ff51..20df8016d 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py
@@ -169,25 +169,22 @@ class Hg(FetchMethod):
# If the checkout doesn't exist and the mirror tarball does, extract it
if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
bb.utils.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
+ runfetchcmd("tar -xzf %s" % (ud.fullmirror), d, workdir=ud.pkgdir)
if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
# Found the source, check whether need pull
updatecmd = self._buildhgcommand(ud, d, "update")
- os.chdir(ud.moddir)
logger.debug(1, "Running %s", updatecmd)
try:
- runfetchcmd(updatecmd, d)
+ runfetchcmd(updatecmd, d, workdir=ud.moddir)
except bb.fetch2.FetchError:
# Running pull in the repo
pullcmd = self._buildhgcommand(ud, d, "pull")
logger.info("Pulling " + ud.url)
# update sources there
- os.chdir(ud.moddir)
logger.debug(1, "Running %s", pullcmd)
bb.fetch2.check_network_access(d, pullcmd, ud.url)
- runfetchcmd(pullcmd, d)
+ runfetchcmd(pullcmd, d, workdir=ud.moddir)
try:
os.unlink(ud.fullmirror)
except OSError as exc:
@@ -200,17 +197,15 @@ class Hg(FetchMethod):
logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
logger.debug(1, "Running %s", fetchcmd)
bb.fetch2.check_network_access(d, fetchcmd, ud.url)
- runfetchcmd(fetchcmd, d)
+ runfetchcmd(fetchcmd, d, workdir=ud.pkgdir)
# Even when we clone (fetch), we still need to update as hg's clone
# won't checkout the specified revision if its on a branch
updatecmd = self._buildhgcommand(ud, d, "update")
- os.chdir(ud.moddir)
logger.debug(1, "Running %s", updatecmd)
- runfetchcmd(updatecmd, d)
+ runfetchcmd(updatecmd, d, workdir=ud.moddir)
def clean(self, ud, d):
""" Clean the hg dir """
@@ -246,10 +241,9 @@ class Hg(FetchMethod):
if os.path.islink(ud.fullmirror):
os.unlink(ud.fullmirror)
- os.chdir(ud.pkgdir)
logger.info("Creating tarball of hg repository")
- runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d)
- runfetchcmd("touch %s.done" % (ud.fullmirror), d)
+ runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d, workdir=ud.pkgdir)
+ runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=ud.pkgdir)
def localpath(self, ud, d):
return ud.pkgdir
@@ -269,10 +263,8 @@ class Hg(FetchMethod):
logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
logger.debug(2, "Unpack: updating source in '" + codir + "'")
- os.chdir(codir)
- runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d)
- runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d)
+ runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d, workdir=codir)
+ runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d, workdir=codir)
else:
logger.debug(2, "Unpack: extracting source to '" + codir + "'")
- os.chdir(ud.moddir)
- runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d)
+ runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d, workdir=ud.moddir)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py
index 303a52b63..51ca78d12 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py
@@ -26,7 +26,7 @@ BitBake build tools.
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
import os
-import urllib
+import urllib.request, urllib.parse, urllib.error
import bb
import bb.utils
from bb import data
@@ -42,7 +42,7 @@ class Local(FetchMethod):
def urldata_init(self, ud, d):
# We don't set localfile as for this fetcher the file is already local!
- ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0])
+ ud.decodedurl = urllib.parse.unquote(ud.url.split("://")[1].split(";")[0])
ud.basename = os.path.basename(ud.decodedurl)
ud.basepath = ud.decodedurl
ud.needdonestamp = False
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py
index e8d9b1109..699ae72e0 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py
@@ -13,14 +13,14 @@ Usage in the recipe:
- name
- version
- npm://registry.npmjs.org/${PN}/-/${PN}-${PV}.tgz would become npm://registry.npmjs.org;name=${PN};ver=${PV}
+ npm://registry.npmjs.org/${PN}/-/${PN}-${PV}.tgz would become npm://registry.npmjs.org;name=${PN};version=${PV}
The fetcher triggers off the existence of ud.localpath. If that exists and has the ".done" stamp, it's assumed the fetch is good/done
"""
import os
import sys
-import urllib
+import urllib.request, urllib.parse, urllib.error
import json
import subprocess
import signal
@@ -88,7 +88,7 @@ class Npm(FetchMethod):
ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest)
self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
- self.basecmd += " --directory-prefix=%s " % prefixdir
+ ud.prefixdir = prefixdir
ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0")
ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version)
@@ -102,7 +102,8 @@ class Npm(FetchMethod):
def _runwget(self, ud, d, command, quiet):
logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
bb.fetch2.check_network_access(d, command)
- runfetchcmd(command, d, quiet)
+ dldir = d.getVar("DL_DIR", True)
+ runfetchcmd(command, d, quiet, workdir=dldir)
def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
file = data[pkg]['tgz']
@@ -113,16 +114,13 @@ class Npm(FetchMethod):
bb.fatal("NPM package %s downloaded not a tarball!" % file)
# Change to subdir before executing command
- save_cwd = os.getcwd()
if not os.path.exists(destdir):
os.makedirs(destdir)
- os.chdir(destdir)
path = d.getVar('PATH', True)
if path:
cmd = "PATH=\"%s\" %s" % (path, cmd)
- bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
- ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
- os.chdir(save_cwd)
+ bb.note("Unpacking %s to %s/" % (file, destdir))
+ ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=destdir)
if ret != 0:
raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)
@@ -140,7 +138,12 @@ class Npm(FetchMethod):
workobj = json.load(datafile)
dldir = "%s/%s" % (os.path.dirname(ud.localpath), ud.pkgname)
- self._unpackdep(ud, ud.pkgname, workobj, "%s/npmpkg" % destdir, dldir, d)
+ if 'subdir' in ud.parm:
+ unpackdir = '%s/%s' % (destdir, ud.parm.get('subdir'))
+ else:
+ unpackdir = '%s/npmpkg' % destdir
+
+ self._unpackdep(ud, ud.pkgname, workobj, unpackdir, dldir, d)
def _parse_view(self, output):
'''
@@ -162,7 +165,9 @@ class Npm(FetchMethod):
pdata = json.loads('\n'.join(datalines))
return pdata
- def _getdependencies(self, pkg, data, version, d, ud, optional=False):
+ def _getdependencies(self, pkg, data, version, d, ud, optional=False, fetchedlist=None):
+ if fetchedlist is None:
+ fetchedlist = []
pkgfullname = pkg
if version != '*' and not '/' in version:
pkgfullname += "@'%s'" % version
@@ -184,7 +189,9 @@ class Npm(FetchMethod):
outputurl = pdata['dist']['tarball']
data[pkg] = {}
data[pkg]['tgz'] = os.path.basename(outputurl)
- self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False)
+ if not outputurl in fetchedlist:
+ self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
+ fetchedlist.append(outputurl)
dependencies = pdata.get('dependencies', {})
optionalDependencies = pdata.get('optionalDependencies', {})
@@ -196,13 +203,20 @@ class Npm(FetchMethod):
optdepsfound[dep] = dependencies[dep]
else:
depsfound[dep] = dependencies[dep]
- for dep, version in optdepsfound.iteritems():
- self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True)
- for dep, version in depsfound.iteritems():
- self._getdependencies(dep, data[pkg]['deps'], version, d, ud)
+ for dep, version in optdepsfound.items():
+ self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True, fetchedlist=fetchedlist)
+ for dep, version in depsfound.items():
+ self._getdependencies(dep, data[pkg]['deps'], version, d, ud, fetchedlist=fetchedlist)
- def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest):
+ def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest, toplevel=True):
logger.debug(2, "NPM shrinkwrap file is %s" % data)
+ if toplevel:
+ name = data.get('name', None)
+ if name and name != pkg:
+ for obj in data.get('dependencies', []):
+ if obj == pkg:
+ self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest, False)
+ return
outputurl = "invalid"
if ('resolved' not in data) or (not data['resolved'].startswith('http')):
# will be the case for ${PN}
@@ -211,7 +225,7 @@ class Npm(FetchMethod):
outputurl = runfetchcmd(fetchcmd, d, True)
else:
outputurl = data['resolved']
- self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False)
+ self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
manifest[pkg] = {}
manifest[pkg]['tgz'] = os.path.basename(outputurl).rstrip()
manifest[pkg]['deps'] = {}
@@ -228,7 +242,7 @@ class Npm(FetchMethod):
if 'dependencies' in data:
for obj in data['dependencies']:
logger.debug(2, "Found dep is %s" % str(obj))
- self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps'])
+ self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps'], False)
def download(self, ud, d):
"""Fetch url"""
@@ -239,10 +253,7 @@ class Npm(FetchMethod):
if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
dest = d.getVar("DL_DIR", True)
bb.utils.mkdirhier(dest)
- save_cwd = os.getcwd()
- os.chdir(dest)
- runfetchcmd("tar -xJf %s" % (ud.fullmirror), d)
- os.chdir(save_cwd)
+ runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
return
shwrf = d.getVar('NPM_SHRINKWRAP', True)
@@ -251,14 +262,14 @@ class Npm(FetchMethod):
with open(shwrf) as datafile:
shrinkobj = json.load(datafile)
except:
- logger.warn('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
+ logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
lckdf = d.getVar('NPM_LOCKDOWN', True)
logger.debug(2, "NPM lockdown file is %s" % lckdf)
try:
with open(lckdf) as datafile:
lockdown = json.load(datafile)
except:
- logger.warn('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)
+ logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)
if ('name' not in shrinkobj):
self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
@@ -275,10 +286,8 @@ class Npm(FetchMethod):
if os.path.islink(ud.fullmirror):
os.unlink(ud.fullmirror)
- save_cwd = os.getcwd()
- os.chdir(d.getVar("DL_DIR", True))
+ dldir = d.getVar("DL_DIR", True)
logger.info("Creating tarball of npm data")
- runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d)
- runfetchcmd("touch %s.done" % (ud.fullmirror), d)
- os.chdir(save_cwd)
-
+ runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d,
+ workdir=dldir)
+ runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=dldir)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py
index d051dfdaf..295abf953 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py
@@ -88,23 +88,21 @@ class Osc(FetchMethod):
oscupdatecmd = self._buildosccommand(ud, d, "update")
logger.info("Update "+ ud.url)
# update sources there
- os.chdir(ud.moddir)
logger.debug(1, "Running %s", oscupdatecmd)
bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
- runfetchcmd(oscupdatecmd, d)
+ runfetchcmd(oscupdatecmd, d, workdir=ud.moddir)
else:
oscfetchcmd = self._buildosccommand(ud, d, "fetch")
logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
logger.debug(1, "Running %s", oscfetchcmd)
bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
- runfetchcmd(oscfetchcmd, d)
+ runfetchcmd(oscfetchcmd, d, workdir=ud.pkgdir)
- os.chdir(os.path.join(ud.pkgdir + ud.path))
# tar them up to a defined filename
- runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath])
+ runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d,
+ cleanup=[ud.localpath], workdir=os.path.join(ud.pkgdir + ud.path))
def supports_srcrev(self):
return False
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py
index 3a10c7ca3..50cb47909 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py
@@ -1,14 +1,12 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
+BitBake 'Fetch' implementation for perforce
"""
# Copyright (C) 2003, 2004 Chris Larson
+# Copyright (C) 2016 Kodak Alaris, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -25,9 +23,7 @@ BitBake build tools.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-from future_builtins import zip
import os
-import subprocess
import logging
import bb
from bb import data
@@ -37,151 +33,178 @@ from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
class Perforce(FetchMethod):
+ """ Class to fetch from perforce repositories """
def supports(self, ud, d):
+ """ Check to see if a given url can be fetched with perforce. """
return ud.type in ['p4']
- def doparse(url, d):
- parm = {}
- path = url.split("://")[1]
- delim = path.find("@");
+ def urldata_init(self, ud, d):
+ """
+ Initialize perforce specific variables within url data. If P4CONFIG is
+ provided by the env, use it. If P4PORT is specified by the recipe, use
+ its values, which may override the settings in P4CONFIG.
+ """
+ ud.basecmd = d.getVar('FETCHCMD_p4', True)
+ if not ud.basecmd:
+ ud.basecmd = "/usr/bin/env p4"
+
+ ud.dldir = d.getVar('P4DIR', True)
+ if not ud.dldir:
+ ud.dldir = '%s/%s' % (d.getVar('DL_DIR', True), 'p4')
+
+ path = ud.url.split('://')[1]
+ path = path.split(';')[0]
+ delim = path.find('@');
if delim != -1:
- (user, pswd, host, port) = path.split('@')[0].split(":")
- path = path.split('@')[1]
+ (ud.user, ud.pswd) = path.split('@')[0].split(':')
+ ud.path = path.split('@')[1]
else:
- (host, port) = d.getVar('P4PORT', False).split(':')
- user = ""
- pswd = ""
-
- if path.find(";") != -1:
- keys=[]
- values=[]
- plist = path.split(';')
- for item in plist:
- if item.count('='):
- (key, value) = item.split('=')
- keys.append(key)
- values.append(value)
-
- parm = dict(zip(keys, values))
- path = "//" + path.split(';')[0]
- host += ":%s" % (port)
- parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
-
- return host, path, user, pswd, parm
- doparse = staticmethod(doparse)
-
- def getcset(d, depot, host, user, pswd, parm):
- p4opt = ""
- if "cset" in parm:
- return parm["cset"];
- if user:
- p4opt += " -u %s" % (user)
- if pswd:
- p4opt += " -P %s" % (pswd)
- if host:
- p4opt += " -p %s" % (host)
-
- p4date = d.getVar("P4DATE", True)
- if "revision" in parm:
- depot += "#%s" % (parm["revision"])
- elif "label" in parm:
- depot += "@%s" % (parm["label"])
- elif p4date:
- depot += "@%s" % (p4date)
-
- p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
- logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
- p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
- cset = p4file.strip()
- logger.debug(1, "READ %s", cset)
- if not cset:
- return -1
-
- return cset.split(' ')[1]
- getcset = staticmethod(getcset)
+ ud.path = path
- def urldata_init(self, ud, d):
- (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)
+ ud.usingp4config = False
+ p4port = d.getVar('P4PORT', True)
- base_path = path.replace('/...', '')
- base_path = self._strip_leading_slashes(base_path)
-
- if "label" in parm:
- version = parm["label"]
+ if p4port:
+ logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port)
+ ud.host = p4port
+ else:
+ logger.debug(1, 'Trying to use P4CONFIG to automatically set P4PORT...')
+ ud.usingp4config = True
+ p4cmd = '%s info | grep "Server address"' % ud.basecmd
+ bb.fetch2.check_network_access(d, p4cmd)
+ ud.host = runfetchcmd(p4cmd, d, True)
+ ud.host = ud.host.split(': ')[1].strip()
+ logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
+ if not ud.host:
+ raise FetchError('Could not determine P4PORT from P4CONFIG')
+
+ if ud.path.find('/...') >= 0:
+ ud.pathisdir = True
else:
- version = Perforce.getcset(d, path, host, user, pswd, parm)
+ ud.pathisdir = False
- ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base_path.replace('/', '.'), version), d)
+ cleanedpath = ud.path.replace('/...', '').replace('/', '.')
+ cleanedhost = ud.host.replace(':', '.')
+ ud.pkgdir = os.path.join(ud.dldir, cleanedhost, cleanedpath)
- def download(self, ud, d):
+ ud.setup_revisons(d)
+
+ ud.localfile = data.expand('%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, ud.revision), d)
+
+ def _buildp4command(self, ud, d, command, depot_filename=None):
"""
- Fetch urls
+ Build a p4 commandline. Valid commands are "changes", "print", and
+ "files". depot_filename is the full path to the file in the depot
+ including the trailing '#rev' value.
"""
+ p4opt = ""
+
+ if ud.user:
+ p4opt += ' -u "%s"' % (ud.user)
- (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)
+ if ud.pswd:
+ p4opt += ' -P "%s"' % (ud.pswd)
- if depot.find('/...') != -1:
- path = depot[:depot.find('/...')]
+ if ud.host and not ud.usingp4config:
+ p4opt += ' -p %s' % (ud.host)
+
+ if hasattr(ud, 'revision') and ud.revision:
+ pathnrev = '%s@%s' % (ud.path, ud.revision)
+ else:
+ pathnrev = '%s' % (ud.path)
+
+ if depot_filename:
+ if ud.pathisdir: # Remove leading path to obtain filename
+ filename = depot_filename[len(ud.path)-1:]
+ else:
+ filename = depot_filename[depot_filename.rfind('/'):]
+ filename = filename[:filename.find('#')] # Remove trailing '#rev'
+
+ if command == 'changes':
+ p4cmd = '%s%s changes -m 1 //%s' % (ud.basecmd, p4opt, pathnrev)
+ elif command == 'print':
+ if depot_filename != None:
+ p4cmd = '%s%s print -o "p4/%s" "%s"' % (ud.basecmd, p4opt, filename, depot_filename)
+ else:
+ raise FetchError('No depot file name provided to p4 %s' % command, ud.url)
+ elif command == 'files':
+ p4cmd = '%s%s files //%s' % (ud.basecmd, p4opt, pathnrev)
else:
- path = depot[:depot.rfind('/')]
+ raise FetchError('Invalid p4 command %s' % command, ud.url)
- module = parm.get('module', os.path.basename(path))
+ return p4cmd
- # Get the p4 command
- p4opt = ""
- if user:
- p4opt += " -u %s" % (user)
+ def _p4listfiles(self, ud, d):
+ """
+ Return a list of the file names which are present in the depot using the
+ 'p4 files' command, including trailing '#rev' file revision indicator
+ """
+ p4cmd = self._buildp4command(ud, d, 'files')
+ bb.fetch2.check_network_access(d, p4cmd)
+ p4fileslist = runfetchcmd(p4cmd, d, True)
+ p4fileslist = [f.rstrip() for f in p4fileslist.splitlines()]
+
+ if not p4fileslist:
+ raise FetchError('Unable to fetch listing of p4 files from %s@%s' % (ud.host, ud.path))
+
+ count = 0
+ filelist = []
- if pswd:
- p4opt += " -P %s" % (pswd)
+ for filename in p4fileslist:
+ item = filename.split(' - ')
+ lastaction = item[1].split()
+ logger.debug(1, 'File: %s Last Action: %s' % (item[0], lastaction[0]))
+ if lastaction[0] == 'delete':
+ continue
+ filelist.append(item[0])
- if host:
- p4opt += " -p %s" % (host)
+ return filelist
- p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
+ def download(self, ud, d):
+ """ Get the list of files, fetch each one """
+ filelist = self._p4listfiles(ud, d)
+ if not filelist:
+ raise FetchError('No files found in depot %s@%s' % (ud.host, ud.path))
- # create temp directory
- logger.debug(2, "Fetch: creating temporary directory")
- bb.utils.mkdirhier(d.expand('${WORKDIR}'))
- mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
- tmpfile, errors = bb.process.run(mktemp)
- tmpfile = tmpfile.strip()
- if not tmpfile:
- raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)
+ bb.utils.remove(ud.pkgdir, True)
+ bb.utils.mkdirhier(ud.pkgdir)
- if "label" in parm:
- depot = "%s@%s" % (depot, parm["label"])
- else:
- cset = Perforce.getcset(d, depot, host, user, pswd, parm)
- depot = "%s@%s" % (depot, cset)
+ for afile in filelist:
+ p4fetchcmd = self._buildp4command(ud, d, 'print', afile)
+ bb.fetch2.check_network_access(d, p4fetchcmd)
+ runfetchcmd(p4fetchcmd, d, workdir=ud.pkgdir)
- os.chdir(tmpfile)
- logger.info("Fetch " + ud.url)
- logger.info("%s%s files %s", p4cmd, p4opt, depot)
- p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
- p4file = [f.rstrip() for f in p4file.splitlines()]
+ runfetchcmd('tar -czf %s p4' % (ud.localpath), d, cleanup=[ud.localpath], workdir=ud.pkgdir)
- if not p4file:
- raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)
+ def clean(self, ud, d):
+ """ Cleanup p4 specific files and dirs"""
+ bb.utils.remove(ud.localpath)
+ bb.utils.remove(ud.pkgdir, True)
- count = 0
+ def supports_srcrev(self):
+ return True
- for file in p4file:
- list = file.split()
+ def _revision_key(self, ud, d, name):
+ """ Return a unique key for the url """
+ return 'p4:%s' % ud.pkgdir
- if list[2] == "delete":
- continue
+ def _latest_revision(self, ud, d, name):
+ """ Return the latest upstream scm revision number """
+ p4cmd = self._buildp4command(ud, d, "changes")
+ bb.fetch2.check_network_access(d, p4cmd)
+ tip = runfetchcmd(p4cmd, d, True)
+
+ if not tip:
+ raise FetchError('Could not determine the latest perforce changelist')
- dest = list[0][len(path)+1:]
- where = dest.find("#")
+ tipcset = tip.split(' ')[1]
+ logger.debug(1, 'p4 tip found to be changelist %s' % tipcset)
+ return tipcset
- subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True)
- count = count + 1
+ def sortable_revision(self, ud, d, name):
+ """ Return a sortable revision number """
+ return False, self._build_revision(ud, d)
- if count == 0:
- logger.error()
- raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)
+ def _build_revision(self, ud, d):
+ return ud.revision
- runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
- # cleanup
- bb.utils.prunedir(tmpfile)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py
index 21678eb7d..ecc6e68e9 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py
@@ -69,24 +69,23 @@ class Repo(FetchMethod):
else:
username = ""
- bb.utils.mkdirhier(os.path.join(codir, "repo"))
- os.chdir(os.path.join(codir, "repo"))
- if not os.path.exists(os.path.join(codir, "repo", ".repo")):
+ repodir = os.path.join(codir, "repo")
+ bb.utils.mkdirhier(repodir)
+ if not os.path.exists(os.path.join(repodir, ".repo")):
bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
- runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
+ runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d, workdir=repodir)
bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
- runfetchcmd("repo sync", d)
- os.chdir(codir)
+ runfetchcmd("repo sync", d, workdir=repodir)
scmdata = ud.parm.get("scmdata", "")
if scmdata == "keep":
tar_flags = ""
else:
- tar_flags = "--exclude '.repo' --exclude '.git'"
+ tar_flags = "--exclude='.repo' --exclude='.git'"
# Create a cache
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d, workdir=codir)
def supports_srcrev(self):
return False
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py
index cb2f753a8..7989fccc7 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py
@@ -61,8 +61,7 @@ SRC_URI = "sftp://user@host.example.com/dir/path.file.txt"
import os
import bb
-import urllib
-import commands
+import urllib.request, urllib.parse, urllib.error
from bb import data
from bb.fetch2 import URI
from bb.fetch2 import FetchMethod
@@ -93,7 +92,7 @@ class SFTP(FetchMethod):
else:
ud.basename = os.path.basename(ud.path)
- ud.localfile = data.expand(urllib.unquote(ud.basename), d)
+ ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d)
def download(self, ud, d):
"""Fetch urls"""
@@ -121,8 +120,7 @@ class SFTP(FetchMethod):
remote = '%s%s:%s' % (user, urlo.hostname, path)
- cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote),
- commands.mkarg(lpath))
+ cmd = '%s %s %s %s' % (basecmd, port, remote, lpath)
bb.fetch2.check_network_access(d, cmd, ud.url)
runfetchcmd(cmd, d)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py
index 635578a71..56f9b7eb3 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py
@@ -114,12 +114,10 @@ class SSH(FetchMethod):
fr = host
fr += ':%s' % path
-
- import commands
cmd = 'scp -B -r %s %s %s/' % (
portarg,
- commands.mkarg(fr),
- commands.mkarg(dldir)
+ fr,
+ dldir
)
bb.fetch2.check_network_access(d, cmd, urldata.url)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py
index 8a291935c..6ca79d35d 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py
@@ -126,35 +126,32 @@ class Svn(FetchMethod):
if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
svnupdatecmd = self._buildsvncommand(ud, d, "update")
logger.info("Update " + ud.url)
- # update sources there
- os.chdir(ud.moddir)
# We need to attempt to run svn upgrade first in case its an older working format
try:
- runfetchcmd(ud.basecmd + " upgrade", d)
+ runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
except FetchError:
pass
logger.debug(1, "Running %s", svnupdatecmd)
bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
- runfetchcmd(svnupdatecmd, d)
+ runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
else:
svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
logger.debug(1, "Running %s", svnfetchcmd)
bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
- runfetchcmd(svnfetchcmd, d)
+ runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)
scmdata = ud.parm.get("scmdata", "")
if scmdata == "keep":
tar_flags = ""
else:
- tar_flags = "--exclude '.svn'"
+ tar_flags = "--exclude='.svn'"
- os.chdir(ud.pkgdir)
# tar them up to a defined filename
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d, cleanup = [ud.localpath])
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
+ cleanup=[ud.localpath], workdir=ud.pkgdir)
def clean(self, ud, d):
""" Clean SVN specific files and dirs """
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py
index 8bc9e93ca..ecb946aa8 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py
@@ -31,7 +31,8 @@ import subprocess
import os
import logging
import bb
-import urllib
+import bb.progress
+import urllib.request, urllib.parse, urllib.error
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
@@ -41,6 +42,27 @@ from bb.utils import export_proxies
from bs4 import BeautifulSoup
from bs4 import SoupStrainer
+class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
+ """
+ Extract progress information from wget output.
+ Note: relies on --progress=dot (with -v or without -q/-nv) being
+ specified on the wget command line.
+ """
+ def __init__(self, d):
+ super(WgetProgressHandler, self).__init__(d)
+ # Send an initial progress event so the bar gets shown
+ self._fire_progress(0)
+
+ def writeline(self, line):
+ percs = re.findall(r'(\d+)%\s+([\d.]+[A-Z])', line)
+ if percs:
+ progress = int(percs[-1][0])
+ rate = percs[-1][1] + '/s'
+ self.update(progress, rate)
+ return False
+ return True
+
+
class Wget(FetchMethod):
"""Class to fetch urls via 'wget'"""
def supports(self, ud, d):
@@ -62,17 +84,19 @@ class Wget(FetchMethod):
else:
ud.basename = os.path.basename(ud.path)
- ud.localfile = data.expand(urllib.unquote(ud.basename), d)
+ ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d)
if not ud.localfile:
- ud.localfile = data.expand(urllib.unquote(ud.host + ud.path).replace("/", "."), d)
+ ud.localfile = data.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."), d)
- self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"
+ self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate"
def _runwget(self, ud, d, command, quiet):
+ progresshandler = WgetProgressHandler(d)
+
logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
bb.fetch2.check_network_access(d, command)
- runfetchcmd(command, d, quiet)
+ runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler)
def download(self, ud, d):
"""Fetch urls"""
@@ -84,6 +108,10 @@ class Wget(FetchMethod):
bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
fetchcmd += " -O " + dldir + os.sep + ud.localfile
+ if ud.user:
+ up = ud.user.split(":")
+ fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (up[0],up[1])
+
uri = ud.url.split(";")[0]
if os.path.exists(ud.localpath):
# file exists, but we didnt complete it.. trying again..
@@ -104,12 +132,12 @@ class Wget(FetchMethod):
return True
- def checkstatus(self, fetch, ud, d):
- import urllib2, socket, httplib
- from urllib import addinfourl
+ def checkstatus(self, fetch, ud, d, try_again=True):
+ import urllib.request, urllib.error, urllib.parse, socket, http.client
+ from urllib.response import addinfourl
from bb.fetch2 import FetchConnectionCache
- class HTTPConnectionCache(httplib.HTTPConnection):
+ class HTTPConnectionCache(http.client.HTTPConnection):
if fetch.connection_cache:
def connect(self):
"""Connect to the host and port specified in __init__."""
@@ -125,7 +153,7 @@ class Wget(FetchMethod):
if self._tunnel_host:
self._tunnel()
- class CacheHTTPHandler(urllib2.HTTPHandler):
+ class CacheHTTPHandler(urllib.request.HTTPHandler):
def http_open(self, req):
return self.do_open(HTTPConnectionCache, req)
@@ -139,7 +167,7 @@ class Wget(FetchMethod):
- geturl(): return the original request URL
- code: HTTP status code
"""
- host = req.get_host()
+ host = req.host
if not host:
raise urlllib2.URLError('no host given')
@@ -147,7 +175,7 @@ class Wget(FetchMethod):
h.set_debuglevel(self._debuglevel)
headers = dict(req.unredirected_hdrs)
- headers.update(dict((k, v) for k, v in req.headers.items()
+ headers.update(dict((k, v) for k, v in list(req.headers.items())
if k not in headers))
# We want to make an HTTP/1.1 request, but the addinfourl
@@ -164,7 +192,7 @@ class Wget(FetchMethod):
headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0
headers = dict(
- (name.title(), val) for name, val in headers.items())
+ (name.title(), val) for name, val in list(headers.items()))
if req._tunnel_host:
tunnel_headers = {}
@@ -177,12 +205,12 @@ class Wget(FetchMethod):
h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
try:
- h.request(req.get_method(), req.get_selector(), req.data, headers)
- except socket.error, err: # XXX what error?
+ h.request(req.get_method(), req.selector, req.data, headers)
+ except socket.error as err: # XXX what error?
# Don't close connection when cache is enabled.
if fetch.connection_cache is None:
h.close()
- raise urllib2.URLError(err)
+ raise urllib.error.URLError(err)
else:
try:
r = h.getresponse(buffering=True)
@@ -222,7 +250,7 @@ class Wget(FetchMethod):
return resp
- class HTTPMethodFallback(urllib2.BaseHandler):
+ class HTTPMethodFallback(urllib.request.BaseHandler):
"""
Fallback to GET if HEAD is not allowed (405 HTTP error)
"""
@@ -230,11 +258,11 @@ class Wget(FetchMethod):
fp.read()
fp.close()
- newheaders = dict((k,v) for k,v in req.headers.items()
+ newheaders = dict((k,v) for k,v in list(req.headers.items())
if k.lower() not in ("content-length", "content-type"))
- return self.parent.open(urllib2.Request(req.get_full_url(),
+ return self.parent.open(urllib.request.Request(req.get_full_url(),
headers=newheaders,
- origin_req_host=req.get_origin_req_host(),
+ origin_req_host=req.origin_req_host,
unverifiable=True))
"""
@@ -249,38 +277,49 @@ class Wget(FetchMethod):
"""
http_error_406 = http_error_405
- class FixedHTTPRedirectHandler(urllib2.HTTPRedirectHandler):
+ class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
"""
urllib2.HTTPRedirectHandler resets the method to GET on redirect,
when we want to follow redirects using the original method.
"""
def redirect_request(self, req, fp, code, msg, headers, newurl):
- newreq = urllib2.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
+ newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
newreq.get_method = lambda: req.get_method()
return newreq
exported_proxies = export_proxies(d)
handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
if export_proxies:
- handlers.append(urllib2.ProxyHandler())
+ handlers.append(urllib.request.ProxyHandler())
handlers.append(CacheHTTPHandler())
# XXX: Since Python 2.7.9 ssl cert validation is enabled by default
# see PEP-0476, this causes verification errors on some https servers
# so disable by default.
import ssl
if hasattr(ssl, '_create_unverified_context'):
- handlers.append(urllib2.HTTPSHandler(context=ssl._create_unverified_context()))
- opener = urllib2.build_opener(*handlers)
+ handlers.append(urllib.request.HTTPSHandler(context=ssl._create_unverified_context()))
+ opener = urllib.request.build_opener(*handlers)
try:
uri = ud.url.split(";")[0]
- r = urllib2.Request(uri)
+ r = urllib.request.Request(uri)
r.get_method = lambda: "HEAD"
+
+ if ud.user:
+ import base64
+ encodeuser = base64.b64encode(ud.user.encode('utf-8')).decode("utf-8")
+ authheader = "Basic %s" % encodeuser
+ r.add_header("Authorization", authheader)
+
opener.open(r)
- except urllib2.URLError as e:
- # debug for now to avoid spamming the logs in e.g. remote sstate searches
- logger.debug(2, "checkstatus() urlopen failed: %s" % e)
- return False
+ except urllib.error.URLError as e:
+ if try_again:
+ logger.debug(2, "checkstatus: trying again")
+ return self.checkstatus(fetch, ud, d, False)
+ else:
+ # debug for now to avoid spamming the logs in e.g. remote sstate searches
+ logger.debug(2, "checkstatus() urlopen failed: %s" % e)
+ return False
return True
def _parse_path(self, regex, s):
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/main.py b/import-layers/yocto-poky/bitbake/lib/bb/main.py
index e30217369..f2f59f670 100755
--- a/import-layers/yocto-poky/bitbake/lib/bb/main.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/main.py
@@ -27,6 +27,7 @@ import sys
import logging
import optparse
import warnings
+import fcntl
import bb
from bb import event
@@ -100,11 +101,12 @@ def import_extension_module(pkg, modulename, checkattr):
# Dynamically load the UI based on the ui name. Although we
# suggest a fixed set this allows you to have flexibility in which
# ones are available.
- module = __import__(pkg.__name__, fromlist = [modulename])
+ module = __import__(pkg.__name__, fromlist=[modulename])
return getattr(module, modulename)
except AttributeError:
- raise BBMainException('FATAL: Unable to import extension module "%s" from %s. Valid extension modules: %s' % (modulename, pkg.__name__, present_options(list_extension_modules(pkg, checkattr))))
-
+ modules = present_options(list_extension_modules(pkg, checkattr))
+ raise BBMainException('FATAL: Unable to import extension module "%s" from %s. '
+ 'Valid extension modules: %s' % (modulename, pkg.__name__, modules))
# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others"""
warnlog = logging.getLogger("BitBake.Warnings")
@@ -115,7 +117,7 @@ def _showwarning(message, category, filename, lineno, file=None, line=None):
_warnings_showwarning(message, category, filename, lineno, file, line)
else:
s = warnings.formatwarning(message, category, filename, lineno)
- warnlog.warn(s)
+ warnlog.warning(s)
warnings.showwarning = _showwarning
warnings.filterwarnings("ignore")
@@ -129,136 +131,189 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
def parseCommandLine(self, argv=sys.argv):
parser = optparse.OptionParser(
- formatter = BitbakeHelpFormatter(),
- version = "BitBake Build Tool Core version %s" % bb.__version__,
- usage = """%prog [options] [recipename/target recipe:do_task ...]
+ formatter=BitbakeHelpFormatter(),
+ version="BitBake Build Tool Core version %s" % bb.__version__,
+ usage="""%prog [options] [recipename/target recipe:do_task ...]
Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
will provide the layer, BBFILES and other configuration information.""")
- parser.add_option("-b", "--buildfile", help = "Execute tasks from a specific .bb recipe directly. WARNING: Does not handle any dependencies from other recipes.",
- action = "store", dest = "buildfile", default = None)
+ parser.add_option("-b", "--buildfile", action="store", dest="buildfile", default=None,
+ help="Execute tasks from a specific .bb recipe directly. WARNING: Does "
+ "not handle any dependencies from other recipes.")
+
+ parser.add_option("-k", "--continue", action="store_false", dest="abort", default=True,
+ help="Continue as much as possible after an error. While the target that "
+ "failed and anything depending on it cannot be built, as much as "
+ "possible will be built before stopping.")
- parser.add_option("-k", "--continue", help = "Continue as much as possible after an error. While the target that failed and anything depending on it cannot be built, as much as possible will be built before stopping.",
- action = "store_false", dest = "abort", default = True)
+ parser.add_option("-a", "--tryaltconfigs", action="store_true",
+ dest="tryaltconfigs", default=False,
+ help="Continue with builds by trying to use alternative providers "
+ "where possible.")
- parser.add_option("-a", "--tryaltconfigs", help = "Continue with builds by trying to use alternative providers where possible.",
- action = "store_true", dest = "tryaltconfigs", default = False)
+ parser.add_option("-f", "--force", action="store_true", dest="force", default=False,
+ help="Force the specified targets/task to run (invalidating any "
+ "existing stamp file).")
- parser.add_option("-f", "--force", help = "Force the specified targets/task to run (invalidating any existing stamp file).",
- action = "store_true", dest = "force", default = False)
+ parser.add_option("-c", "--cmd", action="store", dest="cmd",
+ help="Specify the task to execute. The exact options available "
+ "depend on the metadata. Some examples might be 'compile'"
+ " or 'populate_sysroot' or 'listtasks' may give a list of "
+ "the tasks available.")
- parser.add_option("-c", "--cmd", help = "Specify the task to execute. The exact options available depend on the metadata. Some examples might be 'compile' or 'populate_sysroot' or 'listtasks' may give a list of the tasks available.",
- action = "store", dest = "cmd")
+ parser.add_option("-C", "--clear-stamp", action="store", dest="invalidate_stamp",
+ help="Invalidate the stamp for the specified task such as 'compile' "
+ "and then run the default task for the specified target(s).")
- parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified task such as 'compile' and then run the default task for the specified target(s).",
- action = "store", dest = "invalidate_stamp")
+ parser.add_option("-r", "--read", action="append", dest="prefile", default=[],
+ help="Read the specified file before bitbake.conf.")
- parser.add_option("-r", "--read", help = "Read the specified file before bitbake.conf.",
- action = "append", dest = "prefile", default = [])
+ parser.add_option("-R", "--postread", action="append", dest="postfile", default=[],
+ help="Read the specified file after bitbake.conf.")
- parser.add_option("-R", "--postread", help = "Read the specified file after bitbake.conf.",
- action = "append", dest = "postfile", default = [])
+ parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False,
+ help="Output more log message data to the terminal.")
- parser.add_option("-v", "--verbose", help = "Output more log message data to the terminal.",
- action = "store_true", dest = "verbose", default = False)
+ parser.add_option("-D", "--debug", action="count", dest="debug", default=0,
+ help="Increase the debug level. You can specify this more than once.")
- parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
- action = "count", dest="debug", default = 0)
+ parser.add_option("-q", "--quiet", action="store_true", dest="quiet", default=False,
+ help="Output less log message data to the terminal.")
- parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.",
- action = "store_true", dest = "dry_run", default = False)
+ parser.add_option("-n", "--dry-run", action="store_true", dest="dry_run", default=False,
+ help="Don't execute, just go through the motions.")
- parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. The SIGNATURE_HANDLER parameter is passed to the handler. Two common values are none and printdiff but the handler may define more/less. none means only dump the signature, printdiff means compare the dumped signature with the cached one.",
- action = "append", dest = "dump_signatures", default = [], metavar="SIGNATURE_HANDLER")
+ parser.add_option("-S", "--dump-signatures", action="append", dest="dump_signatures",
+ default=[], metavar="SIGNATURE_HANDLER",
+ help="Dump out the signature construction information, with no task "
+ "execution. The SIGNATURE_HANDLER parameter is passed to the "
+ "handler. Two common values are none and printdiff but the handler "
+ "may define more/less. none means only dump the signature, printdiff"
+ " means compare the dumped signature with the cached one.")
- parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.",
- action = "store_true", dest = "parse_only", default = False)
+ parser.add_option("-p", "--parse-only", action="store_true",
+ dest="parse_only", default=False,
+ help="Quit after parsing the BB recipes.")
- parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.",
- action = "store_true", dest = "show_versions", default = False)
+ parser.add_option("-s", "--show-versions", action="store_true",
+ dest="show_versions", default=False,
+ help="Show current and preferred versions of all recipes.")
- parser.add_option("-e", "--environment", help = "Show the global or per-recipe environment complete with information about where variables were set/changed.",
- action = "store_true", dest = "show_environment", default = False)
+ parser.add_option("-e", "--environment", action="store_true",
+ dest="show_environment", default=False,
+ help="Show the global or per-recipe environment complete with information"
+ " about where variables were set/changed.")
- parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.",
- action = "store_true", dest = "dot_graph", default = False)
+ parser.add_option("-g", "--graphviz", action="store_true", dest="dot_graph", default=False,
+ help="Save dependency tree information for the specified "
+ "targets in the dot syntax.")
- parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
- action = "append", dest = "extra_assume_provided", default = [])
+ parser.add_option("-I", "--ignore-deps", action="append",
+ dest="extra_assume_provided", default=[],
+ help="Assume these dependencies don't exist and are already provided "
+ "(equivalent to ASSUME_PROVIDED). Useful to make dependency "
+ "graphs more appealing")
- parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
- action = "append", dest = "debug_domains", default = [])
+ parser.add_option("-l", "--log-domains", action="append", dest="debug_domains", default=[],
+ help="Show debug logging for the specified logging domains")
- parser.add_option("-P", "--profile", help = "Profile the command and save reports.",
- action = "store_true", dest = "profile", default = False)
+ parser.add_option("-P", "--profile", action="store_true", dest="profile", default=False,
+ help="Profile the command and save reports.")
- env_ui = os.environ.get('BITBAKE_UI', None)
- default_ui = env_ui or 'knotty'
# @CHOICES@ is substituted out by BitbakeHelpFormatter above
- parser.add_option("-u", "--ui", help = "The user interface to use (@CHOICES@ - default %default).",
- action="store", dest="ui", default=default_ui)
+ parser.add_option("-u", "--ui", action="store", dest="ui",
+ default=os.environ.get('BITBAKE_UI', 'knotty'),
+ help="The user interface to use (@CHOICES@ - default %default).")
# @CHOICES@ is substituted out by BitbakeHelpFormatter above
- parser.add_option("-t", "--servertype", help = "Choose which server type to use (@CHOICES@ - default %default).",
- action = "store", dest = "servertype", default = "process")
+ parser.add_option("-t", "--servertype", action="store", dest="servertype",
+ default=["process", "xmlrpc"]["BBSERVER" in os.environ],
+ help="Choose which server type to use (@CHOICES@ - default %default).")
+
+ parser.add_option("", "--token", action="store", dest="xmlrpctoken",
+ default=os.environ.get("BBTOKEN"),
+ help="Specify the connection token to be used when connecting "
+ "to a remote server.")
- parser.add_option("", "--token", help = "Specify the connection token to be used when connecting to a remote server.",
- action = "store", dest = "xmlrpctoken")
+ parser.add_option("", "--revisions-changed", action="store_true",
+ dest="revisions_changed", default=False,
+ help="Set the exit code depending on whether upstream floating "
+ "revisions have changed or not.")
- parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.",
- action = "store_true", dest = "revisions_changed", default = False)
+ parser.add_option("", "--server-only", action="store_true",
+ dest="server_only", default=False,
+ help="Run bitbake without a UI, only starting a server "
+ "(cooker) process.")
- parser.add_option("", "--server-only", help = "Run bitbake without a UI, only starting a server (cooker) process.",
- action = "store_true", dest = "server_only", default = False)
+ parser.add_option("", "--foreground", action="store_true",
+ help="Run bitbake server in foreground.")
- parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to.",
- action = "store", dest = "bind", default = False)
+ parser.add_option("-B", "--bind", action="store", dest="bind", default=False,
+ help="The name/address for the bitbake server to bind to.")
- parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks. sstate will be ignored and everything needed, built.",
- action = "store_true", dest = "nosetscene", default = False)
+ parser.add_option("-T", "--idle-timeout", type=int,
+ default=int(os.environ.get("BBTIMEOUT", "0")),
+ help="Set timeout to unload bitbake server due to inactivity")
- parser.add_option("", "--setscene-only", help = "Only run setscene tasks, don't run any real tasks.",
- action = "store_true", dest = "setsceneonly", default = False)
+ parser.add_option("", "--no-setscene", action="store_true",
+ dest="nosetscene", default=False,
+ help="Do not run any setscene tasks. sstate will be ignored and "
+ "everything needed, built.")
- parser.add_option("", "--remote-server", help = "Connect to the specified server.",
- action = "store", dest = "remote_server", default = False)
+ parser.add_option("", "--setscene-only", action="store_true",
+ dest="setsceneonly", default=False,
+ help="Only run setscene tasks, don't run any real tasks.")
- parser.add_option("-m", "--kill-server", help = "Terminate the remote server.",
- action = "store_true", dest = "kill_server", default = False)
+ parser.add_option("", "--remote-server", action="store", dest="remote_server",
+ default=os.environ.get("BBSERVER"),
+ help="Connect to the specified server.")
- parser.add_option("", "--observe-only", help = "Connect to a server as an observing-only client.",
- action = "store_true", dest = "observe_only", default = False)
+ parser.add_option("-m", "--kill-server", action="store_true",
+ dest="kill_server", default=False,
+ help="Terminate the remote server.")
- parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.",
- action = "store_true", dest = "status_only", default = False)
+ parser.add_option("", "--observe-only", action="store_true",
+ dest="observe_only", default=False,
+ help="Connect to a server as an observing-only client.")
- parser.add_option("-w", "--write-log", help = "Writes the event log of the build to a bitbake event json file. Use '' (empty string) to assign the name automatically.",
- action = "store", dest = "writeeventlog")
+ parser.add_option("", "--status-only", action="store_true",
+ dest="status_only", default=False,
+ help="Check the status of the remote bitbake server.")
+
+ parser.add_option("-w", "--write-log", action="store", dest="writeeventlog",
+ default=os.environ.get("BBEVENTLOG"),
+ help="Writes the event log of the build to a bitbake event json file. "
+ "Use '' (empty string) to assign the name automatically.")
options, targets = parser.parse_args(argv)
- # some environmental variables set also configuration options
- if "BBSERVER" in os.environ:
- options.servertype = "xmlrpc"
- options.remote_server = os.environ["BBSERVER"]
+ if options.quiet and options.verbose:
+ parser.error("options --quiet and --verbose are mutually exclusive")
+
+ if options.quiet and options.debug:
+ parser.error("options --quiet and --debug are mutually exclusive")
- if "BBTOKEN" in os.environ:
- options.xmlrpctoken = os.environ["BBTOKEN"]
+ # use configuration files from environment variables
+ if "BBPRECONF" in os.environ:
+ options.prefile.append(os.environ["BBPRECONF"])
- if "BBEVENTLOG" in os.environ:
- options.writeeventlog = os.environ["BBEVENTLOG"]
+ if "BBPOSTCONF" in os.environ:
+ options.postfile.append(os.environ["BBPOSTCONF"])
# fill in proper log name if not supplied
if options.writeeventlog is not None and len(options.writeeventlog) == 0:
- import datetime
- options.writeeventlog = "bitbake_eventlog_%s.json" % datetime.datetime.now().strftime("%Y%m%d%H%M%S")
+ from datetime import datetime
+ eventlog = "bitbake_eventlog_%s.json" % datetime.now().strftime("%Y%m%d%H%M%S")
+ options.writeeventlog = eventlog
# if BBSERVER says to autodetect, let's do that
if options.remote_server:
- [host, port] = options.remote_server.split(":", 2)
- port = int(port)
+ port = -1
+ if options.remote_server != 'autostart':
+ host, port = options.remote_server.split(":", 2)
+ port = int(port)
# use automatic port if port set to -1, means read it from
# the bitbake.lock file; this is a bit tricky, but we always expect
# to be in the base of the build directory if we need to have a
@@ -275,18 +330,20 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
lf.close()
options.remote_server = remotedef
except Exception as e:
- raise BBMainException("Failed to read bitbake.lock (%s), invalid port" % str(e))
+ if options.remote_server != 'autostart':
+ raise BBMainException("Failed to read bitbake.lock (%s), invalid port" % str(e))
return options, targets[1:]
def start_server(servermodule, configParams, configuration, features):
server = servermodule.BitBakeServer()
- single_use = not configParams.server_only
+ single_use = not configParams.server_only and os.getenv('BBSERVER') != 'autostart'
if configParams.bind:
(host, port) = configParams.bind.split(':')
- server.initServer((host, int(port)), single_use)
- configuration.interface = [ server.serverImpl.host, server.serverImpl.port ]
+ server.initServer((host, int(port)), single_use=single_use,
+ idle_timeout=configParams.idle_timeout)
+ configuration.interface = [server.serverImpl.host, server.serverImpl.port]
else:
server.initServer(single_use=single_use)
configuration.interface = []
@@ -299,20 +356,17 @@ def start_server(servermodule, configParams, configuration, features):
server.addcooker(cooker)
server.saveConnectionDetails()
except Exception as e:
- exc_info = sys.exc_info()
while hasattr(server, "event_queue"):
- try:
- import queue
- except ImportError:
- import Queue as queue
+ import queue
try:
event = server.event_queue.get(block=False)
except (queue.Empty, IOError):
break
if isinstance(event, logging.LogRecord):
logger.handle(event)
- raise exc_info[1], None, exc_info[2]
- server.detach()
+ raise
+ if not configParams.foreground:
+ server.detach()
cooker.lock.close()
return server
@@ -328,7 +382,10 @@ def bitbake_main(configParams, configuration):
# updates to log files for use with tail
try:
if sys.stdout.name == '<stdout>':
- sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
+ # Reopen with O_SYNC (unbuffered)
+ fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL)
+ fl |= os.O_SYNC
+ fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl)
except:
pass
@@ -345,10 +402,21 @@ def bitbake_main(configParams, configuration):
if not configParams.bind:
raise BBMainException("FATAL: The '--server-only' option requires a name/address "
"to bind to with the -B option.\n")
+ else:
+ try:
+ #Checking that the port is a number
+ int(configParams.bind.split(":")[1])
+ except (ValueError,IndexError):
+ raise BBMainException(
+ "FATAL: Malformed host:port bind parameter")
if configParams.remote_server:
raise BBMainException("FATAL: The '--server-only' option conflicts with %s.\n" %
("the BBSERVER environment variable" if "BBSERVER" in os.environ \
- else "the '--remote-server' option" ))
+ else "the '--remote-server' option"))
+
+ elif configParams.foreground:
+ raise BBMainException("FATAL: The '--foreground' option can only be used "
+ "with --server-only.\n")
if configParams.bind and configParams.servertype != "xmlrpc":
raise BBMainException("FATAL: If '-B' or '--bind' is defined, we must "
@@ -363,7 +431,8 @@ def bitbake_main(configParams, configuration):
"connecting to a server.\n")
if configParams.kill_server and not configParams.remote_server:
- raise BBMainException("FATAL: '--kill-server' can only be used to terminate a remote server")
+ raise BBMainException("FATAL: '--kill-server' can only be used to "
+ "terminate a remote server")
if "BBDEBUG" in os.environ:
level = int(os.environ["BBDEBUG"])
@@ -371,7 +440,7 @@ def bitbake_main(configParams, configuration):
configuration.debug = level
bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
- configuration.debug_domains)
+ configuration.debug_domains)
# Ensure logging messages get sent to the UI as events
handler = bb.event.LogHandler()
@@ -399,8 +468,17 @@ def bitbake_main(configParams, configuration):
server = start_server(servermodule, configParams, configuration, featureset)
bb.event.ui_queue = []
else:
+ if os.getenv('BBSERVER') == 'autostart':
+ if configParams.remote_server == 'autostart' or \
+ not servermodule.check_connection(configParams.remote_server, timeout=2):
+ configParams.bind = 'localhost:0'
+ srv = start_server(servermodule, configParams, configuration, featureset)
+ configParams.remote_server = '%s:%d' % tuple(configuration.interface)
+ bb.event.ui_queue = []
+
# we start a stub server that is actually a XMLRPClient that connects to a real server
- server = servermodule.BitBakeXMLRPCClient(configParams.observe_only, configParams.xmlrpctoken)
+ server = servermodule.BitBakeXMLRPCClient(configParams.observe_only,
+ configParams.xmlrpctoken)
server.saveConnectionDetails(configParams.remote_server)
@@ -429,12 +507,16 @@ def bitbake_main(configParams, configuration):
return 0
try:
- return ui_module.main(server_connection.connection, server_connection.events, configParams)
+ return ui_module.main(server_connection.connection, server_connection.events,
+ configParams)
finally:
bb.event.ui_queue = []
server_connection.terminate()
else:
- print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
+ print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host,
+ server.serverImpl.port))
+ if configParams.foreground:
+ server.serverImpl.serve_forever()
return 0
return 1
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/monitordisk.py b/import-layers/yocto-poky/bitbake/lib/bb/monitordisk.py
index 466523c6e..203c40504 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/monitordisk.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/monitordisk.py
@@ -220,7 +220,7 @@ class diskMonitor:
if minSpace and freeSpace < minSpace:
# Always show warning, the self.checked would always be False if the action is WARN
if self.preFreeS[k] == 0 or self.preFreeS[k] - freeSpace > self.spaceInterval and not self.checked[k]:
- logger.warn("The free space of %s (%s) is running low (%.3fGB left)" % \
+ logger.warning("The free space of %s (%s) is running low (%.3fGB left)" % \
(path, dev, freeSpace / 1024 / 1024 / 1024.0))
self.preFreeS[k] = freeSpace
@@ -246,7 +246,7 @@ class diskMonitor:
continue
# Always show warning, the self.checked would always be False if the action is WARN
if self.preFreeI[k] == 0 or self.preFreeI[k] - freeInode > self.inodeInterval and not self.checked[k]:
- logger.warn("The free inode of %s (%s) is running low (%.3fK left)" % \
+ logger.warning("The free inode of %s (%s) is running low (%.3fK left)" % \
(path, dev, freeInode / 1024.0))
self.preFreeI[k] = freeInode
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/msg.py b/import-layers/yocto-poky/bitbake/lib/bb/msg.py
index 786b5aef4..b7c39fa13 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/msg.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/msg.py
@@ -57,7 +57,7 @@ class BBLogFormatter(logging.Formatter):
}
color_enabled = False
- BASECOLOR, BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(29,38)
+ BASECOLOR, BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = list(range(29,38))
COLORS = {
DEBUG3 : CYAN,
@@ -90,8 +90,9 @@ class BBLogFormatter(logging.Formatter):
if self.color_enabled:
record = self.colorize(record)
msg = logging.Formatter.format(self, record)
-
- if hasattr(record, 'bb_exc_info'):
+ if hasattr(record, 'bb_exc_formatted'):
+ msg += '\n' + ''.join(record.bb_exc_formatted)
+ elif hasattr(record, 'bb_exc_info'):
etype, value, tb = record.bb_exc_info
formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
msg += '\n' + ''.join(formatted)
@@ -181,9 +182,12 @@ def constructLogOptions():
debug_domains["BitBake.%s" % domainarg] = logging.DEBUG - dlevel + 1
return level, debug_domains
-def addDefaultlogFilter(handler, cls = BBLogFilter):
+def addDefaultlogFilter(handler, cls = BBLogFilter, forcelevel=None):
level, debug_domains = constructLogOptions()
+ if forcelevel is not None:
+ level = forcelevel
+
cls(handler, level, debug_domains)
#
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py b/import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py
index 5f55af5ef..fa83b1898 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py
@@ -21,8 +21,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-from __future__ import absolute_import
-from future_builtins import filter
+
import re
import string
import logging
@@ -70,6 +69,33 @@ class ExportNode(AstNode):
def eval(self, data):
data.setVarFlag(self.var, "export", 1, op = 'exported')
+class UnsetNode(AstNode):
+ def __init__(self, filename, lineno, var):
+ AstNode.__init__(self, filename, lineno)
+ self.var = var
+
+ def eval(self, data):
+ loginfo = {
+ 'variable': self.var,
+ 'file': self.filename,
+ 'line': self.lineno,
+ }
+ data.delVar(self.var,**loginfo)
+
+class UnsetFlagNode(AstNode):
+ def __init__(self, filename, lineno, var, flag):
+ AstNode.__init__(self, filename, lineno)
+ self.var = var
+ self.flag = flag
+
+ def eval(self, data):
+ loginfo = {
+ 'variable': self.var,
+ 'file': self.filename,
+ 'line': self.lineno,
+ }
+ data.delVarFlag(self.var, self.flag, **loginfo)
+
class DataNode(AstNode):
"""
Various data related updates. For the sake of sanity
@@ -139,7 +165,7 @@ class DataNode(AstNode):
data.setVar(key, val, parsing=True, **loginfo)
class MethodNode(AstNode):
- tr_tbl = string.maketrans('/.+-@%&', '_______')
+ tr_tbl = str.maketrans('/.+-@%&', '_______')
def __init__(self, filename, lineno, func_name, body, python, fakeroot):
AstNode.__init__(self, filename, lineno)
@@ -271,6 +297,12 @@ def handleInclude(statements, filename, lineno, m, force):
def handleExport(statements, filename, lineno, m):
statements.append(ExportNode(filename, lineno, m.group(1)))
+def handleUnset(statements, filename, lineno, m):
+ statements.append(UnsetNode(filename, lineno, m.group(1)))
+
+def handleUnsetFlag(statements, filename, lineno, m):
+ statements.append(UnsetFlagNode(filename, lineno, m.group(1), m.group(2)))
+
def handleData(statements, filename, lineno, groupd):
statements.append(DataNode(filename, lineno, groupd))
@@ -307,10 +339,13 @@ def handleInherit(statements, filename, lineno, m):
statements.append(InheritNode(filename, lineno, classes))
def finalize(fn, d, variant = None):
- all_handlers = {}
+ saved_handlers = bb.event.get_handlers().copy()
+
for var in d.getVar('__BBHANDLERS', False) or []:
# try to add the handler
handlerfn = d.getVarFlag(var, "filename", False)
+ if not handlerfn:
+ bb.fatal("Undefined event handler function '%s'" % var)
handlerln = int(d.getVarFlag(var, "lineno", False))
bb.event.register(var, d.getVar(var, False), (d.getVarFlag(var, "eventmask", True) or "").split(), handlerfn, handlerln)
@@ -332,6 +367,7 @@ def finalize(fn, d, variant = None):
d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))
bb.event.fire(bb.event.RecipeParsed(fn), d)
+ bb.event.set_handlers(saved_handlers)
def _create_variants(datastores, names, function, onlyfinalise):
def create_variant(name, orig_d, arg = None):
@@ -341,17 +377,17 @@ def _create_variants(datastores, names, function, onlyfinalise):
function(arg or name, new_d)
datastores[name] = new_d
- for variant, variant_d in datastores.items():
+ for variant in list(datastores.keys()):
for name in names:
if not variant:
# Based on main recipe
- create_variant(name, variant_d)
+ create_variant(name, datastores[""])
else:
- create_variant("%s-%s" % (variant, name), variant_d, name)
+ create_variant("%s-%s" % (variant, name), datastores[variant], name)
def _expand_versions(versions):
def expand_one(version, start, end):
- for i in xrange(start, end + 1):
+ for i in range(start, end + 1):
ver = _bbversions_re.sub(str(i), version, 1)
yield ver
@@ -460,17 +496,13 @@ def multi_finalize(fn, d):
safe_d.setVar("BBCLASSEXTEND", extended)
_create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise)
- for variant, variant_d in datastores.iteritems():
+ for variant in datastores.keys():
if variant:
try:
if not onlyfinalise or variant in onlyfinalise:
- finalize(fn, variant_d, variant)
+ finalize(fn, datastores[variant], variant)
except bb.parse.SkipRecipe as e:
- variant_d.setVar("__SKIPPED", e.args[0])
-
- if len(datastores) > 1:
- variants = filter(None, datastores.iterkeys())
- safe_d.setVar("__VARIANTS", " ".join(variants))
+ datastores[variant].setVar("__SKIPPED", e.args[0])
datastores[""] = d
return datastores
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py b/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
index ef72c3700..c54a07979 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -25,7 +25,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-from __future__ import absolute_import
+
import re, bb, os
import logging
import bb.build, bb.utils
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index fbd75b14a..875250de4 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -57,6 +57,8 @@ __config_regexp__ = re.compile( r"""
__include_regexp__ = re.compile( r"include\s+(.+)" )
__require_regexp__ = re.compile( r"require\s+(.+)" )
__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/]+)$" )
+__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/]+)$" )
+__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/]+)\[([a-zA-Z0-9\-_+.${}/]+)\]$" )
def init(data):
topdir = data.getVar('TOPDIR', False)
@@ -84,13 +86,13 @@ def include(parentfn, fn, lineno, data, error_out):
bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
if abs_fn and bb.parse.check_dependency(data, abs_fn):
- logger.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
+ logger.warning("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
for af in attempts:
bb.parse.mark_dependency(data, af)
if abs_fn:
fn = abs_fn
elif bb.parse.check_dependency(data, fn):
- logger.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
+ logger.warning("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
try:
bb.parse.handle(fn, data, True)
@@ -185,6 +187,16 @@ def feeder(lineno, s, fn, statements):
ast.handleExport(statements, fn, lineno, m)
return
+ m = __unset_regexp__.match(s)
+ if m:
+ ast.handleUnset(statements, fn, lineno, m)
+ return
+
+ m = __unset_flag_regexp__.match(s)
+ if m:
+ ast.handleUnsetFlag(statements, fn, lineno, m)
+ return
+
raise ParseError("unparsed line: '%s'" % s, fn, lineno);
# Add us to the handlers list
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/persist_data.py b/import-layers/yocto-poky/bitbake/lib/bb/persist_data.py
index e45042324..bb6deca52 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/persist_data.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/persist_data.py
@@ -92,9 +92,9 @@ class SQLTable(collections.MutableMapping):
self._execute("DELETE from %s where key=?;" % self.table, [key])
def __setitem__(self, key, value):
- if not isinstance(key, basestring):
+ if not isinstance(key, str):
raise TypeError('Only string keys are supported')
- elif not isinstance(value, basestring):
+ elif not isinstance(value, str):
raise TypeError('Only string values are supported')
data = self._execute("SELECT * from %s where key=?;" %
@@ -178,7 +178,7 @@ class PersistData(object):
"""
Return a list of key + value pairs for a domain
"""
- return self.data[domain].items()
+ return list(self.data[domain].items())
def getValue(self, domain, key):
"""
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/process.py b/import-layers/yocto-poky/bitbake/lib/bb/process.py
index 1c07f2d9b..c62d7bca4 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/process.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/process.py
@@ -17,7 +17,7 @@ class CmdError(RuntimeError):
self.msg = msg
def __str__(self):
- if not isinstance(self.command, basestring):
+ if not isinstance(self.command, str):
cmd = subprocess.list2cmdline(self.command)
else:
cmd = self.command
@@ -97,6 +97,8 @@ def _logged_communicate(pipe, log, input, extrafiles):
try:
while pipe.poll() is None:
rlist = rin
+ stdoutbuf = b""
+ stderrbuf = b""
try:
r,w,e = select.select (rlist, [], [], 1)
except OSError as e:
@@ -104,16 +106,26 @@ def _logged_communicate(pipe, log, input, extrafiles):
raise
if pipe.stdout in r:
- data = pipe.stdout.read()
- if data is not None:
- outdata.append(data)
- log.write(data)
+ data = stdoutbuf + pipe.stdout.read()
+ if data is not None and len(data) > 0:
+ try:
+ data = data.decode("utf-8")
+ outdata.append(data)
+ log.write(data)
+ stdoutbuf = b""
+ except UnicodeDecodeError:
+ stdoutbuf = data
if pipe.stderr in r:
- data = pipe.stderr.read()
- if data is not None:
- errdata.append(data)
- log.write(data)
+ data = stderrbuf + pipe.stderr.read()
+ if data is not None and len(data) > 0:
+ try:
+ data = data.decode("utf-8")
+ errdata.append(data)
+ log.write(data)
+ stderrbuf = b""
+ except UnicodeDecodeError:
+ stderrbuf = data
readextras(r)
@@ -135,7 +147,7 @@ def run(cmd, input=None, log=None, extrafiles=None, **options):
if not extrafiles:
extrafiles = []
- if isinstance(cmd, basestring) and not "shell" in options:
+ if isinstance(cmd, str) and not "shell" in options:
options["shell"] = True
try:
@@ -150,6 +162,10 @@ def run(cmd, input=None, log=None, extrafiles=None, **options):
stdout, stderr = _logged_communicate(pipe, log, input, extrafiles)
else:
stdout, stderr = pipe.communicate(input)
+ if stdout:
+ stdout = stdout.decode("utf-8")
+ if stderr:
+ stderr = stderr.decode("utf-8")
if pipe.returncode != 0:
raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/progress.py b/import-layers/yocto-poky/bitbake/lib/bb/progress.py
new file mode 100644
index 000000000..f54d1c76f
--- /dev/null
+++ b/import-layers/yocto-poky/bitbake/lib/bb/progress.py
@@ -0,0 +1,276 @@
+"""
+BitBake progress handling code
+"""
+
+# Copyright (C) 2016 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import sys
+import re
+import time
+import inspect
+import bb.event
+import bb.build
+
+class ProgressHandler(object):
+ """
+ Base class that can pretend to be a file object well enough to be
+ used to build objects to intercept console output and determine the
+ progress of some operation.
+ """
+ def __init__(self, d, outfile=None):
+ self._progress = 0
+ self._data = d
+ self._lastevent = 0
+ if outfile:
+ self._outfile = outfile
+ else:
+ self._outfile = sys.stdout
+
+ def _fire_progress(self, taskprogress, rate=None):
+ """Internal function to fire the progress event"""
+ bb.event.fire(bb.build.TaskProgress(taskprogress, rate), self._data)
+
+ def write(self, string):
+ self._outfile.write(string)
+
+ def flush(self):
+ self._outfile.flush()
+
+ def update(self, progress, rate=None):
+ ts = time.time()
+ if progress > 100:
+ progress = 100
+ if progress != self._progress or self._lastevent + 1 < ts:
+ self._fire_progress(progress, rate)
+ self._lastevent = ts
+ self._progress = progress
+
+class LineFilterProgressHandler(ProgressHandler):
+ """
+ A ProgressHandler variant that provides the ability to filter out
+ the lines if they contain progress information. Additionally, it
+ filters out anything before the last line feed on a line. This can
+ be used to keep the logs clean of output that we've only enabled for
+ getting progress, assuming that that can be done on a per-line
+ basis.
+ """
+ def __init__(self, d, outfile=None):
+ self._linebuffer = ''
+ super(LineFilterProgressHandler, self).__init__(d, outfile)
+
+ def write(self, string):
+ self._linebuffer += string
+ while True:
+ breakpos = self._linebuffer.find('\n') + 1
+ if breakpos == 0:
+ break
+ line = self._linebuffer[:breakpos]
+ self._linebuffer = self._linebuffer[breakpos:]
+ # Drop any line feeds and anything that precedes them
+ lbreakpos = line.rfind('\r') + 1
+ if lbreakpos:
+ line = line[lbreakpos:]
+ if self.writeline(line):
+ super(LineFilterProgressHandler, self).write(line)
+
+ def writeline(self, line):
+ return True
+
+class BasicProgressHandler(ProgressHandler):
+ def __init__(self, d, regex=r'(\d+)%', outfile=None):
+ super(BasicProgressHandler, self).__init__(d, outfile)
+ self._regex = re.compile(regex)
+ # Send an initial progress event so the bar gets shown
+ self._fire_progress(0)
+
+ def write(self, string):
+ percs = self._regex.findall(string)
+ if percs:
+ progress = int(percs[-1])
+ self.update(progress)
+ super(BasicProgressHandler, self).write(string)
+
+class OutOfProgressHandler(ProgressHandler):
+ def __init__(self, d, regex, outfile=None):
+ super(OutOfProgressHandler, self).__init__(d, outfile)
+ self._regex = re.compile(regex)
+ # Send an initial progress event so the bar gets shown
+ self._fire_progress(0)
+
+ def write(self, string):
+ nums = self._regex.findall(string)
+ if nums:
+ progress = (float(nums[-1][0]) / float(nums[-1][1])) * 100
+ self.update(progress)
+ super(OutOfProgressHandler, self).write(string)
+
+class MultiStageProgressReporter(object):
+ """
+ Class which allows reporting progress without the caller
+ having to know where they are in the overall sequence. Useful
+ for tasks made up of python code spread across multiple
+ classes / functions - the progress reporter object can
+ be passed around or stored at the object level and calls
+ to next_stage() and update() made whereever needed.
+ """
+ def __init__(self, d, stage_weights, debug=False):
+ """
+ Initialise the progress reporter.
+
+ Parameters:
+ * d: the datastore (needed for firing the events)
+ * stage_weights: a list of weight values, one for each stage.
+ The value is scaled internally so you only need to specify
+ values relative to other values in the list, so if there
+ are two stages and the first takes 2s and the second takes
+ 10s you would specify [2, 10] (or [1, 5], it doesn't matter).
+ * debug: specify True (and ensure you call finish() at the end)
+ in order to show a printout of the calculated stage weights
+ based on timing each stage. Use this to determine what the
+ weights should be when you're not sure.
+ """
+ self._data = d
+ total = sum(stage_weights)
+ self._stage_weights = [float(x)/total for x in stage_weights]
+ self._stage = -1
+ self._base_progress = 0
+ # Send an initial progress event so the bar gets shown
+ self._fire_progress(0)
+ self._debug = debug
+ self._finished = False
+ if self._debug:
+ self._last_time = time.time()
+ self._stage_times = []
+ self._stage_total = None
+ self._callers = []
+
+ def _fire_progress(self, taskprogress):
+ bb.event.fire(bb.build.TaskProgress(taskprogress), self._data)
+
+ def next_stage(self, stage_total=None):
+ """
+ Move to the next stage.
+ Parameters:
+ * stage_total: optional total for progress within the stage,
+ see update() for details
+ NOTE: you need to call this before the first stage.
+ """
+ self._stage += 1
+ self._stage_total = stage_total
+ if self._stage == 0:
+ # First stage
+ if self._debug:
+ self._last_time = time.time()
+ else:
+ if self._stage < len(self._stage_weights):
+ self._base_progress = sum(self._stage_weights[:self._stage]) * 100
+ if self._debug:
+ currtime = time.time()
+ self._stage_times.append(currtime - self._last_time)
+ self._last_time = currtime
+ self._callers.append(inspect.getouterframes(inspect.currentframe())[1])
+ elif not self._debug:
+ bb.warn('ProgressReporter: current stage beyond declared number of stages')
+ self._base_progress = 100
+ self._fire_progress(self._base_progress)
+
+ def update(self, stage_progress):
+ """
+ Update progress within the current stage.
+ Parameters:
+ * stage_progress: progress value within the stage. If stage_total
+ was specified when next_stage() was last called, then this
+ value is considered to be out of stage_total, otherwise it should
+ be a percentage value from 0 to 100.
+ """
+ if self._stage_total:
+ stage_progress = (float(stage_progress) / self._stage_total) * 100
+ if self._stage < 0:
+ bb.warn('ProgressReporter: update called before first call to next_stage()')
+ elif self._stage < len(self._stage_weights):
+ progress = self._base_progress + (stage_progress * self._stage_weights[self._stage])
+ else:
+ progress = self._base_progress
+ if progress > 100:
+ progress = 100
+ self._fire_progress(progress)
+
+ def finish(self):
+ if self._finished:
+ return
+ self._finished = True
+ if self._debug:
+ import math
+ self._stage_times.append(time.time() - self._last_time)
+ mintime = max(min(self._stage_times), 0.01)
+ self._callers.append(None)
+ stage_weights = [int(math.ceil(x / mintime)) for x in self._stage_times]
+ bb.warn('Stage weights: %s' % stage_weights)
+ out = []
+ for stage_weight, caller in zip(stage_weights, self._callers):
+ if caller:
+ out.append('Up to %s:%d: %d' % (caller[1], caller[2], stage_weight))
+ else:
+ out.append('Up to finish: %d' % stage_weight)
+ bb.warn('Stage times:\n %s' % '\n '.join(out))
+
+class MultiStageProcessProgressReporter(MultiStageProgressReporter):
+ """
+ Version of MultiStageProgressReporter intended for use with
+ standalone processes (such as preparing the runqueue)
+ """
+ def __init__(self, d, processname, stage_weights, debug=False):
+ self._processname = processname
+ self._started = False
+ MultiStageProgressReporter.__init__(self, d, stage_weights, debug)
+
+ def start(self):
+ if not self._started:
+ bb.event.fire(bb.event.ProcessStarted(self._processname, 100), self._data)
+ self._started = True
+
+ def _fire_progress(self, taskprogress):
+ if taskprogress == 0:
+ self.start()
+ return
+ bb.event.fire(bb.event.ProcessProgress(self._processname, taskprogress), self._data)
+
+ def finish(self):
+ MultiStageProgressReporter.finish(self)
+ bb.event.fire(bb.event.ProcessFinished(self._processname), self._data)
+
+class DummyMultiStageProcessProgressReporter(MultiStageProgressReporter):
+ """
+ MultiStageProcessProgressReporter that takes the calls and does nothing
+ with them (to avoid a bunch of "if progress_reporter:" checks)
+ """
+ def __init__(self):
+ MultiStageProcessProgressReporter.__init__(self, "", None, [])
+
+ def _fire_progress(self, taskprogress, rate=None):
+ pass
+
+ def start(self):
+ pass
+
+ def next_stage(self, stage_total=None):
+ pass
+
+ def update(self, stage_progress):
+ pass
+
+ def finish(self):
+ pass
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/providers.py b/import-layers/yocto-poky/bitbake/lib/bb/providers.py
index 563a091fd..db02a0b0d 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/providers.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/providers.py
@@ -245,7 +245,7 @@ def _filterProviders(providers, item, cfgData, dataCache):
pkg_pn[pn] = []
pkg_pn[pn].append(p)
- logger.debug(1, "providers for %s are: %s", item, pkg_pn.keys())
+ logger.debug(1, "providers for %s are: %s", item, list(pkg_pn.keys()))
# First add PREFERRED_VERSIONS
for pn in pkg_pn:
@@ -402,7 +402,7 @@ def getRuntimeProviders(dataCache, rdepend):
return rproviders
-def buildWorldTargetList(dataCache):
+def buildWorldTargetList(dataCache, task=None):
"""
Build package list for "bitbake world"
"""
@@ -413,6 +413,9 @@ def buildWorldTargetList(dataCache):
for f in dataCache.possible_world:
terminal = True
pn = dataCache.pkg_fn[f]
+ if task and task not in dataCache.task_deps[f]['tasks']:
+ logger.debug(2, "World build skipping %s as task %s doesn't exist", f, task)
+ terminal = False
for p in dataCache.pn_provides[pn]:
if p.startswith('virtual/'):
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/pysh/builtin.py b/import-layers/yocto-poky/bitbake/lib/bb/pysh/builtin.py
index b748e4a4f..a8814dc33 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/pysh/builtin.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/pysh/builtin.py
@@ -527,7 +527,7 @@ def utility_sed(name, args, interp, env, stdin, stdout, stderr, debugflags):
print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
# Scan pattern arguments and append a space if necessary
- for i in xrange(len(args)):
+ for i in range(len(args)):
if not RE_SED.search(args[i]):
continue
args[i] = args[i] + ' '
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/pysh/interp.py b/import-layers/yocto-poky/bitbake/lib/bb/pysh/interp.py
index 25d8c92ec..d14ecf3c6 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/pysh/interp.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/pysh/interp.py
@@ -474,7 +474,7 @@ class Environment:
"""
# Save and remove previous arguments
prevargs = []
- for i in xrange(int(self._env['#'])):
+ for i in range(int(self._env['#'])):
i = str(i+1)
prevargs.append(self._env[i])
del self._env[i]
@@ -488,7 +488,7 @@ class Environment:
return prevargs
def get_positional_args(self):
- return [self._env[str(i+1)] for i in xrange(int(self._env['#']))]
+ return [self._env[str(i+1)] for i in range(int(self._env['#']))]
def get_variables(self):
return dict(self._env)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/pysh/pyshlex.py b/import-layers/yocto-poky/bitbake/lib/bb/pysh/pyshlex.py
index b30123675..fbf094b7a 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/pysh/pyshlex.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/pysh/pyshlex.py
@@ -20,7 +20,7 @@ except NameError:
from Set import Set as set
from ply import lex
-from sherrors import *
+from bb.pysh.sherrors import *
class NeedMore(Exception):
pass
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/pysh/pyshyacc.py b/import-layers/yocto-poky/bitbake/lib/bb/pysh/pyshyacc.py
index e8e80aac4..ba4cefdcb 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/pysh/pyshyacc.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/pysh/pyshyacc.py
@@ -10,11 +10,11 @@
import os.path
import sys
-import pyshlex
+import bb.pysh.pyshlex as pyshlex
tokens = pyshlex.tokens
from ply import yacc
-import sherrors
+import bb.pysh.sherrors as sherrors
class IORedirect:
def __init__(self, op, filename, io_number=None):
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/runqueue.py b/import-layers/yocto-poky/bitbake/lib/bb/runqueue.py
index e1b9b2e66..84b268580 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/runqueue.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/runqueue.py
@@ -35,17 +35,44 @@ import bb
from bb import msg, data, event
from bb import monitordisk
import subprocess
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
+import pickle
bblogger = logging.getLogger("BitBake")
logger = logging.getLogger("BitBake.RunQueue")
__find_md5__ = re.compile( r'(?i)(?<![a-z0-9])[a-f0-9]{32}(?![a-z0-9])' )
+def fn_from_tid(tid):
+ return tid.rsplit(":", 1)[0]
+
+def taskname_from_tid(tid):
+ return tid.rsplit(":", 1)[1]
+
+def split_tid(tid):
+ (mc, fn, taskname, _) = split_tid_mcfn(tid)
+ return (mc, fn, taskname)
+
+def split_tid_mcfn(tid):
+ if tid.startswith('multiconfig:'):
+ elems = tid.split(':')
+ mc = elems[1]
+ fn = ":".join(elems[2:-1])
+ taskname = elems[-1]
+ mcfn = "multiconfig:" + mc + ":" + fn
+ else:
+ tid = tid.rsplit(":", 1)
+ mc = ""
+ fn = tid[0]
+ taskname = tid[1]
+ mcfn = fn
+
+ return (mc, fn, taskname, mcfn)
+
+def build_tid(mc, fn, taskname):
+ if mc:
+ return "multiconfig:" + mc + ":" + fn + ":" + taskname
+ return fn + ":" + taskname
+
class RunQueueStats:
"""
Holds statistics on the tasks handled by the associated runQueue
@@ -101,19 +128,17 @@ class RunQueueScheduler(object):
"""
self.rq = runqueue
self.rqdata = rqdata
- self.numTasks = len(self.rqdata.runq_fnid)
+ self.numTasks = len(self.rqdata.runtaskentries)
- self.prio_map = []
- self.prio_map.extend(range(self.numTasks))
+ self.prio_map = [self.rqdata.runtaskentries.keys()]
self.buildable = []
self.stamps = {}
- for taskid in xrange(self.numTasks):
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[taskid]]
- taskname = self.rqdata.runq_task[taskid]
- self.stamps[taskid] = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
- if self.rq.runq_buildable[taskid] == 1:
- self.buildable.append(taskid)
+ for tid in self.rqdata.runtaskentries:
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+ self.stamps[tid] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
+ if tid in self.rq.runq_buildable:
+ self.buildable.append(tid)
self.rev_prio_map = None
@@ -121,30 +146,30 @@ class RunQueueScheduler(object):
"""
Return the id of the first task we find that is buildable
"""
- self.buildable = [x for x in self.buildable if not self.rq.runq_running[x] == 1]
+ self.buildable = [x for x in self.buildable if x not in self.rq.runq_running]
if not self.buildable:
return None
if len(self.buildable) == 1:
- taskid = self.buildable[0]
- stamp = self.stamps[taskid]
- if stamp not in self.rq.build_stamps.itervalues():
- return taskid
+ tid = self.buildable[0]
+ stamp = self.stamps[tid]
+ if stamp not in self.rq.build_stamps.values():
+ return tid
if not self.rev_prio_map:
- self.rev_prio_map = range(self.numTasks)
- for taskid in xrange(self.numTasks):
- self.rev_prio_map[self.prio_map[taskid]] = taskid
+ self.rev_prio_map = {}
+ for tid in self.rqdata.runtaskentries:
+ self.rev_prio_map[tid] = self.prio_map.index(tid)
best = None
bestprio = None
- for taskid in self.buildable:
- prio = self.rev_prio_map[taskid]
+ for tid in self.buildable:
+ prio = self.rev_prio_map[tid]
if bestprio is None or bestprio > prio:
- stamp = self.stamps[taskid]
- if stamp in self.rq.build_stamps.itervalues():
+ stamp = self.stamps[tid]
+ if stamp in self.rq.build_stamps.values():
continue
bestprio = prio
- best = taskid
+ best = tid
return best
@@ -171,14 +196,17 @@ class RunQueueSchedulerSpeed(RunQueueScheduler):
"""
RunQueueScheduler.__init__(self, runqueue, rqdata)
- sortweight = sorted(copy.deepcopy(self.rqdata.runq_weight))
- copyweight = copy.deepcopy(self.rqdata.runq_weight)
- self.prio_map = []
+ weights = {}
+ for tid in self.rqdata.runtaskentries:
+ weight = self.rqdata.runtaskentries[tid].weight
+ if not weight in weights:
+ weights[weight] = []
+ weights[weight].append(tid)
- for weight in sortweight:
- idx = copyweight.index(weight)
- self.prio_map.append(idx)
- copyweight[idx] = -1
+ self.prio_map = []
+ for weight in sorted(weights):
+ for w in weights[weight]:
+ self.prio_map.append(w)
self.prio_map.reverse()
@@ -195,32 +223,40 @@ class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed):
def __init__(self, runqueue, rqdata):
RunQueueSchedulerSpeed.__init__(self, runqueue, rqdata)
- #FIXME - whilst this groups all fnids together it does not reorder the
- #fnid groups optimally.
+ #FIXME - whilst this groups all fns together it does not reorder the
+ #fn groups optimally.
basemap = copy.deepcopy(self.prio_map)
self.prio_map = []
while (len(basemap) > 0):
entry = basemap.pop(0)
self.prio_map.append(entry)
- fnid = self.rqdata.runq_fnid[entry]
+ fn = fn_from_tid(entry)
todel = []
for entry in basemap:
- entry_fnid = self.rqdata.runq_fnid[entry]
- if entry_fnid == fnid:
+ entry_fn = fn_from_tid(entry)
+ if entry_fn == fn:
todel.append(basemap.index(entry))
self.prio_map.append(entry)
todel.reverse()
for idx in todel:
del basemap[idx]
+class RunTaskEntry(object):
+ def __init__(self):
+ self.depends = set()
+ self.revdeps = set()
+ self.hash = None
+ self.task = None
+ self.weight = 1
+
class RunQueueData:
"""
BitBake Run Queue implementation
"""
- def __init__(self, rq, cooker, cfgData, dataCache, taskData, targets):
+ def __init__(self, rq, cooker, cfgData, dataCaches, taskData, targets):
self.cooker = cooker
- self.dataCache = dataCache
+ self.dataCaches = dataCaches
self.taskData = taskData
self.targets = targets
self.rq = rq
@@ -228,52 +264,36 @@ class RunQueueData:
self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST", True) or ""
self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
+ self.setscenewhitelist = get_setscene_enforce_whitelist(cfgData)
+ self.setscenewhitelist_checked = False
+ self.init_progress_reporter = bb.progress.DummyMultiStageProcessProgressReporter()
self.reset()
def reset(self):
- self.runq_fnid = []
- self.runq_task = []
- self.runq_depends = []
- self.runq_revdeps = []
- self.runq_hash = []
+ self.runtaskentries = {}
def runq_depends_names(self, ids):
import re
ret = []
- for id in self.runq_depends[ids]:
- nam = os.path.basename(self.get_user_idstring(id))
+ for id in ids:
+ nam = os.path.basename(id)
nam = re.sub("_[^,]*,", ",", nam)
ret.extend([nam])
return ret
- def get_task_name(self, task):
- return self.runq_task[task]
-
- def get_task_file(self, task):
- return self.taskData.fn_index[self.runq_fnid[task]]
+ def get_task_hash(self, tid):
+ return self.runtaskentries[tid].hash
- def get_task_hash(self, task):
- return self.runq_hash[task]
-
- def get_user_idstring(self, task, task_name_suffix = ""):
- fn = self.taskData.fn_index[self.runq_fnid[task]]
- taskname = self.runq_task[task] + task_name_suffix
- return "%s, %s" % (fn, taskname)
+ def get_user_idstring(self, tid, task_name_suffix = ""):
+ return tid + task_name_suffix
def get_short_user_idstring(self, task, task_name_suffix = ""):
- fn = self.taskData.fn_index[self.runq_fnid[task]]
- pn = self.dataCache.pkg_fn[fn]
- taskname = self.runq_task[task] + task_name_suffix
+ (mc, fn, taskname, _) = split_tid_mcfn(task)
+ pn = self.dataCaches[mc].pkg_fn[fn]
+ taskname = taskname_from_tid(task) + task_name_suffix
return "%s:%s" % (pn, taskname)
-
- def get_task_id(self, fnid, taskname):
- for listid in xrange(len(self.runq_fnid)):
- if self.runq_fnid[listid] == fnid and self.runq_task[listid] == taskname:
- return listid
- return None
-
def circular_depchains_handler(self, tasks):
"""
Some tasks aren't buildable, likely due to circular dependency issues.
@@ -291,7 +311,7 @@ class RunQueueData:
"""
lowest = 0
new_chain = []
- for entry in xrange(len(chain)):
+ for entry in range(len(chain)):
if chain[entry] < chain[lowest]:
lowest = entry
new_chain.extend(chain[lowest:])
@@ -304,7 +324,7 @@ class RunQueueData:
"""
if len(chain1) != len(chain2):
return False
- for index in xrange(len(chain1)):
+ for index in range(len(chain1)):
if chain1[index] != chain2[index]:
return False
return True
@@ -318,11 +338,11 @@ class RunQueueData:
return True
return False
- def find_chains(taskid, prev_chain):
- prev_chain.append(taskid)
+ def find_chains(tid, prev_chain):
+ prev_chain.append(tid)
total_deps = []
- total_deps.extend(self.runq_revdeps[taskid])
- for revdep in self.runq_revdeps[taskid]:
+ total_deps.extend(self.runtaskentries[tid].revdeps)
+ for revdep in self.runtaskentries[tid].revdeps:
if revdep in prev_chain:
idx = prev_chain.index(revdep)
# To prevent duplicates, reorder the chain to start with the lowest taskid
@@ -333,7 +353,7 @@ class RunQueueData:
valid_chains.append(new_chain)
msgs.append("Dependency loop #%d found:\n" % len(valid_chains))
for dep in new_chain:
- msgs.append(" Task %s (%s) (dependent Tasks %s)\n" % (dep, self.get_user_idstring(dep), self.runq_depends_names(dep)))
+ msgs.append(" Task %s (dependent Tasks %s)\n" % (dep, self.runq_depends_names(self.runtaskentries[dep].depends)))
msgs.append("\n")
if len(valid_chains) > 10:
msgs.append("Aborted dependency loops search after 10 matches.\n")
@@ -354,7 +374,7 @@ class RunQueueData:
if dep not in total_deps:
total_deps.append(dep)
- explored_deps[taskid] = total_deps
+ explored_deps[tid] = total_deps
for task in tasks:
find_chains(task, [])
@@ -370,25 +390,25 @@ class RunQueueData:
possible to execute due to circular dependencies.
"""
- numTasks = len(self.runq_fnid)
- weight = []
- deps_left = []
- task_done = []
+ numTasks = len(self.runtaskentries)
+ weight = {}
+ deps_left = {}
+ task_done = {}
- for listid in xrange(numTasks):
- task_done.append(False)
- weight.append(1)
- deps_left.append(len(self.runq_revdeps[listid]))
+ for tid in self.runtaskentries:
+ task_done[tid] = False
+ weight[tid] = 1
+ deps_left[tid] = len(self.runtaskentries[tid].revdeps)
- for listid in endpoints:
- weight[listid] = 10
- task_done[listid] = True
+ for tid in endpoints:
+ weight[tid] = 10
+ task_done[tid] = True
while True:
next_points = []
- for listid in endpoints:
- for revdep in self.runq_depends[listid]:
- weight[revdep] = weight[revdep] + weight[listid]
+ for tid in endpoints:
+ for revdep in self.runtaskentries[tid].depends:
+ weight[revdep] = weight[revdep] + weight[tid]
deps_left[revdep] = deps_left[revdep] - 1
if deps_left[revdep] == 0:
next_points.append(revdep)
@@ -399,14 +419,15 @@ class RunQueueData:
# Circular dependency sanity check
problem_tasks = []
- for task in xrange(numTasks):
- if task_done[task] is False or deps_left[task] != 0:
- problem_tasks.append(task)
- logger.debug(2, "Task %s (%s) is not buildable", task, self.get_user_idstring(task))
- logger.debug(2, "(Complete marker was %s and the remaining dependency count was %s)\n", task_done[task], deps_left[task])
+ for tid in self.runtaskentries:
+ if task_done[tid] is False or deps_left[tid] != 0:
+ problem_tasks.append(tid)
+ logger.debug(2, "Task %s is not buildable", tid)
+ logger.debug(2, "(Complete marker was %s and the remaining dependency count was %s)\n", task_done[tid], deps_left[tid])
+ self.runtaskentries[tid].weight = weight[tid]
if problem_tasks:
- message = "Unbuildable tasks were found.\n"
+ message = "%s unbuildable tasks were found.\n" % len(problem_tasks)
message = message + "These are usually caused by circular dependencies and any circular dependency chains found will be printed below. Increase the debug level to see a list of unbuildable tasks.\n\n"
message = message + "Identifying dependency loops (this may take a short while)...\n"
logger.error(message)
@@ -426,18 +447,24 @@ class RunQueueData:
to optimise the execution order.
"""
- runq_build = []
+ runq_build = {}
recursivetasks = {}
recursiveitasks = {}
recursivetasksselfref = set()
taskData = self.taskData
- if len(taskData.tasks_name) == 0:
+ found = False
+ for mc in self.taskData:
+ if len(taskData[mc].taskentries) > 0:
+ found = True
+ break
+ if not found:
# Nothing to do
return 0
- logger.info("Preparing RunQueue")
+ self.init_progress_reporter.start()
+ self.init_progress_reporter.next_stage()
# Step A - Work out a list of tasks to run
#
@@ -450,161 +477,173 @@ class RunQueueData:
# process is repeated for each type of dependency (tdepends, deptask,
# rdeptast, recrdeptask, idepends).
- def add_build_dependencies(depids, tasknames, depends):
- for depid in depids:
+ def add_build_dependencies(depids, tasknames, depends, mc):
+ for depname in depids:
# Won't be in build_targets if ASSUME_PROVIDED
- if depid not in taskData.build_targets:
+ if depname not in taskData[mc].build_targets or not taskData[mc].build_targets[depname]:
continue
- depdata = taskData.build_targets[depid][0]
+ depdata = taskData[mc].build_targets[depname][0]
if depdata is None:
continue
for taskname in tasknames:
- taskid = taskData.gettask_id_fromfnid(depdata, taskname)
- if taskid is not None:
- depends.add(taskid)
+ t = depdata + ":" + taskname
+ if t in taskData[mc].taskentries:
+ depends.add(t)
- def add_runtime_dependencies(depids, tasknames, depends):
- for depid in depids:
- if depid not in taskData.run_targets:
+ def add_runtime_dependencies(depids, tasknames, depends, mc):
+ for depname in depids:
+ if depname not in taskData[mc].run_targets or not taskData[mc].run_targets[depname]:
continue
- depdata = taskData.run_targets[depid][0]
+ depdata = taskData[mc].run_targets[depname][0]
if depdata is None:
continue
for taskname in tasknames:
- taskid = taskData.gettask_id_fromfnid(depdata, taskname)
- if taskid is not None:
- depends.add(taskid)
+ t = depdata + ":" + taskname
+ if t in taskData[mc].taskentries:
+ depends.add(t)
- def add_resolved_dependencies(depids, tasknames, depends):
- for depid in depids:
- for taskname in tasknames:
- taskid = taskData.gettask_id_fromfnid(depid, taskname)
- if taskid is not None:
- depends.add(taskid)
+ def add_resolved_dependencies(mc, fn, tasknames, depends):
+ for taskname in tasknames:
+ tid = build_tid(mc, fn, taskname)
+ if tid in self.runtaskentries:
+ depends.add(tid)
- for task in xrange(len(taskData.tasks_name)):
- depends = set()
- fnid = taskData.tasks_fnid[task]
- fn = taskData.fn_index[fnid]
- task_deps = self.dataCache.task_deps[fn]
+ for mc in taskData:
+ for tid in taskData[mc].taskentries:
- #logger.debug(2, "Processing %s:%s", fn, taskData.tasks_name[task])
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+ #runtid = build_tid(mc, fn, taskname)
- if fnid not in taskData.failed_fnids:
+ #logger.debug(2, "Processing %s,%s:%s", mc, fn, taskname)
+
+ depends = set()
+ task_deps = self.dataCaches[mc].task_deps[taskfn]
+
+ self.runtaskentries[tid] = RunTaskEntry()
+
+ if fn in taskData[mc].failed_fns:
+ continue
# Resolve task internal dependencies
#
# e.g. addtask before X after Y
- depends = set(taskData.tasks_tdepends[task])
+ for t in taskData[mc].taskentries[tid].tdepends:
+ (_, depfn, deptaskname, _) = split_tid_mcfn(t)
+ depends.add(build_tid(mc, depfn, deptaskname))
# Resolve 'deptask' dependencies
#
# e.g. do_sometask[deptask] = "do_someothertask"
# (makes sure sometask runs after someothertask of all DEPENDS)
- if 'deptask' in task_deps and taskData.tasks_name[task] in task_deps['deptask']:
- tasknames = task_deps['deptask'][taskData.tasks_name[task]].split()
- add_build_dependencies(taskData.depids[fnid], tasknames, depends)
+ if 'deptask' in task_deps and taskname in task_deps['deptask']:
+ tasknames = task_deps['deptask'][taskname].split()
+ add_build_dependencies(taskData[mc].depids[taskfn], tasknames, depends, mc)
# Resolve 'rdeptask' dependencies
#
# e.g. do_sometask[rdeptask] = "do_someothertask"
# (makes sure sometask runs after someothertask of all RDEPENDS)
- if 'rdeptask' in task_deps and taskData.tasks_name[task] in task_deps['rdeptask']:
- tasknames = task_deps['rdeptask'][taskData.tasks_name[task]].split()
- add_runtime_dependencies(taskData.rdepids[fnid], tasknames, depends)
+ if 'rdeptask' in task_deps and taskname in task_deps['rdeptask']:
+ tasknames = task_deps['rdeptask'][taskname].split()
+ add_runtime_dependencies(taskData[mc].rdepids[taskfn], tasknames, depends, mc)
# Resolve inter-task dependencies
#
# e.g. do_sometask[depends] = "targetname:do_someothertask"
# (makes sure sometask runs after targetname's someothertask)
- idepends = taskData.tasks_idepends[task]
- for (depid, idependtask) in idepends:
- if depid in taskData.build_targets and not depid in taskData.failed_deps:
+ idepends = taskData[mc].taskentries[tid].idepends
+ for (depname, idependtask) in idepends:
+ if depname in taskData[mc].build_targets and taskData[mc].build_targets[depname] and not depname in taskData[mc].failed_deps:
# Won't be in build_targets if ASSUME_PROVIDED
- depdata = taskData.build_targets[depid][0]
+ depdata = taskData[mc].build_targets[depname][0]
if depdata is not None:
- taskid = taskData.gettask_id_fromfnid(depdata, idependtask)
- if taskid is None:
- bb.msg.fatal("RunQueue", "Task %s in %s depends upon non-existent task %s in %s" % (taskData.tasks_name[task], fn, idependtask, taskData.fn_index[depdata]))
- depends.add(taskid)
- irdepends = taskData.tasks_irdepends[task]
- for (depid, idependtask) in irdepends:
- if depid in taskData.run_targets:
+ t = depdata + ":" + idependtask
+ depends.add(t)
+ if t not in taskData[mc].taskentries:
+ bb.msg.fatal("RunQueue", "Task %s in %s depends upon non-existent task %s in %s" % (taskname, fn, idependtask, depdata))
+ irdepends = taskData[mc].taskentries[tid].irdepends
+ for (depname, idependtask) in irdepends:
+ if depname in taskData[mc].run_targets:
# Won't be in run_targets if ASSUME_PROVIDED
- depdata = taskData.run_targets[depid][0]
+ depdata = taskData[mc].run_targets[depname][0]
if depdata is not None:
- taskid = taskData.gettask_id_fromfnid(depdata, idependtask)
- if taskid is None:
- bb.msg.fatal("RunQueue", "Task %s in %s rdepends upon non-existent task %s in %s" % (taskData.tasks_name[task], fn, idependtask, taskData.fn_index[depdata]))
- depends.add(taskid)
+ t = depdata + ":" + idependtask
+ depends.add(t)
+ if t not in taskData[mc].taskentries:
+ bb.msg.fatal("RunQueue", "Task %s in %s rdepends upon non-existent task %s in %s" % (taskname, fn, idependtask, depdata))
# Resolve recursive 'recrdeptask' dependencies (Part A)
#
# e.g. do_sometask[recrdeptask] = "do_someothertask"
# (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
# We cover the recursive part of the dependencies below
- if 'recrdeptask' in task_deps and taskData.tasks_name[task] in task_deps['recrdeptask']:
- tasknames = task_deps['recrdeptask'][taskData.tasks_name[task]].split()
- recursivetasks[task] = tasknames
- add_build_dependencies(taskData.depids[fnid], tasknames, depends)
- add_runtime_dependencies(taskData.rdepids[fnid], tasknames, depends)
- if taskData.tasks_name[task] in tasknames:
- recursivetasksselfref.add(task)
-
- if 'recideptask' in task_deps and taskData.tasks_name[task] in task_deps['recideptask']:
- recursiveitasks[task] = []
- for t in task_deps['recideptask'][taskData.tasks_name[task]].split():
- newdep = taskData.gettask_id_fromfnid(fnid, t)
- recursiveitasks[task].append(newdep)
-
- self.runq_fnid.append(taskData.tasks_fnid[task])
- self.runq_task.append(taskData.tasks_name[task])
- self.runq_depends.append(depends)
- self.runq_revdeps.append(set())
- self.runq_hash.append("")
-
- runq_build.append(0)
+ if 'recrdeptask' in task_deps and taskname in task_deps['recrdeptask']:
+ tasknames = task_deps['recrdeptask'][taskname].split()
+ recursivetasks[tid] = tasknames
+ add_build_dependencies(taskData[mc].depids[taskfn], tasknames, depends, mc)
+ add_runtime_dependencies(taskData[mc].rdepids[taskfn], tasknames, depends, mc)
+ if taskname in tasknames:
+ recursivetasksselfref.add(tid)
+
+ if 'recideptask' in task_deps and taskname in task_deps['recideptask']:
+ recursiveitasks[tid] = []
+ for t in task_deps['recideptask'][taskname].split():
+ newdep = build_tid(mc, fn, t)
+ recursiveitasks[tid].append(newdep)
+
+ self.runtaskentries[tid].depends = depends
+
+ #self.dump_data()
# Resolve recursive 'recrdeptask' dependencies (Part B)
#
# e.g. do_sometask[recrdeptask] = "do_someothertask"
# (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
- # We need to do this separately since we need all of self.runq_depends to be complete before this is processed
+ # We need to do this separately since we need all of runtaskentries[*].depends to be complete before this is processed
+ self.init_progress_reporter.next_stage(len(recursivetasks))
extradeps = {}
- for task in recursivetasks:
- extradeps[task] = set(self.runq_depends[task])
- tasknames = recursivetasks[task]
+ for taskcounter, tid in enumerate(recursivetasks):
+ extradeps[tid] = set(self.runtaskentries[tid].depends)
+
+ tasknames = recursivetasks[tid]
seendeps = set()
- seenfnid = []
def generate_recdeps(t):
newdeps = set()
- add_resolved_dependencies([taskData.tasks_fnid[t]], tasknames, newdeps)
- extradeps[task].update(newdeps)
+ (mc, fn, taskname, _) = split_tid_mcfn(t)
+ add_resolved_dependencies(mc, fn, tasknames, newdeps)
+ extradeps[tid].update(newdeps)
seendeps.add(t)
newdeps.add(t)
for i in newdeps:
- for n in self.runq_depends[i]:
+ task = self.runtaskentries[i].task
+ for n in self.runtaskentries[i].depends:
if n not in seendeps:
- generate_recdeps(n)
- generate_recdeps(task)
+ generate_recdeps(n)
+ generate_recdeps(tid)
- if task in recursiveitasks:
- for dep in recursiveitasks[task]:
+ if tid in recursiveitasks:
+ for dep in recursiveitasks[tid]:
generate_recdeps(dep)
+ self.init_progress_reporter.update(taskcounter)
# Remove circular references so that do_a[recrdeptask] = "do_a do_b" can work
- for task in recursivetasks:
- extradeps[task].difference_update(recursivetasksselfref)
+ for tid in recursivetasks:
+ extradeps[tid].difference_update(recursivetasksselfref)
- for task in xrange(len(taskData.tasks_name)):
+ for tid in self.runtaskentries:
+ task = self.runtaskentries[tid].task
# Add in extra dependencies
- if task in extradeps:
- self.runq_depends[task] = extradeps[task]
+ if tid in extradeps:
+ self.runtaskentries[tid].depends = extradeps[tid]
# Remove all self references
- if task in self.runq_depends[task]:
- logger.debug(2, "Task %s (%s %s) contains self reference! %s", task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], self.runq_depends[task])
- self.runq_depends[task].remove(task)
+ if tid in self.runtaskentries[tid].depends:
+ logger.debug(2, "Task %s contains self reference!", tid)
+ self.runtaskentries[tid].depends.remove(tid)
+
+ self.init_progress_reporter.next_stage()
+
+ #self.dump_data()
# Step B - Mark all active tasks
#
@@ -614,148 +653,146 @@ class RunQueueData:
logger.verbose("Marking Active Tasks")
- def mark_active(listid, depth):
+ def mark_active(tid, depth):
"""
Mark an item as active along with its depends
(calls itself recursively)
"""
- if runq_build[listid] == 1:
+ if tid in runq_build:
return
- runq_build[listid] = 1
+ runq_build[tid] = 1
- depends = self.runq_depends[listid]
+ depends = self.runtaskentries[tid].depends
for depend in depends:
mark_active(depend, depth+1)
- self.target_pairs = []
- for target in self.targets:
- targetid = taskData.getbuild_id(target[0])
+ self.target_tids = []
+ for (mc, target, task, fn) in self.targets:
- if targetid not in taskData.build_targets:
+ if target not in taskData[mc].build_targets or not taskData[mc].build_targets[target]:
continue
- if targetid in taskData.failed_deps:
+ if target in taskData[mc].failed_deps:
continue
- fnid = taskData.build_targets[targetid][0]
- fn = taskData.fn_index[fnid]
- task = target[1]
parents = False
if task.endswith('-'):
parents = True
task = task[:-1]
- self.target_pairs.append((fn, task))
-
- if fnid in taskData.failed_fnids:
+ if fn in taskData[mc].failed_fns:
continue
- if task not in taskData.tasks_lookup[fnid]:
+ # fn already has mc prefix
+ tid = fn + ":" + task
+ self.target_tids.append(tid)
+ if tid not in taskData[mc].taskentries:
import difflib
- close_matches = difflib.get_close_matches(task, taskData.tasks_lookup[fnid], cutoff=0.7)
+ tasks = []
+ for x in taskData[mc].taskentries:
+ if x.startswith(fn + ":"):
+ tasks.append(taskname_from_tid(x))
+ close_matches = difflib.get_close_matches(task, tasks, cutoff=0.7)
if close_matches:
extra = ". Close matches:\n %s" % "\n ".join(close_matches)
else:
extra = ""
- bb.msg.fatal("RunQueue", "Task %s does not exist for target %s%s" % (task, target[0], extra))
-
+ bb.msg.fatal("RunQueue", "Task %s does not exist for target %s (%s)%s" % (task, target, tid, extra))
+
# For tasks called "XXXX-", ony run their dependencies
- listid = taskData.tasks_lookup[fnid][task]
if parents:
- for i in self.runq_depends[listid]:
+ for i in self.runtaskentries[tid].depends:
mark_active(i, 1)
else:
- mark_active(listid, 1)
+ mark_active(tid, 1)
+
+ self.init_progress_reporter.next_stage()
# Step C - Prune all inactive tasks
#
# Once all active tasks are marked, prune the ones we don't need.
- maps = []
delcount = 0
- for listid in xrange(len(self.runq_fnid)):
- if runq_build[listid-delcount] == 1:
- maps.append(listid-delcount)
- else:
- del self.runq_fnid[listid-delcount]
- del self.runq_task[listid-delcount]
- del self.runq_depends[listid-delcount]
- del runq_build[listid-delcount]
- del self.runq_revdeps[listid-delcount]
- del self.runq_hash[listid-delcount]
- delcount = delcount + 1
- maps.append(-1)
+ for tid in list(self.runtaskentries.keys()):
+ if tid not in runq_build:
+ del self.runtaskentries[tid]
+ delcount += 1
+
+ self.init_progress_reporter.next_stage()
#
# Step D - Sanity checks and computation
#
# Check to make sure we still have tasks to run
- if len(self.runq_fnid) == 0:
- if not taskData.abort:
+ if len(self.runtaskentries) == 0:
+ if not taskData[''].abort:
bb.msg.fatal("RunQueue", "All buildable tasks have been run but the build is incomplete (--continue mode). Errors for the tasks that failed will have been printed above.")
else:
bb.msg.fatal("RunQueue", "No active tasks and not in --continue mode?! Please report this bug.")
- logger.verbose("Pruned %s inactive tasks, %s left", delcount, len(self.runq_fnid))
-
- # Remap the dependencies to account for the deleted tasks
- # Check we didn't delete a task we depend on
- for listid in xrange(len(self.runq_fnid)):
- newdeps = []
- origdeps = self.runq_depends[listid]
- for origdep in origdeps:
- if maps[origdep] == -1:
- bb.msg.fatal("RunQueue", "Invalid mapping - Should never happen!")
- newdeps.append(maps[origdep])
- self.runq_depends[listid] = set(newdeps)
+ logger.verbose("Pruned %s inactive tasks, %s left", delcount, len(self.runtaskentries))
logger.verbose("Assign Weightings")
+ self.init_progress_reporter.next_stage()
+
# Generate a list of reverse dependencies to ease future calculations
- for listid in xrange(len(self.runq_fnid)):
- for dep in self.runq_depends[listid]:
- self.runq_revdeps[dep].add(listid)
+ for tid in self.runtaskentries:
+ for dep in self.runtaskentries[tid].depends:
+ self.runtaskentries[dep].revdeps.add(tid)
+
+ self.init_progress_reporter.next_stage()
# Identify tasks at the end of dependency chains
# Error on circular dependency loops (length two)
endpoints = []
- for listid in xrange(len(self.runq_fnid)):
- revdeps = self.runq_revdeps[listid]
+ for tid in self.runtaskentries:
+ revdeps = self.runtaskentries[tid].revdeps
if len(revdeps) == 0:
- endpoints.append(listid)
+ endpoints.append(tid)
for dep in revdeps:
- if dep in self.runq_depends[listid]:
- #self.dump_data(taskData)
- bb.msg.fatal("RunQueue", "Task %s (%s) has circular dependency on %s (%s)" % (taskData.fn_index[self.runq_fnid[dep]], self.runq_task[dep], taskData.fn_index[self.runq_fnid[listid]], self.runq_task[listid]))
+ if dep in self.runtaskentries[tid].depends:
+ bb.msg.fatal("RunQueue", "Task %s has circular dependency on %s" % (tid, dep))
+
logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints))
+ self.init_progress_reporter.next_stage()
+
# Calculate task weights
# Check of higher length circular dependencies
self.runq_weight = self.calculate_task_weights(endpoints)
+ self.init_progress_reporter.next_stage()
+
# Sanity Check - Check for multiple tasks building the same provider
- prov_list = {}
- seen_fn = []
- for task in xrange(len(self.runq_fnid)):
- fn = taskData.fn_index[self.runq_fnid[task]]
- if fn in seen_fn:
- continue
- seen_fn.append(fn)
- for prov in self.dataCache.fn_provides[fn]:
- if prov not in prov_list:
- prov_list[prov] = [fn]
- elif fn not in prov_list[prov]:
- prov_list[prov].append(fn)
- for prov in prov_list:
- if len(prov_list[prov]) > 1 and prov not in self.multi_provider_whitelist:
+ for mc in self.dataCaches:
+ prov_list = {}
+ seen_fn = []
+ for tid in self.runtaskentries:
+ (tidmc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+ if taskfn in seen_fn:
+ continue
+ if mc != tidmc:
+ continue
+ seen_fn.append(taskfn)
+ for prov in self.dataCaches[mc].fn_provides[taskfn]:
+ if prov not in prov_list:
+ prov_list[prov] = [taskfn]
+ elif taskfn not in prov_list[prov]:
+ prov_list[prov].append(taskfn)
+ for prov in prov_list:
+ if len(prov_list[prov]) < 2:
+ continue
+ if prov in self.multi_provider_whitelist:
+ continue
seen_pn = []
# If two versions of the same PN are being built its fatal, we don't support it.
for fn in prov_list[prov]:
- pn = self.dataCache.pkg_fn[fn]
+ pn = self.dataCaches[mc].pkg_fn[fn]
if pn not in seen_pn:
seen_pn.append(pn)
else:
@@ -770,15 +807,15 @@ class RunQueueData:
commondeps = None
for provfn in prov_list[prov]:
deps = set()
- for task, fnid in enumerate(self.runq_fnid):
- fn = taskData.fn_index[fnid]
+ for tid in self.runtaskentries:
+ fn = fn_from_tid(tid)
if fn != provfn:
continue
- for dep in self.runq_revdeps[task]:
- fn = taskData.fn_index[self.runq_fnid[dep]]
+ for dep in self.runtaskentries[tid].revdeps:
+ fn = fn_from_tid(dep)
if fn == provfn:
continue
- deps.add(self.get_short_user_idstring(dep))
+ deps.add(dep)
if not commondeps:
commondeps = set(deps)
else:
@@ -796,16 +833,16 @@ class RunQueueData:
commonprovs = None
commonrprovs = None
for provfn in prov_list[prov]:
- provides = set(self.dataCache.fn_provides[provfn])
+ provides = set(self.dataCaches[mc].fn_provides[provfn])
rprovides = set()
- for rprovide in self.dataCache.rproviders:
- if provfn in self.dataCache.rproviders[rprovide]:
+ for rprovide in self.dataCaches[mc].rproviders:
+ if provfn in self.dataCaches[mc].rproviders[rprovide]:
rprovides.add(rprovide)
- for package in self.dataCache.packages:
- if provfn in self.dataCache.packages[package]:
+ for package in self.dataCaches[mc].packages:
+ if provfn in self.dataCaches[mc].packages[package]:
rprovides.add(package)
- for package in self.dataCache.packages_dynamic:
- if provfn in self.dataCache.packages_dynamic[package]:
+ for package in self.dataCaches[mc].packages_dynamic:
+ if provfn in self.dataCaches[mc].packages_dynamic[package]:
rprovides.add(package)
if not commonprovs:
commonprovs = set(provides)
@@ -824,35 +861,39 @@ class RunQueueData:
msg += "\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs))
if self.warn_multi_bb:
- logger.warn(msg)
+ logger.warning(msg)
else:
logger.error(msg)
+ self.init_progress_reporter.next_stage()
+
# Create a whitelist usable by the stamp checks
- stampfnwhitelist = []
- for entry in self.stampwhitelist.split():
- entryid = self.taskData.getbuild_id(entry)
- if entryid not in self.taskData.build_targets:
- continue
- fnid = self.taskData.build_targets[entryid][0]
- fn = self.taskData.fn_index[fnid]
- stampfnwhitelist.append(fn)
- self.stampfnwhitelist = stampfnwhitelist
+ self.stampfnwhitelist = {}
+ for mc in self.taskData:
+ self.stampfnwhitelist[mc] = []
+ for entry in self.stampwhitelist.split():
+ if entry not in self.taskData[mc].build_targets:
+ continue
+ fn = self.taskData.build_targets[entry][0]
+ self.stampfnwhitelist[mc].append(fn)
+
+ self.init_progress_reporter.next_stage()
# Iterate over the task list looking for tasks with a 'setscene' function
- self.runq_setscene = []
+ self.runq_setscene_tids = []
if not self.cooker.configuration.nosetscene:
- for task in range(len(self.runq_fnid)):
- setscene = taskData.gettask_id(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task] + "_setscene", False)
- if not setscene:
+ for tid in self.runtaskentries:
+ (mc, fn, taskname, _) = split_tid_mcfn(tid)
+ setscenetid = fn + ":" + taskname + "_setscene"
+ if setscenetid not in taskData[mc].taskentries:
continue
- self.runq_setscene.append(task)
+ self.runq_setscene_tids.append(tid)
- def invalidate_task(fn, taskname, error_nostamp):
- taskdep = self.dataCache.task_deps[fn]
- fnid = self.taskData.getfn_id(fn)
- if taskname not in taskData.tasks_lookup[fnid]:
- logger.warn("Task %s does not exist, invalidating this task will have no effect" % taskname)
+ def invalidate_task(tid, error_nostamp):
+ (mc, fn, taskname, _) = split_tid_mcfn(tid)
+ taskdep = self.dataCaches[mc].task_deps[fn]
+ if fn + ":" + taskname not in taskData[mc].taskentries:
+ logger.warning("Task %s does not exist, invalidating this task will have no effect" % taskname)
if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
if error_nostamp:
bb.fatal("Task %s is marked nostamp, cannot invalidate this task" % taskname)
@@ -860,80 +901,84 @@ class RunQueueData:
bb.debug(1, "Task %s is marked nostamp, cannot invalidate this task" % taskname)
else:
logger.verbose("Invalidate task %s, %s", taskname, fn)
- bb.parse.siggen.invalidate_task(taskname, self.dataCache, fn)
+ bb.parse.siggen.invalidate_task(taskname, self.dataCaches[mc], fn)
+
+ self.init_progress_reporter.next_stage()
# Invalidate task if force mode active
if self.cooker.configuration.force:
- for (fn, target) in self.target_pairs:
- invalidate_task(fn, target, False)
+ for tid in self.target_tids:
+ invalidate_task(tid, False)
# Invalidate task if invalidate mode active
if self.cooker.configuration.invalidate_stamp:
- for (fn, target) in self.target_pairs:
+ for tid in self.target_tids:
+ fn = fn_from_tid(tid)
for st in self.cooker.configuration.invalidate_stamp.split(','):
if not st.startswith("do_"):
st = "do_%s" % st
- invalidate_task(fn, st, True)
+ invalidate_task(fn + ":" + st, True)
+
+ self.init_progress_reporter.next_stage()
# Create and print to the logs a virtual/xxxx -> PN (fn) table
- virtmap = taskData.get_providermap(prefix="virtual/")
- virtpnmap = {}
- for v in virtmap:
- virtpnmap[v] = self.dataCache.pkg_fn[virtmap[v]]
- bb.debug(2, "%s resolved to: %s (%s)" % (v, virtpnmap[v], virtmap[v]))
- if hasattr(bb.parse.siggen, "tasks_resolved"):
- bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCache)
+ for mc in taskData:
+ virtmap = taskData[mc].get_providermap(prefix="virtual/")
+ virtpnmap = {}
+ for v in virtmap:
+ virtpnmap[v] = self.dataCaches[mc].pkg_fn[virtmap[v]]
+ bb.debug(2, "%s resolved to: %s (%s)" % (v, virtpnmap[v], virtmap[v]))
+ if hasattr(bb.parse.siggen, "tasks_resolved"):
+ bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCaches[mc])
+
+ self.init_progress_reporter.next_stage()
# Iterate over the task list and call into the siggen code
dealtwith = set()
- todeal = set(range(len(self.runq_fnid)))
+ todeal = set(self.runtaskentries)
while len(todeal) > 0:
- for task in todeal.copy():
- if len(self.runq_depends[task] - dealtwith) == 0:
- dealtwith.add(task)
- todeal.remove(task)
+ for tid in todeal.copy():
+ if len(self.runtaskentries[tid].depends - dealtwith) == 0:
+ dealtwith.add(tid)
+ todeal.remove(tid)
procdep = []
- for dep in self.runq_depends[task]:
- procdep.append(self.taskData.fn_index[self.runq_fnid[dep]] + "." + self.runq_task[dep])
- self.runq_hash[task] = bb.parse.siggen.get_taskhash(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task], procdep, self.dataCache)
+ for dep in self.runtaskentries[tid].depends:
+ procdep.append(fn_from_tid(dep) + "." + taskname_from_tid(dep))
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+ self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(taskfn, taskname, procdep, self.dataCaches[mc])
+ task = self.runtaskentries[tid].task
bb.parse.siggen.writeout_file_checksum_cache()
- return len(self.runq_fnid)
- def dump_data(self, taskQueue):
+ #self.dump_data()
+ return len(self.runtaskentries)
+
+ def dump_data(self):
"""
Dump some debug information on the internal data structures
"""
logger.debug(3, "run_tasks:")
- for task in xrange(len(self.rqdata.runq_task)):
- logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task,
- taskQueue.fn_index[self.rqdata.runq_fnid[task]],
- self.rqdata.runq_task[task],
- self.rqdata.runq_weight[task],
- self.rqdata.runq_depends[task],
- self.rqdata.runq_revdeps[task])
-
- logger.debug(3, "sorted_tasks:")
- for task1 in xrange(len(self.rqdata.runq_task)):
- if task1 in self.prio_map:
- task = self.prio_map[task1]
- logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task,
- taskQueue.fn_index[self.rqdata.runq_fnid[task]],
- self.rqdata.runq_task[task],
- self.rqdata.runq_weight[task],
- self.rqdata.runq_depends[task],
- self.rqdata.runq_revdeps[task])
+ for tid in self.runtaskentries:
+ logger.debug(3, " %s: %s Deps %s RevDeps %s", tid,
+ self.runtaskentries[tid].weight,
+ self.runtaskentries[tid].depends,
+ self.runtaskentries[tid].revdeps)
+
+class RunQueueWorker():
+ def __init__(self, process, pipe):
+ self.process = process
+ self.pipe = pipe
class RunQueue:
- def __init__(self, cooker, cfgData, dataCache, taskData, targets):
+ def __init__(self, cooker, cfgData, dataCaches, taskData, targets):
self.cooker = cooker
self.cfgData = cfgData
- self.rqdata = RunQueueData(self, cooker, cfgData, dataCache, taskData, targets)
+ self.rqdata = RunQueueData(self, cooker, cfgData, dataCaches, taskData, targets)
self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY", True) or "perfile"
self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION", True) or None
- self.setsceneverify = cfgData.getVar("BB_SETSCENE_VERIFY_FUNCTION", True) or None
+ self.setsceneverify = cfgData.getVar("BB_SETSCENE_VERIFY_FUNCTION2", True) or None
self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID", True) or None
self.state = runQueuePrepare
@@ -942,12 +987,10 @@ class RunQueue:
self.dm = monitordisk.diskMonitor(cfgData)
self.rqexe = None
- self.worker = None
- self.workerpipe = None
- self.fakeworker = None
- self.fakeworkerpipe = None
+ self.worker = {}
+ self.fakeworker = {}
- def _start_worker(self, fakeroot = False, rqexec = None):
+ def _start_worker(self, mc, fakeroot = False, rqexec = None):
logger.debug(1, "Starting bitbake-worker")
magic = "decafbad"
if self.cooker.configuration.profile:
@@ -965,13 +1008,17 @@ class RunQueue:
bb.utils.nonblockingfd(worker.stdout)
workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec)
+ runqhash = {}
+ for tid in self.rqdata.runtaskentries:
+ runqhash[tid] = self.rqdata.runtaskentries[tid].hash
+
workerdata = {
- "taskdeps" : self.rqdata.dataCache.task_deps,
- "fakerootenv" : self.rqdata.dataCache.fakerootenv,
- "fakerootdirs" : self.rqdata.dataCache.fakerootdirs,
- "fakerootnoenv" : self.rqdata.dataCache.fakerootnoenv,
+ "taskdeps" : self.rqdata.dataCaches[mc].task_deps,
+ "fakerootenv" : self.rqdata.dataCaches[mc].fakerootenv,
+ "fakerootdirs" : self.rqdata.dataCaches[mc].fakerootdirs,
+ "fakerootnoenv" : self.rqdata.dataCaches[mc].fakerootnoenv,
"sigdata" : bb.parse.siggen.get_taskdata(),
- "runq_hash" : self.rqdata.runq_hash,
+ "runq_hash" : runqhash,
"logdefaultdebug" : bb.msg.loggerDefaultDebugLevel,
"logdefaultverbose" : bb.msg.loggerDefaultVerbose,
"logdefaultverboselogs" : bb.msg.loggerVerboseLogs,
@@ -982,61 +1029,65 @@ class RunQueue:
"time" : self.cfgData.getVar("TIME", True),
}
- worker.stdin.write("<cookerconfig>" + pickle.dumps(self.cooker.configuration) + "</cookerconfig>")
- worker.stdin.write("<workerdata>" + pickle.dumps(workerdata) + "</workerdata>")
+ worker.stdin.write(b"<cookerconfig>" + pickle.dumps(self.cooker.configuration) + b"</cookerconfig>")
+ worker.stdin.write(b"<workerdata>" + pickle.dumps(workerdata) + b"</workerdata>")
worker.stdin.flush()
- return worker, workerpipe
+ return RunQueueWorker(worker, workerpipe)
- def _teardown_worker(self, worker, workerpipe):
+ def _teardown_worker(self, worker):
if not worker:
return
logger.debug(1, "Teardown for bitbake-worker")
try:
- worker.stdin.write("<quit></quit>")
- worker.stdin.flush()
+ worker.process.stdin.write(b"<quit></quit>")
+ worker.process.stdin.flush()
+ worker.process.stdin.close()
except IOError:
pass
- while worker.returncode is None:
- workerpipe.read()
- worker.poll()
- while workerpipe.read():
+ while worker.process.returncode is None:
+ worker.pipe.read()
+ worker.process.poll()
+ while worker.pipe.read():
continue
- workerpipe.close()
+ worker.pipe.close()
def start_worker(self):
if self.worker:
self.teardown_workers()
self.teardown = False
- self.worker, self.workerpipe = self._start_worker()
+ for mc in self.rqdata.dataCaches:
+ self.worker[mc] = self._start_worker(mc)
def start_fakeworker(self, rqexec):
if not self.fakeworker:
- self.fakeworker, self.fakeworkerpipe = self._start_worker(True, rqexec)
+ for mc in self.rqdata.dataCaches:
+ self.fakeworker[mc] = self._start_worker(mc, True, rqexec)
def teardown_workers(self):
self.teardown = True
- self._teardown_worker(self.worker, self.workerpipe)
- self.worker = None
- self.workerpipe = None
- self._teardown_worker(self.fakeworker, self.fakeworkerpipe)
- self.fakeworker = None
- self.fakeworkerpipe = None
+ for mc in self.worker:
+ self._teardown_worker(self.worker[mc])
+ self.worker = {}
+ for mc in self.fakeworker:
+ self._teardown_worker(self.fakeworker[mc])
+ self.fakeworker = {}
def read_workers(self):
- self.workerpipe.read()
- if self.fakeworkerpipe:
- self.fakeworkerpipe.read()
+ for mc in self.worker:
+ self.worker[mc].pipe.read()
+ for mc in self.fakeworker:
+ self.fakeworker[mc].pipe.read()
def active_fds(self):
fds = []
- if self.workerpipe:
- fds.append(self.workerpipe.input)
- if self.fakeworkerpipe:
- fds.append(self.fakeworkerpipe.input)
+ for mc in self.worker:
+ fds.append(self.worker[mc].pipe.input)
+ for mc in self.fakeworker:
+ fds.append(self.fakeworker[mc].pipe.input)
return fds
- def check_stamp_task(self, task, taskname = None, recurse = False, cache = None):
+ def check_stamp_task(self, tid, taskname = None, recurse = False, cache = None):
def get_timestamp(f):
try:
if not os.access(f, os.F_OK):
@@ -1045,26 +1096,26 @@ class RunQueue:
except:
return None
+ (mc, fn, tn, taskfn) = split_tid_mcfn(tid)
+ if taskname is None:
+ taskname = tn
+
if self.stamppolicy == "perfile":
fulldeptree = False
else:
fulldeptree = True
stampwhitelist = []
if self.stamppolicy == "whitelist":
- stampwhitelist = self.rqdata.stampfnwhitelist
-
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- if taskname is None:
- taskname = self.rqdata.runq_task[task]
+ stampwhitelist = self.rqdata.stampfnwhitelist[mc]
- stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
+ stampfile = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn)
# If the stamp is missing, it's not current
if not os.access(stampfile, os.F_OK):
logger.debug(2, "Stampfile %s not available", stampfile)
return False
# If it's a 'nostamp' task, it's not current
- taskdep = self.rqdata.dataCache.task_deps[fn]
+ taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
logger.debug(2, "%s.%s is nostamp\n", fn, taskname)
return False
@@ -1077,23 +1128,26 @@ class RunQueue:
iscurrent = True
t1 = get_timestamp(stampfile)
- for dep in self.rqdata.runq_depends[task]:
+ for dep in self.rqdata.runtaskentries[tid].depends:
if iscurrent:
- fn2 = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[dep]]
- taskname2 = self.rqdata.runq_task[dep]
- stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCache, fn2)
- stampfile3 = bb.build.stampfile(taskname2 + "_setscene", self.rqdata.dataCache, fn2)
+ (mc2, fn2, taskname2, taskfn2) = split_tid_mcfn(dep)
+ stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCaches[mc2], taskfn2)
+ stampfile3 = bb.build.stampfile(taskname2 + "_setscene", self.rqdata.dataCaches[mc2], taskfn2)
t2 = get_timestamp(stampfile2)
t3 = get_timestamp(stampfile3)
+ if t3 and not t2:
+ continue
if t3 and t3 > t2:
- continue
+ continue
if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
if not t2:
logger.debug(2, 'Stampfile %s does not exist', stampfile2)
iscurrent = False
+ break
if t1 < t2:
logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2)
iscurrent = False
+ break
if recurse and iscurrent:
if dep in cache:
iscurrent = cache[dep]
@@ -1103,7 +1157,7 @@ class RunQueue:
iscurrent = self.check_stamp_task(dep, recurse=True, cache=cache)
cache[dep] = iscurrent
if recurse:
- cache[task] = iscurrent
+ cache[tid] = iscurrent
return iscurrent
def _execute_runqueue(self):
@@ -1117,19 +1171,31 @@ class RunQueue:
if self.state is runQueuePrepare:
self.rqexe = RunQueueExecuteDummy(self)
+ # NOTE: if you add, remove or significantly refactor the stages of this
+ # process then you should recalculate the weightings here. This is quite
+ # easy to do - just change the next line temporarily to pass debug=True as
+ # the last parameter and you'll get a printout of the weightings as well
+ # as a map to the lines where next_stage() was called. Of course this isn't
+ # critical, but it helps to keep the progress reporting accurate.
+ self.rqdata.init_progress_reporter = bb.progress.MultiStageProcessProgressReporter(self.cooker.data,
+ "Initialising tasks",
+ [43, 967, 4, 3, 1, 5, 3, 7, 13, 1, 2, 1, 1, 246, 35, 1, 38, 1, 35, 2, 338, 204, 142, 3, 3, 37, 244])
if self.rqdata.prepare() == 0:
self.state = runQueueComplete
else:
self.state = runQueueSceneInit
+ self.rqdata.init_progress_reporter.next_stage()
- # we are ready to run, see if any UI client needs the dependency info
- if bb.cooker.CookerFeatures.SEND_DEPENDS_TREE in self.cooker.featureset:
- depgraph = self.cooker.buildDependTree(self, self.rqdata.taskData)
- bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.cooker.data)
+ # we are ready to run, emit dependency info to any UI or class which
+ # needs it
+ depgraph = self.cooker.buildDependTree(self, self.rqdata.taskData)
+ self.rqdata.init_progress_reporter.next_stage()
+ bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.cooker.data)
if self.state is runQueueSceneInit:
dump = self.cooker.configuration.dump_signatures
if dump:
+ self.rqdata.init_progress_reporter.finish()
if 'printdiff' in dump:
invalidtasks = self.print_diffscenetasks()
self.dump_signatures(dump)
@@ -1137,7 +1203,9 @@ class RunQueue:
self.write_diffscenetasks(invalidtasks)
self.state = runQueueComplete
else:
+ self.rqdata.init_progress_reporter.next_stage()
self.start_worker()
+ self.rqdata.init_progress_reporter.next_stage()
self.rqexe = RunQueueExecuteScenequeue(self)
if self.state in [runQueueSceneRun, runQueueRunning, runQueueCleanUp]:
@@ -1150,6 +1218,8 @@ class RunQueue:
if self.cooker.configuration.setsceneonly:
self.state = runQueueComplete
else:
+ # Just in case we didn't setscene
+ self.rqdata.init_progress_reporter.finish()
logger.info("Executing RunQueue Tasks")
self.rqexe = RunQueueExecuteTasks(self)
self.state = runQueueRunning
@@ -1169,10 +1239,11 @@ class RunQueue:
logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and all succeeded.", self.rqexe.stats.completed, self.rqexe.stats.skipped)
if self.state is runQueueFailed:
- if not self.rqdata.taskData.tryaltconfigs:
- raise bb.runqueue.TaskFailure(self.rqexe.failed_fnids)
- for fnid in self.rqexe.failed_fnids:
- self.rqdata.taskData.fail_fnid(fnid)
+ if not self.rqdata.taskData[''].tryaltconfigs:
+ raise bb.runqueue.TaskFailure(self.rqexe.failed_tids)
+ for tid in self.rqexe.failed_tids:
+ (mc, fn, tn, _) = split_tid_mcfn(tid)
+ self.rqdata.taskData[mc].fail_fn(fn)
self.rqdata.reset()
if self.state is runQueueComplete:
@@ -1197,8 +1268,8 @@ class RunQueue:
pass
self.state = runQueueComplete
raise
- except:
- logger.error("An uncaught exception occured in runqueue, please see the failure below:")
+ except Exception as err:
+ logger.exception("An uncaught exception occurred in runqueue")
try:
self.teardown_workers()
except:
@@ -1219,13 +1290,14 @@ class RunQueue:
def dump_signatures(self, options):
done = set()
bb.note("Reparsing files to collect dependency data")
- for task in range(len(self.rqdata.runq_fnid)):
- if self.rqdata.runq_fnid[task] not in done:
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.collection.get_file_appends(fn), self.cooker.data)
- done.add(self.rqdata.runq_fnid[task])
+ bb_cache = bb.cache.NoCache(self.cooker.databuilder)
+ for tid in self.rqdata.runtaskentries:
+ fn = fn_from_tid(tid)
+ if fn not in done:
+ the_data = bb_cache.loadDataFull(fn, self.cooker.collection.get_file_appends(fn))
+ done.add(fn)
- bb.parse.siggen.dump_sigs(self.rqdata.dataCache, options)
+ bb.parse.siggen.dump_sigs(self.rqdata.dataCaches, options)
return
@@ -1241,20 +1313,19 @@ class RunQueue:
stamppresent = []
valid_new = set()
- for task in xrange(len(self.rqdata.runq_fnid)):
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- taskname = self.rqdata.runq_task[task]
- taskdep = self.rqdata.dataCache.task_deps[fn]
+ for tid in self.rqdata.runtaskentries:
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+ taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
if 'noexec' in taskdep and taskname in taskdep['noexec']:
- noexec.append(task)
+ noexec.append(tid)
continue
sq_fn.append(fn)
- sq_hashfn.append(self.rqdata.dataCache.hashfn[fn])
- sq_hash.append(self.rqdata.runq_hash[task])
+ sq_hashfn.append(self.rqdata.dataCaches[mc].hashfn[fn])
+ sq_hash.append(self.rqdata.runtaskentries[tid].hash)
sq_taskname.append(taskname)
- sq_task.append(task)
+ sq_task.append(tid)
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
try:
call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=True)"
@@ -1269,13 +1340,13 @@ class RunQueue:
# Tasks which are both setscene and noexec never care about dependencies
# We therefore find tasks which are setscene and noexec and mark their
# unique dependencies as valid.
- for task in noexec:
- if task not in self.rqdata.runq_setscene:
+ for tid in noexec:
+ if tid not in self.rqdata.runq_setscene_tids:
continue
- for dep in self.rqdata.runq_depends[task]:
+ for dep in self.rqdata.runtaskentries[tid].depends:
hasnoexecparents = True
- for dep2 in self.rqdata.runq_revdeps[dep]:
- if dep2 in self.rqdata.runq_setscene and dep2 in noexec:
+ for dep2 in self.rqdata.runtaskentries[dep].revdeps:
+ if dep2 in self.rqdata.runq_setscene_tids and dep2 in noexec:
continue
hasnoexecparents = False
break
@@ -1283,30 +1354,30 @@ class RunQueue:
valid_new.add(dep)
invalidtasks = set()
- for task in xrange(len(self.rqdata.runq_fnid)):
- if task not in valid_new and task not in noexec:
- invalidtasks.add(task)
+ for tid in self.rqdata.runtaskentries:
+ if tid not in valid_new and tid not in noexec:
+ invalidtasks.add(tid)
found = set()
processed = set()
- for task in invalidtasks:
- toprocess = set([task])
+ for tid in invalidtasks:
+ toprocess = set([tid])
while toprocess:
next = set()
for t in toprocess:
- for dep in self.rqdata.runq_depends[t]:
+ for dep in self.rqdata.runtaskentries[t].depends:
if dep in invalidtasks:
- found.add(task)
+ found.add(tid)
if dep not in processed:
processed.add(dep)
next.add(dep)
toprocess = next
- if task in found:
+ if tid in found:
toprocess = set()
tasklist = []
- for task in invalidtasks.difference(found):
- tasklist.append(self.rqdata.get_user_idstring(task))
+ for tid in invalidtasks.difference(found):
+ tasklist.append(tid)
if tasklist:
bb.plain("The differences between the current build and any cached tasks start at the following tasks:\n" + "\n".join(tasklist))
@@ -1330,11 +1401,10 @@ class RunQueue:
return recout
- for task in invalidtasks:
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- pn = self.rqdata.dataCache.pkg_fn[fn]
- taskname = self.rqdata.runq_task[task]
- h = self.rqdata.runq_hash[task]
+ for tid in invalidtasks:
+ (mc, fn, taskname, _) = split_tid_mcfn(tid)
+ pn = self.rqdata.dataCaches[mc].pkg_fn[fn]
+ h = self.rqdata.runtaskentries[tid].hash
matches = bb.siggen.find_siginfo(pn, taskname, [], self.cfgData)
match = None
for m in matches:
@@ -1342,7 +1412,7 @@ class RunQueue:
match = m
if match is None:
bb.fatal("Can't find a task we're supposed to have written out? (hash: %s)?" % h)
- matches = {k : v for k, v in matches.iteritems() if h not in k}
+ matches = {k : v for k, v in iter(matches.items()) if h not in k}
if matches:
latestmatch = sorted(matches.keys(), key=lambda f: matches[f])[-1]
prevh = __find_md5__.search(latestmatch).group(0)
@@ -1360,19 +1430,20 @@ class RunQueueExecute:
self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS", True) or 1)
self.scheduler = self.cfgData.getVar("BB_SCHEDULER", True) or "speed"
- self.runq_buildable = []
- self.runq_running = []
- self.runq_complete = []
+ self.runq_buildable = set()
+ self.runq_running = set()
+ self.runq_complete = set()
self.build_stamps = {}
self.build_stamps2 = []
- self.failed_fnids = []
+ self.failed_tids = []
self.stampcache = {}
- rq.workerpipe.setrunqueueexec(self)
- if rq.fakeworkerpipe:
- rq.fakeworkerpipe.setrunqueueexec(self)
+ for mc in rq.worker:
+ rq.worker[mc].pipe.setrunqueueexec(self)
+ for mc in rq.fakeworker:
+ rq.fakeworker[mc].pipe.setrunqueueexec(self)
if self.number_tasks <= 0:
bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks)
@@ -1391,18 +1462,22 @@ class RunQueueExecute:
return True
def finish_now(self):
-
- for worker in [self.rq.worker, self.rq.fakeworker]:
- if not worker:
- continue
+ for mc in self.rq.worker:
+ try:
+ self.rq.worker[mc].process.stdin.write(b"<finishnow></finishnow>")
+ self.rq.worker[mc].process.stdin.flush()
+ except IOError:
+ # worker must have died?
+ pass
+ for mc in self.rq.fakeworker:
try:
- worker.stdin.write("<finishnow></finishnow>")
- worker.stdin.flush()
+ self.rq.fakeworker[mc].process.stdin.write(b"<finishnow></finishnow>")
+ self.rq.fakeworker[mc].process.stdin.flush()
except IOError:
# worker must have died?
pass
- if len(self.failed_fnids) != 0:
+ if len(self.failed_tids) != 0:
self.rq.state = runQueueFailed
return
@@ -1417,7 +1492,7 @@ class RunQueueExecute:
self.rq.read_workers()
return self.rq.active_fds()
- if len(self.failed_fnids) != 0:
+ if len(self.failed_tids) != 0:
self.rq.state = runQueueFailed
return True
@@ -1431,13 +1506,8 @@ class RunQueueExecute:
taskdata = {}
taskdeps.add(task)
for dep in taskdeps:
- if setscene:
- depid = self.rqdata.runq_setscene[dep]
- else:
- depid = dep
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[depid]]
- pn = self.rqdata.dataCache.pkg_fn[fn]
- taskname = self.rqdata.runq_task[depid]
+ (mc, fn, taskname, _) = split_tid_mcfn(dep)
+ pn = self.rqdata.dataCaches[mc].pkg_fn[fn]
taskdata[dep] = [pn, taskname, fn]
call = self.rq.depvalidate + "(task, taskdata, notneeded, d)"
locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.expanded_data }
@@ -1457,34 +1527,32 @@ class RunQueueExecuteTasks(RunQueueExecute):
def __init__(self, rq):
RunQueueExecute.__init__(self, rq)
- self.stats = RunQueueStats(len(self.rqdata.runq_fnid))
+ self.stats = RunQueueStats(len(self.rqdata.runtaskentries))
self.stampcache = {}
initial_covered = self.rq.scenequeue_covered.copy()
# Mark initial buildable tasks
- for task in xrange(self.stats.total):
- self.runq_running.append(0)
- self.runq_complete.append(0)
- if len(self.rqdata.runq_depends[task]) == 0:
- self.runq_buildable.append(1)
- else:
- self.runq_buildable.append(0)
- if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered):
- self.rq.scenequeue_covered.add(task)
+ for tid in self.rqdata.runtaskentries:
+ if len(self.rqdata.runtaskentries[tid].depends) == 0:
+ self.runq_buildable.add(tid)
+ if len(self.rqdata.runtaskentries[tid].revdeps) > 0 and self.rqdata.runtaskentries[tid].revdeps.issubset(self.rq.scenequeue_covered):
+ self.rq.scenequeue_covered.add(tid)
found = True
while found:
found = False
- for task in xrange(self.stats.total):
- if task in self.rq.scenequeue_covered:
+ for tid in self.rqdata.runtaskentries:
+ if tid in self.rq.scenequeue_covered:
continue
- logger.debug(1, 'Considering %s (%s): %s' % (task, self.rqdata.get_user_idstring(task), str(self.rqdata.runq_revdeps[task])))
+ logger.debug(1, 'Considering %s: %s' % (tid, str(self.rqdata.runtaskentries[tid].revdeps)))
- if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered):
+ if len(self.rqdata.runtaskentries[tid].revdeps) > 0 and self.rqdata.runtaskentries[tid].revdeps.issubset(self.rq.scenequeue_covered):
+ if tid in self.rq.scenequeue_notcovered:
+ continue
found = True
- self.rq.scenequeue_covered.add(task)
+ self.rq.scenequeue_covered.add(tid)
logger.debug(1, 'Skip list (pre setsceneverify) %s', sorted(self.rq.scenequeue_covered))
@@ -1492,35 +1560,32 @@ class RunQueueExecuteTasks(RunQueueExecute):
covered_remove = set()
if self.rq.setsceneverify:
invalidtasks = []
- for task in xrange(len(self.rqdata.runq_task)):
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- taskname = self.rqdata.runq_task[task]
- taskdep = self.rqdata.dataCache.task_deps[fn]
-
+ tasknames = {}
+ fns = {}
+ for tid in self.rqdata.runtaskentries:
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+ taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
+ fns[tid] = taskfn
+ tasknames[tid] = taskname
if 'noexec' in taskdep and taskname in taskdep['noexec']:
continue
- if self.rq.check_stamp_task(task, taskname + "_setscene", cache=self.stampcache):
- logger.debug(2, 'Setscene stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(task))
+ if self.rq.check_stamp_task(tid, taskname + "_setscene", cache=self.stampcache):
+ logger.debug(2, 'Setscene stamp current for task %s', tid)
continue
- if self.rq.check_stamp_task(task, taskname, recurse = True, cache=self.stampcache):
- logger.debug(2, 'Normal stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(task))
+ if self.rq.check_stamp_task(tid, taskname, recurse = True, cache=self.stampcache):
+ logger.debug(2, 'Normal stamp current for task %s', tid)
continue
- invalidtasks.append(task)
+ invalidtasks.append(tid)
- call = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d, invalidtasks=invalidtasks)"
- call2 = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d)"
- locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.expanded_data, "invalidtasks" : invalidtasks }
- # Backwards compatibility with older versions without invalidtasks
- try:
- covered_remove = bb.utils.better_eval(call, locs)
- except TypeError:
- covered_remove = bb.utils.better_eval(call2, locs)
+ call = self.rq.setsceneverify + "(covered, tasknames, fns, d, invalidtasks=invalidtasks)"
+ locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : tasknames, "fns" : fns, "d" : self.cooker.expanded_data, "invalidtasks" : invalidtasks }
+ covered_remove = bb.utils.better_eval(call, locs)
- def removecoveredtask(task):
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- taskname = self.rqdata.runq_task[task] + '_setscene'
- bb.build.del_stamp(taskname, self.rqdata.dataCache, fn)
- self.rq.scenequeue_covered.remove(task)
+ def removecoveredtask(tid):
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+ taskname = taskname + '_setscene'
+ bb.build.del_stamp(taskname, self.rqdata.dataCaches[mc], taskfn)
+ self.rq.scenequeue_covered.remove(tid)
toremove = covered_remove
for task in toremove:
@@ -1529,7 +1594,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
covered_remove = []
for task in toremove:
removecoveredtask(task)
- for deptask in self.rqdata.runq_depends[task]:
+ for deptask in self.rqdata.runtaskentries[task].depends:
if deptask not in self.rq.scenequeue_covered:
continue
if deptask in toremove or deptask in covered_remove or deptask in initial_covered:
@@ -1540,7 +1605,15 @@ class RunQueueExecuteTasks(RunQueueExecute):
logger.debug(1, 'Full skip list %s', self.rq.scenequeue_covered)
- event.fire(bb.event.StampUpdate(self.rqdata.target_pairs, self.rqdata.dataCache.stamp), self.cfgData)
+
+ for mc in self.rqdata.dataCaches:
+ target_pairs = []
+ for tid in self.rqdata.target_tids:
+ (tidmc, fn, taskname, _) = split_tid_mcfn(tid)
+ if tidmc == mc:
+ target_pairs.append((fn, taskname))
+
+ event.fire(bb.event.StampUpdate(target_pairs, self.rqdata.dataCaches[mc].stamp), self.cfgData)
schedulers = self.get_schedulers()
for scheduler in schedulers:
@@ -1575,7 +1648,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
return schedulers
def setbuildable(self, task):
- self.runq_buildable[task] = 1
+ self.runq_buildable.add(task)
self.sched.newbuilable(task)
def task_completeoutright(self, task):
@@ -1584,21 +1657,21 @@ class RunQueueExecuteTasks(RunQueueExecute):
Look at the reverse dependencies and mark any task with
completed dependencies as buildable
"""
- self.runq_complete[task] = 1
- for revdep in self.rqdata.runq_revdeps[task]:
- if self.runq_running[revdep] == 1:
+ self.runq_complete.add(task)
+ for revdep in self.rqdata.runtaskentries[task].revdeps:
+ if revdep in self.runq_running:
continue
- if self.runq_buildable[revdep] == 1:
+ if revdep in self.runq_buildable:
continue
alldeps = 1
- for dep in self.rqdata.runq_depends[revdep]:
- if self.runq_complete[dep] != 1:
+ for dep in self.rqdata.runtaskentries[revdep].depends:
+ if dep not in self.runq_complete:
alldeps = 0
if alldeps == 1:
self.setbuildable(revdep)
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
- taskname = self.rqdata.runq_task[revdep]
- logger.debug(1, "Marking task %s (%s, %s) as buildable", revdep, fn, taskname)
+ fn = fn_from_tid(revdep)
+ taskname = taskname_from_tid(revdep)
+ logger.debug(1, "Marking task %s as buildable", revdep)
def task_complete(self, task):
self.stats.taskCompleted()
@@ -1611,14 +1684,13 @@ class RunQueueExecuteTasks(RunQueueExecute):
Updates the state engine with the failure
"""
self.stats.taskFailed()
- fnid = self.rqdata.runq_fnid[task]
- self.failed_fnids.append(fnid)
+ self.failed_tids.append(task)
bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq), self.cfgData)
- if self.rqdata.taskData.abort:
+ if self.rqdata.taskData[''].abort:
self.rq.state = runQueueCleanUp
def task_skip(self, task, reason):
- self.runq_running[task] = 1
+ self.runq_running.add(task)
self.setbuildable(task)
bb.event.fire(runQueueTaskSkipped(task, self.stats, self.rq, reason), self.cfgData)
self.task_completeoutright(task)
@@ -1630,8 +1702,52 @@ class RunQueueExecuteTasks(RunQueueExecute):
Run the tasks in a queue prepared by rqdata.prepare()
"""
+ if self.rqdata.setscenewhitelist and not self.rqdata.setscenewhitelist_checked:
+ self.rqdata.setscenewhitelist_checked = True
+
+ # Check tasks that are going to run against the whitelist
+ def check_norun_task(tid, showerror=False):
+ (mc, fn, taskname, _) = split_tid_mcfn(tid)
+ # Ignore covered tasks
+ if tid in self.rq.scenequeue_covered:
+ return False
+ # Ignore stamped tasks
+ if self.rq.check_stamp_task(tid, taskname, cache=self.stampcache):
+ return False
+ # Ignore noexec tasks
+ taskdep = self.rqdata.dataCaches[mc].task_deps[fn]
+ if 'noexec' in taskdep and taskname in taskdep['noexec']:
+ return False
+
+ pn = self.rqdata.dataCaches[mc].pkg_fn[fn]
+ if not check_setscene_enforce_whitelist(pn, taskname, self.rqdata.setscenewhitelist):
+ if showerror:
+ if tid in self.rqdata.runq_setscene_tids:
+ logger.error('Task %s.%s attempted to execute unexpectedly and should have been setscened' % (pn, taskname))
+ else:
+ logger.error('Task %s.%s attempted to execute unexpectedly' % (pn, taskname))
+ return True
+ return False
+ # Look to see if any tasks that we think shouldn't run are going to
+ unexpected = False
+ for tid in self.rqdata.runtaskentries:
+ if check_norun_task(tid):
+ unexpected = True
+ break
+ if unexpected:
+ # Run through the tasks in the rough order they'd have executed and print errors
+ # (since the order can be useful - usually missing sstate for the last few tasks
+ # is the cause of the problem)
+ task = self.sched.next()
+ while task is not None:
+ check_norun_task(task, showerror=True)
+ self.task_skip(task, 'Setscene enforcement check')
+ task = self.sched.next()
+
+ self.rq.state = runQueueCleanUp
+ return True
+
self.rq.read_workers()
-
if self.stats.total == 0:
# nothing to do
@@ -1639,30 +1755,28 @@ class RunQueueExecuteTasks(RunQueueExecute):
task = self.sched.next()
if task is not None:
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- taskname = self.rqdata.runq_task[task]
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(task)
if task in self.rq.scenequeue_covered:
- logger.debug(2, "Setscene covered task %s (%s)", task,
- self.rqdata.get_user_idstring(task))
+ logger.debug(2, "Setscene covered task %s", task)
self.task_skip(task, "covered")
return True
if self.rq.check_stamp_task(task, taskname, cache=self.stampcache):
- logger.debug(2, "Stamp current task %s (%s)", task,
- self.rqdata.get_user_idstring(task))
+ logger.debug(2, "Stamp current task %s", task)
+
self.task_skip(task, "existing")
return True
- taskdep = self.rqdata.dataCache.task_deps[fn]
+ taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
if 'noexec' in taskdep and taskname in taskdep['noexec']:
startevent = runQueueTaskStarted(task, self.stats, self.rq,
noexec=True)
bb.event.fire(startevent, self.cfgData)
- self.runq_running[task] = 1
+ self.runq_running.add(task)
self.stats.taskActive()
if not self.cooker.configuration.dry_run:
- bb.build.make_stamp(taskname, self.rqdata.dataCache, fn)
+ bb.build.make_stamp(taskname, self.rqdata.dataCaches[mc], taskfn)
self.task_complete(task)
return True
else:
@@ -1671,24 +1785,25 @@ class RunQueueExecuteTasks(RunQueueExecute):
taskdepdata = self.build_taskdepdata(task)
- taskdep = self.rqdata.dataCache.task_deps[fn]
+ taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
if not self.rq.fakeworker:
try:
self.rq.start_fakeworker(self)
except OSError as exc:
- logger.critical("Failed to spawn fakeroot worker to run %s:%s: %s" % (fn, taskname, str(exc)))
+ logger.critical("Failed to spawn fakeroot worker to run %s: %s" % (task, str(exc)))
self.rq.state = runQueueFailed
+ self.stats.taskFailed()
return True
- self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + "</runtask>")
- self.rq.fakeworker.stdin.flush()
+ self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + b"</runtask>")
+ self.rq.fakeworker[mc].process.stdin.flush()
else:
- self.rq.worker.stdin.write("<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + "</runtask>")
- self.rq.worker.stdin.flush()
+ self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata)) + b"</runtask>")
+ self.rq.worker[mc].process.stdin.flush()
- self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
- self.build_stamps2.append(self.build_stamps[task])
- self.runq_running[task] = 1
+ self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
+ self.build_stamps2.append(self.build_stamps[task])
+ self.runq_running.add(task)
self.stats.taskActive()
if self.stats.active < self.number_tasks:
return True
@@ -1697,17 +1812,17 @@ class RunQueueExecuteTasks(RunQueueExecute):
self.rq.read_workers()
return self.rq.active_fds()
- if len(self.failed_fnids) != 0:
+ if len(self.failed_tids) != 0:
self.rq.state = runQueueFailed
return True
# Sanity Checks
- for task in xrange(self.stats.total):
- if self.runq_buildable[task] == 0:
+ for task in self.rqdata.runtaskentries:
+ if task not in self.runq_buildable:
logger.error("Task %s never buildable!", task)
- if self.runq_running[task] == 0:
+ if task not in self.runq_running:
logger.error("Task %s never ran!", task)
- if self.runq_complete[task] == 0:
+ if task not in self.runq_complete:
logger.error("Task %s never completed!", task)
self.rq.state = runQueueComplete
@@ -1715,16 +1830,15 @@ class RunQueueExecuteTasks(RunQueueExecute):
def build_taskdepdata(self, task):
taskdepdata = {}
- next = self.rqdata.runq_depends[task]
+ next = self.rqdata.runtaskentries[task].depends
next.add(task)
while next:
additional = []
for revdep in next:
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
- pn = self.rqdata.dataCache.pkg_fn[fn]
- taskname = self.rqdata.runq_task[revdep]
- deps = self.rqdata.runq_depends[revdep]
- provides = self.rqdata.dataCache.fn_provides[fn]
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep)
+ pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
+ deps = self.rqdata.runtaskentries[revdep].depends
+ provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
taskdepdata[revdep] = [pn, taskname, fn, deps, provides]
for revdep2 in deps:
if revdep2 not in taskdepdata:
@@ -1743,16 +1857,16 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
self.scenequeue_notneeded = set()
# If we don't have any setscene functions, skip this step
- if len(self.rqdata.runq_setscene) == 0:
+ if len(self.rqdata.runq_setscene_tids) == 0:
rq.scenequeue_covered = set()
rq.state = runQueueRunInit
return
- self.stats = RunQueueStats(len(self.rqdata.runq_setscene))
+ self.stats = RunQueueStats(len(self.rqdata.runq_setscene_tids))
- sq_revdeps = []
- sq_revdeps_new = []
- sq_revdeps_squash = []
+ sq_revdeps = {}
+ sq_revdeps_new = {}
+ sq_revdeps_squash = {}
self.sq_harddeps = {}
# We need to construct a dependency graph for the setscene functions. Intermediate
@@ -1760,25 +1874,29 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
# therefore aims to collapse the huge runqueue dependency tree into a smaller one
# only containing the setscene functions.
- for task in xrange(self.stats.total):
- self.runq_running.append(0)
- self.runq_complete.append(0)
- self.runq_buildable.append(0)
+ self.rqdata.init_progress_reporter.next_stage()
# First process the chains up to the first setscene task.
endpoints = {}
- for task in xrange(len(self.rqdata.runq_fnid)):
- sq_revdeps.append(copy.copy(self.rqdata.runq_revdeps[task]))
- sq_revdeps_new.append(set())
- if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene:
- endpoints[task] = set()
+ for tid in self.rqdata.runtaskentries:
+ sq_revdeps[tid] = copy.copy(self.rqdata.runtaskentries[tid].revdeps)
+ sq_revdeps_new[tid] = set()
+ if (len(sq_revdeps[tid]) == 0) and tid not in self.rqdata.runq_setscene_tids:
+ #bb.warn("Added endpoint %s" % (tid))
+ endpoints[tid] = set()
+
+ self.rqdata.init_progress_reporter.next_stage()
# Secondly process the chains between setscene tasks.
- for task in self.rqdata.runq_setscene:
- for dep in self.rqdata.runq_depends[task]:
+ for tid in self.rqdata.runq_setscene_tids:
+ #bb.warn("Added endpoint 2 %s" % (tid))
+ for dep in self.rqdata.runtaskentries[tid].depends:
if dep not in endpoints:
endpoints[dep] = set()
- endpoints[dep].add(task)
+ #bb.warn(" Added endpoint 3 %s" % (dep))
+ endpoints[dep].add(tid)
+
+ self.rqdata.init_progress_reporter.next_stage()
def process_endpoints(endpoints):
newendpoints = {}
@@ -1789,26 +1907,28 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
if sq_revdeps_new[point]:
tasks |= sq_revdeps_new[point]
sq_revdeps_new[point] = set()
- if point in self.rqdata.runq_setscene:
+ if point in self.rqdata.runq_setscene_tids:
sq_revdeps_new[point] = tasks
tasks = set()
- for dep in self.rqdata.runq_depends[point]:
+ for dep in self.rqdata.runtaskentries[point].depends:
if point in sq_revdeps[dep]:
sq_revdeps[dep].remove(point)
if tasks:
sq_revdeps_new[dep] |= tasks
- if (len(sq_revdeps[dep]) == 0 or len(sq_revdeps_new[dep]) != 0) and dep not in self.rqdata.runq_setscene:
+ if (len(sq_revdeps[dep]) == 0 or len(sq_revdeps_new[dep]) != 0) and dep not in self.rqdata.runq_setscene_tids:
newendpoints[dep] = task
if len(newendpoints) != 0:
process_endpoints(newendpoints)
process_endpoints(endpoints)
+ self.rqdata.init_progress_reporter.next_stage()
+
# Build a list of setscene tasks which are "unskippable"
# These are direct endpoints referenced by the build
endpoints2 = {}
- sq_revdeps2 = []
- sq_revdeps_new2 = []
+ sq_revdeps2 = {}
+ sq_revdeps_new2 = {}
def process_endpoints2(endpoints):
newendpoints = {}
for point, task in endpoints.items():
@@ -1818,84 +1938,99 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
if sq_revdeps_new2[point]:
tasks |= sq_revdeps_new2[point]
sq_revdeps_new2[point] = set()
- if point in self.rqdata.runq_setscene:
+ if point in self.rqdata.runq_setscene_tids:
sq_revdeps_new2[point] = tasks
- for dep in self.rqdata.runq_depends[point]:
+ for dep in self.rqdata.runtaskentries[point].depends:
if point in sq_revdeps2[dep]:
sq_revdeps2[dep].remove(point)
if tasks:
sq_revdeps_new2[dep] |= tasks
- if (len(sq_revdeps2[dep]) == 0 or len(sq_revdeps_new2[dep]) != 0) and dep not in self.rqdata.runq_setscene:
+ if (len(sq_revdeps2[dep]) == 0 or len(sq_revdeps_new2[dep]) != 0) and dep not in self.rqdata.runq_setscene_tids:
newendpoints[dep] = tasks
if len(newendpoints) != 0:
process_endpoints2(newendpoints)
- for task in xrange(len(self.rqdata.runq_fnid)):
- sq_revdeps2.append(copy.copy(self.rqdata.runq_revdeps[task]))
- sq_revdeps_new2.append(set())
- if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene:
- endpoints2[task] = set()
+ for tid in self.rqdata.runtaskentries:
+ sq_revdeps2[tid] = copy.copy(self.rqdata.runtaskentries[tid].revdeps)
+ sq_revdeps_new2[tid] = set()
+ if (len(sq_revdeps2[tid]) == 0) and tid not in self.rqdata.runq_setscene_tids:
+ endpoints2[tid] = set()
process_endpoints2(endpoints2)
self.unskippable = []
- for task in self.rqdata.runq_setscene:
- if sq_revdeps_new2[task]:
- self.unskippable.append(self.rqdata.runq_setscene.index(task))
+ for tid in self.rqdata.runq_setscene_tids:
+ if sq_revdeps_new2[tid]:
+ self.unskippable.append(tid)
- for task in xrange(len(self.rqdata.runq_fnid)):
- if task in self.rqdata.runq_setscene:
+ self.rqdata.init_progress_reporter.next_stage(len(self.rqdata.runtaskentries))
+
+ for taskcounter, tid in enumerate(self.rqdata.runtaskentries):
+ if tid in self.rqdata.runq_setscene_tids:
deps = set()
- for dep in sq_revdeps_new[task]:
- deps.add(self.rqdata.runq_setscene.index(dep))
- sq_revdeps_squash.append(deps)
- elif len(sq_revdeps_new[task]) != 0:
+ for dep in sq_revdeps_new[tid]:
+ deps.add(dep)
+ sq_revdeps_squash[tid] = deps
+ elif len(sq_revdeps_new[tid]) != 0:
bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, aborting. Please report this problem.")
+ self.rqdata.init_progress_reporter.update(taskcounter)
+
+ self.rqdata.init_progress_reporter.next_stage()
# Resolve setscene inter-task dependencies
# e.g. do_sometask_setscene[depends] = "targetname:do_someothertask_setscene"
# Note that anything explicitly depended upon will have its reverse dependencies removed to avoid circular dependencies
- for task in self.rqdata.runq_setscene:
- realid = self.rqdata.taskData.gettask_id(self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]], self.rqdata.runq_task[task] + "_setscene", False)
- idepends = self.rqdata.taskData.tasks_idepends[realid]
- for (depid, idependtask) in idepends:
- if depid not in self.rqdata.taskData.build_targets:
+ for tid in self.rqdata.runq_setscene_tids:
+ (mc, fn, taskname, _) = split_tid_mcfn(tid)
+ realtid = fn + ":" + taskname + "_setscene"
+ idepends = self.rqdata.taskData[mc].taskentries[realtid].idepends
+ for (depname, idependtask) in idepends:
+
+ if depname not in self.rqdata.taskData[mc].build_targets:
continue
- depdata = self.rqdata.taskData.build_targets[depid][0]
- if depdata is None:
+ depfn = self.rqdata.taskData[mc].build_targets[depname][0]
+ if depfn is None:
continue
- dep = self.rqdata.taskData.fn_index[depdata]
- taskid = self.rqdata.get_task_id(self.rqdata.taskData.getfn_id(dep), idependtask.replace("_setscene", ""))
- if taskid is None:
- bb.msg.fatal("RunQueue", "Task %s_setscene depends upon non-existent task %s:%s" % (self.rqdata.get_user_idstring(task), dep, idependtask))
+ deptid = depfn + ":" + idependtask.replace("_setscene", "")
+ if deptid not in self.rqdata.runtaskentries:
+ bb.msg.fatal("RunQueue", "Task %s depends upon non-existent task %s:%s" % (realtid, depfn, idependtask))
- if not self.rqdata.runq_setscene.index(taskid) in self.sq_harddeps:
- self.sq_harddeps[self.rqdata.runq_setscene.index(taskid)] = set()
- self.sq_harddeps[self.rqdata.runq_setscene.index(taskid)].add(self.rqdata.runq_setscene.index(task))
+ if not deptid in self.sq_harddeps:
+ self.sq_harddeps[deptid] = set()
+ self.sq_harddeps[deptid].add(tid)
- sq_revdeps_squash[self.rqdata.runq_setscene.index(task)].add(self.rqdata.runq_setscene.index(taskid))
+ sq_revdeps_squash[tid].add(deptid)
# Have to zero this to avoid circular dependencies
- sq_revdeps_squash[self.rqdata.runq_setscene.index(taskid)] = set()
+ sq_revdeps_squash[deptid] = set()
+
+ self.rqdata.init_progress_reporter.next_stage()
for task in self.sq_harddeps:
for dep in self.sq_harddeps[task]:
sq_revdeps_squash[dep].add(task)
- #for task in xrange(len(sq_revdeps_squash)):
- # realtask = self.rqdata.runq_setscene[task]
- # bb.warn("Task %s: %s_setscene is %s " % (task, self.rqdata.get_user_idstring(realtask) , sq_revdeps_squash[task]))
+ self.rqdata.init_progress_reporter.next_stage()
+
+ #for tid in sq_revdeps_squash:
+ # for dep in sq_revdeps_squash[tid]:
+ # data = data + "\n %s" % dep
+ # bb.warn("Task %s_setscene: is %s " % (tid, data
- self.sq_deps = []
+ self.sq_deps = {}
self.sq_revdeps = sq_revdeps_squash
self.sq_revdeps2 = copy.deepcopy(self.sq_revdeps)
- for task in xrange(len(self.sq_revdeps)):
- self.sq_deps.append(set())
- for task in xrange(len(self.sq_revdeps)):
- for dep in self.sq_revdeps[task]:
- self.sq_deps[dep].add(task)
+ for tid in self.sq_revdeps:
+ self.sq_deps[tid] = set()
+ for tid in self.sq_revdeps:
+ for dep in self.sq_revdeps[tid]:
+ self.sq_deps[dep].add(tid)
+
+ self.rqdata.init_progress_reporter.next_stage()
+
+ for tid in self.sq_revdeps:
+ if len(self.sq_revdeps[tid]) == 0:
+ self.runq_buildable.add(tid)
- for task in xrange(len(self.sq_revdeps)):
- if len(self.sq_revdeps[task]) == 0:
- self.runq_buildable[task] = 1
+ self.rqdata.init_progress_reporter.finish()
self.outrightfail = []
if self.rq.hashvalidate:
@@ -1906,35 +2041,34 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
sq_task = []
noexec = []
stamppresent = []
- for task in xrange(len(self.sq_revdeps)):
- realtask = self.rqdata.runq_setscene[task]
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
- taskname = self.rqdata.runq_task[realtask]
- taskdep = self.rqdata.dataCache.task_deps[fn]
+ for tid in self.sq_revdeps:
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+
+ taskdep = self.rqdata.dataCaches[mc].task_deps[fn]
if 'noexec' in taskdep and taskname in taskdep['noexec']:
- noexec.append(task)
- self.task_skip(task)
- bb.build.make_stamp(taskname + "_setscene", self.rqdata.dataCache, fn)
+ noexec.append(tid)
+ self.task_skip(tid)
+ bb.build.make_stamp(taskname + "_setscene", self.rqdata.dataCaches[mc], taskfn)
continue
- if self.rq.check_stamp_task(realtask, taskname + "_setscene", cache=self.stampcache):
- logger.debug(2, 'Setscene stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(realtask))
- stamppresent.append(task)
- self.task_skip(task)
+ if self.rq.check_stamp_task(tid, taskname + "_setscene", cache=self.stampcache):
+ logger.debug(2, 'Setscene stamp current for task %s', tid)
+ stamppresent.append(tid)
+ self.task_skip(tid)
continue
- if self.rq.check_stamp_task(realtask, taskname, recurse = True, cache=self.stampcache):
- logger.debug(2, 'Normal stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(realtask))
- stamppresent.append(task)
- self.task_skip(task)
+ if self.rq.check_stamp_task(tid, taskname, recurse = True, cache=self.stampcache):
+ logger.debug(2, 'Normal stamp current for task %s', tid)
+ stamppresent.append(tid)
+ self.task_skip(tid)
continue
sq_fn.append(fn)
- sq_hashfn.append(self.rqdata.dataCache.hashfn[fn])
- sq_hash.append(self.rqdata.runq_hash[realtask])
+ sq_hashfn.append(self.rqdata.dataCaches[mc].hashfn[fn])
+ sq_hash.append(self.rqdata.runtaskentries[tid].hash)
sq_taskname.append(taskname)
- sq_task.append(task)
+ sq_task.append(tid)
call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
valid = bb.utils.better_eval(call, locs)
@@ -1943,12 +2077,10 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
for v in valid:
valid_new.append(sq_task[v])
- for task in xrange(len(self.sq_revdeps)):
- if task not in valid_new and task not in noexec:
- realtask = self.rqdata.runq_setscene[task]
- logger.debug(2, 'No package found, so skipping setscene task %s',
- self.rqdata.get_user_idstring(realtask))
- self.outrightfail.append(task)
+ for tid in self.sq_revdeps:
+ if tid not in valid_new and tid not in noexec:
+ logger.debug(2, 'No package found, so skipping setscene task %s', tid)
+ self.outrightfail.append(tid)
logger.info('Executing SetScene Tasks')
@@ -1957,9 +2089,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
def scenequeue_updatecounters(self, task, fail = False):
for dep in self.sq_deps[task]:
if fail and task in self.sq_harddeps and dep in self.sq_harddeps[task]:
- realtask = self.rqdata.runq_setscene[task]
- realdep = self.rqdata.runq_setscene[dep]
- logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (self.rqdata.get_user_idstring(realtask), self.rqdata.get_user_idstring(realdep)))
+ logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
self.scenequeue_updatecounters(dep, fail)
continue
if task not in self.sq_revdeps2[dep]:
@@ -1967,7 +2097,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
continue
self.sq_revdeps2[dep].remove(task)
if len(self.sq_revdeps2[dep]) == 0:
- self.runq_buildable[dep] = 1
+ self.runq_buildable.add(dep)
def task_completeoutright(self, task):
"""
@@ -1976,13 +2106,19 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
completed dependencies as buildable
"""
- index = self.rqdata.runq_setscene[task]
- logger.debug(1, 'Found task %s which could be accelerated',
- self.rqdata.get_user_idstring(index))
-
+ logger.debug(1, 'Found task %s which could be accelerated', task)
self.scenequeue_covered.add(task)
self.scenequeue_updatecounters(task)
+ def check_taskfail(self, task):
+ if self.rqdata.setscenewhitelist:
+ realtask = task.split('_setscene')[0]
+ (mc, fn, taskname, _) = split_tid_mcfn(realtask)
+ pn = self.rqdata.dataCaches[mc].pkg_fn[fn]
+ if not check_setscene_enforce_whitelist(pn, taskname, self.rqdata.setscenewhitelist):
+ logger.error('Task %s.%s failed' % (pn, taskname + "_setscene"))
+ self.rq.state = runQueueCleanUp
+
def task_complete(self, task):
self.stats.taskCompleted()
bb.event.fire(sceneQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
@@ -1993,19 +2129,19 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
bb.event.fire(sceneQueueTaskFailed(task, self.stats, result, self), self.cfgData)
self.scenequeue_notcovered.add(task)
self.scenequeue_updatecounters(task, True)
+ self.check_taskfail(task)
def task_failoutright(self, task):
- self.runq_running[task] = 1
- self.runq_buildable[task] = 1
+ self.runq_running.add(task)
+ self.runq_buildable.add(task)
self.stats.taskCompleted()
self.stats.taskSkipped()
- index = self.rqdata.runq_setscene[task]
self.scenequeue_notcovered.add(task)
self.scenequeue_updatecounters(task, True)
def task_skip(self, task):
- self.runq_running[task] = 1
- self.runq_buildable[task] = 1
+ self.runq_running.add(task)
+ self.runq_buildable.add(task)
self.task_completeoutright(task)
self.stats.taskCompleted()
self.stats.taskSkipped()
@@ -2020,20 +2156,18 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
task = None
if self.stats.active < self.number_tasks:
# Find the next setscene to run
- for nexttask in xrange(self.stats.total):
- if self.runq_buildable[nexttask] == 1 and self.runq_running[nexttask] != 1:
+ for nexttask in self.rqdata.runq_setscene_tids:
+ if nexttask in self.runq_buildable and nexttask not in self.runq_running:
if nexttask in self.unskippable:
- logger.debug(2, "Setscene task %s is unskippable" % self.rqdata.get_user_idstring(self.rqdata.runq_setscene[nexttask]))
+ logger.debug(2, "Setscene task %s is unskippable" % nexttask)
if nexttask not in self.unskippable and len(self.sq_revdeps[nexttask]) > 0 and self.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sq_revdeps[nexttask], True):
- realtask = self.rqdata.runq_setscene[nexttask]
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
+ fn = fn_from_tid(nexttask)
foundtarget = False
- for target in self.rqdata.target_pairs:
- if target[0] == fn and target[1] == self.rqdata.runq_task[realtask]:
- foundtarget = True
- break
+
+ if nexttask in self.rqdata.target_tids:
+ foundtarget = True
if not foundtarget:
- logger.debug(2, "Skipping setscene for task %s" % self.rqdata.get_user_idstring(self.rqdata.runq_setscene[nexttask]))
+ logger.debug(2, "Skipping setscene for task %s" % nexttask)
self.task_skip(nexttask)
self.scenequeue_notneeded.add(nexttask)
return True
@@ -2043,42 +2177,37 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
task = nexttask
break
if task is not None:
- realtask = self.rqdata.runq_setscene[task]
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
-
- taskname = self.rqdata.runq_task[realtask] + "_setscene"
- if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask], recurse = True, cache=self.stampcache):
- logger.debug(2, 'Stamp for underlying task %s(%s) is current, so skipping setscene variant',
- task, self.rqdata.get_user_idstring(realtask))
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(task)
+ taskname = taskname + "_setscene"
+ if self.rq.check_stamp_task(task, taskname_from_tid(task), recurse = True, cache=self.stampcache):
+ logger.debug(2, 'Stamp for underlying task %s is current, so skipping setscene variant', task)
self.task_failoutright(task)
return True
if self.cooker.configuration.force:
- for target in self.rqdata.target_pairs:
- if target[0] == fn and target[1] == self.rqdata.runq_task[realtask]:
- self.task_failoutright(task)
- return True
+ if task in self.rqdata.target_tids:
+ self.task_failoutright(task)
+ return True
- if self.rq.check_stamp_task(realtask, taskname, cache=self.stampcache):
- logger.debug(2, 'Setscene stamp current task %s(%s), so skip it and its dependencies',
- task, self.rqdata.get_user_idstring(realtask))
+ if self.rq.check_stamp_task(task, taskname, cache=self.stampcache):
+ logger.debug(2, 'Setscene stamp current task %s, so skip it and its dependencies', task)
self.task_skip(task)
return True
startevent = sceneQueueTaskStarted(task, self.stats, self.rq)
bb.event.fire(startevent, self.cfgData)
- taskdep = self.rqdata.dataCache.task_deps[fn]
- if 'fakeroot' in taskdep and taskname in taskdep['fakeroot']:
+ taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
+ if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
if not self.rq.fakeworker:
self.rq.start_fakeworker(self)
- self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + "</runtask>")
- self.rq.fakeworker.stdin.flush()
+ self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, True, self.cooker.collection.get_file_appends(taskfn), None)) + b"</runtask>")
+ self.rq.fakeworker[mc].process.stdin.flush()
else:
- self.rq.worker.stdin.write("<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + "</runtask>")
- self.rq.worker.stdin.flush()
+ self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, True, self.cooker.collection.get_file_appends(taskfn), None)) + b"</runtask>")
+ self.rq.worker[mc].process.stdin.flush()
- self.runq_running[task] = 1
+ self.runq_running.add(task)
self.stats.taskActive()
if self.stats.active < self.number_tasks:
return True
@@ -2087,17 +2216,14 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
self.rq.read_workers()
return self.rq.active_fds()
- #for task in xrange(self.stats.total):
- # if self.runq_running[task] != 1:
- # buildable = self.runq_buildable[task]
- # revdeps = self.sq_revdeps[task]
- # bb.warn("Found we didn't run %s %s %s %s" % (task, buildable, str(revdeps), self.rqdata.get_user_idstring(self.rqdata.runq_setscene[task])))
+ #for tid in self.sq_revdeps:
+ # if tid not in self.runq_running:
+ # buildable = tid in self.runq_buildable
+ # revdeps = self.sq_revdeps[tid]
+ # bb.warn("Found we didn't run %s %s %s" % (tid, buildable, str(revdeps)))
- # Convert scenequeue_covered task numbers into full taskgraph ids
- oldcovered = self.scenequeue_covered
- self.rq.scenequeue_covered = set()
- for task in oldcovered:
- self.rq.scenequeue_covered.add(self.rqdata.runq_setscene[task])
+ self.rq.scenequeue_covered = self.scenequeue_covered
+ self.rq.scenequeue_notcovered = self.scenequeue_notcovered
logger.debug(1, 'We can skip tasks %s', sorted(self.rq.scenequeue_covered))
@@ -2109,8 +2235,6 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
return True
def runqueue_process_waitpid(self, task, status):
- task = self.rq.rqdata.runq_setscene.index(task)
-
RunQueueExecute.runqueue_process_waitpid(self, task, status)
class TaskFailure(Exception):
@@ -2137,9 +2261,9 @@ class runQueueEvent(bb.event.Event):
"""
def __init__(self, task, stats, rq):
self.taskid = task
- self.taskstring = rq.rqdata.get_user_idstring(task)
- self.taskname = rq.rqdata.get_task_name(task)
- self.taskfile = rq.rqdata.get_task_file(task)
+ self.taskstring = task
+ self.taskname = taskname_from_tid(task)
+ self.taskfile = fn_from_tid(task)
self.taskhash = rq.rqdata.get_task_hash(task)
self.stats = stats.copy()
bb.event.Event.__init__(self)
@@ -2150,11 +2274,10 @@ class sceneQueueEvent(runQueueEvent):
"""
def __init__(self, task, stats, rq, noexec=False):
runQueueEvent.__init__(self, task, stats, rq)
- realtask = rq.rqdata.runq_setscene[task]
- self.taskstring = rq.rqdata.get_user_idstring(realtask, "_setscene")
- self.taskname = rq.rqdata.get_task_name(realtask) + "_setscene"
- self.taskfile = rq.rqdata.get_task_file(realtask)
- self.taskhash = rq.rqdata.get_task_hash(realtask)
+ self.taskstring = task + "_setscene"
+ self.taskname = taskname_from_tid(task) + "_setscene"
+ self.taskfile = fn_from_tid(task)
+ self.taskhash = rq.rqdata.get_task_hash(task)
class runQueueTaskStarted(runQueueEvent):
"""
@@ -2223,7 +2346,7 @@ class runQueuePipe():
if pipeout:
pipeout.close()
bb.utils.nonblockingfd(self.input)
- self.queue = ""
+ self.queue = b""
self.d = d
self.rq = rq
self.rqexec = rqexec
@@ -2232,22 +2355,16 @@ class runQueuePipe():
self.rqexec = rqexec
def read(self):
- for w in [self.rq.worker, self.rq.fakeworker]:
- if not w:
- continue
- w.poll()
- if w.returncode is not None and not self.rq.teardown:
- name = None
- if self.rq.worker and w.pid == self.rq.worker.pid:
- name = "Worker"
- elif self.rq.fakeworker and w.pid == self.rq.fakeworker.pid:
- name = "Fakeroot"
- bb.error("%s process (%s) exited unexpectedly (%s), shutting down..." % (name, w.pid, str(w.returncode)))
- self.rq.finish_runqueue(True)
+ for workers, name in [(self.rq.worker, "Worker"), (self.rq.fakeworker, "Fakeroot")]:
+ for worker in workers.values():
+ worker.process.poll()
+ if worker.process.returncode is not None and not self.rq.teardown:
+ bb.error("%s process (%s) exited unexpectedly (%s), shutting down..." % (name, worker.process.pid, str(worker.process.returncode)))
+ self.rq.finish_runqueue(True)
start = len(self.queue)
try:
- self.queue = self.queue + self.input.read(102400)
+ self.queue = self.queue + (self.input.read(102400) or b"")
except (OSError, IOError) as e:
if e.errno != errno.EAGAIN:
raise
@@ -2255,8 +2372,8 @@ class runQueuePipe():
found = True
while found and len(self.queue):
found = False
- index = self.queue.find("</event>")
- while index != -1 and self.queue.startswith("<event>"):
+ index = self.queue.find(b"</event>")
+ while index != -1 and self.queue.startswith(b"<event>"):
try:
event = pickle.loads(self.queue[7:index])
except ValueError as e:
@@ -2264,9 +2381,9 @@ class runQueuePipe():
bb.event.fire_from_worker(event, self.d)
found = True
self.queue = self.queue[index+8:]
- index = self.queue.find("</event>")
- index = self.queue.find("</exitcode>")
- while index != -1 and self.queue.startswith("<exitcode>"):
+ index = self.queue.find(b"</event>")
+ index = self.queue.find(b"</exitcode>")
+ while index != -1 and self.queue.startswith(b"<exitcode>"):
try:
task, status = pickle.loads(self.queue[10:index])
except ValueError as e:
@@ -2274,7 +2391,7 @@ class runQueuePipe():
self.rqexec.runqueue_process_waitpid(task, status)
found = True
self.queue = self.queue[index+11:]
- index = self.queue.find("</exitcode>")
+ index = self.queue.find(b"</exitcode>")
return (end > start)
def close(self):
@@ -2283,3 +2400,27 @@ class runQueuePipe():
if len(self.queue) > 0:
print("Warning, worker left partial message: %s" % self.queue)
self.input.close()
+
+def get_setscene_enforce_whitelist(d):
+ if d.getVar('BB_SETSCENE_ENFORCE', True) != '1':
+ return None
+ whitelist = (d.getVar("BB_SETSCENE_ENFORCE_WHITELIST", True) or "").split()
+ outlist = []
+ for item in whitelist[:]:
+ if item.startswith('%:'):
+ for target in sys.argv[1:]:
+ if not target.startswith('-'):
+ outlist.append(target.split(':')[0] + ':' + item.split(':')[1])
+ else:
+ outlist.append(item)
+ return outlist
+
+def check_setscene_enforce_whitelist(pn, taskname, whitelist):
+ import fnmatch
+ if whitelist:
+ item = '%s:%s' % (pn, taskname)
+ for whitelist_item in whitelist:
+ if fnmatch.fnmatch(item, whitelist_item):
+ return True
+ return False
+ return True
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/server/process.py b/import-layers/yocto-poky/bitbake/lib/bb/server/process.py
index a3078a873..982fcf71c 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/server/process.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/server/process.py
@@ -30,7 +30,7 @@ import signal
import sys
import time
import select
-from Queue import Empty
+from queue import Empty
from multiprocessing import Event, Process, util, Queue, Pipe, queues, Manager
from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
@@ -137,7 +137,7 @@ class ProcessServer(Process, BaseImplServer):
if not fds:
fds = []
- for function, data in self._idlefuns.items():
+ for function, data in list(self._idlefuns.items()):
try:
retval = function(self, data, False)
if retval is False:
@@ -145,7 +145,7 @@ class ProcessServer(Process, BaseImplServer):
nextsleep = None
elif retval is True:
nextsleep = None
- elif isinstance(retval, float):
+ elif isinstance(retval, float) and nextsleep:
if (retval < nextsleep):
nextsleep = retval
elif nextsleep is None:
@@ -213,7 +213,7 @@ class BitBakeProcessServerConnection(BitBakeBaseServerConnection):
# Wrap Queue to provide API which isn't server implementation specific
class ProcessEventQueue(multiprocessing.queues.Queue):
def __init__(self, maxsize):
- multiprocessing.queues.Queue.__init__(self, maxsize)
+ multiprocessing.queues.Queue.__init__(self, maxsize, ctx=multiprocessing.get_context())
self.exit = False
bb.utils.set_process_name("ProcessEQueue")
@@ -222,11 +222,10 @@ class ProcessEventQueue(multiprocessing.queues.Queue):
def waitEvent(self, timeout):
if self.exit:
- sys.exit(1)
+ return self.getEvent()
try:
if not self.server.is_alive():
- self.setexit()
- return None
+ return self.getEvent()
return self.get(True, timeout)
except Empty:
return None
@@ -235,9 +234,10 @@ class ProcessEventQueue(multiprocessing.queues.Queue):
try:
if not self.server.is_alive():
self.setexit()
- return None
return self.get(False)
except Empty:
+ if self.exit:
+ sys.exit(1)
return None
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/server/xmlrpc.py b/import-layers/yocto-poky/bitbake/lib/bb/server/xmlrpc.py
index ace1cf646..452f14bb3 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/server/xmlrpc.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/server/xmlrpc.py
@@ -31,31 +31,33 @@
in the server's main loop.
"""
+import os
+import sys
+
+import hashlib
+import time
+import socket
+import signal
+import threading
+import pickle
+import inspect
+import select
+import http.client
+import xmlrpc.client
+from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
+
import bb
-import xmlrpclib, sys
from bb import daemonize
from bb.ui import uievent
-import hashlib, time
-import socket
-import os, signal
-import threading
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
+from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
DEBUG = False
-from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
-import inspect, select, httplib
-
-from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
-
-class BBTransport(xmlrpclib.Transport):
+class BBTransport(xmlrpc.client.Transport):
def __init__(self, timeout):
self.timeout = timeout
self.connection_token = None
- xmlrpclib.Transport.__init__(self)
+ xmlrpc.client.Transport.__init__(self)
# Modified from default to pass timeout to HTTPConnection
def make_connection(self, host):
@@ -67,7 +69,7 @@ class BBTransport(xmlrpclib.Transport):
# create a HTTP connection object from a host descriptor
chost, self._extra_headers, x509 = self.get_host_info(host)
#store the host argument along with the connection object
- self._connection = host, httplib.HTTPConnection(chost, timeout=self.timeout)
+ self._connection = host, http.client.HTTPConnection(chost, timeout=self.timeout)
return self._connection[1]
def set_connection_token(self, token):
@@ -76,13 +78,30 @@ class BBTransport(xmlrpclib.Transport):
def send_content(self, h, body):
if self.connection_token:
h.putheader("Bitbake-token", self.connection_token)
- xmlrpclib.Transport.send_content(self, h, body)
+ xmlrpc.client.Transport.send_content(self, h, body)
def _create_server(host, port, timeout = 60):
t = BBTransport(timeout)
- s = xmlrpclib.ServerProxy("http://%s:%d/" % (host, port), transport=t, allow_none=True)
+ s = xmlrpc.client.ServerProxy("http://%s:%d/" % (host, port), transport=t, allow_none=True, use_builtin_types=True)
return s, t
+def check_connection(remote, timeout):
+ try:
+ host, port = remote.split(":")
+ port = int(port)
+ except Exception as e:
+ bb.warn("Failed to read remote definition (%s)" % str(e))
+ raise e
+
+ server, _transport = _create_server(host, port, timeout)
+ try:
+ ret, err = server.runCommand(['getVariable', 'TOPDIR'])
+ if err or not ret:
+ return False
+ except ConnectionError:
+ return False
+ return True
+
class BitBakeServerCommands():
def __init__(self, server):
@@ -128,7 +147,7 @@ class BitBakeServerCommands():
def addClient(self):
if self.has_client:
return None
- token = hashlib.md5(str(time.time())).hexdigest()
+ token = hashlib.md5(str(time.time()).encode("utf-8")).hexdigest()
self.server.set_connection_token(token)
self.has_client = True
return token
@@ -178,7 +197,7 @@ class XMLRPCProxyServer(BaseImplServer):
""" not a real working server, but a stub for a proxy server connection
"""
- def __init__(self, host, port):
+ def __init__(self, host, port, use_builtin_types=True):
self.host = host
self.port = port
@@ -186,7 +205,7 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
# remove this when you're done with debugging
# allow_reuse_address = True
- def __init__(self, interface, single_use=False):
+ def __init__(self, interface, single_use=False, idle_timeout=0):
"""
Constructor
"""
@@ -204,6 +223,10 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
self.commands = BitBakeServerCommands(self)
self.autoregister_all_functions(self.commands, "")
self.interface = interface
+ self.time = time.time()
+ self.idle_timeout = idle_timeout
+ if idle_timeout:
+ self.register_idle_function(self.handle_idle_timeout, self)
def addcooker(self, cooker):
BaseImplServer.addcooker(self, cooker)
@@ -219,6 +242,12 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
if name.startswith(prefix):
self.register_function(method, name[len(prefix):])
+ def handle_idle_timeout(self, server, data, abort):
+ if not abort:
+ if time.time() - server.time > server.idle_timeout:
+ server.quit = True
+ print("Server idle timeout expired")
+ return []
def serve_forever(self):
# Start the actual XMLRPC server
@@ -232,7 +261,7 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
while not self.quit:
fds = [self]
nextsleep = 0.1
- for function, data in self._idlefuns.items():
+ for function, data in list(self._idlefuns.items()):
retval = None
try:
retval = function(self, data, False)
@@ -261,13 +290,15 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
try:
fd_sets = select.select(fds, [], [], socktimeout)
if fd_sets[0] and self in fd_sets[0]:
+ if self.idle_timeout:
+ self.time = time.time()
self._handle_request_noblock()
except IOError:
# we ignore interrupted calls
pass
# Tell idle functions we're exiting
- for function, data in self._idlefuns.items():
+ for function, data in list(self._idlefuns.items()):
try:
retval = function(self, data, True)
except:
@@ -332,9 +363,10 @@ class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):
pass
class BitBakeServer(BitBakeBaseServer):
- def initServer(self, interface = ("localhost", 0), single_use = False):
+ def initServer(self, interface = ("localhost", 0),
+ single_use = False, idle_timeout=0):
self.interface = interface
- self.serverImpl = XMLRPCServer(interface, single_use)
+ self.serverImpl = XMLRPCServer(interface, single_use, idle_timeout)
def detach(self):
daemonize.createDaemon(self.serverImpl.serve_forever, "bitbake-cookerdaemon.log")
@@ -379,7 +411,7 @@ class BitBakeXMLRPCClient(BitBakeBaseServer):
bb.warn("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
raise e
try:
- self.serverImpl = XMLRPCProxyServer(host, port)
+ self.serverImpl = XMLRPCProxyServer(host, port, use_builtin_types=True)
self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, (ip, 0), self.observer_only, featureset)
return self.connection.connect(self.token)
except Exception as e:
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/siggen.py b/import-layers/yocto-poky/bitbake/lib/bb/siggen.py
index 88fc0f1d5..3a7dac4cb 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/siggen.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/siggen.py
@@ -3,19 +3,14 @@ import logging
import os
import re
import tempfile
+import pickle
import bb.data
from bb.checksum import FileChecksumCache
logger = logging.getLogger('BitBake.SigGen')
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
-
def init(d):
- siggens = [obj for obj in globals().itervalues()
+ siggens = [obj for obj in globals().values()
if type(obj) is type and issubclass(obj, SignatureGenerator)]
desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop"
@@ -138,7 +133,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
var = lookupcache[dep]
if var is not None:
data = data + str(var)
- self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest()
+ self.basehash[fn + "." + task] = hashlib.md5(data.encode("utf-8")).hexdigest()
taskdeps[task] = alldeps
self.taskdeps[fn] = taskdeps
@@ -149,8 +144,9 @@ class SignatureGeneratorBasic(SignatureGenerator):
def finalise(self, fn, d, variant):
- if variant:
- fn = "virtual:" + variant + ":" + fn
+ mc = d.getVar("__BBMULTICONFIG", False) or ""
+ if variant or mc:
+ fn = bb.cache.realfn2virtual(fn, variant, mc)
try:
taskdeps = self._build_data(fn, d)
@@ -221,9 +217,9 @@ class SignatureGeneratorBasic(SignatureGenerator):
if taint:
data = data + taint
self.taints[k] = taint
- logger.warn("%s is tainted from a forced run" % k)
+ logger.warning("%s is tainted from a forced run" % k)
- h = hashlib.md5(data).hexdigest()
+ h = hashlib.md5(data.encode("utf-8")).hexdigest()
self.taskhash[k] = h
#d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
return h
@@ -287,7 +283,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
with os.fdopen(fd, "wb") as stream:
p = pickle.dump(data, stream, -1)
stream.flush()
- os.chmod(tmpfile, 0664)
+ os.chmod(tmpfile, 0o664)
os.rename(tmpfile, sigfile)
except (OSError, IOError) as err:
try:
@@ -298,23 +294,25 @@ class SignatureGeneratorBasic(SignatureGenerator):
computed_basehash = calc_basehash(data)
if computed_basehash != self.basehash[k]:
- bb.error("Basehash mismatch %s verses %s for %s" % (computed_basehash, self.basehash[k], k))
- if k in self.taskhash:
+ bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[k], k))
+ if runtime and k in self.taskhash:
computed_taskhash = calc_taskhash(data)
if computed_taskhash != self.taskhash[k]:
- bb.error("Taskhash mismatch %s verses %s for %s" % (computed_taskhash, self.taskhash[k], k))
+ bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[k], k))
- def dump_sigs(self, dataCache, options):
+ def dump_sigs(self, dataCaches, options):
for fn in self.taskdeps:
for task in self.taskdeps[fn]:
+ tid = fn + ":" + task
+ (mc, _, _) = bb.runqueue.split_tid(tid)
k = fn + "." + task
if k not in self.taskhash:
continue
- if dataCache.basetaskhash[k] != self.basehash[k]:
+ if dataCaches[mc].basetaskhash[k] != self.basehash[k]:
bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % k)
- bb.error("The mismatched hashes were %s and %s" % (dataCache.basetaskhash[k], self.basehash[k]))
- self.dump_sigtask(fn, task, dataCache.stamp[fn], True)
+ bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[k], self.basehash[k]))
+ self.dump_sigtask(fn, task, dataCaches[mc].stamp[fn], True)
class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
name = "basichash"
@@ -368,10 +366,12 @@ def clean_basepaths_list(a):
def compare_sigfiles(a, b, recursecb = None):
output = []
- p1 = pickle.Unpickler(open(a, "rb"))
- a_data = p1.load()
- p2 = pickle.Unpickler(open(b, "rb"))
- b_data = p2.load()
+ with open(a, 'rb') as f:
+ p1 = pickle.Unpickler(f)
+ a_data = p1.load()
+ with open(b, 'rb') as f:
+ p2 = pickle.Unpickler(f)
+ b_data = p2.load()
def dict_diff(a, b, whitelist=set()):
sa = set(a.keys())
@@ -453,6 +453,11 @@ def compare_sigfiles(a, b, recursecb = None):
for dep in changed:
output.append("Variable %s value changed from '%s' to '%s'" % (dep, a_data['varvals'][dep], b_data['varvals'][dep]))
+ if not 'file_checksum_values' in a_data:
+ a_data['file_checksum_values'] = {}
+ if not 'file_checksum_values' in b_data:
+ b_data['file_checksum_values'] = {}
+
changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
if changed:
for f, old, new in changed:
@@ -464,6 +469,10 @@ def compare_sigfiles(a, b, recursecb = None):
for f in removed:
output.append("Dependency on checksum of file %s was removed" % (f))
+ if not 'runtaskdeps' in a_data:
+ a_data['runtaskdeps'] = {}
+ if not 'runtaskdeps' in b_data:
+ b_data['runtaskdeps'] = {}
if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
changed = ["Number of task dependencies changed"]
@@ -536,7 +545,7 @@ def calc_basehash(sigdata):
if val is not None:
basedata = basedata + str(val)
- return hashlib.md5(basedata).hexdigest()
+ return hashlib.md5(basedata.encode("utf-8")).hexdigest()
def calc_taskhash(sigdata):
data = sigdata['basehash']
@@ -553,14 +562,15 @@ def calc_taskhash(sigdata):
else:
data = data + sigdata['taint']
- return hashlib.md5(data).hexdigest()
+ return hashlib.md5(data.encode("utf-8")).hexdigest()
def dump_sigfile(a):
output = []
- p1 = pickle.Unpickler(open(a, "rb"))
- a_data = p1.load()
+ with open(a, 'rb') as f:
+ p1 = pickle.Unpickler(f)
+ a_data = p1.load()
output.append("basewhitelist: %s" % (a_data['basewhitelist']))
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/taskdata.py b/import-layers/yocto-poky/bitbake/lib/bb/taskdata.py
index 9ae52d77d..d8bdbcabf 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/taskdata.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/taskdata.py
@@ -37,27 +37,24 @@ def re_match_strings(target, strings):
return any(name == target or re.match(name, target)
for name in strings)
+class TaskEntry:
+ def __init__(self):
+ self.tdepends = []
+ self.idepends = []
+ self.irdepends = []
+
class TaskData:
"""
BitBake Task Data implementation
"""
def __init__(self, abort = True, tryaltconfigs = False, skiplist = None, allowincomplete = False):
- self.build_names_index = []
- self.run_names_index = []
- self.fn_index = []
-
self.build_targets = {}
self.run_targets = {}
self.external_targets = []
- self.tasks_fnid = []
- self.tasks_name = []
- self.tasks_tdepends = []
- self.tasks_idepends = []
- self.tasks_irdepends = []
- # Cache to speed up task ID lookups
- self.tasks_lookup = {}
+ self.seenfns = []
+ self.taskentries = {}
self.depids = {}
self.rdepids = {}
@@ -66,7 +63,7 @@ class TaskData:
self.failed_deps = []
self.failed_rdeps = []
- self.failed_fnids = []
+ self.failed_fns = []
self.abort = abort
self.tryaltconfigs = tryaltconfigs
@@ -74,88 +71,6 @@ class TaskData:
self.skiplist = skiplist
- def getbuild_id(self, name):
- """
- Return an ID number for the build target name.
- If it doesn't exist, create one.
- """
- if not name in self.build_names_index:
- self.build_names_index.append(name)
- return len(self.build_names_index) - 1
-
- return self.build_names_index.index(name)
-
- def getrun_id(self, name):
- """
- Return an ID number for the run target name.
- If it doesn't exist, create one.
- """
- if not name in self.run_names_index:
- self.run_names_index.append(name)
- return len(self.run_names_index) - 1
-
- return self.run_names_index.index(name)
-
- def getfn_id(self, name):
- """
- Return an ID number for the filename.
- If it doesn't exist, create one.
- """
- if not name in self.fn_index:
- self.fn_index.append(name)
- return len(self.fn_index) - 1
-
- return self.fn_index.index(name)
-
- def gettask_ids(self, fnid):
- """
- Return an array of the ID numbers matching a given fnid.
- """
- ids = []
- if fnid in self.tasks_lookup:
- for task in self.tasks_lookup[fnid]:
- ids.append(self.tasks_lookup[fnid][task])
- return ids
-
- def gettask_id_fromfnid(self, fnid, task):
- """
- Return an ID number for the task matching fnid and task.
- """
- if fnid in self.tasks_lookup:
- if task in self.tasks_lookup[fnid]:
- return self.tasks_lookup[fnid][task]
-
- return None
-
- def gettask_id(self, fn, task, create = True):
- """
- Return an ID number for the task matching fn and task.
- If it doesn't exist, create one by default.
- Optionally return None instead.
- """
- fnid = self.getfn_id(fn)
-
- if fnid in self.tasks_lookup:
- if task in self.tasks_lookup[fnid]:
- return self.tasks_lookup[fnid][task]
-
- if not create:
- return None
-
- self.tasks_name.append(task)
- self.tasks_fnid.append(fnid)
- self.tasks_tdepends.append([])
- self.tasks_idepends.append([])
- self.tasks_irdepends.append([])
-
- listid = len(self.tasks_name) - 1
-
- if fnid not in self.tasks_lookup:
- self.tasks_lookup[fnid] = {}
- self.tasks_lookup[fnid][task] = listid
-
- return listid
-
def add_tasks(self, fn, dataCache):
"""
Add tasks for a given fn to the database
@@ -163,29 +78,31 @@ class TaskData:
task_deps = dataCache.task_deps[fn]
- fnid = self.getfn_id(fn)
-
- if fnid in self.failed_fnids:
+ if fn in self.failed_fns:
bb.msg.fatal("TaskData", "Trying to re-add a failed file? Something is broken...")
# Check if we've already seen this fn
- if fnid in self.tasks_fnid:
+ if fn in self.seenfns:
return
+ self.seenfns.append(fn)
+
self.add_extra_deps(fn, dataCache)
for task in task_deps['tasks']:
+ tid = "%s:%s" % (fn, task)
+ self.taskentries[tid] = TaskEntry()
+
# Work out task dependencies
parentids = []
for dep in task_deps['parents'][task]:
if dep not in task_deps['tasks']:
bb.debug(2, "Not adding dependeny of %s on %s since %s does not exist" % (task, dep, dep))
continue
- parentid = self.gettask_id(fn, dep)
+ parentid = "%s:%s" % (fn, dep)
parentids.append(parentid)
- taskid = self.gettask_id(fn, task)
- self.tasks_tdepends[taskid].extend(parentids)
+ self.taskentries[tid].tdepends.extend(parentids)
# Touch all intertask dependencies
if 'depends' in task_deps and task in task_deps['depends']:
@@ -194,29 +111,30 @@ class TaskData:
if dep:
if ":" not in dep:
bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain ':' character\n. Task 'depends' should be specified in the form 'packagename:task'" % (fn, dep))
- ids.append(((self.getbuild_id(dep.split(":")[0])), dep.split(":")[1]))
- self.tasks_idepends[taskid].extend(ids)
+ ids.append(((dep.split(":")[0]), dep.split(":")[1]))
+ self.seen_build_target(dep.split(":")[0])
+ self.taskentries[tid].idepends.extend(ids)
if 'rdepends' in task_deps and task in task_deps['rdepends']:
ids = []
for dep in task_deps['rdepends'][task].split():
if dep:
if ":" not in dep:
bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain ':' character\n. Task 'rdepends' should be specified in the form 'packagename:task'" % (fn, dep))
- ids.append(((self.getrun_id(dep.split(":")[0])), dep.split(":")[1]))
- self.tasks_irdepends[taskid].extend(ids)
-
+ ids.append(((dep.split(":")[0]), dep.split(":")[1]))
+ self.seen_run_target(dep.split(":")[0])
+ self.taskentries[tid].irdepends.extend(ids)
# Work out build dependencies
- if not fnid in self.depids:
- dependids = {}
+ if not fn in self.depids:
+ dependids = set()
for depend in dataCache.deps[fn]:
- dependids[self.getbuild_id(depend)] = None
- self.depids[fnid] = dependids.keys()
+ dependids.add(depend)
+ self.depids[fn] = list(dependids)
logger.debug(2, "Added dependencies %s for %s", str(dataCache.deps[fn]), fn)
# Work out runtime dependencies
- if not fnid in self.rdepids:
- rdependids = {}
+ if not fn in self.rdepids:
+ rdependids = set()
rdepends = dataCache.rundeps[fn]
rrecs = dataCache.runrecs[fn]
rdependlist = []
@@ -224,24 +142,26 @@ class TaskData:
for package in rdepends:
for rdepend in rdepends[package]:
rdependlist.append(rdepend)
- rdependids[self.getrun_id(rdepend)] = None
+ rdependids.add(rdepend)
for package in rrecs:
for rdepend in rrecs[package]:
rreclist.append(rdepend)
- rdependids[self.getrun_id(rdepend)] = None
+ rdependids.add(rdepend)
if rdependlist:
logger.debug(2, "Added runtime dependencies %s for %s", str(rdependlist), fn)
if rreclist:
logger.debug(2, "Added runtime recommendations %s for %s", str(rreclist), fn)
- self.rdepids[fnid] = rdependids.keys()
+ self.rdepids[fn] = list(rdependids)
- for dep in self.depids[fnid]:
+ for dep in self.depids[fn]:
+ self.seen_build_target(dep)
if dep in self.failed_deps:
- self.fail_fnid(fnid)
+ self.fail_fn(fn)
return
- for dep in self.rdepids[fnid]:
+ for dep in self.rdepids[fn]:
+ self.seen_run_target(dep)
if dep in self.failed_rdeps:
- self.fail_fnid(fnid)
+ self.fail_fn(fn)
return
def add_extra_deps(self, fn, dataCache):
@@ -263,9 +183,7 @@ class TaskData:
"""
Have we a build target matching this name?
"""
- targetid = self.getbuild_id(target)
-
- if targetid in self.build_targets:
+ if target in self.build_targets and self.build_targets[target]:
return True
return False
@@ -273,50 +191,54 @@ class TaskData:
"""
Have we a runtime target matching this name?
"""
- targetid = self.getrun_id(target)
-
- if targetid in self.run_targets:
+ if target in self.run_targets and self.run_targets[target]:
return True
return False
+ def seen_build_target(self, name):
+ """
+ Maintain a list of build targets
+ """
+ if name not in self.build_targets:
+ self.build_targets[name] = []
+
def add_build_target(self, fn, item):
"""
Add a build target.
If already present, append the provider fn to the list
"""
- targetid = self.getbuild_id(item)
- fnid = self.getfn_id(fn)
-
- if targetid in self.build_targets:
- if fnid in self.build_targets[targetid]:
+ if item in self.build_targets:
+ if fn in self.build_targets[item]:
return
- self.build_targets[targetid].append(fnid)
+ self.build_targets[item].append(fn)
return
- self.build_targets[targetid] = [fnid]
+ self.build_targets[item] = [fn]
+
+ def seen_run_target(self, name):
+ """
+ Maintain a list of runtime build targets
+ """
+ if name not in self.run_targets:
+ self.run_targets[name] = []
def add_runtime_target(self, fn, item):
"""
Add a runtime target.
If already present, append the provider fn to the list
"""
- targetid = self.getrun_id(item)
- fnid = self.getfn_id(fn)
-
- if targetid in self.run_targets:
- if fnid in self.run_targets[targetid]:
+ if item in self.run_targets:
+ if fn in self.run_targets[item]:
return
- self.run_targets[targetid].append(fnid)
+ self.run_targets[item].append(fn)
return
- self.run_targets[targetid] = [fnid]
+ self.run_targets[item] = [fn]
- def mark_external_target(self, item):
+ def mark_external_target(self, target):
"""
Mark a build target as being externally requested
"""
- targetid = self.getbuild_id(item)
-
- if targetid not in self.external_targets:
- self.external_targets.append(targetid)
+ if target not in self.external_targets:
+ self.external_targets.append(target)
def get_unresolved_build_targets(self, dataCache):
"""
@@ -324,12 +246,12 @@ class TaskData:
are unknown.
"""
unresolved = []
- for target in self.build_names_index:
+ for target in self.build_targets:
if re_match_strings(target, dataCache.ignored_dependencies):
continue
- if self.build_names_index.index(target) in self.failed_deps:
+ if target in self.failed_deps:
continue
- if not self.have_build_target(target):
+ if not self.build_targets[target]:
unresolved.append(target)
return unresolved
@@ -339,12 +261,12 @@ class TaskData:
are unknown.
"""
unresolved = []
- for target in self.run_names_index:
+ for target in self.run_targets:
if re_match_strings(target, dataCache.ignored_dependencies):
continue
- if self.run_names_index.index(target) in self.failed_rdeps:
+ if target in self.failed_rdeps:
continue
- if not self.have_runtime_target(target):
+ if not self.run_targets[target]:
unresolved.append(target)
return unresolved
@@ -352,50 +274,26 @@ class TaskData:
"""
Return a list of providers of item
"""
- targetid = self.getbuild_id(item)
+ return self.build_targets[item]
- return self.build_targets[targetid]
-
- def get_dependees(self, itemid):
+ def get_dependees(self, item):
"""
Return a list of targets which depend on item
"""
dependees = []
- for fnid in self.depids:
- if itemid in self.depids[fnid]:
- dependees.append(fnid)
- return dependees
-
- def get_dependees_str(self, item):
- """
- Return a list of targets which depend on item as a user readable string
- """
- itemid = self.getbuild_id(item)
- dependees = []
- for fnid in self.depids:
- if itemid in self.depids[fnid]:
- dependees.append(self.fn_index[fnid])
+ for fn in self.depids:
+ if item in self.depids[fn]:
+ dependees.append(fn)
return dependees
- def get_rdependees(self, itemid):
+ def get_rdependees(self, item):
"""
Return a list of targets which depend on runtime item
"""
dependees = []
- for fnid in self.rdepids:
- if itemid in self.rdepids[fnid]:
- dependees.append(fnid)
- return dependees
-
- def get_rdependees_str(self, item):
- """
- Return a list of targets which depend on runtime item as a user readable string
- """
- itemid = self.getrun_id(item)
- dependees = []
- for fnid in self.rdepids:
- if itemid in self.rdepids[fnid]:
- dependees.append(self.fn_index[fnid])
+ for fn in self.rdepids:
+ if item in self.rdepids[fn]:
+ dependees.append(fn)
return dependees
def get_reasons(self, item, runtime=False):
@@ -431,7 +329,7 @@ class TaskData:
except bb.providers.NoProvider:
if self.abort:
raise
- self.remove_buildtarget(self.getbuild_id(item))
+ self.remove_buildtarget(item)
self.mark_external_target(item)
@@ -446,14 +344,14 @@ class TaskData:
return
if not item in dataCache.providers:
- close_matches = self.get_close_matches(item, dataCache.providers.keys())
+ close_matches = self.get_close_matches(item, list(dataCache.providers.keys()))
# Is it in RuntimeProviders ?
all_p = bb.providers.getRuntimeProviders(dataCache, item)
for fn in all_p:
new = dataCache.pkg_fn[fn] + " RPROVIDES " + item
if new not in close_matches:
close_matches.append(new)
- bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item), reasons=self.get_reasons(item), close_matches=close_matches), cfgData)
+ bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees(item), reasons=self.get_reasons(item), close_matches=close_matches), cfgData)
raise bb.providers.NoProvider(item)
if self.have_build_target(item):
@@ -462,10 +360,10 @@ class TaskData:
all_p = dataCache.providers[item]
eligible, foundUnique = bb.providers.filterProviders(all_p, item, cfgData, dataCache)
- eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]
+ eligible = [p for p in eligible if not p in self.failed_fns]
if not eligible:
- bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item), reasons=["No eligible PROVIDERs exist for '%s'" % item]), cfgData)
+ bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees(item), reasons=["No eligible PROVIDERs exist for '%s'" % item]), cfgData)
raise bb.providers.NoProvider(item)
if len(eligible) > 1 and foundUnique == False:
@@ -477,8 +375,7 @@ class TaskData:
self.consider_msgs_cache.append(item)
for fn in eligible:
- fnid = self.getfn_id(fn)
- if fnid in self.failed_fnids:
+ if fn in self.failed_fns:
continue
logger.debug(2, "adding %s to satisfy %s", fn, item)
self.add_build_target(fn, item)
@@ -502,14 +399,14 @@ class TaskData:
all_p = bb.providers.getRuntimeProviders(dataCache, item)
if not all_p:
- bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item), reasons=self.get_reasons(item, True)), cfgData)
+ bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees(item), reasons=self.get_reasons(item, True)), cfgData)
raise bb.providers.NoRProvider(item)
eligible, numberPreferred = bb.providers.filterProvidersRunTime(all_p, item, cfgData, dataCache)
- eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]
+ eligible = [p for p in eligible if not p in self.failed_fns]
if not eligible:
- bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item), reasons=["No eligible RPROVIDERs exist for '%s'" % item]), cfgData)
+ bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees(item), reasons=["No eligible RPROVIDERs exist for '%s'" % item]), cfgData)
raise bb.providers.NoRProvider(item)
if len(eligible) > 1 and numberPreferred == 0:
@@ -531,82 +428,80 @@ class TaskData:
# run through the list until we find one that we can build
for fn in eligible:
- fnid = self.getfn_id(fn)
- if fnid in self.failed_fnids:
+ if fn in self.failed_fns:
continue
logger.debug(2, "adding '%s' to satisfy runtime '%s'", fn, item)
self.add_runtime_target(fn, item)
self.add_tasks(fn, dataCache)
- def fail_fnid(self, fnid, missing_list=None):
+ def fail_fn(self, fn, missing_list=None):
"""
Mark a file as failed (unbuildable)
Remove any references from build and runtime provider lists
missing_list, A list of missing requirements for this target
"""
- if fnid in self.failed_fnids:
+ if fn in self.failed_fns:
return
if not missing_list:
missing_list = []
- logger.debug(1, "File '%s' is unbuildable, removing...", self.fn_index[fnid])
- self.failed_fnids.append(fnid)
+ logger.debug(1, "File '%s' is unbuildable, removing...", fn)
+ self.failed_fns.append(fn)
for target in self.build_targets:
- if fnid in self.build_targets[target]:
- self.build_targets[target].remove(fnid)
+ if fn in self.build_targets[target]:
+ self.build_targets[target].remove(fn)
if len(self.build_targets[target]) == 0:
self.remove_buildtarget(target, missing_list)
for target in self.run_targets:
- if fnid in self.run_targets[target]:
- self.run_targets[target].remove(fnid)
+ if fn in self.run_targets[target]:
+ self.run_targets[target].remove(fn)
if len(self.run_targets[target]) == 0:
self.remove_runtarget(target, missing_list)
- def remove_buildtarget(self, targetid, missing_list=None):
+ def remove_buildtarget(self, target, missing_list=None):
"""
Mark a build target as failed (unbuildable)
Trigger removal of any files that have this as a dependency
"""
if not missing_list:
- missing_list = [self.build_names_index[targetid]]
+ missing_list = [target]
else:
- missing_list = [self.build_names_index[targetid]] + missing_list
- logger.verbose("Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.build_names_index[targetid], missing_list)
- self.failed_deps.append(targetid)
- dependees = self.get_dependees(targetid)
- for fnid in dependees:
- self.fail_fnid(fnid, missing_list)
- for taskid in xrange(len(self.tasks_idepends)):
- idepends = self.tasks_idepends[taskid]
- for (idependid, idependtask) in idepends:
- if idependid == targetid:
- self.fail_fnid(self.tasks_fnid[taskid], missing_list)
-
- if self.abort and targetid in self.external_targets:
- target = self.build_names_index[targetid]
+ missing_list = [target] + missing_list
+ logger.verbose("Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", target, missing_list)
+ self.failed_deps.append(target)
+ dependees = self.get_dependees(target)
+ for fn in dependees:
+ self.fail_fn(fn, missing_list)
+ for tid in self.taskentries:
+ for (idepend, idependtask) in self.taskentries[tid].idepends:
+ if idepend == target:
+ fn = tid.rsplit(":",1)[0]
+ self.fail_fn(fn, missing_list)
+
+ if self.abort and target in self.external_targets:
logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
raise bb.providers.NoProvider(target)
- def remove_runtarget(self, targetid, missing_list=None):
+ def remove_runtarget(self, target, missing_list=None):
"""
Mark a run target as failed (unbuildable)
Trigger removal of any files that have this as a dependency
"""
if not missing_list:
- missing_list = [self.run_names_index[targetid]]
+ missing_list = [target]
else:
- missing_list = [self.run_names_index[targetid]] + missing_list
-
- logger.info("Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.run_names_index[targetid], missing_list)
- self.failed_rdeps.append(targetid)
- dependees = self.get_rdependees(targetid)
- for fnid in dependees:
- self.fail_fnid(fnid, missing_list)
- for taskid in xrange(len(self.tasks_irdepends)):
- irdepends = self.tasks_irdepends[taskid]
- for (idependid, idependtask) in irdepends:
- if idependid == targetid:
- self.fail_fnid(self.tasks_fnid[taskid], missing_list)
+ missing_list = [target] + missing_list
+
+ logger.info("Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", target, missing_list)
+ self.failed_rdeps.append(target)
+ dependees = self.get_rdependees(target)
+ for fn in dependees:
+ self.fail_fn(fn, missing_list)
+ for tid in self.taskentries:
+ for (idepend, idependtask) in self.taskentries[tid].irdepends:
+ if idepend == target:
+ fn = tid.rsplit(":",1)[0]
+ self.fail_fn(fn, missing_list)
def add_unresolved(self, cfgData, dataCache):
"""
@@ -620,17 +515,16 @@ class TaskData:
self.add_provider_internal(cfgData, dataCache, target)
added = added + 1
except bb.providers.NoProvider:
- targetid = self.getbuild_id(target)
- if self.abort and targetid in self.external_targets and not self.allowincomplete:
+ if self.abort and target in self.external_targets and not self.allowincomplete:
raise
if not self.allowincomplete:
- self.remove_buildtarget(targetid)
+ self.remove_buildtarget(target)
for target in self.get_unresolved_run_targets(dataCache):
try:
self.add_rprovider(cfgData, dataCache, target)
added = added + 1
except (bb.providers.NoRProvider, bb.providers.MultipleRProvider):
- self.remove_runtarget(self.getrun_id(target))
+ self.remove_runtarget(target)
logger.debug(1, "Resolved " + str(added) + " extra dependencies")
if added == 0:
break
@@ -638,13 +532,13 @@ class TaskData:
def get_providermap(self, prefix=None):
provmap = {}
- for name in self.build_names_index:
+ for name in self.build_targets:
if prefix and not name.startswith(prefix):
continue
if self.have_build_target(name):
provider = self.get_provider(name)
if provider:
- provmap[name] = self.fn_index[provider[0]]
+ provmap[name] = provider[0]
return provmap
def dump_data(self):
@@ -652,39 +546,37 @@ class TaskData:
Dump some debug information on the internal data structures
"""
logger.debug(3, "build_names:")
- logger.debug(3, ", ".join(self.build_names_index))
+ logger.debug(3, ", ".join(self.build_targets))
logger.debug(3, "run_names:")
- logger.debug(3, ", ".join(self.run_names_index))
+ logger.debug(3, ", ".join(self.run_targets))
logger.debug(3, "build_targets:")
- for buildid in xrange(len(self.build_names_index)):
- target = self.build_names_index[buildid]
+ for target in self.build_targets:
targets = "None"
- if buildid in self.build_targets:
- targets = self.build_targets[buildid]
- logger.debug(3, " (%s)%s: %s", buildid, target, targets)
+ if target in self.build_targets:
+ targets = self.build_targets[target]
+ logger.debug(3, " %s: %s", target, targets)
logger.debug(3, "run_targets:")
- for runid in xrange(len(self.run_names_index)):
- target = self.run_names_index[runid]
+ for target in self.run_targets:
targets = "None"
- if runid in self.run_targets:
- targets = self.run_targets[runid]
- logger.debug(3, " (%s)%s: %s", runid, target, targets)
+ if target in self.run_targets:
+ targets = self.run_targets[target]
+ logger.debug(3, " %s: %s", target, targets)
logger.debug(3, "tasks:")
- for task in xrange(len(self.tasks_name)):
- logger.debug(3, " (%s)%s - %s: %s",
- task,
- self.fn_index[self.tasks_fnid[task]],
- self.tasks_name[task],
- self.tasks_tdepends[task])
+ for tid in self.taskentries:
+ logger.debug(3, " %s: %s %s %s",
+ tid,
+ self.taskentries[tid].idepends,
+ self.taskentries[tid].irdepends,
+ self.taskentries[tid].tdepends)
logger.debug(3, "dependency ids (per fn):")
- for fnid in self.depids:
- logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.depids[fnid])
+ for fn in self.depids:
+ logger.debug(3, " %s: %s", fn, self.depids[fn])
logger.debug(3, "runtime dependency ids (per fn):")
- for fnid in self.rdepids:
- logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.rdepids[fnid])
+ for fn in self.rdepids:
+ logger.debug(3, " %s: %s", fn, self.rdepids[fn])
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tests/codeparser.py b/import-layers/yocto-poky/bitbake/lib/bb/tests/codeparser.py
index bb820e403..14f0e2572 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/tests/codeparser.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/tests/codeparser.py
@@ -191,8 +191,8 @@ class PythonReferenceTest(ReferenceTest):
if hasattr(bb.utils, "_context"):
self.context = bb.utils._context
else:
- import __builtin__
- self.context = __builtin__.__dict__
+ import builtins
+ self.context = builtins.__dict__
def parseExpression(self, exp):
parsedvar = self.d.expandWithRefs(exp, None)
@@ -302,7 +302,7 @@ bb.data.getVar(a(), d, False)
deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
- self.assertEquals(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))
+ self.assertEqual(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))
shelldata = """
@@ -349,7 +349,7 @@ esac
deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
- self.assertEquals(deps, set(["somevar", "inverted"] + execs))
+ self.assertEqual(deps, set(["somevar", "inverted"] + execs))
def test_vardeps(self):
@@ -359,7 +359,7 @@ esac
deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
- self.assertEquals(deps, set(["oe_libinstall"]))
+ self.assertEqual(deps, set(["oe_libinstall"]))
def test_vardeps_expand(self):
self.d.setVar("oe_libinstall", "echo test")
@@ -368,7 +368,7 @@ esac
deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
- self.assertEquals(deps, set(["oe_libinstall"]))
+ self.assertEqual(deps, set(["oe_libinstall"]))
#Currently no wildcard support
#def test_vardeps_wildcards(self):
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tests/cow.py b/import-layers/yocto-poky/bitbake/lib/bb/tests/cow.py
index 35c5841f3..d149d84d0 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/tests/cow.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/tests/cow.py
@@ -34,14 +34,14 @@ class COWTestCase(unittest.TestCase):
from bb.COW import COWDictBase
a = COWDictBase.copy()
- self.assertEquals(False, a.has_key('a'))
+ self.assertEqual(False, 'a' in a)
a['a'] = 'a'
a['b'] = 'b'
- self.assertEquals(True, a.has_key('a'))
- self.assertEquals(True, a.has_key('b'))
- self.assertEquals('a', a['a'] )
- self.assertEquals('b', a['b'] )
+ self.assertEqual(True, 'a' in a)
+ self.assertEqual(True, 'b' in a)
+ self.assertEqual('a', a['a'] )
+ self.assertEqual('b', a['b'] )
def testCopyCopy(self):
"""
@@ -60,31 +60,31 @@ class COWTestCase(unittest.TestCase):
c['a'] = 30
# test separation of the two instances
- self.assertEquals(False, c.has_key('c'))
- self.assertEquals(30, c['a'])
- self.assertEquals(10, b['a'])
+ self.assertEqual(False, 'c' in c)
+ self.assertEqual(30, c['a'])
+ self.assertEqual(10, b['a'])
# test copy
b_2 = b.copy()
c_2 = c.copy()
- self.assertEquals(False, c_2.has_key('c'))
- self.assertEquals(10, b_2['a'])
+ self.assertEqual(False, 'c' in c_2)
+ self.assertEqual(10, b_2['a'])
b_2['d'] = 40
- self.assertEquals(False, c_2.has_key('d'))
- self.assertEquals(True, b_2.has_key('d'))
- self.assertEquals(40, b_2['d'])
- self.assertEquals(False, b.has_key('d'))
- self.assertEquals(False, c.has_key('d'))
+ self.assertEqual(False, 'd' in c_2)
+ self.assertEqual(True, 'd' in b_2)
+ self.assertEqual(40, b_2['d'])
+ self.assertEqual(False, 'd' in b)
+ self.assertEqual(False, 'd' in c)
c_2['d'] = 30
- self.assertEquals(True, c_2.has_key('d'))
- self.assertEquals(True, b_2.has_key('d'))
- self.assertEquals(30, c_2['d'])
- self.assertEquals(40, b_2['d'])
- self.assertEquals(False, b.has_key('d'))
- self.assertEquals(False, c.has_key('d'))
+ self.assertEqual(True, 'd' in c_2)
+ self.assertEqual(True, 'd' in b_2)
+ self.assertEqual(30, c_2['d'])
+ self.assertEqual(40, b_2['d'])
+ self.assertEqual(False, 'd' in b)
+ self.assertEqual(False, 'd' in c)
# test copy of the copy
c_3 = c_2.copy()
@@ -92,19 +92,19 @@ class COWTestCase(unittest.TestCase):
b_3_2 = b_2.copy()
c_3['e'] = 4711
- self.assertEquals(4711, c_3['e'])
- self.assertEquals(False, c_2.has_key('e'))
- self.assertEquals(False, b_3.has_key('e'))
- self.assertEquals(False, b_3_2.has_key('e'))
- self.assertEquals(False, b_2.has_key('e'))
+ self.assertEqual(4711, c_3['e'])
+ self.assertEqual(False, 'e' in c_2)
+ self.assertEqual(False, 'e' in b_3)
+ self.assertEqual(False, 'e' in b_3_2)
+ self.assertEqual(False, 'e' in b_2)
b_3['e'] = 'viel'
- self.assertEquals('viel', b_3['e'])
- self.assertEquals(4711, c_3['e'])
- self.assertEquals(False, c_2.has_key('e'))
- self.assertEquals(True, b_3.has_key('e'))
- self.assertEquals(False, b_3_2.has_key('e'))
- self.assertEquals(False, b_2.has_key('e'))
+ self.assertEqual('viel', b_3['e'])
+ self.assertEqual(4711, c_3['e'])
+ self.assertEqual(False, 'e' in c_2)
+ self.assertEqual(True, 'e' in b_3)
+ self.assertEqual(False, 'e' in b_3_2)
+ self.assertEqual(False, 'e' in b_2)
def testCow(self):
from bb.COW import COWDictBase
@@ -115,12 +115,12 @@ class COWTestCase(unittest.TestCase):
copy = c.copy()
- self.assertEquals(1027, c['123'])
- self.assertEquals(4711, c['other'])
- self.assertEquals({'abc':10, 'bcd':20}, c['d'])
- self.assertEquals(1027, copy['123'])
- self.assertEquals(4711, copy['other'])
- self.assertEquals({'abc':10, 'bcd':20}, copy['d'])
+ self.assertEqual(1027, c['123'])
+ self.assertEqual(4711, c['other'])
+ self.assertEqual({'abc':10, 'bcd':20}, c['d'])
+ self.assertEqual(1027, copy['123'])
+ self.assertEqual(4711, copy['other'])
+ self.assertEqual({'abc':10, 'bcd':20}, copy['d'])
# cow it now
copy['123'] = 1028
@@ -128,9 +128,9 @@ class COWTestCase(unittest.TestCase):
copy['d']['abc'] = 20
- self.assertEquals(1027, c['123'])
- self.assertEquals(4711, c['other'])
- self.assertEquals({'abc':10, 'bcd':20}, c['d'])
- self.assertEquals(1028, copy['123'])
- self.assertEquals(4712, copy['other'])
- self.assertEquals({'abc':20, 'bcd':20}, copy['d'])
+ self.assertEqual(1027, c['123'])
+ self.assertEqual(4711, c['other'])
+ self.assertEqual({'abc':10, 'bcd':20}, c['d'])
+ self.assertEqual(1028, copy['123'])
+ self.assertEqual(4712, copy['other'])
+ self.assertEqual({'abc':20, 'bcd':20}, copy['d'])
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tests/data.py b/import-layers/yocto-poky/bitbake/lib/bb/tests/data.py
index 12232305c..b54eb0679 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/tests/data.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/tests/data.py
@@ -147,14 +147,14 @@ class DataExpansions(unittest.TestCase):
self.assertEqual(self.d.getVar("foo", False), None)
def test_keys(self):
- keys = self.d.keys()
- self.assertEqual(keys, ['value_of_foo', 'foo', 'bar'])
+ keys = list(self.d.keys())
+ self.assertCountEqual(keys, ['value_of_foo', 'foo', 'bar'])
def test_keys_deletion(self):
newd = bb.data.createCopy(self.d)
newd.delVar("bar")
- keys = newd.keys()
- self.assertEqual(keys, ['value_of_foo', 'foo'])
+ keys = list(newd.keys())
+ self.assertCountEqual(keys, ['value_of_foo', 'foo'])
class TestNestedExpansions(unittest.TestCase):
def setUp(self):
@@ -334,7 +334,7 @@ class TestOverrides(unittest.TestCase):
self.d.setVar("TEST2_bar", "testvalue2")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST2", True), "testvalue2")
- self.assertItemsEqual(self.d.keys(), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar'])
+ self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar'])
def test_multiple_override(self):
self.d.setVar("TEST_bar", "testvalue2")
@@ -342,7 +342,7 @@ class TestOverrides(unittest.TestCase):
self.d.setVar("TEST_foo", "testvalue4")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
- self.assertItemsEqual(self.d.keys(), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local'])
+ self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local'])
def test_multiple_combined_overrides(self):
self.d.setVar("TEST_local_foo_bar", "testvalue3")
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py b/import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py
index 4ba688bfe..0fd2c0216 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py
@@ -22,6 +22,7 @@
import unittest
import tempfile
import subprocess
+import collections
import os
from bb.fetch2 import URI
from bb.fetch2 import FetchMethod
@@ -133,10 +134,10 @@ class URITest(unittest.TestCase):
'userinfo': 'anoncvs:anonymous',
'username': 'anoncvs',
'password': 'anonymous',
- 'params': {
- 'tag': 'V0-99-81',
- 'module': 'familiar/dist/ipkg'
- },
+ 'params': collections.OrderedDict([
+ ('tag', 'V0-99-81'),
+ ('module', 'familiar/dist/ipkg')
+ ]),
'query': {},
'relative': False
},
@@ -359,7 +360,10 @@ class FetcherTest(unittest.TestCase):
def tearDown(self):
os.chdir(self.origdir)
- bb.utils.prunedir(self.tempdir)
+ if os.environ.get("BB_TMPDIR_NOCLEAN") == "yes":
+ print("Not cleaning up %s. Please remove manually." % self.tempdir)
+ else:
+ bb.utils.prunedir(self.tempdir)
class MirrorUriTest(FetcherTest):
@@ -450,7 +454,7 @@ class MirrorUriTest(FetcherTest):
class FetcherLocalTest(FetcherTest):
def setUp(self):
def touch(fn):
- with file(fn, 'a'):
+ with open(fn, 'a'):
os.utime(fn, None)
super(FetcherLocalTest, self).setUp()
@@ -504,6 +508,15 @@ class FetcherLocalTest(FetcherTest):
tree = self.fetchUnpack(['file://dir/subdir/e;subdir=bar'])
self.assertEqual(tree, ['bar/dir/subdir/e'])
+ def test_local_absolutedir(self):
+ # Unpacking to an absolute path that is a subdirectory of the root
+ # should work
+ tree = self.fetchUnpack(['file://a;subdir=%s' % os.path.join(self.unpackdir, 'bar')])
+
+ # Unpacking to an absolute path outside of the root should fail
+ with self.assertRaises(bb.fetch2.UnpackError):
+ self.fetchUnpack(['file://a;subdir=/bin/sh'])
+
class FetcherNetworkTest(FetcherTest):
if os.environ.get("BB_SKIP_NETTESTS") == "yes":
@@ -584,6 +597,36 @@ class FetcherNetworkTest(FetcherTest):
url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5"
self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
+ def test_gitfetch_localusehead(self):
+ # Create dummy local Git repo
+ src_dir = tempfile.mkdtemp(dir=self.tempdir,
+ prefix='gitfetch_localusehead_')
+ src_dir = os.path.abspath(src_dir)
+ bb.process.run("git init", cwd=src_dir)
+ bb.process.run("git commit --allow-empty -m'Dummy commit'",
+ cwd=src_dir)
+ # Use other branch than master
+ bb.process.run("git checkout -b my-devel", cwd=src_dir)
+ bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
+ cwd=src_dir)
+ stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
+ orig_rev = stdout[0].strip()
+
+ # Fetch and check revision
+ self.d.setVar("SRCREV", "AUTOINC")
+ url = "git://" + src_dir + ";protocol=file;usehead=1"
+ fetcher = bb.fetch.Fetch([url], self.d)
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+ stdout = bb.process.run("git rev-parse HEAD",
+ cwd=os.path.join(self.unpackdir, 'git'))
+ unpack_rev = stdout[0].strip()
+ self.assertEqual(orig_rev, unpack_rev)
+
+ def test_gitfetch_remoteusehead(self):
+ url = "git://git.openembedded.org/bitbake;usehead=1"
+ self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url)
+
def test_gitfetch_premirror(self):
url1 = "git://git.openembedded.org/bitbake"
url2 = "git://someserver.org/bitbake"
@@ -660,7 +703,7 @@ class URLHandle(unittest.TestCase):
datatable = {
"http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
"cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
- "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'}),
+ "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])),
"git://git.openembedded.org/bitbake;branch=@foo" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo'}),
"file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}),
}
@@ -767,7 +810,6 @@ class FetchLatestVersionTest(FetcherTest):
class FetchCheckStatusTest(FetcherTest):
test_wget_uris = ["http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2",
- "http://www.cups.org/software/ipptool/ipptool-20130731-linux-ubuntu-i686.tar.gz",
"http://www.cups.org/",
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tests/parse.py b/import-layers/yocto-poky/bitbake/lib/bb/tests/parse.py
index 6beb76a48..0b2706af0 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/tests/parse.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/tests/parse.py
@@ -50,7 +50,7 @@ C = "3"
def parsehelper(self, content, suffix = ".bb"):
f = tempfile.NamedTemporaryFile(suffix = suffix)
- f.write(content)
+ f.write(bytes(content, "utf-8"))
f.flush()
os.chdir(os.path.dirname(f.name))
return f
@@ -68,6 +68,23 @@ C = "3"
with self.assertRaises(bb.parse.ParseError):
d = bb.parse.handle(f.name, self.d)['']
+ unsettest = """
+A = "1"
+B = "2"
+B[flag] = "3"
+
+unset A
+unset B[flag]
+"""
+
+ def test_parse_unset(self):
+ f = self.parsehelper(self.unsettest)
+ d = bb.parse.handle(f.name, self.d)['']
+ self.assertEqual(d.getVar("A", True), None)
+ self.assertEqual(d.getVarFlag("A","flag", True), None)
+ self.assertEqual(d.getVar("B", True), "2")
+
+
overridetest = """
RRECOMMENDS_${PN} = "a"
RRECOMMENDS_${PN}_libc = "b"
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py b/import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py
index 7aa653f1a..8899e861c 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py
@@ -59,6 +59,12 @@ class Tinfoil:
def register_idle_function(self, function, data):
pass
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ self.shutdown()
+
def parseRecipes(self):
sys.stderr.write("Parsing recipes..")
self.logger.setLevel(logging.WARNING)
@@ -74,16 +80,52 @@ class Tinfoil:
self.logger.setLevel(logging.INFO)
sys.stderr.write("done.\n")
- self.cooker_data = self.cooker.recipecache
+ self.cooker_data = self.cooker.recipecaches['']
def prepare(self, config_only = False):
if not self.cooker_data:
if config_only:
self.cooker.parseConfiguration()
- self.cooker_data = self.cooker.recipecache
+ self.cooker_data = self.cooker.recipecaches['']
else:
self.parseRecipes()
+ def parse_recipe_file(self, fn, appends=True, appendlist=None, config_data=None):
+ """
+ Parse the specified recipe file (with or without bbappends)
+ and return a datastore object representing the environment
+ for the recipe.
+ Parameters:
+ fn: recipe file to parse - can be a file path or virtual
+ specification
+ appends: True to apply bbappends, False otherwise
+ appendlist: optional list of bbappend files to apply, if you
+ want to filter them
+ config_data: custom config datastore to use. NOTE: if you
+ specify config_data then you cannot use a virtual
+ specification for fn.
+ """
+ if appends and appendlist == []:
+ appends = False
+ if appends:
+ if appendlist:
+ appendfiles = appendlist
+ else:
+ if not hasattr(self.cooker, 'collection'):
+ raise Exception('You must call tinfoil.prepare() with config_only=False in order to get bbappends')
+ appendfiles = self.cooker.collection.get_file_appends(fn)
+ else:
+ appendfiles = None
+ if config_data:
+ # We have to use a different function here if we're passing in a datastore
+ localdata = bb.data.createCopy(config_data)
+ envdata = bb.cache.parse_recipe(localdata, fn, appendfiles)['']
+ else:
+ # Use the standard path
+ parser = bb.cache.NoCache(self.cooker.databuilder)
+ envdata = parser.loadDataFull(fn, appendfiles)
+ return envdata
+
def shutdown(self):
self.cooker.shutdown(force=True)
self.cooker.post_serve()
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py
index 93979054d..5b69660a3 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py
@@ -21,28 +21,29 @@ import bb
import re
import os
-os.environ["DJANGO_SETTINGS_MODULE"] = "toaster.toastermain.settings"
-
-
import django
from django.utils import timezone
+import toaster
+# Add toaster module to the search path to help django.setup() find the right
+# modules
+sys.path.insert(0, os.path.dirname(toaster.__file__))
-def _configure_toaster():
- """ Add toaster to sys path for importing modules
- """
- sys.path.append(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'toaster'))
-_configure_toaster()
-
+#Set the DJANGO_SETTINGS_MODULE if it's not already set
+os.environ["DJANGO_SETTINGS_MODULE"] =\
+ os.environ.get("DJANGO_SETTINGS_MODULE",
+ "toaster.toastermain.settings")
+# Setup django framework (needs to be done before importing modules)
django.setup()
from orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
-from orm.models import Target_Image_File, BuildArtifact
+from orm.models import Target_Image_File, TargetKernelFile, TargetSDKFile
from orm.models import Variable, VariableHistory
from orm.models import Package, Package_File, Target_Installed_Package, Target_File
from orm.models import Task_Dependency, Package_Dependency
from orm.models import Recipe_Dependency, Provides
from orm.models import Project, CustomImagePackage, CustomImageRecipe
+from orm.models import signal_runbuilds
from bldcontrol.models import BuildEnvironment, BuildRequest
@@ -54,11 +55,11 @@ from datetime import datetime, timedelta
from django.db import transaction, connection
+
# pylint: disable=invalid-name
# the logger name is standard throughout BitBake
logger = logging.getLogger("ToasterLogger")
-
class NotExisting(Exception):
pass
@@ -121,11 +122,27 @@ class ORMWrapper(object):
return vars(self)[dictname][key]
+ def get_similar_target_with_image_files(self, target):
+ """
+ Get a Target object "similar" to target; i.e. with the same target
+ name ('core-image-minimal' etc.) and machine.
+ """
+ return target.get_similar_target_with_image_files()
+
+ def get_similar_target_with_sdk_files(self, target):
+ return target.get_similar_target_with_sdk_files()
+
+ def clone_image_artifacts(self, target_from, target_to):
+ target_to.clone_image_artifacts_from(target_from)
+
+ def clone_sdk_artifacts(self, target_from, target_to):
+ target_to.clone_sdk_artifacts_from(target_from)
+
def _timestamp_to_datetime(self, secs):
"""
Convert timestamp in seconds to Python datetime
"""
- return datetime(1970, 1, 1) + timedelta(seconds=secs)
+ return timezone.make_aware(datetime(1970, 1, 1) + timedelta(seconds=secs))
# pylint: disable=no-self-use
# we disable detection of no self use in functions because the methods actually work on the object
@@ -134,55 +151,33 @@ class ORMWrapper(object):
# pylint: disable=bad-continuation
# we do not follow the python conventions for continuation indentation due to long lines here
- def create_build_object(self, build_info, brbe, project_id):
- assert 'machine' in build_info
- assert 'distro' in build_info
- assert 'distro_version' in build_info
- assert 'started_on' in build_info
- assert 'cooker_log_path' in build_info
- assert 'build_name' in build_info
- assert 'bitbake_version' in build_info
-
+ def get_or_create_build_object(self, brbe):
prj = None
buildrequest = None
- if brbe is not None: # this build was triggered by a request from a user
+ if brbe is not None:
+ # Toaster-triggered build
logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
br, _ = brbe.split(":")
- buildrequest = BuildRequest.objects.get(pk = br)
+ buildrequest = BuildRequest.objects.get(pk=br)
prj = buildrequest.project
-
- elif project_id is not None: # this build was triggered by an external system for a specific project
- logger.debug(1, "buildinfohelper: project is %s" % prj)
- prj = Project.objects.get(pk = project_id)
-
- else: # this build was triggered by a legacy system, or command line interactive mode
+ else:
+ # CLI build
prj = Project.objects.get_or_create_default_project()
logger.debug(1, "buildinfohelper: project is not specified, defaulting to %s" % prj)
-
if buildrequest is not None:
+ # reuse existing Build object
build = buildrequest.build
- logger.info("Updating existing build, with %s", build_info)
build.project = prj
- build.machine=build_info['machine']
- build.distro=build_info['distro']
- build.distro_version=build_info['distro_version']
- build.cooker_log_path=build_info['cooker_log_path']
- build.build_name=build_info['build_name']
- build.bitbake_version=build_info['bitbake_version']
build.save()
-
else:
+ # create new Build object
+ now = timezone.now()
build = Build.objects.create(
- project = prj,
- machine=build_info['machine'],
- distro=build_info['distro'],
- distro_version=build_info['distro_version'],
- started_on=build_info['started_on'],
- completed_on=build_info['started_on'],
- cooker_log_path=build_info['cooker_log_path'],
- build_name=build_info['build_name'],
- bitbake_version=build_info['bitbake_version'])
+ project=prj,
+ started_on=now,
+ completed_on=now,
+ build_name='')
logger.debug(1, "buildinfohelper: build is created %s" % build)
@@ -192,8 +187,17 @@ class ORMWrapper(object):
return build
+ def update_build(self, build, data_dict):
+ for key in data_dict:
+ setattr(build, key, data_dict[key])
+ build.save()
+
@staticmethod
def get_or_create_targets(target_info):
+ """
+ NB get_or_create() is used here because for Toaster-triggered builds,
+ we already created the targets when the build was triggered.
+ """
result = []
for target in target_info['targets']:
task = ''
@@ -203,17 +207,14 @@ class ORMWrapper(object):
task = task[3:]
if task == 'build':
task = ''
- obj, created = Target.objects.get_or_create(build=target_info['build'],
- target=target)
- if created:
- obj.is_image = False
- if task:
- obj.task = task
- obj.save()
+
+ obj, _ = Target.objects.get_or_create(build=target_info['build'],
+ target=target,
+ task=task)
result.append(obj)
return result
- def update_build_object(self, build, errors, warnings, taskfailures):
+ def update_build_stats_and_outcome(self, build, errors, warnings, taskfailures):
assert isinstance(build,Build)
assert isinstance(errors, int)
assert isinstance(warnings, int)
@@ -234,11 +235,16 @@ class ORMWrapper(object):
build.completed_on = timezone.now()
build.outcome = outcome
build.save()
+ signal_runbuilds()
def update_target_set_license_manifest(self, target, license_manifest_path):
target.license_manifest_path = license_manifest_path
target.save()
+ def update_target_set_package_manifest(self, target, package_manifest_path):
+ target.package_manifest_path = package_manifest_path
+ target.save()
+
def update_task_object(self, build, task_name, recipe_name, task_stats):
"""
Find the task for build which matches the recipe and task name
@@ -372,7 +378,7 @@ class ORMWrapper(object):
layer_copy, c = Layer_Version.objects.get_or_create(
build=build_obj,
layer=layer_obj.layer,
- up_branch=layer_obj.up_branch,
+ release=layer_obj.release,
branch=layer_version_information['branch'],
commit=layer_version_information['commit'],
local_path=layer_version_information['local_path'],
@@ -415,13 +421,24 @@ class ORMWrapper(object):
assert 'name' in layer_information
assert 'layer_index_url' in layer_information
+ # From command line builds we have no brbe as the request is directly
+ # from bitbake
if brbe is None:
- layer_object, _ = Layer.objects.get_or_create(
- name=layer_information['name'],
- layer_index_url=layer_information['layer_index_url'])
+ # If we don't have git commit sha then we're using a non-git
+ # layer so set the layer_source_dir to identify it as such
+ if not layer_information['version']['commit']:
+ local_source_dir = layer_information["local_path"]
+ else:
+ local_source_dir = None
+
+ layer_object, _ = \
+ Layer.objects.get_or_create(
+ name=layer_information['name'],
+ local_source_dir=local_source_dir,
+ layer_index_url=layer_information['layer_index_url'])
+
return layer_object
else:
- # we are under managed mode; we must match the layer used in the Project Layer
br_id, be_id = brbe.split(":")
# find layer by checkout path;
@@ -434,7 +451,11 @@ class ORMWrapper(object):
# note that this is different
buildrequest = BuildRequest.objects.get(pk = br_id)
for brl in buildrequest.brlayer_set.all():
- localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
+ if brl.local_source_dir:
+ localdirname = os.path.join(brl.local_source_dir,
+ brl.dirpath)
+ else:
+ localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
# we get a relative path, unless running in HEAD mode where the path is absolute
if not localdirname.startswith("/"):
localdirname = os.path.join(bc.be.sourcedir, localdirname)
@@ -446,6 +467,11 @@ class ORMWrapper(object):
if brl.layer_version:
return brl.layer_version
+ # This might be a local layer (i.e. no git info) so try
+ # matching local_source_dir
+ if brl.local_source_dir and brl.local_source_dir == layer_information["local_path"]:
+ return brl.layer_version
+
# we matched the BRLayer, but we need the layer_version that generated this BR; reverse of the Project.schedule_build()
#logger.debug(1, "Matched %s to BRlayer %s" % (pformat(layer_information["local_path"]), localdirname))
@@ -494,7 +520,7 @@ class ORMWrapper(object):
parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
tf_obj = Target_File.objects.create(
target = target_obj,
- path = unicode(path, 'utf-8'),
+ path = path,
size = size,
inodetype = Target_File.ITYPE_DIRECTORY,
permission = permission,
@@ -519,7 +545,7 @@ class ORMWrapper(object):
tf_obj = Target_File.objects.create(
target = target_obj,
- path = unicode(path, 'utf-8'),
+ path = path,
size = size,
inodetype = inodetype,
permission = permission,
@@ -550,9 +576,7 @@ class ORMWrapper(object):
filetarget_path = "/".join(fcpl)
try:
- filetarget_obj = Target_File.objects.get(
- target = target_obj,
- path = unicode(filetarget_path, 'utf-8'))
+ filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path)
except Target_File.DoesNotExist:
# we might have an invalid link; no way to detect this. just set it to None
filetarget_obj = None
@@ -561,7 +585,7 @@ class ORMWrapper(object):
tf_obj = Target_File.objects.create(
target = target_obj,
- path = unicode(path, 'utf-8'),
+ path = path,
size = size,
inodetype = Target_File.ITYPE_SYMLINK,
permission = permission,
@@ -606,8 +630,8 @@ class ORMWrapper(object):
Recipe,
name=built_recipe.name,
layer_version__build=None,
- layer_version__up_branch=
- built_recipe.layer_version.up_branch,
+ layer_version__release=
+ built_recipe.layer_version.release,
file_path=built_recipe.file_path,
version=built_recipe.version
)
@@ -664,8 +688,8 @@ class ORMWrapper(object):
dep_type = tdeptype,
target = target_obj))
except KeyError as e:
- logger.warn("Could not add dependency to the package %s "
- "because %s is an unknown package", p, px)
+ logger.warning("Could not add dependency to the package %s "
+ "because %s is an unknown package", p, px)
if len(packagedeps_objs) > 0:
Package_Dependency.objects.bulk_create(packagedeps_objs)
@@ -673,23 +697,26 @@ class ORMWrapper(object):
logger.info("No package dependencies created")
if len(errormsg) > 0:
- logger.warn("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg)
+ logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg)
def save_target_image_file_information(self, target_obj, file_name, file_size):
- Target_Image_File.objects.create( target = target_obj,
- file_name = file_name,
- file_size = file_size)
-
- def save_artifact_information(self, build_obj, file_name, file_size):
- # we skip the image files from other builds
- if Target_Image_File.objects.filter(file_name = file_name).count() > 0:
- return
+ Target_Image_File.objects.create(target=target_obj,
+ file_name=file_name, file_size=file_size)
- # do not update artifacts found in other builds
- if BuildArtifact.objects.filter(file_name = file_name).count() > 0:
- return
+ def save_target_kernel_file(self, target_obj, file_name, file_size):
+ """
+ Save kernel file (bzImage, modules*) information for a Target target_obj.
+ """
+ TargetKernelFile.objects.create(target=target_obj,
+ file_name=file_name, file_size=file_size)
- BuildArtifact.objects.create(build = build_obj, file_name = file_name, file_size = file_size)
+ def save_target_sdk_file(self, target_obj, file_name, file_size):
+ """
+ Save SDK artifacts to the database, associating them with a
+ Target object.
+ """
+ TargetSDKFile.objects.create(target=target_obj, file_name=file_name,
+ file_size=file_size)
def create_logmessage(self, log_information):
assert 'build' in log_information
@@ -857,6 +884,11 @@ class BuildInfoHelper(object):
Keeps in memory all data that needs matching before writing it to the database
"""
+ # tasks which produce image files; note we include '', as we set
+ # the task for a target to '' (i.e. 'build') if no target is
+ # explicitly defined
+ IMAGE_GENERATING_TASKS = ['', 'build', 'image', 'populate_sdk_ext']
+
# pylint: disable=protected-access
# the code will look into the protected variables of the event; no easy way around this
# pylint: disable=bad-continuation
@@ -888,22 +920,55 @@ class BuildInfoHelper(object):
###################
## methods to convert event/external info into objects that the ORM layer uses
+ def _ensure_build(self):
+ """
+ Ensure the current build object exists and is up to date with
+ data on the bitbake server
+ """
+ if not 'build' in self.internal_state or not self.internal_state['build']:
+ # create the Build object
+ self.internal_state['build'] = \
+ self.orm_wrapper.get_or_create_build_object(self.brbe)
+
+ build = self.internal_state['build']
- def _get_build_information(self, build_log_path):
+ # update missing fields on the Build object with found data
build_info = {}
- build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
- build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
- build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
- build_info['started_on'] = timezone.now()
- build_info['completed_on'] = timezone.now()
- build_info['cooker_log_path'] = build_log_path
- build_info['build_name'] = self.server.runCommand(["getVariable", "BUILDNAME"])[0]
- build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
- build_info['project'] = self.project = self.server.runCommand(["getVariable", "TOASTER_PROJECT"])[0]
- return build_info
+
+ # set to True if at least one field is going to be set
+ changed = False
+
+ if not build.build_name:
+ build_name = self.server.runCommand(["getVariable", "BUILDNAME"])[0]
+
+ # only reset the build name if the one on the server is actually
+ # a valid value for the build_name field
+ if build_name != None:
+ build_info['build_name'] = build_name
+ changed = True
+
+ if not build.machine:
+ build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
+ changed = True
+
+ if not build.distro:
+ build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
+ changed = True
+
+ if not build.distro_version:
+ build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
+ changed = True
+
+ if not build.bitbake_version:
+ build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
+ changed = True
+
+ if changed:
+ self.orm_wrapper.update_build(self.internal_state['build'], build_info)
def _get_task_information(self, event, recipe):
assert 'taskname' in vars(event)
+ self._ensure_build()
task_information = {}
task_information['build'] = self.internal_state['build']
@@ -918,8 +983,7 @@ class BuildInfoHelper(object):
return task_information
def _get_layer_version_for_path(self, path):
- assert path.startswith("/")
- assert 'build' in self.internal_state
+ self._ensure_build()
def _slkey_interactive(layer_version):
assert isinstance(layer_version, Layer_Version)
@@ -930,9 +994,12 @@ class BuildInfoHelper(object):
# we can match to the recipe file path
if path.startswith(lvo.local_path):
return lvo
+ if lvo.layer.local_source_dir and \
+ path.startswith(lvo.layer.local_source_dir):
+ return lvo
#if we get here, we didn't read layers correctly; dump whatever information we have on the error log
- logger.warn("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)
+ logger.warning("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)
#mockup the new layer
unknown_layer, _ = Layer.objects.get_or_create(name="Unidentified layer", layer_index_url="")
@@ -963,6 +1030,8 @@ class BuildInfoHelper(object):
return recipe_info
def _get_path_information(self, task_object):
+ self._ensure_build()
+
assert isinstance(task_object, Task)
build_stats_format = "{tmpdir}/buildstats/{buildname}/{package}/"
build_stats_path = []
@@ -1003,19 +1072,33 @@ class BuildInfoHelper(object):
self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path']
except NotExisting as nee:
- logger.warn("buildinfohelper: cannot identify layer exception:%s ", nee)
+ logger.warning("buildinfohelper: cannot identify layer exception:%s ", nee)
+
+ def store_started_build(self):
+ self._ensure_build()
+ def save_build_log_file_path(self, build_log_path):
+ self._ensure_build()
- def store_started_build(self, event, build_log_path):
+ if not self.internal_state['build'].cooker_log_path:
+ data_dict = {'cooker_log_path': build_log_path}
+ self.orm_wrapper.update_build(self.internal_state['build'], data_dict)
+
+ def save_build_targets(self, event):
+ self._ensure_build()
+
+ # create target information
assert '_pkgs' in vars(event)
- build_information = self._get_build_information(build_log_path)
+ target_information = {}
+ target_information['targets'] = event._pkgs
+ target_information['build'] = self.internal_state['build']
- # Update brbe and project as they can be changed for every build
- self.project = build_information['project']
+ self.internal_state['targets'] = self.orm_wrapper.get_or_create_targets(target_information)
- build_obj = self.orm_wrapper.create_build_object(build_information, self.brbe, self.project)
+ def save_build_layers_and_variables(self):
+ self._ensure_build()
- self.internal_state['build'] = build_obj
+ build_obj = self.internal_state['build']
# save layer version information for this build
if not 'lvs' in self.internal_state:
@@ -1026,13 +1109,6 @@ class BuildInfoHelper(object):
del self.internal_state['lvs']
- # create target information
- target_information = {}
- target_information['targets'] = event._pkgs
- target_information['build'] = build_obj
-
- self.internal_state['targets'] = self.orm_wrapper.get_or_create_targets(target_information)
-
# Save build configuration
data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]
@@ -1055,7 +1131,8 @@ class BuildInfoHelper(object):
abs_file_name = vh['file']
for pp in path_prefixes:
if abs_file_name.startswith(pp + "/"):
- vh['file']=abs_file_name[len(pp + "/"):]
+ # preserve layer name in relative path
+ vh['file']=abs_file_name[pp.rfind("/")+1:]
break
# save the variables
@@ -1063,35 +1140,48 @@ class BuildInfoHelper(object):
return self.brbe
+ def set_recipes_to_parse(self, num_recipes):
+ """
+ Set the number of recipes which need to be parsed for this build.
+ This is set the first time ParseStarted is received by toasterui.
+ """
+ self._ensure_build()
+ self.internal_state['build'].recipes_to_parse = num_recipes
+ self.internal_state['build'].save()
+
+ def set_recipes_parsed(self, num_recipes):
+ """
+ Set the number of recipes parsed so far for this build; this is updated
+ each time a ParseProgress or ParseCompleted event is received by
+ toasterui.
+ """
+ self._ensure_build()
+ if num_recipes <= self.internal_state['build'].recipes_to_parse:
+ self.internal_state['build'].recipes_parsed = num_recipes
+ self.internal_state['build'].save()
def update_target_image_file(self, event):
evdata = BuildInfoHelper._get_data_from_event(event)
for t in self.internal_state['targets']:
if t.is_image == True:
- output_files = list(evdata.viewkeys())
+ output_files = list(evdata.keys())
for output in output_files:
if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])
def update_artifact_image_file(self, event):
+ self._ensure_build()
evdata = BuildInfoHelper._get_data_from_event(event)
for artifact_path in evdata.keys():
- self.orm_wrapper.save_artifact_information(self.internal_state['build'], artifact_path, evdata[artifact_path])
+ self.orm_wrapper.save_artifact_information(
+ self.internal_state['build'], artifact_path,
+ evdata[artifact_path])
def update_build_information(self, event, errors, warnings, taskfailures):
- if 'build' in self.internal_state:
- self.orm_wrapper.update_build_object(self.internal_state['build'], errors, warnings, taskfailures)
-
-
- def store_license_manifest_path(self, event):
- deploy_dir = BuildInfoHelper._get_data_from_event(event)['deploy_dir']
- image_name = BuildInfoHelper._get_data_from_event(event)['image_name']
- path = deploy_dir + "/licenses/" + image_name + "/license.manifest"
- for target in self.internal_state['targets']:
- if target.target in image_name:
- self.orm_wrapper.update_target_set_license_manifest(target, path)
-
+ self._ensure_build()
+ self.orm_wrapper.update_build_stats_and_outcome(
+ self.internal_state['build'], errors, warnings, taskfailures)
def store_started_task(self, event):
assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
@@ -1134,6 +1224,7 @@ class BuildInfoHelper(object):
def store_tasks_stats(self, event):
+ self._ensure_build()
task_data = BuildInfoHelper._get_data_from_event(event)
for (task_file, task_name, task_stats, recipe_name) in task_data:
@@ -1229,6 +1320,8 @@ class BuildInfoHelper(object):
def store_target_package_data(self, event):
+ self._ensure_build()
+
# for all image targets
for target in self.internal_state['targets']:
if target.is_image:
@@ -1240,17 +1333,32 @@ class BuildInfoHelper(object):
self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'], built_package=True)
self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata.copy(), pkgdata, self.internal_state['recipes'], built_package=False)
except KeyError as e:
- logger.warn("KeyError in save_target_package_information"
- "%s ", e)
+ logger.warning("KeyError in save_target_package_information"
+ "%s ", e)
- try:
- self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
- except KeyError as e:
- logger.warn("KeyError in save_target_file_information"
- "%s ", e)
+ # only try to find files in the image if the task for this
+ # target is one which produces image files; otherwise, the old
+ # list of files in the files-in-image.txt file will be
+ # appended to the target even if it didn't produce any images
+ if target.task in BuildInfoHelper.IMAGE_GENERATING_TASKS:
+ try:
+ self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
+ except KeyError as e:
+ logger.warning("KeyError in save_target_file_information"
+ "%s ", e)
+ def cancel_cli_build(self):
+ """
+ If a build is currently underway, set its state to CANCELLED;
+ note that this only gets called for command line builds which are
+ interrupted, so it doesn't touch any BuildRequest objects
+ """
+ self._ensure_build()
+ self.internal_state['build'].outcome = Build.CANCELLED
+ self.internal_state['build'].save()
+ signal_runbuilds()
def store_dependency_information(self, event):
assert '_depgraph' in vars(event)
@@ -1392,10 +1500,12 @@ class BuildInfoHelper(object):
Task_Dependency.objects.bulk_create(taskdeps_objects)
if len(errormsg) > 0:
- logger.warn("buildinfohelper: dependency info not identify recipes: \n%s", errormsg)
+ logger.warning("buildinfohelper: dependency info not identify recipes: \n%s", errormsg)
def store_build_package_information(self, event):
+ self._ensure_build()
+
package_info = BuildInfoHelper._get_data_from_event(event)
self.orm_wrapper.save_build_package_information(
self.internal_state['build'],
@@ -1411,10 +1521,12 @@ class BuildInfoHelper(object):
def _store_build_done(self, errorcode):
logger.info("Build exited with errorcode %d", errorcode)
+
+ if not self.brbe:
+ return
+
br_id, be_id = self.brbe.split(":")
- be = BuildEnvironment.objects.get(pk = be_id)
- be.lock = BuildEnvironment.LOCK_LOCK
- be.save()
+
br = BuildRequest.objects.get(pk = br_id)
# if we're 'done' because we got cancelled update the build outcome
@@ -1432,6 +1544,10 @@ class BuildInfoHelper(object):
br.state = BuildRequest.REQ_FAILED
br.save()
+ be = BuildEnvironment.objects.get(pk = be_id)
+ be.lock = BuildEnvironment.LOCK_FREE
+ be.save()
+ signal_runbuilds()
def store_log_error(self, text):
mockevent = MockEvent()
@@ -1449,30 +1565,25 @@ class BuildInfoHelper(object):
mockevent.lineno = -1
self.store_log_event(mockevent)
-
def store_log_event(self, event):
+ self._ensure_build()
+
if event.levelno < formatter.WARNING:
return
- if 'args' in vars(event):
- event.msg = event.msg % event.args
-
- if not 'build' in self.internal_state:
- if self.brbe is None:
- if not 'backlog' in self.internal_state:
- self.internal_state['backlog'] = []
- self.internal_state['backlog'].append(event)
- return
- else: # we're under Toaster control, the build is already created
- br, _ = self.brbe.split(":")
- buildrequest = BuildRequest.objects.get(pk = br)
- self.internal_state['build'] = buildrequest.build
+ # early return for CLI builds
+ if self.brbe is None:
+ if not 'backlog' in self.internal_state:
+ self.internal_state['backlog'] = []
+ self.internal_state['backlog'].append(event)
+ return
- if 'build' in self.internal_state and 'backlog' in self.internal_state:
+ if 'backlog' in self.internal_state:
# if we have a backlog of events, do our best to save them here
if len(self.internal_state['backlog']):
tempevent = self.internal_state['backlog'].pop()
- logger.debug(1, "buildinfohelper: Saving stored event %s " % tempevent)
+ logger.debug(1, "buildinfohelper: Saving stored event %s "
+ % tempevent)
self.store_log_event(tempevent)
else:
logger.info("buildinfohelper: All events saved")
@@ -1491,26 +1602,324 @@ class BuildInfoHelper(object):
else:
log_information['level'] = LogMessage.INFO
- log_information['message'] = event.msg
+ log_information['message'] = event.getMessage()
log_information['pathname'] = event.pathname
log_information['lineno'] = event.lineno
logger.info("Logging error 2: %s", log_information)
self.orm_wrapper.create_logmessage(log_information)
+ def _get_filenames_from_image_license(self, image_license_manifest_path):
+ """
+ Find the FILES line in the image_license.manifest file,
+ which has the basenames of the bzImage and modules files
+ in this format:
+ FILES: bzImage--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.bin modules--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.tgz
+ """
+ files = []
+ with open(image_license_manifest_path) as image_license:
+ for line in image_license:
+ if line.startswith('FILES'):
+ files_str = line.split(':')[1].strip()
+ files_str = re.sub(r' {2,}', ' ', files_str)
+
+ # ignore lines like "FILES:" with no filenames
+ if files_str:
+ files += files_str.split(' ')
+ return files
+
+ def _endswith(self, str_to_test, endings):
+ """
+ Returns True if str ends with one of the strings in the list
+ endings, False otherwise
+ """
+ endswith = False
+ for ending in endings:
+ if str_to_test.endswith(ending):
+ endswith = True
+ break
+ return endswith
+
+ def _get_image_files(self, deploy_dir_image, image_name, image_file_extensions):
+ """
+ Find files in deploy_dir_image whose basename starts with the
+ string image_name and ends with one of the strings in
+ image_file_extensions.
+
+ Returns a list of file dictionaries like
+
+ [
+ {
+ 'path': '/path/to/image/file',
+ 'size': <file size in bytes>
+ }
+ ]
+ """
+ image_files = []
+
+ for dirpath, _, filenames in os.walk(deploy_dir_image):
+ for filename in filenames:
+ if filename.startswith(image_name) and \
+ self._endswith(filename, image_file_extensions):
+ image_file_path = os.path.join(dirpath, filename)
+ image_file_size = os.stat(image_file_path).st_size
+
+ image_files.append({
+ 'path': image_file_path,
+ 'size': image_file_size
+ })
+
+ return image_files
+
+ def scan_image_artifacts(self):
+ """
+ Scan for built image artifacts in DEPLOY_DIR_IMAGE and associate them
+ with a Target object in self.internal_state['targets'].
+
+ We have two situations to handle:
+
+ 1. This is the first time a target + machine has been built, so
+ add files from the DEPLOY_DIR_IMAGE to the target.
+
+ OR
+
+ 2. There are no new files for the target (they were already produced by
+ a previous build), so copy them from the most recent previous build with
+ the same target, task and machine.
+ """
+ deploy_dir_image = \
+ self.server.runCommand(['getVariable', 'DEPLOY_DIR_IMAGE'])[0]
+
+ # if there's no DEPLOY_DIR_IMAGE, there aren't going to be
+ # any image artifacts, so we can return immediately
+ if not deploy_dir_image:
+ return
+
+ buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0]
+ machine = self.server.runCommand(['getVariable', 'MACHINE'])[0]
+ image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
+
+ # location of the manifest files for this build;
+ # note that this file is only produced if an image is produced
+ license_directory = \
+ self.server.runCommand(['getVariable', 'LICENSE_DIRECTORY'])[0]
+
+ # file name extensions for image files
+ image_file_extensions_unique = {}
+ image_fstypes = self.server.runCommand(
+ ['getVariable', 'IMAGE_FSTYPES'])[0]
+ if image_fstypes != None:
+ image_types_str = image_fstypes.strip()
+ image_file_extensions = re.sub(r' {2,}', ' ', image_types_str)
+ image_file_extensions_unique = set(image_file_extensions.split(' '))
+
+ targets = self.internal_state['targets']
+
+ # filter out anything which isn't an image target
+ image_targets = [target for target in targets if target.is_image]
+
+ for image_target in image_targets:
+ # this is set to True if we find at least one file relating to
+ # this target; if this remains False after the scan, we copy the
+ # files from the most-recent Target with the same target + machine
+ # onto this Target instead
+ has_files = False
+
+ # we construct this because by the time we reach
+ # BuildCompleted, this has reset to
+ # 'defaultpkgname-<MACHINE>-<BUILDNAME>';
+ # we need to change it to
+ # <TARGET>-<MACHINE>-<BUILDNAME>
+ real_image_name = re.sub(r'^defaultpkgname', image_target.target,
+ image_name)
+
+ image_license_manifest_path = os.path.join(
+ license_directory,
+ real_image_name,
+ 'image_license.manifest')
+
+ image_package_manifest_path = os.path.join(
+ license_directory,
+ real_image_name,
+ 'image_license.manifest')
+
+ # if image_license.manifest exists, we can read the names of
+ # bzImage, modules etc. files for this build from it, then look for
+ # them in the DEPLOY_DIR_IMAGE; note that this file is only produced
+ # if an image file was produced
+ if os.path.isfile(image_license_manifest_path):
+ has_files = True
+
+ basenames = self._get_filenames_from_image_license(
+ image_license_manifest_path)
+
+ for basename in basenames:
+ artifact_path = os.path.join(deploy_dir_image, basename)
+ if not os.path.exists(artifact_path):
+ logger.warning("artifact %s doesn't exist, skipping" % artifact_path)
+ continue
+ artifact_size = os.stat(artifact_path).st_size
+
+ # note that the artifact will only be saved against this
+ # build if it hasn't been already
+ self.orm_wrapper.save_target_kernel_file(image_target,
+ artifact_path, artifact_size)
+
+ # store the license manifest path on the target
+ # (this file is also created any time an image file is created)
+ license_manifest_path = os.path.join(license_directory,
+ real_image_name, 'license.manifest')
+
+ self.orm_wrapper.update_target_set_license_manifest(
+ image_target, license_manifest_path)
+
+ # store the package manifest path on the target (this file
+ # is created any time an image file is created)
+ package_manifest_path = os.path.join(deploy_dir_image,
+ real_image_name + '.rootfs.manifest')
+
+ if os.path.exists(package_manifest_path):
+ self.orm_wrapper.update_target_set_package_manifest(
+ image_target, package_manifest_path)
+
+ # scan the directory for image files relating to this build
+ # (via real_image_name); note that we don't have to set
+ # has_files = True, as searching for the license manifest file
+ # will already have set it to true if at least one image file was
+ # produced; note that the real_image_name includes BUILDNAME, which
+ # in turn includes a timestamp; so if no files were produced for
+ # this timestamp (i.e. the build reused existing image files already
+ # in the directory), no files will be recorded against this target
+ image_files = self._get_image_files(deploy_dir_image,
+ real_image_name, image_file_extensions_unique)
+
+ for image_file in image_files:
+ self.orm_wrapper.save_target_image_file_information(
+ image_target, image_file['path'], image_file['size'])
+
+ if not has_files:
+ # copy image files and build artifacts from the
+ # most-recently-built Target with the
+ # same target + machine as this Target; also copy the license
+ # manifest path, as that is not treated as an artifact and needs
+ # to be set separately
+ similar_target = \
+ self.orm_wrapper.get_similar_target_with_image_files(
+ image_target)
+
+ if similar_target:
+ logger.info('image artifacts for target %s cloned from ' \
+ 'target %s' % (image_target.pk, similar_target.pk))
+ self.orm_wrapper.clone_image_artifacts(similar_target,
+ image_target)
+
+ def _get_sdk_targets(self):
+ """
+ Return targets which could generate SDK artifacts, i.e.
+ "do_populate_sdk" and "do_populate_sdk_ext".
+ """
+ return [target for target in self.internal_state['targets'] \
+ if target.task in ['populate_sdk', 'populate_sdk_ext']]
+
+ def scan_sdk_artifacts(self, event):
+ """
+ Note that we have to intercept an SDKArtifactInfo event from
+ toaster.bbclass (via toasterui) to get hold of the SDK variables we
+ need to be able to scan for files accurately: this is because
+ variables like TOOLCHAIN_OUTPUTNAME have reset to None by the time
+ BuildCompleted is fired by bitbake, so we have to get those values
+ while the build is still in progress.
+
+ For populate_sdk_ext, this runs twice, with two different
+ TOOLCHAIN_OUTPUTNAME settings, each of which will capture some of the
+ files in the SDK output directory.
+ """
+ sdk_vars = BuildInfoHelper._get_data_from_event(event)
+ toolchain_outputname = sdk_vars['TOOLCHAIN_OUTPUTNAME']
+
+ # targets which might have created SDK artifacts
+ sdk_targets = self._get_sdk_targets()
+
+ # location of SDK artifacts
+ tmpdir = self.server.runCommand(['getVariable', 'TMPDIR'])[0]
+ sdk_dir = os.path.join(tmpdir, 'deploy', 'sdk')
+
+ # all files in the SDK directory
+ artifacts = []
+ for dir_path, _, filenames in os.walk(sdk_dir):
+ for filename in filenames:
+ full_path = os.path.join(dir_path, filename)
+ if not os.path.islink(full_path):
+ artifacts.append(full_path)
+
+ for sdk_target in sdk_targets:
+ # find files in the SDK directory which haven't already been
+ # recorded against a Target and whose basename matches
+ # TOOLCHAIN_OUTPUTNAME
+ for artifact_path in artifacts:
+ basename = os.path.basename(artifact_path)
+
+ toolchain_match = basename.startswith(toolchain_outputname)
+
+ # files which match the name of the target which produced them;
+ # for example,
+ # poky-glibc-x86_64-core-image-sato-i586-toolchain-ext-2.1+snapshot.sh
+ target_match = re.search(sdk_target.target, basename)
+
+ # targets which produce "*-nativesdk-*" files
+ is_ext_sdk_target = sdk_target.task in \
+ ['do_populate_sdk_ext', 'populate_sdk_ext']
+
+ # SDK files which don't match the target name, i.e.
+ # x86_64-nativesdk-libc.*
+ # poky-glibc-x86_64-buildtools-tarball-i586-buildtools-nativesdk-standalone-2.1+snapshot*
+ is_ext_sdk_file = re.search('-nativesdk-', basename)
+
+ file_from_target = (toolchain_match and target_match) or \
+ (is_ext_sdk_target and is_ext_sdk_file)
+
+ if file_from_target:
+ # don't record the file if it's already been added to this
+ # target
+ matching_files = TargetSDKFile.objects.filter(
+ target=sdk_target, file_name=artifact_path)
+
+ if matching_files.count() == 0:
+ artifact_size = os.stat(artifact_path).st_size
+
+ self.orm_wrapper.save_target_sdk_file(
+ sdk_target, artifact_path, artifact_size)
+
+ def clone_required_sdk_artifacts(self):
+ """
+ If an SDK target doesn't have any SDK artifacts, this means that
+ the postfuncs of populate_sdk or populate_sdk_ext didn't fire, which
+ in turn means that the targets of this build didn't generate any new
+ artifacts.
+
+ In this case, clone SDK artifacts for targets in the current build
+ from existing targets for this build.
+ """
+ sdk_targets = self._get_sdk_targets()
+ for sdk_target in sdk_targets:
+ # only clone for SDK targets which have no TargetSDKFiles yet
+ if sdk_target.targetsdkfile_set.all().count() == 0:
+ similar_target = \
+ self.orm_wrapper.get_similar_target_with_sdk_files(
+ sdk_target)
+ if similar_target:
+ logger.info('SDK artifacts for target %s cloned from ' \
+ 'target %s' % (sdk_target.pk, similar_target.pk))
+ self.orm_wrapper.clone_sdk_artifacts(similar_target,
+ sdk_target)
+
def close(self, errorcode):
- if self.brbe is not None:
- self._store_build_done(errorcode)
+ self._store_build_done(errorcode)
if 'backlog' in self.internal_state:
- if 'build' in self.internal_state:
- # we save missed events in the database for the current build
- tempevent = self.internal_state['backlog'].pop()
- self.store_log_event(tempevent)
- else:
- # we have no build, and we still have events; something amazingly wrong happend
- for event in self.internal_state['backlog']:
- logger.error("UNSAVED log: %s", event.msg)
+ # we save missed events in the database for the current build
+ tempevent = self.internal_state['backlog'].pop()
+ self.store_log_event(tempevent)
if not connection.features.autocommits_when_autocommit_is_off:
transaction.set_autocommit(True)
@@ -1519,3 +1928,7 @@ class BuildInfoHelper(object):
# being incorrectly attached to the previous Toaster-triggered build;
# see https://bugzilla.yoctoproject.org/show_bug.cgi?id=9021
self.brbe = None
+
+ # unset the internal Build object to prevent it being reused for the
+ # next build
+ self.internal_state['build'] = None
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/__init__.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/__init__.py
deleted file mode 100644
index b7cbe1a4f..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Gtk+ UI pieces for BitBake
-#
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/__init__.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/__init__.py
+++ /dev/null
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py
deleted file mode 100644
index c679f9a07..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011-2012 Intel Corporation
-#
-# Authored by Joshua Lock <josh@linux.intel.com>
-# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-
-"""
-The following are convenience classes for implementing GNOME HIG compliant
-BitBake GUI's
-In summary: spacing = 12px, border-width = 6px
-"""
-
-class CrumbsDialog(gtk.Dialog):
- """
- A GNOME HIG compliant dialog widget.
- Add buttons with gtk.Dialog.add_button or gtk.Dialog.add_buttons
- """
- def __init__(self, title="", parent=None, flags=0, buttons=None):
- super(CrumbsDialog, self).__init__(title, parent, flags, buttons)
-
- self.set_property("has-separator", False) # note: deprecated in 2.22
-
- self.set_border_width(6)
- self.vbox.set_property("spacing", 12)
- self.action_area.set_property("spacing", 12)
- self.action_area.set_property("border-width", 6)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py
deleted file mode 100644
index 3b998e463..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011-2012 Intel Corporation
-#
-# Authored by Joshua Lock <josh@linux.intel.com>
-# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import glib
-import gtk
-from bb.ui.crumbs.hobwidget import HobIconChecker
-from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
-
-"""
-The following are convenience classes for implementing GNOME HIG compliant
-BitBake GUI's
-In summary: spacing = 12px, border-width = 6px
-"""
-
-class CrumbsMessageDialog(gtk.MessageDialog):
- """
- A GNOME HIG compliant dialog widget.
- Add buttons with gtk.Dialog.add_button or gtk.Dialog.add_buttons
- """
- def __init__(self, parent = None, label="", dialog_type = gtk.MESSAGE_QUESTION, msg=""):
- super(CrumbsMessageDialog, self).__init__(None,
- gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
- dialog_type,
- gtk.BUTTONS_NONE,
- None)
-
- self.set_skip_taskbar_hint(False)
-
- self.set_markup(label)
-
- if 0 <= len(msg) < 300:
- self.format_secondary_markup(msg)
- else:
- vbox = self.get_message_area()
- vbox.set_border_width(1)
- vbox.set_property("spacing", 12)
- self.textWindow = gtk.ScrolledWindow()
- self.textWindow.set_shadow_type(gtk.SHADOW_IN)
- self.textWindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- self.msgView = gtk.TextView()
- self.msgView.set_editable(False)
- self.msgView.set_wrap_mode(gtk.WRAP_WORD)
- self.msgView.set_cursor_visible(False)
- self.msgView.set_size_request(300, 300)
- self.buf = gtk.TextBuffer()
- self.buf.set_text(msg)
- self.msgView.set_buffer(self.buf)
- self.textWindow.add(self.msgView)
- self.msgView.show()
- vbox.add(self.textWindow)
- self.textWindow.show()
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py
deleted file mode 100644
index a13fff906..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py
+++ /dev/null
@@ -1,219 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011-2012 Intel Corporation
-#
-# Authored by Joshua Lock <josh@linux.intel.com>
-# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import glob
-import gtk
-import gobject
-import os
-import re
-import shlex
-import subprocess
-import tempfile
-from bb.ui.crumbs.hobwidget import hic, HobButton
-from bb.ui.crumbs.progressbar import HobProgressBar
-import bb.ui.crumbs.utils
-import bb.process
-from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
-from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
-
-"""
-The following are convenience classes for implementing GNOME HIG compliant
-BitBake GUI's
-In summary: spacing = 12px, border-width = 6px
-"""
-
-class DeployImageDialog (CrumbsDialog):
-
- __dummy_usb__ = "--select a usb drive--"
-
- def __init__(self, title, image_path, parent, flags, buttons=None, standalone=False):
- super(DeployImageDialog, self).__init__(title, parent, flags, buttons)
-
- self.image_path = image_path
- self.standalone = standalone
-
- self.create_visual_elements()
- self.connect("response", self.response_cb)
-
- def create_visual_elements(self):
- self.set_size_request(600, 400)
- label = gtk.Label()
- label.set_alignment(0.0, 0.5)
- markup = "<span font_desc='12'>The image to be written into usb drive:</span>"
- label.set_markup(markup)
- self.vbox.pack_start(label, expand=False, fill=False, padding=2)
-
- table = gtk.Table(2, 10, False)
- table.set_col_spacings(5)
- table.set_row_spacings(5)
- self.vbox.pack_start(table, expand=True, fill=True)
-
- scroll = gtk.ScrolledWindow()
- scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
- scroll.set_shadow_type(gtk.SHADOW_IN)
- tv = gtk.TextView()
- tv.set_editable(False)
- tv.set_wrap_mode(gtk.WRAP_WORD)
- tv.set_cursor_visible(False)
- self.buf = gtk.TextBuffer()
- self.buf.set_text(self.image_path)
- tv.set_buffer(self.buf)
- scroll.add(tv)
- table.attach(scroll, 0, 10, 0, 1)
-
- # There are 2 ways to use DeployImageDialog
- # One way is that called by HOB when the 'Deploy Image' button is clicked
- # The other way is that called by a standalone script.
- # Following block of codes handles the latter way. It adds a 'Select Image' button and
- # emit a signal when the button is clicked.
- if self.standalone:
- gobject.signal_new("select_image_clicked", self, gobject.SIGNAL_RUN_FIRST,
- gobject.TYPE_NONE, ())
- icon = gtk.Image()
- pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_IMAGES_DISPLAY_FILE)
- icon.set_from_pixbuf(pix_buffer)
- button = gtk.Button("Select Image")
- button.set_image(icon)
- #button.set_size_request(140, 50)
- table.attach(button, 9, 10, 1, 2, gtk.FILL, 0, 0, 0)
- button.connect("clicked", self.select_image_button_clicked_cb)
-
- separator = gtk.HSeparator()
- self.vbox.pack_start(separator, expand=False, fill=False, padding=10)
-
- self.usb_desc = gtk.Label()
- self.usb_desc.set_alignment(0.0, 0.5)
- markup = "<span font_desc='12'>You haven't chosen any USB drive.</span>"
- self.usb_desc.set_markup(markup)
-
- self.usb_combo = gtk.combo_box_new_text()
- self.usb_combo.connect("changed", self.usb_combo_changed_cb)
- model = self.usb_combo.get_model()
- model.clear()
- self.usb_combo.append_text(self.__dummy_usb__)
- for usb in self.find_all_usb_devices():
- self.usb_combo.append_text("/dev/" + usb)
- self.usb_combo.set_active(0)
- self.vbox.pack_start(self.usb_combo, expand=False, fill=False)
- self.vbox.pack_start(self.usb_desc, expand=False, fill=False, padding=2)
-
- self.progress_bar = HobProgressBar()
- self.vbox.pack_start(self.progress_bar, expand=False, fill=False)
- separator = gtk.HSeparator()
- self.vbox.pack_start(separator, expand=False, fill=True, padding=10)
-
- self.vbox.show_all()
- self.progress_bar.hide()
-
- def set_image_text_buffer(self, image_path):
- self.buf.set_text(image_path)
-
- def set_image_path(self, image_path):
- self.image_path = image_path
-
- def popen_read(self, cmd):
- tmpout, errors = bb.process.run("%s" % cmd)
- return tmpout.strip()
-
- def find_all_usb_devices(self):
- usb_devs = [ os.readlink(u)
- for u in glob.glob('/dev/disk/by-id/usb*')
- if not re.search(r'part\d+', u) ]
- return [ '%s' % u[u.rfind('/')+1:] for u in usb_devs ]
-
- def get_usb_info(self, dev):
- return "%s %s" % \
- (self.popen_read('cat /sys/class/block/%s/device/vendor' % dev),
- self.popen_read('cat /sys/class/block/%s/device/model' % dev))
-
- def select_image_button_clicked_cb(self, button):
- self.emit('select_image_clicked')
-
- def usb_combo_changed_cb(self, usb_combo):
- combo_item = self.usb_combo.get_active_text()
- if not combo_item or combo_item == self.__dummy_usb__:
- markup = "<span font_desc='12'>You haven't chosen any USB drive.</span>"
- self.usb_desc.set_markup(markup)
- else:
- markup = "<span font_desc='12'>" + self.get_usb_info(combo_item.lstrip("/dev/")) + "</span>"
- self.usb_desc.set_markup(markup)
-
- def response_cb(self, dialog, response_id):
- if response_id == gtk.RESPONSE_YES:
- lbl = ''
- msg = ''
- combo_item = self.usb_combo.get_active_text()
- if combo_item and combo_item != self.__dummy_usb__ and self.image_path:
- cmdline = bb.ui.crumbs.utils.which_terminal()
- if cmdline:
- tmpfile = tempfile.NamedTemporaryFile()
- cmdline += "\"sudo dd if=" + self.image_path + \
- " of=" + combo_item + " && sync; echo $? > " + tmpfile.name + "\""
- subprocess.call(shlex.split(cmdline))
-
- if int(tmpfile.readline().strip()) == 0:
- lbl = "<b>Deploy image successfully.</b>"
- else:
- lbl = "<b>Failed to deploy image.</b>"
- msg = "Please check image <b>%s</b> exists and USB device <b>%s</b> is writable." % (self.image_path, combo_item)
- tmpfile.close()
- else:
- if not self.image_path:
- lbl = "<b>No selection made.</b>"
- msg = "You have not selected an image to deploy."
- else:
- lbl = "<b>No selection made.</b>"
- msg = "You have not selected a USB device."
- if len(lbl):
- crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO, msg)
- button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
- HobButton.style_button(button)
- crumbs_dialog.run()
- crumbs_dialog.destroy()
-
- def update_progress_bar(self, title, fraction, status=None):
- self.progress_bar.update(fraction)
- self.progress_bar.set_title(title)
- self.progress_bar.set_rcstyle(status)
-
- def write_file(self, ifile, ofile):
- self.progress_bar.reset()
- self.progress_bar.show()
-
- f_from = os.open(ifile, os.O_RDONLY)
- f_to = os.open(ofile, os.O_WRONLY)
-
- total_size = os.stat(ifile).st_size
- written_size = 0
-
- while True:
- buf = os.read(f_from, 1024*1024)
- if not buf:
- break
- os.write(f_to, buf)
- written_size += 1024*1024
- self.update_progress_bar("Writing to usb:", written_size * 1.0/total_size)
-
- self.update_progress_bar("Writing completed:", 1.0)
- os.close(f_from)
- os.close(f_to)
- self.progress_bar.hide()
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py
deleted file mode 100644
index 21216adc9..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py
+++ /dev/null
@@ -1,172 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011-2012 Intel Corporation
-#
-# Authored by Joshua Lock <josh@linux.intel.com>
-# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-import gobject
-import os
-from bb.ui.crumbs.hobwidget import HobViewTable, HobInfoButton, HobButton, HobAltButton
-from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
-from bb.ui.crumbs.hig.layerselectiondialog import LayerSelectionDialog
-
-"""
-The following are convenience classes for implementing GNOME HIG compliant
-BitBake GUI's
-In summary: spacing = 12px, border-width = 6px
-"""
-
-class ImageSelectionDialog (CrumbsDialog):
-
- __columns__ = [{
- 'col_name' : 'Image name',
- 'col_id' : 0,
- 'col_style': 'text',
- 'col_min' : 400,
- 'col_max' : 400
- }, {
- 'col_name' : 'Select',
- 'col_id' : 1,
- 'col_style': 'radio toggle',
- 'col_min' : 160,
- 'col_max' : 160
- }]
-
-
- def __init__(self, image_folder, image_types, title, parent, flags, buttons=None, image_extension = {}):
- super(ImageSelectionDialog, self).__init__(title, parent, flags, buttons)
- self.connect("response", self.response_cb)
-
- self.image_folder = image_folder
- self.image_types = image_types
- self.image_list = []
- self.image_names = []
- self.image_extension = image_extension
-
- # create visual elements on the dialog
- self.create_visual_elements()
-
- self.image_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_BOOLEAN)
- self.fill_image_store()
-
- def create_visual_elements(self):
- hbox = gtk.HBox(False, 6)
-
- self.vbox.pack_start(hbox, expand=False, fill=False)
-
- entry = gtk.Entry()
- entry.set_text(self.image_folder)
- table = gtk.Table(1, 10, True)
- table.set_size_request(560, -1)
- hbox.pack_start(table, expand=False, fill=False)
- table.attach(entry, 0, 9, 0, 1)
- image = gtk.Image()
- image.set_from_stock(gtk.STOCK_OPEN, gtk.ICON_SIZE_BUTTON)
- open_button = gtk.Button()
- open_button.set_image(image)
- open_button.connect("clicked", self.select_path_cb, self, entry)
- table.attach(open_button, 9, 10, 0, 1)
-
- self.image_table = HobViewTable(self.__columns__, "Images")
- self.image_table.set_size_request(-1, 300)
- self.image_table.connect("toggled", self.toggled_cb)
- self.image_table.connect_group_selection(self.table_selected_cb)
- self.image_table.connect("row-activated", self.row_actived_cb)
- self.vbox.pack_start(self.image_table, expand=True, fill=True)
-
- self.show_all()
-
- def change_image_cb(self, model, path, columnid):
- if not model:
- return
- iter = model.get_iter_first()
- while iter:
- rowpath = model.get_path(iter)
- model[rowpath][columnid] = False
- iter = model.iter_next(iter)
-
- model[path][columnid] = True
-
- def toggled_cb(self, table, cell, path, columnid, tree):
- model = tree.get_model()
- self.change_image_cb(model, path, columnid)
-
- def table_selected_cb(self, selection):
- model, paths = selection.get_selected_rows()
- if paths:
- self.change_image_cb(model, paths[0], 1)
-
- def row_actived_cb(self, tab, model, path):
- self.change_image_cb(model, path, 1)
- self.emit('response', gtk.RESPONSE_YES)
-
- def select_path_cb(self, action, parent, entry):
- dialog = gtk.FileChooserDialog("", parent,
- gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
- text = entry.get_text()
- dialog.set_current_folder(text if len(text) > 0 else os.getcwd())
- button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
- HobAltButton.style_button(button)
- button = dialog.add_button("Open", gtk.RESPONSE_YES)
- HobButton.style_button(button)
- response = dialog.run()
- if response == gtk.RESPONSE_YES:
- path = dialog.get_filename()
- entry.set_text(path)
- self.image_folder = path
- self.fill_image_store()
-
- dialog.destroy()
-
- def fill_image_store(self):
- self.image_list = []
- self.image_store.clear()
- imageset = set()
- for root, dirs, files in os.walk(self.image_folder):
- # ignore the sub directories
- dirs[:] = []
- for f in files:
- for image_type in self.image_types:
- if image_type in self.image_extension:
- real_types = self.image_extension[image_type]
- else:
- real_types = [image_type]
- for real_image_type in real_types:
- if f.endswith('.' + real_image_type):
- imageset.add(f.rsplit('.' + real_image_type)[0].rsplit('.rootfs')[0])
- self.image_list.append(f)
-
- for image in imageset:
- self.image_store.set(self.image_store.append(), 0, image, 1, False)
-
- self.image_table.set_model(self.image_store)
-
- def response_cb(self, dialog, response_id):
- self.image_names = []
- if response_id == gtk.RESPONSE_YES:
- iter = self.image_store.get_iter_first()
- while iter:
- path = self.image_store.get_path(iter)
- if self.image_store[path][1]:
- for f in self.image_list:
- if f.startswith(self.image_store[path][0] + '.'):
- self.image_names.append(f)
- break
- iter = self.image_store.iter_next(iter)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py
deleted file mode 100644
index 52d57b673..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py
+++ /dev/null
@@ -1,298 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011-2012 Intel Corporation
-#
-# Authored by Joshua Lock <josh@linux.intel.com>
-# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-import gobject
-import os
-import tempfile
-from bb.ui.crumbs.hobwidget import hic, HobButton, HobAltButton
-from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
-from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
-
-"""
-The following are convenience classes for implementing GNOME HIG compliant
-BitBake GUI's
-In summary: spacing = 12px, border-width = 6px
-"""
-
-class CellRendererPixbufActivatable(gtk.CellRendererPixbuf):
- """
- A custom CellRenderer implementation which is activatable
- so that we can handle user clicks
- """
- __gsignals__ = { 'clicked' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- (gobject.TYPE_STRING,)), }
-
- def __init__(self):
- gtk.CellRendererPixbuf.__init__(self)
- self.set_property('mode', gtk.CELL_RENDERER_MODE_ACTIVATABLE)
- self.set_property('follow-state', True)
-
- """
- Respond to a user click on a cell
- """
- def do_activate(self, even, widget, path, background_area, cell_area, flags):
- self.emit('clicked', path)
-
-#
-# LayerSelectionDialog
-#
-class LayerSelectionDialog (CrumbsDialog):
-
- TARGETS = [
- ("MY_TREE_MODEL_ROW", gtk.TARGET_SAME_WIDGET, 0),
- ("text/plain", 0, 1),
- ("TEXT", 0, 2),
- ("STRING", 0, 3),
- ]
-
- def gen_label_widget(self, content):
- label = gtk.Label()
- label.set_alignment(0, 0)
- label.set_markup(content)
- label.show()
- return label
-
- def layer_widget_toggled_cb(self, cell, path, layer_store):
- name = layer_store[path][0]
- toggle = not layer_store[path][1]
- layer_store[path][1] = toggle
-
- def layer_widget_add_clicked_cb(self, action, layer_store, parent):
- dialog = gtk.FileChooserDialog("Add new layer", parent,
- gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
- button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
- HobAltButton.style_button(button)
- button = dialog.add_button("Open", gtk.RESPONSE_YES)
- HobButton.style_button(button)
- label = gtk.Label("Select the layer you wish to add")
- label.show()
- dialog.set_extra_widget(label)
- response = dialog.run()
- path = dialog.get_filename()
- dialog.destroy()
-
- lbl = "<b>Error</b>"
- msg = "Unable to load layer <i>%s</i> because " % path
- if response == gtk.RESPONSE_YES:
- import os
- import os.path
- layers = []
- it = layer_store.get_iter_first()
- while it:
- layers.append(layer_store.get_value(it, 0))
- it = layer_store.iter_next(it)
-
- if not path:
- msg += "it is an invalid path."
- elif not os.path.exists(path+"/conf/layer.conf"):
- msg += "there is no layer.conf inside the directory."
- elif path in layers:
- msg += "it is already in loaded layers."
- else:
- layer_store.append([path])
- return
- dialog = CrumbsMessageDialog(parent, lbl, gtk.MESSAGE_ERROR, msg)
- dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_OK)
- response = dialog.run()
- dialog.destroy()
-
- def layer_widget_del_clicked_cb(self, action, tree_selection, layer_store):
- model, iter = tree_selection.get_selected()
- if iter:
- layer_store.remove(iter)
-
-
- def gen_layer_widget(self, layers, layers_avail, window, tooltip=""):
- hbox = gtk.HBox(False, 6)
-
- layer_tv = gtk.TreeView()
- layer_tv.set_rules_hint(True)
- layer_tv.set_headers_visible(False)
- tree_selection = layer_tv.get_selection()
- tree_selection.set_mode(gtk.SELECTION_SINGLE)
-
- # Allow enable drag and drop of rows including row move
- dnd_internal_target = ''
- dnd_targets = [(dnd_internal_target, gtk.TARGET_SAME_WIDGET, 0)]
- layer_tv.enable_model_drag_source( gtk.gdk.BUTTON1_MASK,
- dnd_targets,
- gtk.gdk.ACTION_MOVE)
- layer_tv.enable_model_drag_dest(dnd_targets,
- gtk.gdk.ACTION_MOVE)
- layer_tv.connect("drag_data_get", self.drag_data_get_cb)
- layer_tv.connect("drag_data_received", self.drag_data_received_cb)
-
- col0= gtk.TreeViewColumn('Path')
- cell0 = gtk.CellRendererText()
- cell0.set_padding(5,2)
- col0.pack_start(cell0, True)
- col0.set_cell_data_func(cell0, self.draw_layer_path_cb)
- layer_tv.append_column(col0)
-
- scroll = gtk.ScrolledWindow()
- scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
- scroll.set_shadow_type(gtk.SHADOW_IN)
- scroll.add(layer_tv)
-
- table_layer = gtk.Table(2, 10, False)
- hbox.pack_start(table_layer, expand=True, fill=True)
-
- table_layer.attach(scroll, 0, 10, 0, 1)
-
- layer_store = gtk.ListStore(gobject.TYPE_STRING)
- for layer in layers:
- layer_store.append([layer])
-
- col1 = gtk.TreeViewColumn('Enabled')
- layer_tv.append_column(col1)
-
- cell1 = CellRendererPixbufActivatable()
- cell1.set_fixed_size(-1,35)
- cell1.connect("clicked", self.del_cell_clicked_cb, layer_store)
- col1.pack_start(cell1, True)
- col1.set_cell_data_func(cell1, self.draw_delete_button_cb, layer_tv)
-
- add_button = gtk.Button()
- add_button.set_relief(gtk.RELIEF_NONE)
- box = gtk.HBox(False, 6)
- box.show()
- add_button.add(box)
- add_button.connect("enter-notify-event", self.add_hover_cb)
- add_button.connect("leave-notify-event", self.add_leave_cb)
- self.im = gtk.Image()
- self.im.set_from_file(hic.ICON_INDI_ADD_FILE)
- self.im.show()
- box.pack_start(self.im, expand=False, fill=False, padding=6)
- lbl = gtk.Label("Add layer")
- lbl.set_alignment(0.0, 0.5)
- lbl.show()
- box.pack_start(lbl, expand=True, fill=True, padding=6)
- add_button.connect("clicked", self.layer_widget_add_clicked_cb, layer_store, window)
- table_layer.attach(add_button, 0, 10, 1, 2, gtk.EXPAND | gtk.FILL, 0, 0, 6)
- layer_tv.set_model(layer_store)
-
- hbox.show_all()
-
- return hbox, layer_store
-
- def drag_data_get_cb(self, treeview, context, selection, target_id, etime):
- treeselection = treeview.get_selection()
- model, iter = treeselection.get_selected()
- data = model.get_value(iter, 0)
- selection.set(selection.target, 8, data)
-
- def drag_data_received_cb(self, treeview, context, x, y, selection, info, etime):
- model = treeview.get_model()
- data = selection.data
- drop_info = treeview.get_dest_row_at_pos(x, y)
- if drop_info:
- path, position = drop_info
- iter = model.get_iter(path)
- if (position == gtk.TREE_VIEW_DROP_BEFORE or position == gtk.TREE_VIEW_DROP_INTO_OR_BEFORE):
- model.insert_before(iter, [data])
- else:
- model.insert_after(iter, [data])
- else:
- model.append([data])
- if context.action == gtk.gdk.ACTION_MOVE:
- context.finish(True, True, etime)
- return
-
- def add_hover_cb(self, button, event):
- self.im.set_from_file(hic.ICON_INDI_ADD_HOVER_FILE)
-
- def add_leave_cb(self, button, event):
- self.im.set_from_file(hic.ICON_INDI_ADD_FILE)
-
- def __init__(self, title, layers, layers_non_removable, all_layers, parent, flags, buttons=None):
- super(LayerSelectionDialog, self).__init__(title, parent, flags, buttons)
-
- # class members from other objects
- self.layers = layers
- self.layers_non_removable = layers_non_removable
- self.all_layers = all_layers
- self.layers_changed = False
-
- # icon for remove button in TreeView
- im = gtk.Image()
- im.set_from_file(hic.ICON_INDI_REMOVE_FILE)
- self.rem_icon = im.get_pixbuf()
-
- # class members for internal use
- self.layer_store = None
-
- # create visual elements on the dialog
- self.create_visual_elements()
- self.connect("response", self.response_cb)
-
- def create_visual_elements(self):
- layer_widget, self.layer_store = self.gen_layer_widget(self.layers, self.all_layers, self, None)
- layer_widget.set_size_request(450, 250)
- self.vbox.pack_start(layer_widget, expand=True, fill=True)
- self.show_all()
-
- def response_cb(self, dialog, response_id):
- model = self.layer_store
- it = model.get_iter_first()
- layers = []
- while it:
- layers.append(model.get_value(it, 0))
- it = model.iter_next(it)
-
- self.layers_changed = (self.layers != layers)
- self.layers = layers
-
- """
- A custom cell_data_func to draw a delete 'button' in the TreeView for layers
- other than the meta layer. The deletion of which is prevented so that the
- user can't shoot themselves in the foot too badly.
- """
- def draw_delete_button_cb(self, col, cell, model, it, tv):
- path = model.get_value(it, 0)
- if path in self.layers_non_removable:
- cell.set_sensitive(False)
- cell.set_property('pixbuf', None)
- cell.set_property('mode', gtk.CELL_RENDERER_MODE_INERT)
- else:
- cell.set_property('pixbuf', self.rem_icon)
- cell.set_sensitive(True)
- cell.set_property('mode', gtk.CELL_RENDERER_MODE_ACTIVATABLE)
-
- return True
-
- """
- A custom cell_data_func to write an extra message into the layer path cell
- for the meta layer. We should inform the user that they can't remove it for
- their own safety.
- """
- def draw_layer_path_cb(self, col, cell, model, it):
- path = model.get_value(it, 0)
- if path in self.layers_non_removable:
- cell.set_property('markup', "<b>It cannot be removed</b>\n%s" % path)
- else:
- cell.set_property('text', path)
-
- def del_cell_clicked_cb(self, cell, path, model):
- it = model.get_iter_from_string(path)
- model.remove(it)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/propertydialog.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/propertydialog.py
deleted file mode 100644
index 09b9ce6de..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/propertydialog.py
+++ /dev/null
@@ -1,437 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011-2013 Intel Corporation
-#
-# Authored by Andrei Dinu <andrei.adrianx.dinu@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import string
-import gtk
-import gobject
-import os
-import tempfile
-import glib
-from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
-from bb.ui.crumbs.hig.settingsuihelper import SettingsUIHelper
-from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
-from bb.ui.crumbs.hig.layerselectiondialog import LayerSelectionDialog
-
-"""
-The following are convenience classes for implementing GNOME HIG compliant
-BitBake GUI's
-In summary: spacing = 12px, border-width = 6px
-"""
-
-class PropertyDialog(CrumbsDialog):
-
- def __init__(self, title, parent, information, flags, buttons=None):
-
- super(PropertyDialog, self).__init__(title, parent, flags, buttons)
-
- self.properties = information
-
- if len(self.properties) == 10:
- self.create_recipe_visual_elements()
- elif len(self.properties) == 5:
- self.create_package_visual_elements()
- else:
- self.create_information_visual_elements()
-
-
- def create_information_visual_elements(self):
-
- HOB_ICON_BASE_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), ("icons/"))
- ICON_PACKAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_display.png'))
-
- self.set_resizable(False)
-
- self.table = gtk.Table(1,1,False)
- self.table.set_row_spacings(0)
- self.table.set_col_spacings(0)
-
- self.image = gtk.Image()
- self.image.set_from_file(ICON_PACKAGES_DISPLAY_FILE)
- self.image.set_property("xalign",0)
- #self.vbox.add(self.image)
-
- image_info = self.properties.split("*")[0]
- info = self.properties.split("*")[1]
-
- vbox = gtk.VBox(True, spacing=30)
-
- self.label_short = gtk.Label()
- self.label_short.set_line_wrap(False)
- self.label_short.set_markup(image_info)
- self.label_short.set_property("xalign", 0)
-
- self.info_label = gtk.Label()
- self.info_label.set_line_wrap(True)
- self.info_label.set_markup(info)
- self.info_label.set_property("yalign", 0.5)
-
- self.table.attach(self.image, 0,1,0,1, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=5,ypadding=5)
- self.table.attach(self.label_short, 0,1,0,1, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=40,ypadding=5)
- self.table.attach(self.info_label, 0,1,1,2, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=40,ypadding=10)
-
- self.vbox.add(self.table)
- self.connect('delete-event', lambda w, e: self.destroy() or True)
-
- def treeViewTooltip( self, widget, e, tooltips, cell, emptyText="" ):
- try:
- (path,col,x,y) = widget.get_path_at_pos( int(e.x), int(e.y) )
- it = widget.get_model().get_iter(path)
- value = widget.get_model().get_value(it,cell)
- if value in self.tooltip_items:
- tooltips.set_tip(widget, self.tooltip_items[value])
- tooltips.enable()
- else:
- tooltips.set_tip(widget, emptyText)
- except:
- tooltips.set_tip(widget, emptyText)
-
-
- def create_package_visual_elements(self):
-
- import json
-
- name = self.properties['name']
- binb = self.properties['binb']
- size = self.properties['size']
- recipe = self.properties['recipe']
- file_list = json.loads(self.properties['files_list'])
-
- files_temp = ''
- paths_temp = ''
- files_binb = []
- paths_binb = []
-
- self.tooltip_items = {}
-
- self.set_resizable(False)
-
- #cleaning out the recipe variable
- recipe = recipe.split("+")[0]
-
- vbox = gtk.VBox(True,spacing = 0)
-
- ###################################### NAME ROW + COL #################################
-
- self.label_short = gtk.Label()
- self.label_short.set_size_request(300,-1)
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Name: </span>" + name)
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
-
- ###################################### SIZE ROW + COL ######################################
-
- self.label_short = gtk.Label()
- self.label_short.set_size_request(300,-1)
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Size: </span>" + size)
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
-
- ##################################### RECIPE ROW + COL #########################################
-
- self.label_short = gtk.Label()
- self.label_short.set_size_request(300,-1)
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Recipe: </span>" + recipe)
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
-
- ##################################### BINB ROW + COL #######################################
-
- if binb != '':
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Brought in by: </span>")
- self.label_short.set_property("xalign", 0)
-
- self.label_info = gtk.Label()
- self.label_info.set_size_request(300,-1)
- self.label_info.set_selectable(True)
- self.label_info.set_line_wrap(True)
- self.label_info.set_markup(binb)
- self.label_info.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
- self.vbox.add(self.label_info)
-
- #################################### FILES BROUGHT BY PACKAGES ###################################
-
- if file_list:
-
- self.textWindow = gtk.ScrolledWindow()
- self.textWindow.set_shadow_type(gtk.SHADOW_IN)
- self.textWindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- self.textWindow.set_size_request(100, 170)
-
- packagefiles_store = gtk.ListStore(str)
-
- self.packagefiles_tv = gtk.TreeView()
- self.packagefiles_tv.set_rules_hint(True)
- self.packagefiles_tv.set_headers_visible(True)
- self.textWindow.add(self.packagefiles_tv)
-
- self.cell1 = gtk.CellRendererText()
- col1 = gtk.TreeViewColumn('Package files', self.cell1)
- col1.set_cell_data_func(self.cell1, self.regex_field)
- self.packagefiles_tv.append_column(col1)
-
- items = file_list.keys()
- items.sort()
- for item in items:
- fullpath = item
- while len(item) > 35:
- item = item[:len(item)/2] + "" + item[len(item)/2+1:]
- if len(item) == 35:
- item = item[:len(item)/2] + "..." + item[len(item)/2+3:]
- self.tooltip_items[item] = fullpath
-
- packagefiles_store.append([str(item)])
-
- self.packagefiles_tv.set_model(packagefiles_store)
-
- tips = gtk.Tooltips()
- tips.set_tip(self.packagefiles_tv, "")
- self.packagefiles_tv.connect("motion-notify-event", self.treeViewTooltip, tips, 0)
- self.packagefiles_tv.set_events(gtk.gdk.POINTER_MOTION_MASK)
-
- self.vbox.add(self.textWindow)
-
- self.vbox.show_all()
-
-
- def regex_field(self, column, cell, model, iter):
- cell.set_property('text', model.get_value(iter, 0))
- return
-
-
- def create_recipe_visual_elements(self):
-
- summary = self.properties['summary']
- name = self.properties['name']
- version = self.properties['version']
- revision = self.properties['revision']
- binb = self.properties['binb']
- group = self.properties['group']
- license = self.properties['license']
- homepage = self.properties['homepage']
- bugtracker = self.properties['bugtracker']
- description = self.properties['description']
-
- self.set_resizable(False)
-
- #cleaning out the version variable and also the summary
- version = version.split(":")[1]
- if len(version) > 30:
- version = version.split("+")[0]
- else:
- version = version.split("-")[0]
- license = license.replace("&" , "and")
- if (homepage == ''):
- homepage = 'unknown'
- if (bugtracker == ''):
- bugtracker = 'unknown'
- summary = summary.split("+")[0]
-
- #calculating the rows needed for the table
- binb_items_count = len(binb.split(','))
- binb_items = binb.split(',')
-
- vbox = gtk.VBox(False,spacing = 0)
-
- ######################################## SUMMARY LABEL #########################################
-
- if summary != '':
- self.label_short = gtk.Label()
- self.label_short.set_width_chars(37)
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<b>" + summary + "</b>")
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
-
- ########################################## NAME ROW + COL #######################################
-
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Name: </span>" + name)
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
-
- ####################################### VERSION ROW + COL ####################################
-
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Version: </span>" + version)
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
-
- ##################################### REVISION ROW + COL #####################################
-
- self.label_short = gtk.Label()
- self.label_short.set_line_wrap(True)
- self.label_short.set_selectable(True)
- self.label_short.set_markup("<span weight=\"bold\">Revision: </span>" + revision)
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
-
- ################################## GROUP ROW + COL ############################################
-
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Group: </span>" + group)
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
-
- ################################# HOMEPAGE ROW + COL ############################################
-
- if homepage != 'unknown':
- self.label_info = gtk.Label()
- self.label_info.set_selectable(True)
- self.label_info.set_line_wrap(True)
- if len(homepage) > 35:
- self.label_info.set_markup("<a href=\"" + homepage + "\">" + homepage[0:35] + "..." + "</a>")
- else:
- self.label_info.set_markup("<a href=\"" + homepage + "\">" + homepage[0:60] + "</a>")
-
- self.label_info.set_property("xalign", 0)
-
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<b>Homepage: </b>")
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
- self.vbox.add(self.label_info)
-
- ################################# BUGTRACKER ROW + COL ###########################################
-
- if bugtracker != 'unknown':
- self.label_info = gtk.Label()
- self.label_info.set_selectable(True)
- self.label_info.set_line_wrap(True)
- if len(bugtracker) > 35:
- self.label_info.set_markup("<a href=\"" + bugtracker + "\">" + bugtracker[0:35] + "..." + "</a>")
- else:
- self.label_info.set_markup("<a href=\"" + bugtracker + "\">" + bugtracker[0:60] + "</a>")
- self.label_info.set_property("xalign", 0)
-
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<b>Bugtracker: </b>")
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
- self.vbox.add(self.label_info)
-
- ################################# LICENSE ROW + COL ############################################
-
- self.label_info = gtk.Label()
- self.label_info.set_selectable(True)
- self.label_info.set_line_wrap(True)
- self.label_info.set_markup(license)
- self.label_info.set_property("xalign", 0)
-
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">License: </span>")
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
- self.vbox.add(self.label_info)
-
- ################################### BINB ROW+COL #############################################
-
- if binb != '':
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Brought in by: </span>")
- self.label_short.set_property("xalign", 0)
- self.vbox.add(self.label_short)
- self.label_info = gtk.Label()
- self.label_info.set_selectable(True)
- self.label_info.set_width_chars(36)
- if len(binb) > 200:
- scrolled_window = gtk.ScrolledWindow()
- scrolled_window.set_policy(gtk.POLICY_NEVER,gtk.POLICY_ALWAYS)
- scrolled_window.set_size_request(100,100)
- self.label_info.set_markup(binb)
- self.label_info.set_padding(6,6)
- self.label_info.set_alignment(0,0)
- self.label_info.set_line_wrap(True)
- scrolled_window.add_with_viewport(self.label_info)
- self.vbox.add(scrolled_window)
- else:
- self.label_info.set_markup(binb)
- self.label_info.set_property("xalign", 0)
- self.label_info.set_line_wrap(True)
- self.vbox.add(self.label_info)
-
- ################################ DESCRIPTION TAG ROW #################################################
-
- self.label_short = gtk.Label()
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Description </span>")
- self.label_short.set_property("xalign", 0)
- self.vbox.add(self.label_short)
-
- ################################ DESCRIPTION INFORMATION ROW ##########################################
-
- hbox = gtk.HBox(True,spacing = 0)
-
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_width_chars(36)
- if len(description) > 200:
- scrolled_window = gtk.ScrolledWindow()
- scrolled_window.set_policy(gtk.POLICY_NEVER,gtk.POLICY_ALWAYS)
- scrolled_window.set_size_request(100,100)
- self.label_short.set_markup(description)
- self.label_short.set_padding(6,6)
- self.label_short.set_alignment(0,0)
- self.label_short.set_line_wrap(True)
- scrolled_window.add_with_viewport(self.label_short)
- self.vbox.add(scrolled_window)
- else:
- self.label_short.set_markup(description)
- self.label_short.set_property("xalign", 0)
- self.label_short.set_line_wrap(True)
- self.vbox.add(self.label_short)
-
- self.vbox.show_all()
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py
deleted file mode 100644
index e0285c93c..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py
+++ /dev/null
@@ -1,122 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011-2012 Intel Corporation
-#
-# Authored by Joshua Lock <josh@linux.intel.com>
-# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-import os
-from bb.ui.crumbs.hobwidget import HobInfoButton, HobButton, HobAltButton
-
-"""
-The following are convenience classes for implementing GNOME HIG compliant
-BitBake GUI's
-In summary: spacing = 12px, border-width = 6px
-"""
-
-class SettingsUIHelper():
-
- def gen_label_widget(self, content):
- label = gtk.Label()
- label.set_alignment(0, 0)
- label.set_markup(content)
- label.show()
- return label
-
- def gen_label_info_widget(self, content, tooltip):
- table = gtk.Table(1, 10, False)
- label = self.gen_label_widget(content)
- info = HobInfoButton(tooltip, self)
- table.attach(label, 0, 1, 0, 1, xoptions=gtk.FILL)
- table.attach(info, 1, 2, 0, 1, xoptions=gtk.FILL, xpadding=10)
- return table
-
- def gen_spinner_widget(self, content, lower, upper, tooltip=""):
- hbox = gtk.HBox(False, 12)
- adjust = gtk.Adjustment(value=content, lower=lower, upper=upper, step_incr=1)
- spinner = gtk.SpinButton(adjustment=adjust, climb_rate=1, digits=0)
-
- spinner.set_value(content)
- hbox.pack_start(spinner, expand=False, fill=False)
-
- info = HobInfoButton(tooltip, self)
- hbox.pack_start(info, expand=False, fill=False)
-
- hbox.show_all()
- return hbox, spinner
-
- def gen_combo_widget(self, curr_item, all_item, tooltip=""):
- hbox = gtk.HBox(False, 12)
- combo = gtk.combo_box_new_text()
- hbox.pack_start(combo, expand=False, fill=False)
-
- index = 0
- for item in all_item or []:
- combo.append_text(item)
- if item == curr_item:
- combo.set_active(index)
- index += 1
-
- info = HobInfoButton(tooltip, self)
- hbox.pack_start(info, expand=False, fill=False)
-
- hbox.show_all()
- return hbox, combo
-
- def entry_widget_select_path_cb(self, action, parent, entry):
- dialog = gtk.FileChooserDialog("", parent,
- gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
- text = entry.get_text()
- dialog.set_current_folder(text if len(text) > 0 else os.getcwd())
- button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
- HobAltButton.style_button(button)
- button = dialog.add_button("Open", gtk.RESPONSE_YES)
- HobButton.style_button(button)
- response = dialog.run()
- if response == gtk.RESPONSE_YES:
- path = dialog.get_filename()
- entry.set_text(path)
-
- dialog.destroy()
-
- def gen_entry_widget(self, content, parent, tooltip="", need_button=True):
- hbox = gtk.HBox(False, 12)
- entry = gtk.Entry()
- entry.set_text(content)
- entry.set_size_request(350,30)
-
- if need_button:
- table = gtk.Table(1, 10, False)
- hbox.pack_start(table, expand=True, fill=True)
- table.attach(entry, 0, 9, 0, 1, xoptions=gtk.SHRINK)
- image = gtk.Image()
- image.set_from_stock(gtk.STOCK_OPEN,gtk.ICON_SIZE_BUTTON)
- open_button = gtk.Button()
- open_button.set_image(image)
- open_button.connect("clicked", self.entry_widget_select_path_cb, parent, entry)
- table.attach(open_button, 9, 10, 0, 1, xoptions=gtk.SHRINK)
- else:
- hbox.pack_start(entry, expand=True, fill=True)
-
- if tooltip != "":
- info = HobInfoButton(tooltip, self)
- hbox.pack_start(info, expand=False, fill=False)
-
- hbox.show_all()
- return hbox, entry
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hobcolor.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hobcolor.py
deleted file mode 100644
index 3316542a2..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hobcolor.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2012 Intel Corporation
-#
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-class HobColors:
- WHITE = "#ffffff"
- PALE_GREEN = "#aaffaa"
- ORANGE = "#eb8e68"
- PALE_RED = "#ffaaaa"
- GRAY = "#aaaaaa"
- LIGHT_GRAY = "#dddddd"
- SLIGHT_DARK = "#5f5f5f"
- DARK = "#3c3b37"
- BLACK = "#000000"
- PALE_BLUE = "#53b8ff"
- DEEP_RED = "#aa3e3e"
- KHAKI = "#fff68f"
-
- OK = WHITE
- RUNNING = PALE_GREEN
- WARNING = ORANGE
- ERROR = PALE_RED
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hobwidget.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hobwidget.py
deleted file mode 100644
index 2b969c146..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/hobwidget.py
+++ /dev/null
@@ -1,904 +0,0 @@
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011-2012 Intel Corporation
-#
-# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-import gtk
-import gobject
-import os
-import os.path
-import sys
-import pango, pangocairo
-import cairo
-import math
-
-from bb.ui.crumbs.hobcolor import HobColors
-from bb.ui.crumbs.persistenttooltip import PersistentTooltip
-
-class hwc:
-
- MAIN_WIN_WIDTH = 1024
- MAIN_WIN_HEIGHT = 700
-
-class hic:
-
- HOB_ICON_BASE_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), ("ui/icons/"))
-
- ICON_RCIPE_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('recipe/recipe_display.png'))
- ICON_RCIPE_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('recipe/recipe_hover.png'))
- ICON_PACKAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('packages/packages_display.png'))
- ICON_PACKAGES_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('packages/packages_hover.png'))
- ICON_LAYERS_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('layers/layers_display.png'))
- ICON_LAYERS_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('layers/layers_hover.png'))
- ICON_IMAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('images/images_display.png'))
- ICON_IMAGES_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('images/images_hover.png'))
- ICON_SETTINGS_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('settings/settings_display.png'))
- ICON_SETTINGS_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('settings/settings_hover.png'))
- ICON_INFO_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_display.png'))
- ICON_INFO_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_hover.png'))
- ICON_INDI_CONFIRM_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/confirmation.png'))
- ICON_INDI_ERROR_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/denied.png'))
- ICON_INDI_REMOVE_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/remove.png'))
- ICON_INDI_REMOVE_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/remove-hover.png'))
- ICON_INDI_ADD_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/add.png'))
- ICON_INDI_ADD_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/add-hover.png'))
- ICON_INDI_REFRESH_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/refresh.png'))
- ICON_INDI_ALERT_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/alert.png'))
- ICON_INDI_TICK_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/tick.png'))
- ICON_INDI_INFO_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/info.png'))
-
-class HobViewTable (gtk.VBox):
- """
- A VBox to contain the table for different recipe views and package view
- """
- __gsignals__ = {
- "toggled" : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- (gobject.TYPE_PYOBJECT,
- gobject.TYPE_STRING,
- gobject.TYPE_INT,
- gobject.TYPE_PYOBJECT,)),
- "row-activated" : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- (gobject.TYPE_PYOBJECT,
- gobject.TYPE_PYOBJECT,)),
- "cell-fadeinout-stopped" : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- (gobject.TYPE_PYOBJECT,
- gobject.TYPE_PYOBJECT,
- gobject.TYPE_PYOBJECT,)),
- }
-
- def __init__(self, columns, name):
- gtk.VBox.__init__(self, False, 6)
- self.table_tree = gtk.TreeView()
- self.table_tree.set_headers_visible(True)
- self.table_tree.set_headers_clickable(True)
- self.table_tree.set_rules_hint(True)
- self.table_tree.set_enable_tree_lines(True)
- self.table_tree.get_selection().set_mode(gtk.SELECTION_SINGLE)
- self.toggle_columns = []
- self.table_tree.connect("row-activated", self.row_activated_cb)
- self.top_bar = None
- self.tab_name = name
-
- for i, column in enumerate(columns):
- col_name = column['col_name']
- col = gtk.TreeViewColumn(col_name)
- col.set_clickable(True)
- col.set_resizable(True)
- if self.tab_name.startswith('Included'):
- if col_name!='Included':
- col.set_sort_column_id(column['col_id'])
- else:
- col.set_sort_column_id(column['col_id'])
- if 'col_min' in column.keys():
- col.set_min_width(column['col_min'])
- if 'col_max' in column.keys():
- col.set_max_width(column['col_max'])
- if 'expand' in column.keys():
- col.set_expand(True)
- self.table_tree.append_column(col)
-
- if (not 'col_style' in column.keys()) or column['col_style'] == 'text':
- cell = gtk.CellRendererText()
- col.pack_start(cell, True)
- col.set_attributes(cell, text=column['col_id'])
- if 'col_t_id' in column.keys():
- col.add_attribute(cell, 'font', column['col_t_id'])
- elif column['col_style'] == 'check toggle':
- cell = HobCellRendererToggle()
- cell.set_property('activatable', True)
- cell.connect("toggled", self.toggled_cb, i, self.table_tree)
- cell.connect_render_state_changed(self.stop_cell_fadeinout_cb, self.table_tree)
- self.toggle_id = i
- col.pack_end(cell, True)
- col.set_attributes(cell, active=column['col_id'])
- self.toggle_columns.append(col_name)
- if 'col_group' in column.keys():
- col.set_cell_data_func(cell, self.set_group_number_cb)
- elif column['col_style'] == 'radio toggle':
- cell = gtk.CellRendererToggle()
- cell.set_property('activatable', True)
- cell.set_radio(True)
- cell.connect("toggled", self.toggled_cb, i, self.table_tree)
- self.toggle_id = i
- col.pack_end(cell, True)
- col.set_attributes(cell, active=column['col_id'])
- self.toggle_columns.append(col_name)
- elif column['col_style'] == 'binb':
- cell = gtk.CellRendererText()
- col.pack_start(cell, True)
- col.set_cell_data_func(cell, self.display_binb_cb, column['col_id'])
- if 'col_t_id' in column.keys():
- col.add_attribute(cell, 'font', column['col_t_id'])
-
- self.scroll = gtk.ScrolledWindow()
- self.scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
- self.scroll.add(self.table_tree)
-
- self.pack_end(self.scroll, True, True, 0)
-
- def add_no_result_bar(self, entry):
- color = HobColors.KHAKI
- self.top_bar = gtk.EventBox()
- self.top_bar.set_size_request(-1, 70)
- self.top_bar.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
- self.top_bar.set_flags(gtk.CAN_DEFAULT)
- self.top_bar.grab_default()
-
- no_result_tab = gtk.Table(5, 20, True)
- self.top_bar.add(no_result_tab)
-
- label = gtk.Label()
- label.set_alignment(0.0, 0.5)
- title = "No results matching your search"
- label.set_markup("<span size='x-large'><b>%s</b></span>" % title)
- no_result_tab.attach(label, 1, 14, 1, 4)
-
- clear_button = HobButton("Clear search")
- clear_button.set_tooltip_text("Clear search query")
- clear_button.connect('clicked', self.set_search_entry_clear_cb, entry)
- no_result_tab.attach(clear_button, 16, 19, 1, 4)
-
- self.pack_start(self.top_bar, False, True, 12)
- self.top_bar.show_all()
-
- def set_search_entry_clear_cb(self, button, search):
- if search.get_editable() == True:
- search.set_text("")
- search.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
- search.grab_focus()
-
- def display_binb_cb(self, col, cell, model, it, col_id):
- binb = model.get_value(it, col_id)
- # Just display the first item
- if binb:
- bin = binb.split(', ')
- total_no = len(bin)
- if total_no > 1 and bin[0] == "User Selected":
- if total_no > 2:
- present_binb = bin[1] + ' (+' + str(total_no - 1) + ')'
- else:
- present_binb = bin[1]
- else:
- if total_no > 1:
- present_binb = bin[0] + ' (+' + str(total_no - 1) + ')'
- else:
- present_binb = bin[0]
- cell.set_property('text', present_binb)
- else:
- cell.set_property('text', "")
- return True
-
- def set_model(self, tree_model):
- self.table_tree.set_model(tree_model)
-
- def toggle_default(self):
- model = self.table_tree.get_model()
- if not model:
- return
- iter = model.get_iter_first()
- if iter:
- rowpath = model.get_path(iter)
- model[rowpath][self.toggle_id] = True
-
- def toggled_cb(self, cell, path, columnid, tree):
- self.emit("toggled", cell, path, columnid, tree)
-
- def row_activated_cb(self, tree, path, view_column):
- if not view_column.get_title() in self.toggle_columns:
- self.emit("row-activated", tree.get_model(), path)
-
- def stop_cell_fadeinout_cb(self, ctrl, cell, tree):
- self.emit("cell-fadeinout-stopped", ctrl, cell, tree)
-
- def set_group_number_cb(self, col, cell, model, iter):
- if model and (model.iter_parent(iter) == None):
- cell.cell_attr["number_of_children"] = model.iter_n_children(iter)
- else:
- cell.cell_attr["number_of_children"] = 0
-
- def connect_group_selection(self, cb_func):
- self.table_tree.get_selection().connect("changed", cb_func)
-
-"""
-A method to calculate a softened value for the colour of widget when in the
-provided state.
-
-widget: the widget whose style to use
-state: the state of the widget to use the style for
-
-Returns a string value representing the softened colour
-"""
-def soften_color(widget, state=gtk.STATE_NORMAL):
- # this colour munging routine is heavily inspired bu gdu_util_get_mix_color()
- # from gnome-disk-utility:
- # http://git.gnome.org/browse/gnome-disk-utility/tree/src/gdu-gtk/gdu-gtk.c?h=gnome-3-0
- blend = 0.7
- style = widget.get_style()
- color = style.text[state]
- color.red = color.red * blend + style.base[state].red * (1.0 - blend)
- color.green = color.green * blend + style.base[state].green * (1.0 - blend)
- color.blue = color.blue * blend + style.base[state].blue * (1.0 - blend)
- return color.to_string()
-
-class BaseHobButton(gtk.Button):
- """
- A gtk.Button subclass which follows the visual design of Hob for primary
- action buttons
-
- label: the text to display as the button's label
- """
- def __init__(self, label):
- gtk.Button.__init__(self, label)
- HobButton.style_button(self)
-
- @staticmethod
- def style_button(button):
- style = button.get_style()
- style = gtk.rc_get_style_by_paths(gtk.settings_get_default(), 'gtk-button', 'gtk-button', gobject.TYPE_NONE)
-
- button.set_flags(gtk.CAN_DEFAULT)
- button.grab_default()
-
-# label = "<span size='x-large'><b>%s</b></span>" % gobject.markup_escape_text(button.get_label())
- label = button.get_label()
- button.set_label(label)
- button.child.set_use_markup(True)
-
-class HobButton(BaseHobButton):
- """
- A gtk.Button subclass which follows the visual design of Hob for primary
- action buttons
-
- label: the text to display as the button's label
- """
- def __init__(self, label):
- BaseHobButton.__init__(self, label)
- HobButton.style_button(self)
-
-class HobAltButton(BaseHobButton):
- """
- A gtk.Button subclass which has no relief, and so is more discrete
- """
- def __init__(self, label):
- BaseHobButton.__init__(self, label)
- HobAltButton.style_button(self)
-
- """
- A callback for the state-changed event to ensure the text is displayed
- differently when the widget is not sensitive
- """
- @staticmethod
- def desensitise_on_state_change_cb(button, state):
- if not button.get_property("sensitive"):
- HobAltButton.set_text(button, False)
- else:
- HobAltButton.set_text(button, True)
-
- """
- Set the button label with an appropriate colour for the current widget state
- """
- @staticmethod
- def set_text(button, sensitive=True):
- if sensitive:
- colour = HobColors.PALE_BLUE
- else:
- colour = HobColors.LIGHT_GRAY
- button.set_label("<span size='large' color='%s'><b>%s</b></span>" % (colour, gobject.markup_escape_text(button.text)))
- button.child.set_use_markup(True)
-
-class HobImageButton(gtk.Button):
- """
- A gtk.Button with an icon and two rows of text, the second of which is
- displayed in a blended colour.
-
- primary_text: the main button label
- secondary_text: optional second line of text
- icon_path: path to the icon file to display on the button
- """
- def __init__(self, primary_text, secondary_text="", icon_path="", hover_icon_path=""):
- gtk.Button.__init__(self)
- self.set_relief(gtk.RELIEF_NONE)
-
- self.icon_path = icon_path
- self.hover_icon_path = hover_icon_path
-
- hbox = gtk.HBox(False, 10)
- hbox.show()
- self.add(hbox)
- self.icon = gtk.Image()
- self.icon.set_from_file(self.icon_path)
- self.icon.set_alignment(0.5, 0.0)
- self.icon.show()
- if self.hover_icon_path and len(self.hover_icon_path):
- self.connect("enter-notify-event", self.set_hover_icon_cb)
- self.connect("leave-notify-event", self.set_icon_cb)
- hbox.pack_start(self.icon, False, False, 0)
- label = gtk.Label()
- label.set_alignment(0.0, 0.5)
- colour = soften_color(label)
- mark = "<span size='x-large'>%s</span>\n<span size='medium' fgcolor='%s' weight='ultralight'>%s</span>" % (primary_text, colour, secondary_text)
- label.set_markup(mark)
- label.show()
- hbox.pack_start(label, True, True, 0)
-
- def set_hover_icon_cb(self, widget, event):
- self.icon.set_from_file(self.hover_icon_path)
-
- def set_icon_cb(self, widget, event):
- self.icon.set_from_file(self.icon_path)
-
-class HobInfoButton(gtk.EventBox):
- """
- This class implements a button-like widget per the Hob visual and UX designs
- which will display a persistent tooltip, with the contents of tip_markup, when
- clicked.
-
- tip_markup: the Pango Markup to be displayed in the persistent tooltip
- """
- def __init__(self, tip_markup, parent=None):
- gtk.EventBox.__init__(self)
- self.image = gtk.Image()
- self.image.set_from_file(
- hic.ICON_INFO_DISPLAY_FILE)
- self.image.show()
- self.add(self.image)
- self.tip_markup = tip_markup
- self.my_parent = parent
-
- self.set_events(gtk.gdk.BUTTON_RELEASE |
- gtk.gdk.ENTER_NOTIFY_MASK |
- gtk.gdk.LEAVE_NOTIFY_MASK)
-
- self.connect("button-release-event", self.button_release_cb)
- self.connect("enter-notify-event", self.mouse_in_cb)
- self.connect("leave-notify-event", self.mouse_out_cb)
-
- """
- When the mouse click is released emulate a button-click and show the associated
- PersistentTooltip
- """
- def button_release_cb(self, widget, event):
- from bb.ui.crumbs.hig.propertydialog import PropertyDialog
- self.dialog = PropertyDialog(title = '',
- parent = self.my_parent,
- information = self.tip_markup,
- flags = gtk.DIALOG_DESTROY_WITH_PARENT
- | gtk.DIALOG_NO_SEPARATOR)
-
- button = self.dialog.add_button("Close", gtk.RESPONSE_CANCEL)
- HobAltButton.style_button(button)
- button.connect("clicked", lambda w: self.dialog.destroy())
- self.dialog.show_all()
- self.dialog.run()
-
- """
- Change to the prelight image when the mouse enters the widget
- """
- def mouse_in_cb(self, widget, event):
- self.image.set_from_file(hic.ICON_INFO_HOVER_FILE)
-
- """
- Change to the stock image when the mouse enters the widget
- """
- def mouse_out_cb(self, widget, event):
- self.image.set_from_file(hic.ICON_INFO_DISPLAY_FILE)
-
-class HobIndicator(gtk.DrawingArea):
- def __init__(self, count):
- gtk.DrawingArea.__init__(self)
- # Set no window for transparent background
- self.set_has_window(False)
- self.set_size_request(38,38)
- # We need to pass through button clicks
- self.add_events(gtk.gdk.BUTTON_PRESS_MASK | gtk.gdk.BUTTON_RELEASE_MASK)
-
- self.connect('expose-event', self.expose)
-
- self.count = count
- self.color = HobColors.GRAY
-
- def expose(self, widget, event):
- if self.count and self.count > 0:
- ctx = widget.window.cairo_create()
-
- x, y, w, h = self.allocation
-
- ctx.set_operator(cairo.OPERATOR_OVER)
- ctx.set_source_color(gtk.gdk.color_parse(self.color))
- ctx.translate(w/2, h/2)
- ctx.arc(x, y, min(w,h)/2 - 2, 0, 2*math.pi)
- ctx.fill_preserve()
-
- layout = self.create_pango_layout(str(self.count))
- textw, texth = layout.get_pixel_size()
- x = (w/2)-(textw/2) + x
- y = (h/2) - (texth/2) + y
- ctx.move_to(x, y)
- self.window.draw_layout(self.style.light_gc[gtk.STATE_NORMAL], int(x), int(y), layout)
-
- def set_count(self, count):
- self.count = count
-
- def set_active(self, active):
- if active:
- self.color = HobColors.DEEP_RED
- else:
- self.color = HobColors.GRAY
-
-class HobTabLabel(gtk.HBox):
- def __init__(self, text, count=0):
- gtk.HBox.__init__(self, False, 0)
- self.indicator = HobIndicator(count)
- self.indicator.show()
- self.pack_end(self.indicator, False, False)
- self.lbl = gtk.Label(text)
- self.lbl.set_alignment(0.0, 0.5)
- self.lbl.show()
- self.pack_end(self.lbl, True, True, 6)
-
- def set_count(self, count):
- self.indicator.set_count(count)
-
- def set_active(self, active=True):
- self.indicator.set_active(active)
-
-class HobNotebook(gtk.Notebook):
- def __init__(self):
- gtk.Notebook.__init__(self)
- self.set_property('homogeneous', True)
-
- self.pages = []
-
- self.search = None
- self.search_focus = False
- self.page_changed = False
-
- self.connect("switch-page", self.page_changed_cb)
-
- self.show_all()
-
- def page_changed_cb(self, nb, page, page_num):
- for p, lbl in enumerate(self.pages):
- if p == page_num:
- lbl.set_active()
- else:
- lbl.set_active(False)
-
- if self.search:
- self.page_changed = True
- self.reset_entry(self.search, page_num)
-
- def append_page(self, child, tab_label, tab_tooltip=None):
- label = HobTabLabel(tab_label)
- if tab_tooltip:
- label.set_tooltip_text(tab_tooltip)
- label.set_active(False)
- self.pages.append(label)
- gtk.Notebook.append_page(self, child, label)
-
- def set_entry(self, names, tips):
- self.search = gtk.Entry()
- self.search_names = names
- self.search_tips = tips
- style = self.search.get_style()
- style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.GRAY, False, False)
- self.search.set_style(style)
- self.search.set_text(names[0])
- self.search.set_tooltip_text(self.search_tips[0])
- self.search.props.has_tooltip = True
-
- self.search.set_editable(False)
- self.search.set_icon_from_stock(gtk.ENTRY_ICON_SECONDARY, gtk.STOCK_CLEAR)
- self.search.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
- self.search.connect("icon-release", self.set_search_entry_clear_cb)
- self.search.set_width_chars(30)
- self.search.show()
-
- self.search.connect("focus-in-event", self.set_search_entry_editable_cb)
- self.search.connect("focus-out-event", self.set_search_entry_reset_cb)
- self.set_action_widget(self.search, gtk.PACK_END)
-
- def show_indicator_icon(self, title, number):
- for child in self.pages:
- if child.lbl.get_label() == title:
- child.set_count(number)
-
- def hide_indicator_icon(self, title):
- for child in self.pages:
- if child.lbl.get_label() == title:
- child.set_count(0)
-
- def set_search_entry_editable_cb(self, search, event):
- self.search_focus = True
- search.set_editable(True)
- text = search.get_text()
- if text in self.search_names:
- search.set_text("")
- style = self.search.get_style()
- style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.BLACK, False, False)
- search.set_style(style)
-
- def set_search_entry_reset_cb(self, search, event):
- page_num = self.get_current_page()
- text = search.get_text()
- if not text:
- self.reset_entry(search, page_num)
-
- def reset_entry(self, entry, page_num):
- style = entry.get_style()
- style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.GRAY, False, False)
- entry.set_style(style)
- entry.set_text(self.search_names[page_num])
- entry.set_tooltip_text(self.search_tips[page_num])
- entry.set_editable(False)
- entry.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
-
- def set_search_entry_clear_cb(self, search, icon_pos, event):
- if search.get_editable() == True:
- search.set_text("")
- search.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
- search.grab_focus()
-
- def set_page(self, title):
- for child in self.pages:
- if child.lbl.get_label() == title:
- child.grab_focus()
- self.set_current_page(self.pages.index(child))
- return
-
-class HobWarpCellRendererText(gtk.CellRendererText):
- def __init__(self, col_number):
- gtk.CellRendererText.__init__(self)
- self.set_property("wrap-mode", pango.WRAP_WORD_CHAR)
- self.set_property("wrap-width", 300) # default value wrap width is 300
- self.col_n = col_number
-
- def do_render(self, window, widget, background_area, cell_area, expose_area, flags):
- if widget:
- self.props.wrap_width = self.get_resized_wrap_width(widget, widget.get_column(self.col_n))
- return gtk.CellRendererText.do_render(self, window, widget, background_area, cell_area, expose_area, flags)
-
- def get_resized_wrap_width(self, treeview, column):
- otherCols = []
- for col in treeview.get_columns():
- if col != column:
- otherCols.append(col)
- adjwidth = treeview.allocation.width - sum(c.get_width() for c in otherCols)
- adjwidth -= treeview.style_get_property("horizontal-separator") * 4
- if self.props.wrap_width == adjwidth or adjwidth <= 0:
- adjwidth = self.props.wrap_width
- return adjwidth
-
-gobject.type_register(HobWarpCellRendererText)
-
-class HobIconChecker(hic):
- def set_hob_icon_to_stock_icon(self, file_path, stock_id=""):
- try:
- pixbuf = gtk.gdk.pixbuf_new_from_file(file_path)
- except Exception, e:
- return None
-
- if stock_id and (gtk.icon_factory_lookup_default(stock_id) == None):
- icon_factory = gtk.IconFactory()
- icon_factory.add_default()
- icon_factory.add(stock_id, gtk.IconSet(pixbuf))
- gtk.stock_add([(stock_id, '_label', 0, 0, '')])
-
- return icon_factory.lookup(stock_id)
-
- return None
-
- """
- For make hob icon consistently by request, and avoid icon view diff by system or gtk version, we use some 'hob icon' to replace the 'gtk icon'.
- this function check the stock_id and make hob_id to replaced the gtk_id then return it or ""
- """
- def check_stock_icon(self, stock_name=""):
- HOB_CHECK_STOCK_NAME = {
- ('hic-dialog-info', 'gtk-dialog-info', 'dialog-info') : self.ICON_INDI_INFO_FILE,
- ('hic-ok', 'gtk-ok', 'ok') : self.ICON_INDI_TICK_FILE,
- ('hic-dialog-error', 'gtk-dialog-error', 'dialog-error') : self.ICON_INDI_ERROR_FILE,
- ('hic-dialog-warning', 'gtk-dialog-warning', 'dialog-warning') : self.ICON_INDI_ALERT_FILE,
- ('hic-task-refresh', 'gtk-execute', 'execute') : self.ICON_INDI_REFRESH_FILE,
- }
- valid_stock_id = stock_name
- if stock_name:
- for names, path in HOB_CHECK_STOCK_NAME.iteritems():
- if stock_name in names:
- valid_stock_id = names[0]
- if not gtk.icon_factory_lookup_default(valid_stock_id):
- self.set_hob_icon_to_stock_icon(path, valid_stock_id)
-
- return valid_stock_id
-
-class HobCellRendererController(gobject.GObject):
- (MODE_CYCLE_RUNNING, MODE_ONE_SHORT) = range(2)
- __gsignals__ = {
- "run-timer-stopped" : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- }
- def __init__(self, runningmode=MODE_CYCLE_RUNNING, is_draw_row=False):
- gobject.GObject.__init__(self)
- self.timeout_id = None
- self.current_angle_pos = 0.0
- self.step_angle = 0.0
- self.tree_headers_height = 0
- self.running_cell_areas = []
- self.running_mode = runningmode
- self.is_queue_draw_row_area = is_draw_row
- self.force_stop_enable = False
-
- def is_active(self):
- if self.timeout_id:
- return True
- else:
- return False
-
- def reset_run(self):
- self.force_stop()
- self.running_cell_areas = []
- self.current_angle_pos = 0.0
- self.step_angle = 0.0
-
- ''' time_iterval: (1~1000)ms, which will be as the basic interval count for timer
- init_usrdata: the current data which related the progress-bar will be at
- min_usrdata: the range of min of user data
- max_usrdata: the range of max of user data
- step: each step which you want to progress
- Note: the init_usrdata should in the range of from min to max, and max should > min
- step should < (max - min)
- '''
- def start_run(self, time_iterval, init_usrdata, min_usrdata, max_usrdata, step, tree):
- if (not time_iterval) or (not max_usrdata):
- return
- usr_range = (max_usrdata - min_usrdata) * 1.0
- self.current_angle_pos = (init_usrdata * 1.0) / usr_range
- self.step_angle = (step * 1) / usr_range
- self.timeout_id = gobject.timeout_add(int(time_iterval),
- self.make_image_on_progressing_cb, tree)
- self.tree_headers_height = self.get_treeview_headers_height(tree)
- self.force_stop_enable = False
-
- def force_stop(self):
- self.emit("run-timer-stopped")
- self.force_stop_enable = True
- if self.timeout_id:
- if gobject.source_remove(self.timeout_id):
- self.timeout_id = None
-
- def on_draw_pixbuf_cb(self, pixbuf, cr, x, y, img_width, img_height, do_refresh=True):
- if pixbuf:
- r = max(img_width/2, img_height/2)
- cr.translate(x + r, y + r)
- if do_refresh:
- cr.rotate(2 * math.pi * self.current_angle_pos)
-
- cr.set_source_pixbuf(pixbuf, -img_width/2, -img_height/2)
- cr.paint()
-
- def on_draw_fadeinout_cb(self, cr, color, x, y, width, height, do_fadeout=True):
- if do_fadeout:
- alpha = self.current_angle_pos * 0.8
- else:
- alpha = (1.0 - self.current_angle_pos) * 0.8
-
- cr.set_source_rgba(color.red, color.green, color.blue, alpha)
- cr.rectangle(x, y, width, height)
- cr.fill()
-
- def get_treeview_headers_height(self, tree):
- if tree and (tree.get_property("headers-visible") == True):
- height = tree.get_allocation().height - tree.get_bin_window().get_size()[1]
- return height
-
- return 0
-
- def make_image_on_progressing_cb(self, tree):
- self.current_angle_pos += self.step_angle
- if self.running_mode == self.MODE_CYCLE_RUNNING:
- if (self.current_angle_pos >= 1):
- self.current_angle_pos = 0
- else:
- if self.current_angle_pos > 1:
- self.force_stop()
- return False
-
- if self.is_queue_draw_row_area:
- for path in self.running_cell_areas:
- rect = tree.get_cell_area(path, tree.get_column(0))
- row_x, _, row_width, _ = tree.get_visible_rect()
- tree.queue_draw_area(row_x, rect.y + self.tree_headers_height, row_width, rect.height)
- else:
- for rect in self.running_cell_areas:
- tree.queue_draw_area(rect.x, rect.y + self.tree_headers_height, rect.width, rect.height)
-
- return (not self.force_stop_enable)
-
- def append_running_cell_area(self, cell_area):
- if cell_area and (cell_area not in self.running_cell_areas):
- self.running_cell_areas.append(cell_area)
-
- def remove_running_cell_area(self, cell_area):
- if cell_area in self.running_cell_areas:
- self.running_cell_areas.remove(cell_area)
- if not self.running_cell_areas:
- self.reset_run()
-
-gobject.type_register(HobCellRendererController)
-
-class HobCellRendererPixbuf(gtk.CellRendererPixbuf):
- def __init__(self):
- gtk.CellRendererPixbuf.__init__(self)
- self.control = HobCellRendererController()
- # add icon checker for make the gtk-icon transfer to hob-icon
- self.checker = HobIconChecker()
- self.set_property("stock-size", gtk.ICON_SIZE_DND)
-
- def get_pixbuf_from_stock_icon(self, widget, stock_id="", size=gtk.ICON_SIZE_DIALOG):
- if widget and stock_id and gtk.icon_factory_lookup_default(stock_id):
- return widget.render_icon(stock_id, size)
-
- return None
-
- def set_icon_name_to_id(self, new_name):
- if new_name and type(new_name) == str:
- # check the name is need to transfer to hob icon or not
- name = self.checker.check_stock_icon(new_name)
- if name.startswith("hic") or name.startswith("gtk"):
- stock_id = name
- else:
- stock_id = 'gtk-' + name
-
- return stock_id
-
- ''' render cell exactly, "icon-name" is priority
- if use the 'hic-task-refresh' will make the pix animation
- if 'pix' will change the pixbuf for it from the pixbuf or image.
- '''
- def do_render(self, window, tree, background_area,cell_area, expose_area, flags):
- if (not self.control) or (not tree):
- return
-
- x, y, w, h = self.on_get_size(tree, cell_area)
- x += cell_area.x
- y += cell_area.y
- w -= 2 * self.get_property("xpad")
- h -= 2 * self.get_property("ypad")
-
- stock_id = ""
- if self.props.icon_name:
- stock_id = self.set_icon_name_to_id(self.props.icon_name)
- elif self.props.stock_id:
- stock_id = self.props.stock_id
- elif self.props.pixbuf:
- pix = self.props.pixbuf
- else:
- return
-
- if stock_id:
- pix = self.get_pixbuf_from_stock_icon(tree, stock_id, self.props.stock_size)
- if stock_id == 'hic-task-refresh':
- self.control.append_running_cell_area(cell_area)
- if self.control.is_active():
- self.control.on_draw_pixbuf_cb(pix, window.cairo_create(), x, y, w, h, True)
- else:
- self.control.start_run(200, 0, 0, 1000, 150, tree)
- else:
- self.control.remove_running_cell_area(cell_area)
- self.control.on_draw_pixbuf_cb(pix, window.cairo_create(), x, y, w, h, False)
-
- def on_get_size(self, widget, cell_area):
- if self.props.icon_name or self.props.pixbuf or self.props.stock_id:
- w, h = gtk.icon_size_lookup(self.props.stock_size)
- calc_width = self.get_property("xpad") * 2 + w
- calc_height = self.get_property("ypad") * 2 + h
- x_offset = 0
- y_offset = 0
- if cell_area and w > 0 and h > 0:
- x_offset = self.get_property("xalign") * (cell_area.width - calc_width - self.get_property("xpad"))
- y_offset = self.get_property("yalign") * (cell_area.height - calc_height - self.get_property("ypad"))
-
- return x_offset, y_offset, w, h
-
- return 0, 0, 0, 0
-
-gobject.type_register(HobCellRendererPixbuf)
-
-class HobCellRendererToggle(gtk.CellRendererToggle):
- def __init__(self):
- gtk.CellRendererToggle.__init__(self)
- self.ctrl = HobCellRendererController(is_draw_row=True)
- self.ctrl.running_mode = self.ctrl.MODE_ONE_SHORT
- self.cell_attr = {"fadeout": False, "number_of_children": 0}
-
- def do_render(self, window, widget, background_area, cell_area, expose_area, flags):
- if (not self.ctrl) or (not widget):
- return
-
- if flags & gtk.CELL_RENDERER_SELECTED:
- state = gtk.STATE_SELECTED
- else:
- state = gtk.STATE_NORMAL
-
- if self.ctrl.is_active():
- path = widget.get_path_at_pos(cell_area.x + cell_area.width/2, cell_area.y + cell_area.height/2)
- # sometimes the parameters of cell_area will be a negative number,such as pull up down the scroll bar
- # it's over the tree container range, so the path will be bad
- if not path: return
- path = path[0]
- if path in self.ctrl.running_cell_areas:
- cr = window.cairo_create()
- color = widget.get_style().base[state]
-
- row_x, _, row_width, _ = widget.get_visible_rect()
- border_y = self.get_property("ypad")
- self.ctrl.on_draw_fadeinout_cb(cr, color, row_x, cell_area.y - border_y, row_width, \
- cell_area.height + border_y * 2, self.cell_attr["fadeout"])
- # draw number of a group
- if self.cell_attr["number_of_children"]:
- text = "%d pkg" % self.cell_attr["number_of_children"]
- pangolayout = widget.create_pango_layout(text)
- textw, texth = pangolayout.get_pixel_size()
- x = cell_area.x + (cell_area.width/2) - (textw/2)
- y = cell_area.y + (cell_area.height/2) - (texth/2)
-
- widget.style.paint_layout(window, state, True, cell_area, widget, "checkbox", x, y, pangolayout)
- else:
- return gtk.CellRendererToggle.do_render(self, window, widget, background_area, cell_area, expose_area, flags)
-
- '''delay: normally delay time is 1000ms
- cell_list: whilch cells need to be render
- '''
- def fadeout(self, tree, delay, cell_list=None):
- if (delay < 200) or (not tree):
- return
- self.cell_attr["fadeout"] = True
- self.ctrl.running_cell_areas = cell_list
- self.ctrl.start_run(200, 0, 0, delay, (delay * 200 / 1000), tree)
-
- def connect_render_state_changed(self, func, usrdata=None):
- if not func:
- return
- if usrdata:
- self.ctrl.connect("run-timer-stopped", func, self, usrdata)
- else:
- self.ctrl.connect("run-timer-stopped", func, self)
-
-gobject.type_register(HobCellRendererToggle)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/persistenttooltip.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/persistenttooltip.py
deleted file mode 100644
index 927c19429..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/persistenttooltip.py
+++ /dev/null
@@ -1,186 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2012 Intel Corporation
-#
-# Authored by Joshua Lock <josh@linux.intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gobject
-import gtk
-try:
- import gconf
-except:
- pass
-
-class PersistentTooltip(gtk.Window):
- """
- A tooltip which persists once shown until the user dismisses it with the Esc
- key or by clicking the close button.
-
- # FIXME: the PersistentTooltip should be disabled when the user clicks anywhere off
- # it. We can't do this with focus-out-event becuase modal ensures we have focus?
-
- markup: some Pango text markup to display in the tooltip
- """
- def __init__(self, markup, parent_win=None):
- gtk.Window.__init__(self, gtk.WINDOW_POPUP)
-
- # Inherit the system theme for a tooltip
- style = gtk.rc_get_style_by_paths(gtk.settings_get_default(),
- 'gtk-tooltip', 'gtk-tooltip', gobject.TYPE_NONE)
- self.set_style(style)
-
- # The placement of the close button on the tip should reflect how the
- # window manager of the users system places close buttons. Try to read
- # the metacity gconf key to determine whether the close button is on the
- # left or the right.
- # In the case that we can't determine the users configuration we default
- # to close buttons being on the right.
- __button_right = True
- try:
- client = gconf.client_get_default()
- order = client.get_string("/apps/metacity/general/button_layout")
- if order and order.endswith(":"):
- __button_right = False
- except NameError:
- pass
-
- # We need to ensure we're only shown once
- self.shown = False
-
- # We don't want any WM decorations
- self.set_decorated(False)
- # We don't want to show in the taskbar or window switcher
- self.set_skip_pager_hint(True)
- self.set_skip_taskbar_hint(True)
- # We must be modal to ensure we grab focus when presented from a gtk.Dialog
- self.set_modal(True)
-
- self.set_border_width(0)
- self.set_position(gtk.WIN_POS_MOUSE)
- self.set_opacity(0.95)
-
- # Ensure a reasonable minimum size
- self.set_geometry_hints(self, 100, 50)
-
- # Set this window as a transient window for parent(main window)
- if parent_win:
- self.set_transient_for(parent_win)
- self.set_destroy_with_parent(True)
- # Draw our label and close buttons
- hbox = gtk.HBox(False, 0)
- hbox.show()
- self.add(hbox)
-
- img = gtk.Image()
- img.set_from_stock(gtk.STOCK_CLOSE, gtk.ICON_SIZE_BUTTON)
-
- self.button = gtk.Button()
- self.button.set_image(img)
- self.button.connect("clicked", self._dismiss_cb)
- self.button.set_flags(gtk.CAN_DEFAULT)
- self.button.grab_focus()
- self.button.show()
- vbox = gtk.VBox(False, 0)
- vbox.show()
- vbox.pack_start(self.button, False, False, 0)
- if __button_right:
- hbox.pack_end(vbox, True, True, 0)
- else:
- hbox.pack_start(vbox, True, True, 0)
-
- self.set_default(self.button)
-
- bin = gtk.HBox(True, 6)
- bin.set_border_width(6)
- bin.show()
- self.label = gtk.Label()
- self.label.set_line_wrap(True)
- # We want to match the colours of the normal tooltips, as dictated by
- # the users gtk+-2.0 theme, wherever possible - on some systems this
- # requires explicitly setting a fg_color for the label which matches the
- # tooltip_fg_color
- settings = gtk.settings_get_default()
- colours = settings.get_property('gtk-color-scheme').split('\n')
- # remove any empty lines, there's likely to be a trailing one after
- # calling split on a dictionary-like string
- colours = filter(None, colours)
- for col in colours:
- item, val = col.split(': ')
- if item == 'tooltip_fg_color':
- style = self.label.get_style()
- style.fg[gtk.STATE_NORMAL] = gtk.gdk.color_parse(val)
- self.label.set_style(style)
- break # we only care for the tooltip_fg_color
-
- self.label.set_markup(markup)
- self.label.show()
- bin.add(self.label)
- hbox.pack_end(bin, True, True, 6)
-
- # add the original URL display for user reference
- if 'a href' in markup:
- hbox.set_tooltip_text(self.get_markup_url(markup))
- hbox.show()
-
- self.connect("key-press-event", self._catch_esc_cb)
-
- """
- Callback when the PersistentTooltip's close button is clicked.
- Hides the PersistentTooltip.
- """
- def _dismiss_cb(self, button):
- self.hide()
- return True
-
- """
- Callback when the Esc key is detected. Hides the PersistentTooltip.
- """
- def _catch_esc_cb(self, widget, event):
- keyname = gtk.gdk.keyval_name(event.keyval)
- if keyname == "Escape":
- self.hide()
- return True
-
- """
- Called to present the PersistentTooltip.
- Overrides the superclasses show() method to include state tracking.
- """
- def show(self):
- if not self.shown:
- self.shown = True
- gtk.Window.show(self)
-
- """
- Called to hide the PersistentTooltip.
- Overrides the superclasses hide() method to include state tracking.
- """
- def hide(self):
- self.shown = False
- gtk.Window.hide(self)
-
- """
- Called to get the hyperlink URL from markup text.
- """
- def get_markup_url(self, markup):
- url = "http:"
- if markup and type(markup) == str:
- s = markup
- if 'http:' in s:
- import re
- url = re.search('(http:[^,\\ "]+)', s).group(0)
-
- return url
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/progress.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/progress.py
deleted file mode 100644
index 1d28a111b..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/progress.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import gtk
-
-class ProgressBar(gtk.Dialog):
- def __init__(self, parent):
-
- gtk.Dialog.__init__(self, flags=(gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT))
- self.set_title("Parsing metadata, please wait...")
- self.set_default_size(500, 0)
- self.set_transient_for(parent)
- self.progress = gtk.ProgressBar()
- self.vbox.pack_start(self.progress)
- self.show_all()
-
- def set_text(self, msg):
- self.progress.set_text(msg)
-
- def update(self, x, y):
- self.progress.set_fraction(float(x)/float(y))
- self.progress.set_text("%2d %%" % (x*100/y))
-
- def pulse(self):
- self.progress.set_text("Loading...")
- self.progress.pulse()
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/progressbar.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/progressbar.py
deleted file mode 100644
index 3e2c660e4..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/progressbar.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011 Intel Corporation
-#
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-from bb.ui.crumbs.hobcolor import HobColors
-
-class HobProgressBar (gtk.ProgressBar):
- def __init__(self):
- gtk.ProgressBar.__init__(self)
- self.set_rcstyle(True)
- self.percentage = 0
-
- def set_rcstyle(self, status):
- rcstyle = gtk.RcStyle()
- rcstyle.fg[2] = gtk.gdk.Color(HobColors.BLACK)
- if status == "stop":
- rcstyle.bg[3] = gtk.gdk.Color(HobColors.WARNING)
- elif status == "fail":
- rcstyle.bg[3] = gtk.gdk.Color(HobColors.ERROR)
- else:
- rcstyle.bg[3] = gtk.gdk.Color(HobColors.RUNNING)
- self.modify_style(rcstyle)
-
- def set_title(self, text=None):
- if not text:
- text = ""
- text += " %.0f%%" % self.percentage
- self.set_text(text)
-
- def set_stop_title(self, text=None):
- if not text:
- text = ""
- self.set_text(text)
-
- def reset(self):
- self.set_fraction(0)
- self.set_text("")
- self.set_rcstyle(True)
- self.percentage = 0
-
- def update(self, fraction):
- self.percentage = int(fraction * 100)
- self.set_fraction(fraction)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/puccho.glade b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/puccho.glade
deleted file mode 100644
index d7553a6e1..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/puccho.glade
+++ /dev/null
@@ -1,606 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<!DOCTYPE glade-interface SYSTEM "glade-2.0.dtd">
-<!--Generated with glade3 3.4.5 on Mon Nov 10 12:24:12 2008 -->
-<glade-interface>
- <widget class="GtkDialog" id="build_dialog">
- <property name="title" translatable="yes">Start a build</property>
- <property name="window_position">GTK_WIN_POS_CENTER_ON_PARENT</property>
- <property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
- <property name="has_separator">False</property>
- <child internal-child="vbox">
- <widget class="GtkVBox" id="dialog-vbox1">
- <property name="visible">True</property>
- <property name="spacing">2</property>
- <child>
- <widget class="GtkTable" id="build_table">
- <property name="visible">True</property>
- <property name="border_width">6</property>
- <property name="n_rows">7</property>
- <property name="n_columns">3</property>
- <property name="column_spacing">5</property>
- <property name="row_spacing">6</property>
- <child>
- <widget class="GtkAlignment" id="status_alignment">
- <property name="visible">True</property>
- <property name="left_padding">12</property>
- <child>
- <widget class="GtkHBox" id="status_hbox">
- <property name="spacing">6</property>
- <child>
- <widget class="GtkImage" id="status_image">
- <property name="visible">True</property>
- <property name="no_show_all">True</property>
- <property name="xalign">0</property>
- <property name="stock">gtk-dialog-error</property>
- </widget>
- <packing>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="status_label">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">If you see this text something is wrong...</property>
- <property name="use_markup">True</property>
- <property name="use_underline">True</property>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">2</property>
- <property name="bottom_attach">3</property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label2">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">&lt;b&gt;Build configuration&lt;/b&gt;</property>
- <property name="use_markup">True</property>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">3</property>
- <property name="bottom_attach">4</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkComboBox" id="image_combo">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">2</property>
- <property name="top_attach">6</property>
- <property name="bottom_attach">7</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="image_label">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="xalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Image:</property>
- </widget>
- <packing>
- <property name="top_attach">6</property>
- <property name="bottom_attach">7</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkComboBox" id="distribution_combo">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">2</property>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="distribution_label">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="xalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Distribution:</property>
- </widget>
- <packing>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkComboBox" id="machine_combo">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">2</property>
- <property name="top_attach">4</property>
- <property name="bottom_attach">5</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="machine_label">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="xalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Machine:</property>
- </widget>
- <packing>
- <property name="top_attach">4</property>
- <property name="bottom_attach">5</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkButton" id="refresh_button">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-refresh</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- <packing>
- <property name="left_attach">2</property>
- <property name="right_attach">3</property>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkEntry" id="location_entry">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="width_chars">32</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">2</property>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label3">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Location:</property>
- </widget>
- <packing>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label1">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">&lt;b&gt;Repository&lt;/b&gt;</property>
- <property name="use_markup">True</property>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment1">
- <property name="visible">True</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="left_attach">2</property>
- <property name="right_attach">3</property>
- <property name="top_attach">4</property>
- <property name="bottom_attach">5</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment2">
- <property name="visible">True</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="left_attach">2</property>
- <property name="right_attach">3</property>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment3">
- <property name="visible">True</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="left_attach">2</property>
- <property name="right_attach">3</property>
- <property name="top_attach">6</property>
- <property name="bottom_attach">7</property>
- <property name="y_options"></property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- <child internal-child="action_area">
- <widget class="GtkHButtonBox" id="dialog-action_area1">
- <property name="visible">True</property>
- <property name="layout_style">GTK_BUTTONBOX_END</property>
- <child>
- <placeholder/>
- </child>
- <child>
- <placeholder/>
- </child>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="expand">False</property>
- <property name="pack_type">GTK_PACK_END</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- <widget class="GtkDialog" id="dialog2">
- <property name="window_position">GTK_WIN_POS_CENTER_ON_PARENT</property>
- <property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
- <property name="has_separator">False</property>
- <child internal-child="vbox">
- <widget class="GtkVBox" id="dialog-vbox2">
- <property name="visible">True</property>
- <property name="spacing">2</property>
- <child>
- <widget class="GtkTable" id="table2">
- <property name="visible">True</property>
- <property name="border_width">6</property>
- <property name="n_rows">7</property>
- <property name="n_columns">3</property>
- <property name="column_spacing">6</property>
- <property name="row_spacing">6</property>
- <child>
- <widget class="GtkLabel" id="label7">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">&lt;b&gt;Repositories&lt;/b&gt;</property>
- <property name="use_markup">True</property>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment4">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="left_padding">12</property>
- <child>
- <widget class="GtkScrolledWindow" id="scrolledwindow1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <child>
- <widget class="GtkTreeView" id="treeview1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="headers_clickable">True</property>
- </widget>
- </child>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">2</property>
- <property name="bottom_attach">3</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkEntry" id="entry1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">3</property>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label9">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">&lt;b&gt;Additional packages&lt;/b&gt;</property>
- <property name="use_markup">True</property>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">4</property>
- <property name="bottom_attach">5</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment6">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="xscale">0</property>
- <child>
- <widget class="GtkLabel" id="label8">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="yalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Location: </property>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment7">
- <property name="visible">True</property>
- <property name="xalign">1</property>
- <property name="xscale">0</property>
- <child>
- <widget class="GtkHButtonBox" id="hbuttonbox1">
- <property name="visible">True</property>
- <property name="spacing">5</property>
- <child>
- <widget class="GtkButton" id="button7">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-remove</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- </child>
- <child>
- <widget class="GtkButton" id="button6">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-edit</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- <child>
- <widget class="GtkButton" id="button5">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-add</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- <packing>
- <property name="position">2</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">3</property>
- <property name="top_attach">3</property>
- <property name="bottom_attach">4</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment5">
- <property name="visible">True</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="top_attach">3</property>
- <property name="bottom_attach">4</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label10">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="yalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Search:</property>
- </widget>
- <packing>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkEntry" id="entry2">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">3</property>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment8">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="left_padding">12</property>
- <child>
- <widget class="GtkScrolledWindow" id="scrolledwindow2">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <child>
- <widget class="GtkTreeView" id="treeview2">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="headers_clickable">True</property>
- </widget>
- </child>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">6</property>
- <property name="bottom_attach">7</property>
- <property name="y_options"></property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- <child internal-child="action_area">
- <widget class="GtkHButtonBox" id="dialog-action_area2">
- <property name="visible">True</property>
- <property name="layout_style">GTK_BUTTONBOX_END</property>
- <child>
- <widget class="GtkButton" id="button4">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-close</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="expand">False</property>
- <property name="pack_type">GTK_PACK_END</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- <widget class="GtkWindow" id="main_window">
- <child>
- <widget class="GtkVBox" id="main_window_vbox">
- <property name="visible">True</property>
- <child>
- <widget class="GtkToolbar" id="main_toolbar">
- <property name="visible">True</property>
- <child>
- <widget class="GtkToolButton" id="main_toolbutton_build">
- <property name="visible">True</property>
- <property name="label" translatable="yes">Build</property>
- <property name="stock_id">gtk-execute</property>
- </widget>
- <packing>
- <property name="expand">False</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="expand">False</property>
- </packing>
- </child>
- <child>
- <widget class="GtkVPaned" id="vpaned1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <child>
- <widget class="GtkScrolledWindow" id="results_scrolledwindow">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="resize">False</property>
- <property name="shrink">True</property>
- </packing>
- </child>
- <child>
- <widget class="GtkScrolledWindow" id="progress_scrolledwindow">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="resize">True</property>
- <property name="shrink">True</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
-</glade-interface>
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/runningbuild.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/runningbuild.py
deleted file mode 100644
index 16a955d2b..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/runningbuild.py
+++ /dev/null
@@ -1,551 +0,0 @@
-
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2008 Intel Corporation
-#
-# Authored by Rob Bradford <rob@linux.intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-import gobject
-import logging
-import time
-import urllib
-import urllib2
-import pango
-from bb.ui.crumbs.hobcolor import HobColors
-from bb.ui.crumbs.hobwidget import HobWarpCellRendererText, HobCellRendererPixbuf
-
-class RunningBuildModel (gtk.TreeStore):
- (COL_LOG, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_COLOR, COL_NUM_ACTIVE) = range(7)
-
- def __init__ (self):
- gtk.TreeStore.__init__ (self,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_INT)
-
- def failure_model_filter(self, model, it):
- color = model.get(it, self.COL_COLOR)[0]
- if not color:
- return False
- if color == HobColors.ERROR or color == HobColors.WARNING:
- return True
- return False
-
- def failure_model(self):
- model = self.filter_new()
- model.set_visible_func(self.failure_model_filter)
- return model
-
- def foreach_cell_func(self, model, path, iter, usr_data=None):
- if model.get_value(iter, self.COL_ICON) == "gtk-execute":
- model.set(iter, self.COL_ICON, "")
-
- def close_task_refresh(self):
- self.foreach(self.foreach_cell_func, None)
-
-class RunningBuild (gobject.GObject):
- __gsignals__ = {
- 'build-started' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'build-succeeded' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'build-failed' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'build-complete' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'build-aborted' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'task-started' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- (gobject.TYPE_PYOBJECT,)),
- 'log-error' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'log-warning' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'disk-full' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'no-provider' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- (gobject.TYPE_PYOBJECT,)),
- 'log' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- (gobject.TYPE_STRING, gobject.TYPE_PYOBJECT,)),
- }
- pids_to_task = {}
- tasks_to_iter = {}
-
- def __init__ (self, sequential=False):
- gobject.GObject.__init__ (self)
- self.model = RunningBuildModel()
- self.sequential = sequential
- self.buildaborted = False
-
- def reset (self):
- self.pids_to_task.clear()
- self.tasks_to_iter.clear()
- self.model.clear()
-
- def handle_event (self, event, pbar=None):
- # Handle an event from the event queue, this may result in updating
- # the model and thus the UI. Or it may be to tell us that the build
- # has finished successfully (or not, as the case may be.)
-
- parent = None
- pid = 0
- package = None
- task = None
-
- # If we have a pid attached to this message/event try and get the
- # (package, task) pair for it. If we get that then get the parent iter
- # for the message.
- if hasattr(event, 'pid'):
- pid = event.pid
- if hasattr(event, 'process'):
- pid = event.process
-
- if pid and pid in self.pids_to_task:
- (package, task) = self.pids_to_task[pid]
- parent = self.tasks_to_iter[(package, task)]
-
- if(isinstance(event, logging.LogRecord)):
- if event.taskpid == 0 or event.levelno > logging.INFO:
- self.emit("log", "handle", event)
- # FIXME: this is a hack! More info in Yocto #1433
- # http://bugzilla.pokylinux.org/show_bug.cgi?id=1433, temporarily
- # mask the error message as it's not informative for the user.
- if event.msg.startswith("Execution of event handler 'run_buildstats' failed"):
- return
-
- if (event.levelno < logging.INFO or
- event.msg.startswith("Running task")):
- return # don't add these to the list
-
- if event.levelno >= logging.ERROR:
- icon = "dialog-error"
- color = HobColors.ERROR
- self.emit("log-error")
- elif event.levelno >= logging.WARNING:
- icon = "dialog-warning"
- color = HobColors.WARNING
- self.emit("log-warning")
- else:
- icon = None
- color = HobColors.OK
-
- # if we know which package we belong to, we'll append onto its list.
- # otherwise, we'll jump to the top of the master list
- if self.sequential or not parent:
- tree_add = self.model.append
- else:
- tree_add = self.model.prepend
- tree_add(parent,
- (None,
- package,
- task,
- event.getMessage(),
- icon,
- color,
- 0))
-
- # if there are warnings while processing a package
- # (parent), mark the task with warning color;
- # in case there are errors, the updates will be
- # handled on TaskFailed.
- if color == HobColors.WARNING and parent:
- self.model.set(parent, self.model.COL_COLOR, color)
- if task: #then we have a parent (package), and update it's color
- self.model.set(self.tasks_to_iter[(package, None)], self.model.COL_COLOR, color)
-
- elif isinstance(event, bb.build.TaskStarted):
- (package, task) = (event._package, event._task)
-
- # Save out this PID.
- self.pids_to_task[pid] = (package, task)
-
- # Check if we already have this package in our model. If so then
- # that can be the parent for the task. Otherwise we create a new
- # top level for the package.
- if ((package, None) in self.tasks_to_iter):
- parent = self.tasks_to_iter[(package, None)]
- else:
- if self.sequential:
- add = self.model.append
- else:
- add = self.model.prepend
- parent = add(None, (None,
- package,
- None,
- "Package: %s" % (package),
- None,
- HobColors.OK,
- 0))
- self.tasks_to_iter[(package, None)] = parent
-
- # Because this parent package now has an active child mark it as
- # such.
- self.model.set(parent, self.model.COL_ICON, "gtk-execute")
- parent_color = self.model.get(parent, self.model.COL_COLOR)[0]
- if parent_color != HobColors.ERROR and parent_color != HobColors.WARNING:
- self.model.set(parent, self.model.COL_COLOR, HobColors.RUNNING)
-
- # Add an entry in the model for this task
- i = self.model.append (parent, (None,
- package,
- task,
- "Task: %s" % (task),
- "gtk-execute",
- HobColors.RUNNING,
- 0))
-
- # update the parent's active task count
- num_active = self.model.get(parent, self.model.COL_NUM_ACTIVE)[0] + 1
- self.model.set(parent, self.model.COL_NUM_ACTIVE, num_active)
-
- # Save out the iter so that we can find it when we have a message
- # that we need to attach to a task.
- self.tasks_to_iter[(package, task)] = i
-
- elif isinstance(event, bb.build.TaskBase):
- self.emit("log", "info", event._message)
- current = self.tasks_to_iter[(package, task)]
- parent = self.tasks_to_iter[(package, None)]
-
- # remove this task from the parent's active count
- num_active = self.model.get(parent, self.model.COL_NUM_ACTIVE)[0] - 1
- self.model.set(parent, self.model.COL_NUM_ACTIVE, num_active)
-
- if isinstance(event, bb.build.TaskFailed):
- # Mark the task and parent as failed
- icon = "dialog-error"
- color = HobColors.ERROR
-
- logfile = event.logfile
- if logfile and os.path.exists(logfile):
- with open(logfile) as f:
- logdata = f.read()
- self.model.append(current, ('pastebin', None, None, logdata, 'gtk-error', HobColors.OK, 0))
-
- for i in (current, parent):
- self.model.set(i, self.model.COL_ICON, icon,
- self.model.COL_COLOR, color)
- else:
- # Mark the parent package and the task as inactive,
- # but make sure to preserve error, warnings and active
- # states
- parent_color = self.model.get(parent, self.model.COL_COLOR)[0]
- task_color = self.model.get(current, self.model.COL_COLOR)[0]
-
- # Mark the task as inactive
- self.model.set(current, self.model.COL_ICON, None)
- if task_color != HobColors.ERROR:
- if task_color == HobColors.WARNING:
- self.model.set(current, self.model.COL_ICON, 'dialog-warning')
- else:
- self.model.set(current, self.model.COL_COLOR, HobColors.OK)
-
- # Mark the parent as inactive
- if parent_color != HobColors.ERROR:
- if parent_color == HobColors.WARNING:
- self.model.set(parent, self.model.COL_ICON, "dialog-warning")
- else:
- self.model.set(parent, self.model.COL_ICON, None)
- if num_active == 0:
- self.model.set(parent, self.model.COL_COLOR, HobColors.OK)
-
- # Clear the iters and the pids since when the task goes away the
- # pid will no longer be used for messages
- del self.tasks_to_iter[(package, task)]
- del self.pids_to_task[pid]
-
- elif isinstance(event, bb.event.BuildStarted):
-
- self.emit("build-started")
- self.model.prepend(None, (None,
- None,
- None,
- "Build Started (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
- None,
- HobColors.OK,
- 0))
- if pbar:
- pbar.update(0, self.progress_total)
- pbar.set_title(bb.event.getName(event))
-
- elif isinstance(event, bb.event.BuildCompleted):
- failures = int (event._failures)
- self.model.prepend(None, (None,
- None,
- None,
- "Build Completed (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
- None,
- HobColors.OK,
- 0))
-
- # Emit the appropriate signal depending on the number of failures
- if self.buildaborted:
- self.emit ("build-aborted")
- self.buildaborted = False
- elif (failures >= 1):
- self.emit ("build-failed")
- else:
- self.emit ("build-succeeded")
- # Emit a generic "build-complete" signal for things wishing to
- # handle when the build is finished
- self.emit("build-complete")
- # reset the all cell's icon indicator
- self.model.close_task_refresh()
- if pbar:
- pbar.set_text(event.msg)
-
- elif isinstance(event, bb.event.DiskFull):
- self.buildaborted = True
- self.emit("disk-full")
-
- elif isinstance(event, bb.command.CommandFailed):
- self.emit("log", "error", "Command execution failed: %s" % (event.error))
- if event.error.startswith("Exited with"):
- # If the command fails with an exit code we're done, emit the
- # generic signal for the UI to notify the user
- self.emit("build-complete")
- # reset the all cell's icon indicator
- self.model.close_task_refresh()
-
- elif isinstance(event, bb.event.CacheLoadStarted) and pbar:
- pbar.set_title("Loading cache")
- self.progress_total = event.total
- pbar.update(0, self.progress_total)
- elif isinstance(event, bb.event.CacheLoadProgress) and pbar:
- pbar.update(event.current, self.progress_total)
- elif isinstance(event, bb.event.CacheLoadCompleted) and pbar:
- pbar.update(self.progress_total, self.progress_total)
- pbar.hide()
- elif isinstance(event, bb.event.ParseStarted) and pbar:
- if event.total == 0:
- return
- pbar.set_title("Processing recipes")
- self.progress_total = event.total
- pbar.update(0, self.progress_total)
- elif isinstance(event, bb.event.ParseProgress) and pbar:
- pbar.update(event.current, self.progress_total)
- elif isinstance(event, bb.event.ParseCompleted) and pbar:
- pbar.hide()
- #using runqueue events as many as possible to update the progress bar
- elif isinstance(event, bb.runqueue.runQueueTaskFailed):
- self.emit("log", "error", "Task %s (%s) failed with exit code '%s'" % (event.taskid, event.taskstring, event.exitcode))
- elif isinstance(event, bb.runqueue.sceneQueueTaskFailed):
- self.emit("log", "warn", "Setscene task %s (%s) failed with exit code '%s' - real task will be run instead" \
- % (event.taskid, event.taskstring, event.exitcode))
- elif isinstance(event, (bb.runqueue.runQueueTaskStarted, bb.runqueue.sceneQueueTaskStarted)):
- if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
- self.emit("log", "info", "Running setscene task %d of %d (%s)" % \
- (event.stats.completed + event.stats.active + event.stats.failed + 1,
- event.stats.total, event.taskstring))
- else:
- if event.noexec:
- tasktype = 'noexec task'
- else:
- tasktype = 'task'
- self.emit("log", "info", "Running %s %s of %s (ID: %s, %s)" % \
- (tasktype, event.stats.completed + event.stats.active + event.stats.failed + 1,
- event.stats.total, event.taskid, event.taskstring))
- message = {}
- message["eventname"] = bb.event.getName(event)
- num_of_completed = event.stats.completed + event.stats.failed
- message["current"] = num_of_completed
- message["total"] = event.stats.total
- message["title"] = ""
- message["task"] = event.taskstring
- self.emit("task-started", message)
- elif isinstance(event, bb.event.MultipleProviders):
- self.emit("log", "info", "multiple providers are available for %s%s (%s)" \
- % (event._is_runtime and "runtime " or "", event._item, ", ".join(event._candidates)))
- self.emit("log", "info", "consider defining a PREFERRED_PROVIDER entry to match %s" % (event._item))
- elif isinstance(event, bb.event.NoProvider):
- msg = ""
- if event._runtime:
- r = "R"
- else:
- r = ""
-
- extra = ''
- if not event._reasons:
- if event._close_matches:
- extra = ". Close matches:\n %s" % '\n '.join(event._close_matches)
-
- if event._dependees:
- msg = "Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)%s\n" % (r, event._item, ", ".join(event._dependees), r, extra)
- else:
- msg = "Nothing %sPROVIDES '%s'%s\n" % (r, event._item, extra)
- if event._reasons:
- for reason in event._reasons:
- msg += ("%s\n" % reason)
- self.emit("no-provider", msg)
- self.emit("log", "error", msg)
- elif isinstance(event, bb.event.LogExecTTY):
- icon = "dialog-warning"
- color = HobColors.WARNING
- if self.sequential or not parent:
- tree_add = self.model.append
- else:
- tree_add = self.model.prepend
- tree_add(parent,
- (None,
- package,
- task,
- event.msg,
- icon,
- color,
- 0))
- else:
- if not isinstance(event, (bb.event.BuildBase,
- bb.event.StampUpdate,
- bb.event.ConfigParsed,
- bb.event.RecipeParsed,
- bb.event.RecipePreFinalise,
- bb.runqueue.runQueueEvent,
- bb.runqueue.runQueueExitWait,
- bb.event.OperationStarted,
- bb.event.OperationCompleted,
- bb.event.OperationProgress)):
- self.emit("log", "error", "Unknown event: %s" % (event.error if hasattr(event, 'error') else 'error'))
-
- return
-
-
-def do_pastebin(text):
- url = 'http://pastebin.com/api_public.php'
- params = {'paste_code': text, 'paste_format': 'text'}
-
- req = urllib2.Request(url, urllib.urlencode(params))
- response = urllib2.urlopen(req)
- paste_url = response.read()
-
- return paste_url
-
-
-class RunningBuildTreeView (gtk.TreeView):
- __gsignals__ = {
- "button_press_event" : "override"
- }
- def __init__ (self, readonly=False, hob=False):
- gtk.TreeView.__init__ (self)
- self.readonly = readonly
-
- # The icon that indicates whether we're building or failed.
- # add 'hob' flag because there has not only hob to share this code
- if hob:
- renderer = HobCellRendererPixbuf ()
- else:
- renderer = gtk.CellRendererPixbuf()
- col = gtk.TreeViewColumn ("Status", renderer)
- col.add_attribute (renderer, "icon-name", 4)
- self.append_column (col)
-
- # The message of the build.
- # add 'hob' flag because there has not only hob to share this code
- if hob:
- self.message_renderer = HobWarpCellRendererText (col_number=1)
- else:
- self.message_renderer = gtk.CellRendererText ()
- self.message_column = gtk.TreeViewColumn ("Message", self.message_renderer, text=3)
- self.message_column.add_attribute(self.message_renderer, 'background', 5)
- self.message_renderer.set_property('editable', (not self.readonly))
- self.append_column (self.message_column)
-
- def do_button_press_event(self, event):
- gtk.TreeView.do_button_press_event(self, event)
-
- if event.button == 3:
- selection = super(RunningBuildTreeView, self).get_selection()
- (model, it) = selection.get_selected()
- if it is not None:
- can_paste = model.get(it, model.COL_LOG)[0]
- if can_paste == 'pastebin':
- # build a simple menu with a pastebin option
- menu = gtk.Menu()
- menuitem = gtk.MenuItem("Copy")
- menu.append(menuitem)
- menuitem.connect("activate", self.clipboard_handler, (model, it))
- menuitem.show()
- menuitem = gtk.MenuItem("Send log to pastebin")
- menu.append(menuitem)
- menuitem.connect("activate", self.pastebin_handler, (model, it))
- menuitem.show()
- menu.show()
- menu.popup(None, None, None, event.button, event.time)
-
- def _add_to_clipboard(self, clipping):
- """
- Add the contents of clipping to the system clipboard.
- """
- clipboard = gtk.clipboard_get()
- clipboard.set_text(clipping)
- clipboard.store()
-
- def pastebin_handler(self, widget, data):
- """
- Send the log data to pastebin, then add the new paste url to the
- clipboard.
- """
- (model, it) = data
- paste_url = do_pastebin(model.get(it, model.COL_MESSAGE)[0])
-
- # @todo Provide visual feedback to the user that it is done and that
- # it worked.
- print paste_url
-
- self._add_to_clipboard(paste_url)
-
- def clipboard_handler(self, widget, data):
- """
- """
- (model, it) = data
- message = model.get(it, model.COL_MESSAGE)[0]
-
- self._add_to_clipboard(message)
-
-class BuildFailureTreeView(gtk.TreeView):
-
- def __init__ (self):
- gtk.TreeView.__init__(self)
- self.set_rules_hint(False)
- self.set_headers_visible(False)
- self.get_selection().set_mode(gtk.SELECTION_SINGLE)
-
- # The icon that indicates whether we're building or failed.
- renderer = HobCellRendererPixbuf ()
- col = gtk.TreeViewColumn ("Status", renderer)
- col.add_attribute (renderer, "icon-name", RunningBuildModel.COL_ICON)
- self.append_column (col)
-
- # The message of the build.
- self.message_renderer = HobWarpCellRendererText (col_number=1)
- self.message_column = gtk.TreeViewColumn ("Message", self.message_renderer, text=RunningBuildModel.COL_MESSAGE, background=RunningBuildModel.COL_COLOR)
- self.append_column (self.message_column)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/utils.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/utils.py
deleted file mode 100644
index 939864fa6..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/crumbs/utils.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#
-# BitBake UI Utils
-#
-# Copyright (C) 2012 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-# This utility method looks for xterm or vte and return the
-# frist to exist, currently we are keeping this simple, but
-# we will likely move the oe.terminal implementation into
-# bitbake which will allow more flexibility.
-
-import os
-import bb
-
-def which_terminal():
- term = bb.utils.which(os.environ["PATH"], "xterm")
- if term:
- return term + " -e "
- term = bb.utils.which(os.environ["PATH"], "vte")
- if term:
- return term + " -c "
- return None
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/depexp.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/depexp.py
index 240aafc3e..d879e04c0 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/depexp.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/depexp.py
@@ -18,14 +18,15 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sys
-import gobject
-import gtk
-import Queue
+import gi
+gi.require_version('Gtk', '3.0')
+from gi.repository import Gtk, Gdk, GObject
+from multiprocessing import Queue
import threading
-import xmlrpclib
+from xmlrpc import client
+import time
import bb
import bb.event
-from bb.ui.crumbs.progressbar import HobProgressBar
# Package Model
(COL_PKG_NAME) = (0)
@@ -35,19 +36,19 @@ from bb.ui.crumbs.progressbar import HobProgressBar
(COL_DEP_TYPE, COL_DEP_PARENT, COL_DEP_PACKAGE) = (0, 1, 2)
-class PackageDepView(gtk.TreeView):
+class PackageDepView(Gtk.TreeView):
def __init__(self, model, dep_type, label):
- gtk.TreeView.__init__(self)
+ Gtk.TreeView.__init__(self)
self.current = None
self.dep_type = dep_type
self.filter_model = model.filter_new()
- self.filter_model.set_visible_func(self._filter)
+ self.filter_model.set_visible_func(self._filter, data=None)
self.set_model(self.filter_model)
- #self.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
- self.append_column(gtk.TreeViewColumn(label, gtk.CellRendererText(), text=COL_DEP_PACKAGE))
+ self.append_column(Gtk.TreeViewColumn(label, Gtk.CellRendererText(), text=COL_DEP_PACKAGE))
- def _filter(self, model, iter):
- (this_type, package) = model.get(iter, COL_DEP_TYPE, COL_DEP_PARENT)
+ def _filter(self, model, iter, data):
+ this_type = model[iter][COL_DEP_TYPE]
+ package = model[iter][COL_DEP_PARENT]
if this_type != self.dep_type: return False
return package == self.current
@@ -56,17 +57,17 @@ class PackageDepView(gtk.TreeView):
self.filter_model.refilter()
-class PackageReverseDepView(gtk.TreeView):
+class PackageReverseDepView(Gtk.TreeView):
def __init__(self, model, label):
- gtk.TreeView.__init__(self)
+ Gtk.TreeView.__init__(self)
self.current = None
self.filter_model = model.filter_new()
self.filter_model.set_visible_func(self._filter)
self.set_model(self.filter_model)
- self.append_column(gtk.TreeViewColumn(label, gtk.CellRendererText(), text=COL_DEP_PARENT))
+ self.append_column(Gtk.TreeViewColumn(label, Gtk.CellRendererText(), text=COL_DEP_PARENT))
- def _filter(self, model, iter):
- package = model.get_value(iter, COL_DEP_PACKAGE)
+ def _filter(self, model, iter, data):
+ package = model[iter][COL_DEP_PACKAGE]
return package == self.current
def set_current_package(self, package):
@@ -74,50 +75,50 @@ class PackageReverseDepView(gtk.TreeView):
self.filter_model.refilter()
-class DepExplorer(gtk.Window):
+class DepExplorer(Gtk.Window):
def __init__(self):
- gtk.Window.__init__(self)
+ Gtk.Window.__init__(self)
self.set_title("Dependency Explorer")
self.set_default_size(500, 500)
- self.connect("delete-event", gtk.main_quit)
+ self.connect("delete-event", Gtk.main_quit)
# Create the data models
- self.pkg_model = gtk.ListStore(gobject.TYPE_STRING)
- self.pkg_model.set_sort_column_id(COL_PKG_NAME, gtk.SORT_ASCENDING)
- self.depends_model = gtk.ListStore(gobject.TYPE_INT, gobject.TYPE_STRING, gobject.TYPE_STRING)
- self.depends_model.set_sort_column_id(COL_DEP_PACKAGE, gtk.SORT_ASCENDING)
+ self.pkg_model = Gtk.ListStore(GObject.TYPE_STRING)
+ self.pkg_model.set_sort_column_id(COL_PKG_NAME, Gtk.SortType.ASCENDING)
+ self.depends_model = Gtk.ListStore(GObject.TYPE_INT, GObject.TYPE_STRING, GObject.TYPE_STRING)
+ self.depends_model.set_sort_column_id(COL_DEP_PACKAGE, Gtk.SortType.ASCENDING)
- pane = gtk.HPaned()
+ pane = Gtk.HPaned()
pane.set_position(250)
self.add(pane)
# The master list of packages
- scrolled = gtk.ScrolledWindow()
- scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- scrolled.set_shadow_type(gtk.SHADOW_IN)
+ scrolled = Gtk.ScrolledWindow()
+ scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
+ scrolled.set_shadow_type(Gtk.ShadowType.IN)
- self.pkg_treeview = gtk.TreeView(self.pkg_model)
+ self.pkg_treeview = Gtk.TreeView(self.pkg_model)
self.pkg_treeview.get_selection().connect("changed", self.on_cursor_changed)
- column = gtk.TreeViewColumn("Package", gtk.CellRendererText(), text=COL_PKG_NAME)
+ column = Gtk.TreeViewColumn("Package", Gtk.CellRendererText(), text=COL_PKG_NAME)
self.pkg_treeview.append_column(column)
pane.add1(scrolled)
scrolled.add(self.pkg_treeview)
- box = gtk.VBox(homogeneous=True, spacing=4)
+ box = Gtk.VBox(homogeneous=True, spacing=4)
# Runtime Depends
- scrolled = gtk.ScrolledWindow()
- scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- scrolled.set_shadow_type(gtk.SHADOW_IN)
+ scrolled = Gtk.ScrolledWindow()
+ scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
+ scrolled.set_shadow_type(Gtk.ShadowType.IN)
self.rdep_treeview = PackageDepView(self.depends_model, TYPE_RDEP, "Runtime Depends")
self.rdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
scrolled.add(self.rdep_treeview)
box.add(scrolled)
# Build Depends
- scrolled = gtk.ScrolledWindow()
- scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- scrolled.set_shadow_type(gtk.SHADOW_IN)
+ scrolled = Gtk.ScrolledWindow()
+ scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
+ scrolled.set_shadow_type(Gtk.ShadowType.IN)
self.dep_treeview = PackageDepView(self.depends_model, TYPE_DEP, "Build Depends")
self.dep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
scrolled.add(self.dep_treeview)
@@ -125,9 +126,9 @@ class DepExplorer(gtk.Window):
pane.add2(box)
# Reverse Depends
- scrolled = gtk.ScrolledWindow()
- scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- scrolled.set_shadow_type(gtk.SHADOW_IN)
+ scrolled = Gtk.ScrolledWindow()
+ scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
+ scrolled.set_shadow_type(Gtk.ShadowType.IN)
self.revdep_treeview = PackageReverseDepView(self.depends_model, "Reverse Depends")
self.revdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PARENT)
scrolled.add(self.revdep_treeview)
@@ -183,15 +184,23 @@ class gtkthread(threading.Thread):
threading.Thread.__init__(self)
self.setDaemon(True)
self.shutdown = shutdown
+ if not Gtk.init_check()[0]:
+ sys.stderr.write("Gtk+ init failed. Make sure DISPLAY variable is set.\n")
+ gtkthread.quit.set()
def run(self):
- gobject.threads_init()
- gtk.gdk.threads_init()
- gtk.main()
+ GObject.threads_init()
+ Gdk.threads_init()
+ Gtk.main()
gtkthread.quit.set()
def main(server, eventHandler, params):
+ shutdown = 0
+
+ gtkgui = gtkthread(shutdown)
+ gtkgui.start()
+
try:
params.updateFromServer(server)
cmdline = params.parseActions()
@@ -212,31 +221,24 @@ def main(server, eventHandler, params):
elif ret != True:
print("Error running command '%s': returned %s" % (cmdline, ret))
return 1
- except xmlrpclib.Fault as x:
+ except client.Fault as x:
print("XMLRPC Fault getting commandline:\n %s" % x)
return
- try:
- gtk.init_check()
- except RuntimeError:
- sys.stderr.write("Please set DISPLAY variable before running this command \n")
+ if gtkthread.quit.isSet():
return
- shutdown = 0
-
- gtkgui = gtkthread(shutdown)
- gtkgui.start()
-
- gtk.gdk.threads_enter()
+ Gdk.threads_enter()
dep = DepExplorer()
- bardialog = gtk.Dialog(parent=dep,
- flags=gtk.DIALOG_MODAL|gtk.DIALOG_DESTROY_WITH_PARENT)
+ bardialog = Gtk.Dialog(parent=dep,
+ flags=Gtk.DialogFlags.MODAL|Gtk.DialogFlags.DESTROY_WITH_PARENT)
bardialog.set_default_size(400, 50)
- pbar = HobProgressBar()
- bardialog.vbox.pack_start(pbar)
+ box = bardialog.get_content_area()
+ pbar = Gtk.ProgressBar()
+ box.pack_start(pbar, True, True, 0)
bardialog.show_all()
- bardialog.connect("delete-event", gtk.main_quit)
- gtk.gdk.threads_leave()
+ bardialog.connect("delete-event", Gtk.main_quit)
+ Gdk.threads_leave()
progress_total = 0
while True:
@@ -253,53 +255,76 @@ def main(server, eventHandler, params):
if isinstance(event, bb.event.CacheLoadStarted):
progress_total = event.total
- gtk.gdk.threads_enter()
+ Gdk.threads_enter()
bardialog.set_title("Loading Cache")
- pbar.update(0)
- gtk.gdk.threads_leave()
+ pbar.set_fraction(0.0)
+ Gdk.threads_leave()
if isinstance(event, bb.event.CacheLoadProgress):
x = event.current
- gtk.gdk.threads_enter()
- pbar.update(x * 1.0 / progress_total)
- pbar.set_title('')
- gtk.gdk.threads_leave()
+ Gdk.threads_enter()
+ pbar.set_fraction(x * 1.0 / progress_total)
+ Gdk.threads_leave()
continue
if isinstance(event, bb.event.CacheLoadCompleted):
- bardialog.hide()
continue
if isinstance(event, bb.event.ParseStarted):
progress_total = event.total
if progress_total == 0:
continue
- gtk.gdk.threads_enter()
- pbar.update(0)
+ Gdk.threads_enter()
+ pbar.set_fraction(0.0)
bardialog.set_title("Processing recipes")
-
- gtk.gdk.threads_leave()
+ Gdk.threads_leave()
if isinstance(event, bb.event.ParseProgress):
x = event.current
- gtk.gdk.threads_enter()
- pbar.update(x * 1.0 / progress_total)
- pbar.set_title('')
- gtk.gdk.threads_leave()
+ Gdk.threads_enter()
+ pbar.set_fraction(x * 1.0 / progress_total)
+ Gdk.threads_leave()
continue
if isinstance(event, bb.event.ParseCompleted):
- bardialog.hide()
+ Gdk.threads_enter()
+ bardialog.set_title("Generating dependency tree")
+ Gdk.threads_leave()
continue
if isinstance(event, bb.event.DepTreeGenerated):
- gtk.gdk.threads_enter()
+ Gdk.threads_enter()
+ bardialog.hide()
dep.parse(event._depgraph)
- gtk.gdk.threads_leave()
+ Gdk.threads_leave()
if isinstance(event, bb.command.CommandCompleted):
continue
+ if isinstance(event, bb.event.NoProvider):
+ if event._runtime:
+ r = "R"
+ else:
+ r = ""
+
+ extra = ''
+ if not event._reasons:
+ if event._close_matches:
+ extra = ". Close matches:\n %s" % '\n '.join(event._close_matches)
+
+ if event._dependees:
+ print("Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)%s" % r, event._item, ", ".join(event._dependees), r, extra)
+ else:
+ print("Nothing %sPROVIDES '%s'%s" % (r, event._item, extra))
+ if event._reasons:
+ for reason in event._reasons:
+ print(reason)
+
+ _, error = server.runCommand(["stateShutdown"])
+ if error:
+ print('Unable to cleanly shutdown: %s' % error)
+ break
+
if isinstance(event, bb.command.CommandFailed):
print("Command execution failed: %s" % event.error)
return event.exitcode
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/goggle.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/goggle.py
deleted file mode 100644
index f4ee7b41a..000000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/goggle.py
+++ /dev/null
@@ -1,121 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2008 Intel Corporation
-#
-# Authored by Rob Bradford <rob@linux.intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gobject
-import gtk
-import xmlrpclib
-from bb.ui.crumbs.runningbuild import RunningBuildTreeView, RunningBuild
-from bb.ui.crumbs.progress import ProgressBar
-
-import Queue
-
-
-def event_handle_idle_func (eventHandler, build, pbar):
-
- # Consume as many messages as we can in the time available to us
- event = eventHandler.getEvent()
- while event:
- build.handle_event (event, pbar)
- event = eventHandler.getEvent()
-
- return True
-
-def scroll_tv_cb (model, path, iter, view):
- view.scroll_to_cell (path)
-
-
-# @todo hook these into the GUI so the user has feedback...
-def running_build_failed_cb (running_build):
- pass
-
-
-def running_build_succeeded_cb (running_build):
- pass
-
-
-class MainWindow (gtk.Window):
- def __init__ (self):
- gtk.Window.__init__ (self, gtk.WINDOW_TOPLEVEL)
-
- # Setup tree view and the scrolled window
- scrolled_window = gtk.ScrolledWindow ()
- self.add (scrolled_window)
- self.cur_build_tv = RunningBuildTreeView()
- self.connect("delete-event", gtk.main_quit)
- self.set_default_size(640, 480)
- scrolled_window.add (self.cur_build_tv)
-
-
-def main (server, eventHandler, params):
- gobject.threads_init()
- gtk.gdk.threads_init()
-
- window = MainWindow ()
- window.show_all ()
- pbar = ProgressBar(window)
- pbar.connect("delete-event", gtk.main_quit)
-
- # Create the object for the current build
- running_build = RunningBuild ()
- window.cur_build_tv.set_model (running_build.model)
- running_build.model.connect("row-inserted", scroll_tv_cb, window.cur_build_tv)
- running_build.connect ("build-succeeded", running_build_succeeded_cb)
- running_build.connect ("build-failed", running_build_failed_cb)
-
- try:
- params.updateFromServer(server)
- cmdline = params.parseActions()
- if not cmdline:
- print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
- return 1
- if 'msg' in cmdline and cmdline['msg']:
- logger.error(cmdline['msg'])
- return 1
- cmdline = cmdline['action']
- ret, error = server.runCommand(cmdline)
- if error:
- print("Error running command '%s': %s" % (cmdline, error))
- return 1
- elif ret != True:
- print("Error running command '%s': returned %s" % (cmdline, ret))
- return 1
- except xmlrpclib.Fault as x:
- print("XMLRPC Fault getting commandline:\n %s" % x)
- return 1
-
- # Use a timeout function for probing the event queue to find out if we
- # have a message waiting for us.
- gobject.timeout_add (100,
- event_handle_idle_func,
- eventHandler,
- running_build,
- pbar)
-
- try:
- gtk.main()
- except EnvironmentError as ioerror:
- # ignore interrupted io
- if ioerror.args[0] == 4:
- pass
- except KeyboardInterrupt:
- pass
- finally:
- server.runCommand(["stateForceShutdown"])
-
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/knotty.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/knotty.py
index 268562770..948f52769 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/knotty.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/knotty.py
@@ -22,7 +22,7 @@ from __future__ import division
import os
import sys
-import xmlrpclib
+import xmlrpc.client as xmlrpclib
import logging
import progressbar
import signal
@@ -32,6 +32,7 @@ import fcntl
import struct
import copy
import atexit
+
from bb.ui import uihelper
featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS]
@@ -40,34 +41,59 @@ logger = logging.getLogger("BitBake")
interactive = sys.stdout.isatty()
class BBProgress(progressbar.ProgressBar):
- def __init__(self, msg, maxval):
+ def __init__(self, msg, maxval, widgets=None, extrapos=-1, resize_handler=None):
self.msg = msg
- widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), ' ',
- progressbar.ETA()]
-
- try:
+ self.extrapos = extrapos
+ if not widgets:
+ widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), ' ',
+ progressbar.ETA()]
+ self.extrapos = 4
+
+ if resize_handler:
+ self._resize_default = resize_handler
+ else:
self._resize_default = signal.getsignal(signal.SIGWINCH)
- except:
- self._resize_default = None
progressbar.ProgressBar.__init__(self, maxval, [self.msg + ": "] + widgets, fd=sys.stdout)
- def _handle_resize(self, signum, frame):
+ def _handle_resize(self, signum=None, frame=None):
progressbar.ProgressBar._handle_resize(self, signum, frame)
if self._resize_default:
self._resize_default(signum, frame)
+
def finish(self):
progressbar.ProgressBar.finish(self)
if self._resize_default:
signal.signal(signal.SIGWINCH, self._resize_default)
+ def setmessage(self, msg):
+ self.msg = msg
+ self.widgets[0] = msg
+
+ def setextra(self, extra):
+ if self.extrapos > -1:
+ if extra:
+ extrastr = str(extra)
+ if extrastr[0] != ' ':
+ extrastr = ' ' + extrastr
+ if extrastr[-1] != ' ':
+ extrastr += ' '
+ else:
+ extrastr = ' '
+ self.widgets[self.extrapos] = extrastr
+
+ def _need_update(self):
+ # We always want the bar to print when update() is called
+ return True
+
class NonInteractiveProgress(object):
fobj = sys.stdout
def __init__(self, msg, maxval):
self.msg = msg
self.maxval = maxval
+ self.finished = False
- def start(self):
+ def start(self, update=True):
self.fobj.write("%s..." % self.msg)
self.fobj.flush()
return self
@@ -76,8 +102,11 @@ class NonInteractiveProgress(object):
pass
def finish(self):
+ if self.finished:
+ return
self.fobj.write("done.\n")
self.fobj.flush()
+ self.finished = True
def new_progress(msg, maxval):
if interactive:
@@ -134,7 +163,7 @@ class TerminalFilter(object):
cr = (25, 80)
return cr
- def __init__(self, main, helper, console, errconsole, format):
+ def __init__(self, main, helper, console, errconsole, format, quiet):
self.main = main
self.helper = helper
self.cuu = None
@@ -142,6 +171,8 @@ class TerminalFilter(object):
self.interactive = sys.stdout.isatty()
self.footer_present = False
self.lastpids = []
+ self.lasttime = None
+ self.quiet = quiet
if not self.interactive:
return
@@ -181,11 +212,14 @@ class TerminalFilter(object):
console.addFilter(InteractConsoleLogFilter(self, format))
errconsole.addFilter(InteractConsoleLogFilter(self, format))
+ self.main_progress = None
+
def clearFooter(self):
if self.footer_present:
lines = self.footer_present
- sys.stdout.write(self.curses.tparm(self.cuu, lines))
- sys.stdout.write(self.curses.tparm(self.ed))
+ sys.stdout.buffer.write(self.curses.tparm(self.cuu, lines))
+ sys.stdout.buffer.write(self.curses.tparm(self.ed))
+ sys.stdout.flush()
self.footer_present = False
def updateFooter(self):
@@ -194,28 +228,81 @@ class TerminalFilter(object):
activetasks = self.helper.running_tasks
failedtasks = self.helper.failed_tasks
runningpids = self.helper.running_pids
- if self.footer_present and (self.lastcount == self.helper.tasknumber_current) and (self.lastpids == runningpids):
+ currenttime = time.time()
+ if not self.lasttime or (currenttime - self.lasttime > 5):
+ self.helper.needUpdate = True
+ self.lasttime = currenttime
+ if self.footer_present and not self.helper.needUpdate:
return
+ self.helper.needUpdate = False
if self.footer_present:
self.clearFooter()
if (not self.helper.tasknumber_total or self.helper.tasknumber_current == self.helper.tasknumber_total) and not len(activetasks):
return
tasks = []
for t in runningpids:
- tasks.append("%s (pid %s)" % (activetasks[t]["title"], t))
+ progress = activetasks[t].get("progress", None)
+ if progress is not None:
+ pbar = activetasks[t].get("progressbar", None)
+ rate = activetasks[t].get("rate", None)
+ start_time = activetasks[t].get("starttime", None)
+ if not pbar or pbar.bouncing != (progress < 0):
+ if progress < 0:
+ pbar = BBProgress("0: %s (pid %s) " % (activetasks[t]["title"], t), 100, widgets=[progressbar.BouncingSlider(), ''], extrapos=2, resize_handler=self.sigwinch_handle)
+ pbar.bouncing = True
+ else:
+ pbar = BBProgress("0: %s (pid %s) " % (activetasks[t]["title"], t), 100, widgets=[progressbar.Percentage(), ' ', progressbar.Bar(), ''], extrapos=4, resize_handler=self.sigwinch_handle)
+ pbar.bouncing = False
+ activetasks[t]["progressbar"] = pbar
+ tasks.append((pbar, progress, rate, start_time))
+ else:
+ start_time = activetasks[t].get("starttime", None)
+ if start_time:
+ tasks.append("%s - %ds (pid %s)" % (activetasks[t]["title"], currenttime - start_time, t))
+ else:
+ tasks.append("%s (pid %s)" % (activetasks[t]["title"], t))
if self.main.shutdown:
content = "Waiting for %s running tasks to finish:" % len(activetasks)
- elif not len(activetasks):
- content = "No currently running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
+ print(content)
else:
- content = "Currently %s running tasks (%s of %s):" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total)
- print(content)
+ if self.quiet:
+ content = "Running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
+ elif not len(activetasks):
+ content = "No currently running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
+ else:
+ content = "Currently %2s running tasks (%s of %s)" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total)
+ maxtask = self.helper.tasknumber_total
+ if not self.main_progress or self.main_progress.maxval != maxtask:
+ widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar()]
+ self.main_progress = BBProgress("Running tasks", maxtask, widgets=widgets, resize_handler=self.sigwinch_handle)
+ self.main_progress.start(False)
+ self.main_progress.setmessage(content)
+ progress = self.helper.tasknumber_current - 1
+ if progress < 0:
+ progress = 0
+ content = self.main_progress.update(progress)
+ print('')
lines = 1 + int(len(content) / (self.columns + 1))
- for tasknum, task in enumerate(tasks[:(self.rows - 2)]):
- content = "%s: %s" % (tasknum, task)
- print(content)
- lines = lines + 1 + int(len(content) / (self.columns + 1))
+ if not self.quiet:
+ for tasknum, task in enumerate(tasks[:(self.rows - 2)]):
+ if isinstance(task, tuple):
+ pbar, progress, rate, start_time = task
+ if not pbar.start_time:
+ pbar.start(False)
+ if start_time:
+ pbar.start_time = start_time
+ pbar.setmessage('%s:%s' % (tasknum, pbar.msg.split(':', 1)[1]))
+ if progress > -1:
+ pbar.setextra(rate)
+ content = pbar.update(progress)
+ else:
+ content = pbar.update(1)
+ print('')
+ else:
+ content = "%s: %s" % (tasknum, task)
+ print(content)
+ lines = lines + 1 + int(len(content) / (self.columns + 1))
self.footer_present = lines
self.lastpids = runningpids[:]
self.lastcount = self.helper.tasknumber_current
@@ -248,7 +335,8 @@ _evt_list = [ "bb.runqueue.runQueueExitWait", "bb.event.LogExecTTY", "logging.Lo
"bb.command.CommandExit", "bb.command.CommandCompleted", "bb.cooker.CookerExit",
"bb.event.MultipleProviders", "bb.event.NoProvider", "bb.runqueue.sceneQueueTaskStarted",
"bb.runqueue.runQueueTaskStarted", "bb.runqueue.runQueueTaskFailed", "bb.runqueue.sceneQueueTaskFailed",
- "bb.event.BuildBase", "bb.build.TaskStarted", "bb.build.TaskSucceeded", "bb.build.TaskFailedSilent"]
+ "bb.event.BuildBase", "bb.build.TaskStarted", "bb.build.TaskSucceeded", "bb.build.TaskFailedSilent",
+ "bb.build.TaskProgress", "bb.event.ProcessStarted", "bb.event.ProcessProgress", "bb.event.ProcessFinished"]
def main(server, eventHandler, params, tf = TerminalFilter):
@@ -265,7 +353,10 @@ def main(server, eventHandler, params, tf = TerminalFilter):
errconsole = logging.StreamHandler(sys.stderr)
format_str = "%(levelname)s: %(message)s"
format = bb.msg.BBLogFormatter(format_str)
- bb.msg.addDefaultlogFilter(console, bb.msg.BBLogFilterStdOut)
+ if params.options.quiet:
+ bb.msg.addDefaultlogFilter(console, bb.msg.BBLogFilterStdOut, bb.msg.BBLogFormatter.WARNING)
+ else:
+ bb.msg.addDefaultlogFilter(console, bb.msg.BBLogFilterStdOut)
bb.msg.addDefaultlogFilter(errconsole, bb.msg.BBLogFilterStdErr)
console.setFormatter(format)
errconsole.setFormatter(format)
@@ -278,6 +369,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
server.terminateServer()
return
+ consolelog = None
if consolelogfile and not params.options.show_environment and not params.options.show_versions:
bb.utils.mkdirhier(os.path.dirname(consolelogfile))
conlogformat = bb.msg.BBLogFormatter(format_str)
@@ -285,6 +377,12 @@ def main(server, eventHandler, params, tf = TerminalFilter):
bb.msg.addDefaultlogFilter(consolelog)
consolelog.setFormatter(conlogformat)
logger.addHandler(consolelog)
+ loglink = os.path.join(os.path.dirname(consolelogfile), 'console-latest.log')
+ bb.utils.remove(loglink)
+ try:
+ os.symlink(os.path.basename(consolelogfile), loglink)
+ except OSError:
+ pass
llevel, debug_domains = bb.msg.constructLogOptions()
server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list])
@@ -321,7 +419,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
warnings = 0
taskfailures = []
- termfilter = tf(main, helper, console, errconsole, format)
+ termfilter = tf(main, helper, console, errconsole, format, params.options.quiet)
atexit.register(termfilter.finish)
while True:
@@ -350,7 +448,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
tries -= 1
if tries:
continue
- logger.warn(event.msg)
+ logger.warning(event.msg)
continue
if isinstance(event, logging.LogRecord):
@@ -377,7 +475,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
continue
if isinstance(event, bb.build.TaskFailedSilent):
- logger.warn("Logfile for failed setscene task is %s" % event.logfile)
+ logger.warning("Logfile for failed setscene task is %s" % event.logfile)
continue
if isinstance(event, bb.build.TaskFailed):
return_value = 1
@@ -413,15 +511,19 @@ def main(server, eventHandler, params, tf = TerminalFilter):
parseprogress = new_progress("Parsing recipes", event.total).start()
continue
if isinstance(event, bb.event.ParseProgress):
- parseprogress.update(event.current)
+ if parseprogress:
+ parseprogress.update(event.current)
+ else:
+ bb.warn("Got ParseProgress event for parsing that never started?")
continue
if isinstance(event, bb.event.ParseCompleted):
if not parseprogress:
continue
-
parseprogress.finish()
- print(("Parsing of %d .bb files complete (%d cached, %d parsed). %d targets, %d skipped, %d masked, %d errors."
- % ( event.total, event.cached, event.parsed, event.virtuals, event.skipped, event.masked, event.errors)))
+ pasreprogress = None
+ if not params.options.quiet:
+ print(("Parsing of %d .bb files complete (%d cached, %d parsed). %d targets, %d skipped, %d masked, %d errors."
+ % ( event.total, event.cached, event.parsed, event.virtuals, event.skipped, event.masked, event.errors)))
continue
if isinstance(event, bb.event.CacheLoadStarted):
@@ -432,7 +534,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
continue
if isinstance(event, bb.event.CacheLoadCompleted):
cacheprogress.finish()
- print("Loaded %d entries from dependency cache." % event.num_entries)
+ if not params.options.quiet:
+ print("Loaded %d entries from dependency cache." % event.num_entries)
continue
if isinstance(event, bb.command.CommandFailed):
@@ -494,28 +597,44 @@ def main(server, eventHandler, params, tf = TerminalFilter):
tasktype = 'noexec task'
else:
tasktype = 'task'
- logger.info("Running %s %s of %s (ID: %s, %s)",
+ logger.info("Running %s %d of %d (%s)",
tasktype,
event.stats.completed + event.stats.active +
event.stats.failed + 1,
- event.stats.total, event.taskid, event.taskstring)
+ event.stats.total, event.taskstring)
continue
if isinstance(event, bb.runqueue.runQueueTaskFailed):
return_value = 1
taskfailures.append(event.taskstring)
- logger.error("Task %s (%s) failed with exit code '%s'",
- event.taskid, event.taskstring, event.exitcode)
+ logger.error("Task (%s) failed with exit code '%s'",
+ event.taskstring, event.exitcode)
continue
if isinstance(event, bb.runqueue.sceneQueueTaskFailed):
- logger.warn("Setscene task %s (%s) failed with exit code '%s' - real task will be run instead",
- event.taskid, event.taskstring, event.exitcode)
+ logger.warning("Setscene task (%s) failed with exit code '%s' - real task will be run instead",
+ event.taskstring, event.exitcode)
continue
if isinstance(event, bb.event.DepTreeGenerated):
continue
+ if isinstance(event, bb.event.ProcessStarted):
+ parseprogress = new_progress(event.processname, event.total)
+ parseprogress.start(False)
+ continue
+ if isinstance(event, bb.event.ProcessProgress):
+ if parseprogress:
+ parseprogress.update(event.progress)
+ else:
+ bb.warn("Got ProcessProgress event for someting that never started?")
+ continue
+ if isinstance(event, bb.event.ProcessFinished):
+ if parseprogress:
+ parseprogress.finish()
+ parseprogress = None
+ continue
+
# ignore
if isinstance(event, (bb.event.BuildBase,
bb.event.MetadataEvent,
@@ -527,7 +646,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
bb.event.OperationStarted,
bb.event.OperationCompleted,
bb.event.OperationProgress,
- bb.event.DiskFull)):
+ bb.event.DiskFull,
+ bb.build.TaskProgress)):
continue
logger.error("Unknown event: %s", event)
@@ -567,6 +687,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
main.shutdown = 2
return_value = 1
try:
+ termfilter.clearFooter()
summary = ""
if taskfailures:
summary += pluralise("\nSummary: %s task failed:",
@@ -579,7 +700,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
if return_value and errors:
summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.",
"\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors)
- if summary:
+ if summary and not params.options.quiet:
print(summary)
if interrupted:
@@ -591,4 +712,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
if e.errno == errno.EPIPE:
pass
+ if consolelog:
+ logger.removeHandler(consolelog)
+ consolelog.close()
+
return return_value
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/ncurses.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/ncurses.py
index 9589a77d7..d81e4138b 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/ncurses.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/ncurses.py
@@ -45,7 +45,7 @@
"""
-from __future__ import division
+
import logging
import os, sys, itertools, time, subprocess
@@ -55,7 +55,7 @@ except ImportError:
sys.exit("FATAL: The ncurses ui could not load the required curses python module.")
import bb
-import xmlrpclib
+import xmlrpc.client
from bb import ui
from bb.ui import uihelper
@@ -252,7 +252,7 @@ class NCursesUI:
elif ret != True:
print("Couldn't get default commandlind! %s" % ret)
return
- except xmlrpclib.Fault as x:
+ except xmlrpc.client.Fault as x:
print("XMLRPC Fault getting commandline:\n %s" % x)
return
@@ -331,7 +331,7 @@ class NCursesUI:
taw.setText(0, 0, "")
if activetasks:
taw.appendText("Active Tasks:\n")
- for task in activetasks.itervalues():
+ for task in activetasks.values():
taw.appendText(task["title"] + '\n')
if failedtasks:
taw.appendText("Failed Tasks:\n")
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/toasterui.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/toasterui.py
index 6bf4c1f03..9808f6bc8 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/toasterui.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/toasterui.py
@@ -39,7 +39,7 @@ import os
# module properties for UI modules are read by bitbake and the contract should not be broken
-featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.SEND_DEPENDS_TREE, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING, bb.cooker.CookerFeatures.SEND_SANITYEVENTS]
+featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING, bb.cooker.CookerFeatures.SEND_SANITYEVENTS]
logger = logging.getLogger("ToasterLogger")
interactive = sys.stdout.isatty()
@@ -102,6 +102,7 @@ _evt_list = [
"bb.command.CommandExit",
"bb.command.CommandFailed",
"bb.cooker.CookerExit",
+ "bb.event.BuildInit",
"bb.event.BuildCompleted",
"bb.event.BuildStarted",
"bb.event.CacheLoadCompleted",
@@ -115,6 +116,7 @@ _evt_list = [
"bb.event.NoProvider",
"bb.event.ParseCompleted",
"bb.event.ParseProgress",
+ "bb.event.ParseStarted",
"bb.event.RecipeParsed",
"bb.event.SanityCheck",
"bb.event.SanityCheckPassed",
@@ -163,7 +165,7 @@ def main(server, eventHandler, params):
inheritlist, _ = server.runCommand(["getVariable", "INHERIT"])
if not "buildhistory" in inheritlist.split(" "):
- logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
+ logger.warning("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
build_history_enabled = False
if not params.observe_only:
@@ -231,19 +233,35 @@ def main(server, eventHandler, params):
# pylint: disable=protected-access
# the code will look into the protected variables of the event; no easy way around this
- # we treat ParseStarted as the first event of toaster-triggered
- # builds; that way we get the Build Configuration included in the log
- # and any errors that occur before BuildStarted is fired
if isinstance(event, bb.event.ParseStarted):
if not (build_log and build_log_file_path):
build_log, build_log_file_path = _open_build_log(log_dir)
+
+ buildinfohelper.store_started_build()
+ buildinfohelper.save_build_log_file_path(build_log_file_path)
+ buildinfohelper.set_recipes_to_parse(event.total)
continue
- if isinstance(event, bb.event.BuildStarted):
+ # create a build object in buildinfohelper from either BuildInit
+ # (if available) or BuildStarted (for jethro and previous versions)
+ if isinstance(event, (bb.event.BuildStarted, bb.event.BuildInit)):
if not (build_log and build_log_file_path):
build_log, build_log_file_path = _open_build_log(log_dir)
- buildinfohelper.store_started_build(event, build_log_file_path)
+ buildinfohelper.save_build_targets(event)
+ buildinfohelper.save_build_log_file_path(build_log_file_path)
+
+ # get additional data from BuildStarted
+ if isinstance(event, bb.event.BuildStarted):
+ buildinfohelper.save_build_layers_and_variables()
+ continue
+
+ if isinstance(event, bb.event.ParseProgress):
+ buildinfohelper.set_recipes_parsed(event.current)
+ continue
+
+ if isinstance(event, bb.event.ParseCompleted):
+ buildinfohelper.set_recipes_parsed(event.total)
continue
if isinstance(event, (bb.build.TaskStarted, bb.build.TaskSucceeded, bb.build.TaskFailedSilent)):
@@ -289,10 +307,6 @@ def main(server, eventHandler, params):
# timing and error informations from the parsing phase in Toaster
if isinstance(event, (bb.event.SanityCheckPassed, bb.event.SanityCheck)):
continue
- if isinstance(event, bb.event.ParseProgress):
- continue
- if isinstance(event, bb.event.ParseCompleted):
- continue
if isinstance(event, bb.event.CacheLoadStarted):
continue
if isinstance(event, bb.event.CacheLoadProgress):
@@ -344,8 +358,8 @@ def main(server, eventHandler, params):
if isinstance(event, bb.runqueue.runQueueTaskFailed):
buildinfohelper.update_and_store_task(event)
taskfailures.append(event.taskstring)
- logger.error("Task %s (%s) failed with exit code '%s'",
- event.taskid, event.taskstring, event.exitcode)
+ logger.error("Task (%s) failed with exit code '%s'",
+ event.taskstring, event.exitcode)
continue
if isinstance(event, (bb.runqueue.sceneQueueTaskCompleted, bb.runqueue.sceneQueueTaskFailed)):
@@ -363,6 +377,9 @@ def main(server, eventHandler, params):
errors += 1
errorcode = 1
logger.error("Command execution failed: %s", event.error)
+ elif isinstance(event, bb.event.BuildCompleted):
+ buildinfohelper.scan_image_artifacts()
+ buildinfohelper.clone_required_sdk_artifacts()
# turn off logging to the current build log
_close_build_log(build_log)
@@ -410,18 +427,18 @@ def main(server, eventHandler, params):
buildinfohelper.store_target_package_data(event)
elif event.type == "MissedSstate":
buildinfohelper.store_missed_state_tasks(event)
- elif event.type == "ImageFileSize":
- buildinfohelper.update_target_image_file(event)
- elif event.type == "ArtifactFileSize":
- buildinfohelper.update_artifact_image_file(event)
- elif event.type == "LicenseManifestPath":
- buildinfohelper.store_license_manifest_path(event)
+ elif event.type == "SDKArtifactInfo":
+ buildinfohelper.scan_sdk_artifacts(event)
elif event.type == "SetBRBE":
buildinfohelper.brbe = buildinfohelper._get_data_from_event(event)
+ elif event.type == "TaskArtifacts":
+ # not implemented yet
+ # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=10283 for details
+ pass
elif event.type == "OSErrorException":
logger.error(event)
else:
- logger.error("Unprocessed MetadataEvent %s ", str(event))
+ logger.error("Unprocessed MetadataEvent %s", event.type)
continue
if isinstance(event, bb.cooker.CookerExit):
@@ -433,15 +450,33 @@ def main(server, eventHandler, params):
buildinfohelper.store_dependency_information(event)
continue
- logger.warn("Unknown event: %s", event)
+ logger.warning("Unknown event: %s", event)
return_value += 1
except EnvironmentError as ioerror:
- # ignore interrupted io
- if ioerror.args[0] == 4:
- pass
+ logger.warning("EnvironmentError: %s" % ioerror)
+ # ignore interrupted io system calls
+ if ioerror.args[0] == 4: # errno 4 is EINTR
+ logger.warning("Skipped EINTR: %s" % ioerror)
+ else:
+ raise
except KeyboardInterrupt:
- main.shutdown = 1
+ if params.observe_only:
+ print("\nKeyboard Interrupt, exiting observer...")
+ main.shutdown = 2
+ if not params.observe_only and main.shutdown == 1:
+ print("\nSecond Keyboard Interrupt, stopping...\n")
+ _, error = server.runCommand(["stateForceShutdown"])
+ if error:
+ logger.error("Unable to cleanly stop: %s" % error)
+ if not params.observe_only and main.shutdown == 0:
+ print("\nKeyboard Interrupt, closing down...\n")
+ interrupted = True
+ _, error = server.runCommand(["stateShutdown"])
+ if error:
+ logger.error("Unable to cleanly shutdown: %s" % error)
+ buildinfohelper.cancel_cli_build()
+ main.shutdown = main.shutdown + 1
except Exception as e:
# print errors to log
import traceback
@@ -461,5 +496,5 @@ def main(server, eventHandler, params):
if interrupted and return_value == 0:
return_value += 1
- logger.warn("Return value is %d", return_value)
+ logger.warning("Return value is %d", return_value)
return return_value
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/uievent.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/uievent.py
index df093c53c..9542b911c 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/uievent.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/uievent.py
@@ -25,7 +25,7 @@ client/server deadlocks.
"""
import socket, threading, pickle, collections
-from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
+from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
class BBUIEventQueue:
def __init__(self, BBServer, clientinfo=("localhost, 0")):
@@ -116,7 +116,7 @@ class BBUIEventQueue:
self.server.handle_request()
except Exception as e:
import traceback
- logger.error("BBUIEventQueue.startCallbackHandler: Exception while trying to handle request: %s\n%s" % (e, traceback.format_exc(e)))
+ logger.error("BBUIEventQueue.startCallbackHandler: Exception while trying to handle request: %s\n%s" % (e, traceback.format_exc()))
self.server.server_close()
@@ -137,7 +137,7 @@ class UIXMLRPCServer (SimpleXMLRPCServer):
SimpleXMLRPCServer.__init__( self,
interface,
requestHandler=SimpleXMLRPCRequestHandler,
- logRequests=False, allow_none=True)
+ logRequests=False, allow_none=True, use_builtin_types=True)
def get_request(self):
while not self.quit:
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/uihelper.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/uihelper.py
index db70b763f..fda7cc2c7 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/uihelper.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/uihelper.py
@@ -18,6 +18,7 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import bb.build
+import time
class BBUIHelper:
def __init__(self):
@@ -31,29 +32,33 @@ class BBUIHelper:
def eventHandler(self, event):
if isinstance(event, bb.build.TaskStarted):
- self.running_tasks[event.pid] = { 'title' : "%s %s" % (event._package, event._task) }
+ self.running_tasks[event.pid] = { 'title' : "%s %s" % (event._package, event._task), 'starttime' : time.time() }
self.running_pids.append(event.pid)
self.needUpdate = True
- if isinstance(event, bb.build.TaskSucceeded):
+ elif isinstance(event, bb.build.TaskSucceeded):
del self.running_tasks[event.pid]
self.running_pids.remove(event.pid)
self.needUpdate = True
- if isinstance(event, bb.build.TaskFailedSilent):
+ elif isinstance(event, bb.build.TaskFailedSilent):
del self.running_tasks[event.pid]
self.running_pids.remove(event.pid)
# Don't add to the failed tasks list since this is e.g. a setscene task failure
self.needUpdate = True
- if isinstance(event, bb.build.TaskFailed):
+ elif isinstance(event, bb.build.TaskFailed):
del self.running_tasks[event.pid]
self.running_pids.remove(event.pid)
self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)})
self.needUpdate = True
- if isinstance(event, bb.runqueue.runQueueTaskStarted) or isinstance(event, bb.runqueue.sceneQueueTaskStarted):
+ elif isinstance(event, bb.runqueue.runQueueTaskStarted) or isinstance(event, bb.runqueue.sceneQueueTaskStarted):
self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed + 1
self.tasknumber_total = event.stats.total
self.needUpdate = True
+ elif isinstance(event, bb.build.TaskProgress):
+ if event.pid > 0:
+ self.running_tasks[event.pid]['progress'] = event.progress
+ self.running_tasks[event.pid]['rate'] = event.rate
+ self.needUpdate = True
def getTasks(self):
self.needUpdate = False
return (self.running_tasks, self.failed_tasks)
-
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/utils.py b/import-layers/yocto-poky/bitbake/lib/bb/utils.py
index 3544bbe17..729848a1c 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/utils.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/utils.py
@@ -27,6 +27,8 @@ import bb
import bb.msg
import multiprocessing
import fcntl
+import imp
+import itertools
import subprocess
import glob
import fnmatch
@@ -34,12 +36,15 @@ import traceback
import errno
import signal
import ast
-from commands import getstatusoutput
+import collections
+import copy
+from subprocess import getstatusoutput
from contextlib import contextmanager
from ctypes import cdll
-
logger = logging.getLogger("BitBake.Util")
+python_extensions = [e for e, _, _ in imp.get_suffixes()]
+
def clean_context():
return {
@@ -71,7 +76,7 @@ def explode_version(s):
r.append((0, int(m.group(1))))
s = m.group(2)
continue
- if s[0] in string.letters:
+ if s[0] in string.ascii_letters:
m = alpha_regexp.match(s)
r.append((1, m.group(1)))
s = m.group(2)
@@ -188,7 +193,7 @@ def explode_dep_versions2(s):
"DEPEND1 (optional version) DEPEND2 (optional version) ..."
and return a dictionary of dependencies and versions.
"""
- r = {}
+ r = collections.OrderedDict()
l = s.replace(",", "").split()
lastdep = None
lastcmp = ""
@@ -245,6 +250,7 @@ def explode_dep_versions2(s):
if not (i in r and r[i]):
r[lastdep] = []
+ r = collections.OrderedDict(sorted(r.items(), key=lambda x: x[0]))
return r
def explode_dep_versions(s):
@@ -369,6 +375,12 @@ def _print_exception(t, value, tb, realfile, text, context):
level = level + 1
error.append("Exception: %s" % ''.join(exception))
+
+ # If the exception is from spawning a task, let's be helpful and display
+ # the output (which hopefully includes stderr).
+ if isinstance(value, subprocess.CalledProcessError):
+ error.append("Subprocess output:")
+ error.append(value.output.decode("utf-8", errors="ignore"))
finally:
logger.error("\n".join(error))
@@ -403,8 +415,13 @@ def better_exec(code, context, text = None, realfile = "<code>", pythonexception
def simple_exec(code, context):
exec(code, get_context(), context)
-def better_eval(source, locals):
- return eval(source, get_context(), locals)
+def better_eval(source, locals, extraglobals = None):
+ ctx = get_context()
+ if extraglobals:
+ ctx = copy.copy(ctx)
+ for g in extraglobals:
+ ctx[g] = extraglobals[g]
+ return eval(source, ctx, locals)
@contextmanager
def fileslocked(files):
@@ -563,6 +580,8 @@ def preserved_envvars_exported():
'SHELL',
'TERM',
'USER',
+ 'LC_ALL',
+ 'BBSERVER',
]
def preserved_envvars():
@@ -582,14 +601,19 @@ def filter_environment(good_vars):
"""
removed_vars = {}
- for key in os.environ.keys():
+ for key in list(os.environ):
if key in good_vars:
continue
removed_vars[key] = os.environ[key]
- os.unsetenv(key)
del os.environ[key]
+ # If we spawn a python process, we need to have a UTF-8 locale, else python's file
+ # access methods will use ascii. You can't change that mode once the interpreter is
+ # started so we have to ensure a locale is set. Ideally we'd use C.UTF-8 but not all
+ # distros support that and we need to set something.
+ os.environ["LC_ALL"] = "en_US.UTF-8"
+
if removed_vars:
logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))
@@ -629,7 +653,7 @@ def empty_environment():
"""
Remove all variables from the environment.
"""
- for s in os.environ.keys():
+ for s in list(os.environ.keys()):
os.unsetenv(s)
del os.environ[s]
@@ -818,7 +842,7 @@ def copyfile(src, dest, newmtime = None, sstat = None):
if not sstat:
sstat = os.lstat(src)
except Exception as e:
- logger.warn("copyfile: stat of %s failed (%s)" % (src, e))
+ logger.warning("copyfile: stat of %s failed (%s)" % (src, e))
return False
destexists = 1
@@ -845,7 +869,7 @@ def copyfile(src, dest, newmtime = None, sstat = None):
#os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
return os.lstat(dest)
except Exception as e:
- logger.warn("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
+ logger.warning("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
return False
if stat.S_ISREG(sstat[stat.ST_MODE]):
@@ -860,7 +884,7 @@ def copyfile(src, dest, newmtime = None, sstat = None):
shutil.copyfile(src, dest + "#new")
os.rename(dest + "#new", dest)
except Exception as e:
- logger.warn("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
+ logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
return False
finally:
if srcchown:
@@ -871,13 +895,13 @@ def copyfile(src, dest, newmtime = None, sstat = None):
#we don't yet handle special, so we need to fall back to /bin/mv
a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
if a[0] != 0:
- logger.warn("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
+ logger.warning("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
return False # failure
try:
os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
except Exception as e:
- logger.warn("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
+ logger.warning("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
return False
if newmtime:
@@ -946,7 +970,7 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
if not val:
return falsevalue
val = set(val.split())
- if isinstance(checkvalues, basestring):
+ if isinstance(checkvalues, str):
checkvalues = set(checkvalues.split())
else:
checkvalues = set(checkvalues)
@@ -959,7 +983,7 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d):
if not val:
return falsevalue
val = set(val.split())
- if isinstance(checkvalues, basestring):
+ if isinstance(checkvalues, str):
checkvalues = set(checkvalues.split())
else:
checkvalues = set(checkvalues)
@@ -1028,7 +1052,7 @@ def exec_flat_python_func(func, *args, **kwargs):
aidx += 1
# Handle keyword arguments
context.update(kwargs)
- funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.iterkeys()])
+ funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.keys()])
code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
comp = bb.utils.better_compile(code, '<string>', '<string>')
bb.utils.better_exec(comp, context, code, '<string>')
@@ -1057,7 +1081,7 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
newlines: list of lines up to this point. You can use
this to prepend lines before this variable setting
if you wish.
- and should return a three-element tuple:
+ and should return a four-element tuple:
newvalue: new value to substitute in, or None to drop
the variable setting entirely. (If the removal
results in two consecutive blank lines, one of the
@@ -1071,6 +1095,8 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
multi-line value to continue on the same line as
the assignment, False to indent before the first
element.
+ To clarify, if you wish not to change the value, then you
+ would return like this: return origvalue, None, 0, True
match_overrides: True to match items with _overrides on the end,
False otherwise
Returns a tuple:
@@ -1115,7 +1141,7 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
else:
varset_new = varset_start
- if isinstance(indent, (int, long)):
+ if isinstance(indent, int):
if indent == -1:
indentspc = ' ' * (len(varset_new) + 2)
else:
@@ -1183,7 +1209,7 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
in_var = None
else:
skip = False
- for (varname, var_re) in var_res.iteritems():
+ for (varname, var_re) in var_res.items():
res = var_re.match(line)
if res:
isfunc = varname.endswith('()')
@@ -1361,7 +1387,7 @@ def get_file_layer(filename, d):
# Use longest path so we handle nested layers
matchlen = 0
match = None
- for collection, regex in collection_res.iteritems():
+ for collection, regex in collection_res.items():
if len(regex) > matchlen and re.match(regex, path):
matchlen = len(regex)
match = collection
@@ -1427,9 +1453,8 @@ def set_process_name(name):
# This is nice to have for debugging, not essential
try:
libc = cdll.LoadLibrary('libc.so.6')
- buff = create_string_buffer(len(name)+1)
- buff.value = name
- libc.prctl(15, byref(buff), 0, 0, 0)
+ buf = create_string_buffer(bytes(name, 'utf-8'))
+ libc.prctl(15, byref(buf), 0, 0, 0)
except:
pass
@@ -1438,7 +1463,8 @@ def export_proxies(d):
import os
variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
- 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY']
+ 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
+ 'GIT_PROXY_COMMAND']
exported = False
for v in variables:
@@ -1451,3 +1477,29 @@ def export_proxies(d):
exported = True
return exported
+
+
+def load_plugins(logger, plugins, pluginpath):
+ def load_plugin(name):
+ logger.debug('Loading plugin %s' % name)
+ fp, pathname, description = imp.find_module(name, [pluginpath])
+ try:
+ return imp.load_module(name, fp, pathname, description)
+ finally:
+ if fp:
+ fp.close()
+
+ logger.debug('Loading plugins from %s...' % pluginpath)
+
+ expanded = (glob.glob(os.path.join(pluginpath, '*' + ext))
+ for ext in python_extensions)
+ files = itertools.chain.from_iterable(expanded)
+ names = set(os.path.splitext(os.path.basename(fn))[0] for fn in files)
+ for name in names:
+ if name != '__init__':
+ plugin = load_plugin(name)
+ if hasattr(plugin, 'plugin_init'):
+ obj = plugin.plugin_init(plugins)
+ plugins.append(obj or plugin)
+ else:
+ plugins.append(plugin)
OpenPOWER on IntegriCloud