Diffstat (limited to 'yocto-poky/bitbake/lib/bb')
-rw-r--r--  yocto-poky/bitbake/lib/bb/COW.py  323
-rw-r--r--  yocto-poky/bitbake/lib/bb/__init__.py  144
-rw-r--r--  yocto-poky/bitbake/lib/bb/build.py  784
-rw-r--r--  yocto-poky/bitbake/lib/bb/cache.py  849
-rw-r--r--  yocto-poky/bitbake/lib/bb/cache_extra.py  75
-rw-r--r--  yocto-poky/bitbake/lib/bb/checksum.py  139
-rw-r--r--  yocto-poky/bitbake/lib/bb/codeparser.py  436
-rw-r--r--  yocto-poky/bitbake/lib/bb/command.py  474
-rw-r--r--  yocto-poky/bitbake/lib/bb/compat.py  6
-rw-r--r--  yocto-poky/bitbake/lib/bb/cooker.py  2196
-rw-r--r--  yocto-poky/bitbake/lib/bb/cookerdata.py  345
-rw-r--r--  yocto-poky/bitbake/lib/bb/daemonize.py  193
-rw-r--r--  yocto-poky/bitbake/lib/bb/data.py  448
-rw-r--r--  yocto-poky/bitbake/lib/bb/data_smart.py  969
-rw-r--r--  yocto-poky/bitbake/lib/bb/event.py  679
-rw-r--r--  yocto-poky/bitbake/lib/bb/exceptions.py  91
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/__init__.py  1773
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/bzr.py  143
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/clearcase.py  263
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/cvs.py  171
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/git.py  435
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/gitannex.py  76
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/gitsm.py  134
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/hg.py  278
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/local.py  129
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/npm.py  284
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/osc.py  135
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/perforce.py  187
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/repo.py  98
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/sftp.py  129
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/ssh.py  128
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/svn.py  197
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/wget.py  565
-rwxr-xr-x  yocto-poky/bitbake/lib/bb/main.py  440
-rw-r--r--  yocto-poky/bitbake/lib/bb/methodpool.py  40
-rw-r--r--  yocto-poky/bitbake/lib/bb/monitordisk.py  263
-rw-r--r--  yocto-poky/bitbake/lib/bb/msg.py  199
-rw-r--r--  yocto-poky/bitbake/lib/bb/namedtuple_with_abc.py  255
-rw-r--r--  yocto-poky/bitbake/lib/bb/parse/__init__.py  170
-rw-r--r--  yocto-poky/bitbake/lib/bb/parse/ast.py  476
-rw-r--r--  yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py  254
-rw-r--r--  yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py  193
-rw-r--r--  yocto-poky/bitbake/lib/bb/parse/parse_py/__init__.py  33
-rw-r--r--  yocto-poky/bitbake/lib/bb/persist_data.py  218
-rw-r--r--  yocto-poky/bitbake/lib/bb/process.py  156
-rw-r--r--  yocto-poky/bitbake/lib/bb/providers.py  428
-rw-r--r--  yocto-poky/bitbake/lib/bb/pysh/__init__.py  0
-rw-r--r--  yocto-poky/bitbake/lib/bb/pysh/builtin.py  710
-rw-r--r--  yocto-poky/bitbake/lib/bb/pysh/interp.py  1367
-rw-r--r--  yocto-poky/bitbake/lib/bb/pysh/lsprof.py  116
-rw-r--r--  yocto-poky/bitbake/lib/bb/pysh/pysh.py  167
-rw-r--r--  yocto-poky/bitbake/lib/bb/pysh/pyshlex.py  888
-rw-r--r--  yocto-poky/bitbake/lib/bb/pysh/pyshyacc.py  779
-rw-r--r--  yocto-poky/bitbake/lib/bb/pysh/sherrors.py  41
-rw-r--r--  yocto-poky/bitbake/lib/bb/pysh/subprocess_fix.py  77
-rw-r--r--  yocto-poky/bitbake/lib/bb/runqueue.py  2285
-rw-r--r--  yocto-poky/bitbake/lib/bb/server/__init__.py  99
-rw-r--r--  yocto-poky/bitbake/lib/bb/server/process.py  268
-rw-r--r--  yocto-poky/bitbake/lib/bb/server/xmlrpc.py  390
-rw-r--r--  yocto-poky/bitbake/lib/bb/shell.py  820
-rw-r--r--  yocto-poky/bitbake/lib/bb/siggen.py  601
-rw-r--r--  yocto-poky/bitbake/lib/bb/taskdata.py  690
-rw-r--r--  yocto-poky/bitbake/lib/bb/tests/__init__.py  0
-rw-r--r--  yocto-poky/bitbake/lib/bb/tests/codeparser.py  380
-rw-r--r--  yocto-poky/bitbake/lib/bb/tests/cow.py  136
-rw-r--r--  yocto-poky/bitbake/lib/bb/tests/data.py  446
-rw-r--r--  yocto-poky/bitbake/lib/bb/tests/fetch.py  812
-rw-r--r--  yocto-poky/bitbake/lib/bb/tests/parse.py  147
-rw-r--r--  yocto-poky/bitbake/lib/bb/tests/utils.py  603
-rw-r--r--  yocto-poky/bitbake/lib/bb/tinfoil.py  105
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/__init__.py  17
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py  1521
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/__init__.py  17
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/hig/__init__.py  0
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py  44
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py  70
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py  219
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py  172
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py  298
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/hig/propertydialog.py  437
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py  122
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/hobcolor.py  38
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/hobwidget.py  904
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/persistenttooltip.py  186
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/progress.py  23
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/progressbar.py  59
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/puccho.glade  606
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/runningbuild.py  551
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/crumbs/utils.py  34
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/depexp.py  333
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/goggle.py  121
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/images/images_display.png  bin 6898 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/images/images_hover.png  bin 7051 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/indicators/add-hover.png  bin 1212 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/indicators/add.png  bin 1176 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/indicators/alert.png  bin 3954 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/indicators/confirmation.png  bin 5789 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/indicators/denied.png  bin 3955 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/indicators/error.png  bin 6482 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/indicators/info.png  bin 3311 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/indicators/issues.png  bin 4549 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/indicators/refresh.png  bin 5250 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/indicators/remove-hover.png  bin 2809 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/indicators/remove.png  bin 1971 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/indicators/tick.png  bin 4563 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/info/info_display.png  bin 4117 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/info/info_hover.png  bin 4167 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/layers/layers_display.png  bin 4840 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/layers/layers_hover.png  bin 5257 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/packages/packages_display.png  bin 7011 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/packages/packages_hover.png  bin 7121 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/recipe/recipe_display.png  bin 4723 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/recipe/recipe_hover.png  bin 4866 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/settings/settings_display.png  bin 6076 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/settings/settings_hover.png  bin 6269 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/templates/templates_display.png  bin 5651 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/icons/templates/templates_hover.png  bin 5791 -> 0 bytes
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/knotty.py  594
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/ncurses.py  373
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/toasterui.py  465
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/uievent.py  161
-rw-r--r--  yocto-poky/bitbake/lib/bb/ui/uihelper.py  59
-rw-r--r--  yocto-poky/bitbake/lib/bb/utils.py  1453
123 files changed, 0 insertions, 37205 deletions
diff --git a/yocto-poky/bitbake/lib/bb/COW.py b/yocto-poky/bitbake/lib/bb/COW.py
deleted file mode 100644
index 6917ec378..000000000
--- a/yocto-poky/bitbake/lib/bb/COW.py
+++ /dev/null
@@ -1,323 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# This is a copy-on-write dictionary and set which abuses classes to try to be nice and fast.
-#
-# Copyright (C) 2006 Tim Amsell
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Please note:
-# Be careful when using mutable types (i.e. dicts and lists) - operations involving these are SLOW.
-# Assign a file to __warn__ to get warnings about slow operations.
-#
-
-from __future__ import print_function
-import copy
-import types
-ImmutableTypes = (
- types.NoneType,
- bool,
- complex,
- float,
- int,
- long,
- tuple,
- frozenset,
- basestring
-)
-
-MUTABLE = "__mutable__"
-
-class COWMeta(type):
- pass
-
-class COWDictMeta(COWMeta):
- __warn__ = False
- __hasmutable__ = False
- __marker__ = tuple()
-
- def __str__(cls):
- # FIXME: I have magic numbers!
- return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
- __repr__ = __str__
-
- def cow(cls):
- class C(cls):
- __count__ = cls.__count__ + 1
- return C
- copy = cow
- __call__ = cow
-
- def __setitem__(cls, key, value):
- if not isinstance(value, ImmutableTypes):
- if not isinstance(value, COWMeta):
- cls.__hasmutable__ = True
- key += MUTABLE
- setattr(cls, key, value)
-
- def __getmutable__(cls, key, readonly=False):
- nkey = key + MUTABLE
- try:
- return cls.__dict__[nkey]
- except KeyError:
- pass
-
- value = getattr(cls, nkey)
- if readonly:
- return value
-
- if not cls.__warn__ is False and not isinstance(value, COWMeta):
- print("Warning: Doing a copy because %s is a mutable type." % key, file=cls.__warn__)
- try:
- value = value.copy()
- except AttributeError as e:
- value = copy.copy(value)
- setattr(cls, nkey, value)
- return value
-
- __getmarker__ = []
- def __getreadonly__(cls, key, default=__getmarker__):
- """\
- Get a value (even if mutable) which you promise not to change.
- """
- return cls.__getitem__(key, default, True)
-
- def __getitem__(cls, key, default=__getmarker__, readonly=False):
- try:
- try:
- value = getattr(cls, key)
- except AttributeError:
- value = cls.__getmutable__(key, readonly)
-
- # This is for values which have been deleted
- if value is cls.__marker__:
- raise AttributeError("key %s does not exist." % key)
-
- return value
- except AttributeError as e:
- if not default is cls.__getmarker__:
- return default
-
- raise KeyError(str(e))
-
- def __delitem__(cls, key):
- cls.__setitem__(key, cls.__marker__)
-
- def __revertitem__(cls, key):
- if not cls.__dict__.has_key(key):
- key += MUTABLE
- delattr(cls, key)
-
- def __contains__(cls, key):
- return cls.has_key(key)
-
- def has_key(cls, key):
- value = cls.__getreadonly__(key, cls.__marker__)
- if value is cls.__marker__:
- return False
- return True
-
- def iter(cls, type, readonly=False):
- for key in dir(cls):
- if key.startswith("__"):
- continue
-
- if key.endswith(MUTABLE):
- key = key[:-len(MUTABLE)]
-
- if type == "keys":
- yield key
-
- try:
- if readonly:
- value = cls.__getreadonly__(key)
- else:
- value = cls[key]
- except KeyError:
- continue
-
- if type == "values":
- yield value
- if type == "items":
- yield (key, value)
- raise StopIteration()
-
- def iterkeys(cls):
- return cls.iter("keys")
- def itervalues(cls, readonly=False):
- if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
- print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__)
- return cls.iter("values", readonly)
- def iteritems(cls, readonly=False):
- if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
- print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__)
- return cls.iter("items", readonly)
-
-class COWSetMeta(COWDictMeta):
- def __str__(cls):
- # FIXME: I have magic numbers!
- return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) -3)
- __repr__ = __str__
-
- def cow(cls):
- class C(cls):
- __count__ = cls.__count__ + 1
- return C
-
- def add(cls, value):
- COWDictMeta.__setitem__(cls, repr(hash(value)), value)
-
- def remove(cls, value):
- COWDictMeta.__delitem__(cls, repr(hash(value)))
-
- def __in__(cls, value):
- return COWDictMeta.has_key(cls, repr(hash(value)))
-
- def iterkeys(cls):
- raise TypeError("sets don't have keys")
-
- def iteritems(cls):
- raise TypeError("sets don't have 'items'")
-
-# These are the actual classes you use!
-class COWDictBase(object):
- __metaclass__ = COWDictMeta
- __count__ = 0
-
-class COWSetBase(object):
- __metaclass__ = COWSetMeta
- __count__ = 0
-
-if __name__ == "__main__":
- import sys
- COWDictBase.__warn__ = sys.stderr
- a = COWDictBase()
- print("a", a)
-
- a['a'] = 'a'
- a['b'] = 'b'
- a['dict'] = {}
-
- b = a.copy()
- print("b", b)
- b['c'] = 'b'
-
- print()
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems():
- print(x)
- print()
-
- b['dict']['a'] = 'b'
- b['a'] = 'c'
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems():
- print(x)
- print()
-
- try:
- b['dict2']
- except KeyError as e:
- print("Okay!")
-
- a['set'] = COWSetBase()
- a['set'].add("o1")
- a['set'].add("o1")
- a['set'].add("o2")
-
- print("a", a)
- for x in a['set'].itervalues():
- print(x)
- print("--")
- print("b", b)
- for x in b['set'].itervalues():
- print(x)
- print()
-
- b['set'].add('o3')
-
- print("a", a)
- for x in a['set'].itervalues():
- print(x)
- print("--")
- print("b", b)
- for x in b['set'].itervalues():
- print(x)
- print()
-
- a['set2'] = set()
- a['set2'].add("o1")
- a['set2'].add("o1")
- a['set2'].add("o2")
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems(readonly=True):
- print(x)
- print()
-
- del b['b']
- try:
- print(b['b'])
- except KeyError:
- print("Yay! deleted key raises error")
-
- if b.has_key('b'):
- print("Boo!")
- else:
- print("Yay - has_key with delete works!")
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems(readonly=True):
- print(x)
- print()
-
- b.__revertitem__('b')
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems(readonly=True):
- print(x)
- print()
-
- b.__revertitem__('dict')
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems(readonly=True):
- print(x)
- print()
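The copy-on-write trick in COW.py above comes from ordinary Python attribute lookup: cow() returns an empty subclass, so unwritten keys read through to the parent "level" while writes land only on the child. A minimal standalone sketch of that idea, with hypothetical names (the real COW.py layers mutability tracking, delete markers and slow-copy warnings on top):

# Copy-on-write via subclassing: an illustrative sketch, not the COW.py API.
class Base(object):
    pass

def cow(cls):
    # A "copy" is just an empty subclass; unwritten keys read
    # through to the parent class via normal attribute lookup.
    return type('COW', (cls,), {})

a = Base
a.x = 1
b = cow(a)
b.x = 2          # the write is local to the child class
assert a.x == 1 and b.x == 2
a.y = 3
assert b.y == 3  # an unwritten key still reads through from the parent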
diff --git a/yocto-poky/bitbake/lib/bb/__init__.py b/yocto-poky/bitbake/lib/bb/__init__.py
deleted file mode 100644
index 502ad839e..000000000
--- a/yocto-poky/bitbake/lib/bb/__init__.py
+++ /dev/null
@@ -1,144 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake Build System Python Library
-#
-# Copyright (C) 2003 Holger Schurig
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# Based on Gentoo's portage.py.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-__version__ = "1.30.0"
-
-import sys
-if sys.version_info < (2, 7, 3):
- raise RuntimeError("Sorry, python 2.7.3 or later is required for this version of bitbake")
-
-
-class BBHandledException(Exception):
- """
- The big dilemma for generic bitbake code is what information to give the user
- when an exception occurs. Any exception inheriting this base exception class
- has already provided information to the user via some 'fired' message type such as
- an explicitly fired event using bb.fire, or a bb.error message. If bitbake
- encounters an exception derived from this class, no backtrace or other information
- will be given to the user, its assumed the earlier event provided the relevant information.
- """
- pass
-
-import os
-import logging
-
-
-class NullHandler(logging.Handler):
- def emit(self, record):
- pass
-
-Logger = logging.getLoggerClass()
-class BBLogger(Logger):
- def __init__(self, name):
- if name.split(".")[0] == "BitBake":
- self.debug = self.bbdebug
- Logger.__init__(self, name)
-
- def bbdebug(self, level, msg, *args, **kwargs):
- return self.log(logging.DEBUG - level + 1, msg, *args, **kwargs)
-
- def plain(self, msg, *args, **kwargs):
- return self.log(logging.INFO + 1, msg, *args, **kwargs)
-
- def verbose(self, msg, *args, **kwargs):
- return self.log(logging.INFO - 1, msg, *args, **kwargs)
-
-logging.raiseExceptions = False
-logging.setLoggerClass(BBLogger)
-
-logger = logging.getLogger("BitBake")
-logger.addHandler(NullHandler())
-logger.setLevel(logging.DEBUG - 2)
-
-mainlogger = logging.getLogger("BitBake.Main")
-
-# This has to be imported after the setLoggerClass, as the import of bb.msg
-# can result in construction of the various loggers.
-import bb.msg
-
-from bb import fetch2 as fetch
-sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
-
-# Messaging convenience functions
-def plain(*args):
- mainlogger.plain(''.join(args))
-
-def debug(lvl, *args):
- if isinstance(lvl, basestring):
- mainlogger.warn("Passed invalid debug level '%s' to bb.debug", lvl)
- args = (lvl,) + args
- lvl = 1
- mainlogger.debug(lvl, ''.join(args))
-
-def note(*args):
- mainlogger.info(''.join(args))
-
-def warn(*args):
- mainlogger.warn(''.join(args))
-
-def error(*args, **kwargs):
- mainlogger.error(''.join(args), extra=kwargs)
-
-def fatal(*args, **kwargs):
- mainlogger.critical(''.join(args), extra=kwargs)
- raise BBHandledException()
-
-def deprecated(func, name=None, advice=""):
- """This is a decorator which can be used to mark functions
- as deprecated. It will result in a warning being emitted
- when the function is used."""
- import warnings
-
- if advice:
- advice = ": %s" % advice
- if name is None:
- name = func.__name__
-
- def newFunc(*args, **kwargs):
- warnings.warn("Call to deprecated function %s%s." % (name,
- advice),
- category=DeprecationWarning,
- stacklevel=2)
- return func(*args, **kwargs)
- newFunc.__name__ = func.__name__
- newFunc.__doc__ = func.__doc__
- newFunc.__dict__.update(func.__dict__)
- return newFunc
-
-# For compatibility
-def deprecate_import(current, modulename, fromlist, renames = None):
- """Import objects from one module into another, wrapping them with a DeprecationWarning"""
- import sys
-
- module = __import__(modulename, fromlist = fromlist)
- for position, objname in enumerate(fromlist):
- obj = getattr(module, objname)
- newobj = deprecated(obj, "{0}.{1}".format(current, objname),
- "Please use {0}.{1} instead".format(modulename, objname))
- if renames:
- newname = renames[position]
- else:
- newname = objname
-
- setattr(sys.modules[current], newname, newobj)
-
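The BBLogger above maps bb.debug(n, ...) onto stdlib levels just below logging.DEBUG: level n logs at DEBUG - n + 1, and the "BitBake" logger is set to DEBUG - 2, so debug levels 1 through 3 pass through. A standalone sketch of that arithmetic (illustrative only, hypothetical logger name):

import logging

def bbdebug_level(n):
    # bb.debug(1, ...) -> 10 (DEBUG), bb.debug(2, ...) -> 9, bb.debug(3, ...) -> 8
    return logging.DEBUG - n + 1

log = logging.getLogger("sketch")
log.setLevel(logging.DEBUG - 2)                # 8, mirroring the logger setup above
assert log.isEnabledFor(bbdebug_level(3))      # level 3 (8) is admitted
assert not log.isEnabledFor(bbdebug_level(4))  # level 4 (7) is filtered out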
diff --git a/yocto-poky/bitbake/lib/bb/build.py b/yocto-poky/bitbake/lib/bb/build.py
deleted file mode 100644
index db5072cb4..000000000
--- a/yocto-poky/bitbake/lib/bb/build.py
+++ /dev/null
@@ -1,784 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake 'Build' implementation
-#
-# Core code for function execution and task handling in the
-# BitBake build tools.
-#
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# Based on Gentoo's portage.py.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import sys
-import logging
-import shlex
-import glob
-import time
-import stat
-import bb
-import bb.msg
-import bb.process
-from contextlib import nested
-from bb import event, utils
-
-bblogger = logging.getLogger('BitBake')
-logger = logging.getLogger('BitBake.Build')
-
-NULL = open(os.devnull, 'r+')
-
-__mtime_cache = {}
-
-def cached_mtime_noerror(f):
- if f not in __mtime_cache:
- try:
- __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
- except OSError:
- return 0
- return __mtime_cache[f]
-
-def reset_cache():
- global __mtime_cache
- __mtime_cache = {}
-
-# When we execute a Python function, we'd like certain things
-# in all namespaces, hence we add them to __builtins__.
-# If we do not do this and use the exec globals, they will
-# not be available to subfunctions.
-__builtins__['bb'] = bb
-__builtins__['os'] = os
-
-class FuncFailed(Exception):
- def __init__(self, name = None, logfile = None):
- self.logfile = logfile
- self.name = name
- if name:
- self.msg = 'Function failed: %s' % name
- else:
- self.msg = "Function failed"
-
- def __str__(self):
- if self.logfile and os.path.exists(self.logfile):
- msg = ("%s (log file is located at %s)" %
- (self.msg, self.logfile))
- else:
- msg = self.msg
- return msg
-
-class TaskBase(event.Event):
- """Base class for task events"""
-
- def __init__(self, t, logfile, d):
- self._task = t
- self._package = d.getVar("PF", True)
- self.taskfile = d.getVar("FILE", True)
- self.taskname = self._task
- self.logfile = logfile
- self.time = time.time()
- event.Event.__init__(self)
- self._message = "recipe %s: task %s: %s" % (d.getVar("PF", True), t, self.getDisplayName())
-
- def getTask(self):
- return self._task
-
- def setTask(self, task):
- self._task = task
-
- def getDisplayName(self):
- return bb.event.getName(self)[4:]
-
- task = property(getTask, setTask, None, "task property")
-
-class TaskStarted(TaskBase):
- """Task execution started"""
- def __init__(self, t, logfile, taskflags, d):
- super(TaskStarted, self).__init__(t, logfile, d)
- self.taskflags = taskflags
-
-class TaskSucceeded(TaskBase):
- """Task execution completed"""
-
-class TaskFailed(TaskBase):
- """Task execution failed"""
-
- def __init__(self, task, logfile, metadata, errprinted = False):
- self.errprinted = errprinted
- super(TaskFailed, self).__init__(task, logfile, metadata)
-
-class TaskFailedSilent(TaskBase):
- """Task execution failed (silently)"""
- def getDisplayName(self):
- # Don't need to tell the user it was silent
- return "Failed"
-
-class TaskInvalid(TaskBase):
-
- def __init__(self, task, metadata):
- super(TaskInvalid, self).__init__(task, None, metadata)
- self._message = "No such task '%s'" % task
-
-
-class LogTee(object):
- def __init__(self, logger, outfile):
- self.outfile = outfile
- self.logger = logger
- self.name = self.outfile.name
-
- def write(self, string):
- self.logger.plain(string)
- self.outfile.write(string)
-
- def __enter__(self):
- self.outfile.__enter__()
- return self
-
- def __exit__(self, *excinfo):
- self.outfile.__exit__(*excinfo)
-
- def __repr__(self):
- return '<LogTee {0}>'.format(self.name)
- def flush(self):
- self.outfile.flush()
-
-#
-# pythonexception allows the python exceptions generated to be raised
-# as the real exceptions (not FuncFailed) and without a backtrace at the
-# origin of the failure.
-#
-def exec_func(func, d, dirs = None, pythonexception=False):
- """Execute a BB 'function'"""
-
- body = d.getVar(func, False)
- if not body:
- if body is None:
- logger.warn("Function %s doesn't exist", func)
- return
-
- flags = d.getVarFlags(func)
- cleandirs = flags.get('cleandirs')
- if cleandirs:
- for cdir in d.expand(cleandirs).split():
- bb.utils.remove(cdir, True)
- bb.utils.mkdirhier(cdir)
-
- if dirs is None:
- dirs = flags.get('dirs')
- if dirs:
- dirs = d.expand(dirs).split()
-
- if dirs:
- for adir in dirs:
- bb.utils.mkdirhier(adir)
- adir = dirs[-1]
- else:
- adir = d.getVar('B', True)
- bb.utils.mkdirhier(adir)
-
- ispython = flags.get('python')
-
- lockflag = flags.get('lockfiles')
- if lockflag:
- lockfiles = [f for f in d.expand(lockflag).split()]
- else:
- lockfiles = None
-
- tempdir = d.getVar('T', True)
-
- # The 'or func' fallback allows items to be executed outside of the
- # normal task set, such as buildhistory
- task = d.getVar('BB_RUNTASK', True) or func
- if task == func:
- taskfunc = task
- else:
- taskfunc = "%s.%s" % (task, func)
-
- runfmt = d.getVar('BB_RUNFMT', True) or "run.{func}.{pid}"
- runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid())
- runfile = os.path.join(tempdir, runfn)
- bb.utils.mkdirhier(os.path.dirname(runfile))
-
- # Set up the courtesy link to the runfn, for tasks only.
- # We create the link just before the run script is created;
- # if we created it afterwards and the run script failed, the
- # link would never be created, as an exception would be raised.
- if task == func:
- runlink = os.path.join(tempdir, 'run.{0}'.format(task))
- if runlink:
- bb.utils.remove(runlink)
-
- try:
- os.symlink(runfn, runlink)
- except OSError:
- pass
-
- with bb.utils.fileslocked(lockfiles):
- if ispython:
- exec_func_python(func, d, runfile, cwd=adir, pythonexception=pythonexception)
- else:
- exec_func_shell(func, d, runfile, cwd=adir)
-
-_functionfmt = """
-{function}(d)
-"""
-logformatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
-def exec_func_python(func, d, runfile, cwd=None, pythonexception=False):
- """Execute a python BB 'function'"""
-
- code = _functionfmt.format(function=func)
- bb.utils.mkdirhier(os.path.dirname(runfile))
- with open(runfile, 'w') as script:
- bb.data.emit_func_python(func, script, d)
-
- if cwd:
- try:
- olddir = os.getcwd()
- except OSError:
- olddir = None
- os.chdir(cwd)
-
- bb.debug(2, "Executing python function %s" % func)
-
- try:
- text = "def %s(d):\n%s" % (func, d.getVar(func, False))
- fn = d.getVarFlag(func, "filename", False)
- lineno = int(d.getVarFlag(func, "lineno", False))
- bb.methodpool.insert_method(func, text, fn, lineno - 1)
-
- comp = utils.better_compile(code, func, "exec_python_func() autogenerated")
- utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated", pythonexception=pythonexception)
- except (bb.parse.SkipRecipe, bb.build.FuncFailed):
- raise
- except:
- if pythonexception:
- raise
- raise FuncFailed(func, None)
- finally:
- bb.debug(2, "Python function %s finished" % func)
-
- if cwd and olddir:
- try:
- os.chdir(olddir)
- except OSError:
- pass
-
-def shell_trap_code():
- return '''#!/bin/sh\n
-# Emit a useful diagnostic if something fails:
-bb_exit_handler() {
- ret=$?
- case $ret in
- 0) ;;
- *) case $BASH_VERSION in
- "") echo "WARNING: exit code $ret from a shell command.";;
- *) echo "WARNING: ${BASH_SOURCE[0]}:${BASH_LINENO[0]} exit $ret from '$BASH_COMMAND'";;
- esac
- exit $ret
- esac
-}
-trap 'bb_exit_handler' 0
-set -e
-'''
-
-def exec_func_shell(func, d, runfile, cwd=None):
- """Execute a shell function from the metadata
-
- Note on directory behavior. The 'dirs' varflag should contain a list
- of the directories you need created prior to execution. The last
- item in the list is where we will chdir/cd to.
- """
-
- # Don't let the emitted shell script override PWD
- d.delVarFlag('PWD', 'export')
-
- with open(runfile, 'w') as script:
- script.write(shell_trap_code())
-
- bb.data.emit_func(func, script, d)
-
- if bb.msg.loggerVerboseLogs:
- script.write("set -x\n")
- if cwd:
- script.write("cd '%s'\n" % cwd)
- script.write("%s\n" % func)
- script.write('''
-# cleanup
-ret=$?
-trap '' 0
-exit $ret
-''')
-
- os.chmod(runfile, 0775)
-
- cmd = runfile
- if d.getVarFlag(func, 'fakeroot', False):
- fakerootcmd = d.getVar('FAKEROOT', True)
- if fakerootcmd:
- cmd = [fakerootcmd, runfile]
-
- if bb.msg.loggerDefaultVerbose:
- logfile = LogTee(logger, sys.stdout)
- else:
- logfile = sys.stdout
-
- def readfifo(data):
- lines = data.split('\0')
- for line in lines:
- splitval = line.split(' ', 1)
- cmd = splitval[0]
- if len(splitval) > 1:
- value = splitval[1]
- else:
- value = ''
- if cmd == 'bbplain':
- bb.plain(value)
- elif cmd == 'bbnote':
- bb.note(value)
- elif cmd == 'bbwarn':
- bb.warn(value)
- elif cmd == 'bberror':
- bb.error(value)
- elif cmd == 'bbfatal':
- # The caller will call exit themselves, so bb.error() is
- # what we want here rather than bb.fatal()
- bb.error(value)
- elif cmd == 'bbfatal_log':
- bb.error(value, forcelog=True)
- elif cmd == 'bbdebug':
- splitval = value.split(' ', 1)
- level = int(splitval[0])
- value = splitval[1]
- bb.debug(level, value)
-
- tempdir = d.getVar('T', True)
- fifopath = os.path.join(tempdir, 'fifo.%s' % os.getpid())
- if os.path.exists(fifopath):
- os.unlink(fifopath)
- os.mkfifo(fifopath)
- with open(fifopath, 'r+') as fifo:
- try:
- bb.debug(2, "Executing shell function %s" % func)
-
- try:
- with open(os.devnull, 'r+') as stdin:
- bb.process.run(cmd, shell=False, stdin=stdin, log=logfile, extrafiles=[(fifo,readfifo)])
- except bb.process.CmdError:
- logfn = d.getVar('BB_LOGFILE', True)
- raise FuncFailed(func, logfn)
- finally:
- os.unlink(fifopath)
-
- bb.debug(2, "Shell function %s finished" % func)
-
-def _task_data(fn, task, d):
- localdata = bb.data.createCopy(d)
- localdata.setVar('BB_FILENAME', fn)
- localdata.setVar('BB_CURRENTTASK', task[3:])
- localdata.setVar('OVERRIDES', 'task-%s:%s' %
- (task[3:].replace('_', '-'), d.getVar('OVERRIDES', False)))
- localdata.finalize()
- bb.data.expandKeys(localdata)
- return localdata
-
-def _exec_task(fn, task, d, quieterr):
- """Execute a BB 'task'
-
- Execution of a task involves a bit more setup than executing a function,
- running it with its own local metadata, and with some useful variables set.
- """
- if not d.getVarFlag(task, 'task', False):
- event.fire(TaskInvalid(task, d), d)
- logger.error("No such task: %s" % task)
- return 1
-
- logger.debug(1, "Executing task %s", task)
-
- localdata = _task_data(fn, task, d)
- tempdir = localdata.getVar('T', True)
- if not tempdir:
- bb.fatal("T variable not set, unable to build")
-
- # Change nice level if we're asked to
- nice = localdata.getVar("BB_TASK_NICE_LEVEL", True)
- if nice:
- curnice = os.nice(0)
- nice = int(nice) - curnice
- newnice = os.nice(nice)
- logger.debug(1, "Renice to %s " % newnice)
- ionice = localdata.getVar("BB_TASK_IONICE_LEVEL", True)
- if ionice:
- try:
- cls, prio = ionice.split(".", 1)
- bb.utils.ioprio_set(os.getpid(), int(cls), int(prio))
- except:
- bb.warn("Invalid ionice level %s" % ionice)
-
- bb.utils.mkdirhier(tempdir)
-
- # Determine the logfile to generate
- logfmt = localdata.getVar('BB_LOGFMT', True) or 'log.{task}.{pid}'
- logbase = logfmt.format(task=task, pid=os.getpid())
-
- # Document the order of the tasks...
- logorder = os.path.join(tempdir, 'log.task_order')
- try:
- with open(logorder, 'a') as logorderfile:
- logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
- except OSError:
- logger.exception("Opening log file '%s'", logorder)
- pass
-
- # Setup the courtesy link to the logfn
- loglink = os.path.join(tempdir, 'log.{0}'.format(task))
- logfn = os.path.join(tempdir, logbase)
- if loglink:
- bb.utils.remove(loglink)
-
- try:
- os.symlink(logbase, loglink)
- except OSError:
- pass
-
- prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True)
- postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True)
-
- class ErrorCheckHandler(logging.Handler):
- def __init__(self):
- self.triggered = False
- logging.Handler.__init__(self, logging.ERROR)
- def emit(self, record):
- if getattr(record, 'forcelog', False):
- self.triggered = False
- else:
- self.triggered = True
-
- # Handle logfiles
- si = open('/dev/null', 'r')
- try:
- bb.utils.mkdirhier(os.path.dirname(logfn))
- logfile = open(logfn, 'w')
- except OSError:
- logger.exception("Opening log file '%s'", logfn)
- pass
-
- # Dup the existing fds so we don't lose them
- osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
- oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
- ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]
-
- # Replace those fds with our own
- os.dup2(si.fileno(), osi[1])
- os.dup2(logfile.fileno(), oso[1])
- os.dup2(logfile.fileno(), ose[1])
-
- # Ensure Python logging goes to the logfile
- handler = logging.StreamHandler(logfile)
- handler.setFormatter(logformatter)
- # Always enable full debug output into task logfiles
- handler.setLevel(logging.DEBUG - 2)
- bblogger.addHandler(handler)
-
- errchk = ErrorCheckHandler()
- bblogger.addHandler(errchk)
-
- localdata.setVar('BB_LOGFILE', logfn)
- localdata.setVar('BB_RUNTASK', task)
-
- flags = localdata.getVarFlags(task)
-
- event.fire(TaskStarted(task, logfn, flags, localdata), localdata)
- try:
- for func in (prefuncs or '').split():
- exec_func(func, localdata)
- exec_func(task, localdata)
- for func in (postfuncs or '').split():
- exec_func(func, localdata)
- except FuncFailed as exc:
- if quieterr:
- event.fire(TaskFailedSilent(task, logfn, localdata), localdata)
- else:
- errprinted = errchk.triggered
- logger.error(str(exc))
- event.fire(TaskFailed(task, logfn, localdata, errprinted), localdata)
- return 1
- finally:
- sys.stdout.flush()
- sys.stderr.flush()
-
- bblogger.removeHandler(handler)
-
- # Restore the backup fds
- os.dup2(osi[0], osi[1])
- os.dup2(oso[0], oso[1])
- os.dup2(ose[0], ose[1])
-
- # Close the backup fds
- os.close(osi[0])
- os.close(oso[0])
- os.close(ose[0])
- si.close()
-
- logfile.close()
- if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
- logger.debug(2, "Zero size logfn %s, removing", logfn)
- bb.utils.remove(logfn)
- bb.utils.remove(loglink)
- event.fire(TaskSucceeded(task, logfn, localdata), localdata)
-
- if not localdata.getVarFlag(task, 'nostamp', False) and not localdata.getVarFlag(task, 'selfstamp', False):
- make_stamp(task, localdata)
-
- return 0
-
-def exec_task(fn, task, d, profile = False):
- try:
- quieterr = False
- if d.getVarFlag(task, "quieterrors", False) is not None:
- quieterr = True
-
- if profile:
- profname = "profile-%s.log" % (d.getVar("PN", True) + "-" + task)
- try:
- import cProfile as profile
- except:
- import profile
- prof = profile.Profile()
- ret = profile.Profile.runcall(prof, _exec_task, fn, task, d, quieterr)
- prof.dump_stats(profname)
- bb.utils.process_profilelog(profname)
-
- return ret
- else:
- return _exec_task(fn, task, d, quieterr)
-
- except Exception:
- from traceback import format_exc
- if not quieterr:
- logger.error("Build of %s failed" % (task))
- logger.error(format_exc())
- failedevent = TaskFailed(task, None, d, True)
- event.fire(failedevent, d)
- return 1
-
-def stamp_internal(taskname, d, file_name, baseonly=False):
- """
- Internal stamp helper function
- Makes sure the stamp directory exists
- Returns the stamp path+filename
-
- In the bitbake core, d can be a CacheData and file_name will be set.
- When called in task context, d will be a datastore and file_name will not be set.
- """
- taskflagname = taskname
- if taskname.endswith("_setscene") and taskname != "do_setscene":
- taskflagname = taskname.replace("_setscene", "")
-
- if file_name:
- stamp = d.stamp[file_name]
- extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
- else:
- stamp = d.getVar('STAMP', True)
- file_name = d.getVar('BB_FILENAME', True)
- extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
-
- if baseonly:
- return stamp
-
- if not stamp:
- return
-
- stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)
-
- stampdir = os.path.dirname(stamp)
- if cached_mtime_noerror(stampdir) == 0:
- bb.utils.mkdirhier(stampdir)
-
- return stamp
-
-def stamp_cleanmask_internal(taskname, d, file_name):
- """
- Internal stamp helper function to generate stamp cleaning mask
- Returns the stamp path+filename
-
- In the bitbake core, d can be a CacheData and file_name will be set.
- When called in task context, d will be a datastore and file_name will not be set.
- """
- taskflagname = taskname
- if taskname.endswith("_setscene") and taskname != "do_setscene":
- taskflagname = taskname.replace("_setscene", "")
-
- if file_name:
- stamp = d.stampclean[file_name]
- extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
- else:
- stamp = d.getVar('STAMPCLEAN', True)
- file_name = d.getVar('BB_FILENAME', True)
- extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
-
- if not stamp:
- return []
-
- cleanmask = bb.parse.siggen.stampcleanmask(stamp, file_name, taskname, extrainfo)
-
- return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]
-
-def make_stamp(task, d, file_name = None):
- """
- Creates/updates a stamp for a given task
- (d can be a data dict or dataCache)
- """
- cleanmask = stamp_cleanmask_internal(task, d, file_name)
- for mask in cleanmask:
- for name in glob.glob(mask):
- # Preserve sigdata files in the stamps directory
- if "sigdata" in name:
- continue
- # Preserve taint files in the stamps directory
- if name.endswith('.taint'):
- continue
- os.unlink(name)
-
- stamp = stamp_internal(task, d, file_name)
- # Remove the file and recreate to force timestamp
- # change on broken NFS filesystems
- if stamp:
- bb.utils.remove(stamp)
- open(stamp, "w").close()
-
- # If we're in task context, write out a signature file for each task
- # as it completes
- if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
- stampbase = stamp_internal(task, d, None, True)
- file_name = d.getVar('BB_FILENAME', True)
- bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)
-
-def del_stamp(task, d, file_name = None):
- """
- Removes a stamp for a given task
- (d can be a data dict or dataCache)
- """
- stamp = stamp_internal(task, d, file_name)
- bb.utils.remove(stamp)
-
-def write_taint(task, d, file_name = None):
- """
- Creates a "taint" file which will force the specified task and its
- dependents to be re-run the next time by influencing the value of its
- taskhash.
- (d can be a data dict or dataCache)
- """
- import uuid
- if file_name:
- taintfn = d.stamp[file_name] + '.' + task + '.taint'
- else:
- taintfn = d.getVar('STAMP', True) + '.' + task + '.taint'
- bb.utils.mkdirhier(os.path.dirname(taintfn))
- # The specific content of the taint file is not really important,
- # we just need it to be random, so a random UUID is used
- with open(taintfn, 'w') as taintf:
- taintf.write(str(uuid.uuid4()))
-
-def stampfile(taskname, d, file_name = None):
- """
- Return the stamp for a given task
- (d can be a data dict or dataCache)
- """
- return stamp_internal(taskname, d, file_name)
-
-def add_tasks(tasklist, d):
- task_deps = d.getVar('_task_deps', False)
- if not task_deps:
- task_deps = {}
- if not 'tasks' in task_deps:
- task_deps['tasks'] = []
- if not 'parents' in task_deps:
- task_deps['parents'] = {}
-
- for task in tasklist:
- task = d.expand(task)
-
- d.setVarFlag(task, 'task', 1)
-
- if not task in task_deps['tasks']:
- task_deps['tasks'].append(task)
-
- flags = d.getVarFlags(task)
- def getTask(name):
- if not name in task_deps:
- task_deps[name] = {}
- if name in flags:
- deptask = d.expand(flags[name])
- task_deps[name][task] = deptask
- getTask('depends')
- getTask('rdepends')
- getTask('deptask')
- getTask('rdeptask')
- getTask('recrdeptask')
- getTask('recideptask')
- getTask('nostamp')
- getTask('fakeroot')
- getTask('noexec')
- getTask('umask')
- task_deps['parents'][task] = []
- if 'deps' in flags:
- for dep in flags['deps']:
- dep = d.expand(dep)
- task_deps['parents'][task].append(dep)
-
- # don't assume d is still holding a reference to this dict
- d.setVar('_task_deps', task_deps)
-
-def addtask(task, before, after, d):
- if task[:3] != "do_":
- task = "do_" + task
-
- d.setVarFlag(task, "task", 1)
- bbtasks = d.getVar('__BBTASKS', False) or []
- if task not in bbtasks:
- bbtasks.append(task)
- d.setVar('__BBTASKS', bbtasks)
-
- existing = d.getVarFlag(task, "deps", False) or []
- if after is not None:
- # set up deps for function
- for entry in after.split():
- if entry not in existing:
- existing.append(entry)
- d.setVarFlag(task, "deps", existing)
- if before is not None:
- # set up things that depend on this func
- for entry in before.split():
- existing = d.getVarFlag(entry, "deps", False) or []
- if task not in existing:
- d.setVarFlag(entry, "deps", [task] + existing)
-
-def deltask(task, d):
- if task[:3] != "do_":
- task = "do_" + task
-
- bbtasks = d.getVar('__BBTASKS', False) or []
- if task in bbtasks:
- bbtasks.remove(task)
- d.setVar('__BBTASKS', bbtasks)
-
- d.delVarFlag(task, 'deps')
- for bbtask in d.getVar('__BBTASKS', False) or []:
- deps = d.getVarFlag(bbtask, 'deps', False) or []
- if task in deps:
- deps.remove(task)
- d.setVarFlag(bbtask, 'deps', deps)
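addtask() and deltask() above wire tasks together purely through each task's 'deps' varflag: entries named in 'after' are appended to the task's own deps, while entries named in 'before' get the task prepended to their deps. A plain-dict sketch of that wiring (a hypothetical stand-in for the real datastore varflags):

deps = {}

def addtask(task, before=None, after=None):
    # 'after' entries become this task's own dependencies
    existing = deps.setdefault(task, [])
    for entry in (after or '').split():
        if entry not in existing:
            existing.append(entry)
    # 'before' entries must run after this task, so it joins *their* deps
    for entry in (before or '').split():
        entrydeps = deps.setdefault(entry, [])
        if task not in entrydeps:
            entrydeps.insert(0, task)

addtask('do_compile', after='do_configure')
addtask('do_configure', after='do_patch', before='do_compile')
assert deps['do_compile'] == ['do_configure']
assert deps['do_configure'] == ['do_patch']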
diff --git a/yocto-poky/bitbake/lib/bb/cache.py b/yocto-poky/bitbake/lib/bb/cache.py
deleted file mode 100644
index af5b9fbc6..000000000
--- a/yocto-poky/bitbake/lib/bb/cache.py
+++ /dev/null
@@ -1,849 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake Cache implementation
-#
-# Caching of bitbake variables before task execution
-
-# Copyright (C) 2006 Richard Purdie
-# Copyright (C) 2012 Intel Corporation
-
-# but small sections based on code from bin/bitbake:
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
-# Copyright (C) 2005 Holger Hans Peter Freyther
-# Copyright (C) 2005 ROAD GmbH
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-
-import os
-import logging
-from collections import defaultdict
-import bb.utils
-
-logger = logging.getLogger("BitBake.Cache")
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info("Importing cPickle failed. "
- "Falling back to a very slow implementation.")
-
-__cache_version__ = "149"
-
-def getCacheFile(path, filename, data_hash):
- return os.path.join(path, filename + "." + data_hash)
-
-# RecipeInfoCommon defines the common metadata-retrieval methods
-# used by the caches. CoreRecipeInfo, as well as any extra
-# RecipeInfo classes, needs to inherit from this class.
-class RecipeInfoCommon(object):
-
- @classmethod
- def listvar(cls, var, metadata):
- return cls.getvar(var, metadata).split()
-
- @classmethod
- def intvar(cls, var, metadata):
- return int(cls.getvar(var, metadata) or 0)
-
- @classmethod
- def depvar(cls, var, metadata):
- return bb.utils.explode_deps(cls.getvar(var, metadata))
-
- @classmethod
- def pkgvar(cls, var, packages, metadata):
- return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
- for pkg in packages)
-
- @classmethod
- def taskvar(cls, var, tasks, metadata):
- return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
- for task in tasks)
-
- @classmethod
- def flaglist(cls, flag, varlist, metadata, squash=False):
- out_dict = dict((var, metadata.getVarFlag(var, flag, True))
- for var in varlist)
- if squash:
- return dict((k,v) for (k,v) in out_dict.iteritems() if v)
- else:
- return out_dict
-
- @classmethod
- def getvar(cls, var, metadata, expand = True):
- return metadata.getVar(var, expand) or ''
-
-
-class CoreRecipeInfo(RecipeInfoCommon):
- __slots__ = ()
-
- cachefile = "bb_cache.dat"
-
- def __init__(self, filename, metadata):
- self.file_depends = metadata.getVar('__depends', False)
- self.timestamp = bb.parse.cached_mtime(filename)
- self.variants = self.listvar('__VARIANTS', metadata) + ['']
- self.appends = self.listvar('__BBAPPEND', metadata)
- self.nocache = self.getvar('BB_DONT_CACHE', metadata)
-
- self.skipreason = self.getvar('__SKIPPED', metadata)
- if self.skipreason:
- self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename,metadata)[0]
- self.skipped = True
- self.provides = self.depvar('PROVIDES', metadata)
- self.rprovides = self.depvar('RPROVIDES', metadata)
- return
-
- self.tasks = metadata.getVar('__BBTASKS', False)
-
- self.pn = self.getvar('PN', metadata)
- self.packages = self.listvar('PACKAGES', metadata)
- if not self.pn in self.packages:
- self.packages.append(self.pn)
-
- self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
- self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)
-
- self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}
-
- self.skipped = False
- self.pe = self.getvar('PE', metadata)
- self.pv = self.getvar('PV', metadata)
- self.pr = self.getvar('PR', metadata)
- self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
- self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
- self.stamp = self.getvar('STAMP', metadata)
- self.stampclean = self.getvar('STAMPCLEAN', metadata)
- self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
- self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
- self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
- self.depends = self.depvar('DEPENDS', metadata)
- self.provides = self.depvar('PROVIDES', metadata)
- self.rdepends = self.depvar('RDEPENDS', metadata)
- self.rprovides = self.depvar('RPROVIDES', metadata)
- self.rrecommends = self.depvar('RRECOMMENDS', metadata)
- self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
- self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
- self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
- self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
- self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
- self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
- self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
- self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)
-
- @classmethod
- def init_cacheData(cls, cachedata):
- # CacheData in Core RecipeInfo Class
- cachedata.task_deps = {}
- cachedata.pkg_fn = {}
- cachedata.pkg_pn = defaultdict(list)
- cachedata.pkg_pepvpr = {}
- cachedata.pkg_dp = {}
-
- cachedata.stamp = {}
- cachedata.stampclean = {}
- cachedata.stamp_extrainfo = {}
- cachedata.file_checksums = {}
- cachedata.fn_provides = {}
- cachedata.pn_provides = defaultdict(list)
- cachedata.all_depends = []
-
- cachedata.deps = defaultdict(list)
- cachedata.packages = defaultdict(list)
- cachedata.providers = defaultdict(list)
- cachedata.rproviders = defaultdict(list)
- cachedata.packages_dynamic = defaultdict(list)
-
- cachedata.rundeps = defaultdict(lambda: defaultdict(list))
- cachedata.runrecs = defaultdict(lambda: defaultdict(list))
- cachedata.possible_world = []
- cachedata.universe_target = []
- cachedata.hashfn = {}
-
- cachedata.basetaskhash = {}
- cachedata.inherits = {}
- cachedata.fakerootenv = {}
- cachedata.fakerootnoenv = {}
- cachedata.fakerootdirs = {}
- cachedata.extradepsfunc = {}
-
- def add_cacheData(self, cachedata, fn):
- cachedata.task_deps[fn] = self.task_deps
- cachedata.pkg_fn[fn] = self.pn
- cachedata.pkg_pn[self.pn].append(fn)
- cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
- cachedata.pkg_dp[fn] = self.defaultpref
- cachedata.stamp[fn] = self.stamp
- cachedata.stampclean[fn] = self.stampclean
- cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
- cachedata.file_checksums[fn] = self.file_checksums
-
- provides = [self.pn]
- for provide in self.provides:
- if provide not in provides:
- provides.append(provide)
- cachedata.fn_provides[fn] = provides
-
- for provide in provides:
- cachedata.providers[provide].append(fn)
- if provide not in cachedata.pn_provides[self.pn]:
- cachedata.pn_provides[self.pn].append(provide)
-
- for dep in self.depends:
- if dep not in cachedata.deps[fn]:
- cachedata.deps[fn].append(dep)
- if dep not in cachedata.all_depends:
- cachedata.all_depends.append(dep)
-
- rprovides = self.rprovides
- for package in self.packages:
- cachedata.packages[package].append(fn)
- rprovides += self.rprovides_pkg[package]
-
- for rprovide in rprovides:
- if fn not in cachedata.rproviders[rprovide]:
- cachedata.rproviders[rprovide].append(fn)
-
- for package in self.packages_dynamic:
- cachedata.packages_dynamic[package].append(fn)
-
- # Build hash of runtime depends and recommends
- for package in self.packages + [self.pn]:
- cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
- cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]
-
- # Collect files we may need for possible world-dep
- # calculations
- if self.not_world:
- logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
- else:
- cachedata.possible_world.append(fn)
-
- # create a collection of all targets for sanity checking
- # tasks, such as upstream versions, license, and tools for
- # task and image creation.
- cachedata.universe_target.append(self.pn)
-
- cachedata.hashfn[fn] = self.hashfilename
- for task, taskhash in self.basetaskhashes.iteritems():
- identifier = '%s.%s' % (fn, task)
- cachedata.basetaskhash[identifier] = taskhash
-
- cachedata.inherits[fn] = self.inherits
- cachedata.fakerootenv[fn] = self.fakerootenv
- cachedata.fakerootnoenv[fn] = self.fakerootnoenv
- cachedata.fakerootdirs[fn] = self.fakerootdirs
- cachedata.extradepsfunc[fn] = self.extradepsfunc
-
-
-
-class Cache(object):
- """
- BitBake Cache implementation
- """
-
- def __init__(self, data, data_hash, caches_array):
- # Pass caches_array information into Cache Constructor
- # It will be used later for deciding whether we
- # need extra cache file dump/load support
- self.caches_array = caches_array
- self.cachedir = data.getVar("CACHE", True)
- self.clean = set()
- self.checked = set()
- self.depends_cache = {}
- self.data = None
- self.data_fn = None
- self.cacheclean = True
- self.data_hash = data_hash
-
- if self.cachedir in [None, '']:
- self.has_cache = False
- logger.info("Not using a cache. "
- "Set CACHE = <directory> to enable.")
- return
-
- self.has_cache = True
- self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)
-
- logger.debug(1, "Using cache in '%s'", self.cachedir)
- bb.utils.mkdirhier(self.cachedir)
-
- cache_ok = True
- if self.caches_array:
- for cache_class in self.caches_array:
- if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
- cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
- cache_ok = cache_ok and os.path.exists(cachefile)
- cache_class.init_cacheData(self)
- if cache_ok:
- self.load_cachefile()
- elif os.path.isfile(self.cachefile):
- logger.info("Out of date cache found, rebuilding...")
-
- def load_cachefile(self):
- # First, use the core cache file information for
- # validity checking
- with open(self.cachefile, "rb") as cachefile:
- pickled = pickle.Unpickler(cachefile)
- try:
- cache_ver = pickled.load()
- bitbake_ver = pickled.load()
- except Exception:
- logger.info('Invalid cache, rebuilding...')
- return
-
- if cache_ver != __cache_version__:
- logger.info('Cache version mismatch, rebuilding...')
- return
- elif bitbake_ver != bb.__version__:
- logger.info('Bitbake version mismatch, rebuilding...')
- return
-
-
- cachesize = 0
- previous_progress = 0
- previous_percent = 0
-
- # Calculate the correct cachesize of all those cache files
- for cache_class in self.caches_array:
- if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
- cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
- with open(cachefile, "rb") as cachefile:
- cachesize += os.fstat(cachefile.fileno()).st_size
-
- bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
-
- for cache_class in self.caches_array:
- if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
- cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
- with open(cachefile, "rb") as cachefile:
- pickled = pickle.Unpickler(cachefile)
- while cachefile:
- try:
- key = pickled.load()
- value = pickled.load()
- except Exception:
- break
- if self.depends_cache.has_key(key):
- self.depends_cache[key].append(value)
- else:
- self.depends_cache[key] = [value]
- # only fire events on whole-percentage boundaries
- current_progress = cachefile.tell() + previous_progress
- current_percent = 100 * current_progress / cachesize
- if current_percent > previous_percent:
- previous_percent = current_percent
- bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
- self.data)
-
- previous_progress += current_progress
-
- # Note: the depends cache count corresponds to the number of parsed files;
- # one file may have several caches, but it is still regarded as one item in the cache
- bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
- len(self.depends_cache)),
- self.data)
-
-
- @staticmethod
- def virtualfn2realfn(virtualfn):
- """
- Convert a virtual file name to a real one + the associated subclass keyword
- """
-
- fn = virtualfn
- cls = ""
- if virtualfn.startswith('virtual:'):
- elems = virtualfn.split(':')
- cls = ":".join(elems[1:-1])
- fn = elems[-1]
- return (fn, cls)
-
- @staticmethod
- def realfn2virtual(realfn, cls):
- """
- Convert a real filename + the associated subclass keyword to a virtual filename
- """
- if cls == "":
- return realfn
- return "virtual:" + cls + ":" + realfn
-
- @classmethod
- def loadDataFull(cls, virtualfn, appends, cfgData):
- """
- Return a complete set of data for fn.
- To do this, we need to parse the file.
- """
-
- (fn, virtual) = cls.virtualfn2realfn(virtualfn)
-
- logger.debug(1, "Parsing %s (full)", fn)
-
- cfgData.setVar("__ONLYFINALISE", virtual or "default")
- bb_data = cls.load_bbfile(fn, appends, cfgData)
- return bb_data[virtual]
-
- @classmethod
- def parse(cls, filename, appends, configdata, caches_array):
- """Parse the specified filename, returning the recipe information"""
- infos = []
- datastores = cls.load_bbfile(filename, appends, configdata)
- depends = []
- for variant, data in sorted(datastores.iteritems(),
- key=lambda i: i[0],
- reverse=True):
- virtualfn = cls.realfn2virtual(filename, variant)
- depends = depends + (data.getVar("__depends", False) or [])
- if depends and not variant:
- data.setVar("__depends", depends)
-
- info_array = []
- for cache_class in caches_array:
- if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
- info = cache_class(filename, data)
- info_array.append(info)
- infos.append((virtualfn, info_array))
-
- return infos
-
- def load(self, filename, appends, configdata):
- """Obtain the recipe information for the specified filename,
- using cached values if available, otherwise parsing.
-
- Note that if it does parse to obtain the info, it will not
- automatically add the information to the cache or to your
- CacheData. Use the add or add_info method to do so after
- running this, or use loadData instead."""
- cached = self.cacheValid(filename, appends)
- if cached:
- infos = []
- # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
- info_array = self.depends_cache[filename]
- for variant in info_array[0].variants:
- virtualfn = self.realfn2virtual(filename, variant)
- infos.append((virtualfn, self.depends_cache[virtualfn]))
- else:
- logger.debug(1, "Parsing %s", filename)
- return False, self.parse(filename, appends, configdata, self.caches_array)
-
- return cached, infos
-
- def loadData(self, fn, appends, cfgData, cacheData):
- """Load the recipe info for the specified filename,
- parsing and adding to the cache if necessary, and adding
- the recipe information to the supplied CacheData instance."""
- skipped, virtuals = 0, 0
-
- cached, infos = self.load(fn, appends, cfgData)
- for virtualfn, info_array in infos:
- if info_array[0].skipped:
- logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
- skipped += 1
- else:
- self.add_info(virtualfn, info_array, cacheData, not cached)
- virtuals += 1
-
- return cached, skipped, virtuals
-
- def cacheValid(self, fn, appends):
- """
- Is the cache valid for fn?
- Fast version, no timestamps checked.
- """
- if fn not in self.checked:
- self.cacheValidUpdate(fn, appends)
-
- # Is cache enabled?
- if not self.has_cache:
- return False
- if fn in self.clean:
- return True
- return False
-
- def cacheValidUpdate(self, fn, appends):
- """
- Is the cache valid for fn?
- Make thorough (slower) checks including timestamps.
- """
- # Is cache enabled?
- if not self.has_cache:
- return False
-
- self.checked.add(fn)
-
- # File isn't in depends_cache
- if not fn in self.depends_cache:
- logger.debug(2, "Cache: %s is not cached", fn)
- return False
-
- mtime = bb.parse.cached_mtime_noerror(fn)
-
- # Check file still exists
- if mtime == 0:
- logger.debug(2, "Cache: %s no longer exists", fn)
- self.remove(fn)
- return False
-
- info_array = self.depends_cache[fn]
- # Check the file's timestamp
- if mtime != info_array[0].timestamp:
- logger.debug(2, "Cache: %s changed", fn)
- self.remove(fn)
- return False
-
- # Check dependencies are still valid
- depends = info_array[0].file_depends
- if depends:
- for f, old_mtime in depends:
- fmtime = bb.parse.cached_mtime_noerror(f)
- # Check if file still exists
- if old_mtime != 0 and fmtime == 0:
- logger.debug(2, "Cache: %s's dependency %s was removed",
- fn, f)
- self.remove(fn)
- return False
-
- if (fmtime != old_mtime):
- logger.debug(2, "Cache: %s's dependency %s changed",
- fn, f)
- self.remove(fn)
- return False
-
- if hasattr(info_array[0], 'file_checksums'):
- for _, fl in info_array[0].file_checksums.items():
- fl = fl.strip()
- while fl:
- # A .split() would be simpler but means spaces or colons in filenames would break
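- # e.g. fl = "/path/to/a.patch:True /path/to/b.patch:False"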
- a = fl.find(":True")
- b = fl.find(":False")
- if ((a < 0) and b) or ((b > 0) and (b < a)):
- f = fl[:b+6]
- fl = fl[b+7:]
- elif ((b < 0) and a) or ((a > 0) and (a < b)):
- f = fl[:a+5]
- fl = fl[a+6:]
- else:
- break
- fl = fl.strip()
- if "*" in f:
- continue
- f, exist = f.split(":")
- if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
- logger.debug(2, "Cache: %s's file checksum list file %s changed",
- fn, f)
- self.remove(fn)
- return False
-
- if appends != info_array[0].appends:
- logger.debug(2, "Cache: appends for %s changed", fn)
- logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
- self.remove(fn)
- return False
-
- invalid = False
- for cls in info_array[0].variants:
- virtualfn = self.realfn2virtual(fn, cls)
- self.clean.add(virtualfn)
- if virtualfn not in self.depends_cache:
- logger.debug(2, "Cache: %s is not cached", virtualfn)
- invalid = True
-
- # If any one of the variants is not present, mark as invalid for all
- if invalid:
- for cls in info_array[0].variants:
- virtualfn = self.realfn2virtual(fn, cls)
- if virtualfn in self.clean:
- logger.debug(2, "Cache: Removing %s from cache", virtualfn)
- self.clean.remove(virtualfn)
- if fn in self.clean:
- logger.debug(2, "Cache: Marking %s as not clean", fn)
- self.clean.remove(fn)
- return False
-
- self.clean.add(fn)
- return True
-
- def remove(self, fn):
- """
- Remove a fn from the cache
- Called from the parser in error cases
- """
- if fn in self.depends_cache:
- logger.debug(1, "Removing %s from cache", fn)
- del self.depends_cache[fn]
- if fn in self.clean:
- logger.debug(1, "Marking %s as unclean", fn)
- self.clean.remove(fn)
-
- def sync(self):
- """
- Save the cache
- Called from the parser when complete (or exiting)
- """
-
- if not self.has_cache:
- return
-
- if self.cacheclean:
- logger.debug(2, "Cache is clean, not saving.")
- return
-
- file_dict = {}
- pickler_dict = {}
- for cache_class in self.caches_array:
- if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
- cache_class_name = cache_class.__name__
- cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
- file_dict[cache_class_name] = open(cachefile, "wb")
- pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL)
-
- pickler_dict['CoreRecipeInfo'].dump(__cache_version__)
- pickler_dict['CoreRecipeInfo'].dump(bb.__version__)
-
- try:
- for key, info_array in self.depends_cache.iteritems():
- for info in info_array:
- if isinstance(info, RecipeInfoCommon):
- cache_class_name = info.__class__.__name__
- pickler_dict[cache_class_name].dump(key)
- pickler_dict[cache_class_name].dump(info)
- finally:
- for cache_class in self.caches_array:
- if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
- cache_class_name = cache_class.__name__
- file_dict[cache_class_name].close()
-
- del self.depends_cache
-
- @staticmethod
- def mtime(cachefile):
- return bb.parse.cached_mtime_noerror(cachefile)
-
- def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
- if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
- cacheData.add_from_recipeinfo(filename, info_array)
-
- if watcher:
- watcher(info_array[0].file_depends)
-
- if not self.has_cache:
- return
-
- if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
- if parsed:
- self.cacheclean = False
- self.depends_cache[filename] = info_array
-
- def add(self, file_name, data, cacheData, parsed=None):
- """
- Save data we need into the cache
- """
-
- realfn = self.virtualfn2realfn(file_name)[0]
-
- info_array = []
- for cache_class in self.caches_array:
- if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
- info_array.append(cache_class(realfn, data))
- self.add_info(file_name, info_array, cacheData, parsed)
-
- @staticmethod
- def load_bbfile(bbfile, appends, config):
- """
- Load and parse one .bb build file
- Return the parsed datastores, keyed by variant name ("" for the base recipe)
- """
- chdir_back = False
-
- from bb import parse
-
- # expand tmpdir to include this topdir
- config.setVar('TMPDIR', config.getVar('TMPDIR', True) or "")
- bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
- oldpath = os.path.abspath(os.getcwd())
- parse.cached_mtime_noerror(bbfile_loc)
- bb_data = config.createCopy()
- # The ConfHandler first looks if there is a TOPDIR and if not
- # then it would call getcwd().
- # Previously, we chdir()ed to bbfile_loc, called the handler
- # and finally chdir()ed back, a couple of thousand times. We now
- # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
- if not bb_data.getVar('TOPDIR', False):
- chdir_back = True
- bb_data.setVar('TOPDIR', bbfile_loc)
- try:
- if appends:
- bb_data.setVar('__BBAPPEND', " ".join(appends))
- bb_data = parse.handle(bbfile, bb_data)
- if chdir_back:
- os.chdir(oldpath)
- return bb_data
- except:
- if chdir_back:
- os.chdir(oldpath)
- raise
-
-
-def init(cooker):
- """
- The Objective: Cache the minimum amount of data possible yet get to the
- stage of building packages (i.e. tryBuild) without reparsing any .bb files.
-
- To do this, we intercept getVar calls and only cache the variables we see
- being accessed. We rely on the cache getVar calls being made for all
- variables bitbake might need to use to reach this stage. For each cached
- file we need to track:
-
- * Its mtime
- * The mtimes of all its dependencies
- * Whether it caused a parse.SkipRecipe exception
-
- Files causing parsing errors are evicted from the cache.
-
- """
- return Cache(cooker.configuration.data, cooker.configuration.data_hash)
-
-
-class CacheData(object):
- """
- The data structures we compile from the cached data
- """
-
- def __init__(self, caches_array):
- self.caches_array = caches_array
- for cache_class in self.caches_array:
- if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
- cache_class.init_cacheData(self)
-
- # Direct cache variables
- self.task_queues = {}
- self.preferred = {}
- self.tasks = {}
- # Indirect Cache variables (set elsewhere)
- self.ignored_dependencies = []
- self.world_target = set()
- self.bbfile_priority = {}
-
- def add_from_recipeinfo(self, fn, info_array):
- for info in info_array:
- info.add_cacheData(self, fn)
-
-class MultiProcessCache(object):
- """
- BitBake multi-process cache implementation
-
- Used by the codeparser & file checksum caches
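-
- Each process stores new entries in cachedata_extras and writes them to
- a private "<cachefile>-<pid>" file via save_extras(); the main process
- later folds those files back into the shared cache with save_merge().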
- """
-
- def __init__(self):
- self.cachefile = None
- self.cachedata = self.create_cachedata()
- self.cachedata_extras = self.create_cachedata()
-
- def init_cache(self, d, cache_file_name=None):
- cachedir = (d.getVar("PERSISTENT_DIR", True) or
- d.getVar("CACHE", True))
- if cachedir in [None, '']:
- return
- bb.utils.mkdirhier(cachedir)
- self.cachefile = os.path.join(cachedir,
- cache_file_name or self.__class__.cache_file_name)
- logger.debug(1, "Using cache in '%s'", self.cachefile)
-
- glf = bb.utils.lockfile(self.cachefile + ".lock")
-
- try:
- with open(self.cachefile, "rb") as f:
- p = pickle.Unpickler(f)
- data, version = p.load()
- except:
- bb.utils.unlockfile(glf)
- return
-
- bb.utils.unlockfile(glf)
-
- if version != self.__class__.CACHE_VERSION:
- return
-
- self.cachedata = data
-
- def create_cachedata(self):
- data = [{}]
- return data
-
- def save_extras(self):
- if not self.cachefile:
- return
-
- glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)
-
- i = os.getpid()
- lf = None
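- # Find a free per-process slot: lock "<cachefile>.lock.<i>" and make
- # sure "<cachefile>-<i>" doesn't already exist, bumping i until both hold.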
- while not lf:
- lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
- if not lf or os.path.exists(self.cachefile + "-" + str(i)):
- if lf:
- bb.utils.unlockfile(lf)
- lf = None
- i = i + 1
- continue
-
- with open(self.cachefile + "-" + str(i), "wb") as f:
- p = pickle.Pickler(f, -1)
- p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])
-
- bb.utils.unlockfile(lf)
- bb.utils.unlockfile(glf)
-
- def merge_data(self, source, dest):
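- # Only copy entries the destination doesn't already have; existing ones win.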
- for j in range(0,len(dest)):
- for h in source[j]:
- if h not in dest[j]:
- dest[j][h] = source[j][h]
-
- def save_merge(self):
- if not self.cachefile:
- return
-
- glf = bb.utils.lockfile(self.cachefile + ".lock")
-
- data = self.cachedata
-
- for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
- f = os.path.join(os.path.dirname(self.cachefile), f)
- try:
- with open(f, "rb") as fd:
- p = pickle.Unpickler(fd)
- extradata, version = p.load()
- except (IOError, EOFError):
- os.unlink(f)
- continue
-
- if version != self.__class__.CACHE_VERSION:
- os.unlink(f)
- continue
-
- self.merge_data(extradata, data)
- os.unlink(f)
-
- with open(self.cachefile, "wb") as f:
- p = pickle.Pickler(f, -1)
- p.dump([data, self.__class__.CACHE_VERSION])
-
- bb.utils.unlockfile(glf)
-
diff --git a/yocto-poky/bitbake/lib/bb/cache_extra.py b/yocto-poky/bitbake/lib/bb/cache_extra.py
deleted file mode 100644
index 83f4959d6..000000000
--- a/yocto-poky/bitbake/lib/bb/cache_extra.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# All extra RecipeInfo classes are defined in this file. Currently,
-# only Hob (the Image Creator) requests some extra fields, so
-# HobRecipeInfo is defined. It is named HobRecipeInfo because it
-# was introduced by 'hob'. Users can also introduce other
-# RecipeInfo classes or simply use the ones already defined.
-# Newly defined extra RecipeInfo classes are dynamically loaded and
-# used for loading/saving the extra cache fields.
-
-# Copyright (C) 2011, Intel Corporation. All rights reserved.
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-from bb.cache import RecipeInfoCommon
-
-class HobRecipeInfo(RecipeInfoCommon):
- __slots__ = ()
-
- classname = "HobRecipeInfo"
- # override this member with the correct data cache file name,
- # e.g. bb_cache.dat or bb_extracache_hob.dat
- cachefile = "bb_extracache_" + classname + ".dat"
-
- # override this member with the list of extra cache fields
- # that this class will provide
- cachefields = ['summary', 'license', 'section',
- 'description', 'homepage', 'bugtracker',
- 'prevision', 'files_info']
-
- def __init__(self, filename, metadata):
-
- self.summary = self.getvar('SUMMARY', metadata)
- self.license = self.getvar('LICENSE', metadata)
- self.section = self.getvar('SECTION', metadata)
- self.description = self.getvar('DESCRIPTION', metadata)
- self.homepage = self.getvar('HOMEPAGE', metadata)
- self.bugtracker = self.getvar('BUGTRACKER', metadata)
- self.prevision = self.getvar('PR', metadata)
- self.files_info = self.getvar('FILES_INFO', metadata)
-
- @classmethod
- def init_cacheData(cls, cachedata):
- # CacheData in Hob RecipeInfo Class
- cachedata.summary = {}
- cachedata.license = {}
- cachedata.section = {}
- cachedata.description = {}
- cachedata.homepage = {}
- cachedata.bugtracker = {}
- cachedata.prevision = {}
- cachedata.files_info = {}
-
- def add_cacheData(self, cachedata, fn):
- cachedata.summary[fn] = self.summary
- cachedata.license[fn] = self.license
- cachedata.section[fn] = self.section
- cachedata.description[fn] = self.description
- cachedata.homepage[fn] = self.homepage
- cachedata.bugtracker[fn] = self.bugtracker
- cachedata.prevision[fn] = self.prevision
- cachedata.files_info[fn] = self.files_info
diff --git a/yocto-poky/bitbake/lib/bb/checksum.py b/yocto-poky/bitbake/lib/bb/checksum.py
deleted file mode 100644
index 2ec964d73..000000000
--- a/yocto-poky/bitbake/lib/bb/checksum.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# Local file checksum cache implementation
-#
-# Copyright (C) 2012 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import glob
-import operator
-import os
-import stat
-import bb.utils
-import logging
-from bb.cache import MultiProcessCache
-
-logger = logging.getLogger("BitBake.Cache")
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info("Importing cPickle failed. "
- "Falling back to a very slow implementation.")
-
-
-# mtime cache (non-persistent)
-# based upon the assumption that files do not change during bitbake run
-class FileMtimeCache(object):
- cache = {}
-
- def cached_mtime(self, f):
- if f not in self.cache:
- self.cache[f] = os.stat(f)[stat.ST_MTIME]
- return self.cache[f]
-
- def cached_mtime_noerror(self, f):
- if f not in self.cache:
- try:
- self.cache[f] = os.stat(f)[stat.ST_MTIME]
- except OSError:
- return 0
- return self.cache[f]
-
- def update_mtime(self, f):
- self.cache[f] = os.stat(f)[stat.ST_MTIME]
- return self.cache[f]
-
- def clear(self):
- self.cache.clear()
-
-# Checksum + mtime cache (persistent)
-class FileChecksumCache(MultiProcessCache):
- cache_file_name = "local_file_checksum_cache.dat"
- CACHE_VERSION = 1
-
- def __init__(self):
- self.mtime_cache = FileMtimeCache()
- MultiProcessCache.__init__(self)
-
- def get_checksum(self, f):
- entry = self.cachedata[0].get(f)
- cmtime = self.mtime_cache.cached_mtime(f)
- if entry:
- (mtime, hashval) = entry
- if cmtime == mtime:
- return hashval
- else:
- bb.debug(2, "file %s changed mtime, recompute checksum" % f)
-
- hashval = bb.utils.md5_file(f)
- self.cachedata_extras[0][f] = (cmtime, hashval)
- return hashval
-
- def merge_data(self, source, dest):
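- # When both sides have an entry for the same file, the newer mtime wins.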
- for h in source[0]:
- if h in dest[0]:
- (smtime, _) = source[0][h]
- (dmtime, _) = dest[0][h]
- if smtime > dmtime:
- dest[0][h] = source[0][h]
- else:
- dest[0][h] = source[0][h]
-
- def get_checksums(self, filelist, pn):
- """Get checksums for a list of files"""
-
- def checksum_file(f):
- try:
- checksum = self.get_checksum(f)
- except OSError as e:
- bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
- return None
- return checksum
-
- def checksum_dir(pth):
- # Handle directories recursively
- dirchecksums = []
- for root, dirs, files in os.walk(pth):
- for name in files:
- fullpth = os.path.join(root, name)
- checksum = checksum_file(fullpth)
- if checksum:
- dirchecksums.append((fullpth, checksum))
- return dirchecksums
-
- checksums = []
- for pth in filelist.split():
- exist = pth.split(":")[1]
- if exist == "False":
- continue
- pth = pth.split(":")[0]
- if '*' in pth:
- # Handle globs
- for f in glob.glob(pth):
- if os.path.isdir(f):
- if not os.path.islink(f):
- checksums.extend(checksum_dir(f))
- else:
- checksum = checksum_file(f)
- checksums.append((f, checksum))
- elif os.path.isdir(pth):
- if not os.path.islink(pth):
- checksums.extend(checksum_dir(pth))
- else:
- checksum = checksum_file(pth)
- checksums.append((pth, checksum))
-
- checksums.sort(key=operator.itemgetter(1))
- return checksums
diff --git a/yocto-poky/bitbake/lib/bb/codeparser.py b/yocto-poky/bitbake/lib/bb/codeparser.py
deleted file mode 100644
index 3ee4d5622..000000000
--- a/yocto-poky/bitbake/lib/bb/codeparser.py
+++ /dev/null
@@ -1,436 +0,0 @@
-import ast
-import codegen
-import logging
-import os.path
-import bb.utils, bb.data
-from itertools import chain
-from pysh import pyshyacc, pyshlex, sherrors
-from bb.cache import MultiProcessCache
-
-
-logger = logging.getLogger('BitBake.CodeParser')
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
-
-
-def check_indent(codestr):
- """If the code is indented, add a top level piece of code to 'remove' the indentation"""
-
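- # e.g. check_indent("\n    x = 1") -> "if 1:\n    x = 1"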
- i = 0
- while codestr[i] in ["\n", "\t", " "]:
- i = i + 1
-
- if i == 0:
- return codestr
-
- if codestr[i-1] == "\t" or codestr[i-1] == " ":
- if codestr[0] == "\n":
- # Since we're adding a line, we need to remove one line of any empty padding
- # to ensure line numbers are correct
- codestr = codestr[1:]
- return "if 1:\n" + codestr
-
- return codestr
-
-
-# Pickle, in Python 2.7.3 at least, handles data duplication badly upon
-# pickling and unpickling. Combine this with duplicate objects and things
-# are a mess.
-#
-# When the sets are originally created, python calls intern() on the set keys
-# which significantly improves memory usage. Sadly the pickle/unpickle process
-# doesn't call intern() on the keys and results in the same strings being duplicated
-# in memory. This also means pickle will save the same string multiple times in
-# the cache file.
-#
-# By having shell and python cacheline objects with setstate/getstate, we force
-# the object creation through our own routine where we can call intern (via internSet).
-#
-# We also use hashable frozensets and ensure we use references to these so that
-# duplicates can be removed, both in memory and in the resulting pickled data.
-#
-# By playing these games, the size of the cache file shrinks dramatically
-# meaning faster load times and the reloaded cache files also consume much less
-# memory. Smaller cache files, faster load times and lower memory usage is good.
-#
-# A custom getstate/setstate using tuples is actually worth 15% of the
-# cache size, by avoiding duplication of the attribute names!
-
-class SetCache(object):
- def __init__(self):
- self.setcache = {}
-
- def internSet(self, items):
-
- new = []
- for i in items:
- new.append(intern(i))
- s = frozenset(new)
- if hash(s) in self.setcache:
- return self.setcache[hash(s)]
- self.setcache[hash(s)] = s
- return s
-
-codecache = SetCache()
-
-class pythonCacheLine(object):
- def __init__(self, refs, execs, contains):
- self.refs = codecache.internSet(refs)
- self.execs = codecache.internSet(execs)
- self.contains = {}
- for c in contains:
- self.contains[c] = codecache.internSet(contains[c])
-
- def __getstate__(self):
- return (self.refs, self.execs, self.contains)
-
- def __setstate__(self, state):
- (refs, execs, contains) = state
- self.__init__(refs, execs, contains)
-
- def __hash__(self):
- l = (hash(self.refs), hash(self.execs))
- for c in sorted(self.contains.keys()):
- l = l + (c, hash(self.contains[c]))
- return hash(l)
-
- def __repr__(self):
- return " ".join([str(self.refs), str(self.execs), str(self.contains)])
-
-
-class shellCacheLine(object):
- def __init__(self, execs):
- self.execs = codecache.internSet(execs)
-
- def __getstate__(self):
- return (self.execs)
-
- def __setstate__(self, state):
- (execs) = state
- self.__init__(execs)
-
- def __hash__(self):
- return hash(self.execs)
-
- def __repr__(self):
- return str(self.execs)
-
-class CodeParserCache(MultiProcessCache):
- cache_file_name = "bb_codeparser.dat"
- CACHE_VERSION = 7
-
- def __init__(self):
- MultiProcessCache.__init__(self)
- self.pythoncache = self.cachedata[0]
- self.shellcache = self.cachedata[1]
- self.pythoncacheextras = self.cachedata_extras[0]
- self.shellcacheextras = self.cachedata_extras[1]
-
- # To avoid duplication in the codeparser cache, keep
- # a lookup of hashes of objects we already have
- self.pythoncachelines = {}
- self.shellcachelines = {}
-
- def newPythonCacheLine(self, refs, execs, contains):
- cacheline = pythonCacheLine(refs, execs, contains)
- h = hash(cacheline)
- if h in self.pythoncachelines:
- return self.pythoncachelines[h]
- self.pythoncachelines[h] = cacheline
- return cacheline
-
- def newShellCacheLine(self, execs):
- cacheline = shellCacheLine(execs)
- h = hash(cacheline)
- if h in self.shellcachelines:
- return self.shellcachelines[h]
- self.shellcachelines[h] = cacheline
- return cacheline
-
- def init_cache(self, d):
- # Check if we already have the caches
- if self.pythoncache:
- return
-
- MultiProcessCache.init_cache(self, d)
-
- # cachedata gets re-assigned in the parent
- self.pythoncache = self.cachedata[0]
- self.shellcache = self.cachedata[1]
-
- def create_cachedata(self):
- data = [{}, {}]
- return data
-
-codeparsercache = CodeParserCache()
-
-def parser_cache_init(d):
- codeparsercache.init_cache(d)
-
-def parser_cache_save():
- codeparsercache.save_extras()
-
-def parser_cache_savemerge():
- codeparsercache.save_merge()
-
-Logger = logging.getLoggerClass()
-class BufferedLogger(Logger):
- def __init__(self, name, level=0, target=None):
- Logger.__init__(self, name)
- self.setLevel(level)
- self.buffer = []
- self.target = target
-
- def handle(self, record):
- self.buffer.append(record)
-
- def flush(self):
- for record in self.buffer:
- self.target.handle(record)
- self.buffer = []
-
-class PythonParser():
- getvars = (".getVar", ".appendVar", ".prependVar")
- containsfuncs = ("bb.utils.contains", "base_contains", "bb.utils.contains_any")
- execfuncs = ("bb.build.exec_func", "bb.build.exec_task")
-
- def warn(self, func, arg):
- """Warn about calls of bitbake APIs which pass a non-literal
- argument for the variable name, as we're not able to track such
- a reference.
- """
-
- try:
- funcstr = codegen.to_source(func)
- argstr = codegen.to_source(arg)
- except TypeError:
- self.log.debug(2, 'Failed to convert function and argument to source form')
- else:
- self.log.debug(1, self.unhandled_message % (funcstr, argstr))
-
- def visit_Call(self, node):
- name = self.called_node_name(node.func)
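- # Record literal variable names passed to *.getVar and friends as
- # references; non-literal names can't be tracked, hence warn() below.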
- if name and name.endswith(self.getvars) or name in self.containsfuncs:
- if isinstance(node.args[0], ast.Str):
- varname = node.args[0].s
- if name in self.containsfuncs and isinstance(node.args[1], ast.Str):
- if varname not in self.contains:
- self.contains[varname] = set()
- self.contains[varname].add(node.args[1].s)
- else:
- self.references.add(node.args[0].s)
- else:
- self.warn(node.func, node.args[0])
- elif name and name.endswith(".expand"):
- if isinstance(node.args[0], ast.Str):
- value = node.args[0].s
- d = bb.data.init()
- parser = d.expandWithRefs(value, self.name)
- self.references |= parser.references
- self.execs |= parser.execs
- for varname in parser.contains:
- if varname not in self.contains:
- self.contains[varname] = set()
- self.contains[varname] |= parser.contains[varname]
- elif name in self.execfuncs:
- if isinstance(node.args[0], ast.Str):
- self.var_execs.add(node.args[0].s)
- else:
- self.warn(node.func, node.args[0])
- elif name and isinstance(node.func, (ast.Name, ast.Attribute)):
- self.execs.add(name)
-
- def called_node_name(self, node):
- """Given a called node, return its original string form"""
- components = []
- while node:
- if isinstance(node, ast.Attribute):
- components.append(node.attr)
- node = node.value
- elif isinstance(node, ast.Name):
- components.append(node.id)
- return '.'.join(reversed(components))
- else:
- break
-
- def __init__(self, name, log):
- self.name = name
- self.var_execs = set()
- self.contains = {}
- self.execs = set()
- self.references = set()
- self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, log)
-
- self.unhandled_message = "in call of %s, argument '%s' is not a string literal"
- self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)
-
- def parse_python(self, node, lineno=0, filename="<string>"):
- if not node or not node.strip():
- return
-
- h = hash(str(node))
-
- if h in codeparsercache.pythoncache:
- self.references = set(codeparsercache.pythoncache[h].refs)
- self.execs = set(codeparsercache.pythoncache[h].execs)
- self.contains = {}
- for i in codeparsercache.pythoncache[h].contains:
- self.contains[i] = set(codeparsercache.pythoncache[h].contains[i])
- return
-
- if h in codeparsercache.pythoncacheextras:
- self.references = set(codeparsercache.pythoncacheextras[h].refs)
- self.execs = set(codeparsercache.pythoncacheextras[h].execs)
- self.contains = {}
- for i in codeparsercache.pythoncacheextras[h].contains:
- self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
- return
-
- # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
- node = "\n" * int(lineno) + node
- code = compile(check_indent(str(node)), filename, "exec",
- ast.PyCF_ONLY_AST)
-
- for n in ast.walk(code):
- if n.__class__.__name__ == "Call":
- self.visit_Call(n)
-
- self.execs.update(self.var_execs)
-
- codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains)
-
-class ShellParser():
- def __init__(self, name, log):
- self.funcdefs = set()
- self.allexecs = set()
- self.execs = set()
- self.log = BufferedLogger('BitBake.Data.%s' % name, logging.DEBUG, log)
- self.unhandled_template = "unable to handle non-literal command '%s'"
- self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template)
-
- def parse_shell(self, value):
- """Parse the supplied shell code in a string, returning the external
- commands it executes.
- """
-
- h = hash(str(value))
-
- if h in codeparsercache.shellcache:
- self.execs = set(codeparsercache.shellcache[h].execs)
- return self.execs
-
- if h in codeparsercache.shellcacheextras:
- self.execs = set(codeparsercache.shellcacheextras[h].execs)
- return self.execs
-
- self._parse_shell(value)
- self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)
-
- codeparsercache.shellcacheextras[h] = codeparsercache.newShellCacheLine(self.execs)
-
- return self.execs
-
- def _parse_shell(self, value):
- try:
- tokens, _ = pyshyacc.parse(value, eof=True, debug=False)
- except pyshlex.NeedMore:
- raise sherrors.ShellSyntaxError("Unexpected EOF")
-
- for token in tokens:
- self.process_tokens(token)
-
- def process_tokens(self, tokens):
- """Process a supplied portion of the syntax tree as returned by
- pyshyacc.parse.
- """
-
- def function_definition(value):
- self.funcdefs.add(value.name)
- return [value.body], None
-
- def case_clause(value):
- # Element 0 of each item in the case is the list of patterns, and
- # Element 1 of each item in the case is the list of commands to be
- # executed when that pattern matches.
- words = chain(*[item[0] for item in value.items])
- cmds = chain(*[item[1] for item in value.items])
- return cmds, words
-
- def if_clause(value):
- main = chain(value.cond, value.if_cmds)
- rest = value.else_cmds
- if isinstance(rest, tuple) and rest[0] == "elif":
- return chain(main, if_clause(rest[1]))
- else:
- return chain(main, rest)
-
- def simple_command(value):
- return None, chain(value.words, (assign[1] for assign in value.assigns))
-
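- # Each handler maps a token to (nested tokens to recurse into, words
- # to scan for command names); None means nothing on that side.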
- token_handlers = {
- "and_or": lambda x: ((x.left, x.right), None),
- "async": lambda x: ([x], None),
- "brace_group": lambda x: (x.cmds, None),
- "for_clause": lambda x: (x.cmds, x.items),
- "function_definition": function_definition,
- "if_clause": lambda x: (if_clause(x), None),
- "pipeline": lambda x: (x.commands, None),
- "redirect_list": lambda x: ([x.cmd], None),
- "subshell": lambda x: (x.cmds, None),
- "while_clause": lambda x: (chain(x.condition, x.cmds), None),
- "until_clause": lambda x: (chain(x.condition, x.cmds), None),
- "simple_command": simple_command,
- "case_clause": case_clause,
- }
-
- for token in tokens:
- name, value = token
- try:
- more_tokens, words = token_handlers[name](value)
- except KeyError:
- raise NotImplementedError("Unsupported token type " + name)
-
- if more_tokens:
- self.process_tokens(more_tokens)
-
- if words:
- self.process_words(words)
-
- def process_words(self, words):
- """Process a set of 'words' in pyshyacc parlance, which includes
- extraction of executed commands from $() blocks, as well as grabbing
- the command name argument.
- """
-
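- # e.g. for 'foo `bar` $(baz)', "bar" and "baz" are parsed recursively
- # and "foo" is recorded as the executed command name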
- words = list(words)
- for word in list(words):
- wtree = pyshlex.make_wordtree(word[1])
- for part in wtree:
- if not isinstance(part, list):
- continue
-
- if part[0] in ('`', '$('):
- command = pyshlex.wordtree_as_string(part[1:-1])
- self._parse_shell(command)
-
- if word[0] in ("cmd_name", "cmd_word"):
- if word in words:
- words.remove(word)
-
- usetoken = False
- for word in words:
- if word[0] in ("cmd_name", "cmd_word") or \
- (usetoken and word[0] == "TOKEN"):
- if "=" in word[1]:
- usetoken = True
- continue
-
- cmd = word[1]
- if cmd.startswith("$"):
- self.log.debug(1, self.unhandled_template % cmd)
- elif cmd == "eval":
- command = " ".join(word for _, word in words[1:])
- self._parse_shell(command)
- else:
- self.allexecs.add(cmd)
- break
diff --git a/yocto-poky/bitbake/lib/bb/command.py b/yocto-poky/bitbake/lib/bb/command.py
deleted file mode 100644
index 0559ffc07..000000000
--- a/yocto-poky/bitbake/lib/bb/command.py
+++ /dev/null
@@ -1,474 +0,0 @@
-"""
-BitBake 'Command' module
-
-Provide an interface to interact with the bitbake server through 'commands'
-"""
-
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""
-The bitbake server takes 'commands' from its UI/commandline.
-Commands are either synchronous or asynchronous.
-Async commands return data to the client in the form of events.
-Sync commands must only return data through the function return value
-and must not trigger events, directly or indirectly.
-Commands are queued in a CommandQueue
-"""
-
-import bb.event
-import bb.cooker
-
-class CommandCompleted(bb.event.Event):
- pass
-
-class CommandExit(bb.event.Event):
- def __init__(self, exitcode):
- bb.event.Event.__init__(self)
- self.exitcode = int(exitcode)
-
-class CommandFailed(CommandExit):
- def __init__(self, message):
- self.error = message
- CommandExit.__init__(self, 1)
-
-class CommandError(Exception):
- pass
-
-class Command:
- """
- A queue of asynchronous commands for bitbake
- """
- def __init__(self, cooker):
- self.cooker = cooker
- self.cmds_sync = CommandsSync()
- self.cmds_async = CommandsAsync()
-
- # FIXME Add lock for this
- self.currentAsyncCommand = None
-
- def runCommand(self, commandline, ro_only = False):
- command = commandline.pop(0)
- if hasattr(CommandsSync, command):
- # Can run synchronous commands straight away
- command_method = getattr(self.cmds_sync, command)
- if ro_only:
- if not getattr(command_method, 'readonly', False):
- return None, "Not able to execute non-readonly commands in readonly mode"
- try:
- if getattr(command_method, 'needconfig', False):
- self.cooker.updateCacheSync()
- result = command_method(self, commandline)
- except CommandError as exc:
- return None, exc.args[0]
- except (Exception, SystemExit):
- import traceback
- return None, traceback.format_exc()
- else:
- return result, None
- if self.currentAsyncCommand is not None:
- return None, "Busy (%s in progress)" % self.currentAsyncCommand[0]
- if command not in CommandsAsync.__dict__:
- return None, "No such command"
- self.currentAsyncCommand = (command, commandline)
- self.cooker.configuration.server_register_idlecallback(self.cooker.runCommands, self.cooker)
- return True, None
-
- def runAsyncCommand(self):
- try:
- if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
- # updateCache will trigger a shutdown of the parser
- # and then raise BBHandledException triggering an exit
- self.cooker.updateCache()
- return False
- if self.currentAsyncCommand is not None:
- (command, options) = self.currentAsyncCommand
- commandmethod = getattr(CommandsAsync, command)
- needcache = getattr(commandmethod, "needcache")
- if needcache and self.cooker.state != bb.cooker.state.running:
- self.cooker.updateCache()
- return True
- else:
- commandmethod(self.cmds_async, self, options)
- return False
- else:
- return False
- except KeyboardInterrupt as exc:
- self.finishAsyncCommand("Interrupted")
- return False
- except SystemExit as exc:
- arg = exc.args[0]
- if isinstance(arg, basestring):
- self.finishAsyncCommand(arg)
- else:
- self.finishAsyncCommand("Exited with %s" % arg)
- return False
- except Exception as exc:
- import traceback
- if isinstance(exc, bb.BBHandledException):
- self.finishAsyncCommand("")
- else:
- self.finishAsyncCommand(traceback.format_exc())
- return False
-
- def finishAsyncCommand(self, msg=None, code=None):
- if msg or msg == "":
- bb.event.fire(CommandFailed(msg), self.cooker.expanded_data)
- elif code:
- bb.event.fire(CommandExit(code), self.cooker.expanded_data)
- else:
- bb.event.fire(CommandCompleted(), self.cooker.expanded_data)
- self.currentAsyncCommand = None
- self.cooker.finishcommand()
-
-class CommandsSync:
- """
- A class of synchronous commands
- These should run quickly so as not to hurt interactive performance.
- These must not influence any running asynchronous command.
- """
-
- def stateShutdown(self, command, params):
- """
- Trigger cooker 'shutdown' mode
- """
- command.cooker.shutdown(False)
-
- def stateForceShutdown(self, command, params):
- """
- Stop the cooker
- """
- command.cooker.shutdown(True)
-
- def getAllKeysWithFlags(self, command, params):
- """
- Returns a dump of the global state. Call with
- variable flags to be retrieved as params.
- """
- flaglist = params[0]
- return command.cooker.getAllKeysWithFlags(flaglist)
- getAllKeysWithFlags.readonly = True
-
- def getVariable(self, command, params):
- """
- Read the value of a variable from data
- """
- varname = params[0]
- expand = True
- if len(params) > 1:
- expand = (params[1] == "True")
-
- return command.cooker.data.getVar(varname, expand)
- getVariable.readonly = True
-
- def setVariable(self, command, params):
- """
- Set the value of variable in data
- """
- varname = params[0]
- value = str(params[1])
- command.cooker.data.setVar(varname, value)
-
- def getSetVariable(self, command, params):
- """
- Read the value of a variable from data and set it into the datastore
- which effectively expands and locks the value.
- """
- varname = params[0]
- result = self.getVariable(command, params)
- command.cooker.data.setVar(varname, result)
- return result
-
- def setConfig(self, command, params):
- """
- Set the value of variable in configuration
- """
- varname = params[0]
- value = str(params[1])
- setattr(command.cooker.configuration, varname, value)
-
- def enableDataTracking(self, command, params):
- """
- Enable history tracking for variables
- """
- command.cooker.enableDataTracking()
-
- def disableDataTracking(self, command, params):
- """
- Disable history tracking for variables
- """
- command.cooker.disableDataTracking()
-
- def setPrePostConfFiles(self, command, params):
- prefiles = params[0].split()
- postfiles = params[1].split()
- command.cooker.configuration.prefile = prefiles
- command.cooker.configuration.postfile = postfiles
- setPrePostConfFiles.needconfig = False
-
- def getCpuCount(self, command, params):
- """
- Get the CPU count on the bitbake server
- """
- return bb.utils.cpu_count()
- getCpuCount.readonly = True
- getCpuCount.needconfig = False
-
- def matchFile(self, command, params):
- fMatch = params[0]
- return command.cooker.matchFile(fMatch)
- matchFile.needconfig = False
-
- def generateNewImage(self, command, params):
- image = params[0]
- base_image = params[1]
- package_queue = params[2]
- timestamp = params[3]
- description = params[4]
- return command.cooker.generateNewImage(image, base_image,
- package_queue, timestamp, description)
-
- def ensureDir(self, command, params):
- directory = params[0]
- bb.utils.mkdirhier(directory)
- ensureDir.needconfig = False
-
- def setVarFile(self, command, params):
- """
- Save a variable in a file; used for saving in a configuration file
- """
- var = params[0]
- val = params[1]
- default_file = params[2]
- op = params[3]
- command.cooker.modifyConfigurationVar(var, val, default_file, op)
- setVarFile.needconfig = False
-
- def removeVarFile(self, command, params):
- """
- Remove a variable declaration from a file
- """
- var = params[0]
- command.cooker.removeConfigurationVar(var)
- removeVarFile.needconfig = False
-
- def createConfigFile(self, command, params):
- """
- Create an extra configuration file
- """
- name = params[0]
- command.cooker.createConfigFile(name)
- createConfigFile.needconfig = False
-
- def setEventMask(self, command, params):
- handlerNum = params[0]
- llevel = params[1]
- debug_domains = params[2]
- mask = params[3]
- return bb.event.set_UIHmask(handlerNum, llevel, debug_domains, mask)
- setEventMask.needconfig = False
- setEventMask.readonly = True
-
- def setFeatures(self, command, params):
- """
- Set the cooker features to include the passed list of features
- """
- features = params[0]
- command.cooker.setFeatures(features)
- setFeatures.needconfig = False
- # although we change the internal state of the cooker, this is transparent since
- # we always take and leave the cooker in state.initial
- setFeatures.readonly = True
-
- def updateConfig(self, command, params):
- options = params[0]
- environment = params[1]
- command.cooker.updateConfigOpts(options, environment)
- updateConfig.needconfig = False
-
-class CommandsAsync:
- """
- A class of asynchronous commands
- These functions communicate via generated events.
- Any function that requires metadata parsing should be here.
- """
-
- def buildFile(self, command, params):
- """
- Build a single specified .bb file
- """
- bfile = params[0]
- task = params[1]
-
- command.cooker.buildFile(bfile, task)
- buildFile.needcache = False
-
- def buildTargets(self, command, params):
- """
- Build a set of targets
- """
- pkgs_to_build = params[0]
- task = params[1]
-
- command.cooker.buildTargets(pkgs_to_build, task)
- buildTargets.needcache = True
-
- def generateDepTreeEvent(self, command, params):
- """
- Generate an event containing the dependency information
- """
- pkgs_to_build = params[0]
- task = params[1]
-
- command.cooker.generateDepTreeEvent(pkgs_to_build, task)
- command.finishAsyncCommand()
- generateDepTreeEvent.needcache = True
-
- def generateDotGraph(self, command, params):
- """
- Dump dependency information to disk as .dot files
- """
- pkgs_to_build = params[0]
- task = params[1]
-
- command.cooker.generateDotGraphFiles(pkgs_to_build, task)
- command.finishAsyncCommand()
- generateDotGraph.needcache = True
-
- def generateTargetsTree(self, command, params):
- """
- Generate a tree of buildable targets.
- If klass is provided ensure all recipes that inherit the class are
- included in the package list.
- If pkg_list provided use that list (plus any extras brought in by
- klass) rather than generating a tree for all packages.
- """
- klass = params[0]
- pkg_list = params[1]
-
- command.cooker.generateTargetsTree(klass, pkg_list)
- command.finishAsyncCommand()
- generateTargetsTree.needcache = True
-
- def findCoreBaseFiles(self, command, params):
- """
- Find certain files in the COREBASE directory, e.g. layers
- """
- subdir = params[0]
- filename = params[1]
-
- command.cooker.findCoreBaseFiles(subdir, filename)
- command.finishAsyncCommand()
- findCoreBaseFiles.needcache = False
-
- def findConfigFiles(self, command, params):
- """
- Find config files which provide appropriate values
- for the passed configuration variable, e.g. MACHINE
- """
- varname = params[0]
-
- command.cooker.findConfigFiles(varname)
- command.finishAsyncCommand()
- findConfigFiles.needcache = False
-
- def findFilesMatchingInDir(self, command, params):
- """
- Find implementation files matching the specified pattern
- in the requested subdirectory of a BBPATH
- """
- pattern = params[0]
- directory = params[1]
-
- command.cooker.findFilesMatchingInDir(pattern, directory)
- command.finishAsyncCommand()
- findFilesMatchingInDir.needcache = False
-
- def findConfigFilePath(self, command, params):
- """
- Find the path of the requested configuration file
- """
- configfile = params[0]
-
- command.cooker.findConfigFilePath(configfile)
- command.finishAsyncCommand()
- findConfigFilePath.needcache = False
-
- def showVersions(self, command, params):
- """
- Show the currently selected versions
- """
- command.cooker.showVersions()
- command.finishAsyncCommand()
- showVersions.needcache = True
-
- def showEnvironmentTarget(self, command, params):
- """
- Print the environment of a target recipe
- (needs the cache to work out which recipe to use)
- """
- pkg = params[0]
-
- command.cooker.showEnvironment(None, pkg)
- command.finishAsyncCommand()
- showEnvironmentTarget.needcache = True
-
- def showEnvironment(self, command, params):
- """
- Print the standard environment
- or if specified the environment for a specified recipe
- """
- bfile = params[0]
-
- command.cooker.showEnvironment(bfile)
- command.finishAsyncCommand()
- showEnvironment.needcache = False
-
- def parseFiles(self, command, params):
- """
- Parse the .bb files
- """
- command.cooker.updateCache()
- command.finishAsyncCommand()
- parseFiles.needcache = True
-
- def compareRevisions(self, command, params):
- """
- Compare the source revisions recorded by the fetcher with the current
- upstream revisions, signalling any difference via exit code 1
- """
- if bb.fetch.fetcher_compare_revisions(command.cooker.data):
- command.finishAsyncCommand(code=1)
- else:
- command.finishAsyncCommand()
- compareRevisions.needcache = True
-
- def triggerEvent(self, command, params):
- """
- Trigger a certain event
- """
- event = params[0]
- bb.event.fire(eval(event), command.cooker.data)
- command.currentAsyncCommand = None
- triggerEvent.needcache = False
-
- def resetCooker(self, command, params):
- """
- Reset the cooker to its initial state, thus forcing a reparse for
- any async command that has the needcache property set to True
- """
- command.cooker.reset()
- command.finishAsyncCommand()
- resetCooker.needcache = False
-
diff --git a/yocto-poky/bitbake/lib/bb/compat.py b/yocto-poky/bitbake/lib/bb/compat.py
deleted file mode 100644
index de1923d28..000000000
--- a/yocto-poky/bitbake/lib/bb/compat.py
+++ /dev/null
@@ -1,6 +0,0 @@
-"""Code pulled from future python versions, here for compatibility"""
-
-from collections import MutableMapping, KeysView, ValuesView, ItemsView, OrderedDict
-from functools import total_ordering
-
-
diff --git a/yocto-poky/bitbake/lib/bb/cooker.py b/yocto-poky/bitbake/lib/bb/cooker.py
deleted file mode 100644
index 9b565fc37..000000000
--- a/yocto-poky/bitbake/lib/bb/cooker.py
+++ /dev/null
@@ -1,2196 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
-# Copyright (C) 2005 Holger Hans Peter Freyther
-# Copyright (C) 2005 ROAD GmbH
-# Copyright (C) 2006 - 2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-from __future__ import print_function
-import sys, os, glob, os.path, re, time
-import atexit
-import itertools
-import logging
-import multiprocessing
-import sre_constants
-import threading
-from cStringIO import StringIO
-from contextlib import closing
-from functools import wraps
-from collections import defaultdict
-import bb, bb.exceptions, bb.command
-from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
-import Queue
-import signal
-import subprocess
-import errno
-import prserv.serv
-import pyinotify
-
-logger = logging.getLogger("BitBake")
-collectlog = logging.getLogger("BitBake.Collection")
-buildlog = logging.getLogger("BitBake.Build")
-parselog = logging.getLogger("BitBake.Parsing")
-providerlog = logging.getLogger("BitBake.Provider")
-
-class NoSpecificMatch(bb.BBHandledException):
- """
- Exception raised when no or multiple file matches are found
- """
-
-class NothingToBuild(Exception):
- """
- Exception raised when there is nothing to build
- """
-
-class CollectionError(bb.BBHandledException):
- """
- Exception raised when layer configuration is incorrect
- """
-
-class state:
- initial, parsing, running, shutdown, forceshutdown, stopped, error = range(7)
-
- @classmethod
- def get_name(cls, code):
- for name in dir(cls):
- value = getattr(cls, name)
- if type(value) == type(cls.initial) and value == code:
- return name
- raise ValueError("Invalid status code: %s" % code)
-
-
-class SkippedPackage:
- def __init__(self, info = None, reason = None):
- self.pn = None
- self.skipreason = None
- self.provides = None
- self.rprovides = None
-
- if info:
- self.pn = info.pn
- self.skipreason = info.skipreason
- self.provides = info.provides
- self.rprovides = info.rprovides
- elif reason:
- self.skipreason = reason
-
-
-class CookerFeatures(object):
- _feature_list = [HOB_EXTRA_CACHES, SEND_DEPENDS_TREE, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(4)
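- # binds HOB_EXTRA_CACHES = 0 ... SEND_SANITYEVENTS = 3 while also keeping
- # the list of valid feature values for the check in setFeature()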
-
- def __init__(self):
- self._features=set()
-
- def setFeature(self, f):
- # validate we got a request for a feature we support
- if f not in CookerFeatures._feature_list:
- return
- self._features.add(f)
-
- def __contains__(self, f):
- return f in self._features
-
- def __iter__(self):
- return self._features.__iter__()
-
- def next(self):
- return self._features.next()
-
-
-#============================================================================#
-# BBCooker
-#============================================================================#
-class BBCooker:
- """
- Manages one bitbake build run
- """
-
- def __init__(self, configuration, featureSet=None):
- self.recipecache = None
- self.skiplist = {}
- self.featureset = CookerFeatures()
- if featureSet:
- for f in featureSet:
- self.featureset.setFeature(f)
-
- self.configuration = configuration
-
- self.configwatcher = pyinotify.WatchManager()
- self.configwatcher.bbseen = []
- self.configwatcher.bbwatchedfiles = []
- self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
- self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
- pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
- pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
- self.watcher = pyinotify.WatchManager()
- self.watcher.bbseen = []
- self.watcher.bbwatchedfiles = []
- self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
-
- # If being called by something like tinfoil, we need to clean cached data
- # which may now be invalid
- bb.parse.__mtime_cache = {}
- bb.parse.BBHandler.cached_statements = {}
-
- self.initConfigurationData()
-
- self.inotify_modified_files = []
-
- def _process_inotify_updates(server, notifier_list, abort):
- for n in notifier_list:
- if n.check_events(timeout=0):
- # read notified events and enqeue them
- n.read_events()
- n.process_events()
- return 1.0
-
- self.configuration.server_register_idlecallback(_process_inotify_updates, [self.confignotifier, self.notifier])
-
- self.baseconfig_valid = True
- self.parsecache_valid = False
-
- # Take a lock so only one copy of bitbake can run against a given build
- # directory at a time
- if not self.lockBitbake():
- bb.fatal("Only one copy of bitbake should be run against a build directory")
- try:
- self.lock.seek(0)
- self.lock.truncate()
- if len(configuration.interface) >= 2:
- self.lock.write("%s:%s\n" % (configuration.interface[0], configuration.interface[1]))
- self.lock.flush()
- except:
- pass
-
- # TOSTOP must not be set or our children will hang when they output
- fd = sys.stdout.fileno()
- if os.isatty(fd):
- import termios
- tcattr = termios.tcgetattr(fd)
- if tcattr[3] & termios.TOSTOP:
- buildlog.info("The terminal had the TOSTOP bit set, clearing...")
- tcattr[3] = tcattr[3] & ~termios.TOSTOP
- termios.tcsetattr(fd, termios.TCSANOW, tcattr)
-
- self.command = bb.command.Command(self)
- self.state = state.initial
-
- self.parser = None
-
- signal.signal(signal.SIGTERM, self.sigterm_exception)
- # Let SIGHUP exit as SIGTERM
- signal.signal(signal.SIGHUP, self.sigterm_exception)
-
- def config_notifications(self, event):
- if not event.pathname in self.configwatcher.bbwatchedfiles:
- return
- if not event.pathname in self.inotify_modified_files:
- self.inotify_modified_files.append(event.pathname)
- self.baseconfig_valid = False
-
- def notifications(self, event):
- if not event.pathname in self.inotify_modified_files:
- self.inotify_modified_files.append(event.pathname)
- self.parsecache_valid = False
-
- def add_filewatch(self, deps, watcher=None):
- if not watcher:
- watcher = self.watcher
- for i in deps:
- watcher.bbwatchedfiles.append(i[0])
- f = os.path.dirname(i[0])
- if f in watcher.bbseen:
- continue
- watcher.bbseen.append(f)
- watchtarget = None
- while True:
- # We try to add watches for files that don't exist but which, if they
- # did exist, would influence the parser. The parent directory of these
- # files may not exist either, in which case we need to watch the nearest
- # parent that does exist for changes.
- try:
- watcher.add_watch(f, self.watchmask, quiet=False)
- if watchtarget:
- watcher.bbwatchedfiles.append(watchtarget)
- break
- except pyinotify.WatchManagerError as e:
- if 'ENOENT' in str(e):
- watchtarget = f
- f = os.path.dirname(f)
- if f in watcher.bbseen:
- break
- watcher.bbseen.append(f)
- continue
- if 'ENOSPC' in str(e):
- providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
- providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
- providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
- providerlog.error("Root privilege is required to modify max_user_watches.")
- raise
-
- def sigterm_exception(self, signum, stackframe):
- if signum == signal.SIGTERM:
- bb.warn("Cooker received SIGTERM, shutting down...")
- elif signum == signal.SIGHUP:
- bb.warn("Cooker received SIGHUP, shutting down...")
- self.state = state.forceshutdown
-
- def setFeatures(self, features):
- # we only accept a new feature set if we're in state initial, so we can reset without problems
- if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
- raise Exception("Illegal state for feature set change")
- original_featureset = list(self.featureset)
- for feature in features:
- self.featureset.setFeature(feature)
- bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
- if (original_featureset != list(self.featureset)) and self.state != state.error:
- self.reset()
-
- def initConfigurationData(self):
-
- self.state = state.initial
- self.caches_array = []
-
- # Need to preserve BB_CONSOLELOG over resets
- consolelog = None
- if hasattr(self, "data"):
- consolelog = self.data.getVar("BB_CONSOLELOG", True)
-
- if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
- self.enableDataTracking()
-
- all_extra_cache_names = []
- # We hardcode all known cache types in a single place, here.
- if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
- all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")
-
- caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names
-
- # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
- # This is the entry point, no further check needed!
- for var in caches_name_array:
- try:
- module_name, cache_name = var.split(':')
- module = __import__(module_name, fromlist=(cache_name,))
- self.caches_array.append(getattr(module, cache_name))
- except ImportError as exc:
- logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
- sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)
-
- self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
- self.databuilder.parseBaseConfiguration()
- self.data = self.databuilder.data
- self.data_hash = self.databuilder.data_hash
-
- if consolelog:
- self.data.setVar("BB_CONSOLELOG", consolelog)
-
- # we log all events to a file if so directed
- if self.configuration.writeeventlog:
- import json, pickle
- DEFAULT_EVENTFILE = self.configuration.writeeventlog
- class EventLogWriteHandler():
-
- class EventWriter():
- def __init__(self, cooker):
- self.file_inited = None
- self.cooker = cooker
- self.event_queue = []
-
- def init_file(self):
- try:
- # delete the old log
- os.remove(DEFAULT_EVENTFILE)
- except:
- pass
-
- # write current configuration data
- with open(DEFAULT_EVENTFILE, "w") as f:
- f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
-
- def write_event(self, event):
- with open(DEFAULT_EVENTFILE, "a") as f:
- try:
- f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) }))
- except Exception as e:
- import traceback
-                                print(e, traceback.format_exc())
-
-
- def send(self, event):
- event_class = event.__module__ + "." + event.__class__.__name__
-
- # init on bb.event.BuildStarted
- if self.file_inited is None:
- if event_class == "bb.event.BuildStarted":
- self.init_file()
- self.file_inited = True
-
- # write pending events
- for e in self.event_queue:
- self.write_event(e)
-
- # also write the current event
- self.write_event(event)
-
- else:
- # queue all events until the file is inited
- self.event_queue.append(event)
-
- else:
- # we have the file, just write the event
- self.write_event(event)
-
- # set our handler's event processor
- event = EventWriter(self) # self is the cooker here
-
-
- # set up cooker features for this mock UI handler
-
- # we need to write the dependency tree in the log
- self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE)
- # register the log file writer as UI Handler
- bb.event.register_UIHhandler(EventLogWriteHandler())
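-            # A minimal sketch of how this log could be replayed later
-            # (hypothetical reader, not used anywhere in this file):
-            #
-            #   with open(DEFAULT_EVENTFILE) as f:
-            #       header = json.loads(f.readline())   # {"allvariables": {...}}
-            #       for line in f:
-            #           entry = json.loads(line)        # {"class": ..., "vars": ...}
-            #           event = pickle.loads(str(json.loads(entry["vars"])))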
-
-
- #
- # Copy of the data store which has been expanded.
- # Used for firing events and accessing variables where expansion needs to be accounted for
- #
- self.expanded_data = bb.data.createCopy(self.data)
- bb.data.update_data(self.expanded_data)
- bb.parse.init_parser(self.expanded_data)
-
- if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
- self.disableDataTracking()
-
- self.data.renameVar("__depends", "__base_depends")
- self.add_filewatch(self.data.getVar("__base_depends", False), self.configwatcher)
-
-
- def enableDataTracking(self):
- self.configuration.tracking = True
- if hasattr(self, "data"):
- self.data.enableTracking()
-
- def disableDataTracking(self):
- self.configuration.tracking = False
- if hasattr(self, "data"):
- self.data.disableTracking()
-
- def modifyConfigurationVar(self, var, val, default_file, op):
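-        # Map the requested operation onto the assignment operator written to
-        # the config file: "append" -> '+=', "set" -> '=', "earlyAssign" -> '?='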
- if op == "append":
- self.appendConfigurationVar(var, val, default_file)
- elif op == "set":
- self.saveConfigurationVar(var, val, default_file, "=")
- elif op == "earlyAssign":
- self.saveConfigurationVar(var, val, default_file, "?=")
-
-
- def appendConfigurationVar(self, var, val, default_file):
-        # append the variable assignment to the end of default_file
- default_file = bb.cookerdata.findConfigFile(default_file, self.data)
-
- total = "#added by hob"
- total += "\n%s += \"%s\"\n" % (var, val)
-
- with open(default_file, 'a') as f:
- f.write(total)
-
- #add to history
- loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
- self.data.appendVar(var, val, **loginfo)
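-        # For illustration, appending var="IMAGE_INSTALL", val="dropbear" via
-        # this method leaves the following at the end of the config file:
-        #
-        #   #added by hob
-        #   IMAGE_INSTALL += "dropbear"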
-
- def saveConfigurationVar(self, var, val, default_file, op):
-
- replaced = False
- #do not save if nothing changed
- if str(val) == self.data.getVar(var, False):
- return
-
- conf_files = self.data.varhistory.get_variable_files(var)
-
- #format the value when it is a list
- if isinstance(val, list):
- listval = ""
- for value in val:
- listval += "%s " % value
- val = listval
-
- topdir = self.data.getVar("TOPDIR", False)
-
-        # comment out or replace any previous assignments of var
- for conf_file in conf_files:
- if topdir in conf_file:
- with open(conf_file, 'r') as f:
- contents = f.readlines()
-
- lines = self.data.varhistory.get_variable_lines(var, conf_file)
- for line in lines:
- total = ""
- i = 0
- for c in contents:
- total += c
- i = i + 1
- if i==int(line):
- end_index = len(total)
- index = total.rfind(var, 0, end_index)
-
- begin_line = total.count("\n",0,index)
- end_line = int(line)
-
-                            # check whether the variable was previously saved by hob;
-                            # if so, replace the line where it was declared,
-                            # otherwise comment the line out
- if contents[begin_line-1]== "#added by hob\n":
- contents[begin_line] = "%s %s \"%s\"\n" % (var, op, val)
- replaced = True
- else:
- for ii in range(begin_line, end_line):
- contents[ii] = "#" + contents[ii]
-
- with open(conf_file, 'w') as f:
- f.writelines(contents)
-
-        if not replaced:
- #remove var from history
- self.data.varhistory.del_var_history(var)
-
- #add var to the end of default_file
- default_file = bb.cookerdata.findConfigFile(default_file, self.data)
-
-            # add the variable on a single line so it is easy to replace next time
- total = "\n#added by hob"
- total += "\n%s %s \"%s\"\n" % (var, op, val)
-
- with open(default_file, 'a') as f:
- f.write(total)
-
- #add to history
- loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
- self.data.setVar(var, val, **loginfo)
-
- def removeConfigurationVar(self, var):
- conf_files = self.data.varhistory.get_variable_files(var)
- topdir = self.data.getVar("TOPDIR", False)
-
- for conf_file in conf_files:
- if topdir in conf_file:
- with open(conf_file, 'r') as f:
- contents = f.readlines()
-
- lines = self.data.varhistory.get_variable_lines(var, conf_file)
- for line in lines:
- total = ""
- i = 0
- for c in contents:
- total += c
- i = i + 1
- if i==int(line):
- end_index = len(total)
- index = total.rfind(var, 0, end_index)
-
- begin_line = total.count("\n",0,index)
-
-                            # check whether the variable was previously saved by hob
- if contents[begin_line-1]== "#added by hob\n":
- contents[begin_line-1] = contents[begin_line] = "\n"
- else:
- contents[begin_line] = "\n"
- #remove var from history
- self.data.varhistory.del_var_history(var, conf_file, line)
- #remove variable
- self.data.delVar(var)
-
- with open(conf_file, 'w') as f:
- f.writelines(contents)
-
- def createConfigFile(self, name):
- path = os.getcwd()
- confpath = os.path.join(path, "conf", name)
- open(confpath, 'w').close()
-
- def parseConfiguration(self):
- # Set log file verbosity
- verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
- if verboselogs:
- bb.msg.loggerVerboseLogs = True
-
- # Change nice level if we're asked to
- nice = self.data.getVar("BB_NICE_LEVEL", True)
- if nice:
- curnice = os.nice(0)
- nice = int(nice) - curnice
- buildlog.verbose("Renice to %s " % os.nice(nice))
-
- if self.recipecache:
- del self.recipecache
- self.recipecache = bb.cache.CacheData(self.caches_array)
-
- self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )
-
- def updateConfigOpts(self, options, environment):
- clean = True
- for o in options:
- if o in ['prefile', 'postfile']:
- clean = False
- server_val = getattr(self.configuration, "%s_server" % o)
- if not options[o] and server_val:
- # restore value provided on server start
- setattr(self.configuration, o, server_val)
- continue
- setattr(self.configuration, o, options[o])
- for k in bb.utils.approved_variables():
- if k in environment and k not in self.configuration.env:
- logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
- self.configuration.env[k] = environment[k]
- clean = False
- if k in self.configuration.env and k not in environment:
- logger.debug(1, "Updating environment variable %s (deleted)" % (k))
- del self.configuration.env[k]
- clean = False
- if k not in self.configuration.env and k not in environment:
- continue
- if environment[k] != self.configuration.env[k]:
- logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
- self.configuration.env[k] = environment[k]
- clean = False
- if not clean:
- logger.debug(1, "Base environment change, triggering reparse")
- self.baseconfig_valid = False
- self.reset()
-
- def runCommands(self, server, data, abort):
- """
-        Run any queued asynchronous command.
-        This is done by the idle handler so it runs in the server context,
-        rather than being tied to any UI.
- """
-
- return self.command.runAsyncCommand()
-
- def showVersions(self):
-
- pkg_pn = self.recipecache.pkg_pn
- (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecache, pkg_pn)
-
- logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
- logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
-
- for p in sorted(pkg_pn):
- pref = preferred_versions[p]
- latest = latest_versions[p]
-
- prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
- lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
-
- if pref == latest:
- prefstr = ""
-
- logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
-
- def showEnvironment(self, buildfile=None, pkgs_to_build=None):
- """
- Show the outer or per-recipe environment
- """
- fn = None
- envdata = None
- if not pkgs_to_build:
- pkgs_to_build = []
-
- if buildfile:
- # Parse the configuration here. We need to do it explicitly here since
- # this showEnvironment() code path doesn't use the cache
- self.parseConfiguration()
-
- fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
- fn = self.matchFile(fn)
- fn = bb.cache.Cache.realfn2virtual(fn, cls)
- elif len(pkgs_to_build) == 1:
- ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
- if pkgs_to_build[0] in set(ignore.split()):
- bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
-
- taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)
-
- targetid = taskdata.getbuild_id(pkgs_to_build[0])
- fnid = taskdata.build_targets[targetid][0]
- fn = taskdata.fn_index[fnid]
- else:
- envdata = self.data
-
- if fn:
- try:
- envdata = bb.cache.Cache.loadDataFull(fn, self.collection.get_file_appends(fn), self.data)
- except Exception as e:
- parselog.exception("Unable to read %s", fn)
- raise
-
- # Display history
- with closing(StringIO()) as env:
- self.data.inchistory.emit(env)
- logger.plain(env.getvalue())
-
- # emit variables and shell functions
- data.update_data(envdata)
- with closing(StringIO()) as env:
- data.emit_env(env, envdata, True)
- logger.plain(env.getvalue())
-
-        # emit the metadata which isn't valid shell
- data.expandKeys(envdata)
- for e in envdata.keys():
- if data.getVarFlag( e, 'python', envdata ):
- logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
-
-
- def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
- """
- Prepare a runqueue and taskdata object for iteration over pkgs_to_build
- """
- bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)
-
- # A task of None means use the default task
- if task is None:
- task = self.configuration.cmd
-
- fulltargetlist = self.checkPackages(pkgs_to_build)
-
- localdata = data.createCopy(self.data)
- bb.data.update_data(localdata)
- bb.data.expandKeys(localdata)
- taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
-
- current = 0
- runlist = []
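-        # Targets may carry an explicit task, e.g. (illustrative)
-        # "busybox:do_compile" splits into k = "busybox", ktask = "do_compile";
-        # plain targets fall back to the default task selected above.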
- for k in fulltargetlist:
- ktask = task
- if ":do_" in k:
- k2 = k.split(":do_")
- k = k2[0]
- ktask = k2[1]
- taskdata.add_provider(localdata, self.recipecache, k)
- current += 1
- if not ktask.startswith("do_"):
- ktask = "do_%s" % ktask
- runlist.append([k, ktask])
- bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
- taskdata.add_unresolved(localdata, self.recipecache)
- bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
- return taskdata, runlist, fulltargetlist
-
- def prepareTreeData(self, pkgs_to_build, task):
- """
- Prepare a runqueue and taskdata object for iteration over pkgs_to_build
- """
-
- # We set abort to False here to prevent unbuildable targets raising
- # an exception when we're just generating data
- taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
-
- return runlist, taskdata
-
- ######## WARNING : this function requires cache_extra to be enabled ########
-
- def generateTaskDepTreeData(self, pkgs_to_build, task):
- """
- Create a dependency graph of pkgs_to_build including reverse dependency
- information.
- """
- runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
- rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
- rq.rqdata.prepare()
- return self.buildDependTree(rq, taskdata)
-
-
- def buildDependTree(self, rq, taskdata):
- seen_fnids = []
- depend_tree = {}
- depend_tree["depends"] = {}
- depend_tree["tdepends"] = {}
- depend_tree["pn"] = {}
- depend_tree["rdepends-pn"] = {}
- depend_tree["packages"] = {}
- depend_tree["rdepends-pkg"] = {}
- depend_tree["rrecs-pkg"] = {}
- depend_tree['providermap'] = {}
- depend_tree["layer-priorities"] = self.recipecache.bbfile_config_priorities
-
- for name, fn in taskdata.get_providermap().iteritems():
- pn = self.recipecache.pkg_fn[fn]
- if name != pn:
- version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
- depend_tree['providermap'][name] = (pn, version)
-
- for task in xrange(len(rq.rqdata.runq_fnid)):
- taskname = rq.rqdata.runq_task[task]
- fnid = rq.rqdata.runq_fnid[task]
- fn = taskdata.fn_index[fnid]
- pn = self.recipecache.pkg_fn[fn]
- version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
- if pn not in depend_tree["pn"]:
- depend_tree["pn"][pn] = {}
- depend_tree["pn"][pn]["filename"] = fn
- depend_tree["pn"][pn]["version"] = version
- depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)
-
- # if we have extra caches, list all attributes they bring in
- extra_info = []
- for cache_class in self.caches_array:
- if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
- cachefields = getattr(cache_class, 'cachefields', [])
- extra_info = extra_info + cachefields
-
- # for all attributes stored, add them to the dependency tree
- for ei in extra_info:
- depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]
-
-
- for dep in rq.rqdata.runq_depends[task]:
- depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
- deppn = self.recipecache.pkg_fn[depfn]
- dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
-                if dotname not in depend_tree["tdepends"]:
- depend_tree["tdepends"][dotname] = []
- depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
- if fnid not in seen_fnids:
- seen_fnids.append(fnid)
- packages = []
-
- depend_tree["depends"][pn] = []
- for dep in taskdata.depids[fnid]:
- depend_tree["depends"][pn].append(taskdata.build_names_index[dep])
-
- depend_tree["rdepends-pn"][pn] = []
- for rdep in taskdata.rdepids[fnid]:
- depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])
-
- rdepends = self.recipecache.rundeps[fn]
- for package in rdepends:
- depend_tree["rdepends-pkg"][package] = []
- for rdepend in rdepends[package]:
- depend_tree["rdepends-pkg"][package].append(rdepend)
- packages.append(package)
-
- rrecs = self.recipecache.runrecs[fn]
- for package in rrecs:
- depend_tree["rrecs-pkg"][package] = []
- for rdepend in rrecs[package]:
- depend_tree["rrecs-pkg"][package].append(rdepend)
-                    if package not in packages:
- packages.append(package)
-
- for package in packages:
- if package not in depend_tree["packages"]:
- depend_tree["packages"][package] = {}
- depend_tree["packages"][package]["pn"] = pn
- depend_tree["packages"][package]["filename"] = fn
- depend_tree["packages"][package]["version"] = version
-
- return depend_tree
-
- ######## WARNING : this function requires cache_extra to be enabled ########
- def generatePkgDepTreeData(self, pkgs_to_build, task):
- """
- Create a dependency tree of pkgs_to_build, returning the data.
- """
- _, taskdata = self.prepareTreeData(pkgs_to_build, task)
- tasks_fnid = []
- if len(taskdata.tasks_name) != 0:
- for task in xrange(len(taskdata.tasks_name)):
- tasks_fnid.append(taskdata.tasks_fnid[task])
-
- seen_fnids = []
- depend_tree = {}
- depend_tree["depends"] = {}
- depend_tree["pn"] = {}
- depend_tree["rdepends-pn"] = {}
- depend_tree["rdepends-pkg"] = {}
- depend_tree["rrecs-pkg"] = {}
-
- # if we have extra caches, list all attributes they bring in
- extra_info = []
- for cache_class in self.caches_array:
- if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
- cachefields = getattr(cache_class, 'cachefields', [])
- extra_info = extra_info + cachefields
-
- for task in xrange(len(tasks_fnid)):
- fnid = tasks_fnid[task]
- fn = taskdata.fn_index[fnid]
- pn = self.recipecache.pkg_fn[fn]
-
- if pn not in depend_tree["pn"]:
- depend_tree["pn"][pn] = {}
- depend_tree["pn"][pn]["filename"] = fn
- version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
- depend_tree["pn"][pn]["version"] = version
- rdepends = self.recipecache.rundeps[fn]
- rrecs = self.recipecache.runrecs[fn]
- depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)
-
- # for all extra attributes stored, add them to the dependency tree
- for ei in extra_info:
- depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]
-
- if fnid not in seen_fnids:
- seen_fnids.append(fnid)
-
- depend_tree["depends"][pn] = []
- for dep in taskdata.depids[fnid]:
- item = taskdata.build_names_index[dep]
- pn_provider = ""
- targetid = taskdata.getbuild_id(item)
- if targetid in taskdata.build_targets and taskdata.build_targets[targetid]:
- id = taskdata.build_targets[targetid][0]
- fn_provider = taskdata.fn_index[id]
- pn_provider = self.recipecache.pkg_fn[fn_provider]
- else:
- pn_provider = item
- depend_tree["depends"][pn].append(pn_provider)
-
- depend_tree["rdepends-pn"][pn] = []
- for rdep in taskdata.rdepids[fnid]:
- item = taskdata.run_names_index[rdep]
- pn_rprovider = ""
- targetid = taskdata.getrun_id(item)
- if targetid in taskdata.run_targets and taskdata.run_targets[targetid]:
- id = taskdata.run_targets[targetid][0]
- fn_rprovider = taskdata.fn_index[id]
- pn_rprovider = self.recipecache.pkg_fn[fn_rprovider]
- else:
- pn_rprovider = item
- depend_tree["rdepends-pn"][pn].append(pn_rprovider)
-
- depend_tree["rdepends-pkg"].update(rdepends)
- depend_tree["rrecs-pkg"].update(rrecs)
-
- return depend_tree
-
- def generateDepTreeEvent(self, pkgs_to_build, task):
- """
- Create a task dependency graph of pkgs_to_build.
- Generate an event with the result
- """
- depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
- bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
-
- def generateDotGraphFiles(self, pkgs_to_build, task):
- """
- Create a task dependency graph of pkgs_to_build.
- Save the result to a set of .dot files.
- """
-
- depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
-
-        # Print a flattened form of the package dependencies below, where subpackages are merged into the main pn
-        depends_file = open('pn-depends.dot', 'w')
-        buildlist_file = open('pn-buildlist', 'w')
- print("digraph depends {", file=depends_file)
- for pn in depgraph["pn"]:
- fn = depgraph["pn"][pn]["filename"]
- version = depgraph["pn"][pn]["version"]
- print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
- print("%s" % pn, file=buildlist_file)
- buildlist_file.close()
- logger.info("PN build list saved to 'pn-buildlist'")
- for pn in depgraph["depends"]:
- for depend in depgraph["depends"][pn]:
- print('"%s" -> "%s" [style=solid]' % (pn, depend), file=depends_file)
- for pn in depgraph["rdepends-pn"]:
- for rdepend in depgraph["rdepends-pn"][pn]:
- print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
- print("}", file=depends_file)
- logger.info("PN dependencies saved to 'pn-depends.dot'")
-
-        depends_file = open('package-depends.dot', 'w')
- print("digraph depends {", file=depends_file)
- for package in depgraph["packages"]:
- pn = depgraph["packages"][package]["pn"]
- fn = depgraph["packages"][package]["filename"]
- version = depgraph["packages"][package]["version"]
- if package == pn:
- print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
- else:
- print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
- for depend in depgraph["depends"][pn]:
- print('"%s" -> "%s" [style=solid]' % (package, depend), file=depends_file)
- for package in depgraph["rdepends-pkg"]:
- for rdepend in depgraph["rdepends-pkg"][package]:
- print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
- for package in depgraph["rrecs-pkg"]:
- for rdepend in depgraph["rrecs-pkg"][package]:
- print('"%s" -> "%s" [style=dotted]' % (package, rdepend), file=depends_file)
- print("}", file=depends_file)
- logger.info("Package dependencies saved to 'package-depends.dot'")
-
-        tdepends_file = open('task-depends.dot', 'w')
- print("digraph depends {", file=tdepends_file)
- for task in depgraph["tdepends"]:
- (pn, taskname) = task.rsplit(".", 1)
- fn = depgraph["pn"][pn]["filename"]
- version = depgraph["pn"][pn]["version"]
- print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
- for dep in depgraph["tdepends"][task]:
- print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
- print("}", file=tdepends_file)
- logger.info("Task dependencies saved to 'task-depends.dot'")
-
- def show_appends_with_no_recipes(self):
- # Determine which bbappends haven't been applied
-
- # First get list of recipes, including skipped
- recipefns = self.recipecache.pkg_fn.keys()
- recipefns.extend(self.skiplist.keys())
-
- # Work out list of bbappends that have been applied
- applied_appends = []
- for fn in recipefns:
- applied_appends.extend(self.collection.get_file_appends(fn))
-
- appends_without_recipes = []
- for _, appendfn in self.collection.bbappends:
-            if appendfn not in applied_appends:
- appends_without_recipes.append(appendfn)
-
- if appends_without_recipes:
- msg = 'No recipes available for:\n %s' % '\n '.join(appends_without_recipes)
- warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
- False) or "no"
- if warn_only.lower() in ("1", "yes", "true"):
- bb.warn(msg)
- else:
- bb.fatal(msg)
-
- def handlePrefProviders(self):
-
- localdata = data.createCopy(self.data)
- bb.data.update_data(localdata)
- bb.data.expandKeys(localdata)
-
- # Handle PREFERRED_PROVIDERS
- for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
- try:
- (providee, provider) = p.split(':')
- except:
- providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
- continue
- if providee in self.recipecache.preferred and self.recipecache.preferred[providee] != provider:
- providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecache.preferred[providee])
- self.recipecache.preferred[providee] = provider
-
- def findCoreBaseFiles(self, subdir, configfile):
- corebase = self.data.getVar('COREBASE', True) or ""
- paths = []
- for root, dirs, files in os.walk(corebase + '/' + subdir):
- for d in dirs:
- configfilepath = os.path.join(root, d, configfile)
- if os.path.exists(configfilepath):
- paths.append(os.path.join(root, d))
-
- if paths:
- bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.data)
-
- def findConfigFilePath(self, configfile):
- """
- Find the location on disk of configfile and if it exists and was parsed by BitBake
- emit the ConfigFilePathFound event with the path to the file.
- """
- path = bb.cookerdata.findConfigFile(configfile, self.data)
- if not path:
- return
-
- # Generate a list of parsed configuration files by searching the files
- # listed in the __depends and __base_depends variables with a .conf suffix.
- conffiles = []
- dep_files = self.data.getVar('__base_depends', False) or []
- dep_files = dep_files + (self.data.getVar('__depends', False) or [])
-
- for f in dep_files:
- if f[0].endswith(".conf"):
- conffiles.append(f[0])
-
- _, conf, conffile = path.rpartition("conf/")
- match = os.path.join(conf, conffile)
-        # Try to find matches for conf/conffilename.conf as we don't always
- # have the full path to the file.
- for cfg in conffiles:
- if cfg.endswith(match):
- bb.event.fire(bb.event.ConfigFilePathFound(path),
- self.data)
- break
-
- def findFilesMatchingInDir(self, filepattern, directory):
- """
- Searches for files containing the substring 'filepattern' which are children of
- 'directory' in each BBPATH. i.e. to find all rootfs package classes available
- to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
- or to find all machine configuration files one could call:
- findFilesMatchingInDir(self, '.conf', 'conf/machine')
- """
-
- matches = []
- bbpaths = self.data.getVar('BBPATH', True).split(':')
- for path in bbpaths:
- dirpath = os.path.join(path, directory)
- if os.path.exists(dirpath):
- for root, dirs, files in os.walk(dirpath):
- for f in files:
- if filepattern in f:
- matches.append(f)
-
- if matches:
- bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
-
- def findConfigFiles(self, varname):
- """
- Find config files which are appropriate values for varname.
- i.e. MACHINE, DISTRO
- """
- possible = []
- var = varname.lower()
-
- data = self.data
- # iterate configs
- bbpaths = data.getVar('BBPATH', True).split(':')
- for path in bbpaths:
- confpath = os.path.join(path, "conf", var)
- if os.path.exists(confpath):
- for root, dirs, files in os.walk(confpath):
- # get all child files, these are appropriate values
- for f in files:
- val, sep, end = f.rpartition('.')
- if end == 'conf':
- possible.append(val)
-
- if possible:
- bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
-
- def findInheritsClass(self, klass):
- """
- Find all recipes which inherit the specified class
- """
- pkg_list = []
-
- for pfn in self.recipecache.pkg_fn:
- inherits = self.recipecache.inherits.get(pfn, None)
- if inherits and klass in inherits:
- pkg_list.append(self.recipecache.pkg_fn[pfn])
-
- return pkg_list
-
- def generateTargetsTree(self, klass=None, pkgs=None):
- """
- Generate a dependency tree of buildable targets
- Generate an event with the result
- """
- # if the caller hasn't specified a pkgs list default to universe
- if not pkgs:
- pkgs = ['universe']
-        # if a class was passed, ensure all recipes which inherit the
-        # specified class are included in pkgs
- if klass:
- extra_pkgs = self.findInheritsClass(klass)
- pkgs = pkgs + extra_pkgs
-
- # generate a dependency tree for all our packages
- tree = self.generatePkgDepTreeData(pkgs, 'build')
- bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
-
- def interactiveMode( self ):
- """Drop off into a shell"""
- try:
- from bb import shell
- except ImportError:
- parselog.exception("Interactive mode not available")
- sys.exit(1)
- else:
- shell.start( self )
-
-
- def handleCollections( self, collections ):
- """Handle collections"""
- errors = False
- self.recipecache.bbfile_config_priorities = []
- if collections:
- collection_priorities = {}
- collection_depends = {}
- collection_list = collections.split()
- min_prio = 0
- for c in collection_list:
- # Get collection priority if defined explicitly
- priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
- if priority:
- try:
- prio = int(priority)
- except ValueError:
- parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
- errors = True
- if min_prio == 0 or prio < min_prio:
- min_prio = prio
- collection_priorities[c] = prio
- else:
- collection_priorities[c] = None
-
- # Check dependencies and store information for priority calculation
- deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
- if deps:
- try:
- deplist = bb.utils.explode_dep_versions2(deps)
- except bb.utils.VersionStringException as vse:
- bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
- for dep, oplist in deplist.iteritems():
- if dep in collection_list:
- for opstr in oplist:
- layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
- (op, depver) = opstr.split()
- if layerver:
- try:
- res = bb.utils.vercmp_string_op(layerver, depver, op)
- except bb.utils.VersionStringException as vse:
- bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
- if not res:
- parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
- errors = True
- else:
- parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
- errors = True
- else:
- parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
- errors = True
- collection_depends[c] = deplist.keys()
- else:
- collection_depends[c] = []
-
- # Recursively work out collection priorities based on dependencies
- def calc_layer_priority(collection):
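-                # Illustrative example: if layer B depends only on layer A and
-                # BBFILE_PRIORITY_A = "5", B's priority is calculated as
-                # max(priority of its dependencies) + 1 = 6.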
- if not collection_priorities[collection]:
- max_depprio = min_prio
- for dep in collection_depends[collection]:
- calc_layer_priority(dep)
- depprio = collection_priorities[dep]
- if depprio > max_depprio:
- max_depprio = depprio
- max_depprio += 1
- parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
- collection_priorities[collection] = max_depprio
-
- # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
- for c in collection_list:
- calc_layer_priority(c)
- regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True)
-                if regex is None:
- parselog.error("BBFILE_PATTERN_%s not defined" % c)
- errors = True
- continue
- try:
- cre = re.compile(regex)
- except re.error:
- parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
- errors = True
- continue
- self.recipecache.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
- if errors:
- # We've already printed the actual error(s)
- raise CollectionError("Errors during parsing layer configuration")
-
- def buildSetVars(self):
- """
- Setup any variables needed before starting a build
- """
- t = time.gmtime()
- if not self.data.getVar("BUILDNAME", False):
- self.data.setVar("BUILDNAME", "${DATE}${TIME}")
- self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
- self.data.setVar("DATE", time.strftime('%Y%m%d', t))
- self.data.setVar("TIME", time.strftime('%H%M%S', t))
-
- def matchFiles(self, bf):
- """
- Find the .bb files which match the expression in 'buildfile'.
- """
- if bf.startswith("/") or bf.startswith("../"):
- bf = os.path.abspath(bf)
-
- self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
- filelist, masked = self.collection.collect_bbfiles(self.data, self.expanded_data)
- try:
- os.stat(bf)
- bf = os.path.abspath(bf)
- return [bf]
- except OSError:
- regexp = re.compile(bf)
- matches = []
- for f in filelist:
- if regexp.search(f) and os.path.isfile(f):
- matches.append(f)
- return matches
-
- def matchFile(self, buildfile):
- """
- Find the .bb file which matches the expression in 'buildfile'.
- Raise an error if multiple files
- """
- matches = self.matchFiles(buildfile)
- if len(matches) != 1:
- if matches:
- msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
-                for f in matches:
-                    msg += "\n    %s" % f
- parselog.error(msg)
- else:
- parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
- raise NoSpecificMatch
- return matches[0]
-
- def buildFile(self, buildfile, task):
- """
- Build the file matching regexp buildfile
- """
-
- # Too many people use -b because they think it's how you normally
- # specify a target to be built, so show a warning
- bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
-
- # Parse the configuration here. We need to do it explicitly here since
- # buildFile() doesn't use the cache
- self.parseConfiguration()
-
- # If we are told to do the None task then query the default task
-        if task is None:
- task = self.configuration.cmd
-
- fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
- fn = self.matchFile(fn)
-
- self.buildSetVars()
-
-        infos = bb.cache.Cache.parse(fn, self.collection.get_file_appends(fn),
- self.data,
- self.caches_array)
- infos = dict(infos)
-
- fn = bb.cache.Cache.realfn2virtual(fn, cls)
- try:
- info_array = infos[fn]
- except KeyError:
- bb.fatal("%s does not exist" % fn)
-
- if info_array[0].skipped:
- bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
-
- self.recipecache.add_from_recipeinfo(fn, info_array)
-
- # Tweak some variables
- item = info_array[0].pn
- self.recipecache.ignored_dependencies = set()
- self.recipecache.bbfile_priority[fn] = 1
-
- # Remove external dependencies
- self.recipecache.task_deps[fn]['depends'] = {}
- self.recipecache.deps[fn] = []
- self.recipecache.rundeps[fn] = []
- self.recipecache.runrecs[fn] = []
-
- # Invalidate task for target if force mode active
- if self.configuration.force:
- logger.verbose("Invalidate task %s, %s", task, fn)
- if not task.startswith("do_"):
- task = "do_%s" % task
- bb.parse.siggen.invalidate_task(task, self.recipecache, fn)
-
- # Setup taskdata structure
- taskdata = bb.taskdata.TaskData(self.configuration.abort)
- taskdata.add_provider(self.data, self.recipecache, item)
-
- buildname = self.data.getVar("BUILDNAME", True)
- bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)
-
- # Execute the runqueue
- if not task.startswith("do_"):
- task = "do_%s" % task
- runlist = [[item, task]]
-
- rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
-
- def buildFileIdle(server, rq, abort):
-
- msg = None
- interrupted = 0
- if abort or self.state == state.forceshutdown:
- rq.finish_runqueue(True)
- msg = "Forced shutdown"
- interrupted = 2
- elif self.state == state.shutdown:
- rq.finish_runqueue(False)
- msg = "Stopped build"
- interrupted = 1
- failures = 0
- try:
- retval = rq.execute_runqueue()
- except runqueue.TaskFailure as exc:
- failures += len(exc.args)
- retval = False
- except SystemExit as exc:
- self.command.finishAsyncCommand(str(exc))
- return False
-
- if not retval:
- bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures, interrupted), self.expanded_data)
- self.command.finishAsyncCommand(msg)
- return False
- if retval is True:
- return True
- return retval
-
- self.configuration.server_register_idlecallback(buildFileIdle, rq)
-
- def buildTargets(self, targets, task):
- """
- Attempt to build the targets specified
- """
-
- def buildTargetsIdle(server, rq, abort):
- msg = None
- interrupted = 0
- if abort or self.state == state.forceshutdown:
- rq.finish_runqueue(True)
- msg = "Forced shutdown"
- interrupted = 2
- elif self.state == state.shutdown:
- rq.finish_runqueue(False)
- msg = "Stopped build"
- interrupted = 1
- failures = 0
- try:
- retval = rq.execute_runqueue()
- except runqueue.TaskFailure as exc:
- failures += len(exc.args)
- retval = False
- except SystemExit as exc:
- self.command.finishAsyncCommand(str(exc))
- return False
-
- if not retval:
- bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures, interrupted), self.data)
- self.command.finishAsyncCommand(msg)
- return False
- if retval is True:
- return True
- return retval
-
- build.reset_cache()
- self.buildSetVars()
-
- # If we are told to do the None task then query the default task
-        if task is None:
- task = self.configuration.cmd
-
- if not task.startswith("do_"):
- task = "do_%s" % task
-
- taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)
-
- buildname = self.data.getVar("BUILDNAME", False)
-
-        # make sure targets always look like <target>:do_<task>
- ntargets = []
- for target in fulltargetlist:
- if ":" in target:
- if ":do_" not in target:
- target = "%s:do_%s" % tuple(target.split(":", 1))
- else:
- target = "%s:%s" % (target, task)
- ntargets.append(target)
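-        # e.g. (illustrative) "busybox" becomes "busybox:do_build" and
-        # "busybox:compile" becomes "busybox:do_compile"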
-
- bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.data)
-
- rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
- if 'universe' in targets:
- rq.rqdata.warn_multi_bb = True
-
- self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
-
-
- def getAllKeysWithFlags(self, flaglist):
- dump = {}
- for k in self.data.keys():
- try:
- expand = True
- flags = self.data.getVarFlags(k)
- if flags and "func" in flags and "python" in flags:
- expand = False
- v = self.data.getVar(k, expand)
- if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
- dump[k] = {
- 'v' : v ,
- 'history' : self.data.varhistory.variable(k),
- }
- for d in flaglist:
- if flags and d in flags:
- dump[k][d] = flags[d]
- else:
- dump[k][d] = None
- except Exception as e:
- print(e)
- return dump
-
-
- def generateNewImage(self, image, base_image, package_queue, timestamp, description):
- '''
- Create a new image with a "require"/"inherit" base_image statement
- '''
- if timestamp:
- image_name = os.path.splitext(image)[0]
- timestr = time.strftime("-%Y%m%d-%H%M%S")
- dest = image_name + str(timestr) + ".bb"
- else:
- if not image.endswith(".bb"):
- dest = image + ".bb"
- else:
- dest = image
-
- basename = False
- if base_image:
- with open(base_image, 'r') as f:
- require_line = f.readline()
- p = re.compile("IMAGE_BASENAME *=")
- for line in f:
- if p.search(line):
- basename = True
-
- with open(dest, "w") as imagefile:
- if base_image is None:
- imagefile.write("inherit core-image\n")
- else:
- topdir = self.data.getVar("TOPDIR", False)
- if topdir in base_image:
- base_image = require_line.split()[1]
- imagefile.write("require " + base_image + "\n")
- image_install = "IMAGE_INSTALL = \""
- for package in package_queue:
- image_install += str(package) + " "
- image_install += "\"\n"
- imagefile.write(image_install)
-
- description_var = "DESCRIPTION = \"" + description + "\"\n"
- imagefile.write(description_var)
-
- if basename:
-            # If this is overwritten in an inherited image, reset it to default
- image_basename = "IMAGE_BASENAME = \"${PN}\"\n"
- imagefile.write(image_basename)
-
- self.state = state.initial
- if timestamp:
- return timestr
-
- def updateCacheSync(self):
- if self.state == state.running:
- return
-
- # reload files for which we got notifications
- for p in self.inotify_modified_files:
- bb.parse.update_cache(p)
- if p in bb.parse.BBHandler.cached_statements:
- del bb.parse.BBHandler.cached_statements[p]
- self.inotify_modified_files = []
-
- if not self.baseconfig_valid:
- logger.debug(1, "Reloading base configuration data")
- self.initConfigurationData()
- self.baseconfig_valid = True
- self.parsecache_valid = False
-
- # This is called for all async commands when self.state != running
- def updateCache(self):
- if self.state == state.running:
- return
-
- if self.state in (state.shutdown, state.forceshutdown, state.error):
- if hasattr(self.parser, 'shutdown'):
- self.parser.shutdown(clean=False, force = True)
- raise bb.BBHandledException()
-
- if self.state != state.parsing:
- self.updateCacheSync()
-
- if self.state != state.parsing and not self.parsecache_valid:
- self.parseConfiguration ()
- if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
- bb.event.fire(bb.event.SanityCheck(False), self.data)
-
- ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
- self.recipecache.ignored_dependencies = set(ignore.split())
-
- for dep in self.configuration.extra_assume_provided:
- self.recipecache.ignored_dependencies.add(dep)
-
- self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
- (filelist, masked) = self.collection.collect_bbfiles(self.data, self.expanded_data)
-
- self.parser = CookerParser(self, filelist, masked)
- self.parsecache_valid = True
-
- self.state = state.parsing
-
- if not self.parser.parse_next():
- collectlog.debug(1, "parsing complete")
- if self.parser.error:
- raise bb.BBHandledException()
- self.show_appends_with_no_recipes()
- self.handlePrefProviders()
- self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn, self.data)
- self.state = state.running
-
- # Send an event listing all stamps reachable after parsing
- # which the metadata may use to clean up stale data
- event = bb.event.ReachableStamps(self.recipecache.stamp)
- bb.event.fire(event, self.expanded_data)
- return None
-
- return True
-
- def checkPackages(self, pkgs_to_build):
-
- # Return a copy, don't modify the original
- pkgs_to_build = pkgs_to_build[:]
-
- if len(pkgs_to_build) == 0:
- raise NothingToBuild
-
- ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
- for pkg in pkgs_to_build:
- if pkg in ignore:
- parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
-
- if 'world' in pkgs_to_build:
- bb.providers.buildWorldTargetList(self.recipecache)
- pkgs_to_build.remove('world')
- for t in self.recipecache.world_target:
- pkgs_to_build.append(t)
-
- if 'universe' in pkgs_to_build:
- parselog.warn("The \"universe\" target is only intended for testing and may produce errors.")
- parselog.debug(1, "collating packages for \"universe\"")
- pkgs_to_build.remove('universe')
- for t in self.recipecache.universe_target:
- pkgs_to_build.append(t)
-
- return pkgs_to_build
-
-
-
-
- def pre_serve(self):
- # Empty the environment. The environment will be populated as
- # necessary from the data store.
- #bb.utils.empty_environment()
- try:
- self.prhost = prserv.serv.auto_start(self.data)
- except prserv.serv.PRServiceConfigError:
- bb.event.fire(CookerExit(), self.expanded_data)
- self.state = state.error
- return
-
- def post_serve(self):
- prserv.serv.auto_shutdown(self.data)
- bb.event.fire(CookerExit(), self.expanded_data)
- lockfile = self.lock.name
- self.lock.close()
- self.lock = None
-
- while not self.lock:
- with bb.utils.timeout(3):
- self.lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=True)
- if not self.lock:
- # Some systems may not have lsof available
- procs = None
- try:
- procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
- if procs is None:
- # Fall back to fuser if lsof is unavailable
- try:
- procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
-
- msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock"
- if procs:
- msg += ":\n%s" % str(procs)
- print(msg)
-
-
- def shutdown(self, force = False):
- if force:
- self.state = state.forceshutdown
- else:
- self.state = state.shutdown
-
- if self.parser:
- self.parser.shutdown(clean=not force, force=force)
-
- def finishcommand(self):
- self.state = state.initial
-
- def reset(self):
- self.initConfigurationData()
-
- def lockBitbake(self):
- if not hasattr(self, 'lock'):
- self.lock = None
- if self.data:
- lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
- if lockfile:
- self.lock = bb.utils.lockfile(lockfile, False, False)
- return self.lock
-
- def unlockBitbake(self):
- if hasattr(self, 'lock') and self.lock:
- bb.utils.unlockfile(self.lock)
-
-def server_main(cooker, func, *args):
- cooker.pre_serve()
-
- if cooker.configuration.profile:
- try:
- import cProfile as profile
- except:
- import profile
- prof = profile.Profile()
-
- ret = profile.Profile.runcall(prof, func, *args)
-
- prof.dump_stats("profile.log")
- bb.utils.process_profilelog("profile.log")
- print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")
-
- else:
- ret = func(*args)
-
- cooker.post_serve()
-
- return ret
-
-class CookerExit(bb.event.Event):
- """
- Notify clients of the Cooker shutdown
- """
-
- def __init__(self):
- bb.event.Event.__init__(self)
-
-
-class CookerCollectFiles(object):
- def __init__(self, priorities):
- self.bbappends = []
- self.bbfile_config_priorities = priorities
-
- def calc_bbfile_priority( self, filename, matched = None ):
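-        # Illustrative: with BBFILE_PATTERN_mylayer = "^/path/to/mylayer/" and
-        # BBFILE_PRIORITY_mylayer = "6", any filename matching that pattern is
-        # assigned priority 6; files matching no pattern fall through to 0.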
- for _, _, regex, pri in self.bbfile_config_priorities:
- if regex.match(filename):
-                if matched is not None:
-                    if regex not in matched:
- matched.add(regex)
- return pri
- return 0
-
- def get_bbfiles(self):
- """Get list of default .bb files by reading out the current directory"""
- path = os.getcwd()
- contents = os.listdir(path)
- bbfiles = []
- for f in contents:
- if f.endswith(".bb"):
- bbfiles.append(os.path.abspath(os.path.join(path, f)))
- return bbfiles
-
- def find_bbfiles(self, path):
- """Find all the .bb and .bbappend files in a directory"""
- found = []
- for dir, dirs, files in os.walk(path):
- for ignored in ('SCCS', 'CVS', '.svn'):
- if ignored in dirs:
- dirs.remove(ignored)
-            found += [os.path.join(dir, f) for f in files if f.endswith(('.bb', '.bbappend'))]
-
- return found
-
- def collect_bbfiles(self, config, eventdata):
- """Collect all available .bb build files"""
- masked = 0
-
- collectlog.debug(1, "collecting .bb files")
-
- files = (config.getVar( "BBFILES", True) or "").split()
- config.setVar("BBFILES", " ".join(files))
-
- # Sort files by priority
- files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )
-
-        if not files:
- files = self.get_bbfiles()
-
-        if not files:
- collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
- bb.event.fire(CookerExit(), eventdata)
-
- # Can't use set here as order is important
- newfiles = []
- for f in files:
- if os.path.isdir(f):
- dirfiles = self.find_bbfiles(f)
- for g in dirfiles:
- if g not in newfiles:
- newfiles.append(g)
- else:
- globbed = glob.glob(f)
- if not globbed and os.path.exists(f):
- globbed = [f]
- # glob gives files in order on disk. Sort to be deterministic.
- for g in sorted(globbed):
- if g not in newfiles:
- newfiles.append(g)
-
- bbmask = config.getVar('BBMASK', True)
-
- if bbmask:
- # First validate the individual regular expressions and ignore any
- # that do not compile
- bbmasks = []
- for mask in bbmask.split():
- try:
- re.compile(mask)
- bbmasks.append(mask)
- except sre_constants.error:
- collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
-
- # Then validate the combined regular expressions. This should never
- # fail, but better safe than sorry...
- bbmask = "|".join(bbmasks)
- try:
- bbmask_compiled = re.compile(bbmask)
- except sre_constants.error:
- collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
- bbmask = None
-
- bbfiles = []
- bbappend = []
- for f in newfiles:
- if bbmask and bbmask_compiled.search(f):
- collectlog.debug(1, "skipping masked file %s", f)
- masked += 1
- continue
- if f.endswith('.bb'):
- bbfiles.append(f)
- elif f.endswith('.bbappend'):
- bbappend.append(f)
- else:
- collectlog.debug(1, "skipping %s: unknown file extension", f)
-
- # Build a list of .bbappend files for each .bb file
- for f in bbappend:
- base = os.path.basename(f).replace('.bbappend', '.bb')
- self.bbappends.append((base, f))
-
- # Find overlayed recipes
- # bbfiles will be in priority order which makes this easy
- bbfile_seen = dict()
- self.overlayed = defaultdict(list)
- for f in reversed(bbfiles):
- base = os.path.basename(f)
- if base not in bbfile_seen:
- bbfile_seen[base] = f
- else:
- topfile = bbfile_seen[base]
- self.overlayed[topfile].append(f)
-
- return (bbfiles, masked)
-
- def get_file_appends(self, fn):
- """
- Returns a list of .bbappend files to apply to fn
- """
- filelist = []
- f = os.path.basename(fn)
- for b in self.bbappends:
- (bbappend, filename) = b
- if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
- filelist.append(filename)
- return filelist
-
- def collection_priorities(self, pkgfns, d):
-
- priorities = {}
-
- # Calculate priorities for each file
- matched = set()
- for p in pkgfns:
- realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
- priorities[p] = self.calc_bbfile_priority(realfn, matched)
-
- # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
- unmatched = set()
- for _, _, regex, pri in self.bbfile_config_priorities:
-            if regex not in matched:
- unmatched.add(regex)
-
- def findmatch(regex):
- for b in self.bbappends:
- (bbfile, append) = b
- if regex.match(append):
- return True
- return False
-
- for unmatch in unmatched.copy():
- if findmatch(unmatch):
- unmatched.remove(unmatch)
-
- for collection, pattern, regex, _ in self.bbfile_config_priorities:
- if regex in unmatched:
- if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
- collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
-
- return priorities
-
-class ParsingFailure(Exception):
- def __init__(self, realexception, recipe):
- self.realexception = realexception
- self.recipe = recipe
- Exception.__init__(self, realexception, recipe)
-
-class Feeder(multiprocessing.Process):
- def __init__(self, jobs, to_parsers, quit):
- self.quit = quit
- self.jobs = jobs
- self.to_parsers = to_parsers
- multiprocessing.Process.__init__(self)
-
- def run(self):
- while True:
- try:
- quit = self.quit.get_nowait()
- except Queue.Empty:
- pass
- else:
- if quit == 'cancel':
- self.to_parsers.cancel_join_thread()
- break
-
- try:
- job = self.jobs.pop()
- except IndexError:
- break
-
- try:
- self.to_parsers.put(job, timeout=0.5)
- except Queue.Full:
- self.jobs.insert(0, job)
- continue
-
-class Parser(multiprocessing.Process):
- def __init__(self, jobs, results, quit, init, profile):
- self.jobs = jobs
- self.results = results
- self.quit = quit
- self.init = init
- multiprocessing.Process.__init__(self)
- self.context = bb.utils.get_context().copy()
- self.handlers = bb.event.get_class_handlers().copy()
- self.profile = profile
-
- def run(self):
-
- if not self.profile:
- self.realrun()
- return
-
- try:
- import cProfile as profile
- except:
- import profile
- prof = profile.Profile()
- try:
- profile.Profile.runcall(prof, self.realrun)
- finally:
- logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
- prof.dump_stats(logfile)
-
- def realrun(self):
- if self.init:
- self.init()
-
- pending = []
- while True:
- try:
- self.quit.get_nowait()
- except Queue.Empty:
- pass
- else:
- self.results.cancel_join_thread()
- break
-
- if pending:
- result = pending.pop()
- else:
- try:
- job = self.jobs.get(timeout=0.25)
- except Queue.Empty:
- continue
-
- if job is None:
- break
- result = self.parse(*job)
-
- try:
- self.results.put(result, timeout=0.25)
- except Queue.Full:
- pending.append(result)
-
- def parse(self, filename, appends, caches_array):
- try:
- # Record the filename we're parsing into any events generated
- def parse_filter(self, record):
- record.taskpid = bb.event.worker_pid
- record.fn = filename
- return True
-
- # Reset our environment and handlers to the original settings
- bb.utils.set_context(self.context.copy())
- bb.event.set_class_handlers(self.handlers.copy())
- bb.event.LogHandler.filter = parse_filter
-
- return True, bb.cache.Cache.parse(filename, appends, self.cfg, caches_array)
- except Exception as exc:
- tb = sys.exc_info()[2]
- exc.recipe = filename
- exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
- return True, exc
-        # Need to turn BaseExceptions into Exceptions here so we gracefully shut down
-        # and a worker thread doesn't just exit on its own in response to
-        # a SystemExit event, for example.
- except BaseException as exc:
- return True, ParsingFailure(exc, filename)
-
-class CookerParser(object):
- def __init__(self, cooker, filelist, masked):
- self.filelist = filelist
- self.cooker = cooker
- self.cfgdata = cooker.data
- self.cfghash = cooker.data_hash
-
- # Accounting statistics
- self.parsed = 0
- self.cached = 0
- self.error = 0
- self.masked = masked
-
- self.skipped = 0
- self.virtuals = 0
- self.total = len(filelist)
-
- self.current = 0
- self.process_names = []
-
- self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
- self.fromcache = []
- self.willparse = []
- for filename in self.filelist:
- appends = self.cooker.collection.get_file_appends(filename)
- if not self.bb_cache.cacheValid(filename, appends):
- self.willparse.append((filename, appends, cooker.caches_array))
- else:
- self.fromcache.append((filename, appends))
- self.toparse = self.total - len(self.fromcache)
- self.progress_chunk = max(self.toparse / 100, 1)
-
- self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
- multiprocessing.cpu_count()), len(self.willparse))
-
- self.start()
- self.haveshutdown = False
-
- def start(self):
- self.results = self.load_cached()
- self.processes = []
- if self.toparse:
- bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
- def init():
- Parser.cfg = self.cfgdata
- bb.utils.set_process_name(multiprocessing.current_process().name)
- multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
- multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
-
- self.feeder_quit = multiprocessing.Queue(maxsize=1)
- self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
- self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
- self.result_queue = multiprocessing.Queue()
- self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
- self.feeder.start()
- for i in range(0, self.num_processes):
- parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
- parser.start()
- self.process_names.append(parser.name)
- self.processes.append(parser)
-
- self.results = itertools.chain(self.results, self.parse_generator())
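-        # Pipeline, as wired above: the Feeder moves (filename, appends,
-        # caches_array) jobs from self.willparse into the shared jobs queue;
-        # each Parser process parses recipes from that queue and puts result
-        # tuples on result_queue, which parse_generator() drains.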
-
- def shutdown(self, clean=True, force=False):
- if not self.toparse:
- return
- if self.haveshutdown:
- return
- self.haveshutdown = True
-
- if clean:
- event = bb.event.ParseCompleted(self.cached, self.parsed,
- self.skipped, self.masked,
- self.virtuals, self.error,
- self.total)
-
- bb.event.fire(event, self.cfgdata)
- self.feeder_quit.put(None)
- for process in self.processes:
- self.parser_quit.put(None)
- else:
- self.feeder_quit.put('cancel')
-
- self.parser_quit.cancel_join_thread()
- for process in self.processes:
- self.parser_quit.put(None)
-
- self.jobs.cancel_join_thread()
-
- for process in self.processes:
- if force:
- process.join(.1)
- process.terminate()
- else:
- process.join()
- self.feeder.join()
-
- sync = threading.Thread(target=self.bb_cache.sync)
- sync.start()
- multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
- bb.codeparser.parser_cache_savemerge()
- bb.fetch.fetcher_parse_done()
- if self.cooker.configuration.profile:
- profiles = []
- for i in self.process_names:
- logfile = "profile-parse-%s.log" % i
- if os.path.exists(logfile):
- profiles.append(logfile)
-
- pout = "profile-parse.log.processed"
- bb.utils.process_profilelog(profiles, pout = pout)
- print("Processed parsing statistics saved to %s" % (pout))
-
- def load_cached(self):
- for filename, appends in self.fromcache:
- cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
- yield not cached, infos
-
- def parse_generator(self):
- while True:
- if self.parsed >= self.toparse:
- break
-
- try:
- result = self.result_queue.get(timeout=0.25)
- except Queue.Empty:
- pass
- else:
- value = result[1]
- if isinstance(value, BaseException):
- raise value
- else:
- yield result
-
- def parse_next(self):
- result = []
- parsed = None
- try:
- parsed, result = self.results.next()
- except StopIteration:
- self.shutdown()
- return False
- except bb.BBHandledException as exc:
- self.error += 1
- logger.error('Failed to parse recipe: %s' % exc.recipe)
- self.shutdown(clean=False)
- return False
- except ParsingFailure as exc:
- self.error += 1
- logger.error('Unable to parse %s: %s' %
- (exc.recipe, bb.exceptions.to_string(exc.realexception)))
- self.shutdown(clean=False)
- return False
- except bb.parse.ParseError as exc:
- self.error += 1
- logger.error(str(exc))
- self.shutdown(clean=False)
- return False
- except bb.data_smart.ExpansionError as exc:
- self.error += 1
- _, value, _ = sys.exc_info()
- logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc))
- self.shutdown(clean=False)
- return False
- except Exception as exc:
- self.error += 1
- etype, value, tb = sys.exc_info()
- if hasattr(value, "recipe"):
- logger.error('Unable to parse %s', value.recipe,
- exc_info=(etype, value, exc.traceback))
- else:
- # Most likely, an exception occurred during raising an exception
- import traceback
- logger.error('Exception during parse: %s' % traceback.format_exc())
- self.shutdown(clean=False)
- return False
-
- self.current += 1
- self.virtuals += len(result)
- if parsed:
- self.parsed += 1
- if self.parsed % self.progress_chunk == 0:
- bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
- self.cfgdata)
- else:
- self.cached += 1
-
- for virtualfn, info_array in result:
- if info_array[0].skipped:
- self.skipped += 1
- self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
- self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecache,
- parsed=parsed, watcher = self.cooker.add_filewatch)
- return True
-
- def reparse(self, filename):
- infos = self.bb_cache.parse(filename,
- self.cooker.collection.get_file_appends(filename),
- self.cfgdata, self.cooker.caches_array)
- for vfn, info_array in infos:
- self.cooker.recipecache.add_from_recipeinfo(vfn, info_array)
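The CookerParser above is consumed one result at a time: load_cached() and parse_generator() are chained into self.results, and each parse_next() call pulls a single entry until StopIteration triggers shutdown(). A minimal sketch of the driving loop, assuming `parser` is an already-started CookerParser (the real caller is the cooker's idle handler):

    # Sketch only: 'parser' is assumed to be a started CookerParser.
    def drive(parser):
        # Each call consumes one cached or freshly parsed recipe and
        # updates the progress/skip/error counters seen above.
        while parser.parse_next():
            pass
        # parse_next() returns False once the generator is exhausted
        # (clean shutdown) or after an error (shutdown(clean=False)).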
diff --git a/yocto-poky/bitbake/lib/bb/cookerdata.py b/yocto-poky/bitbake/lib/bb/cookerdata.py
deleted file mode 100644
index 50259a9a0..000000000
--- a/yocto-poky/bitbake/lib/bb/cookerdata.py
+++ /dev/null
@@ -1,345 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
-# Copyright (C) 2005 Holger Hans Peter Freyther
-# Copyright (C) 2005 ROAD GmbH
-# Copyright (C) 2006 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os, sys
-from functools import wraps
-import logging
-import bb
-from bb import data
-import bb.parse
-
-logger = logging.getLogger("BitBake")
-parselog = logging.getLogger("BitBake.Parsing")
-
-class ConfigParameters(object):
- def __init__(self, argv=sys.argv):
- self.options, targets = self.parseCommandLine(argv)
- self.environment = self.parseEnvironment()
-
- self.options.pkgs_to_build = targets or []
-
- self.options.tracking = False
- if hasattr(self.options, "show_environment") and self.options.show_environment:
- self.options.tracking = True
-
- for key, val in self.options.__dict__.items():
- setattr(self, key, val)
-
- def parseCommandLine(self, argv=sys.argv):
- raise Exception("Caller must implement commandline option parsing")
-
- def parseEnvironment(self):
- return os.environ.copy()
-
- def updateFromServer(self, server):
- if not self.options.cmd:
- defaulttask, error = server.runCommand(["getVariable", "BB_DEFAULT_TASK"])
- if error:
- raise Exception("Unable to get the value of BB_DEFAULT_TASK from the server: %s" % error)
- self.options.cmd = defaulttask or "build"
- _, error = server.runCommand(["setConfig", "cmd", self.options.cmd])
- if error:
- raise Exception("Unable to set configuration option 'cmd' on the server: %s" % error)
-
- if not self.options.pkgs_to_build:
- bbpkgs, error = server.runCommand(["getVariable", "BBTARGETS"])
- if error:
- raise Exception("Unable to get the value of BBTARGETS from the server: %s" % error)
- if bbpkgs:
- self.options.pkgs_to_build.extend(bbpkgs.split())
-
- def updateToServer(self, server, environment):
- options = {}
- for o in ["abort", "tryaltconfigs", "force", "invalidate_stamp",
- "verbose", "debug", "dry_run", "dump_signatures",
- "debug_domains", "extra_assume_provided", "profile",
- "prefile", "postfile"]:
- options[o] = getattr(self.options, o)
-
- ret, error = server.runCommand(["updateConfig", options, environment])
- if error:
- raise Exception("Unable to update the server configuration with local parameters: %s" % error)
-
- def parseActions(self):
- # Parse any commandline into actions
- action = {'action':None, 'msg':None}
- if self.options.show_environment:
- if 'world' in self.options.pkgs_to_build:
- action['msg'] = "'world' is not a valid target for --environment."
- elif 'universe' in self.options.pkgs_to_build:
- action['msg'] = "'universe' is not a valid target for --environment."
- elif len(self.options.pkgs_to_build) > 1:
- action['msg'] = "Only one target can be used with the --environment option."
- elif self.options.buildfile and len(self.options.pkgs_to_build) > 0:
- action['msg'] = "No target should be used with the --environment and --buildfile options."
- elif len(self.options.pkgs_to_build) > 0:
- action['action'] = ["showEnvironmentTarget", self.options.pkgs_to_build]
- else:
- action['action'] = ["showEnvironment", self.options.buildfile]
- elif self.options.buildfile is not None:
- action['action'] = ["buildFile", self.options.buildfile, self.options.cmd]
- elif self.options.revisions_changed:
- action['action'] = ["compareRevisions"]
- elif self.options.show_versions:
- action['action'] = ["showVersions"]
- elif self.options.parse_only:
- action['action'] = ["parseFiles"]
- elif self.options.dot_graph:
- if self.options.pkgs_to_build:
- action['action'] = ["generateDotGraph", self.options.pkgs_to_build, self.options.cmd]
- else:
- action['msg'] = "Please specify a package name for dependency graph generation."
- else:
- if self.options.pkgs_to_build:
- action['action'] = ["buildTargets", self.options.pkgs_to_build, self.options.cmd]
- else:
- #action['msg'] = "Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information."
- action = None
- self.options.initialaction = action
- return action
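parseCommandLine() is deliberately abstract, so each frontend subclasses ConfigParameters with its own parser. A minimal sketch, assuming Python 2 optparse and defining only a couple of the option names parseActions() reads (a real subclass must supply all of them: cmd, buildfile, show_environment, and so on):

    import optparse

    class SketchConfigParameters(ConfigParameters):
        # Hypothetical subclass for illustration; the real implementations
        # live in the UI entry points, not in cookerdata.py.
        def parseCommandLine(self, argv=sys.argv):
            parser = optparse.OptionParser()
            parser.add_option("-b", "--buildfile", dest="buildfile", default=None)
            parser.add_option("-e", "--environment", action="store_true",
                              dest="show_environment", default=False)
            options, targets = parser.parse_args(argv[1:])
            return options, targets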
-
-class CookerConfiguration(object):
- """
- Manages build options and configurations for one run
- """
-
- def __init__(self):
- self.debug_domains = []
- self.extra_assume_provided = []
- self.prefile = []
- self.postfile = []
- self.prefile_server = []
- self.postfile_server = []
- self.debug = 0
- self.cmd = None
- self.abort = True
- self.force = False
- self.profile = False
- self.nosetscene = False
- self.setsceneonly = False
- self.invalidate_stamp = False
- self.dump_signatures = []
- self.dry_run = False
- self.tracking = False
- self.interface = []
- self.writeeventlog = False
-
- self.env = {}
-
- def setConfigParameters(self, parameters):
- for key in self.__dict__.keys():
- if key in parameters.options.__dict__:
- setattr(self, key, parameters.options.__dict__[key])
- self.env = parameters.environment.copy()
- self.tracking = parameters.tracking
-
- def setServerRegIdleCallback(self, srcb):
- self.server_register_idlecallback = srcb
-
- def __getstate__(self):
- state = {}
- for key in self.__dict__.keys():
- if key == "server_register_idlecallback":
- state[key] = None
- else:
- state[key] = getattr(self, key)
- return state
-
- def __setstate__(self,state):
- for k in state:
- setattr(self, k, state[k])
-
-
-def catch_parse_error(func):
- """Exception handling bits for our parsing"""
- @wraps(func)
- def wrapped(fn, *args):
- try:
- return func(fn, *args)
- except IOError as exc:
- import traceback
- parselog.critical(traceback.format_exc())
- parselog.critical("Unable to parse %s: %s" % (fn, exc))
- sys.exit(1)
- except bb.data_smart.ExpansionError as exc:
- import traceback
-
- bbdir = os.path.dirname(__file__) + os.sep
- exc_class, exc, tb = sys.exc_info()
- for tb in iter(lambda: tb.tb_next, None):
- # Skip frames in bitbake itself, we only want the metadata
- fn, _, _, _ = traceback.extract_tb(tb, 1)[0]
- if not fn.startswith(bbdir):
- break
- parselog.critical("Unable to parse %s", fn, exc_info=(exc_class, exc, tb))
- except bb.parse.ParseError as exc:
- parselog.critical(str(exc))
- sys.exit(1)
- return wrapped
-
-@catch_parse_error
-def parse_config_file(fn, data, include=True):
- return bb.parse.handle(fn, data, include)
-
-@catch_parse_error
-def _inherit(bbclass, data):
- bb.parse.BBHandler.inherit(bbclass, "configuration INHERITs", 0, data)
- return data
-
-def findConfigFile(configfile, data):
- search = []
- bbpath = data.getVar("BBPATH", True)
- if bbpath:
- for i in bbpath.split(":"):
- search.append(os.path.join(i, "conf", configfile))
- path = os.getcwd()
- while path != "/":
- search.append(os.path.join(path, "conf", configfile))
- path, _ = os.path.split(path)
-
- for i in search:
- if os.path.exists(i):
- return i
-
- return None
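findConfigFile() probes conf/&lt;configfile&gt; under each BBPATH entry first, then under every parent of the current directory up to '/'. An illustrative call (all paths are made up):

    # With BBPATH = "/layers/meta:/layers/extra" and cwd = /work/build,
    # the probe order is:
    #   /layers/meta/conf/bblayers.conf
    #   /layers/extra/conf/bblayers.conf
    #   /work/build/conf/bblayers.conf
    #   /work/conf/bblayers.conf
    #   /conf/bblayers.conf
    layerconf = findConfigFile("bblayers.conf", d)  # first existing path, or None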
-
-class CookerDataBuilder(object):
-
- def __init__(self, cookercfg, worker = False):
-
- self.prefiles = cookercfg.prefile
- self.postfiles = cookercfg.postfile
- self.tracking = cookercfg.tracking
-
- bb.utils.set_context(bb.utils.clean_context())
- bb.event.set_class_handlers(bb.event.clean_class_handlers())
- self.data = bb.data.init()
- if self.tracking:
- self.data.enableTracking()
-
- # Keep a datastore of the initial environment variables and their
- # values from when BitBake was launched to enable child processes
- # to use environment variables which have been cleaned from the
- # BitBake processes env
- self.savedenv = bb.data.init()
- for k in cookercfg.env:
- self.savedenv.setVar(k, cookercfg.env[k])
-
- filtered_keys = bb.utils.approved_variables()
- bb.data.inheritFromOS(self.data, self.savedenv, filtered_keys)
- self.data.setVar("BB_ORIGENV", self.savedenv)
-
- if worker:
- self.data.setVar("BB_WORKERCONTEXT", "1")
-
- def parseBaseConfiguration(self):
- try:
- self.parseConfigurationFiles(self.prefiles, self.postfiles)
- except SyntaxError:
- raise bb.BBHandledException
- except bb.data_smart.ExpansionError as e:
- logger.error(str(e))
- raise bb.BBHandledException
- except Exception:
- logger.exception("Error parsing configuration files")
- raise bb.BBHandledException
-
- def _findLayerConf(self, data):
- return findConfigFile("bblayers.conf", data)
-
- def parseConfigurationFiles(self, prefiles, postfiles):
- data = self.data
- bb.parse.init_parser(data)
-
- # Parse files for loading *before* bitbake.conf and any includes
- for f in prefiles:
- data = parse_config_file(f, data)
-
- layerconf = self._findLayerConf(data)
- if layerconf:
- parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
- # By definition bblayers.conf is in conf/ of TOPDIR.
- # We may have been called with cwd somewhere else so reset TOPDIR
- data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
- data = parse_config_file(layerconf, data)
-
- layers = (data.getVar('BBLAYERS', True) or "").split()
-
- data = bb.data.createCopy(data)
- approved = bb.utils.approved_variables()
- for layer in layers:
- parselog.debug(2, "Adding layer %s", layer)
- if 'HOME' in approved and '~' in layer:
- layer = os.path.expanduser(layer)
- if layer.endswith('/'):
- layer = layer.rstrip('/')
- data.setVar('LAYERDIR', layer)
- data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
- data.expandVarref('LAYERDIR')
-
- data.delVar('LAYERDIR')
-
- if not data.getVar("BBPATH", True):
- msg = "The BBPATH variable is not set"
- if not layerconf:
- msg += (" and bitbake did not find a conf/bblayers.conf file in"
- " the expected location.\nMaybe you accidentally"
- " invoked bitbake from the wrong directory?")
- raise SystemExit(msg)
-
- data = parse_config_file(os.path.join("conf", "bitbake.conf"), data)
-
- # Parse files for loading *after* bitbake.conf and any includes
- for p in postfiles:
- data = parse_config_file(p, data)
-
- # Handle any INHERITs and inherit the base class
- bbclasses = ["base"] + (data.getVar('INHERIT', True) or "").split()
- for bbclass in bbclasses:
- data = _inherit(bbclass, data)
-
- # Normally we only register event handlers at the end of parsing .bb files
- # We register any handlers we've found so far here...
- for var in data.getVar('__BBHANDLERS', False) or []:
- handlerfn = data.getVarFlag(var, "filename", False)
- handlerln = int(data.getVarFlag(var, "lineno", False))
- bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask", True) or "").split(), handlerfn, handlerln)
-
- if data.getVar("BB_WORKERCONTEXT", False) is None:
- bb.fetch.fetcher_init(data)
- bb.codeparser.parser_cache_init(data)
- bb.event.fire(bb.event.ConfigParsed(), data)
-
- if data.getVar("BB_INVALIDCONF", False) is True:
- data.setVar("BB_INVALIDCONF", False)
- self.parseConfigurationFiles(self.prefiles, self.postfiles)
- return
-
- bb.parse.init_parser(data)
- data.setVar('BBINCLUDED',bb.parse.get_file_depends(data))
- self.data = data
- self.data_hash = data.get_hash()
-
-
-
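CookerDataBuilder is the entry point that turns a CookerConfiguration into a parsed base datastore. A minimal sketch of the sequence, assuming a hand-built configuration (normally it is populated from ConfigParameters via setConfigParameters()):

    cfg = CookerConfiguration()
    cfg.env = dict(os.environ)          # sketch: usually set from ConfigParameters

    builder = CookerDataBuilder(cfg)
    builder.parseBaseConfiguration()    # prefiles, bblayers.conf, layer.conf files,
                                        # bitbake.conf, postfiles, then INHERITs
    d = builder.data                    # the finalized configuration datastore
    print(d.getVar("BBPATH", True))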
diff --git a/yocto-poky/bitbake/lib/bb/daemonize.py b/yocto-poky/bitbake/lib/bb/daemonize.py
deleted file mode 100644
index 346a61858..000000000
--- a/yocto-poky/bitbake/lib/bb/daemonize.py
+++ /dev/null
@@ -1,193 +0,0 @@
-"""
-Python Daemonizing helper
-
-Configurable daemon behaviors:
-
- 1.) The current working directory set to the "/" directory.
- 2.) The current file creation mode mask set to 0.
- 3.) Close all open files (1024).
- 4.) Redirect standard I/O streams to "/dev/null".
-
-A failed call to fork() now raises an exception.
-
-References:
- 1) Advanced Programming in the Unix Environment: W. Richard Stevens
- http://www.apuebook.com/apue3e.html
- 2) The Linux Programming Interface: Michael Kerrisk
- http://man7.org/tlpi/index.html
- 3) Unix Programming Frequently Asked Questions:
- http://www.faqs.org/faqs/unix-faq/programmer/faq/
-
-Modified to allow a function to be daemonized and return for
-bitbake use by Richard Purdie
-"""
-
-__author__ = "Chad J. Schroeder"
-__copyright__ = "Copyright (C) 2005 Chad J. Schroeder"
-__version__ = "0.2"
-
-# Standard Python modules.
-import os # Miscellaneous OS interfaces.
-import sys # System-specific parameters and functions.
-
-# Default daemon parameters.
-# File mode creation mask of the daemon.
-# For BitBake's children, we do want to inherit the parent umask.
-UMASK = None
-
-# Default maximum for the number of available file descriptors.
-MAXFD = 1024
-
-# The standard I/O file descriptors are redirected to /dev/null by default.
-if (hasattr(os, "devnull")):
- REDIRECT_TO = os.devnull
-else:
- REDIRECT_TO = "/dev/null"
-
-def createDaemon(function, logfile):
- """
- Detach a process from the controlling terminal and run it in the
- background as a daemon, returning control to the caller.
- """
-
- try:
- # Fork a child process so the parent can exit. This returns control to
- # the command-line or shell. It also guarantees that the child will not
- # be a process group leader, since the child receives a new process ID
- # and inherits the parent's process group ID. This step is required
- # to ensure that the next call to os.setsid is successful.
- pid = os.fork()
- except OSError as e:
- raise Exception("%s [%d]" % (e.strerror, e.errno))
-
- if (pid == 0): # The first child.
- # To become the session leader of this new session and the process group
- # leader of the new process group, we call os.setsid(). The process is
- # also guaranteed not to have a controlling terminal.
- os.setsid()
-
- # Is ignoring SIGHUP necessary?
- #
- # It's often suggested that the SIGHUP signal should be ignored before
- # the second fork to avoid premature termination of the process. The
- # reason is that when the first child terminates, all processes, e.g.
- # the second child, in the orphaned group will be sent a SIGHUP.
- #
- # "However, as part of the session management system, there are exactly
- # two cases where SIGHUP is sent on the death of a process:
- #
- # 1) When the process that dies is the session leader of a session that
- # is attached to a terminal device, SIGHUP is sent to all processes
- # in the foreground process group of that terminal device.
- # 2) When the death of a process causes a process group to become
- # orphaned, and one or more processes in the orphaned group are
- # stopped, then SIGHUP and SIGCONT are sent to all members of the
- # orphaned group." [2]
- #
- # The first case can be ignored since the child is guaranteed not to have
- # a controlling terminal. The second case isn't so easy to dismiss.
- # The process group is orphaned when the first child terminates and
- # POSIX.1 requires that every STOPPED process in an orphaned process
- # group be sent a SIGHUP signal followed by a SIGCONT signal. Since the
- # second child is not STOPPED though, we can safely forego ignoring the
- # SIGHUP signal. In any case, there are no ill-effects if it is ignored.
- #
- # import signal # Set handlers for asynchronous events.
- # signal.signal(signal.SIGHUP, signal.SIG_IGN)
-
- try:
- # Fork a second child and exit immediately to prevent zombies. This
- # causes the second child process to be orphaned, making the init
- # process responsible for its cleanup. And, since the first child is
- # a session leader without a controlling terminal, it's possible for
- # it to acquire one by opening a terminal in the future (System V-
- # based systems). This second fork guarantees that the child is no
- # longer a session leader, preventing the daemon from ever acquiring
- # a controlling terminal.
- pid = os.fork() # Fork a second child.
- except OSError as e:
- raise Exception("%s [%d]" % (e.strerror, e.errno))
-
- if (pid == 0): # The second child.
- # We probably don't want the file mode creation mask inherited from
- # the parent, so we give the child complete control over permissions.
- if UMASK is not None:
- os.umask(UMASK)
- else:
- # Parent (the first child) of the second child.
- os._exit(0)
- else:
- # exit() or _exit()?
- # _exit is like exit(), but it doesn't call any functions registered
- # with atexit (and on_exit) or any registered signal handlers. It also
- # closes any open file descriptors. Using exit() may cause all stdio
- # streams to be flushed twice and any temporary files may be unexpectedly
- # removed. It's therefore recommended that child branches of a fork()
- # and the parent branch(es) of a daemon use _exit().
- return
-
- # Close all open file descriptors. This prevents the child from keeping
- # open any file descriptors inherited from the parent. There is a variety
- # of methods to accomplish this task. Three are listed below.
- #
- # Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum
- # number of open file descriptors to close. If it doesn't exist, use
- # the default value (configurable).
- #
- # try:
- # maxfd = os.sysconf("SC_OPEN_MAX")
- # except (AttributeError, ValueError):
- # maxfd = MAXFD
- #
- # OR
- #
- # if (os.sysconf_names.has_key("SC_OPEN_MAX")):
- # maxfd = os.sysconf("SC_OPEN_MAX")
- # else:
- # maxfd = MAXFD
- #
- # OR
- #
- # Use the getrlimit method to retrieve the maximum file descriptor number
- # that can be opened by this process. If there is no limit on the
- # resource, use the default value.
- #
- import resource # Resource usage information.
- maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
- if (maxfd == resource.RLIM_INFINITY):
- maxfd = MAXFD
-
- # Iterate through and close all file descriptors.
-# for fd in range(0, maxfd):
-# try:
-# os.close(fd)
-# except OSError: # ERROR, fd wasn't open to begin with (ignored)
-# pass
-
- # Redirect the standard I/O file descriptors to the specified file. Since
- # the daemon has no controlling terminal, most daemons redirect stdin,
- # stdout, and stderr to /dev/null. This is done to prevent side-effects
- # from reads and writes to the standard I/O file descriptors.
-
- # This call to open is guaranteed to return the lowest file descriptor,
- # which will be 0 (stdin), since it was closed above.
-# os.open(REDIRECT_TO, os.O_RDWR) # standard input (0)
-
- # Duplicate standard input to standard output and standard error.
-# os.dup2(0, 1) # standard output (1)
-# os.dup2(0, 2) # standard error (2)
-
-
- si = open('/dev/null', 'r')
- so = open(logfile, 'w')
- se = so
-
-
- # Replace those fds with our own
- os.dup2(si.fileno(), sys.stdin.fileno())
- os.dup2(so.fileno(), sys.stdout.fileno())
- os.dup2(se.fileno(), sys.stderr.fileno())
-
- function()
-
- os._exit(0)
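createDaemon() double-forks: the original caller returns immediately, the intermediate child _exit()s, and the grandchild runs function() with stdin from /dev/null and stdout/stderr redirected to logfile. A usage sketch (worker() and the log path are illustrative):

    def worker():
        # Stand-in for real daemon work; runs detached from the terminal.
        import time
        time.sleep(1)

    createDaemon(worker, "/tmp/worker.log")
    # Control returns here in the parent right away; worker() executes in
    # the daemonized grandchild and then calls os._exit(0).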
diff --git a/yocto-poky/bitbake/lib/bb/data.py b/yocto-poky/bitbake/lib/bb/data.py
deleted file mode 100644
index dbc6dea68..000000000
--- a/yocto-poky/bitbake/lib/bb/data.py
+++ /dev/null
@@ -1,448 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Data' implementations
-
-Functions for interacting with the data structure used by the
-BitBake build tools.
-
-The expandKeys and update_data are the most expensive
-operations. At night the cookie monster came by and
-suggested 'give me cookies on setting the variables and
-things will work out'. Taking this suggestion into account
-applying the skills from the not yet passed 'Entwurf und
-Analyse von Algorithmen' lecture and the cookie
-monster seems to be right. We will track setVar more carefully
-to have faster update_data and expandKeys operations.
-
-This is a trade-off between speed and memory again but
-the speed is more critical here.
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2005 Holger Hans Peter Freyther
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import sys, os, re
-if sys.argv[0][-5:] == "pydoc":
- path = os.path.dirname(os.path.dirname(sys.argv[1]))
-else:
- path = os.path.dirname(os.path.dirname(sys.argv[0]))
-sys.path.insert(0, path)
-from itertools import groupby
-
-from bb import data_smart
-from bb import codeparser
-import bb
-
-logger = data_smart.logger
-_dict_type = data_smart.DataSmart
-
-def init():
- """Return a new object representing the Bitbake data"""
- return _dict_type()
-
-def init_db(parent = None):
- """Return a new object representing the Bitbake data,
- optionally based on an existing object"""
- if parent is not None:
- return parent.createCopy()
- else:
- return _dict_type()
-
-def createCopy(source):
- """Link the source set to the destination
- If one does not find the value in the destination set,
- search will go on to the source set to get the value.
- Value from source are copy-on-write. i.e. any try to
- modify one of them will end up putting the modified value
- in the destination set.
- """
- return source.createCopy()
-
-def initVar(var, d):
- """Non-destructive var init for data structure"""
- d.initVar(var)
-
-
-def setVar(var, value, d):
- """Set a variable to a given value"""
- d.setVar(var, value)
-
-
-def getVar(var, d, exp = False):
- """Gets the value of a variable"""
- return d.getVar(var, exp)
-
-
-def renameVar(key, newkey, d):
- """Renames a variable from key to newkey"""
- d.renameVar(key, newkey)
-
-def delVar(var, d):
- """Removes a variable from the data set"""
- d.delVar(var)
-
-def appendVar(var, value, d):
- """Append additional value to a variable"""
- d.appendVar(var, value)
-
-def setVarFlag(var, flag, flagvalue, d):
- """Set a flag for a given variable to a given value"""
- d.setVarFlag(var, flag, flagvalue)
-
-def getVarFlag(var, flag, d):
- """Gets given flag from given var"""
- return d.getVarFlag(var, flag, False)
-
-def delVarFlag(var, flag, d):
- """Removes a given flag from the variable's flags"""
- d.delVarFlag(var, flag)
-
-def setVarFlags(var, flags, d):
- """Set the flags for a given variable
-
- Note:
- setVarFlags will not clear previous
- flags. Think of this method as
- addVarFlags
- """
- d.setVarFlags(var, flags)
-
-def getVarFlags(var, d):
- """Gets a variable's flags"""
- return d.getVarFlags(var)
-
-def delVarFlags(var, d):
- """Removes a variable's flags"""
- d.delVarFlags(var)
-
-def keys(d):
- """Return a list of keys in d"""
- return d.keys()
-
-
-__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
-__expand_python_regexp__ = re.compile(r"\${@.+?}")
-
-def expand(s, d, varname = None):
- """Variable expansion using the data store"""
- return d.expand(s, varname)
-
-def expandKeys(alterdata, readdata = None):
- if readdata is None:
- readdata = alterdata
-
- todolist = {}
- for key in alterdata:
- if not '${' in key:
- continue
-
- ekey = expand(key, readdata)
- if key == ekey:
- continue
- todolist[key] = ekey
-
- # These two for loops are split for performance to maximise the
- # usefulness of the expand cache
- for key in sorted(todolist):
- ekey = todolist[key]
- newval = alterdata.getVar(ekey, False)
- if newval is not None:
- val = alterdata.getVar(key, False)
- if val is not None:
- bb.warn("Variable key %s (%s) replaces original key %s (%s)." % (key, val, ekey, newval))
- alterdata.renameVar(key, ekey)
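expandKeys() renames variables whose names themselves contain references, e.g. a key literally called "A${B}". A minimal sketch against a fresh datastore from init():

    d = init()
    d.setVar("B", "x")
    d.setVar("A${B}", "value")      # the key is literally "A${B}" at this point
    expandKeys(d)
    assert d.getVar("Ax", False) == "value"   # key expanded and renamed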
-
-def inheritFromOS(d, savedenv, permitted):
- """Inherit variables from the initial environment."""
- exportlist = bb.utils.preserved_envvars_exported()
- for s in savedenv.keys():
- if s in permitted:
- try:
- d.setVar(s, savedenv.getVar(s, True), op = 'from env')
- if s in exportlist:
- d.setVarFlag(s, "export", True, op = 'auto env export')
- except TypeError:
- pass
-
-def emit_var(var, o=sys.__stdout__, d = init(), all=False):
- """Emit a variable to be sourced by a shell."""
- if d.getVarFlag(var, "python", False):
- return False
-
- export = d.getVarFlag(var, "export", False)
- unexport = d.getVarFlag(var, "unexport", False)
- func = d.getVarFlag(var, "func", False)
- if not all and not export and not unexport and not func:
- return False
-
- try:
- if all:
- oval = d.getVar(var, False)
- val = d.getVar(var, True)
- except (KeyboardInterrupt, bb.build.FuncFailed):
- raise
- except Exception as exc:
- o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc)))
- return False
-
- if all:
- d.varhistory.emit(var, oval, val, o, d)
-
- if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
- return False
-
- varExpanded = d.expand(var)
-
- if unexport:
- o.write('unset %s\n' % varExpanded)
- return False
-
- if val is None:
- return False
-
- val = str(val)
-
- if varExpanded.startswith("BASH_FUNC_"):
- varExpanded = varExpanded[10:-2]
- val = val[3:] # Strip off "() "
- o.write("%s() %s\n" % (varExpanded, val))
- o.write("export -f %s\n" % (varExpanded))
- return True
-
- if func:
- # NOTE: should probably check for unbalanced {} within the var
- val = val.rstrip('\n')
- o.write("%s() {\n%s\n}\n" % (varExpanded, val))
- return True
-
- if export:
- o.write('export ')
-
- # if we're going to output this within doublequotes,
- # to a shell, we need to escape the quotes in the var
- alter = re.sub('"', '\\"', val)
- alter = re.sub('\n', ' \\\n', alter)
- alter = re.sub('\\$', '\\\\$', alter)
- o.write('%s="%s"\n' % (varExpanded, alter))
- return False
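For an ordinary exported variable, emit_var() reaches the final branch above and writes a double-quoted assignment with embedded quotes, newlines and dollar signs escaped. A sketch using Python 2's StringIO (matching this codebase):

    import StringIO

    d = init()
    d.setVar("CFLAGS", 'a "quoted" $flag')
    d.setVarFlag("CFLAGS", "export", True)
    o = StringIO.StringIO()
    emit_var("CFLAGS", o, d)
    # o.getvalue() now reads:  export CFLAGS="a \"quoted\" \$flag"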
-
-def emit_env(o=sys.__stdout__, d = init(), all=False):
- """Emits all items in the data store in a format such that it can be sourced by a shell."""
-
- isfunc = lambda key: bool(d.getVarFlag(key, "func", False))
- keys = sorted((key for key in d.keys() if not key.startswith("__")), key=isfunc)
- grouped = groupby(keys, isfunc)
- for isfunc, keys in grouped:
- for key in keys:
- emit_var(key, o, d, all and not isfunc) and o.write('\n')
-
-def exported_keys(d):
- return (key for key in d.keys() if not key.startswith('__') and
- d.getVarFlag(key, 'export', False) and
- not d.getVarFlag(key, 'unexport', False))
-
-def exported_vars(d):
- for key in exported_keys(d):
- try:
- value = d.getVar(key, True)
- except Exception:
- # Expansion failed; skip this key rather than reusing a stale
- # (or unbound) 'value' from a previous iteration.
- continue
-
- if value is not None:
- yield key, str(value)
-
-def emit_func(func, o=sys.__stdout__, d = init()):
- """Emits all items in the data store in a format such that it can be sourced by a shell."""
-
- keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func", False))
- for key in keys:
- emit_var(key, o, d, False)
-
- o.write('\n')
- emit_var(func, o, d, False) and o.write('\n')
- newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func, True))
- newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
- seen = set()
- while newdeps:
- deps = newdeps
- seen |= deps
- newdeps = set()
- for dep in deps:
- if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False):
- emit_var(dep, o, d, False) and o.write('\n')
- newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep, True))
- newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
- newdeps -= seen
-
-_functionfmt = """
-def {function}(d):
-{body}"""
-
-def emit_func_python(func, o=sys.__stdout__, d = init()):
- """Emits all items in the data store in a format such that it can be sourced by a shell."""
-
- def write_func(func, o, call = False):
- body = d.getVar(func, False)
- if not body.startswith("def"):
- body = _functionfmt.format(function=func, body=body)
-
- o.write(body.strip() + "\n\n")
- if call:
- o.write(func + "(d)" + "\n\n")
-
- write_func(func, o, True)
- pp = bb.codeparser.PythonParser(func, logger)
- pp.parse_python(d.getVar(func, False))
- newdeps = pp.execs
- newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
- seen = set()
- while newdeps:
- deps = newdeps
- seen |= deps
- newdeps = set()
- for dep in deps:
- if d.getVarFlag(dep, "func", False) and d.getVarFlag(dep, "python", False):
- write_func(dep, o)
- pp = bb.codeparser.PythonParser(dep, logger)
- pp.parse_python(d.getVar(dep, False))
- newdeps |= pp.execs
- newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
- newdeps -= seen
-
-def update_data(d):
- """Performs final steps upon the datastore, including application of overrides"""
- d.finalize(parent = True)
-
-def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
- deps = set()
- try:
- if key[-1] == ']':
- vf = key[:-1].split('[')
- value = d.getVarFlag(vf[0], vf[1], False)
- parser = d.expandWithRefs(value, key)
- deps |= parser.references
- deps = deps | (keys & parser.execs)
- return deps, value
- varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "vardepvalueexclude", "postfuncs", "prefuncs", "lineno", "filename"]) or {}
- vardeps = varflags.get("vardeps")
- value = d.getVar(key, False)
-
- def handle_contains(value, contains, d):
- newvalue = ""
- for k in sorted(contains):
- l = (d.getVar(k, True) or "").split()
- for word in sorted(contains[k]):
- if word in l:
- newvalue += "\n%s{%s} = Set" % (k, word)
- else:
- newvalue += "\n%s{%s} = Unset" % (k, word)
- if not newvalue:
- return value
- if not value:
- return newvalue
- return value + newvalue
-
- if "vardepvalue" in varflags:
- value = varflags.get("vardepvalue")
- elif varflags.get("func"):
- if varflags.get("python"):
- parser = bb.codeparser.PythonParser(key, logger)
- if value and "\t" in value:
- logger.warn("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
- parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
- deps = deps | parser.references
- deps = deps | (keys & parser.execs)
- value = handle_contains(value, parser.contains, d)
- else:
- parsedvar = d.expandWithRefs(value, key)
- parser = bb.codeparser.ShellParser(key, logger)
- parser.parse_shell(parsedvar.value)
- deps = deps | shelldeps
- deps = deps | parsedvar.references
- deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
- value = handle_contains(value, parsedvar.contains, d)
- if vardeps is None:
- parser.log.flush()
- if "prefuncs" in varflags:
- deps = deps | set(varflags["prefuncs"].split())
- if "postfuncs" in varflags:
- deps = deps | set(varflags["postfuncs"].split())
- else:
- parser = d.expandWithRefs(value, key)
- deps |= parser.references
- deps = deps | (keys & parser.execs)
- value = handle_contains(value, parser.contains, d)
-
- if "vardepvalueexclude" in varflags:
- exclude = varflags.get("vardepvalueexclude")
- for excl in exclude.split('|'):
- if excl:
- value = value.replace(excl, '')
-
- # Add varflags, assuming an exclusion list is set
- if varflagsexcl:
- varfdeps = []
- for f in varflags:
- if f not in varflagsexcl:
- varfdeps.append('%s[%s]' % (key, f))
- if varfdeps:
- deps |= set(varfdeps)
-
- deps |= set((vardeps or "").split())
- deps -= set(varflags.get("vardepsexclude", "").split())
- except Exception as e:
- bb.warn("Exception during build_dependencies for %s" % key)
- raise
- return deps, value
- #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
- #d.setVarFlag(key, "vardeps", deps)
-
-def generate_dependencies(d):
-
- keys = set(key for key in d if not key.startswith("__"))
- shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False))
- varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS', True)
-
- deps = {}
- values = {}
-
- tasklist = d.getVar('__BBTASKS', False) or []
- for task in tasklist:
- deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d)
- newdeps = deps[task]
- seen = set()
- while newdeps:
- nextdeps = newdeps
- seen |= nextdeps
- newdeps = set()
- for dep in nextdeps:
- if dep not in deps:
- deps[dep], values[dep] = build_dependencies(dep, keys, shelldeps, varflagsexcl, d)
- newdeps |= deps[dep]
- newdeps -= seen
- #print "For %s: %s" % (task, str(deps[task]))
- return tasklist, deps, values
-
-def inherits_class(klass, d):
- val = d.getVar('__inherit_cache', False) or []
- needle = os.path.join('classes', '%s.bbclass' % klass)
- for v in val:
- if v.endswith(needle):
- return True
- return False
diff --git a/yocto-poky/bitbake/lib/bb/data_smart.py b/yocto-poky/bitbake/lib/bb/data_smart.py
deleted file mode 100644
index fa1e79427..000000000
--- a/yocto-poky/bitbake/lib/bb/data_smart.py
+++ /dev/null
@@ -1,969 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake Smart Dictionary Implementation
-
-Functions for interacting with the data structure used by the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2004, 2005 Seb Frankengul
-# Copyright (C) 2005, 2006 Holger Hans Peter Freyther
-# Copyright (C) 2005 Uli Luckas
-# Copyright (C) 2005 ROAD GmbH
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import copy, re, sys, traceback
-from collections import MutableMapping
-import logging
-import hashlib
-import bb, bb.codeparser
-from bb import utils
-from bb.COW import COWDictBase
-
-logger = logging.getLogger("BitBake.Data")
-
-__setvar_keyword__ = ["_append", "_prepend", "_remove"]
-__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>.*))?$')
-__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t :]+}")
-__expand_python_regexp__ = re.compile(r"\${@.+?}")
-
-def infer_caller_details(loginfo, parent = False, varval = True):
- """Save the caller the trouble of specifying everything."""
- # Save effort.
- if 'ignore' in loginfo and loginfo['ignore']:
- return
- # If nothing was provided, mark this as possibly unneeded.
- if not loginfo:
- loginfo['ignore'] = True
- return
- # Infer caller's likely values for variable (var) and value (value),
- # to reduce clutter in the rest of the code.
- above = None
- def set_above():
- try:
- raise Exception
- except Exception:
- tb = sys.exc_info()[2]
- if parent:
- return tb.tb_frame.f_back.f_back.f_back
- else:
- return tb.tb_frame.f_back.f_back
-
- if varval and ('variable' not in loginfo or 'detail' not in loginfo):
- if not above:
- above = set_above()
- lcls = above.f_locals.items()
- for k, v in lcls:
- if k == 'value' and 'detail' not in loginfo:
- loginfo['detail'] = v
- if k == 'var' and 'variable' not in loginfo:
- loginfo['variable'] = v
- # Infer file/line/function from traceback
- # Don't use traceback.extract_stack() since it fills the line contents which
- # we don't need and that hits stat syscalls
- if 'file' not in loginfo:
- if not above:
- above = set_above()
- f = above.f_back
- line = f.f_lineno
- file = f.f_code.co_filename
- func = f.f_code.co_name
- loginfo['file'] = file
- loginfo['line'] = line
- if func not in loginfo:
- loginfo['func'] = func
-
-class VariableParse:
- def __init__(self, varname, d, val = None):
- self.varname = varname
- self.d = d
- self.value = val
-
- self.references = set()
- self.execs = set()
- self.contains = {}
-
- def var_sub(self, match):
- key = match.group()[2:-1]
- if self.varname and key:
- if self.varname == key:
- raise Exception("variable %s references itself!" % self.varname)
- if key in self.d.expand_cache:
- varparse = self.d.expand_cache[key]
- var = varparse.value
- else:
- var = self.d.getVarFlag(key, "_content", True)
- self.references.add(key)
- if var is not None:
- return var
- else:
- return match.group()
-
- def python_sub(self, match):
- code = match.group()[3:-1]
- codeobj = compile(code.strip(), self.varname or "<expansion>", "eval")
-
- parser = bb.codeparser.PythonParser(self.varname, logger)
- parser.parse_python(code)
- if self.varname:
- vardeps = self.d.getVarFlag(self.varname, "vardeps", True)
- if vardeps is None:
- parser.log.flush()
- else:
- parser.log.flush()
- self.references |= parser.references
- self.execs |= parser.execs
-
- for k in parser.contains:
- if k not in self.contains:
- self.contains[k] = parser.contains[k].copy()
- else:
- self.contains[k].update(parser.contains[k])
- value = utils.better_eval(codeobj, DataContext(self.d))
- return str(value)
-
-
-class DataContext(dict):
- def __init__(self, metadata, **kwargs):
- self.metadata = metadata
- dict.__init__(self, **kwargs)
- self['d'] = metadata
-
- def __missing__(self, key):
- value = self.metadata.getVar(key, True)
- if value is None or self.metadata.getVarFlag(key, 'func', False):
- raise KeyError(key)
- else:
- return value
-
-class ExpansionError(Exception):
- def __init__(self, varname, expression, exception):
- self.expression = expression
- self.variablename = varname
- self.exception = exception
- if varname:
- if expression:
- self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
- else:
- self.msg = "Failure expanding variable %s: %s: %s" % (varname, type(exception).__name__, exception)
- else:
- self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception)
- Exception.__init__(self, self.msg)
- self.args = (varname, expression, exception)
- def __str__(self):
- return self.msg
-
-class IncludeHistory(object):
- def __init__(self, parent = None, filename = '[TOP LEVEL]'):
- self.parent = parent
- self.filename = filename
- self.children = []
- self.current = self
-
- def copy(self):
- new = IncludeHistory(self.parent, self.filename)
- for c in self.children:
- new.children.append(c)
- return new
-
- def include(self, filename):
- newfile = IncludeHistory(self.current, filename)
- self.current.children.append(newfile)
- self.current = newfile
- return self
-
- def __enter__(self):
- pass
-
- def __exit__(self, a, b, c):
- if self.current.parent:
- self.current = self.current.parent
- else:
- bb.warn("Include log: Tried to finish '%s' at top level." % filename)
- return False
-
- def emit(self, o, level = 0):
- """Emit an include history file, and its children."""
- if level:
- spaces = " " * (level - 1)
- o.write("# %s%s" % (spaces, self.filename))
- if len(self.children) > 0:
- o.write(" includes:")
- else:
- o.write("#\n# INCLUDE HISTORY:\n#")
- level = level + 1
- for child in self.children:
- o.write("\n")
- child.emit(o, level)
-
-class VariableHistory(object):
- def __init__(self, dataroot):
- self.dataroot = dataroot
- self.variables = COWDictBase.copy()
-
- def copy(self):
- new = VariableHistory(self.dataroot)
- new.variables = self.variables.copy()
- return new
-
- def record(self, *kwonly, **loginfo):
- if not self.dataroot._tracking:
- return
- if len(kwonly) > 0:
- raise TypeError
- infer_caller_details(loginfo, parent = True)
- if 'ignore' in loginfo and loginfo['ignore']:
- return
- if 'op' not in loginfo or not loginfo['op']:
- loginfo['op'] = 'set'
- if 'detail' in loginfo:
- loginfo['detail'] = str(loginfo['detail'])
- if 'variable' not in loginfo or 'file' not in loginfo:
- raise ValueError("record() missing variable or file.")
- var = loginfo['variable']
-
- if var not in self.variables:
- self.variables[var] = []
- if not isinstance(self.variables[var], list):
- return
- if 'nodups' in loginfo and loginfo in self.variables[var]:
- return
- self.variables[var].append(loginfo.copy())
-
- def variable(self, var):
- if var in self.variables:
- return self.variables[var]
- else:
- return []
-
- def emit(self, var, oval, val, o, d):
- history = self.variable(var)
-
- # Append override history
- if var in d.overridedata:
- for (r, override) in d.overridedata[var]:
- for event in self.variable(r):
- loginfo = event.copy()
- if 'flag' in loginfo and not loginfo['flag'].startswith("_"):
- continue
- loginfo['variable'] = var
- loginfo['op'] = 'override[%s]:%s' % (override, loginfo['op'])
- history.append(loginfo)
-
- commentVal = re.sub('\n', '\n#', str(oval))
- if history:
- if len(history) == 1:
- o.write("#\n# $%s\n" % var)
- else:
- o.write("#\n# $%s [%d operations]\n" % (var, len(history)))
- for event in history:
- # o.write("# %s\n" % str(event))
- if 'func' in event:
- # If we have a function listed, this is internal
- # code, not an operation in a config file, and the
- # full path is distracting.
- event['file'] = re.sub('.*/', '', event['file'])
- display_func = ' [%s]' % event['func']
- else:
- display_func = ''
- if 'flag' in event:
- flag = '[%s] ' % (event['flag'])
- else:
- flag = ''
- o.write("# %s %s:%s%s\n# %s\"%s\"\n" % (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', event['detail'])))
- if len(history) > 1:
- o.write("# pre-expansion value:\n")
- o.write('# "%s"\n' % (commentVal))
- else:
- o.write("#\n# $%s\n# [no history recorded]\n#\n" % var)
- o.write('# "%s"\n' % (commentVal))
-
- def get_variable_files(self, var):
- """Get the files where operations are made on a variable"""
- var_history = self.variable(var)
- files = []
- for event in var_history:
- files.append(event['file'])
- return files
-
- def get_variable_lines(self, var, f):
- """Get the line where a operation is made on a variable in file f"""
- var_history = self.variable(var)
- lines = []
- for event in var_history:
- if f== event['file']:
- line = event['line']
- lines.append(line)
- return lines
-
- def get_variable_items_files(self, var, d):
- """
- Use variable history to map items added to a list variable and
- the files in which they were added.
- """
- history = self.variable(var)
- finalitems = (d.getVar(var, True) or '').split()
- filemap = {}
- isset = False
- for event in history:
- if 'flag' in event:
- continue
- if event['op'] == '_remove':
- continue
- if isset and event['op'] == 'set?':
- continue
- isset = True
- items = d.expand(event['detail']).split()
- for item in items:
- # This is a little crude but is belt-and-braces to avoid us
- # having to handle every possible operation type specifically
- if item in finalitems and not item in filemap:
- filemap[item] = event['file']
- return filemap
-
- def del_var_history(self, var, f=None, line=None):
- """If file f and line are not given, the entire history of var is deleted"""
- if var in self.variables:
- if f and line:
- self.variables[var] = [x for x in self.variables[var] if x['file'] != f or x['line'] != line]
- else:
- self.variables[var] = []
-
-class DataSmart(MutableMapping):
- def __init__(self):
- self.dict = {}
-
- self.inchistory = IncludeHistory()
- self.varhistory = VariableHistory(self)
- self._tracking = False
-
- self.expand_cache = {}
-
- # cookie monster tribute
- # Need to be careful about writes to overridedata as
- # its only a shallow copy, could influence other data store
- # copies!
- self.overridedata = {}
- self.overrides = None
- self.overridevars = set(["OVERRIDES", "FILE"])
- self.inoverride = False
-
- def enableTracking(self):
- self._tracking = True
-
- def disableTracking(self):
- self._tracking = False
-
- def expandWithRefs(self, s, varname):
-
- if not isinstance(s, basestring): # sanity check
- return VariableParse(varname, self, s)
-
- if varname and varname in self.expand_cache:
- return self.expand_cache[varname]
-
- varparse = VariableParse(varname, self)
-
- while s.find('${') != -1:
- olds = s
- try:
- s = __expand_var_regexp__.sub(varparse.var_sub, s)
- try:
- s = __expand_python_regexp__.sub(varparse.python_sub, s)
- except SyntaxError as e:
- # Likely unmatched brackets, just don't expand the expression
- if e.msg != "EOL while scanning string literal":
- raise
- if s == olds:
- break
- except ExpansionError:
- raise
- except bb.parse.SkipRecipe:
- raise
- except Exception as exc:
- exc_class, exc, tb = sys.exc_info()
- raise ExpansionError, ExpansionError(varname, s, exc), tb
-
- varparse.value = s
-
- if varname:
- self.expand_cache[varname] = varparse
-
- return varparse
-
- def expand(self, s, varname = None):
- return self.expandWithRefs(s, varname).value
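expandWithRefs() hands back the whole VariableParse, so callers can see not just the expanded value but the variable references and function calls collected by the two substitutions. Sketch on a bare DataSmart:

    d = DataSmart()
    d.setVar("B", "world")
    vp = d.expandWithRefs("hello ${B} ${@'!' * 3}", None)
    # vp.value      == "hello world !!!"
    # vp.references == set(["B"])   (collected by var_sub)
    # vp.execs      holds any function calls found in the ${@...} python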
-
- def finalize(self, parent = False):
- return
-
- def internal_finalize(self, parent = False):
- """Performs final steps upon the datastore, including application of overrides"""
- self.overrides = None
-
- def need_overrides(self):
- if self.overrides is not None:
- return
- if self.inoverride:
- return
- for count in range(5):
- self.inoverride = True
- # Can end up here recursively so setup dummy values
- self.overrides = []
- self.overridesset = set()
- self.overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
- self.overridesset = set(self.overrides)
- self.inoverride = False
- self.expand_cache = {}
- newoverrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
- if newoverrides == self.overrides:
- break
- self.overrides = newoverrides
- self.overridesset = set(self.overrides)
- else:
- bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work.")
-
- def initVar(self, var):
- self.expand_cache = {}
- if not var in self.dict:
- self.dict[var] = {}
-
- def _findVar(self, var):
- dest = self.dict
- while dest:
- if var in dest:
- return dest[var]
-
- if "_data" not in dest:
- break
- dest = dest["_data"]
-
- def _makeShadowCopy(self, var):
- if var in self.dict:
- return
-
- local_var = self._findVar(var)
-
- if local_var:
- self.dict[var] = copy.copy(local_var)
- else:
- self.initVar(var)
-
-
- def setVar(self, var, value, **loginfo):
- #print("var=" + str(var) + " val=" + str(value))
- parsing=False
- if 'parsing' in loginfo:
- parsing=True
-
- if 'op' not in loginfo:
- loginfo['op'] = "set"
- self.expand_cache = {}
- match = __setvar_regexp__.match(var)
- if match and match.group("keyword") in __setvar_keyword__:
- base = match.group('base')
- keyword = match.group("keyword")
- override = match.group('add')
- l = self.getVarFlag(base, keyword, False) or []
- l.append([value, override])
- self.setVarFlag(base, keyword, l, ignore=True)
- # And cause that to be recorded:
- loginfo['detail'] = value
- loginfo['variable'] = base
- if override:
- loginfo['op'] = '%s[%s]' % (keyword, override)
- else:
- loginfo['op'] = keyword
- self.varhistory.record(**loginfo)
- # todo make sure keyword is not __doc__ or __module__
- # pay the cookie monster
-
- # more cookies for the cookie monster
- if '_' in var:
- self._setvar_update_overrides(base, **loginfo)
-
- if base in self.overridevars:
- self._setvar_update_overridevars(var, value)
- return
-
- if not var in self.dict:
- self._makeShadowCopy(var)
-
- if not parsing:
- if "_append" in self.dict[var]:
- del self.dict[var]["_append"]
- if "_prepend" in self.dict[var]:
- del self.dict[var]["_prepend"]
- if var in self.overridedata:
- active = []
- self.need_overrides()
- for (r, o) in self.overridedata[var]:
- if o in self.overridesset:
- active.append(r)
- elif "_" in o:
- if set(o.split("_")).issubset(self.overridesset):
- active.append(r)
- for a in active:
- self.delVar(a)
- del self.overridedata[var]
-
- # more cookies for the cookie monster
- if '_' in var:
- self._setvar_update_overrides(var, **loginfo)
-
- # setting var
- self.dict[var]["_content"] = value
- self.varhistory.record(**loginfo)
-
- if var in self.overridevars:
- self._setvar_update_overridevars(var, value)
-
- def _setvar_update_overridevars(self, var, value):
- vardata = self.expandWithRefs(value, var)
- new = vardata.references
- new.update(vardata.contains.keys())
- while not new.issubset(self.overridevars):
- nextnew = set()
- self.overridevars.update(new)
- for i in new:
- vardata = self.expandWithRefs(self.getVar(i, True), i)
- nextnew.update(vardata.references)
- nextnew.update(vardata.contains.keys())
- new = nextnew
- self.internal_finalize(True)
-
- def _setvar_update_overrides(self, var, **loginfo):
- # aka pay the cookie monster
- override = var[var.rfind('_')+1:]
- shortvar = var[:var.rfind('_')]
- while override and override.islower():
- if shortvar not in self.overridedata:
- self.overridedata[shortvar] = []
- if [var, override] not in self.overridedata[shortvar]:
- # Force CoW by recreating the list first
- self.overridedata[shortvar] = list(self.overridedata[shortvar])
- self.overridedata[shortvar].append([var, override])
- override = None
- if "_" in shortvar:
- override = var[shortvar.rfind('_')+1:]
- shortvar = var[:shortvar.rfind('_')]
- if len(shortvar) == 0:
- override = None
-
- def getVar(self, var, expand, noweakdefault=False, parsing=False):
- return self.getVarFlag(var, "_content", expand, noweakdefault, parsing)
-
- def renameVar(self, key, newkey, **loginfo):
- """
- Rename the variable key to newkey
- """
- val = self.getVar(key, 0, parsing=True)
- if val is not None:
- loginfo['variable'] = newkey
- loginfo['op'] = 'rename from %s' % key
- loginfo['detail'] = val
- self.varhistory.record(**loginfo)
- self.setVar(newkey, val, ignore=True, parsing=True)
-
- for i in (__setvar_keyword__):
- src = self.getVarFlag(key, i, False)
- if src is None:
- continue
-
- dest = self.getVarFlag(newkey, i, False) or []
- dest.extend(src)
- self.setVarFlag(newkey, i, dest, ignore=True)
-
- if key in self.overridedata:
- self.overridedata[newkey] = []
- for (v, o) in self.overridedata[key]:
- self.overridedata[newkey].append([v.replace(key, newkey), o])
- self.renameVar(v, v.replace(key, newkey))
-
- if '_' in newkey and val is None:
- self._setvar_update_overrides(newkey, **loginfo)
-
- loginfo['variable'] = key
- loginfo['op'] = 'rename (to)'
- loginfo['detail'] = newkey
- self.varhistory.record(**loginfo)
- self.delVar(key, ignore=True)
-
- def appendVar(self, var, value, **loginfo):
- loginfo['op'] = 'append'
- self.varhistory.record(**loginfo)
- self.setVar(var + "_append", value, ignore=True, parsing=True)
-
- def prependVar(self, var, value, **loginfo):
- loginfo['op'] = 'prepend'
- self.varhistory.record(**loginfo)
- self.setVar(var + "_prepend", value, ignore=True, parsing=True)
-
- def delVar(self, var, **loginfo):
- loginfo['detail'] = ""
- loginfo['op'] = 'del'
- self.varhistory.record(**loginfo)
- self.expand_cache = {}
- self.dict[var] = {}
- if var in self.overridedata:
- del self.overridedata[var]
- if '_' in var:
- override = var[var.rfind('_')+1:]
- shortvar = var[:var.rfind('_')]
- while override and override.islower():
- try:
- if shortvar in self.overridedata:
- # Force CoW by recreating the list first
- self.overridedata[shortvar] = list(self.overridedata[shortvar])
- self.overridedata[shortvar].remove([var, override])
- except ValueError as e:
- pass
- override = None
- if "_" in shortvar:
- override = var[shortvar.rfind('_')+1:]
- shortvar = var[:shortvar.rfind('_')]
- if len(shortvar) == 0:
- override = None
-
- def setVarFlag(self, var, flag, value, **loginfo):
- self.expand_cache = {}
- if 'op' not in loginfo:
- loginfo['op'] = "set"
- loginfo['flag'] = flag
- self.varhistory.record(**loginfo)
- if not var in self.dict:
- self._makeShadowCopy(var)
- self.dict[var][flag] = value
-
- if flag == "_defaultval" and '_' in var:
- self._setvar_update_overrides(var, **loginfo)
- if flag == "_defaultval" and var in self.overridevars:
- self._setvar_update_overridevars(var, value)
-
- if flag == "unexport" or flag == "export":
- if not "__exportlist" in self.dict:
- self._makeShadowCopy("__exportlist")
- if not "_content" in self.dict["__exportlist"]:
- self.dict["__exportlist"]["_content"] = set()
- self.dict["__exportlist"]["_content"].add(var)
-
- def getVarFlag(self, var, flag, expand, noweakdefault=False, parsing=False):
- local_var = self._findVar(var)
- value = None
- if flag == "_content" and var in self.overridedata and not parsing:
- match = False
- active = {}
- self.need_overrides()
- for (r, o) in self.overridedata[var]:
- # What about double overrides both with "_" in the name?
- if o in self.overridesset:
- active[o] = r
- elif "_" in o:
- if set(o.split("_")).issubset(self.overridesset):
- active[o] = r
-
- mod = True
- while mod:
- mod = False
- for o in self.overrides:
- for a in active.copy():
- if a.endswith("_" + o):
- t = active[a]
- del active[a]
- active[a.replace("_" + o, "")] = t
- mod = True
- elif a == o:
- match = active[a]
- del active[a]
- if match:
- value = self.getVar(match, False)
-
- if local_var is not None and value is None:
- if flag in local_var:
- value = copy.copy(local_var[flag])
- elif flag == "_content" and "_defaultval" in local_var and not noweakdefault:
- value = copy.copy(local_var["_defaultval"])
-
-
- if flag == "_content" and local_var is not None and "_append" in local_var and not parsing:
- if not value:
- value = ""
- self.need_overrides()
- for (r, o) in local_var["_append"]:
- match = True
- if o:
- for o2 in o.split("_"):
- if not o2 in self.overrides:
- match = False
- if match:
- value = value + r
-
- if flag == "_content" and local_var is not None and "_prepend" in local_var and not parsing:
- if not value:
- value = ""
- self.need_overrides()
- for (r, o) in local_var["_prepend"]:
-
- match = True
- if o:
- for o2 in o.split("_"):
- if not o2 in self.overrides:
- match = False
- if match:
- value = r + value
-
- if expand and value:
- # Only getvar (flag == _content) hits the expand cache
- cachename = None
- if flag == "_content":
- cachename = var
- else:
- cachename = var + "[" + flag + "]"
- value = self.expand(value, cachename)
-
- if value and flag == "_content" and local_var is not None and "_remove" in local_var:
- removes = []
- self.need_overrides()
- for (r, o) in local_var["_remove"]:
- match = True
- if o:
- for o2 in o.split("_"):
- if not o2 in self.overrides:
- match = False
- if match:
- removes.extend(self.expand(r).split())
-
- filtered = filter(lambda v: v not in removes,
- value.split())
- value = " ".join(filtered)
- if expand and var in self.expand_cache:
- # We need to ensure the expand cache has the correct value
- # flag == "_content" here
- self.expand_cache[var].value = value
- return value
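Reading getVarFlag("_content") therefore applies, in order: override replacement, _append, _prepend, expansion, then _remove. A sketch of the observable result on a bare DataSmart:

    d = DataSmart()
    d.setVar("OVERRIDES", "linux:arm")
    d.setVar("FOO", "a")
    d.setVar("FOO_append", " b")
    d.setVar("FOO_remove", "a")
    # base "a"  ->  append gives "a b"  ->  expand  ->  remove drops "a"
    assert d.getVar("FOO", True) == "b"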
-
- def delVarFlag(self, var, flag, **loginfo):
- self.expand_cache = {}
- local_var = self._findVar(var)
- if not local_var:
- return
- if not var in self.dict:
- self._makeShadowCopy(var)
-
- if var in self.dict and flag in self.dict[var]:
- loginfo['detail'] = ""
- loginfo['op'] = 'delFlag'
- loginfo['flag'] = flag
- self.varhistory.record(**loginfo)
-
- del self.dict[var][flag]
-
- def appendVarFlag(self, var, flag, value, **loginfo):
- loginfo['op'] = 'append'
- loginfo['flag'] = flag
- self.varhistory.record(**loginfo)
- newvalue = (self.getVarFlag(var, flag, False) or "") + value
- self.setVarFlag(var, flag, newvalue, ignore=True)
-
- def prependVarFlag(self, var, flag, value, **loginfo):
- loginfo['op'] = 'prepend'
- loginfo['flag'] = flag
- self.varhistory.record(**loginfo)
- newvalue = value + (self.getVarFlag(var, flag, False) or "")
- self.setVarFlag(var, flag, newvalue, ignore=True)
-
- def setVarFlags(self, var, flags, **loginfo):
- self.expand_cache = {}
- infer_caller_details(loginfo)
- if not var in self.dict:
- self._makeShadowCopy(var)
-
- for i in flags:
- if i == "_content":
- continue
- loginfo['flag'] = i
- loginfo['detail'] = flags[i]
- self.varhistory.record(**loginfo)
- self.dict[var][i] = flags[i]
-
- def getVarFlags(self, var, expand = False, internalflags=False):
- local_var = self._findVar(var)
- flags = {}
-
- if local_var:
- for i in local_var:
- if i.startswith("_") and not internalflags:
- continue
- flags[i] = local_var[i]
- if expand and i in expand:
- flags[i] = self.expand(flags[i], var + "[" + i + "]")
- if len(flags) == 0:
- return None
- return flags
-
- def delVarFlags(self, var, **loginfo):
- self.expand_cache = {}
- if not var in self.dict:
- self._makeShadowCopy(var)
-
- if var in self.dict:
- content = None
-
- loginfo['op'] = 'delete flags'
- self.varhistory.record(**loginfo)
-
- # try to save the content
- if "_content" in self.dict[var]:
- content = self.dict[var]["_content"]
- self.dict[var] = {}
- self.dict[var]["_content"] = content
- else:
- del self.dict[var]
-
- def createCopy(self):
- """
- Create a copy of self by setting _data to self
- """
- # we really want this to be a DataSmart...
- data = DataSmart()
- data.dict["_data"] = self.dict
- data.varhistory = self.varhistory.copy()
- data.varhistory.datasmart = data
- data.inchistory = self.inchistory.copy()
-
- data._tracking = self._tracking
-
- data.overrides = None
- data.overridevars = copy.copy(self.overridevars)
- # Should really be a deepcopy but has heavy overhead.
- # Instead, we're careful with writes.
- data.overridedata = copy.copy(self.overridedata)
-
- return data
-
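Because createCopy() links the child to the parent through the "_data" key instead of copying, reads fall through to the parent until the child shadows a key with its own write. A small sketch (same assumption of a bitbake checkout on sys.path):

    from bb.data_smart import DataSmart

    d = DataSmart()
    d.setVar("A", "parent")

    child = d.createCopy()           # shares the parent dict via "_data"
    child.setVar("A", "child")       # shadow copy lands in the child only

    print(child.getVar("A", False))  # expected: "child"
    print(d.getVar("A", False))      # expected: "parent" (unchanged)
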
- def expandVarref(self, variable, parents=False):
- """Find all references to variable in the data and expand it
- in place, optionally descending to parent datastores."""
-
- if parents:
- keys = iter(self)
- else:
- keys = self.localkeys()
-
- ref = '${%s}' % variable
- value = self.getVar(variable, False)
- for key in keys:
- referrervalue = self.getVar(key, False)
- if referrervalue and ref in referrervalue:
- self.setVar(key, referrervalue.replace(ref, value))
-
- def localkeys(self):
- for key in self.dict:
- if key != '_data':
- yield key
-
- def __iter__(self):
- deleted = set()
- overrides = set()
- def keylist(d):
- klist = set()
- for key in d:
- if key == "_data":
- continue
- if key in deleted:
- continue
- if key in overrides:
- continue
- if not d[key]:
- deleted.add(key)
- continue
- klist.add(key)
-
- if "_data" in d:
- klist |= keylist(d["_data"])
-
- return klist
-
- self.need_overrides()
- for var in self.overridedata:
- for (r, o) in self.overridedata[var]:
- if o in self.overridesset:
- overrides.add(var)
- elif "_" in o:
- if set(o.split("_")).issubset(self.overridesset):
- overrides.add(var)
-
- for k in keylist(self.dict):
- yield k
-
- for k in overrides:
- yield k
-
- def __len__(self):
- return len(frozenset(self))
-
- def __getitem__(self, item):
- value = self.getVar(item, False)
- if value is None:
- raise KeyError(item)
- else:
- return value
-
- def __setitem__(self, var, value):
- self.setVar(var, value)
-
- def __delitem__(self, var):
- self.delVar(var)
-
- def get_hash(self):
- data = {}
- d = self.createCopy()
- bb.data.expandKeys(d)
- bb.data.update_data(d)
-
- config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST", True) or "").split())
- keys = set(key for key in iter(d) if not key.startswith("__"))
- for key in keys:
- if key in config_whitelist:
- continue
-
- value = d.getVar(key, False) or ""
- data.update({key:value})
-
- varflags = d.getVarFlags(key, internalflags = True)
- if not varflags:
- continue
- for f in varflags:
- if f == "_content":
- continue
- data.update({'%s[%s]' % (key, f):varflags[f]})
-
- for key in ["__BBTASKS", "__BBANONFUNCS", "__BBHANDLERS"]:
- bb_list = d.getVar(key, False) or []
- bb_list.sort()
- data.update({key:str(bb_list)})
-
- if key == "__BBANONFUNCS":
- for i in bb_list:
- value = d.getVar(i, False) or ""
- data.update({i:value})
-
- data_str = str([(k, data[k]) for k in sorted(data.keys())])
- return hashlib.md5(data_str).hexdigest()
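The digest above is order-independent only because the (key, value) pairs are sorted before stringification. The same idea in isolation, written as modern Python 3 for clarity:

    import hashlib

    def config_hash(data):
        # Sorting the keys keeps the digest stable across dict orderings.
        data_str = str([(k, data[k]) for k in sorted(data)])
        return hashlib.md5(data_str.encode("utf-8")).hexdigest()

    assert config_hash({"A": "1", "B": "2"}) == config_hash({"B": "2", "A": "1"})
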
diff --git a/yocto-poky/bitbake/lib/bb/event.py b/yocto-poky/bitbake/lib/bb/event.py
deleted file mode 100644
index 5ffe89eae..000000000
--- a/yocto-poky/bitbake/lib/bb/event.py
+++ /dev/null
@@ -1,679 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Event' implementation
-
-Classes and functions for manipulating 'events' in the
-BitBake build tools.
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os, sys
-import warnings
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-import logging
-import atexit
-import traceback
-import ast
-import bb.utils
-import bb.compat
-import bb.exceptions
-
-# This is the pid for which we should generate the event. This is set when
-# the runqueue forks off.
-worker_pid = 0
-worker_fire = None
-
-logger = logging.getLogger('BitBake.Event')
-
-class Event(object):
- """Base class for events"""
-
- def __init__(self):
- self.pid = worker_pid
-
-Registered = 10
-AlreadyRegistered = 14
-
-def get_class_handlers():
- return _handlers
-
-def set_class_handlers(h):
- global _handlers
- _handlers = h
-
-def clean_class_handlers():
- return bb.compat.OrderedDict()
-
-# Internal
-_handlers = clean_class_handlers()
-_ui_handlers = {}
-_ui_logfilters = {}
-_ui_handler_seq = 0
-_event_handler_map = {}
-_catchall_handlers = {}
-_eventfilter = None
-_uiready = False
-
-def execute_handler(name, handler, event, d):
- event.data = d
- addedd = False
- if 'd' not in __builtins__:
- __builtins__['d'] = d
- addedd = True
- try:
- ret = handler(event)
- except (bb.parse.SkipRecipe, bb.BBHandledException):
- raise
- except Exception:
- etype, value, tb = sys.exc_info()
- logger.error("Execution of event handler '%s' failed" % name,
- exc_info=(etype, value, tb.tb_next))
- raise
- except SystemExit as exc:
- if exc.code != 0:
- logger.error("Execution of event handler '%s' failed" % name)
- raise
- finally:
- del event.data
- if addedd:
- del __builtins__['d']
-
-def fire_class_handlers(event, d):
- if isinstance(event, logging.LogRecord):
- return
-
- eid = str(event.__class__)[8:-2]
- evt_hmap = _event_handler_map.get(eid, {})
- for name, handler in _handlers.iteritems():
- if name in _catchall_handlers or name in evt_hmap:
- if _eventfilter:
- if not _eventfilter(name, handler, event, d):
- continue
- execute_handler(name, handler, event, d)
-
-ui_queue = []
-@atexit.register
-def print_ui_queue():
- """If we're exiting before a UI has been spawned, display any queued
- LogRecords to the console."""
- logger = logging.getLogger("BitBake")
- if not _uiready:
- from bb.msg import BBLogFormatter
- console = logging.StreamHandler(sys.stdout)
- console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s"))
- logger.handlers = [console]
-
- # First check to see if we have any proper messages
- msgprint = False
- for event in ui_queue:
- if isinstance(event, logging.LogRecord):
- if event.levelno > logging.DEBUG:
- logger.handle(event)
- msgprint = True
- if msgprint:
- return
-
- # Nope, so just print all of the messages we have (including debug messages)
- for event in ui_queue:
- if isinstance(event, logging.LogRecord):
- logger.handle(event)
-
-def fire_ui_handlers(event, d):
- if not _uiready:
- # No UI handlers registered yet, queue up the messages
- ui_queue.append(event)
- return
-
- errors = []
- for h in _ui_handlers:
- #print "Sending event %s" % event
- try:
- if not _ui_logfilters[h].filter(event):
- continue
-            # We use pickle here since it handles object instances better
-            # than xmlrpc's marshaller does. Events *must* be serializable
-            # by pickle.
- if hasattr(_ui_handlers[h].event, "sendpickle"):
- _ui_handlers[h].event.sendpickle((pickle.dumps(event)))
- else:
- _ui_handlers[h].event.send(event)
- except:
- errors.append(h)
- for h in errors:
- del _ui_handlers[h]
-
-def fire(event, d):
- """Fire off an Event"""
-
- # We can fire class handlers in the worker process context and this is
- # desired so they get the task based datastore.
- # UI handlers need to be fired in the server context so we defer this. They
- # don't have a datastore so the datastore context isn't a problem.
-
- fire_class_handlers(event, d)
- if worker_fire:
- worker_fire(event, d)
- else:
- fire_ui_handlers(event, d)
-
-def fire_from_worker(event, d):
- fire_ui_handlers(event, d)
-
-noop = lambda _: None
-def register(name, handler, mask=None, filename=None, lineno=None):
- """Register an Event handler"""
-
- # already registered
- if name in _handlers:
- return AlreadyRegistered
-
- if handler is not None:
- # handle string containing python code
- if isinstance(handler, basestring):
- tmp = "def %s(e):\n%s" % (name, handler)
- try:
- code = bb.methodpool.compile_cache(tmp)
- if not code:
- if filename is None:
- filename = "%s(e)" % name
- code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST)
- if lineno is not None:
- ast.increment_lineno(code, lineno-1)
- code = compile(code, filename, "exec")
- bb.methodpool.compile_cache_add(tmp, code)
- except SyntaxError:
- logger.error("Unable to register event handler '%s':\n%s", name,
- ''.join(traceback.format_exc(limit=0)))
- _handlers[name] = noop
- return
- env = {}
- bb.utils.better_exec(code, env)
- func = bb.utils.better_eval(name, env)
- _handlers[name] = func
- else:
- _handlers[name] = handler
-
- if not mask or '*' in mask:
- _catchall_handlers[name] = True
- else:
- for m in mask:
- if _event_handler_map.get(m, None) is None:
- _event_handler_map[m] = {}
- _event_handler_map[m][name] = True
-
- return Registered
-
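register() accepts either a callable or a string of Python code that is compiled into one, and fire() later dispatches to every handler whose mask names the event class. A hedged usage sketch (assuming bitbake on sys.path; the datastore d is whatever is current at the fire site):

    import bb.event

    def on_build_started(event):
        # While the handler runs, the datastore is reachable as event.data.
        print("build started:", event.msg)

    # Mask entries use the dotted class name; omit the mask (or pass "*")
    # to receive every event.
    bb.event.register("on_build_started", on_build_started,
                      mask=["bb.event.BuildStarted"])

    # Firing the event then invokes the handler in-process:
    # bb.event.fire(bb.event.BuildStarted("run", ["pkg"]), d)
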
-def remove(name, handler):
- """Remove an Event handler"""
- _handlers.pop(name)
-
-def set_eventfilter(func):
- global _eventfilter
- _eventfilter = func
-
-def register_UIHhandler(handler, mainui=False):
- if mainui:
- global _uiready
- _uiready = True
- bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
- _ui_handlers[_ui_handler_seq] = handler
- level, debug_domains = bb.msg.constructLogOptions()
- _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains)
- return _ui_handler_seq
-
-def unregister_UIHhandler(handlerNum):
- if handlerNum in _ui_handlers:
- del _ui_handlers[handlerNum]
- return
-
-# Class to allow filtering of events and specific filtering of LogRecords *before* we put them over the IPC
-class UIEventFilter(object):
- def __init__(self, level, debug_domains):
- self.update(None, level, debug_domains)
-
- def update(self, eventmask, level, debug_domains):
- self.eventmask = eventmask
- self.stdlevel = level
- self.debug_domains = debug_domains
-
- def filter(self, event):
- if isinstance(event, logging.LogRecord):
- if event.levelno >= self.stdlevel:
- return True
- if event.name in self.debug_domains and event.levelno >= self.debug_domains[event.name]:
- return True
- return False
- eid = str(event.__class__)[8:-2]
- if self.eventmask and eid not in self.eventmask:
- return False
- return True
-
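The filter admits LogRecords by level, with per-domain thresholds able to lower the global one, and admits other events by class-name mask. A small sketch of the LogRecord path, using the class exactly as defined above:

    import logging

    f = UIEventFilter(logging.INFO, {"BitBake.Fetcher": logging.DEBUG})

    rec = logging.LogRecord("BitBake.Fetcher", logging.DEBUG, __file__, 0,
                            "fetching", None, None)
    print(f.filter(rec))   # True: the domain entry lowers the bar to DEBUG
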
-def set_UIHmask(handlerNum, level, debug_domains, mask):
- if not handlerNum in _ui_handlers:
- return False
- if '*' in mask:
- _ui_logfilters[handlerNum].update(None, level, debug_domains)
- else:
- _ui_logfilters[handlerNum].update(mask, level, debug_domains)
- return True
-
-def getName(e):
- """Returns the name of a class or class instance"""
-    if getattr(e, "__name__", None) is None:
- return e.__class__.__name__
- else:
- return e.__name__
-
-class OperationStarted(Event):
- """An operation has begun"""
- def __init__(self, msg = "Operation Started"):
- Event.__init__(self)
- self.msg = msg
-
-class OperationCompleted(Event):
- """An operation has completed"""
- def __init__(self, total, msg = "Operation Completed"):
- Event.__init__(self)
- self.total = total
- self.msg = msg
-
-class OperationProgress(Event):
- """An operation is in progress"""
- def __init__(self, current, total, msg = "Operation in Progress"):
- Event.__init__(self)
- self.current = current
- self.total = total
- self.msg = msg + ": %s/%s" % (current, total);
-
-class ConfigParsed(Event):
- """Configuration Parsing Complete"""
-
-class RecipeEvent(Event):
- def __init__(self, fn):
- self.fn = fn
- Event.__init__(self)
-
-class RecipePreFinalise(RecipeEvent):
- """ Recipe Parsing Complete but not yet finialised"""
-
-class RecipeParsed(RecipeEvent):
- """ Recipe Parsing Complete """
-
-class StampUpdate(Event):
- """Trigger for any adjustment of the stamp files to happen"""
-
- def __init__(self, targets, stampfns):
- self._targets = targets
- self._stampfns = stampfns
- Event.__init__(self)
-
- def getStampPrefix(self):
- return self._stampfns
-
- def getTargets(self):
- return self._targets
-
- stampPrefix = property(getStampPrefix)
- targets = property(getTargets)
-
-class BuildBase(Event):
- """Base class for bbmake run events"""
-
- def __init__(self, n, p, failures = 0):
- self._name = n
- self._pkgs = p
- Event.__init__(self)
- self._failures = failures
-
- def getPkgs(self):
- return self._pkgs
-
- def setPkgs(self, pkgs):
- self._pkgs = pkgs
-
- def getName(self):
- return self._name
-
- def setName(self, name):
- self._name = name
-
- def getCfg(self):
- return self.data
-
- def setCfg(self, cfg):
- self.data = cfg
-
- def getFailures(self):
- """
- Return the number of failed packages
- """
- return self._failures
-
- pkgs = property(getPkgs, setPkgs, None, "pkgs property")
- name = property(getName, setName, None, "name property")
- cfg = property(getCfg, setCfg, None, "cfg property")
-
-
-class BuildStarted(BuildBase, OperationStarted):
- """bbmake build run started"""
- def __init__(self, n, p, failures = 0):
- OperationStarted.__init__(self, "Building Started")
- BuildBase.__init__(self, n, p, failures)
-
-class BuildCompleted(BuildBase, OperationCompleted):
- """bbmake build run completed"""
- def __init__(self, total, n, p, failures=0, interrupted=0):
- if not failures:
- OperationCompleted.__init__(self, total, "Building Succeeded")
- else:
- OperationCompleted.__init__(self, total, "Building Failed")
- self._interrupted = interrupted
- BuildBase.__init__(self, n, p, failures)
-
-class DiskFull(Event):
- """Disk full case build aborted"""
- def __init__(self, dev, type, freespace, mountpoint):
- Event.__init__(self)
- self._dev = dev
- self._type = type
- self._free = freespace
- self._mountpoint = mountpoint
-
-class NoProvider(Event):
- """No Provider for an Event"""
-
- def __init__(self, item, runtime=False, dependees=None, reasons=None, close_matches=None):
- Event.__init__(self)
- self._item = item
- self._runtime = runtime
- self._dependees = dependees
- self._reasons = reasons
- self._close_matches = close_matches
-
- def getItem(self):
- return self._item
-
- def isRuntime(self):
- return self._runtime
-
-class MultipleProviders(Event):
- """Multiple Providers"""
-
- def __init__(self, item, candidates, runtime = False):
- Event.__init__(self)
- self._item = item
- self._candidates = candidates
- self._is_runtime = runtime
-
- def isRuntime(self):
- """
- Is this a runtime issue?
- """
- return self._is_runtime
-
- def getItem(self):
- """
-        The name of the item to be built
- """
- return self._item
-
- def getCandidates(self):
- """
- Get the possible Candidates for a PROVIDER.
- """
- return self._candidates
-
-class ParseStarted(OperationStarted):
- """Recipe parsing for the runqueue has begun"""
- def __init__(self, total):
- OperationStarted.__init__(self, "Recipe parsing Started")
- self.total = total
-
-class ParseCompleted(OperationCompleted):
- """Recipe parsing for the runqueue has completed"""
- def __init__(self, cached, parsed, skipped, masked, virtuals, errors, total):
- OperationCompleted.__init__(self, total, "Recipe parsing Completed")
- self.cached = cached
- self.parsed = parsed
- self.skipped = skipped
- self.virtuals = virtuals
- self.masked = masked
- self.errors = errors
- self.sofar = cached + parsed
-
-class ParseProgress(OperationProgress):
- """Recipe parsing progress"""
- def __init__(self, current, total):
- OperationProgress.__init__(self, current, total, "Recipe parsing")
-
-
-class CacheLoadStarted(OperationStarted):
- """Loading of the dependency cache has begun"""
- def __init__(self, total):
- OperationStarted.__init__(self, "Loading cache Started")
- self.total = total
-
-class CacheLoadProgress(OperationProgress):
- """Cache loading progress"""
- def __init__(self, current, total):
- OperationProgress.__init__(self, current, total, "Loading cache")
-
-class CacheLoadCompleted(OperationCompleted):
- """Cache loading is complete"""
- def __init__(self, total, num_entries):
- OperationCompleted.__init__(self, total, "Loading cache Completed")
- self.num_entries = num_entries
-
-class TreeDataPreparationStarted(OperationStarted):
- """Tree data preparation started"""
- def __init__(self):
- OperationStarted.__init__(self, "Preparing tree data Started")
-
-class TreeDataPreparationProgress(OperationProgress):
- """Tree data preparation is in progress"""
- def __init__(self, current, total):
- OperationProgress.__init__(self, current, total, "Preparing tree data")
-
-class TreeDataPreparationCompleted(OperationCompleted):
- """Tree data preparation completed"""
- def __init__(self, total):
- OperationCompleted.__init__(self, total, "Preparing tree data Completed")
-
-class DepTreeGenerated(Event):
- """
- Event when a dependency tree has been generated
- """
-
- def __init__(self, depgraph):
- Event.__init__(self)
- self._depgraph = depgraph
-
-class TargetsTreeGenerated(Event):
- """
- Event when a set of buildable targets has been generated
- """
- def __init__(self, model):
- Event.__init__(self)
- self._model = model
-
-class ReachableStamps(Event):
- """
-    An event listing all stamps reachable after parsing,
-    which the metadata may use to clean up stale data
- """
-
- def __init__(self, stamps):
- Event.__init__(self)
- self.stamps = stamps
-
-class FilesMatchingFound(Event):
- """
- Event when a list of files matching the supplied pattern has
- been generated
- """
- def __init__(self, pattern, matches):
- Event.__init__(self)
- self._pattern = pattern
- self._matches = matches
-
-class CoreBaseFilesFound(Event):
- """
-    Event when a list of appropriate core base paths has been generated
- """
- def __init__(self, paths):
- Event.__init__(self)
- self._paths = paths
-
-class ConfigFilesFound(Event):
- """
- Event when a list of appropriate config files has been generated
- """
- def __init__(self, variable, values):
- Event.__init__(self)
- self._variable = variable
- self._values = values
-
-class ConfigFilePathFound(Event):
- """
- Event when a path for a config file has been found
- """
- def __init__(self, path):
- Event.__init__(self)
- self._path = path
-
-class MsgBase(Event):
- """Base class for messages"""
-
- def __init__(self, msg):
- self._message = msg
- Event.__init__(self)
-
-class MsgDebug(MsgBase):
- """Debug Message"""
-
-class MsgNote(MsgBase):
- """Note Message"""
-
-class MsgWarn(MsgBase):
- """Warning Message"""
-
-class MsgError(MsgBase):
- """Error Message"""
-
-class MsgFatal(MsgBase):
- """Fatal Message"""
-
-class MsgPlain(MsgBase):
- """General output"""
-
-class LogExecTTY(Event):
- """Send event containing program to spawn on tty of the logger"""
- def __init__(self, msg, prog, sleep_delay, retries):
- Event.__init__(self)
- self.msg = msg
- self.prog = prog
- self.sleep_delay = sleep_delay
- self.retries = retries
-
-class LogHandler(logging.Handler):
- """Dispatch logging messages as bitbake events"""
-
- def emit(self, record):
- if record.exc_info:
- etype, value, tb = record.exc_info
- if hasattr(tb, 'tb_next'):
- tb = list(bb.exceptions.extract_traceback(tb, context=3))
- # Need to turn the value into something the logging system can pickle
- value = str(value)
- record.bb_exc_info = (etype, value, tb)
- record.exc_info = None
- fire(record, None)
-
- def filter(self, record):
- record.taskpid = worker_pid
- return True
-
-class RequestPackageInfo(Event):
- """
- Event to request package information
- """
-
-class PackageInfo(Event):
- """
- Package information for GUI
- """
- def __init__(self, pkginfolist):
- Event.__init__(self)
- self._pkginfolist = pkginfolist
-
-class MetadataEvent(Event):
- """
-    Generic event for OE-Core classes
-    to report information during asynchronous execution
- """
- def __init__(self, eventtype, eventdata):
- Event.__init__(self)
- self.type = eventtype
- self._localdata = eventdata
-
-class SanityCheck(Event):
- """
-    Event to run sanity checks; either raises errors or generates events as return status.
- """
- def __init__(self, generateevents = True):
- Event.__init__(self)
- self.generateevents = generateevents
-
-class SanityCheckPassed(Event):
- """
- Event to indicate sanity check has passed
- """
-
-class SanityCheckFailed(Event):
- """
- Event to indicate sanity check has failed
- """
- def __init__(self, msg, network_error=False):
- Event.__init__(self)
- self._msg = msg
- self._network_error = network_error
-
-class NetworkTest(Event):
- """
-    Event to run network connectivity tests; either raises errors or generates events as return status.
- """
- def __init__(self, generateevents = True):
- Event.__init__(self)
- self.generateevents = generateevents
-
-class NetworkTestPassed(Event):
- """
- Event to indicate network test has passed
- """
-
-class NetworkTestFailed(Event):
- """
- Event to indicate network test has failed
- """
-
diff --git a/yocto-poky/bitbake/lib/bb/exceptions.py b/yocto-poky/bitbake/lib/bb/exceptions.py
deleted file mode 100644
index f182c8fd6..000000000
--- a/yocto-poky/bitbake/lib/bb/exceptions.py
+++ /dev/null
@@ -1,91 +0,0 @@
-from __future__ import absolute_import
-import inspect
-import traceback
-import bb.namedtuple_with_abc
-from collections import namedtuple
-
-
-class TracebackEntry(namedtuple.abc):
- """Pickleable representation of a traceback entry"""
- _fields = 'filename lineno function args code_context index'
- _header = ' File "{0.filename}", line {0.lineno}, in {0.function}{0.args}'
-
- def format(self, formatter=None):
- if not self.code_context:
- return self._header.format(self) + '\n'
-
- formatted = [self._header.format(self) + ':\n']
-
- for lineindex, line in enumerate(self.code_context):
- if formatter:
- line = formatter(line)
-
- if lineindex == self.index:
- formatted.append(' >%s' % line)
- else:
- formatted.append(' %s' % line)
- return formatted
-
- def __str__(self):
- return ''.join(self.format())
-
-def _get_frame_args(frame):
- """Get the formatted arguments and class (if available) for a frame"""
- arginfo = inspect.getargvalues(frame)
-
- try:
- if not arginfo.args:
- return '', None
-        # There have been reports from the field of Python 2.6 returning a
-        # plain tuple here rather than a namedtuple, so fall back gracefully
-        # if args isn't present.
- except AttributeError:
- return '', None
-
- firstarg = arginfo.args[0]
- if firstarg == 'self':
- self = arginfo.locals['self']
- cls = self.__class__.__name__
-
- arginfo.args.pop(0)
- del arginfo.locals['self']
- else:
- cls = None
-
- formatted = inspect.formatargvalues(*arginfo)
- return formatted, cls
-
-def extract_traceback(tb, context=1):
- frames = inspect.getinnerframes(tb, context)
- for frame, filename, lineno, function, code_context, index in frames:
- formatted_args, cls = _get_frame_args(frame)
- if cls:
- function = '%s.%s' % (cls, function)
- yield TracebackEntry(filename, lineno, function, formatted_args,
- code_context, index)
-
-def format_extracted(extracted, formatter=None, limit=None):
- if limit:
- extracted = extracted[-limit:]
-
- formatted = []
- for tracebackinfo in extracted:
- formatted.extend(tracebackinfo.format(formatter))
- return formatted
-
-
-def format_exception(etype, value, tb, context=1, limit=None, formatter=None):
- formatted = ['Traceback (most recent call last):\n']
-
- if hasattr(tb, 'tb_next'):
- tb = extract_traceback(tb, context)
-
- formatted.extend(format_extracted(tb, formatter, limit))
- formatted.extend(traceback.format_exception_only(etype, value))
- return formatted
-
-def to_string(exc):
- if isinstance(exc, SystemExit):
- if not isinstance(exc.code, basestring):
- return 'Exited with "%d"' % exc.code
- return str(exc)
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/__init__.py b/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
deleted file mode 100644
index 1fa67020c..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
+++ /dev/null
@@ -1,1773 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2012 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-from __future__ import absolute_import
-from __future__ import print_function
-import os, re
-import signal
-import logging
-import urllib
-import urlparse
-import bb.persist_data, bb.utils
-import bb.checksum
-from bb import data
-import bb.process
-import subprocess
-
-__version__ = "2"
-_checksum_cache = bb.checksum.FileChecksumCache()
-
-logger = logging.getLogger("BitBake.Fetcher")
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info("Importing cPickle failed. "
- "Falling back to a very slow implementation.")
-
-class BBFetchException(Exception):
- """Class all fetch exceptions inherit from"""
- def __init__(self, message):
- self.msg = message
- Exception.__init__(self, message)
-
- def __str__(self):
- return self.msg
-
-class UntrustedUrl(BBFetchException):
- """Exception raised when encountering a host not listed in BB_ALLOWED_NETWORKS"""
- def __init__(self, url, message=''):
- if message:
- msg = message
- else:
- msg = "The URL: '%s' is not trusted and cannot be used" % url
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (url,)
-
-class MalformedUrl(BBFetchException):
- """Exception raised when encountering an invalid url"""
- def __init__(self, url, message=''):
- if message:
- msg = message
- else:
- msg = "The URL: '%s' is invalid and cannot be interpreted" % url
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (url,)
-
-class FetchError(BBFetchException):
- """General fetcher exception when something happens incorrectly"""
- def __init__(self, message, url = None):
- if url:
- msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
- else:
- msg = "Fetcher failure: %s" % message
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (message, url)
-
-class ChecksumError(FetchError):
- """Exception when mismatched checksum encountered"""
- def __init__(self, message, url = None, checksum = None):
- self.checksum = checksum
- FetchError.__init__(self, message, url)
-
-class NoChecksumError(FetchError):
- """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""
-
-class UnpackError(BBFetchException):
- """General fetcher exception when something happens incorrectly when unpacking"""
- def __init__(self, message, url):
- msg = "Unpack failure for URL: '%s'. %s" % (url, message)
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (message, url)
-
-class NoMethodError(BBFetchException):
- """Exception raised when there is no method to obtain a supplied url or set of urls"""
- def __init__(self, url):
- msg = "Could not find a fetcher which supports the URL: '%s'" % url
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (url,)
-
-class MissingParameterError(BBFetchException):
- """Exception raised when a fetch method is missing a critical parameter in the url"""
- def __init__(self, missing, url):
- msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
- self.url = url
- self.missing = missing
- BBFetchException.__init__(self, msg)
- self.args = (missing, url)
-
-class ParameterError(BBFetchException):
- """Exception raised when a url cannot be proccessed due to invalid parameters."""
- def __init__(self, message, url):
- msg = "URL: '%s' has invalid parameters. %s" % (url, message)
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (message, url)
-
-class NetworkAccess(BBFetchException):
- """Exception raised when network access is disabled but it is required."""
- def __init__(self, url, cmd):
- msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url)
- self.url = url
- self.cmd = cmd
- BBFetchException.__init__(self, msg)
- self.args = (url, cmd)
-
-class NonLocalMethod(Exception):
- def __init__(self):
- Exception.__init__(self)
-
-
-class URI(object):
- """
- A class representing a generic URI, with methods for
- accessing the URI components, and stringifies to the
- URI.
-
- It is constructed by calling it with a URI, or setting
- the attributes manually:
-
- uri = URI("http://example.com/")
-
- uri = URI()
- uri.scheme = 'http'
- uri.hostname = 'example.com'
- uri.path = '/'
-
- It has the following attributes:
-
- * scheme (read/write)
- * userinfo (authentication information) (read/write)
- * username (read/write)
- * password (read/write)
-
- Note, password is deprecated as of RFC 3986.
-
- * hostname (read/write)
- * port (read/write)
- * hostport (read only)
- "hostname:port", if both are set, otherwise just "hostname"
- * path (read/write)
- * path_quoted (read/write)
- A URI quoted version of path
- * params (dict) (read/write)
- * query (dict) (read/write)
- * relative (bool) (read only)
- True if this is a "relative URI", (e.g. file:foo.diff)
-
- It stringifies to the URI itself.
-
- Some notes about relative URIs: while it's specified that
- a URI beginning with <scheme>:// should either be directly
- followed by a hostname or a /, the old URI handling of the
-    fetch2 library did not conform to this. Therefore, this URI
-    class has some kludges to make sure that URIs are parsed in
-    a way conforming to bitbake's current usage. This URI class
- supports the following:
-
- file:relative/path.diff (IETF compliant)
- git:relative/path.git (IETF compliant)
- git:///absolute/path.git (IETF compliant)
- file:///absolute/path.diff (IETF compliant)
-
- file://relative/path.diff (not IETF compliant)
-
- But it does not support the following:
-
- file://hostname/absolute/path.diff (would be IETF compliant)
-
- Note that the last case only applies to a list of
- "whitelisted" schemes (currently only file://), that requires
- its URIs to not have a network location.
- """
-
- _relative_schemes = ['file', 'git']
- _netloc_forbidden = ['file']
-
- def __init__(self, uri=None):
- self.scheme = ''
- self.userinfo = ''
- self.hostname = ''
- self.port = None
- self._path = ''
- self.params = {}
- self.query = {}
- self.relative = False
-
- if not uri:
- return
-
- # We hijack the URL parameters, since the way bitbake uses
- # them are not quite RFC compliant.
- uri, param_str = (uri.split(";", 1) + [None])[:2]
-
- urlp = urlparse.urlparse(uri)
- self.scheme = urlp.scheme
-
- reparse = 0
-
- # Coerce urlparse to make URI scheme use netloc
- if not self.scheme in urlparse.uses_netloc:
- urlparse.uses_params.append(self.scheme)
- reparse = 1
-
- # Make urlparse happy(/ier) by converting local resources
- # to RFC compliant URL format. E.g.:
- # file://foo.diff -> file:foo.diff
- if urlp.scheme in self._netloc_forbidden:
- uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
- reparse = 1
-
- if reparse:
- urlp = urlparse.urlparse(uri)
-
- # Identify if the URI is relative or not
- if urlp.scheme in self._relative_schemes and \
- re.compile("^\w+:(?!//)").match(uri):
- self.relative = True
-
- if not self.relative:
- self.hostname = urlp.hostname or ''
- self.port = urlp.port
-
- self.userinfo += urlp.username or ''
-
- if urlp.password:
- self.userinfo += ':%s' % urlp.password
-
- self.path = urllib.unquote(urlp.path)
-
- if param_str:
- self.params = self._param_str_split(param_str, ";")
- if urlp.query:
- self.query = self._param_str_split(urlp.query, "&")
-
- def __str__(self):
- userinfo = self.userinfo
- if userinfo:
- userinfo += '@'
-
- return "%s:%s%s%s%s%s%s" % (
- self.scheme,
- '' if self.relative else '//',
- userinfo,
- self.hostport,
- self.path_quoted,
- self._query_str(),
- self._param_str())
-
- def _param_str(self):
- return (
- ''.join([';', self._param_str_join(self.params, ";")])
- if self.params else '')
-
- def _query_str(self):
- return (
- ''.join(['?', self._param_str_join(self.query, "&")])
- if self.query else '')
-
- def _param_str_split(self, string, elmdelim, kvdelim="="):
- ret = {}
- for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
- ret[k] = v
- return ret
-
- def _param_str_join(self, dict_, elmdelim, kvdelim="="):
- return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])
-
- @property
- def hostport(self):
- if not self.port:
- return self.hostname
- return "%s:%d" % (self.hostname, self.port)
-
- @property
- def path_quoted(self):
- return urllib.quote(self.path)
-
- @path_quoted.setter
- def path_quoted(self, path):
- self.path = urllib.unquote(path)
-
- @property
- def path(self):
- return self._path
-
- @path.setter
- def path(self, path):
- self._path = path
-
- if not path or re.compile("^/").match(path):
- self.relative = False
- else:
- self.relative = True
-
- @property
- def username(self):
- if self.userinfo:
- return (self.userinfo.split(":", 1))[0]
- return ''
-
- @username.setter
- def username(self, username):
- password = self.password
- self.userinfo = username
- if password:
- self.userinfo += ":%s" % password
-
- @property
- def password(self):
- if self.userinfo and ":" in self.userinfo:
- return (self.userinfo.split(":", 1))[1]
- return ''
-
- @password.setter
- def password(self, password):
- self.userinfo = "%s:%s" % (self.username, password)
-
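Beyond the constructor examples in the docstring, the attributes are writable and the object restringifies itself, so URL surgery is a matter of field assignment. A short sketch (parameter ordering in the output may vary, since params is a plain dict):

    uri = URI("http://www.example.com/dir/file.tar.gz;name=tarball")
    uri.scheme = "https"
    uri.params["downloadfilename"] = "renamed.tar.gz"
    print(str(uri))
    # expected (param order may vary):
    # https://www.example.com/dir/file.tar.gz;name=tarball;downloadfilename=renamed.tar.gz
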
-def decodeurl(url):
- """Decodes an URL into the tokens (scheme, network location, path,
- user, password, parameters).
- """
-
- m = re.compile('(?P<type>[^:]*)://((?P<user>[^/]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
- if not m:
- raise MalformedUrl(url)
-
- type = m.group('type')
- location = m.group('location')
- if not location:
- raise MalformedUrl(url)
- user = m.group('user')
- parm = m.group('parm')
-
- locidx = location.find('/')
- if locidx != -1 and type.lower() != 'file':
- host = location[:locidx]
- path = location[locidx:]
- elif type.lower() == 'file':
- host = ""
- path = location
- else:
- host = location
- path = ""
- if user:
- m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
- if m:
- user = m.group('user')
- pswd = m.group('pswd')
- else:
- user = ''
- pswd = ''
-
- p = {}
- if parm:
- for s in parm.split(';'):
- if s:
- if not '=' in s:
- raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
- s1, s2 = s.split('=')
- p[s1] = s2
-
- return type, host, urllib.unquote(path), user, pswd, p
-
-def encodeurl(decoded):
- """Encodes a URL from tokens (scheme, network location, path,
- user, password, parameters).
- """
-
- type, host, path, user, pswd, p = decoded
-
- if not path:
- raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
- if not type:
- raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
- url = '%s://' % type
- if user and type != "file":
- url += "%s" % user
- if pswd:
- url += ":%s" % pswd
- url += "@"
- if host and type != "file":
- url += "%s" % host
- # Standardise path to ensure comparisons work
- while '//' in path:
- path = path.replace("//", "/")
- url += "%s" % urllib.quote(path)
- if p:
- for parm in p:
- url += ";%s=%s" % (parm, p[parm])
-
- return url
-
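decodeurl() and encodeurl() are near-inverses, so a round trip mostly preserves a URL (parameter order aside, and with '//' runs in the path collapsed). For example:

    parts = decodeurl("git://git.example.com/repo.git;protocol=https;branch=main")
    # -> ('git', 'git.example.com', '/repo.git', '', '',
    #     {'protocol': 'https', 'branch': 'main'})
    print(encodeurl(parts))
    # expected (param order may vary):
    # git://git.example.com/repo.git;protocol=https;branch=main
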
-def uri_replace(ud, uri_find, uri_replace, replacements, d):
- if not ud.url or not uri_find or not uri_replace:
- logger.error("uri_replace: passed an undefined value, not replacing")
- return None
- uri_decoded = list(decodeurl(ud.url))
- uri_find_decoded = list(decodeurl(uri_find))
- uri_replace_decoded = list(decodeurl(uri_replace))
- logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
- result_decoded = ['', '', '', '', '', {}]
- for loc, i in enumerate(uri_find_decoded):
- result_decoded[loc] = uri_decoded[loc]
- regexp = i
- if loc == 0 and regexp and not regexp.endswith("$"):
-            # Anchor the type regexp: left unanchored, a find pattern of "http"
-            # would also match "https" and rewrite it to e.g. "files",
-            # which is clearly undesirable.
- regexp += "$"
- if loc == 5:
- # Handle URL parameters
- if i:
- # Any specified URL parameters must match
- for k in uri_replace_decoded[loc]:
- if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
- return None
- # Overwrite any specified replacement parameters
- for k in uri_replace_decoded[loc]:
- for l in replacements:
- uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
- result_decoded[loc][k] = uri_replace_decoded[loc][k]
- elif (re.match(regexp, uri_decoded[loc])):
- if not uri_replace_decoded[loc]:
- result_decoded[loc] = ""
- else:
- for k in replacements:
- uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
- #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
- result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1)
- if loc == 2:
- # Handle path manipulations
- basename = None
- if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball:
- # If the source and destination url types differ, must be a mirrortarball mapping
- basename = os.path.basename(ud.mirrortarball)
- # Kill parameters, they make no sense for mirror tarballs
- uri_decoded[5] = {}
- elif ud.localpath and ud.method.supports_checksum(ud):
- basename = os.path.basename(ud.localpath)
- if basename and not result_decoded[loc].endswith(basename):
- result_decoded[loc] = os.path.join(result_decoded[loc], basename)
- else:
- return None
- result = encodeurl(result_decoded)
- if result == ud.url:
- return None
- logger.debug(2, "For url %s returning %s" % (ud.url, result))
- return result
-
-methods = []
-urldata_cache = {}
-saved_headrevs = {}
-
-def fetcher_init(d):
- """
- Called to initialize the fetchers once the configuration data is known.
- Calls before this must not hit the cache.
- """
-    # When to drop SCM head revisions is controlled by user policy
- srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
- if srcrev_policy == "cache":
- logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
- elif srcrev_policy == "clear":
- logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
- revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
- try:
- bb.fetch2.saved_headrevs = revs.items()
- except:
- pass
- revs.clear()
- else:
- raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
-
- _checksum_cache.init_cache(d)
-
- for m in methods:
- if hasattr(m, "init"):
- m.init(d)
-
-def fetcher_parse_save():
- _checksum_cache.save_extras()
-
-def fetcher_parse_done():
- _checksum_cache.save_merge()
-
-def fetcher_compare_revisions(d):
-    """
-    Compare the revisions in the persistent cache with the current values and
-    return True/False depending on whether they've changed.
-    """
-
-    data = dict(bb.persist_data.persist('BB_URI_HEADREVS', d).items())
-    data2 = dict(bb.fetch2.saved_headrevs)
-
-    for key in data:
-        if key not in data2 or data2[key] != data[key]:
-            logger.debug(1, "%s changed", key)
-            return True
-        else:
-            logger.debug(2, "%s did not change", key)
-    return False
-
-def mirror_from_string(data):
- return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
-
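PREMIRRORS/MIRRORS values are whitespace-separated (find, replace) pairs, one pair per line, with literal '\n' sequences standing in for newlines; mirror_from_string() recovers the pair list:

    print(mirror_from_string(
        "git://.*/.* http://downloads.example.com/mirror/ \\n"
        "ftp://.*/.* http://downloads.example.com/mirror/"))
    # -> [['git://.*/.*', 'http://downloads.example.com/mirror/'],
    #     ['ftp://.*/.*', 'http://downloads.example.com/mirror/']]
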
-def verify_checksum(ud, d, precomputed={}):
- """
-    Verify the MD5 and SHA256 checksums of the downloaded source file.
-
- Raises a FetchError if one or both of the SRC_URI checksums do not match
- the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
- checksums specified.
-
- Returns a dict of checksums that can be stored in a done stamp file and
- passed in as precomputed parameter in a later call to avoid re-computing
- the checksums from the file. This allows verifying the checksums of the
- file against those in the recipe each time, rather than only after
- downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
- """
-
- _MD5_KEY = "md5"
- _SHA256_KEY = "sha256"
-
- if ud.ignore_checksums or not ud.method.supports_checksum(ud):
- return {}
-
- if _MD5_KEY in precomputed:
- md5data = precomputed[_MD5_KEY]
- else:
- md5data = bb.utils.md5_file(ud.localpath)
-
- if _SHA256_KEY in precomputed:
- sha256data = precomputed[_SHA256_KEY]
- else:
- sha256data = bb.utils.sha256_file(ud.localpath)
-
- if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected:
- # If strict checking enabled and neither sum defined, raise error
- strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
- if strict == "1":
- logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
- 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
- (ud.localpath, ud.md5_name, md5data,
- ud.sha256_name, sha256data))
- raise NoChecksumError('Missing SRC_URI checksum', ud.url)
-
- # Log missing sums so user can more easily add them
- logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
- 'SRC_URI[%s] = "%s"',
- ud.localpath, ud.md5_name, md5data)
- logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
- 'SRC_URI[%s] = "%s"',
- ud.localpath, ud.sha256_name, sha256data)
-
- # We want to alert the user if a checksum is defined in the recipe but
- # it does not match.
- msg = ""
- mismatch = False
-    if ud.md5_expected and ud.md5_expected != md5data:
-        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
-        mismatch = True
-
-    if ud.sha256_expected and ud.sha256_expected != sha256data:
-        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
-        mismatch = True
-
- if mismatch:
- msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)
-
- if len(msg):
- raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)
-
- return {
- _MD5_KEY: md5data,
- _SHA256_KEY: sha256data
- }
-
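bb.utils.md5_file() and sha256_file() stream the file through hashlib; a dependency-free sketch of the kind of helper the verification above consumes:

    import hashlib

    def checksum_file(path, algorithm="sha256", blocksize=65536):
        # Stream in blocks so large downloads are never held in memory whole.
        h = hashlib.new(algorithm)
        with open(path, "rb") as f:
            for block in iter(lambda: f.read(blocksize), b""):
                h.update(block)
        return h.hexdigest()
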
-
-def verify_donestamp(ud, d, origud=None):
- """
- Check whether the done stamp file has the right checksums (if the fetch
- method supports them). If it doesn't, delete the done stamp and force
- a re-download.
-
- Returns True, if the donestamp exists and is valid, False otherwise. When
- returning False, any existing done stamps are removed.
- """
- if not ud.needdonestamp:
- return True
-
- if not os.path.exists(ud.donestamp):
- return False
-
- if (not ud.method.supports_checksum(ud) or
- (origud and not origud.method.supports_checksum(origud))):
- # done stamp exists, checksums not supported; assume the local file is
- # current
- return True
-
- if not os.path.exists(ud.localpath):
- # done stamp exists, but the downloaded file does not; the done stamp
- # must be incorrect, re-trigger the download
- bb.utils.remove(ud.donestamp)
- return False
-
- precomputed_checksums = {}
- # Only re-use the precomputed checksums if the donestamp is newer than the
- # file. Do not rely on the mtime of directories, though. If ud.localpath is
- # a directory, there will probably not be any checksums anyway.
- if (os.path.isdir(ud.localpath) or
- os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
- try:
- with open(ud.donestamp, "rb") as cachefile:
- pickled = pickle.Unpickler(cachefile)
- precomputed_checksums.update(pickled.load())
- except Exception as e:
-            # Avoid the warnings on the upgrade path from empty done stamp
- # files to those containing the checksums.
- if not isinstance(e, EOFError):
- # Ignore errors, they aren't fatal
- logger.warn("Couldn't load checksums from donestamp %s: %s "
- "(msg: %s)" % (ud.donestamp, type(e).__name__,
- str(e)))
-
- try:
- checksums = verify_checksum(ud, d, precomputed_checksums)
- # If the cache file did not have the checksums, compute and store them
- # as an upgrade path from the previous done stamp file format.
- if checksums != precomputed_checksums:
- with open(ud.donestamp, "wb") as cachefile:
- p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
- p.dump(checksums)
- return True
- except ChecksumError as e:
- # Checksums failed to verify, trigger re-download and remove the
- # incorrect stamp file.
- logger.warn("Checksum mismatch for local file %s\n"
- "Cleaning and trying again." % ud.localpath)
- if os.path.exists(ud.localpath):
- rename_bad_checksum(ud, e.checksum)
- bb.utils.remove(ud.donestamp)
- return False
-
-
-def update_stamp(ud, d):
- """
-    The donestamp is a stamp file indicating the whole fetch is done;
-    this function updates the stamp after verifying the checksum.
- """
- if not ud.needdonestamp:
- return
-
- if os.path.exists(ud.donestamp):
- # Touch the done stamp file to show active use of the download
- try:
- os.utime(ud.donestamp, None)
- except:
- # Errors aren't fatal here
- pass
- else:
- try:
- checksums = verify_checksum(ud, d)
- # Store the checksums for later re-verification against the recipe
- with open(ud.donestamp, "wb") as cachefile:
- p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
- p.dump(checksums)
- except ChecksumError as e:
- # Checksums failed to verify, trigger re-download and remove the
- # incorrect stamp file.
- logger.warn("Checksum mismatch for local file %s\n"
- "Cleaning and trying again." % ud.localpath)
- if os.path.exists(ud.localpath):
- rename_bad_checksum(ud, e.checksum)
- bb.utils.remove(ud.donestamp)
- raise
-
-def subprocess_setup():
- # Python installs a SIGPIPE handler by default. This is usually not what
- # non-Python subprocesses expect.
- # SIGPIPE errors are known issues with gzip/bash
- signal.signal(signal.SIGPIPE, signal.SIG_DFL)
-
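The reset matters when the handler above is installed as a preexec_fn, which is how the fetch code typically shells out (e.g. when unpacking). The same pattern with the standard library directly, shown as a hedged sketch:

    import subprocess

    # Restore default SIGPIPE handling in the child so shell pipelines like
    # 'gzip -dc file | head' terminate quietly instead of erroring.
    subprocess.call("ps aux | head -2", shell=True,
                    preexec_fn=subprocess_setup)
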
-def get_autorev(d):
-    # Only cache the source revision in the autorev case when the SRCREV policy is "cache"
- if d.getVar('BB_SRCREV_POLICY', True) != "cache":
- d.setVar('BB_DONT_CACHE', '1')
- return "AUTOINC"
-
-def get_srcrev(d, method_name='sortable_revision'):
- """
-    Return the revision string, usually for use in the version string (PV) of the current package
- Most packages usually only have one SCM so we just pass on the call.
- In the multi SCM case, we build a value based on SRCREV_FORMAT which must
- have been set.
-
-    The idea here is that we put the string "AUTOINC+" into the return value if the revisions are not
-    incremental; other code is then responsible for turning that into an increasing value (if needed)
-
- A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
- that fetcher provides a method with the given name and the same signature as sortable_revision.
- """
-
- scms = []
- fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
- urldata = fetcher.ud
- for u in urldata:
- if urldata[u].method.supports_srcrev():
- scms.append(u)
-
- if len(scms) == 0:
- raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
-
- if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
- autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
- if len(rev) > 10:
- rev = rev[:10]
- if autoinc:
- return "AUTOINC+" + rev
- return rev
-
- #
-    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
- #
- format = d.getVar('SRCREV_FORMAT', True)
- if not format:
- raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
-
- seenautoinc = False
- for scm in scms:
- ud = urldata[scm]
- for name in ud.names:
- autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
- seenautoinc = seenautoinc or autoinc
- if len(rev) > 10:
- rev = rev[:10]
- format = format.replace(name, rev)
- if seenautoinc:
- format = "AUTOINC+" + format
-
- return format
-
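In the multi-SCM branch, each ;name= entry's truncated revision is substituted for its name inside SRCREV_FORMAT. The replacement step in isolation (the format string and revisions here are hypothetical):

    format = "machine_meta"                      # hypothetical SRCREV_FORMAT
    revs = {"machine": "0123456789abcdef0123",   # hypothetical per-name revs
            "meta":    "fedcba9876543210fedc"}
    for name, rev in revs.items():
        format = format.replace(name, rev[:10])
    print(format)   # -> 0123456789_fedcba9876
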
-def localpath(url, d):
- fetcher = bb.fetch2.Fetch([url], d)
- return fetcher.localpath(url)
-
-def runfetchcmd(cmd, d, quiet=False, cleanup=None):
- """
- Run cmd returning the command output
- Raise an error if interrupted or cmd fails
- Optionally echo command output to stdout
- Optionally remove the files/directories listed in cleanup upon failure
- """
-
- # Need to export PATH as binary could be in metadata paths
- # rather than host provided
- # Also include some other variables.
-    # FIXME: Should really include all export variables?
- exportvars = ['HOME', 'PATH',
- 'HTTP_PROXY', 'http_proxy',
- 'HTTPS_PROXY', 'https_proxy',
- 'FTP_PROXY', 'ftp_proxy',
- 'FTPS_PROXY', 'ftps_proxy',
- 'NO_PROXY', 'no_proxy',
- 'ALL_PROXY', 'all_proxy',
- 'GIT_PROXY_COMMAND',
- 'GIT_SSL_CAINFO',
- 'GIT_SMART_HTTP',
- 'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
- 'SOCKS5_USER', 'SOCKS5_PASSWD']
-
- if not cleanup:
- cleanup = []
-
- for var in exportvars:
- val = d.getVar(var, True)
- if val:
- cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
-
- logger.debug(1, "Running %s", cmd)
-
- success = False
- error_message = ""
-
- try:
- (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE)
- success = True
- except bb.process.NotFoundError as e:
- error_message = "Fetch command %s" % (e.command)
- except bb.process.ExecutionError as e:
- if e.stdout:
- output = "output:\n%s\n%s" % (e.stdout, e.stderr)
- elif e.stderr:
- output = "output:\n%s" % e.stderr
- else:
- output = "no output"
- error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output)
- except bb.process.CmdError as e:
- error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
- if not success:
- for f in cleanup:
- try:
- bb.utils.remove(f, True)
- except OSError:
- pass
-
- raise FetchError(error_message)
-
- return output
-
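The export whitelist is applied purely textually: each defined variable becomes an 'export VAR="value";' prefix on the shell command. The construction in isolation, with hypothetical inputs:

    def export_prefix(cmd, env):
        # Mirrors the loop above: prepend an export for every defined value.
        for var, val in env.items():
            if val:
                cmd = 'export %s="%s"; %s' % (var, val, cmd)
        return cmd

    print(export_prefix("wget http://example.com/f.tar.gz",
                        {"http_proxy": "http://proxy:8080", "NO_PROXY": ""}))
    # -> export http_proxy="http://proxy:8080"; wget http://example.com/f.tar.gz
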
-def check_network_access(d, info = "", url = None):
- """
- log remote network access, and error if BB_NO_NETWORK is set
- """
- if d.getVar("BB_NO_NETWORK", True) == "1":
- raise NetworkAccess(url, info)
- else:
- logger.debug(1, "Fetcher accessed the network with the command %s" % info)
-
-def build_mirroruris(origud, mirrors, ld):
- uris = []
- uds = []
-
- replacements = {}
- replacements["TYPE"] = origud.type
- replacements["HOST"] = origud.host
- replacements["PATH"] = origud.path
- replacements["BASENAME"] = origud.path.split("/")[-1]
- replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')
-
- def adduri(ud, uris, uds, mirrors):
- for line in mirrors:
- try:
- (find, replace) = line
- except ValueError:
- continue
- newuri = uri_replace(ud, find, replace, replacements, ld)
- if not newuri or newuri in uris or newuri == origud.url:
- continue
-
- if not trusted_network(ld, newuri):
- logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri))
- continue
-
-            # Create a local copy of the mirrors minus the current line.
-            # This prevents us from recursively processing the same line
-            # as well as indirect recursion A -> B -> C -> A
- localmirrors = list(mirrors)
- localmirrors.remove(line)
-
- try:
- newud = FetchData(newuri, ld)
- newud.setup_localpath(ld)
- except bb.fetch2.BBFetchException as e:
- logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
- logger.debug(1, str(e))
- try:
- # setup_localpath of file:// urls may fail, we should still see
- # if mirrors of the url exist
- adduri(newud, uris, uds, localmirrors)
- except UnboundLocalError:
- pass
- continue
- uris.append(newuri)
- uds.append(newud)
-
- adduri(newud, uris, uds, localmirrors)
-
- adduri(origud, uris, uds, mirrors)
-
- return uris, uds
-
-def rename_bad_checksum(ud, suffix):
- """
-    Renames the file to carry the given suffix
- """
-
- if ud.localpath is None:
- return
-
- new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
- bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
- bb.utils.movefile(ud.localpath, new_localpath)
-
-
-def try_mirror_url(fetch, origud, ud, ld, check = False):
- # Return of None or a value means we're finished
- # False means try another url
-
- if ud.lockfile and ud.lockfile != origud.lockfile:
- lf = bb.utils.lockfile(ud.lockfile)
-
- try:
- if check:
- found = ud.method.checkstatus(fetch, ud, ld)
- if found:
- return found
- return False
-
- os.chdir(ld.getVar("DL_DIR", True))
-
- if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
- ud.method.download(ud, ld)
- if hasattr(ud.method,"build_mirror_data"):
- ud.method.build_mirror_data(ud, ld)
-
- if not ud.localpath or not os.path.exists(ud.localpath):
- return False
-
- if ud.localpath == origud.localpath:
- return ud.localpath
-
- # We may be obtaining a mirror tarball which needs further processing by the real fetcher
- # If that tarball is a local file:// we need to provide a symlink to it
- dldir = ld.getVar("DL_DIR", True)
- if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
- and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
- # Create donestamp in old format to avoid triggering a re-download
- if ud.donestamp:
- bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
- open(ud.donestamp, 'w').close()
- dest = os.path.join(dldir, os.path.basename(ud.localpath))
- if not os.path.exists(dest):
- os.symlink(ud.localpath, dest)
- if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
- origud.method.download(origud, ld)
- if hasattr(origud.method,"build_mirror_data"):
- origud.method.build_mirror_data(origud, ld)
- return origud.localpath
- # Otherwise the result is a local file:// and we symlink to it
- if not os.path.exists(origud.localpath):
- if os.path.islink(origud.localpath):
- # Broken symbolic link
- os.unlink(origud.localpath)
-
- os.symlink(ud.localpath, origud.localpath)
- update_stamp(origud, ld)
- return ud.localpath
-
- except bb.fetch2.NetworkAccess:
- raise
-
- except bb.fetch2.BBFetchException as e:
- if isinstance(e, ChecksumError):
- logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
- logger.warn(str(e))
- if os.path.exists(ud.localpath):
- rename_bad_checksum(ud, e.checksum)
- elif isinstance(e, NoChecksumError):
- raise
- else:
- logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
- logger.debug(1, str(e))
- try:
- ud.method.clean(ud, ld)
- except UnboundLocalError:
- pass
- return False
- finally:
- if ud.lockfile and ud.lockfile != origud.lockfile:
- bb.utils.unlockfile(lf)
-
-
-def try_mirrors(fetch, d, origud, mirrors, check = False):
- """
- Try to use a mirrored version of the sources.
-    This method will be automatically called before the fetchers run.
-
-    d is a bb.data instance
-    origud is the original FetchData for the uri we're trying to download
-    mirrors is the list of mirrors we're going to try
- """
- ld = d.createCopy()
-
- uris, uds = build_mirroruris(origud, mirrors, ld)
-
- for index, uri in enumerate(uris):
- ret = try_mirror_url(fetch, origud, uds[index], ld, check)
- if ret != False:
- return ret
- return None
-
-def trusted_network(d, url):
- """
- Return True if the host of the url may be used for downloads, i.e. if
- networking restrictions are not in effect or the host is listed in
- BB_ALLOWED_NETWORKS (set globally or for a specific recipe).
- """
- if d.getVar('BB_NO_NETWORK', True) == "1":
- return True
-
- pkgname = d.expand(d.getVar('PN', False))
- trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
-
- if not trusted_hosts:
- trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS', True)
-
- # Not enabled.
- if not trusted_hosts:
- return True
-
- scheme, network, path, user, passwd, param = decodeurl(url)
-
- if not network:
- return True
-
- network = network.split(':')[0]
- network = network.lower()
-
- for host in trusted_hosts.split(" "):
- host = host.lower()
- if host.startswith("*.") and ("." + network).endswith(host[1:]):
- return True
- if host == network:
- return True
-
- return False
-
-def srcrev_internal_helper(ud, d, name):
- """
- Return:
- a) a source revision if specified
- b) latest revision if SRCREV="AUTOINC"
- c) None if not specified
- """
-
- srcrev = None
- pn = d.getVar("PN", True)
- attempts = []
- if name != '' and pn:
- attempts.append("SRCREV_%s_pn-%s" % (name, pn))
- if name != '':
- attempts.append("SRCREV_%s" % name)
- if pn:
- attempts.append("SRCREV_pn-%s" % pn)
- attempts.append("SRCREV")
-
- for a in attempts:
- srcrev = d.getVar(a, True)
- if srcrev and srcrev != "INVALID":
- break
-
- if 'rev' in ud.parm and 'tag' in ud.parm:
- raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))
-
- if 'rev' in ud.parm or 'tag' in ud.parm:
- if 'rev' in ud.parm:
- parmrev = ud.parm['rev']
- else:
- parmrev = ud.parm['tag']
- if srcrev == "INVALID" or not srcrev:
- return parmrev
- if srcrev != parmrev:
- raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please spcify one valid value" % (srcrev, parmrev))
- return parmrev
-
- if srcrev == "INVALID" or not srcrev:
- raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
- if srcrev == "AUTOINC":
- srcrev = ud.method.latest_revision(ud, d, name)
-
- return srcrev
-
-def get_checksum_file_list(d):
- """ Get a list of files checksum in SRC_URI
-
- Returns the resolved local paths of all local file entries in
- SRC_URI as a space-separated string
- """
- fetch = Fetch([], d, cache = False, localonly = True)
-
- dl_dir = d.getVar('DL_DIR', True)
- filelist = []
- for u in fetch.urls:
- ud = fetch.ud[u]
-
- if ud and isinstance(ud.method, local.Local):
- paths = ud.method.localpaths(ud, d)
- for f in paths:
- pth = ud.decodedurl
- if '*' in pth:
- f = os.path.join(os.path.abspath(f), pth)
- if f.startswith(dl_dir):
- # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
- if os.path.exists(f):
- bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
- else:
- bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
- filelist.append(f + ":" + str(os.path.exists(f)))
-
- return " ".join(filelist)
-
-def get_file_checksums(filelist, pn):
- """Get a list of the checksums for a list of local files
-
- Returns the checksums for a list of local files, caching the results as
- it proceeds
-
- """
- return _checksum_cache.get_checksums(filelist, pn)
-
-
-class FetchData(object):
- """
- A class which represents the fetcher state for a given URI.
- """
- def __init__(self, url, d, localonly = False):
- # localpath is the location of a downloaded result. If not set, the file is local.
- self.donestamp = None
- self.needdonestamp = True
- self.localfile = ""
- self.localpath = None
- self.lockfile = None
- self.mirrortarball = None
- self.basename = None
- self.basepath = None
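- # decodeurl() splits the (expanded) url into its components; e.g. a
- # hypothetical "git://git.example.com/repo;protocol=https;branch=main"
- # yields type "git", host "git.example.com", path "/repo" and the
- # parameter dict {"protocol": "https", "branch": "main"}.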
- (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
- self.date = self.getSRCDate(d)
- self.url = url
- if not self.user and "user" in self.parm:
- self.user = self.parm["user"]
- if not self.pswd and "pswd" in self.parm:
- self.pswd = self.parm["pswd"]
- self.setup = False
-
- if "name" in self.parm:
- self.md5_name = "%s.md5sum" % self.parm["name"]
- self.sha256_name = "%s.sha256sum" % self.parm["name"]
- else:
- self.md5_name = "md5sum"
- self.sha256_name = "sha256sum"
- if self.md5_name in self.parm:
- self.md5_expected = self.parm[self.md5_name]
- elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
- self.md5_expected = None
- else:
- self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name, True)
- if self.sha256_name in self.parm:
- self.sha256_expected = self.parm[self.sha256_name]
- elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
- self.sha256_expected = None
- else:
- self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name, True)
- self.ignore_checksums = False
-
- self.names = self.parm.get("name",'default').split(',')
-
- self.method = None
- for m in methods:
- if m.supports(self, d):
- self.method = m
- break
-
- if not self.method:
- raise NoMethodError(url)
-
- if localonly and not isinstance(self.method, local.Local):
- raise NonLocalMethod()
-
- if self.parm.get("proto", None) and "protocol" not in self.parm:
- logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
- self.parm["protocol"] = self.parm.get("proto", None)
-
- if hasattr(self.method, "urldata_init"):
- self.method.urldata_init(self, d)
-
- if "localpath" in self.parm:
- # if user sets localpath for file, use it instead.
- self.localpath = self.parm["localpath"]
- self.basename = os.path.basename(self.localpath)
- elif self.localfile:
- self.localpath = self.method.localpath(self, d)
-
- dldir = d.getVar("DL_DIR", True)
-
- if not self.needdonestamp:
- return
-
- # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
- if self.localpath and self.localpath.startswith(dldir):
- basepath = self.localpath
- elif self.localpath:
- basepath = dldir + os.sep + os.path.basename(self.localpath)
- elif self.basepath or self.basename:
- basepath = dldir + os.sep + (self.basepath or self.basename)
- else:
- bb.fatal("Can't determine lock path for url %s" % url)
-
- self.donestamp = basepath + '.done'
- self.lockfile = basepath + '.lock'
-
- def setup_revisons(self, d):
- self.revisions = {}
- for name in self.names:
- self.revisions[name] = srcrev_internal_helper(self, d, name)
-
- # add compatibility code for the case where no name is specified
- if len(self.names) == 1:
- self.revision = self.revisions[self.names[0]]
-
- def setup_localpath(self, d):
- if not self.localpath:
- self.localpath = self.method.localpath(self, d)
-
- def getSRCDate(self, d):
- """
- Return the SRC Date for the component
-
- d the bb.data instance
- """
- if "srcdate" in self.parm:
- return self.parm['srcdate']
-
- pn = d.getVar("PN", True)
-
- if pn:
- return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)
-
- return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
-
-class FetchMethod(object):
- """Base class for 'fetch'ing data"""
-
- def __init__(self, urls=None):
- self.urls = []
-
- def supports(self, urldata, d):
- """
- Check to see if this fetch class supports a given url.
- """
- return 0
-
- def localpath(self, urldata, d):
- """
- Return the local filename of a given url assuming a successful fetch.
- Can also set up variables in urldata for use in download() (saving code
- duplication and duplicate code execution)
- """
- return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)
-
- def supports_checksum(self, urldata):
- """
- Is localpath something that can be represented by a checksum?
- """
-
- # We cannot compute checksums for directories
- if os.path.isdir(urldata.localpath):
- return False
- if urldata.localpath.find("*") != -1:
- return False
-
- return True
-
- def recommends_checksum(self, urldata):
- """
- Is this backend one for which checksumming is recommended (should
- warnings be displayed if there is no checksum)?
- """
- return False
-
- def _strip_leading_slashes(self, relpath):
- """
- Remove leading slash as os.path.join can't cope
- """
- while os.path.isabs(relpath):
- relpath = relpath[1:]
- return relpath
-
- def setUrls(self, urls):
- self.__urls = urls
-
- def getUrls(self):
- return self.__urls
-
- urls = property(getUrls, setUrls, None, "Urls property")
-
- def need_update(self, ud, d):
- """
- Force a fetch, even if localpath exists?
- """
- if os.path.exists(ud.localpath):
- return False
- return True
-
- def supports_srcrev(self):
- """
- The fetcher supports auto source revisions (SRCREV)
- """
- return False
-
- def download(self, urldata, d):
- """
- Fetch urls
- Assumes localpath was called first
- """
- raise NoMethodError(urldata.url)
-
- def unpack(self, urldata, rootdir, data):
- iterate = False
- file = urldata.localpath
-
- # Localpath can't deal with 'dir/*' entries, so it converts them to '.',
- # but this must be corrected back before copying local files
- if urldata.basename == '*' and file.endswith('/.'):
- file = '%s/%s' % (file.rstrip('/.'), urldata.path)
-
- try:
- unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
- except ValueError as exc:
- bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
- (file, urldata.parm.get('unpack')))
-
- base, ext = os.path.splitext(file)
- if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
- efile = os.path.join(rootdir, os.path.basename(base))
- else:
- efile = file
- cmd = None
-
- if unpack:
- if file.endswith('.tar'):
- cmd = 'tar x --no-same-owner -f %s' % file
- elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
- cmd = 'tar xz --no-same-owner -f %s' % file
- elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
- cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
- elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
- cmd = 'gzip -dc %s > %s' % (file, efile)
- elif file.endswith('.bz2'):
- cmd = 'bzip2 -dc %s > %s' % (file, efile)
- elif file.endswith('.tar.xz'):
- cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
- elif file.endswith('.xz'):
- cmd = 'xz -dc %s > %s' % (file, efile)
- elif file.endswith('.tar.lz'):
- cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
- elif file.endswith('.lz'):
- cmd = 'lzip -dc %s > %s' % (file, efile)
- elif file.endswith('.zip') or file.endswith('.jar'):
- try:
- dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
- except ValueError as exc:
- bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
- (file, urldata.parm.get('dos')))
- cmd = 'unzip -q -o'
- if dos:
- cmd = '%s -a' % cmd
- cmd = "%s '%s'" % (cmd, file)
- elif file.endswith('.rpm') or file.endswith('.srpm'):
- if 'extract' in urldata.parm:
- unpack_file = urldata.parm.get('extract')
- cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
- iterate = True
- iterate_file = unpack_file
- else:
- cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
- elif file.endswith('.deb') or file.endswith('.ipk'):
- cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file
- elif file.endswith('.tar.7z'):
- cmd = '7z x -so %s | tar xf - ' % file
- elif file.endswith('.7z'):
- cmd = '7za x -y %s 1>/dev/null' % file
-
- # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
- if 'subdir' in urldata.parm:
- unpackdir = '%s/%s' % (rootdir, urldata.parm.get('subdir'))
- bb.utils.mkdirhier(unpackdir)
- else:
- unpackdir = rootdir
-
- if not unpack or not cmd:
- # If file == dest, then avoid any copies, as we already put the file into dest!
- dest = os.path.join(unpackdir, os.path.basename(file))
- if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
- destdir = '.'
- # For file:// entries all intermediate dirs in path must be created at destination
- if urldata.type == "file":
- # A trailing '/' would cause copying to the wrong place
- urlpath = urldata.path.rstrip('/')
- # We want files placed relative to cwd, so strip any leading '/'
- urlpath = urlpath.lstrip('/')
- if urlpath.find("/") != -1:
- destdir = urlpath.rsplit("/", 1)[0] + '/'
- bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
- cmd = 'cp -fpPR %s %s' % (file, destdir)
-
- if not cmd:
- return
-
- # Change to unpackdir before executing command
- save_cwd = os.getcwd()
- os.chdir(unpackdir)
-
- path = data.getVar('PATH', True)
- if path:
- cmd = "PATH=\"%s\" %s" % (path, cmd)
- bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
- ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
-
- os.chdir(save_cwd)
-
- if ret != 0:
- raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
-
- if iterate:
- iterate_urldata = urldata
- iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
- self.unpack(urldata, rootdir, data)
-
- return
-
- def clean(self, urldata, d):
- """
- Clean any existing full or partial download
- """
- bb.utils.remove(urldata.localpath)
-
- def try_premirror(self, urldata, d):
- """
- Should premirrors be used?
- """
- return True
-
- def checkstatus(self, fetch, urldata, d):
- """
- Check the status of a URL
- Assumes localpath was called first
- """
- logger.info("URL %s could not be checked for status since no method exists.", url)
- return True
-
- def latest_revision(self, ud, d, name):
- """
- Look in the cache for the latest revision, if not present ask the SCM.
- """
- if not hasattr(self, "_latest_revision"):
- raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
-
- revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
- key = self.generate_revision_key(ud, d, name)
- try:
- return revs[key]
- except KeyError:
- revs[key] = rev = self._latest_revision(ud, d, name)
- return rev
-
- def sortable_revision(self, ud, d, name):
- latest_rev = self._build_revision(ud, d, name)
- return True, str(latest_rev)
-
- def generate_revision_key(self, ud, d, name):
- key = self._revision_key(ud, d, name)
- return "%s-%s" % (key, d.getVar("PN", True) or "")
-
-class Fetch(object):
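- # Typical use, as a minimal sketch (assumes d is a datastore with
- # SRC_URI, DL_DIR etc. already set; workdir is hypothetical):
- #   fetcher = Fetch([], d)    # empty list -> urls taken from SRC_URI
- #   fetcher.download()        # fetch every url into DL_DIR
- #   fetcher.unpack(workdir)   # unpack the results into workdir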
- def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
- if localonly and cache:
- raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
-
- if len(urls) == 0:
- urls = d.getVar("SRC_URI", True).split()
- self.urls = urls
- self.d = d
- self.ud = {}
- self.connection_cache = connection_cache
-
- fn = d.getVar('FILE', True)
- if cache and fn and fn in urldata_cache:
- self.ud = urldata_cache[fn]
-
- for url in urls:
- if url not in self.ud:
- try:
- self.ud[url] = FetchData(url, d, localonly)
- except NonLocalMethod:
- if localonly:
- self.ud[url] = None
-
- if fn and cache:
- urldata_cache[fn] = self.ud
-
- def localpath(self, url):
- if url not in self.urls:
- self.ud[url] = FetchData(url, self.d)
-
- self.ud[url].setup_localpath(self.d)
- return self.d.expand(self.ud[url].localpath)
-
- def localpaths(self):
- """
- Return a list of the local filenames, assuming successful fetch
- """
- local = []
-
- for u in self.urls:
- ud = self.ud[u]
- ud.setup_localpath(self.d)
- local.append(ud.localpath)
-
- return local
-
- def download(self, urls=None):
- """
- Fetch all urls
- """
- if not urls:
- urls = self.urls
-
- network = self.d.getVar("BB_NO_NETWORK", True)
- premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
-
- for u in urls:
- ud = self.ud[u]
- ud.setup_localpath(self.d)
- m = ud.method
- localpath = ""
-
- if ud.lockfile:
- lf = bb.utils.lockfile(ud.lockfile)
-
- try:
- self.d.setVar("BB_NO_NETWORK", network)
-
- if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
- localpath = ud.localpath
- elif m.try_premirror(ud, self.d):
- logger.debug(1, "Trying PREMIRRORS")
- mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
- localpath = try_mirrors(self, self.d, ud, mirrors, False)
-
- if premirroronly:
- self.d.setVar("BB_NO_NETWORK", "1")
-
- os.chdir(self.d.getVar("DL_DIR", True))
-
- firsterr = None
- verified_stamp = verify_donestamp(ud, self.d)
- if not localpath and (not verified_stamp or m.need_update(ud, self.d)):
- try:
- if not trusted_network(self.d, ud.url):
- raise UntrustedUrl(ud.url)
- logger.debug(1, "Trying Upstream")
- m.download(ud, self.d)
- if hasattr(m, "build_mirror_data"):
- m.build_mirror_data(ud, self.d)
- localpath = ud.localpath
- # Verify the checksum early so that, if it mismatches, the
- # fetcher still has a chance to fetch from a mirror
- update_stamp(ud, self.d)
-
- except bb.fetch2.NetworkAccess:
- raise
-
- except BBFetchException as e:
- if isinstance(e, ChecksumError):
- logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
- logger.debug(1, str(e))
- if os.path.exists(ud.localpath):
- rename_bad_checksum(ud, e.checksum)
- elif isinstance(e, NoChecksumError):
- raise
- else:
- logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
- logger.debug(1, str(e))
- firsterr = e
- # Remove any incomplete fetch
- if not verified_stamp:
- m.clean(ud, self.d)
- logger.debug(1, "Trying MIRRORS")
- mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
- localpath = try_mirrors(self, self.d, ud, mirrors)
-
- if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
- if firsterr:
- logger.error(str(firsterr))
- raise FetchError("Unable to fetch URL from any source.", u)
-
- update_stamp(ud, self.d)
-
- except BBFetchException as e:
- if isinstance(e, ChecksumError):
- logger.error("Checksum failure fetching %s" % u)
- raise
-
- finally:
- if ud.lockfile:
- bb.utils.unlockfile(lf)
-
- def checkstatus(self, urls=None):
- """
- Check all urls exist upstream
- """
-
- if not urls:
- urls = self.urls
-
- for u in urls:
- ud = self.ud[u]
- ud.setup_localpath(self.d)
- m = ud.method
- logger.debug(1, "Testing URL %s", u)
- # First try checking uri, u, from PREMIRRORS
- mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
- ret = try_mirrors(self, self.d, ud, mirrors, True)
- if not ret:
- # Next try checking from the original uri, u
- try:
- ret = m.checkstatus(self, ud, self.d)
- except:
- # Finally, try checking uri, u, from MIRRORS
- mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
- ret = try_mirrors(self, self.d, ud, mirrors, True)
-
- if not ret:
- raise FetchError("URL %s doesn't work" % u, u)
-
- def unpack(self, root, urls=None):
- """
- Unpack all urls into the given root directory
- """
-
- if not urls:
- urls = self.urls
-
- for u in urls:
- ud = self.ud[u]
- ud.setup_localpath(self.d)
-
- if ud.lockfile:
- lf = bb.utils.lockfile(ud.lockfile)
-
- ud.method.unpack(ud, root, self.d)
-
- if ud.lockfile:
- bb.utils.unlockfile(lf)
-
- def clean(self, urls=None):
- """
- Clean files that the fetcher gets or places
- """
-
- if not urls:
- urls = self.urls
-
- for url in urls:
- if url not in self.ud:
- self.ud[url] = FetchData(url, self.d)
- ud = self.ud[url]
- ud.setup_localpath(self.d)
-
- if not ud.localfile and ud.localpath is None:
- continue
-
- if ud.lockfile:
- lf = bb.utils.lockfile(ud.lockfile)
-
- ud.method.clean(ud, self.d)
- if ud.donestamp:
- bb.utils.remove(ud.donestamp)
-
- if ud.lockfile:
- bb.utils.unlockfile(lf)
-
-class FetchConnectionCache(object):
- """
- A class which represents a container for socket connections.
- """
- def __init__(self):
- self.cache = {}
-
- def get_connection_name(self, host, port):
- return host + ':' + str(port)
-
- def add_connection(self, host, port, connection):
- cn = self.get_connection_name(host, port)
-
- if cn not in self.cache:
- self.cache[cn] = connection
-
- def get_connection(self, host, port):
- connection = None
-
- cn = self.get_connection_name(host, port)
- if cn in self.cache:
- connection = self.cache[cn]
-
- return connection
-
- def remove_connection(self, host, port):
- cn = self.get_connection_name(host, port)
- if cn in self.cache:
- self.cache[cn].close()
- del self.cache[cn]
-
- def close_connections(self):
- for cn in self.cache.keys():
- self.cache[cn].close()
- del self.cache[cn]
-
-from . import cvs
-from . import git
-from . import gitsm
-from . import gitannex
-from . import local
-from . import svn
-from . import wget
-from . import ssh
-from . import sftp
-from . import perforce
-from . import bzr
-from . import hg
-from . import osc
-from . import repo
-from . import clearcase
-from . import npm
-
-methods.append(local.Local())
-methods.append(wget.Wget())
-methods.append(svn.Svn())
-methods.append(git.Git())
-methods.append(gitsm.GitSM())
-methods.append(gitannex.GitANNEX())
-methods.append(cvs.Cvs())
-methods.append(ssh.SSH())
-methods.append(sftp.SFTP())
-methods.append(perforce.Perforce())
-methods.append(bzr.Bzr())
-methods.append(hg.Hg())
-methods.append(osc.Osc())
-methods.append(repo.Repo())
-methods.append(clearcase.ClearCase())
-methods.append(npm.Npm())
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/bzr.py b/yocto-poky/bitbake/lib/bb/fetch2/bzr.py
deleted file mode 100644
index 03e9ac461..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/bzr.py
+++ /dev/null
@@ -1,143 +0,0 @@
-"""
-BitBake 'Fetch' implementation for bzr.
-
-"""
-
-# Copyright (C) 2007 Ross Burton
-# Copyright (C) 2007 Richard Purdie
-#
-# Classes for obtaining upstream sources for the
-# BitBake build tools.
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Bzr(FetchMethod):
- def supports(self, ud, d):
- return ud.type in ['bzr']
-
- def urldata_init(self, ud, d):
- """
- Init bzr-specific variables within url data
- """
- # Create paths to bzr checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)
-
- ud.setup_revisons(d)
-
- if not ud.revision:
- ud.revision = self.latest_revision(ud, d, ud.names[0])
-
- ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
-
- def _buildbzrcommand(self, ud, d, command):
- """
- Build up a bzr command line based on ud
- command is "fetch", "update", "revno"
- """
-
- basecmd = data.expand('${FETCHCMD_bzr}', d)
-
- proto = ud.parm.get('protocol', 'http')
-
- bzrroot = ud.host + ud.path
-
- options = []
-
- if command == "revno":
- bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
- else:
- if ud.revision:
- options.append("-r %s" % ud.revision)
-
- if command == "fetch":
- bzrcmd = "%s branch %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
- elif command == "update":
- bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
- else:
- raise FetchError("Invalid bzr command %s" % command, ud.url)
-
- return bzrcmd
-
- def download(self, ud, d):
- """Fetch url"""
-
- if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
- bzrcmd = self._buildbzrcommand(ud, d, "update")
- logger.debug(1, "BZR Update %s", ud.url)
- bb.fetch2.check_network_access(d, bzrcmd, ud.url)
- os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
- runfetchcmd(bzrcmd, d)
- else:
- bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
- bzrcmd = self._buildbzrcommand(ud, d, "fetch")
- bb.fetch2.check_network_access(d, bzrcmd, ud.url)
- logger.debug(1, "BZR Checkout %s", ud.url)
- bb.utils.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", bzrcmd)
- runfetchcmd(bzrcmd, d)
-
- os.chdir(ud.pkgdir)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"
-
- # tar them up to a defined filename
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])
-
- def supports_srcrev(self):
- return True
-
- def _revision_key(self, ud, d, name):
- """
- Return a unique key for the url
- """
- return "bzr:" + ud.pkgdir
-
- def _latest_revision(self, ud, d, name):
- """
- Return the latest upstream revision number
- """
- logger.debug(2, "BZR fetcher hitting network for %s", ud.url)
-
- bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)
-
- output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
-
- return output.strip()
-
- def sortable_revision(self, ud, d, name):
- """
- Return a sortable revision number which in our case is the revision number
- """
-
- return False, self._build_revision(ud, d)
-
- def _build_revision(self, ud, d):
- return ud.revision
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py b/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py
deleted file mode 100644
index ba83e7cb6..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py
+++ /dev/null
@@ -1,263 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' clearcase implementation
-
-The clearcase fetcher is used to retrieve files from a ClearCase repository.
-
-Usage in the recipe:
-
- SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
- SRCREV = "EXAMPLE_CLEARCASE_TAG"
- PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"
-
-The fetcher uses the rcleartool or cleartool remote client, depending on which one is available.
-
-Supported SRC_URI options are:
-
-- vob
- (required) The name of the clearcase VOB (with a leading "/")
-
-- module
- The module in the selected VOB (with a leading "/")
-
- The module and vob parameters are combined to create
- the following load rule in the view config spec:
- load <vob><module>
-
-- proto
- http or https
-
-Related variables:
-
- CCASE_CUSTOM_CONFIG_SPEC
- Write a config spec to this variable in your recipe to use it instead
- of the default config spec generated by this fetcher.
- Please note that the SRCREV loses its functionality if you specify
- this variable. SRCREV is still used to label the archive after a fetch,
- but it doesn't define what's fetched.
-
-User credentials:
- cleartool:
- The login of cleartool is handled by the system. No special steps needed.
-
- rcleartool:
- In order to use rcleartool with authenticated users an `rcleartool login` is
- necessary before using the fetcher.
-"""
-# Copyright (C) 2014 Siemens AG
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-
-import os
-import sys
-import shutil
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
- from bb.fetch2 import FetchError
- from bb.fetch2 import MissingParameterError
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-from distutils import spawn
-
-class ClearCase(FetchMethod):
- """Class to fetch urls via 'clearcase'"""
- def init(self, d):
- pass
-
- def supports(self, ud, d):
- """
- Check to see if a given url can be fetched with Clearcase.
- """
- return ud.type in ['ccrc']
-
- def debug(self, msg):
- logger.debug(1, "ClearCase: %s", msg)
-
- def urldata_init(self, ud, d):
- """
- Init ClearCase-specific variables within url data
- """
- ud.proto = "https"
- if 'protocol' in ud.parm:
- ud.proto = ud.parm['protocol']
- if ud.proto not in ('http', 'https'):
- raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
-
- ud.vob = ''
- if 'vob' in ud.parm:
- ud.vob = ud.parm['vob']
- else:
- msg = ud.url+": vob must be defined so the fetcher knows what to get."
- raise MissingParameterError('vob', msg)
-
- if 'module' in ud.parm:
- ud.module = ud.parm['module']
- else:
- ud.module = ""
-
- ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")
-
- if data.getVar("SRCREV", d, True) == "INVALID":
- raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")
-
- ud.label = d.getVar("SRCREV", False)
- ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)
-
- ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path)
-
- ud.identifier = "clearcase-%s%s-%s" % ( ud.vob.replace("/", ""),
- ud.module.replace("/", "."),
- ud.label.replace("/", "."))
-
- ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", True))
- ud.csname = "%s-config-spec" % (ud.identifier)
- ud.ccasedir = os.path.join(data.getVar("DL_DIR", d, True), ud.type)
- ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
- ud.configspecfile = os.path.join(ud.ccasedir, ud.csname)
- ud.localfile = "%s.tar.gz" % (ud.identifier)
-
- self.debug("host = %s" % ud.host)
- self.debug("path = %s" % ud.path)
- self.debug("server = %s" % ud.server)
- self.debug("proto = %s" % ud.proto)
- self.debug("type = %s" % ud.type)
- self.debug("vob = %s" % ud.vob)
- self.debug("module = %s" % ud.module)
- self.debug("basecmd = %s" % ud.basecmd)
- self.debug("label = %s" % ud.label)
- self.debug("ccasedir = %s" % ud.ccasedir)
- self.debug("viewdir = %s" % ud.viewdir)
- self.debug("viewname = %s" % ud.viewname)
- self.debug("configspecfile = %s" % ud.configspecfile)
- self.debug("localfile = %s" % ud.localfile)
-
- ud.localfile = os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def _build_ccase_command(self, ud, command):
- """
- Build up a commandline based on ud
- command is: mkview, setcs, rmview
- """
- options = []
-
- if "rcleartool" in ud.basecmd:
- options.append("-server %s" % ud.server)
-
- basecmd = "%s %s" % (ud.basecmd, command)
-
- if command == 'mkview':
- if not "rcleartool" in ud.basecmd:
- # Cleartool needs a -snapshot view
- options.append("-snapshot")
- options.append("-tag %s" % ud.viewname)
- options.append(ud.viewdir)
-
- elif command == 'rmview':
- options.append("-force")
- options.append("%s" % ud.viewdir)
-
- elif command == 'setcs':
- options.append("-overwrite")
- options.append(ud.configspecfile)
-
- else:
- raise FetchError("Invalid ccase command %s" % command)
-
- ccasecmd = "%s %s" % (basecmd, " ".join(options))
- self.debug("ccasecmd = %s" % ccasecmd)
- return ccasecmd
-
- def _write_configspec(self, ud, d):
- """
- Create config spec file (ud.configspecfile) for ccase view
- """
- config_spec = ""
- custom_config_spec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)
- if custom_config_spec is not None:
- for line in custom_config_spec.split("\\n"):
- config_spec += line+"\n"
- bb.warn("A custom config spec has been set, SRCREV is only relevant for the tarball name.")
- else:
- config_spec += "element * CHECKEDOUT\n"
- config_spec += "element * %s\n" % ud.label
- config_spec += "load %s%s\n" % (ud.vob, ud.module)
-
- logger.info("Using config spec: \n%s" % config_spec)
-
- with open(ud.configspecfile, 'w') as f:
- f.write(config_spec)
-
- def _remove_view(self, ud, d):
- if os.path.exists(ud.viewdir):
- os.chdir(ud.ccasedir)
- cmd = self._build_ccase_command(ud, 'rmview')
- logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
- bb.fetch2.check_network_access(d, cmd, ud.url)
- output = runfetchcmd(cmd, d)
- logger.info("rmview output: %s", output)
-
- def need_update(self, ud, d):
- if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
- ud.identifier += "-%s" % d.getVar("DATETIME", True)
- return True
- if os.path.exists(ud.localpath):
- return False
- return True
-
- def supports_srcrev(self):
- return True
-
- def sortable_revision(self, ud, d, name):
- return False, ud.identifier
-
- def download(self, ud, d):
- """Fetch url"""
-
- # Make a fresh view
- bb.utils.mkdirhier(ud.ccasedir)
- self._write_configspec(ud, d)
- cmd = self._build_ccase_command(ud, 'mkview')
- logger.info("creating view [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
- bb.fetch2.check_network_access(d, cmd, ud.url)
- try:
- runfetchcmd(cmd, d)
- except FetchError as e:
- if "CRCLI2008E" in e.msg:
- raise FetchError("%s\n%s\n" % (e.msg, "Call `rcleartool login` in your console to authenticate to the clearcase server before running bitbake."))
- else:
- raise e
-
- # Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
- os.chdir(ud.viewdir)
- cmd = self._build_ccase_command(ud, 'setcs')
- logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
- bb.fetch2.check_network_access(d, cmd, ud.url)
- output = runfetchcmd(cmd, d)
- logger.info("%s", output)
-
- # Copy the configspec to the viewdir so we have it in our source tarball later
- shutil.copyfile(ud.configspecfile, os.path.join(ud.viewdir, ud.csname))
-
- # Clean clearcase meta-data before tar
-
- runfetchcmd('tar -czf "%s" .' % (ud.localpath), d, cleanup = [ud.localpath])
-
- # Clean up so we can create a new view next time
- self.clean(ud, d)
-
- def clean(self, ud, d):
- self._remove_view(ud, d)
- bb.utils.remove(ud.configspecfile)
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/cvs.py b/yocto-poky/bitbake/lib/bb/fetch2/cvs.py
deleted file mode 100644
index d27d96f68..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/cvs.py
+++ /dev/null
@@ -1,171 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-#Based on functions from the base bb module, Copyright 2003 Holger Schurig
-#
-
-import os
-import logging
-import bb
-from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger
-from bb.fetch2 import runfetchcmd
-
-class Cvs(FetchMethod):
- """
- Class to fetch a module or modules from cvs repositories
- """
- def supports(self, ud, d):
- """
- Check to see if a given url can be fetched with cvs.
- """
- return ud.type in ['cvs']
-
- def urldata_init(self, ud, d):
- if not "module" in ud.parm:
- raise MissingParameterError("module", ud.url)
- ud.module = ud.parm["module"]
-
- ud.tag = ud.parm.get('tag', "")
-
- # Override the default date in certain cases
- if 'date' in ud.parm:
- ud.date = ud.parm['date']
- elif ud.tag:
- ud.date = ""
-
- norecurse = ''
- if 'norecurse' in ud.parm:
- norecurse = '_norecurse'
-
- fullpath = ''
- if 'fullpath' in ud.parm:
- fullpath = '_fullpath'
-
- ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
-
- def need_update(self, ud, d):
- if (ud.date == "now"):
- return True
- if not os.path.exists(ud.localpath):
- return True
- return False
-
- def download(self, ud, d):
-
- method = ud.parm.get('method', 'pserver')
- localdir = ud.parm.get('localdir', ud.module)
- cvs_port = ud.parm.get('port', '')
-
- cvs_rsh = None
- if method == "ext":
- if "rsh" in ud.parm:
- cvs_rsh = ud.parm["rsh"]
-
- if method == "dir":
- cvsroot = ud.path
- else:
- cvsroot = ":" + method
- cvsproxyhost = d.getVar('CVS_PROXY_HOST', True)
- if cvsproxyhost:
- cvsroot += ";proxy=" + cvsproxyhost
- cvsproxyport = d.getVar('CVS_PROXY_PORT', True)
- if cvsproxyport:
- cvsroot += ";proxyport=" + cvsproxyport
- cvsroot += ":" + ud.user
- if ud.pswd:
- cvsroot += ":" + ud.pswd
- cvsroot += "@" + ud.host + ":" + cvs_port + ud.path
-
- options = []
- if 'norecurse' in ud.parm:
- options.append("-l")
- if ud.date:
- # treat YYYYMMDDHHMM specially for CVS
- if len(ud.date) == 12:
- options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
- else:
- options.append("-D \"%s UTC\"" % ud.date)
- if ud.tag:
- options.append("-r %s" % ud.tag)
-
- cvsbasecmd = d.getVar("FETCHCMD_cvs", True)
- cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
- cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)
-
- if cvs_rsh:
- cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
- cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
-
- # create module directory
- logger.debug(2, "Fetch: checking for module directory")
- pkg = d.getVar('PN', True)
- pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
- moddir = os.path.join(pkgdir, localdir)
- if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
- logger.info("Update " + ud.url)
- bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
- # update sources there
- os.chdir(moddir)
- cmd = cvsupdatecmd
- else:
- logger.info("Fetch " + ud.url)
- # check out sources there
- bb.utils.mkdirhier(pkgdir)
- os.chdir(pkgdir)
- logger.debug(1, "Running %s", cvscmd)
- bb.fetch2.check_network_access(d, cvscmd, ud.url)
- cmd = cvscmd
-
- runfetchcmd(cmd, d, cleanup = [moddir])
-
- if not os.access(moddir, os.R_OK):
- raise FetchError("Directory %s was not readable despite sucessful fetch?!" % moddir, ud.url)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude 'CVS'"
-
- # tar them up to a defined filename
- if 'fullpath' in ud.parm:
- os.chdir(pkgdir)
- cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
- else:
- os.chdir(moddir)
- os.chdir('..')
- cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))
-
- runfetchcmd(cmd, d, cleanup = [ud.localpath])
-
- def clean(self, ud, d):
- """ Clean CVS Files and tarballs """
-
- pkg = d.getVar('PN', True)
- pkgdir = os.path.join(d.getVar("CVSDIR", True), pkg)
-
- bb.utils.remove(pkgdir, True)
- bb.utils.remove(ud.localpath)
-
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/git.py b/yocto-poky/bitbake/lib/bb/fetch2/git.py
deleted file mode 100644
index 526668bc2..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/git.py
+++ /dev/null
@@ -1,435 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' git implementation
-
- The git fetcher supports SRC_URIs of the form:
- SRC_URI = "git://some.host/somepath;OptionA=xxx;OptionB=xxx;..."
-
-Supported SRC_URI options are:
-
-- branch
- The git branch to retrieve from. The default is "master"
-
- This option also supports fetching multiple branches, separated by
- commas. In the multiple-branch case, the name option must carry the
- same number of names to match the branches; the names are then used
- to specify the SRCREV for each branch
- e.g:
- SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
- SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
- SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
-
-- tag
- The git tag to retrieve. The default is "master"
-
-- protocol
- The method to use to access the repository. Common options are "git",
- "http", "https", "file", "ssh" and "rsync". The default is "git".
-
-- rebaseable
- rebaseable indicates that the upstream git repo may rebase in the future,
- so the current revision may disappear from the upstream repo. This option
- reminds the fetcher to preserve the local cache carefully for future use.
- The default value is "0", set rebaseable=1 for rebaseable git repo.
-
-- nocheckout
- Don't checkout source code when unpacking. Set this option for recipes
- that have their own routine to check out code.
- The default is "0", set nocheckout=1 if needed.
-
-- bareclone
- Create a bare clone of the source code and don't checkout the source code
- when unpacking. Set this option for recipes that have their own routine
- to check out code and their own tracking-branch requirements.
- The default is "0", set bareclone=1 if needed.
-
-- nobranch
- Don't validate that the SHA is on a branch. Set this option for recipes
- that refer to a commit reachable from a tag rather than a branch.
- The default is "0", set nobranch=1 if needed.
-
-"""
-
-#Copyright (C) 2005 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import errno
-import os
-import re
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Git(FetchMethod):
- """Class to fetch a module or modules from git repositories"""
- def init(self, d):
- pass
-
- def supports(self, ud, d):
- """
- Check to see if a given url can be fetched with git.
- """
- return ud.type in ['git']
-
- def supports_checksum(self, urldata):
- return False
-
- def urldata_init(self, ud, d):
- """
- Init git-specific variables within url data
- so that git methods like latest_revision() can work
- """
- if 'protocol' in ud.parm:
- ud.proto = ud.parm['protocol']
- elif not ud.host:
- ud.proto = 'file'
- else:
- ud.proto = "git"
-
- if ud.proto not in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
- raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
-
- ud.nocheckout = ud.parm.get("nocheckout","0") == "1"
-
- ud.rebaseable = ud.parm.get("rebaseable","0") == "1"
-
- ud.nobranch = ud.parm.get("nobranch","0") == "1"
-
- # bareclone implies nocheckout
- ud.bareclone = ud.parm.get("bareclone","0") == "1"
- if ud.bareclone:
- ud.nocheckout = 1
-
- ud.unresolvedrev = {}
- branches = ud.parm.get("branch", "master").split(',')
- if len(branches) != len(ud.names):
- raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
- ud.branches = {}
- for name in ud.names:
- branch = branches[ud.names.index(name)]
- ud.branches[name] = branch
- ud.unresolvedrev[name] = branch
-
- ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git -c core.fsyncobjectfiles=0"
-
- ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable
-
- ud.setup_revisons(d)
-
- for name in ud.names:
- # Ensure anything that doesn't look like a sha1 checksum/revision is translated into one
- if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
- if ud.revisions[name]:
- ud.unresolvedrev[name] = ud.revisions[name]
- ud.revisions[name] = self.latest_revision(ud, d, name)
-
- gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.'))
- if gitsrcname.startswith('.'):
- gitsrcname = gitsrcname[1:]
-
- # for rebaseable git repo, it is necessary to keep mirror tar ball
- # per revision, so that even the revision disappears from the
- # upstream repo in the future, the mirror will remain intact and still
- # contains the revision
- if ud.rebaseable:
- for name in ud.names:
- gitsrcname = gitsrcname + '_' + ud.revisions[name]
- ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
- ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
- gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/")
- ud.clonedir = os.path.join(gitdir, gitsrcname)
-
- ud.localfile = ud.clonedir
-
- def localpath(self, ud, d):
- return ud.clonedir
-
- def need_update(self, ud, d):
- if not os.path.exists(ud.clonedir):
- return True
- os.chdir(ud.clonedir)
- for name in ud.names:
- if not self._contains_ref(ud, d, name):
- return True
- if ud.write_tarballs and not os.path.exists(ud.fullmirror):
- return True
- return False
-
- def try_premirror(self, ud, d):
- # If we don't do this, updating an existing checkout with only premirrors
- # is not possible
- if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
- return True
- if os.path.exists(ud.clonedir):
- return False
- return True
-
- def download(self, ud, d):
- """Fetch url"""
-
- # If the checkout doesn't exist and the mirror tarball does, extract it
- if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
- bb.utils.mkdirhier(ud.clonedir)
- os.chdir(ud.clonedir)
- runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
-
- repourl = self._get_repo_url(ud)
-
- # If the repo still doesn't exist, fallback to cloning it
- if not os.path.exists(ud.clonedir):
- # We do this since git will use a "-l" option automatically for local urls where possible
- if repourl.startswith("file://"):
- repourl = repourl[7:]
- clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
- if ud.proto.lower() != 'file':
- bb.fetch2.check_network_access(d, clone_cmd)
- runfetchcmd(clone_cmd, d)
-
- os.chdir(ud.clonedir)
- # Update the checkout if needed
- needupdate = False
- for name in ud.names:
- if not self._contains_ref(ud, d, name):
- needupdate = True
- if needupdate:
- try:
- runfetchcmd("%s remote rm origin" % ud.basecmd, d)
- except bb.fetch2.FetchError:
- logger.debug(1, "No Origin")
-
- runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
- fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
- if ud.proto.lower() != 'file':
- bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
- runfetchcmd(fetch_cmd, d)
- runfetchcmd("%s prune-packed" % ud.basecmd, d)
- runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
- try:
- os.unlink(ud.fullmirror)
- except OSError as exc:
- if exc.errno != errno.ENOENT:
- raise
- os.chdir(ud.clonedir)
- for name in ud.names:
- if not self._contains_ref(ud, d, name):
- raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
-
- def build_mirror_data(self, ud, d):
- # Generate a mirror tarball if needed
- if ud.write_tarballs and not os.path.exists(ud.fullmirror):
- # it's possible that this symlink points to read-only filesystem with PREMIRROR
- if os.path.islink(ud.fullmirror):
- os.unlink(ud.fullmirror)
-
- os.chdir(ud.clonedir)
- logger.info("Creating tarball of git repository")
- runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
- runfetchcmd("touch %s.done" % (ud.fullmirror), d)
-
- def unpack(self, ud, destdir, d):
- """ unpack the downloaded src to destdir"""
-
- subdir = ud.parm.get("subpath", "")
- if subdir != "":
- readpathspec = ":%s" % (subdir)
- def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
- else:
- readpathspec = ""
- def_destsuffix = "git/"
-
- destsuffix = ud.parm.get("destsuffix", def_destsuffix)
- destdir = ud.destdir = os.path.join(destdir, destsuffix)
- if os.path.exists(destdir):
- bb.utils.prunedir(destdir)
-
- cloneflags = "-s -n"
- if ud.bareclone:
- cloneflags += " --mirror"
-
- runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, ud.clonedir, destdir), d)
- os.chdir(destdir)
- repourl = self._get_repo_url(ud)
- runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d)
- if not ud.nocheckout:
- if subdir != "":
- runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
- runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
- elif not ud.nobranch:
- branchname = ud.branches[ud.names[0]]
- runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
- ud.revisions[ud.names[0]]), d)
- runfetchcmd("%s branch --set-upstream %s origin/%s" % (ud.basecmd, branchname, \
- branchname), d)
- else:
- runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)
-
- return True
-
- def clean(self, ud, d):
- """ clean the git directory """
-
- bb.utils.remove(ud.localpath, True)
- bb.utils.remove(ud.fullmirror)
- bb.utils.remove(ud.fullmirror + ".done")
-
- def supports_srcrev(self):
- return True
-
- def _contains_ref(self, ud, d, name):
- cmd = ""
- if ud.nobranch:
- cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
- ud.basecmd, ud.revisions[name])
- else:
- cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
- ud.basecmd, ud.revisions[name], ud.branches[name])
- try:
- output = runfetchcmd(cmd, d, quiet=True)
- except bb.fetch2.FetchError:
- return False
- if len(output.split()) > 1:
- raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
- return output.split()[0] != "0"
-
- def _get_repo_url(self, ud):
- """
- Return the repository URL
- """
- if ud.user:
- username = ud.user + '@'
- else:
- username = ""
- return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)
-
- def _revision_key(self, ud, d, name):
- """
- Return a unique key for the url
- """
- return "git:" + ud.host + ud.path.replace('/', '.') + ud.unresolvedrev[name]
-
- def _lsremote(self, ud, d, search):
- """
- Run git ls-remote with the specified search string
- """
- repourl = self._get_repo_url(ud)
- cmd = "%s ls-remote %s %s" % \
- (ud.basecmd, repourl, search)
- if ud.proto.lower() != 'file':
- bb.fetch2.check_network_access(d, cmd)
- output = runfetchcmd(cmd, d, True)
- if not output:
- raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
- return output
-
- def _latest_revision(self, ud, d, name):
- """
- Compute the HEAD revision for the url
- """
- output = self._lsremote(ud, d, "")
- # Tags of the form ^{} may not work, need to fall back to the other forms
- if ud.unresolvedrev[name][:5] == "refs/":
- head = ud.unresolvedrev[name]
- tag = ud.unresolvedrev[name]
- else:
- head = "refs/heads/%s" % ud.unresolvedrev[name]
- tag = "refs/tags/%s" % ud.unresolvedrev[name]
- for s in [head, tag + "^{}", tag]:
- for l in output.split('\n'):
- if s in l:
- return l.split()[0]
- raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
- (ud.unresolvedrev[name], ud.host+ud.path))
-
- def latest_versionstring(self, ud, d):
- """
- Compute the latest release name like "x.y.z" in "x.y.z+gitHASH"
- by searching through the tags output of ls-remote, comparing
- versions and returning the highest match.
- """
- pupver = ('', '')
-
- tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)")
- try:
- output = self._lsremote(ud, d, "refs/tags/*")
- except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess):
- return pupver
-
- verstring = ""
- revision = ""
- for line in output.split("\n"):
- if not line:
- break
-
- tag_head = line.split("/")[-1]
- # Ignore non-released branches
- m = re.search("(alpha|beta|rc|final)+", tag_head)
- if m:
- continue
-
- # search for version in the line
- tag = tagregex.search(tag_head)
- if tag is None:
- continue
-
- tag = tag.group('pver')
- tag = tag.replace("_", ".")
-
- if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
- continue
-
- verstring = tag
- revision = line.split()[0]
- pupver = (verstring, revision)
-
- return pupver
-
- def _build_revision(self, ud, d, name):
- return ud.revisions[name]
-
- def gitpkgv_revision(self, ud, d, name):
- """
- Return a sortable revision number by counting commits in the history
- Based on gitpkgv.bbclass in meta-openembedded
- """
- rev = self._build_revision(ud, d, name)
- localpath = ud.localpath
- rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
- if not os.path.exists(localpath):
- commits = None
- else:
- if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
- from pipes import quote
- commits = bb.fetch2.runfetchcmd(
- "git rev-list %s -- | wc -l" % (quote(rev)),
- d, quiet=True).strip().lstrip('0')
- if commits:
- open(rev_file, "w").write("%d\n" % int(commits))
- else:
- commits = open(rev_file, "r").readline(128).strip()
- if commits:
- return False, "%s+%s" % (commits, rev[:7])
- else:
- return True, str(rev)
-
- def checkstatus(self, fetch, ud, d):
- try:
- self._lsremote(ud, d, "")
- return True
- except bb.fetch2.FetchError:
- return False
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py b/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py
deleted file mode 100644
index 0f3789745..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' git annex implementation
-"""
-
-# Copyright (C) 2014 Otavio Salvador
-# Copyright (C) 2014 O.S. Systems Software LTDA.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import bb
-from bb import data
-from bb.fetch2.git import Git
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class GitANNEX(Git):
- def supports(self, ud, d):
- """
- Check to see if a given url can be fetched with git.
- """
- return ud.type in ['gitannex']
-
- def uses_annex(self, ud, d):
- for name in ud.names:
- try:
- runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True)
- return True
- except bb.fetch2.FetchError:
- pass
-
- return False
-
- def update_annex(self, ud, d):
- try:
- runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True)
- except bb.fetch2.FetchError:
- return False
- runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True)
-
- return True
-
- def download(self, ud, d):
- Git.download(self, ud, d)
-
- os.chdir(ud.clonedir)
- annex = self.uses_annex(ud, d)
- if annex:
- self.update_annex(ud, d)
-
- def unpack(self, ud, destdir, d):
- Git.unpack(self, ud, destdir, d)
-
- os.chdir(ud.destdir)
- try:
- runfetchcmd("%s annex sync" % (ud.basecmd), d)
- except bb.fetch2.FetchError:
- pass
-
- annex = self.uses_annex(ud, d)
- if annex:
- runfetchcmd("%s annex get" % (ud.basecmd), d)
- runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True)
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py b/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py
deleted file mode 100644
index 752f1d3c1..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' git submodules implementation
-
-Inherits from and extends the Git fetcher to retrieve submodules of a git repository
-after cloning.
-
-SRC_URI = "gitsm://<see Git fetcher for syntax>"
-
-See the Git fetcher, git://, for usage documentation.
-
-NOTE: Switching a SRC_URI from "git://" to "gitsm://" requires a clean of your recipe.
-
-"""
-
-# Copyright (C) 2013 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import bb
-from bb import data
-from bb.fetch2.git import Git
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class GitSM(Git):
- def supports(self, ud, d):
- """
- Check to see if a given url can be fetched with git submodules (gitsm).
- """
- return ud.type in ['gitsm']
-
- def uses_submodules(self, ud, d):
- for name in ud.names:
- try:
- runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True)
- return True
- except bb.fetch2.FetchError:
- pass
- return False
-
- def _set_relative_paths(self, repopath):
- """
- Fix submodule paths to be relative instead of absolute,
- so that when we move the repo it doesn't break
- (In Git 1.7.10+ this is done automatically)
- """
- submodules = []
- with open(os.path.join(repopath, '.gitmodules'), 'r') as f:
- for line in f.readlines():
- if line.startswith('[submodule'):
- submodules.append(line.split('"')[1])
-
- for module in submodules:
- repo_conf = os.path.join(repopath, module, '.git')
- if os.path.exists(repo_conf):
- with open(repo_conf, 'r') as f:
- lines = f.readlines()
- newpath = ''
- for i, line in enumerate(lines):
- if line.startswith('gitdir:'):
- oldpath = line.split(': ')[-1].rstrip()
- if oldpath.startswith('/'):
- newpath = '../' * (module.count('/') + 1) + '.git/modules/' + module
- lines[i] = 'gitdir: %s\n' % newpath
- break
- if newpath:
- with open(repo_conf, 'w') as f:
- for line in lines:
- f.write(line)
-
- repo_conf2 = os.path.join(repopath, '.git', 'modules', module, 'config')
- if os.path.exists(repo_conf2):
- with open(repo_conf2, 'r') as f:
- lines = f.readlines()
- newpath = ''
- for i, line in enumerate(lines):
- if line.lstrip().startswith('worktree = '):
- oldpath = line.split(' = ')[-1].rstrip()
- if oldpath.startswith('/'):
- newpath = '../' * (module.count('/') + 3) + module
- lines[i] = '\tworktree = %s\n' % newpath
- break
- if newpath:
- with open(repo_conf2, 'w') as f:
- for line in lines:
- f.write(line)
-
- def update_submodules(self, ud, d):
- # We have to convert bare -> full repo, do the submodule bit, then convert back
- tmpclonedir = ud.clonedir + ".tmp"
- gitdir = tmpclonedir + os.sep + ".git"
- bb.utils.remove(tmpclonedir, True)
- os.mkdir(tmpclonedir)
- os.rename(ud.clonedir, gitdir)
- runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
- os.chdir(tmpclonedir)
- runfetchcmd(ud.basecmd + " reset --hard", d)
- runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
- runfetchcmd(ud.basecmd + " submodule update --init --recursive", d)
- self._set_relative_paths(tmpclonedir)
- runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d)
- os.rename(gitdir, ud.clonedir,)
- bb.utils.remove(tmpclonedir, True)
-
- def download(self, ud, d):
- Git.download(self, ud, d)
-
- os.chdir(ud.clonedir)
- submodules = self.uses_submodules(ud, d)
- if submodules:
- self.update_submodules(ud, d)
-
- def unpack(self, ud, destdir, d):
- Git.unpack(self, ud, destdir, d)
-
- os.chdir(ud.destdir)
- submodules = self.uses_submodules(ud, d)
- if submodules:
- runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
- runfetchcmd(ud.basecmd + " submodule update --init --recursive", d)
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/hg.py b/yocto-poky/bitbake/lib/bb/fetch2/hg.py
deleted file mode 100644
index 3b743ff51..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/hg.py
+++ /dev/null
@@ -1,278 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementation for mercurial DRCS (hg).
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2004 Marcin Juszkiewicz
-# Copyright (C) 2007 Robert Schuster
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import sys
-import logging
-import bb
-import errno
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import MissingParameterError
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Hg(FetchMethod):
- """Class to fetch from mercurial repositories"""
- def supports(self, ud, d):
- """
- Check to see if a given url can be fetched with mercurial.
- """
- return ud.type in ['hg']
-
- def supports_checksum(self, urldata):
- """
- Don't require checksums for local archives created from
- repository checkouts.
- """
- return False
-
- def urldata_init(self, ud, d):
- """
- Initialize hg-specific variables within the url data
- """
- if not "module" in ud.parm:
- raise MissingParameterError('module', ud.url)
-
- ud.module = ud.parm["module"]
-
- if 'protocol' in ud.parm:
- ud.proto = ud.parm['protocol']
- elif not ud.host:
- ud.proto = 'file'
- else:
- ud.proto = "hg"
-
- ud.setup_revisons(d)
-
- if 'rev' in ud.parm:
- ud.revision = ud.parm['rev']
- elif not ud.revision:
- ud.revision = self.latest_revision(ud, d)
-
- # Create paths to mercurial checkouts
- hgsrcname = '%s_%s_%s' % (ud.module.replace('/', '.'), \
- ud.host, ud.path.replace('/', '.'))
- ud.mirrortarball = 'hg_%s.tar.gz' % hgsrcname
- ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
-
- hgdir = d.getVar("HGDIR", True) or (d.getVar("DL_DIR", True) + "/hg/")
- ud.pkgdir = os.path.join(hgdir, hgsrcname)
- ud.moddir = os.path.join(ud.pkgdir, ud.module)
- ud.localfile = ud.moddir
- ud.basecmd = data.getVar("FETCHCMD_hg", d, True) or "/usr/bin/env hg"
-
- ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS", True)
-
- def need_update(self, ud, d):
- revTag = ud.parm.get('rev', 'tip')
- if revTag == "tip":
- return True
- if not os.path.exists(ud.localpath):
- return True
- return False
-
- def try_premirror(self, ud, d):
- # If we don't do this, updating an existing checkout with only premirrors
- # is not possible
- if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
- return True
- if os.path.exists(ud.moddir):
- return False
- return True
-
- def _buildhgcommand(self, ud, d, command):
- """
- Build up an hg command line based on ud
- command is "fetch", "pull", "update" or "info"
- """
-
- proto = ud.parm.get('protocol', 'http')
-
- host = ud.host
- if proto == "file":
- host = "/"
- ud.host = "localhost"
-
- if not ud.user:
- hgroot = host + ud.path
- else:
- if ud.pswd:
- hgroot = ud.user + ":" + ud.pswd + "@" + host + ud.path
- else:
- hgroot = ud.user + "@" + host + ud.path
-
- if command == "info":
- return "%s identify -i %s://%s/%s" % (ud.basecmd, proto, hgroot, ud.module)
-
- options = []
-
- # Don't specify revision for the fetch; clone the entire repo.
- # This avoids an issue if the specified revision is a tag, because
- # the tag actually exists in the specified revision + 1, so it won't
- # be available when used in any successive commands.
- if ud.revision and command != "fetch":
- options.append("-r %s" % ud.revision)
-
- if command == "fetch":
- if ud.user and ud.pswd:
- cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
- else:
- cmd = "%s clone %s %s://%s/%s %s" % (ud.basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
- elif command == "pull":
- # do not pass options list; limiting pull to rev causes the local
- # repo not to contain it and immediately following "update" command
- # will crash
- if ud.user and ud.pswd:
- cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (ud.basecmd, ud.user, ud.pswd, proto)
- else:
- cmd = "%s pull" % (ud.basecmd)
- elif command == "update":
- if ud.user and ud.pswd:
- cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options))
- else:
- cmd = "%s update -C %s" % (ud.basecmd, " ".join(options))
- else:
- raise FetchError("Invalid hg command %s" % command, ud.url)
-
- return cmd
-
- def download(self, ud, d):
- """Fetch url"""
-
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
-
- # If the checkout doesn't exist and the mirror tarball does, extract it
- if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
- bb.utils.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
-
- if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
- # Found the source; check whether we need to pull
- updatecmd = self._buildhgcommand(ud, d, "update")
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", updatecmd)
- try:
- runfetchcmd(updatecmd, d)
- except bb.fetch2.FetchError:
- # Running pull in the repo
- pullcmd = self._buildhgcommand(ud, d, "pull")
- logger.info("Pulling " + ud.url)
- # update sources there
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", pullcmd)
- bb.fetch2.check_network_access(d, pullcmd, ud.url)
- runfetchcmd(pullcmd, d)
- try:
- os.unlink(ud.fullmirror)
- except OSError as exc:
- if exc.errno != errno.ENOENT:
- raise
-
- # No source found, clone it.
- if not os.path.exists(ud.moddir):
- fetchcmd = self._buildhgcommand(ud, d, "fetch")
- logger.info("Fetch " + ud.url)
- # check out sources there
- bb.utils.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", fetchcmd)
- bb.fetch2.check_network_access(d, fetchcmd, ud.url)
- runfetchcmd(fetchcmd, d)
-
- # Even when we clone (fetch), we still need to update as hg's clone
- # won't check out the specified revision if it's on a branch
- updatecmd = self._buildhgcommand(ud, d, "update")
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", updatecmd)
- runfetchcmd(updatecmd, d)
-
- def clean(self, ud, d):
- """ Clean the hg dir """
-
- bb.utils.remove(ud.localpath, True)
- bb.utils.remove(ud.fullmirror)
- bb.utils.remove(ud.fullmirror + ".done")
-
- def supports_srcrev(self):
- return True
-
- def _latest_revision(self, ud, d, name):
- """
- Compute tip revision for the url
- """
- bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"))
- output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
- return output.strip()
-
- def _build_revision(self, ud, d, name):
- return ud.revision
-
- def _revision_key(self, ud, d, name):
- """
- Return a unique key for the url
- """
- return "hg:" + ud.moddir
-
- def build_mirror_data(self, ud, d):
- # Generate a mirror tarball if needed
- if ud.write_tarballs == "1" and not os.path.exists(ud.fullmirror):
- # it's possible that this symlink points to a read-only filesystem with PREMIRROR
- if os.path.islink(ud.fullmirror):
- os.unlink(ud.fullmirror)
-
- os.chdir(ud.pkgdir)
- logger.info("Creating tarball of hg repository")
- runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d)
- runfetchcmd("touch %s.done" % (ud.fullmirror), d)
-
- def localpath(self, ud, d):
- return ud.pkgdir
-
- def unpack(self, ud, destdir, d):
- """
- Make a local clone or export for the url
- """
-
- revflag = "-r %s" % ud.revision
- subdir = ud.parm.get("destsuffix", ud.module)
- codir = "%s/%s" % (destdir, subdir)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata != "nokeep":
- if not os.access(os.path.join(codir, '.hg'), os.R_OK):
- logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
- runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
- logger.debug(2, "Unpack: updating source in '" + codir + "'")
- os.chdir(codir)
- runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d)
- runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d)
- else:
- logger.debug(2, "Unpack: extracting source to '" + codir + "'")
- os.chdir(ud.moddir)
- runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d)
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/local.py b/yocto-poky/bitbake/lib/bb/fetch2/local.py
deleted file mode 100644
index 303a52b63..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/local.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import urllib
-import bb
-import bb.utils
-from bb import data
-from bb.fetch2 import FetchMethod, FetchError
-from bb.fetch2 import logger
-
-class Local(FetchMethod):
- def supports(self, urldata, d):
- """
- Check to see if a given url represents a local fetch.
- """
- return urldata.type in ['file']
-
- def urldata_init(self, ud, d):
- # We don't set localfile as for this fetcher the file is already local!
- ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0])
- ud.basename = os.path.basename(ud.decodedurl)
- ud.basepath = ud.decodedurl
- ud.needdonestamp = False
- return
-
- def localpath(self, urldata, d):
- """
- Return the local filename of a given url assuming a successful fetch.
- """
- return self.localpaths(urldata, d)[-1]
-
- def localpaths(self, urldata, d):
- """
- Return the local filename of a given url assuming a successful fetch.
- """
- searched = []
- path = urldata.decodedurl
- newpath = path
- if path[0] == "/":
- return [path]
- filespath = data.getVar('FILESPATH', d, True)
- if filespath:
- logger.debug(2, "Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
- newpath, hist = bb.utils.which(filespath, path, history=True)
- searched.extend(hist)
- if not newpath:
- filesdir = data.getVar('FILESDIR', d, True)
- if filesdir:
- logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
- newpath = os.path.join(filesdir, path)
- searched.append(newpath)
- if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
- # For expressions using '*', the best we can do is take the first directory in FILESPATH that exists
- newpath, hist = bb.utils.which(filespath, ".", history=True)
- searched.extend(hist)
- logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
- return searched
- if not os.path.exists(newpath):
- dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
- logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
- bb.utils.mkdirhier(os.path.dirname(dldirfile))
- searched.append(dldirfile)
- return searched
- return searched
-
- def need_update(self, ud, d):
- if ud.url.find("*") != -1:
- return False
- if os.path.exists(ud.localpath):
- return False
- return True
-
- def download(self, urldata, d):
- """Fetch urls (no-op for Local method)"""
- # no need to fetch local files, we'll deal with them in place.
- if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
- locations = []
- filespath = data.getVar('FILESPATH', d, True)
- if filespath:
- locations = filespath.split(":")
- filesdir = data.getVar('FILESDIR', d, True)
- if filesdir:
- locations.append(filesdir)
- locations.append(d.getVar("DL_DIR", True))
-
- msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations)
- raise FetchError(msg)
-
- return True
-
- def checkstatus(self, fetch, urldata, d):
- """
- Check the status of the url
- """
- if urldata.localpath.find("*") != -1:
- logger.info("URL %s looks like a glob and was therefore not checked.", urldata.url)
- return True
- if os.path.exists(urldata.localpath):
- return True
- return False
-
- def clean(self, urldata, d):
- return
-
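
localpaths() leans on bb.utils.which(..., history=True) to both find the file and record every path it tried, so the error message in download() can list them. A minimal standalone analogue of that search (the paths in the usage line are illustrative):

import os

def which_with_history(search_path, filename):
    # Walk a colon-separated search path, remembering every candidate tried.
    searched = []
    for directory in search_path.split(":"):
        candidate = os.path.join(directory, filename)
        searched.append(candidate)
        if os.path.exists(candidate):
            return candidate, searched
    return None, searched

found, tried = which_with_history("/tmp:/var/tmp", "defconfig")
print(found, tried)
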
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/npm.py b/yocto-poky/bitbake/lib/bb/fetch2/npm.py
deleted file mode 100644
index e8d9b1109..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/npm.py
+++ /dev/null
@@ -1,284 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' NPM implementation
-
-The NPM fetcher is used to retrieve files from the npmjs repository
-
-Usage in the recipe:
-
- SRC_URI = "npm://registry.npmjs.org/;name=${PN};version=${PV}"
- Supported SRC_URI options are:
-
- - name
- - version
-
- npm://registry.npmjs.org/${PN}/-/${PN}-${PV}.tgz would become npm://registry.npmjs.org;name=${PN};version=${PV}
- The fetcher triggers off the existence of ud.localpath. If that exists and has the ".done" stamp, it is assumed the fetch is good/done.
-
-"""
-
-import os
-import sys
-import urllib
-import json
-import subprocess
-import signal
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import ChecksumError
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-from bb.fetch2 import UnpackError
-from bb.fetch2 import ParameterError
-from distutils import spawn
-
-def subprocess_setup():
- # Python installs a SIGPIPE handler by default. This is usually not what
- # non-Python subprocesses expect.
- # SIGPIPE errors are known issues with gzip/bash
- signal.signal(signal.SIGPIPE, signal.SIG_DFL)
-
-class Npm(FetchMethod):
-
- """Class to fetch urls via 'npm'"""
- def init(self, d):
- pass
-
- def supports(self, ud, d):
- """
- Check to see if a given url can be fetched with npm
- """
- return ud.type in ['npm']
-
- def debug(self, msg):
- logger.debug(1, "NpmFetch: %s", msg)
-
- def clean(self, ud, d):
- logger.debug(2, "Calling cleanup %s" % ud.pkgname)
- bb.utils.remove(ud.localpath, False)
- bb.utils.remove(ud.pkgdatadir, True)
- bb.utils.remove(ud.fullmirror, False)
-
- def urldata_init(self, ud, d):
- """
- Initialize NPM-specific variables within the url data
- """
- if 'downloadfilename' in ud.parm:
- ud.basename = ud.parm['downloadfilename']
- else:
- ud.basename = os.path.basename(ud.path)
-
- # can't call it ud.name, otherwise the fetcher base class will start doing sha1 stuff
- # TODO: find a way to get an sha1/sha256 manifest of pkg & all deps
- ud.pkgname = ud.parm.get("name", None)
- if not ud.pkgname:
- raise ParameterError("NPM fetcher requires a name parameter", ud.url)
- ud.version = ud.parm.get("version", None)
- if not ud.version:
- raise ParameterError("NPM fetcher requires a version parameter", ud.url)
- ud.bbnpmmanifest = "%s-%s.deps.json" % (ud.pkgname, ud.version)
- ud.registry = "http://%s" % (ud.url.replace('npm://', '', 1).split(';'))[0]
- prefixdir = "npm/%s" % ud.pkgname
- ud.pkgdatadir = d.expand("${DL_DIR}/%s" % prefixdir)
- if not os.path.exists(ud.pkgdatadir):
- bb.utils.mkdirhier(ud.pkgdatadir)
- ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest)
-
- self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
- self.basecmd += " --directory-prefix=%s " % prefixdir
-
- ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0")
- ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version)
- ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
-
- def need_update(self, ud, d):
- if os.path.exists(ud.localpath):
- return False
- return True
-
- def _runwget(self, ud, d, command, quiet):
- logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
- bb.fetch2.check_network_access(d, command)
- runfetchcmd(command, d, quiet)
-
- def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
- file = data[pkg]['tgz']
- logger.debug(2, "file to extract is %s" % file)
- if file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
- cmd = 'tar xz --strip 1 --no-same-owner --warning=no-unknown-keyword -f %s/%s' % (dldir, file)
- else:
- bb.fatal("NPM package %s downloaded is not a tarball!" % file)
-
- # Change to subdir before executing command
- save_cwd = os.getcwd()
- if not os.path.exists(destdir):
- os.makedirs(destdir)
- os.chdir(destdir)
- path = d.getVar('PATH', True)
- if path:
- cmd = "PATH=\"%s\" %s" % (path, cmd)
- bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
- ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
- os.chdir(save_cwd)
-
- if ret != 0:
- raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)
-
- if 'deps' not in data[pkg]:
- return
- for dep in data[pkg]['deps']:
- self._unpackdep(ud, dep, data[pkg]['deps'], "%s/node_modules/%s" % (destdir, dep), dldir, d)
-
-
- def unpack(self, ud, destdir, d):
- dldir = d.getVar("DL_DIR", True)
- depdumpfile = "%s-%s.deps.json" % (ud.pkgname, ud.version)
- with open("%s/npm/%s" % (dldir, depdumpfile)) as datafile:
- workobj = json.load(datafile)
- dldir = "%s/%s" % (os.path.dirname(ud.localpath), ud.pkgname)
-
- self._unpackdep(ud, ud.pkgname, workobj, "%s/npmpkg" % destdir, dldir, d)
-
- def _parse_view(self, output):
- '''
- Parse the output of npm view --json; the last JSON result
- is assumed to be the one that we're interested in.
- '''
- pdata = None
- outdeps = {}
- datalines = []
- bracelevel = 0
- for line in output.splitlines():
- if bracelevel:
- datalines.append(line)
- elif '{' in line:
- datalines = []
- datalines.append(line)
- bracelevel = bracelevel + line.count('{') - line.count('}')
- if datalines:
- pdata = json.loads('\n'.join(datalines))
- return pdata
-
- def _getdependencies(self, pkg, data, version, d, ud, optional=False):
- pkgfullname = pkg
- if version != '*' and '/' not in version:
- pkgfullname += "@'%s'" % version
- logger.debug(2, "Calling getdeps on %s" % pkg)
- fetchcmd = "npm view %s --json --registry %s" % (pkgfullname, ud.registry)
- output = runfetchcmd(fetchcmd, d, True)
- pdata = self._parse_view(output)
- if not pdata:
- raise FetchError("The command '%s' returned no output" % fetchcmd)
- if optional:
- pkg_os = pdata.get('os', None)
- if pkg_os:
- if not isinstance(pkg_os, list):
- pkg_os = [pkg_os]
- if 'linux' not in pkg_os or '!linux' in pkg_os:
- logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
- return
- #logger.debug(2, "Output URL is %s - %s - %s" % (ud.basepath, ud.basename, ud.localfile))
- outputurl = pdata['dist']['tarball']
- data[pkg] = {}
- data[pkg]['tgz'] = os.path.basename(outputurl)
- self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False)
-
- dependencies = pdata.get('dependencies', {})
- optionalDependencies = pdata.get('optionalDependencies', {})
- depsfound = {}
- optdepsfound = {}
- data[pkg]['deps'] = {}
- for dep in dependencies:
- if dep in optionalDependencies:
- optdepsfound[dep] = dependencies[dep]
- else:
- depsfound[dep] = dependencies[dep]
- for dep, version in optdepsfound.iteritems():
- self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True)
- for dep, version in depsfound.iteritems():
- self._getdependencies(dep, data[pkg]['deps'], version, d, ud)
-
- def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest):
- logger.debug(2, "NPM shrinkwrap file is %s" % data)
- outputurl = "invalid"
- if ('resolved' not in data) or (not data['resolved'].startswith('http')):
- # will be the case for ${PN}
- fetchcmd = "npm view %s@%s dist.tarball --registry %s" % (pkg, version, ud.registry)
- logger.debug(2, "Found this matching URL: %s" % str(fetchcmd))
- outputurl = runfetchcmd(fetchcmd, d, True)
- else:
- outputurl = data['resolved']
- self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False)
- manifest[pkg] = {}
- manifest[pkg]['tgz'] = os.path.basename(outputurl).rstrip()
- manifest[pkg]['deps'] = {}
-
- if pkg in lockdown:
- sha1_expected = lockdown[pkg][version]
- sha1_data = bb.utils.sha1_file("npm/%s/%s" % (ud.pkgname, manifest[pkg]['tgz']))
- if sha1_expected != sha1_data:
- msg = "\nFile: '%s' has %s checksum %s when %s was expected" % (manifest[pkg]['tgz'], 'sha1', sha1_data, sha1_expected)
- raise ChecksumError('Checksum mismatch!%s' % msg)
- else:
- logger.debug(2, "No lockdown data for %s@%s" % (pkg, version))
-
- if 'dependencies' in data:
- for obj in data['dependencies']:
- logger.debug(2, "Found dep is %s" % str(obj))
- self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps'])
-
- def download(self, ud, d):
- """Fetch url"""
- jsondepobj = {}
- shrinkobj = {}
- lockdown = {}
-
- if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
- dest = d.getVar("DL_DIR", True)
- bb.utils.mkdirhier(dest)
- save_cwd = os.getcwd()
- os.chdir(dest)
- runfetchcmd("tar -xJf %s" % (ud.fullmirror), d)
- os.chdir(save_cwd)
- return
-
- shwrf = d.getVar('NPM_SHRINKWRAP', True)
- logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
- try:
- with open(shwrf) as datafile:
- shrinkobj = json.load(datafile)
- except:
- logger.warn('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
- lckdf = d.getVar('NPM_LOCKDOWN', True)
- logger.debug(2, "NPM lockdown file is %s" % lckdf)
- try:
- with open(lckdf) as datafile:
- lockdown = json.load(datafile)
- except:
- logger.warn('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)
-
- if ('name' not in shrinkobj):
- self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
- else:
- self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj)
-
- with open(ud.localpath, 'w') as outfile:
- json.dump(jsondepobj, outfile)
-
- def build_mirror_data(self, ud, d):
- # Generate a mirror tarball if needed
- if ud.write_tarballs and not os.path.exists(ud.fullmirror):
- # it's possible that this symlink points to a read-only filesystem with PREMIRROR
- if os.path.islink(ud.fullmirror):
- os.unlink(ud.fullmirror)
-
- save_cwd = os.getcwd()
- os.chdir(d.getVar("DL_DIR", True))
- logger.info("Creating tarball of npm data")
- runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d)
- runfetchcmd("touch %s.done" % (ud.fullmirror), d)
- os.chdir(save_cwd)
-
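
A subtle piece here is _parse_view(): "npm view --json" can emit several JSON documents back to back, and only the last one matters. A standalone variant of the brace-counting approach, which parses each completed block and keeps the last:

import json

def parse_last_json(output):
    # Track '{'/'}' nesting per line; whenever a block closes, parse it,
    # keeping only the last successfully parsed object.
    pdata = None
    datalines = []
    bracelevel = 0
    for line in output.splitlines():
        if bracelevel:
            datalines.append(line)
        elif '{' in line:
            datalines = [line]
        bracelevel = bracelevel + line.count('{') - line.count('}')
        if datalines and bracelevel == 0:
            pdata = json.loads('\n'.join(datalines))
            datalines = []
    return pdata

print(parse_last_json('{"a": 1}\n{"b":\n 2}'))   # -> {'b': 2}
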
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/osc.py b/yocto-poky/bitbake/lib/bb/fetch2/osc.py
deleted file mode 100644
index d051dfdaf..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/osc.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-Bitbake "Fetch" implementation for osc (Opensuse build service client).
-Based on the svn "Fetch" implementation.
-
-"""
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import MissingParameterError
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Osc(FetchMethod):
- """Class to fetch a module or modules from Opensuse build server
- repositories."""
-
- def supports(self, ud, d):
- """
- Check to see if a given url can be fetched with osc.
- """
- return ud.type in ['osc']
-
- def urldata_init(self, ud, d):
- if not "module" in ud.parm:
- raise MissingParameterError('module', ud.url)
-
- ud.module = ud.parm["module"]
-
- # Create paths to osc checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(d.getVar('OSCDIR', True), ud.host)
- ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
-
- if 'rev' in ud.parm:
- ud.revision = ud.parm['rev']
- else:
- pv = d.getVar("PV", False)
- rev = bb.fetch2.srcrev_internal_helper(ud, d)
- if rev and rev != True:
- ud.revision = rev
- else:
- ud.revision = ""
-
- ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
-
- def _buildosccommand(self, ud, d, command):
- """
- Build up an osc command line based on ud
- command is "fetch" or "update"
- """
-
- basecmd = data.expand('${FETCHCMD_osc}', d)
-
- proto = ud.parm.get('protocol', 'osc')
-
- options = []
-
- config = "-c %s" % self.generate_config(ud, d)
-
- if ud.revision:
- options.append("-r %s" % ud.revision)
-
- coroot = self._strip_leading_slashes(ud.path)
-
- if command == "fetch":
- osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
- elif command == "update":
- osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
- else:
- raise FetchError("Invalid osc command %s" % command, ud.url)
-
- return osccmd
-
- def download(self, ud, d):
- """
- Fetch url
- """
-
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
-
- if os.access(os.path.join(d.getVar('OSCDIR', True), ud.path, ud.module), os.R_OK):
- oscupdatecmd = self._buildosccommand(ud, d, "update")
- logger.info("Update "+ ud.url)
- # update sources there
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", oscupdatecmd)
- bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
- runfetchcmd(oscupdatecmd, d)
- else:
- oscfetchcmd = self._buildosccommand(ud, d, "fetch")
- logger.info("Fetch " + ud.url)
- # check out sources there
- bb.utils.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", oscfetchcmd)
- bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
- runfetchcmd(oscfetchcmd, d)
-
- os.chdir(ud.pkgdir + ud.path)
- # tar them up to a defined filename
- runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath])
-
- def supports_srcrev(self):
- return False
-
- def generate_config(self, ud, d):
- """
- Generate a .oscrc to be used for this run.
- """
-
- config_path = os.path.join(d.getVar('OSCDIR', True), "oscrc")
- if (os.path.exists(config_path)):
- os.remove(config_path)
-
- f = open(config_path, 'w')
- f.write("[general]\n")
- f.write("apisrv = %s\n" % ud.host)
- f.write("scheme = http\n")
- f.write("su-wrapper = su -c\n")
- f.write("build-root = %s\n" % d.getVar('WORKDIR', True))
- f.write("urllist = %s\n" % d.getVar("OSCURLLIST", True))
- f.write("extra-pkgs = gzip\n")
- f.write("\n")
- f.write("[%s]\n" % ud.host)
- f.write("user = %s\n" % ud.parm["user"])
- f.write("pass = %s\n" % ud.parm["pswd"])
- f.close()
-
- return config_path
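
generate_config() writes a throwaway .oscrc so osc can run non-interactively with credentials from the URL. A trimmed sketch of the text it produces (host/user values are illustrative, and several of the real fields are omitted):

def oscrc_text(host, user, password, build_root):
    return ("[general]\n"
            "apisrv = %s\n"
            "scheme = http\n"
            "build-root = %s\n"
            "\n"
            "[%s]\n"
            "user = %s\n"
            "pass = %s\n") % (host, build_root, host, user, password)

print(oscrc_text("api.example.com", "builder", "secret", "/tmp/build"))
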
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/perforce.py b/yocto-poky/bitbake/lib/bb/fetch2/perforce.py
deleted file mode 100644
index 3a10c7ca3..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/perforce.py
+++ /dev/null
@@ -1,187 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-from future_builtins import zip
-import os
-import subprocess
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import logger
-from bb.fetch2 import runfetchcmd
-
-class Perforce(FetchMethod):
- def supports(self, ud, d):
- return ud.type in ['p4']
-
- def doparse(url, d):
- parm = {}
- path = url.split("://")[1]
- delim = path.find("@")
- if delim != -1:
- (user, pswd, host, port) = path.split('@')[0].split(":")
- path = path.split('@')[1]
- else:
- (host, port) = d.getVar('P4PORT', False).split(':')
- user = ""
- pswd = ""
-
- if path.find(";") != -1:
- keys=[]
- values=[]
- plist = path.split(';')
- for item in plist:
- if item.count('='):
- (key, value) = item.split('=')
- keys.append(key)
- values.append(value)
-
- parm = dict(zip(keys, values))
- path = "//" + path.split(';')[0]
- host += ":%s" % (port)
- parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
-
- return host, path, user, pswd, parm
- doparse = staticmethod(doparse)
-
- def getcset(d, depot, host, user, pswd, parm):
- p4opt = ""
- if "cset" in parm:
- return parm["cset"];
- if user:
- p4opt += " -u %s" % (user)
- if pswd:
- p4opt += " -P %s" % (pswd)
- if host:
- p4opt += " -p %s" % (host)
-
- p4date = d.getVar("P4DATE", True)
- if "revision" in parm:
- depot += "#%s" % (parm["revision"])
- elif "label" in parm:
- depot += "@%s" % (parm["label"])
- elif p4date:
- depot += "@%s" % (p4date)
-
- p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
- logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
- p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
- cset = p4file.strip()
- logger.debug(1, "READ %s", cset)
- if not cset:
- return -1
-
- return cset.split(' ')[1]
- getcset = staticmethod(getcset)
-
- def urldata_init(self, ud, d):
- (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)
-
- base_path = path.replace('/...', '')
- base_path = self._strip_leading_slashes(base_path)
-
- if "label" in parm:
- version = parm["label"]
- else:
- version = Perforce.getcset(d, path, host, user, pswd, parm)
-
- ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base_path.replace('/', '.'), version), d)
-
- def download(self, ud, d):
- """
- Fetch urls
- """
-
- (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)
-
- if depot.find('/...') != -1:
- path = depot[:depot.find('/...')]
- else:
- path = depot[:depot.rfind('/')]
-
- module = parm.get('module', os.path.basename(path))
-
- # Get the p4 command
- p4opt = ""
- if user:
- p4opt += " -u %s" % (user)
-
- if pswd:
- p4opt += " -P %s" % (pswd)
-
- if host:
- p4opt += " -p %s" % (host)
-
- p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
-
- # create temp directory
- logger.debug(2, "Fetch: creating temporary directory")
- bb.utils.mkdirhier(d.expand('${WORKDIR}'))
- mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
- tmpfile, errors = bb.process.run(mktemp)
- tmpfile = tmpfile.strip()
- if not tmpfile:
- raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)
-
- if "label" in parm:
- depot = "%s@%s" % (depot, parm["label"])
- else:
- cset = Perforce.getcset(d, depot, host, user, pswd, parm)
- depot = "%s@%s" % (depot, cset)
-
- os.chdir(tmpfile)
- logger.info("Fetch " + ud.url)
- logger.info("%s%s files %s", p4cmd, p4opt, depot)
- p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
- p4file = [f.rstrip() for f in p4file.splitlines()]
-
- if not p4file:
- raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)
-
- count = 0
-
- for file in p4file:
- list = file.split()
-
- if list[2] == "delete":
- continue
-
- dest = list[0][len(path)+1:]
- where = dest.find("#")
-
- subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True)
- count = count + 1
-
- if count == 0:
- logger.error("Fetch: No files gathered from the P4 fetch")
- raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)
-
- runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
- # cleanup
- bb.utils.prunedir(tmpfile)
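
doparse() packs a lot into the URL: an optional user:password:host:port block before "@", then the depot path, then ";key=value" parameters. A simplified standalone version of just that split (the default host:port is illustrative; the real code reads P4PORT):

def parse_p4_url(url, default_p4port="perforce.example.com:1666"):
    path = url.split("://", 1)[1]
    if "@" in path:
        auth, path = path.split("@", 1)
        user, pswd, host, port = auth.split(":")
    else:
        host, port = default_p4port.split(":")
        user = pswd = ""
    # ";key=value" parameters (module=, label=, revision=...) are dropped here.
    return user, pswd, "%s:%s" % (host, port), "//" + path.split(";")[0]

print(parse_p4_url("p4://u:pw:p4server:1666@depot/project/..."))
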
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/repo.py b/yocto-poky/bitbake/lib/bb/fetch2/repo.py
deleted file mode 100644
index 21678eb7d..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/repo.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake "Fetch" repo (git) implementation
-
-"""
-
-# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
-#
-# Based on git.py which is:
-#Copyright (C) 2005 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Repo(FetchMethod):
- """Class to fetch a module or modules from repo (git) repositories"""
- def supports(self, ud, d):
- """
- Check to see if a given url can be fetched with repo.
- """
- return ud.type in ["repo"]
-
- def urldata_init(self, ud, d):
- """
- We don"t care about the git rev of the manifests repository, but
- we do care about the manifest to use. The default is "default".
- We also care about the branch or tag to be used. The default is
- "master".
- """
-
- ud.proto = ud.parm.get('protocol', 'git')
- ud.branch = ud.parm.get('branch', 'master')
- ud.manifest = ud.parm.get('manifest', 'default.xml')
- if not ud.manifest.endswith('.xml'):
- ud.manifest += '.xml'
-
- ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
-
- def download(self, ud, d):
- """Fetch url"""
-
- if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
- logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
- return
-
- gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
- repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
- codir = os.path.join(repodir, gitsrcname, ud.manifest)
-
- if ud.user:
- username = ud.user + "@"
- else:
- username = ""
-
- bb.utils.mkdirhier(os.path.join(codir, "repo"))
- os.chdir(os.path.join(codir, "repo"))
- if not os.path.exists(os.path.join(codir, "repo", ".repo")):
- bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
- runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
-
- bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
- runfetchcmd("repo sync", d)
- os.chdir(codir)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.repo' --exclude '.git'"
-
- # Create a cache
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
-
- def supports_srcrev(self):
- return False
-
- def _build_revision(self, ud, d):
- return ud.manifest
-
- def _want_sortable_revision(self, ud, d):
- return False
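
The command assembled in download() is easy to see in isolation. A sketch of the "repo init" string for the defaults described in urldata_init() (host/path values are illustrative):

def repo_init_cmd(proto, user, host, path,
                  manifest="default.xml", branch="master"):
    username = user + "@" if user else ""
    return "repo init -m %s -b %s -u %s://%s%s%s" % (
        manifest, branch, proto, username, host, path)

print(repo_init_cmd("git", "", "android.example.com", "/platform/manifest"))
# -> repo init -m default.xml -b master -u git://android.example.com/platform/manifest
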
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/sftp.py b/yocto-poky/bitbake/lib/bb/fetch2/sftp.py
deleted file mode 100644
index cb2f753a8..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/sftp.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake SFTP Fetch implementation
-
-Class for fetching files via SFTP. It tries to adhere to the (now
-expired) IETF Internet Draft for "Uniform Resource Identifier (URI)
-Scheme for Secure File Transfer Protocol (SFTP) and Secure Shell
-(SSH)" (SECSH URI).
-
-It uses SFTP (so as to adhere to the SECSH URI specification). It only
-supports key-based authentication, not passwords. This class, unlike
-the SSH fetcher, does not support fetching a directory tree from the
-remote.
-
- http://tools.ietf.org/html/draft-ietf-secsh-scp-sftp-ssh-uri-04
- https://www.iana.org/assignments/uri-schemes/prov/sftp
- https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13
-
-Please note that '/' is used as the host/path separator, and not ":"
-as you may be used to from the scp/sftp commands. You can use a
-~ (tilde) to specify a path relative to your home directory.
-(The /~user/ syntax, for specifying a path relative to another
-user's home directory, is not supported.) Note that the tilde must
-still follow the host/path separator ("/"). See the examples below.
-
-Example SRC_URIs:
-
-SRC_URI = "sftp://host.example.com/dir/path.file.txt"
-
-A path relative to your home directory.
-
-SRC_URI = "sftp://host.example.com/~/dir/path.file.txt"
-
-You can also specify a username (specifying a password in the
-URI is not supported, use SSH keys to authenticate):
-
-SRC_URI = "sftp://user@host.example.com/dir/path.file.txt"
-
-"""
-
-# Copyright (C) 2013, Olof Johansson <olof.johansson@axis.com>
-#
-# Based in part on bb.fetch2.wget:
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import bb
-import urllib
-import commands
-from bb import data
-from bb.fetch2 import URI
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import runfetchcmd
-
-
-class SFTP(FetchMethod):
- """Class to fetch urls via 'sftp'"""
-
- def supports(self, ud, d):
- """
- Check to see if a given url can be fetched with sftp.
- """
- return ud.type in ['sftp']
-
- def recommends_checksum(self, urldata):
- return True
-
- def urldata_init(self, ud, d):
- if 'protocol' in ud.parm and ud.parm['protocol'] == 'git':
- raise bb.fetch2.ParameterError(
- "Invalid protocol - if you wish to fetch from a " +
- "git repository using ssh, you need to use the " +
- "git:// prefix with protocol=ssh", ud.url)
-
- if 'downloadfilename' in ud.parm:
- ud.basename = ud.parm['downloadfilename']
- else:
- ud.basename = os.path.basename(ud.path)
-
- ud.localfile = data.expand(urllib.unquote(ud.basename), d)
-
- def download(self, ud, d):
- """Fetch urls"""
-
- urlo = URI(ud.url)
- basecmd = 'sftp -oBatchMode=yes'
- port = ''
- if urlo.port:
- port = '-P %d' % urlo.port
- urlo.port = None
-
- dldir = data.getVar('DL_DIR', d, True)
- lpath = os.path.join(dldir, ud.localfile)
-
- user = ''
- if urlo.userinfo:
- user = urlo.userinfo + '@'
-
- path = urlo.path
-
- # Support URIs relative to the user's home directory, with
- # the tilde syntax. (E.g. <sftp://example.com/~/foo.diff>).
- if path[:3] == '/~/':
- path = path[3:]
-
- remote = '%s%s:%s' % (user, urlo.hostname, path)
-
- cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote),
- commands.mkarg(lpath))
-
- bb.fetch2.check_network_access(d, cmd, ud.url)
- runfetchcmd(cmd, d)
- return True
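
A rough standalone sketch of the command line download() builds, with shlex.quote standing in for the Python 2 commands.mkarg used above (argument values are illustrative):

import shlex

def sftp_cmd(user, host, path, lpath, port=None):
    portarg = "-P %d" % port if port else ""
    if path.startswith("/~/"):        # tilde form: path relative to $HOME
        path = path[3:]
    remote = "%s%s:%s" % (user + "@" if user else "", host, path)
    return "sftp -oBatchMode=yes %s %s %s" % (
        portarg, shlex.quote(remote), shlex.quote(lpath))

print(sftp_cmd("user", "host.example.com", "/~/dir/path.file.txt", "/dl/path.file.txt"))
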
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/ssh.py b/yocto-poky/bitbake/lib/bb/fetch2/ssh.py
deleted file mode 100644
index 635578a71..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/ssh.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-'''
-BitBake 'Fetch' implementations
-
-This implementation is for Secure Shell (SSH), and attempts to comply with the
-IETF secsh internet draft:
- http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
-
- Currently does not support the sftp parameters, as this uses scp.
- Also does not support the 'fingerprint' connection parameter.
-
- Please note that '/' is used as the host/path separator, not ':' as you
- may be used to; also '~' can be used to specify the user's HOME, but again
- only after '/'.
-
- Example SRC_URI:
- SRC_URI = "ssh://user@host.example.com/dir/path/file.txt"
- SRC_URI = "ssh://user@host.example.com/~/file.txt"
-'''
-
-# Copyright (C) 2006 OpenedHand Ltd.
-#
-#
-# Based in part on svk.py:
-# Copyright (C) 2006 Holger Hans Peter Freyther
-# Based on svn.py:
-# Copyright (C) 2003, 2004 Chris Larson
-# Based on functions from the base bb module:
-# Copyright 2003 Holger Schurig
-#
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import re, os
-import bb.fetch2
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import logger
-from bb.fetch2 import runfetchcmd
-
-
-__pattern__ = re.compile(r'''
- \s* # Skip leading whitespace
- ssh:// # scheme
- ( # Optional username/password block
- (?P<user>\S+) # username
- (:(?P<pass>\S+))? # colon followed by the password (optional)
- )?
- (?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
- @
- (?P<host>\S+?) # non-greedy match of the host
- (:(?P<port>[0-9]+))? # colon followed by the port (optional)
- /
- (?P<path>[^;]+) # path on the remote system, may be absolute or relative,
- # and may include the use of '~' to reference the remote home
- # directory
- (?P<sparam>(;[^;]+)*)? # parameters block (optional)
- $
-''', re.VERBOSE)
-
-class SSH(FetchMethod):
- '''Class to fetch a module or modules via Secure Shell'''
-
- def supports(self, urldata, d):
- return __pattern__.match(urldata.url) is not None
-
- def supports_checksum(self, urldata):
- return False
-
- def urldata_init(self, urldata, d):
- if 'protocol' in urldata.parm and urldata.parm['protocol'] == 'git':
- raise bb.fetch2.ParameterError(
- "Invalid protocol - if you wish to fetch from a git " +
- "repository using ssh, you need to use " +
- "git:// prefix with protocol=ssh", urldata.url)
- m = __pattern__.match(urldata.url)
- path = m.group('path')
- host = m.group('host')
- urldata.localpath = os.path.join(d.getVar('DL_DIR', True),
- os.path.basename(os.path.normpath(path)))
-
- def download(self, urldata, d):
- dldir = d.getVar('DL_DIR', True)
-
- m = __pattern__.match(urldata.url)
- path = m.group('path')
- host = m.group('host')
- port = m.group('port')
- user = m.group('user')
- password = m.group('pass')
-
- if port:
- portarg = '-P %s' % port
- else:
- portarg = ''
-
- if user:
- fr = user
- if password:
- fr += ':%s' % password
- fr += '@%s' % host
- else:
- fr = host
- fr += ':%s' % path
-
-
- import commands
- cmd = 'scp -B -r %s %s %s/' % (
- portarg,
- commands.mkarg(fr),
- commands.mkarg(dldir)
- )
-
- bb.fetch2.check_network_access(d, cmd, urldata.url)
-
- runfetchcmd(cmd, d)
-
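
The documented SRC_URI forms can be checked against a much smaller pattern than __pattern__; this cut-down regex (user/host/port/path groups only, no connection parameters) is purely illustrative:

import re

simple = re.compile(
    r'ssh://(?:(?P<user>[^@/;]+)@)?(?P<host>[^:/;]+)'
    r'(?::(?P<port>\d+))?/(?P<path>[^;]+)$')

m = simple.match("ssh://user@host.example.com/~/file.txt")
print(m.group("user"), m.group("host"), m.group("path"))
# -> user host.example.com ~/file.txt
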
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/svn.py b/yocto-poky/bitbake/lib/bb/fetch2/svn.py
deleted file mode 100644
index 8a291935c..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/svn.py
+++ /dev/null
@@ -1,197 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementation for svn.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2004 Marcin Juszkiewicz
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import sys
-import logging
-import bb
-import re
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import MissingParameterError
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Svn(FetchMethod):
- """Class to fetch a module or modules from svn repositories"""
- def supports(self, ud, d):
- """
- Check to see if a given url can be fetched with svn.
- """
- return ud.type in ['svn']
-
- def urldata_init(self, ud, d):
- """
- Initialize svn-specific variables within the url data
- """
- if not "module" in ud.parm:
- raise MissingParameterError('module', ud.url)
-
- ud.basecmd = d.getVar('FETCHCMD_svn', True)
-
- ud.module = ud.parm["module"]
-
- if not "path_spec" in ud.parm:
- ud.path_spec = ud.module
- else:
- ud.path_spec = ud.parm["path_spec"]
-
- # Create paths to svn checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
- ud.moddir = os.path.join(ud.pkgdir, ud.module)
-
- ud.setup_revisons(d)
-
- if 'rev' in ud.parm:
- ud.revision = ud.parm['rev']
-
- ud.localfile = data.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
-
- def _buildsvncommand(self, ud, d, command):
- """
- Build up an svn command line based on ud
- command is "fetch", "update", "info" or "log1"
- """
-
- proto = ud.parm.get('protocol', 'svn')
-
- svn_rsh = None
- if proto == "svn+ssh" and "rsh" in ud.parm:
- svn_rsh = ud.parm["rsh"]
-
- svnroot = ud.host + ud.path
-
- options = []
-
- options.append("--no-auth-cache")
-
- if ud.user:
- options.append("--username %s" % ud.user)
-
- if ud.pswd:
- options.append("--password %s" % ud.pswd)
-
- if command == "info":
- svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
- elif command == "log1":
- svncmd = "%s log --limit 1 %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
- else:
- suffix = ""
- if ud.revision:
- options.append("-r %s" % ud.revision)
- suffix = "@%s" % (ud.revision)
-
- if command == "fetch":
- transportuser = ud.parm.get("transportuser", "")
- svncmd = "%s co %s %s://%s%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, transportuser, svnroot, ud.module, suffix, ud.path_spec)
- elif command == "update":
- svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
- else:
- raise FetchError("Invalid svn command %s" % command, ud.url)
-
- if svn_rsh:
- svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
-
- return svncmd
-
- def download(self, ud, d):
- """Fetch url"""
-
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
-
- if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
- svnupdatecmd = self._buildsvncommand(ud, d, "update")
- logger.info("Update " + ud.url)
- # update sources there
- os.chdir(ud.moddir)
- # We need to attempt to run svn upgrade first in case it's an older working copy format
- try:
- runfetchcmd(ud.basecmd + " upgrade", d)
- except FetchError:
- pass
- logger.debug(1, "Running %s", svnupdatecmd)
- bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
- runfetchcmd(svnupdatecmd, d)
- else:
- svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
- logger.info("Fetch " + ud.url)
- # check out sources there
- bb.utils.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", svnfetchcmd)
- bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
- runfetchcmd(svnfetchcmd, d)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.svn'"
-
- os.chdir(ud.pkgdir)
- # tar them up to a defined filename
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d, cleanup = [ud.localpath])
-
- def clean(self, ud, d):
- """ Clean SVN specific files and dirs """
-
- bb.utils.remove(ud.localpath)
- bb.utils.remove(ud.moddir, True)
-
-
- def supports_srcrev(self):
- return True
-
- def _revision_key(self, ud, d, name):
- """
- Return a unique key for the url
- """
- return "svn:" + ud.moddir
-
- def _latest_revision(self, ud, d, name):
- """
- Return the latest upstream revision number
- """
- bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "log1"))
-
- output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "log1"), d, True)
-
- # skip the first line, as per output of svn log
- # then we expect the revision on the 2nd line
- revision = re.search('^r([0-9]*)', output.splitlines()[1]).group(1)
-
- return revision
-
- def sortable_revision(self, ud, d, name):
- """
- Return a sortable revision number which in our case is the revision number
- """
-
- return False, self._build_revision(ud, d)
-
- def _build_revision(self, ud, d):
- return ud.revision
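
_latest_revision() scrapes the revision out of "svn log --limit 1" output, whose second line starts with "rNNNN". A standalone sketch with canned output:

import re

sample = ("------------------------------------------------------------\n"
          "r12345 | committer | 2015-01-01 00:00:00 +0000 | 1 line\n")

# Skip the separator line, then take the leading rNNNN token.
revision = re.search(r'^r([0-9]*)', sample.splitlines()[1]).group(1)
print(revision)   # -> 12345
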
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/wget.py b/yocto-poky/bitbake/lib/bb/fetch2/wget.py
deleted file mode 100644
index 8bc9e93ca..000000000
--- a/yocto-poky/bitbake/lib/bb/fetch2/wget.py
+++ /dev/null
@@ -1,565 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import re
-import tempfile
-import subprocess
-import os
-import logging
-import bb
-import urllib
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import logger
-from bb.fetch2 import runfetchcmd
-from bb.utils import export_proxies
-from bs4 import BeautifulSoup
-from bs4 import SoupStrainer
-
-class Wget(FetchMethod):
- """Class to fetch urls via 'wget'"""
- def supports(self, ud, d):
- """
- Check to see if a given url can be fetched with wget.
- """
- return ud.type in ['http', 'https', 'ftp']
-
- def recommends_checksum(self, urldata):
- return True
-
- def urldata_init(self, ud, d):
- if 'protocol' in ud.parm:
- if ud.parm['protocol'] == 'git':
- raise bb.fetch2.ParameterError("Invalid protocol - if you wish to fetch from a git repository using http, you need to instead use the git:// prefix with protocol=http", ud.url)
-
- if 'downloadfilename' in ud.parm:
- ud.basename = ud.parm['downloadfilename']
- else:
- ud.basename = os.path.basename(ud.path)
-
- ud.localfile = data.expand(urllib.unquote(ud.basename), d)
- if not ud.localfile:
- ud.localfile = data.expand(urllib.unquote(ud.host + ud.path).replace("/", "."), d)
-
- self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"
-
- def _runwget(self, ud, d, command, quiet):
-
- logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
- bb.fetch2.check_network_access(d, command)
- runfetchcmd(command, d, quiet)
-
- def download(self, ud, d):
- """Fetch urls"""
-
- fetchcmd = self.basecmd
-
- if 'downloadfilename' in ud.parm:
- dldir = d.getVar("DL_DIR", True)
- bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
- fetchcmd += " -O " + dldir + os.sep + ud.localfile
-
- uri = ud.url.split(";")[0]
- if os.path.exists(ud.localpath):
- # The file exists, but we didn't complete it; try to continue the download
- fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
- else:
- fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)
-
- self._runwget(ud, d, fetchcmd, False)
-
- # Sanity check since wget can pretend it succeeded when it didn't
- # Also, this used to happen if sourceforge sent us to the mirror page
- if not os.path.exists(ud.localpath):
- raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
-
- if os.path.getsize(ud.localpath) == 0:
- os.remove(ud.localpath)
- raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
-
- return True
-
- def checkstatus(self, fetch, ud, d):
- import urllib2, socket, httplib
- from urllib import addinfourl
- from bb.fetch2 import FetchConnectionCache
-
- class HTTPConnectionCache(httplib.HTTPConnection):
- if fetch.connection_cache:
- def connect(self):
- """Connect to the host and port specified in __init__."""
-
- sock = fetch.connection_cache.get_connection(self.host, self.port)
- if sock:
- self.sock = sock
- else:
- self.sock = socket.create_connection((self.host, self.port),
- self.timeout, self.source_address)
- fetch.connection_cache.add_connection(self.host, self.port, self.sock)
-
- if self._tunnel_host:
- self._tunnel()
-
- class CacheHTTPHandler(urllib2.HTTPHandler):
- def http_open(self, req):
- return self.do_open(HTTPConnectionCache, req)
-
- def do_open(self, http_class, req):
- """Return an addinfourl object for the request, using http_class.
-
- http_class must implement the HTTPConnection API from httplib.
- The addinfourl return value is a file-like object. It also
- has methods and attributes including:
- - info(): return a mimetools.Message object for the headers
- - geturl(): return the original request URL
- - code: HTTP status code
- """
- host = req.get_host()
- if not host:
- raise urllib2.URLError('no host given')
-
- h = http_class(host, timeout=req.timeout) # will parse host:port
- h.set_debuglevel(self._debuglevel)
-
- headers = dict(req.unredirected_hdrs)
- headers.update(dict((k, v) for k, v in req.headers.items()
- if k not in headers))
-
- # We want to make an HTTP/1.1 request, but the addinfourl
- # class isn't prepared to deal with a persistent connection.
- # It will try to read all remaining data from the socket,
- # which will block while the server waits for the next request.
- # So make sure the connection gets closed after the (only)
- # request.
-
- # Don't close the connection when connection_cache is enabled.
- if fetch.connection_cache is None:
- headers["Connection"] = "close"
- else:
- headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0
-
- headers = dict(
- (name.title(), val) for name, val in headers.items())
-
- if req._tunnel_host:
- tunnel_headers = {}
- proxy_auth_hdr = "Proxy-Authorization"
- if proxy_auth_hdr in headers:
- tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
- # Proxy-Authorization should not be sent to origin
- # server.
- del headers[proxy_auth_hdr]
- h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
-
- try:
- h.request(req.get_method(), req.get_selector(), req.data, headers)
- except socket.error, err: # XXX what error?
- # Don't close connection when cache is enabled.
- if fetch.connection_cache is None:
- h.close()
- raise urllib2.URLError(err)
- else:
- try:
- r = h.getresponse(buffering=True)
- except TypeError: # buffering kw not supported
- r = h.getresponse()
-
- # Pick apart the HTTPResponse object to get the addinfourl
- # object initialized properly.
-
- # Wrap the HTTPResponse object in socket's file object adapter
- # for Windows. That adapter calls recv(), so delegate recv()
- # to read(). This weird wrapping allows the returned object to
- # have readline() and readlines() methods.
-
- # XXX It might be better to extract the read buffering code
- # out of socket._fileobject() and into a base class.
- r.recv = r.read
-
- # no data, just have to read
- r.read()
- class fp_dummy(object):
- def read(self):
- return ""
- def readline(self):
- return ""
- def close(self):
- pass
-
- resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
- resp.code = r.status
- resp.msg = r.reason
-
- # Close the connection when the server requests it.
- if fetch.connection_cache is not None:
- if 'Connection' in r.msg and r.msg['Connection'] == 'close':
- fetch.connection_cache.remove_connection(h.host, h.port)
-
- return resp
-
- class HTTPMethodFallback(urllib2.BaseHandler):
- """
- Fallback to GET if HEAD is not allowed (405 HTTP error)
- """
- def http_error_405(self, req, fp, code, msg, headers):
- fp.read()
- fp.close()
-
- newheaders = dict((k,v) for k,v in req.headers.items()
- if k.lower() not in ("content-length", "content-type"))
- return self.parent.open(urllib2.Request(req.get_full_url(),
- headers=newheaders,
- origin_req_host=req.get_origin_req_host(),
- unverifiable=True))
-
- """
- Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
- Forbidden when they actually mean 405 Method Not Allowed.
- """
- http_error_403 = http_error_405
-
- """
- Some servers (e.g. FusionForge) return 406 Not Acceptable when they
- actually mean 405 Method Not Allowed.
- """
- http_error_406 = http_error_405
-
- class FixedHTTPRedirectHandler(urllib2.HTTPRedirectHandler):
- """
- urllib2.HTTPRedirectHandler resets the method to GET on redirect,
- whereas we want to follow redirects using the original method.
- """
- def redirect_request(self, req, fp, code, msg, headers, newurl):
- newreq = urllib2.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
- newreq.get_method = lambda: req.get_method()
- return newreq
- exported_proxies = export_proxies(d)
-
- handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
- if exported_proxies:
- handlers.append(urllib2.ProxyHandler())
- handlers.append(CacheHTTPHandler())
- # XXX: Since Python 2.7.9 ssl cert validation is enabled by default
- # see PEP-0476, this causes verification errors on some https servers
- # so disable by default.
- import ssl
- if hasattr(ssl, '_create_unverified_context'):
- handlers.append(urllib2.HTTPSHandler(context=ssl._create_unverified_context()))
- opener = urllib2.build_opener(*handlers)
-
- try:
- uri = ud.url.split(";")[0]
- r = urllib2.Request(uri)
- r.get_method = lambda: "HEAD"
- opener.open(r)
- except urllib2.URLError as e:
- # debug for now to avoid spamming the logs in e.g. remote sstate searches
- logger.debug(2, "checkstatus() urlopen failed: %s" % e)
- return False
- return True
-
- def _parse_path(self, regex, s):
- """
- Find and group name, version and archive type in the given string s
- """
-
- m = regex.search(s)
- if m:
- pname = ''
- pver = ''
- ptype = ''
-
- mdict = m.groupdict()
- if 'name' in mdict.keys():
- pname = mdict['name']
- if 'pver' in mdict.keys():
- pver = mdict['pver']
- if 'type' in mdict.keys():
- ptype = mdict['type']
-
- bb.debug(3, "_parse_path: %s, %s, %s" % (pname, pver, ptype))
-
- return (pname, pver, ptype)
-
- return None
-
- def _modelate_version(self, version):
- if version[0] in ['.', '-']:
- if version[1].isdigit():
- version = version[1] + version[0] + version[2:len(version)]
- else:
- version = version[1:len(version)]
-
- version = re.sub('-', '.', version)
- version = re.sub('_', '.', version)
- version = re.sub('(rc)+', '.1000.', version)
- version = re.sub('(beta)+', '.100.', version)
- version = re.sub('(alpha)+', '.10.', version)
- if version[0] == 'v':
- version = version[1:len(version)]
- return version
-
- def _vercmp(self, old, new):
- """
- Check whether the 'new' version is newer than the 'old' version. We use the
- existing vercmp() for this purpose. PE is cleared in the comparison as it is not
- used for the build, and PR is cleared too for simplicity, as it is difficult to
- extract from the various upstream formats
- """
-
- (oldpn, oldpv, oldsuffix) = old
- (newpn, newpv, newsuffix) = new
-
- """
- Check for a new suffix type that we have never heard of before
- """
- if (newsuffix):
- m = self.suffix_regex_comp.search(newsuffix)
- if not m:
- bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
- return False
-
- """
- Not our package so ignore it
- """
- if oldpn != newpn:
- return False
-
- oldpv = self._modelate_version(oldpv)
- newpv = self._modelate_version(newpv)
-
- return bb.utils.vercmp(("0", oldpv, ""), ("0", newpv, ""))
-
- def _fetch_index(self, uri, ud, d):
- """
- Fetch an index page (via wget) to get directory information
- """
- f = tempfile.NamedTemporaryFile()
-
- agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
- fetchcmd = self.basecmd
- fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
- try:
- self._runwget(ud, d, fetchcmd, True)
- fetchresult = f.read()
- except bb.fetch2.BBFetchException:
- fetchresult = ""
-
- f.close()
- return fetchresult
-
- def _check_latest_version(self, url, package, package_regex, current_version, ud, d):
- """
- Return the latest version of a package inside a given directory path
- On error, or if no version is found, return ""
- """
- valid = 0
- version = ['', '', '']
-
- bb.debug(3, "VersionURL: %s" % (url))
- soup = BeautifulSoup(self._fetch_index(url, ud, d), "html.parser", parse_only=SoupStrainer("a"))
- if not soup:
- bb.debug(3, "*** %s NO SOUP" % (url))
- return ""
-
- for line in soup.find_all('a', href=True):
- bb.debug(3, "line['href'] = '%s'" % (line['href']))
- bb.debug(3, "line = '%s'" % (str(line)))
-
- newver = self._parse_path(package_regex, line['href'])
- if not newver:
- newver = self._parse_path(package_regex, str(line))
-
- if newver:
- bb.debug(3, "Upstream version found: %s" % newver[1])
- if valid == 0:
- version = newver
- valid = 1
- elif self._vercmp(version, newver) < 0:
- version = newver
-
- pupver = re.sub('_', '.', version[1])
-
- bb.debug(3, "*** %s -> UpstreamVersion = %s (CurrentVersion = %s)" %
- (package, pupver or "N/A", current_version[1]))
-
- if valid:
- return pupver
-
- return ""
-
- def _check_latest_version_by_dir(self, dirver, package, package_regex,
- current_version, ud, d):
- """
- Scan every directory in order to get upstream version.
- """
- version_dir = ['', '', '']
- version = ['', '', '']
-
- dirver_regex = re.compile("(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))")
- s = dirver_regex.search(dirver)
- if s:
- version_dir[1] = s.group('ver')
- else:
- version_dir[1] = dirver
-
- dirs_uri = bb.fetch.encodeurl([ud.type, ud.host,
- ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
- bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))
-
- soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d), "html.parser", parse_only=SoupStrainer("a"))
- if not soup:
- return version[1]
-
- for line in soup.find_all('a', href=True):
- s = dirver_regex.search(line['href'].strip("/"))
- if s:
- sver = s.group('ver')
-
- # When a prefix is part of the version directory, ensure that
- # only the version directory is used, so remove any preceding
- # directories.
- #
- # Example: pfx = '/dir1/dir2/v' and version = '2.5' the expected
- # result is v2.5.
- spfx = s.group('pfx').split('/')[-1]
-
- version_dir_new = ['', sver, '']
- if self._vercmp(version_dir, version_dir_new) <= 0:
- dirver_new = spfx + sver
- path = ud.path.replace(dirver, dirver_new, True) \
- .split(package)[0]
- uri = bb.fetch.encodeurl([ud.type, ud.host, path,
- ud.user, ud.pswd, {}])
-
- pupver = self._check_latest_version(uri,
- package, package_regex, current_version, ud, d)
- if pupver:
- version[1] = pupver
-
- version_dir = version_dir_new
-
- return version[1]
-
- def _init_regexes(self, package, ud, d):
- """
- Match as many patterns as possible such as:
- gnome-common-2.20.0.tar.gz (most common format)
- gtk+-2.90.1.tar.gz
- xf86-input-synaptics-12.6.9.tar.gz
- dri2proto-2.3.tar.gz
- blktool_4.orig.tar.gz
- libid3tag-0.15.1b.tar.gz
- unzip552.tar.gz
- icu4c-3_6-src.tgz
- genext2fs_1.3.orig.tar.gz
- gst-fluendo-mp3
- """
- # match most patterns, which use "-" as the separator before the version digits
- pn_prefix1 = "[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
- # a loose pattern such as for unzip552.tar.gz
- pn_prefix2 = "[a-zA-Z]+"
- # a loose pattern such as for 80325-quicky-0.4.tar.gz
- pn_prefix3 = "[0-9]+[-]?[a-zA-Z]+"
- # Save the Package Name (pn) Regex for use later
- pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)
-
- # match version
- pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
-
- # match arch
- parch_regex = "-source|_all_"
-
- # The src.rpm extension was added only for rpm packages. It can be removed
- # if rpm packages will always be considered as having to be manually upgraded
- psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
-
- # match name, version and archive type of a package
- package_regex_comp = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
- % (pn_regex, pver_regex, parch_regex, psuffix_regex))
- self.suffix_regex_comp = re.compile(psuffix_regex)
-
- # Compile the regex; it can be a package-specific or a generic regex
- pn_regex = d.getVar('UPSTREAM_CHECK_REGEX', True)
- if pn_regex:
- package_custom_regex_comp = re.compile(pn_regex)
- else:
- version = self._parse_path(package_regex_comp, package)
- if version:
- package_custom_regex_comp = re.compile(
- "(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
- (re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
- else:
- package_custom_regex_comp = None
-
- return package_custom_regex_comp
-
- def latest_versionstring(self, ud, d):
- """
- Manipulate the URL and try to obtain the latest package version
-
- A sanity check ensures the name and type match.
- """
- package = ud.path.split("/")[-1]
- current_version = ['', d.getVar('PV', True), '']
-
- """possible to have no version in pkg name, such as spectrum-fw"""
- if not re.search("\d+", package):
- current_version[1] = re.sub('_', '.', current_version[1])
- current_version[1] = re.sub('-', '.', current_version[1])
- return (current_version[1], '')
-
- package_regex = self._init_regexes(package, ud, d)
- if package_regex is None:
- bb.warn("latest_versionstring: package %s don't match pattern" % (package))
- return ('', '')
- bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))
-
- uri = ""
- regex_uri = d.getVar("UPSTREAM_CHECK_URI", True)
- if not regex_uri:
- path = ud.path.split(package)[0]
-
- # search for version matches on folders inside the path, like:
- # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
- dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
- m = dirver_regex.search(path)
- if m:
- pn = d.getVar('PN', True)
- dirver = m.group('dirver')
-
- dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
- if not dirver_pn_regex.search(dirver):
- return (self._check_latest_version_by_dir(dirver,
- package, package_regex, current_version, ud, d), '')
-
- uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
- else:
- uri = regex_uri
-
- return (self._check_latest_version(uri, package, package_regex,
- current_version, ud, d), '')
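
To make the version matching above concrete, here is a reduced sketch of the
name/version/suffix regex built in _init_regexes(), exercised on one of the
example filenames from its docstring. The pattern is trimmed to the pieces
needed for this input and is illustrative only:

    import re

    pn_prefix1 = "[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
    pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
    psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz)"

    # Same shape as package_regex_comp above, minus the optional arch group.
    package_regex = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)[\.-](?P<type>%s$)"
                               % (pn_prefix1, pver_regex, psuffix_regex))

    m = package_regex.search("gnome-common-2.20.0.tar.gz")
    assert (m.group('name'), m.group('pver'), m.group('type')) == \
           ('gnome-common-', '2.20.0', 'tar.gz')
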
diff --git a/yocto-poky/bitbake/lib/bb/main.py b/yocto-poky/bitbake/lib/bb/main.py
deleted file mode 100755
index e30217369..000000000
--- a/yocto-poky/bitbake/lib/bb/main.py
+++ /dev/null
@@ -1,440 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
-# Copyright (C) 2005 Holger Hans Peter Freyther
-# Copyright (C) 2005 ROAD GmbH
-# Copyright (C) 2006 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import sys
-import logging
-import optparse
-import warnings
-
-import bb
-from bb import event
-import bb.msg
-from bb import cooker
-from bb import ui
-from bb import server
-from bb import cookerdata
-
-logger = logging.getLogger("BitBake")
-
-class BBMainException(Exception):
- pass
-
-def present_options(optionlist):
- if len(optionlist) > 1:
- return ' or '.join([', '.join(optionlist[:-1]), optionlist[-1]])
- else:
- return optionlist[0]
-
-class BitbakeHelpFormatter(optparse.IndentedHelpFormatter):
- def format_option(self, option):
- # We need to do this here rather than in the text we supply to
- # add_option() because we don't want to call list_extension_modules()
- # on every execution (since it imports all of the modules)
- # Note also that we modify option.help rather than the returned text
- # - this is so that we don't have to re-format the text ourselves
- if option.dest == 'ui':
- valid_uis = list_extension_modules(bb.ui, 'main')
- option.help = option.help.replace('@CHOICES@', present_options(valid_uis))
- elif option.dest == 'servertype':
- valid_server_types = list_extension_modules(bb.server, 'BitBakeServer')
- option.help = option.help.replace('@CHOICES@', present_options(valid_server_types))
-
- return optparse.IndentedHelpFormatter.format_option(self, option)
-
-def list_extension_modules(pkg, checkattr):
- """
- Lists extension modules in a specific Python package
- (e.g. UIs, servers). NOTE: Calling this function will import all of the
- submodules of the specified module in order to check for the specified
- attribute; this can have unusual side-effects. As a result, this should
- only be called when displaying help text or error messages.
- Parameters:
- pkg: previously imported Python package to list
- checkattr: attribute to look for in module to determine if it's valid
- as the type of extension you are looking for
- """
- import pkgutil
- pkgdir = os.path.dirname(pkg.__file__)
-
- modules = []
- for _, modulename, _ in pkgutil.iter_modules([pkgdir]):
- if os.path.isdir(os.path.join(pkgdir, modulename)):
- # ignore directories
- continue
- try:
- module = __import__(pkg.__name__, fromlist=[modulename])
- except:
- # If we can't import it, it's not valid
- continue
- module_if = getattr(module, modulename)
- if getattr(module_if, 'hidden_extension', False):
- continue
- if not checkattr or hasattr(module_if, checkattr):
- modules.append(modulename)
- return modules
-
-def import_extension_module(pkg, modulename, checkattr):
- try:
- # Dynamically load the UI based on the ui name. Although we
- # suggest a fixed set this allows you to have flexibility in which
- # ones are available.
- module = __import__(pkg.__name__, fromlist = [modulename])
- return getattr(module, modulename)
- except AttributeError:
- raise BBMainException('FATAL: Unable to import extension module "%s" from %s. Valid extension modules: %s' % (modulename, pkg.__name__, present_options(list_extension_modules(pkg, checkattr))))
-
-
-# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others
-warnlog = logging.getLogger("BitBake.Warnings")
-_warnings_showwarning = warnings.showwarning
-def _showwarning(message, category, filename, lineno, file=None, line=None):
- if file is not None:
- if _warnings_showwarning is not None:
- _warnings_showwarning(message, category, filename, lineno, file, line)
- else:
- s = warnings.formatwarning(message, category, filename, lineno)
- warnlog.warn(s)
-
-warnings.showwarning = _showwarning
-warnings.filterwarnings("ignore")
-warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
-warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
-warnings.filterwarnings("ignore", category=ImportWarning)
-warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
-warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")
-
-class BitBakeConfigParameters(cookerdata.ConfigParameters):
-
- def parseCommandLine(self, argv=sys.argv):
- parser = optparse.OptionParser(
- formatter = BitbakeHelpFormatter(),
- version = "BitBake Build Tool Core version %s" % bb.__version__,
- usage = """%prog [options] [recipename/target recipe:do_task ...]
-
- Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
- It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
- will provide the layer, BBFILES and other configuration information.""")
-
- parser.add_option("-b", "--buildfile", help = "Execute tasks from a specific .bb recipe directly. WARNING: Does not handle any dependencies from other recipes.",
- action = "store", dest = "buildfile", default = None)
-
- parser.add_option("-k", "--continue", help = "Continue as much as possible after an error. While the target that failed and anything depending on it cannot be built, as much as possible will be built before stopping.",
- action = "store_false", dest = "abort", default = True)
-
- parser.add_option("-a", "--tryaltconfigs", help = "Continue with builds by trying to use alternative providers where possible.",
- action = "store_true", dest = "tryaltconfigs", default = False)
-
- parser.add_option("-f", "--force", help = "Force the specified targets/task to run (invalidating any existing stamp file).",
- action = "store_true", dest = "force", default = False)
-
- parser.add_option("-c", "--cmd", help = "Specify the task to execute. The exact options available depend on the metadata. Some examples might be 'compile' or 'populate_sysroot' or 'listtasks' may give a list of the tasks available.",
- action = "store", dest = "cmd")
-
- parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified task such as 'compile' and then run the default task for the specified target(s).",
- action = "store", dest = "invalidate_stamp")
-
- parser.add_option("-r", "--read", help = "Read the specified file before bitbake.conf.",
- action = "append", dest = "prefile", default = [])
-
- parser.add_option("-R", "--postread", help = "Read the specified file after bitbake.conf.",
- action = "append", dest = "postfile", default = [])
-
- parser.add_option("-v", "--verbose", help = "Output more log message data to the terminal.",
- action = "store_true", dest = "verbose", default = False)
-
- parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
- action = "count", dest="debug", default = 0)
-
- parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.",
- action = "store_true", dest = "dry_run", default = False)
-
- parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. The SIGNATURE_HANDLER parameter is passed to the handler. Two common values are none and printdiff but the handler may define more/less. none means only dump the signature, printdiff means compare the dumped signature with the cached one.",
- action = "append", dest = "dump_signatures", default = [], metavar="SIGNATURE_HANDLER")
-
- parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.",
- action = "store_true", dest = "parse_only", default = False)
-
- parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.",
- action = "store_true", dest = "show_versions", default = False)
-
- parser.add_option("-e", "--environment", help = "Show the global or per-recipe environment complete with information about where variables were set/changed.",
- action = "store_true", dest = "show_environment", default = False)
-
- parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.",
- action = "store_true", dest = "dot_graph", default = False)
-
- parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
- action = "append", dest = "extra_assume_provided", default = [])
-
- parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
- action = "append", dest = "debug_domains", default = [])
-
- parser.add_option("-P", "--profile", help = "Profile the command and save reports.",
- action = "store_true", dest = "profile", default = False)
-
- env_ui = os.environ.get('BITBAKE_UI', None)
- default_ui = env_ui or 'knotty'
- # @CHOICES@ is substituted out by BitbakeHelpFormatter above
- parser.add_option("-u", "--ui", help = "The user interface to use (@CHOICES@ - default %default).",
- action="store", dest="ui", default=default_ui)
-
- # @CHOICES@ is substituted out by BitbakeHelpFormatter above
- parser.add_option("-t", "--servertype", help = "Choose which server type to use (@CHOICES@ - default %default).",
- action = "store", dest = "servertype", default = "process")
-
- parser.add_option("", "--token", help = "Specify the connection token to be used when connecting to a remote server.",
- action = "store", dest = "xmlrpctoken")
-
- parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.",
- action = "store_true", dest = "revisions_changed", default = False)
-
- parser.add_option("", "--server-only", help = "Run bitbake without a UI, only starting a server (cooker) process.",
- action = "store_true", dest = "server_only", default = False)
-
- parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to.",
- action = "store", dest = "bind", default = False)
-
- parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks. sstate will be ignored and everything needed, built.",
- action = "store_true", dest = "nosetscene", default = False)
-
- parser.add_option("", "--setscene-only", help = "Only run setscene tasks, don't run any real tasks.",
- action = "store_true", dest = "setsceneonly", default = False)
-
- parser.add_option("", "--remote-server", help = "Connect to the specified server.",
- action = "store", dest = "remote_server", default = False)
-
- parser.add_option("-m", "--kill-server", help = "Terminate the remote server.",
- action = "store_true", dest = "kill_server", default = False)
-
- parser.add_option("", "--observe-only", help = "Connect to a server as an observing-only client.",
- action = "store_true", dest = "observe_only", default = False)
-
- parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.",
- action = "store_true", dest = "status_only", default = False)
-
- parser.add_option("-w", "--write-log", help = "Writes the event log of the build to a bitbake event json file. Use '' (empty string) to assign the name automatically.",
- action = "store", dest = "writeeventlog")
-
- options, targets = parser.parse_args(argv)
-
- # some environment variables also set configuration options
- if "BBSERVER" in os.environ:
- options.servertype = "xmlrpc"
- options.remote_server = os.environ["BBSERVER"]
-
- if "BBTOKEN" in os.environ:
- options.xmlrpctoken = os.environ["BBTOKEN"]
-
- if "BBEVENTLOG" in os.environ:
- options.writeeventlog = os.environ["BBEVENTLOG"]
-
- # fill in proper log name if not supplied
- if options.writeeventlog is not None and len(options.writeeventlog) == 0:
- import datetime
- options.writeeventlog = "bitbake_eventlog_%s.json" % datetime.datetime.now().strftime("%Y%m%d%H%M%S")
-
- # if BBSERVER says to autodetect, let's do that
- if options.remote_server:
- [host, port] = options.remote_server.split(":", 2)
- port = int(port)
- # use automatic port if port set to -1, means read it from
- # the bitbake.lock file; this is a bit tricky, but we always expect
- # to be in the base of the build directory if we need to have a
- # chance to start the server later, anyway
- if port == -1:
- lock_location = "./bitbake.lock"
- # we try to read the address at all times; if the server is not started,
- # we'll try to start it after the first connect fails, below
- try:
- lf = open(lock_location, 'r')
- remotedef = lf.readline()
- [host, port] = remotedef.split(":")
- port = int(port)
- lf.close()
- options.remote_server = remotedef
- except Exception as e:
- raise BBMainException("Failed to read bitbake.lock (%s), invalid port" % str(e))
-
- return options, targets[1:]
-
-
-def start_server(servermodule, configParams, configuration, features):
- server = servermodule.BitBakeServer()
- single_use = not configParams.server_only
- if configParams.bind:
- (host, port) = configParams.bind.split(':')
- server.initServer((host, int(port)), single_use)
- configuration.interface = [ server.serverImpl.host, server.serverImpl.port ]
- else:
- server.initServer(single_use=single_use)
- configuration.interface = []
-
- try:
- configuration.setServerRegIdleCallback(server.getServerIdleCB())
-
- cooker = bb.cooker.BBCooker(configuration, features)
-
- server.addcooker(cooker)
- server.saveConnectionDetails()
- except Exception as e:
- exc_info = sys.exc_info()
- while hasattr(server, "event_queue"):
- try:
- import queue
- except ImportError:
- import Queue as queue
- try:
- event = server.event_queue.get(block=False)
- except (queue.Empty, IOError):
- break
- if isinstance(event, logging.LogRecord):
- logger.handle(event)
- raise exc_info[1], None, exc_info[2]
- server.detach()
- cooker.lock.close()
- return server
-
-
-def bitbake_main(configParams, configuration):
-
- # Python multiprocessing requires /dev/shm on Linux
- if sys.platform.startswith('linux') and not os.access('/dev/shm', os.W_OK | os.X_OK):
- raise BBMainException("FATAL: /dev/shm does not exist or is not writable")
-
- # Unbuffer stdout to avoid log truncation in the event
- # of a disorderly exit as well as to provide timely
- # updates to log files for use with tail
- try:
- if sys.stdout.name == '<stdout>':
- sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
- except:
- pass
-
-
- configuration.setConfigParameters(configParams)
-
- ui_module = import_extension_module(bb.ui, configParams.ui, 'main')
- servermodule = import_extension_module(bb.server, configParams.servertype, 'BitBakeServer')
-
- if configParams.server_only:
- if configParams.servertype != "xmlrpc":
- raise BBMainException("FATAL: If '--server-only' is defined, we must set the "
- "servertype as 'xmlrpc'.\n")
- if not configParams.bind:
- raise BBMainException("FATAL: The '--server-only' option requires a name/address "
- "to bind to with the -B option.\n")
- if configParams.remote_server:
- raise BBMainException("FATAL: The '--server-only' option conflicts with %s.\n" %
- ("the BBSERVER environment variable" if "BBSERVER" in os.environ \
- else "the '--remote-server' option" ))
-
- if configParams.bind and configParams.servertype != "xmlrpc":
- raise BBMainException("FATAL: If '-B' or '--bind' is defined, we must "
- "set the servertype as 'xmlrpc'.\n")
-
- if configParams.remote_server and configParams.servertype != "xmlrpc":
- raise BBMainException("FATAL: If '--remote-server' is defined, we must "
- "set the servertype as 'xmlrpc'.\n")
-
- if configParams.observe_only and (not configParams.remote_server or configParams.bind):
- raise BBMainException("FATAL: '--observe-only' can only be used by UI clients "
- "connecting to a server.\n")
-
- if configParams.kill_server and not configParams.remote_server:
- raise BBMainException("FATAL: '--kill-server' can only be used to terminate a remote server")
-
- if "BBDEBUG" in os.environ:
- level = int(os.environ["BBDEBUG"])
- if level > configuration.debug:
- configuration.debug = level
-
- bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
- configuration.debug_domains)
-
- # Ensure logging messages get sent to the UI as events
- handler = bb.event.LogHandler()
- if not configParams.status_only:
- # In status only mode there are no logs and no UI
- logger.addHandler(handler)
-
- # Clear away any spurious environment variables while we stoke up the cooker
- cleanedvars = bb.utils.clean_environment()
-
- featureset = []
- if not configParams.server_only:
- # Collect the feature set for the UI
- featureset = getattr(ui_module, "featureSet", [])
-
- if configParams.server_only:
- for param in ('prefile', 'postfile'):
- value = getattr(configParams, param)
- if value:
- setattr(configuration, "%s_server" % param, value)
- param = "%s_server" % param
-
- if not configParams.remote_server:
- # we start a server with a given configuration
- server = start_server(servermodule, configParams, configuration, featureset)
- bb.event.ui_queue = []
- else:
- # we start a stub server that is actually an XMLRPC client that connects to a real server
- server = servermodule.BitBakeXMLRPCClient(configParams.observe_only, configParams.xmlrpctoken)
- server.saveConnectionDetails(configParams.remote_server)
-
-
- if not configParams.server_only:
- try:
- server_connection = server.establishConnection(featureset)
- except Exception as e:
- bb.fatal("Could not connect to server %s: %s" % (configParams.remote_server, str(e)))
-
- if configParams.kill_server:
- server_connection.connection.terminateServer()
- bb.event.ui_queue = []
- return 0
-
- server_connection.setupEventQueue()
-
- # Restore the environment in case the UI needs it
- for k in cleanedvars:
- os.environ[k] = cleanedvars[k]
-
- logger.removeHandler(handler)
-
-
- if configParams.status_only:
- server_connection.terminate()
- return 0
-
- try:
- return ui_module.main(server_connection.connection, server_connection.events, configParams)
- finally:
- bb.event.ui_queue = []
- server_connection.terminate()
- else:
- print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
- return 0
-
- return 1
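
One subtle path in parseCommandLine() above is the port value -1, which tells
bitbake to read the real server address from the bitbake.lock file in the
build directory. A minimal sketch of that parsing (the file contents here are
hypothetical, standing in for lf.readline()):

    remotedef = "127.0.0.1:43981\n"       # hypothetical bitbake.lock contents
    [host, port] = remotedef.split(":")
    port = int(port)                      # int() tolerates the trailing newline
    assert (host, port) == ("127.0.0.1", 43981)
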
diff --git a/yocto-poky/bitbake/lib/bb/methodpool.py b/yocto-poky/bitbake/lib/bb/methodpool.py
deleted file mode 100644
index 49aed3338..000000000
--- a/yocto-poky/bitbake/lib/bb/methodpool.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-#
-# Copyright (C) 2006 Holger Hans Peter Freyther
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-from bb.utils import better_compile, better_exec
-
-def insert_method(modulename, code, fn, lineno):
- """
- Add the code of a module. The method will simply
- be added; no checking will be done
- """
- comp = better_compile(code, modulename, fn, lineno=lineno)
- better_exec(comp, None, code, fn)
-
-compilecache = {}
-
-def compile_cache(code):
- h = hash(code)
- if h in compilecache:
- return compilecache[h]
- return None
-
-def compile_cache_add(code, compileobj):
- h = hash(code)
- compilecache[h] = compileobj
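
The compile cache above is just a dictionary keyed by hash(code). A minimal
usage sketch, with the builtin compile() standing in for bb.utils.better_compile
purely for illustration:

    code = "def do_example():\n    pass\n"

    comp = compile_cache(code)
    if comp is None:
        # Cache miss: compile once and remember the code object.
        comp = compile(code, "<methodpool>", "exec")
        compile_cache_add(code, comp)

    # A second lookup with identical source now hits the cache.
    assert compile_cache(code) is comp
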
diff --git a/yocto-poky/bitbake/lib/bb/monitordisk.py b/yocto-poky/bitbake/lib/bb/monitordisk.py
deleted file mode 100644
index 466523c6e..000000000
--- a/yocto-poky/bitbake/lib/bb/monitordisk.py
+++ /dev/null
@@ -1,263 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2012 Robert Yang
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os, logging, re, sys
-import bb
-logger = logging.getLogger("BitBake.Monitor")
-
-def printErr(info):
- logger.error("%s\n Disk space monitor will NOT be enabled" % info)
-
-def convertGMK(unit):
-
- """ Convert the space unit G, M, K, the unit is case-insensitive """
-
- unitG = re.match('([1-9][0-9]*)[gG]\s?$', unit)
- if unitG:
- return int(unitG.group(1)) * (1024 ** 3)
- unitM = re.match('([1-9][0-9]*)[mM]\s?$', unit)
- if unitM:
- return int(unitM.group(1)) * (1024 ** 2)
- unitK = re.match('([1-9][0-9]*)[kK]\s?$', unit)
- if unitK:
- return int(unitK.group(1)) * 1024
- unitN = re.match('([1-9][0-9]*)\s?$', unit)
- if unitN:
- return int(unitN.group(1))
- else:
- return None
-
-def getMountedDev(path):
-
- """ Get the device mounted at the path, uses /proc/mounts """
-
- # Get the mount point of the filesystem containing path
- # st_dev is the ID of device containing file
- parentDev = os.stat(path).st_dev
- currentDev = parentDev
- # When the current directory's device is different from the
- # parent's, then the current directory is a mount point
- while parentDev == currentDev:
- mountPoint = path
- # Use dirname to get the parent's directory
- path = os.path.dirname(path)
- # Reach the "/"
- if path == mountPoint:
- break
- parentDev = os.stat(path).st_dev
-
- try:
- with open("/proc/mounts", "r") as ifp:
- for line in ifp:
- procLines = line.rstrip('\n').split()
- if procLines[1] == mountPoint:
- return procLines[0]
- except EnvironmentError:
- pass
- return None
-
-def getDiskData(BBDirs, configuration):
-
- """Prepare disk data for disk space monitor"""
-
- # Save the device IDs; the IDs need to be unique (a dictionary's keys
- # are unique), so that when more than one directory is located on the
- # same device, we just monitor it once
- devDict = {}
- for pathSpaceInode in BBDirs.split():
- # The input format is: "action,dir,space,inode", where action and
- # dir are required; space and inode are optional
- pathSpaceInodeRe = re.match('([^,]*),([^,]*),([^,]*),?(.*)', pathSpaceInode)
- if not pathSpaceInodeRe:
- printErr("Invalid value in BB_DISKMON_DIRS: %s" % pathSpaceInode)
- return None
-
- action = pathSpaceInodeRe.group(1)
- if action not in ("ABORT", "STOPTASKS", "WARN"):
- printErr("Unknown disk space monitor action: %s" % action)
- return None
-
- path = os.path.realpath(pathSpaceInodeRe.group(2))
- if not path:
- printErr("Invalid path value in BB_DISKMON_DIRS: %s" % pathSpaceInode)
- return None
-
- # The disk space or inode is optional, but it should have a correct
- # value once it is specified
- minSpace = pathSpaceInodeRe.group(3)
- if minSpace:
- minSpace = convertGMK(minSpace)
- if not minSpace:
- printErr("Invalid disk space value in BB_DISKMON_DIRS: %s" % pathSpaceInodeRe.group(3))
- return None
- else:
- # None means that it is not specified
- minSpace = None
-
- minInode = pathSpaceInodeRe.group(4)
- if minInode:
- minInode = convertGMK(minInode)
- if not minInode:
- printErr("Invalid inode value in BB_DISKMON_DIRS: %s" % pathSpaceInodeRe.group(4))
- return None
- else:
- # None means that it is not specified
- minInode = None
-
- if minSpace is None and minInode is None:
- printErr("No disk space or inode value in found BB_DISKMON_DIRS: %s" % pathSpaceInode)
- return None
- # mkdir for the directory since it may not exist, for example the
- # DL_DIR may not exist at the very beginning
- if not os.path.exists(path):
- bb.utils.mkdirhier(path)
- dev = getMountedDev(path)
- # Use path/action as the key
- devDict[os.path.join(path, action)] = [dev, minSpace, minInode]
-
- return devDict
-
-def getInterval(configuration):
-
- """ Get the disk space interval """
-
- # The default values are 50M and 5K.
- spaceDefault = 50 * 1024 * 1024
- inodeDefault = 5 * 1024
-
- interval = configuration.getVar("BB_DISKMON_WARNINTERVAL", True)
- if not interval:
- return spaceDefault, inodeDefault
- else:
- # The disk space or inode interval is optional, but it should
- # have a correct value once it is specified
- intervalRe = re.match('([^,]*),?\s*(.*)', interval)
- if intervalRe:
- intervalSpace = intervalRe.group(1)
- if intervalSpace:
- intervalSpace = convertGMK(intervalSpace)
- if not intervalSpace:
- printErr("Invalid disk space interval value in BB_DISKMON_WARNINTERVAL: %s" % intervalRe.group(1))
- return None, None
- else:
- intervalSpace = spaceDefault
- intervalInode = intervalRe.group(2)
- if intervalInode:
- intervalInode = convertGMK(intervalInode)
- if not intervalInode:
- printErr("Invalid disk inode interval value in BB_DISKMON_WARNINTERVAL: %s" % intervalRe.group(2))
- return None, None
- else:
- intervalInode = inodeDefault
- return intervalSpace, intervalInode
- else:
- printErr("Invalid interval value in BB_DISKMON_WARNINTERVAL: %s" % interval)
- return None, None
-
-class diskMonitor:
-
- """Prepare the disk space monitor data"""
-
- def __init__(self, configuration):
-
- self.enableMonitor = False
- self.configuration = configuration
-
- BBDirs = configuration.getVar("BB_DISKMON_DIRS", True) or None
- if BBDirs:
- self.devDict = getDiskData(BBDirs, configuration)
- if self.devDict:
- self.spaceInterval, self.inodeInterval = getInterval(configuration)
- if self.spaceInterval and self.inodeInterval:
- self.enableMonitor = True
- # These are for saving the previous disk free space and inode, we
- # use them to avoid printing too many warning messages
- self.preFreeS = {}
- self.preFreeI = {}
- # This is for STOPTASKS and ABORT, to avoid printing the message
- # repeatedly while waiting for the tasks to finish
- self.checked = {}
- for k in self.devDict:
- self.preFreeS[k] = 0
- self.preFreeI[k] = 0
- self.checked[k] = False
- if self.spaceInterval is None and self.inodeInterval is None:
- self.enableMonitor = False
-
- def check(self, rq):
-
- """ Take action for the monitor """
-
- if self.enableMonitor:
- for k in self.devDict:
- path = os.path.dirname(k)
- action = os.path.basename(k)
- dev = self.devDict[k][0]
- minSpace = self.devDict[k][1]
- minInode = self.devDict[k][2]
-
- st = os.statvfs(path)
-
- # The free space, in bytes
- freeSpace = st.f_bavail * st.f_frsize
-
- if minSpace and freeSpace < minSpace:
- # Always show the warning; self.checked will always be False if the action is WARN
- if self.preFreeS[k] == 0 or self.preFreeS[k] - freeSpace > self.spaceInterval and not self.checked[k]:
- logger.warn("The free space of %s (%s) is running low (%.3fGB left)" % \
- (path, dev, freeSpace / 1024 / 1024 / 1024.0))
- self.preFreeS[k] = freeSpace
-
- if action == "STOPTASKS" and not self.checked[k]:
- logger.error("No new tasks can be executed since the disk space monitor action is \"STOPTASKS\"!")
- self.checked[k] = True
- rq.finish_runqueue(False)
- bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
- elif action == "ABORT" and not self.checked[k]:
- logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!")
- self.checked[k] = True
- rq.finish_runqueue(True)
- bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
-
- # The number of free inodes
- freeInode = st.f_favail
-
- if minInode and freeInode < minInode:
- # Some filesystems use dynamic inodes so can't run out
- # (e.g. btrfs). This is reported by the inode count being 0.
- if st.f_files == 0:
- self.devDict[k][2] = None
- continue
- # Always show the warning; self.checked will always be False if the action is WARN
- if self.preFreeI[k] == 0 or self.preFreeI[k] - freeInode > self.inodeInterval and not self.checked[k]:
- logger.warn("The free inode of %s (%s) is running low (%.3fK left)" % \
- (path, dev, freeInode / 1024.0))
- self.preFreeI[k] = freeInode
-
- if action == "STOPTASKS" and not self.checked[k]:
- logger.error("No new tasks can be executed since the disk space monitor action is \"STOPTASKS\"!")
- self.checked[k] = True
- rq.finish_runqueue(False)
- bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
- elif action == "ABORT" and not self.checked[k]:
- logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!")
- self.checked[k] = True
- rq.finish_runqueue(True)
- bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
- return
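
For reference, convertGMK() above accepts a case-insensitive G/M/K suffix or a
bare number, returning None for anything else. A few values traced from the
code (inputs are illustrative):

    assert convertGMK("1G") == 1024 ** 3
    assert convertGMK("50m") == 50 * (1024 ** 2)
    assert convertGMK("5K") == 5 * 1024
    assert convertGMK("1024") == 1024
    assert convertGMK("1.5G") is None    # fractional values are rejected
    assert convertGMK("bogus") is None
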
diff --git a/yocto-poky/bitbake/lib/bb/msg.py b/yocto-poky/bitbake/lib/bb/msg.py
deleted file mode 100644
index 786b5aef4..000000000
--- a/yocto-poky/bitbake/lib/bb/msg.py
+++ /dev/null
@@ -1,199 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'msg' implementation
-
-Message handling infrastructure for bitbake
-
-"""
-
-# Copyright (C) 2006 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import sys
-import copy
-import logging
-import collections
-from itertools import groupby
-import warnings
-import bb
-import bb.event
-
-class BBLogFormatter(logging.Formatter):
- """Formatter which ensures that our 'plain' messages (logging.INFO + 1) are used as is"""
-
- DEBUG3 = logging.DEBUG - 2
- DEBUG2 = logging.DEBUG - 1
- DEBUG = logging.DEBUG
- VERBOSE = logging.INFO - 1
- NOTE = logging.INFO
- PLAIN = logging.INFO + 1
- ERROR = logging.ERROR
- WARNING = logging.WARNING
- CRITICAL = logging.CRITICAL
-
- levelnames = {
- DEBUG3 : 'DEBUG',
- DEBUG2 : 'DEBUG',
- DEBUG : 'DEBUG',
- VERBOSE: 'NOTE',
- NOTE : 'NOTE',
- PLAIN : '',
- WARNING : 'WARNING',
- ERROR : 'ERROR',
- CRITICAL: 'ERROR',
- }
-
- color_enabled = False
- BASECOLOR, BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(29,38)
-
- COLORS = {
- DEBUG3 : CYAN,
- DEBUG2 : CYAN,
- DEBUG : CYAN,
- VERBOSE : BASECOLOR,
- NOTE : BASECOLOR,
- PLAIN : BASECOLOR,
- WARNING : YELLOW,
- ERROR : RED,
- CRITICAL: RED,
- }
-
- BLD = '\033[1;%dm'
- STD = '\033[%dm'
- RST = '\033[0m'
-
- def getLevelName(self, levelno):
- try:
- return self.levelnames[levelno]
- except KeyError:
- self.levelnames[levelno] = value = 'Level %d' % levelno
- return value
-
- def format(self, record):
- record.levelname = self.getLevelName(record.levelno)
- if record.levelno == self.PLAIN:
- msg = record.getMessage()
- else:
- if self.color_enabled:
- record = self.colorize(record)
- msg = logging.Formatter.format(self, record)
-
- if hasattr(record, 'bb_exc_info'):
- etype, value, tb = record.bb_exc_info
- formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
- msg += '\n' + ''.join(formatted)
- return msg
-
- def colorize(self, record):
- color = self.COLORS[record.levelno]
- if self.color_enabled and color is not None:
- record = copy.copy(record)
- record.levelname = "".join([self.BLD % color, record.levelname, self.RST])
- record.msg = "".join([self.STD % color, record.msg, self.RST])
- return record
-
- def enable_color(self):
- self.color_enabled = True
-
-class BBLogFilter(object):
- def __init__(self, handler, level, debug_domains):
- self.stdlevel = level
- self.debug_domains = debug_domains
- loglevel = level
- for domain in debug_domains:
- if debug_domains[domain] < loglevel:
- loglevel = debug_domains[domain]
- handler.setLevel(loglevel)
- handler.addFilter(self)
-
- def filter(self, record):
- if record.levelno >= self.stdlevel:
- return True
- if record.name in self.debug_domains and record.levelno >= self.debug_domains[record.name]:
- return True
- return False
-
-class BBLogFilterStdErr(BBLogFilter):
- def filter(self, record):
- if not BBLogFilter.filter(self, record):
- return False
- if record.levelno >= logging.ERROR:
- return True
- return False
-
-class BBLogFilterStdOut(BBLogFilter):
- def filter(self, record):
- if not BBLogFilter.filter(self, record):
- return False
- if record.levelno < logging.ERROR:
- return True
- return False
-
-# Message control functions
-#
-
-loggerDefaultDebugLevel = 0
-loggerDefaultVerbose = False
-loggerVerboseLogs = False
-loggerDefaultDomains = []
-
-def init_msgconfig(verbose, debug, debug_domains=None):
- """
- Set the default verbosity and debug levels and configure the logger
- """
- bb.msg.loggerDefaultDebugLevel = debug
- bb.msg.loggerDefaultVerbose = verbose
- if verbose:
- bb.msg.loggerVerboseLogs = True
- if debug_domains:
- bb.msg.loggerDefaultDomains = debug_domains
- else:
- bb.msg.loggerDefaultDomains = []
-
-def constructLogOptions():
- debug = loggerDefaultDebugLevel
- verbose = loggerDefaultVerbose
- domains = loggerDefaultDomains
-
- if debug:
- level = BBLogFormatter.DEBUG - debug + 1
- elif verbose:
- level = BBLogFormatter.VERBOSE
- else:
- level = BBLogFormatter.NOTE
-
- debug_domains = {}
- for (domainarg, iterator) in groupby(domains):
- dlevel = len(tuple(iterator))
- debug_domains["BitBake.%s" % domainarg] = logging.DEBUG - dlevel + 1
- return level, debug_domains
-
-def addDefaultlogFilter(handler, cls = BBLogFilter):
- level, debug_domains = constructLogOptions()
-
- cls(handler, level, debug_domains)
-
-#
-# Message handling functions
-#
-
-def fatal(msgdomain, msg):
- if msgdomain:
- logger = logging.getLogger("BitBake.%s" % msgdomain)
- else:
- logger = logging.getLogger("BitBake")
- logger.critical(msg)
- sys.exit(1)
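
To illustrate the level mapping above: the first -D selects DEBUG, each
additional -D lowers the threshold one step further, and repeating a domain
with -l deepens that domain's level the same way. A sketch traced from the
code (the domain name is illustrative):

    init_msgconfig(verbose=False, debug=2, debug_domains=["Fetcher", "Fetcher"])
    level, debug_domains = constructLogOptions()

    assert level == BBLogFormatter.DEBUG - 1                    # DEBUG2
    assert debug_domains == {"BitBake.Fetcher": logging.DEBUG - 1}
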
diff --git a/yocto-poky/bitbake/lib/bb/namedtuple_with_abc.py b/yocto-poky/bitbake/lib/bb/namedtuple_with_abc.py
deleted file mode 100644
index 32f2fc642..000000000
--- a/yocto-poky/bitbake/lib/bb/namedtuple_with_abc.py
+++ /dev/null
@@ -1,255 +0,0 @@
-# http://code.activestate.com/recipes/577629-namedtupleabc-abstract-base-class-mix-in-for-named/
-#!/usr/bin/env python
-# Copyright (c) 2011 Jan Kaliszewski (zuo). Available under the MIT License.
-
-"""
-namedtuple_with_abc.py:
-* named tuple mix-in + ABC (abstract base class) recipe,
-* works under Python 2.6, 2.7 as well as 3.x.
-
-Import this module to patch collections.namedtuple() factory function
--- enriching it with the 'abc' attribute (an abstract base class + mix-in
-for named tuples) and decorating it with a wrapper that registers each
-newly created named tuple as a subclass of namedtuple.abc.
-
-How to import:
- import collections, namedtuple_with_abc
-or:
- import namedtuple_with_abc
- from collections import namedtuple
- # ^ in this variant you must import namedtuple function
- # *after* importing namedtuple_with_abc module
-or simply:
- from namedtuple_with_abc import namedtuple
-
-Simple usage example:
- class Credentials(namedtuple.abc):
- _fields = 'username password'
- def __str__(self):
- return ('{0.__class__.__name__}'
- '(username={0.username}, password=...)'.format(self))
- print(Credentials("alice", "Alice's password"))
-
-For more advanced examples -- see below the "if __name__ == '__main__':".
-"""
-
-import collections
-from abc import ABCMeta, abstractproperty
-from functools import wraps
-from sys import version_info
-
-__all__ = ('namedtuple',)
-_namedtuple = collections.namedtuple
-
-
-class _NamedTupleABCMeta(ABCMeta):
- '''The metaclass for the abstract base class + mix-in for named tuples.'''
- def __new__(mcls, name, bases, namespace):
- fields = namespace.get('_fields')
- for base in bases:
- if fields is not None:
- break
- fields = getattr(base, '_fields', None)
- if not isinstance(fields, abstractproperty):
- basetuple = _namedtuple(name, fields)
- bases = (basetuple,) + bases
- namespace.pop('_fields', None)
- namespace.setdefault('__doc__', basetuple.__doc__)
- namespace.setdefault('__slots__', ())
- return ABCMeta.__new__(mcls, name, bases, namespace)
-
-
-exec(
- # Python 2.x metaclass declaration syntax
- """class _NamedTupleABC(object):
- '''The abstract base class + mix-in for named tuples.'''
- __metaclass__ = _NamedTupleABCMeta
- _fields = abstractproperty()""" if version_info[0] < 3 else
- # Python 3.x metaclass declaration syntax
- """class _NamedTupleABC(metaclass=_NamedTupleABCMeta):
- '''The abstract base class + mix-in for named tuples.'''
- _fields = abstractproperty()"""
-)
-
-
-_namedtuple.abc = _NamedTupleABC
-#_NamedTupleABC.register(type(version_info)) # (and similar, in the future...)
-
-@wraps(_namedtuple)
-def namedtuple(*args, **kwargs):
- '''Named tuple factory with namedtuple.abc subclass registration.'''
- cls = _namedtuple(*args, **kwargs)
- _NamedTupleABC.register(cls)
- return cls
-
-collections.namedtuple = namedtuple
-
-
-
-
-if __name__ == '__main__':
-
- '''Examples and explanations'''
-
- # Simple usage
-
- class MyRecord(namedtuple.abc):
- _fields = 'x y z' # such form will be transformed into ('x', 'y', 'z')
- def _my_custom_method(self):
- return list(self._asdict().items())
- # (the '_fields' attribute belongs to the named tuple public API anyway)
-
- rec = MyRecord(1, 2, 3)
- print(rec)
- print(rec._my_custom_method())
- print(rec._replace(y=222))
- print(rec._replace(y=222)._my_custom_method())
-
- # Custom abstract classes...
-
- class MyAbstractRecord(namedtuple.abc):
- def _my_custom_method(self):
- return list(self._asdict().items())
-
- try:
- MyAbstractRecord() # (abstract classes cannot be instantiated)
- except TypeError as exc:
- print(exc)
-
- class AnotherAbstractRecord(MyAbstractRecord):
- def __str__(self):
- return '<<<{0}>>>'.format(super(AnotherAbstractRecord,
- self).__str__())
-
- # ...and their non-abstract subclasses
-
- class MyRecord2(MyAbstractRecord):
- _fields = 'a, b'
-
- class MyRecord3(AnotherAbstractRecord):
- _fields = 'p', 'q', 'r'
-
- rec2 = MyRecord2('foo', 'bar')
- print(rec2)
- print(rec2._my_custom_method())
- print(rec2._replace(b=222))
- print(rec2._replace(b=222)._my_custom_method())
-
- rec3 = MyRecord3('foo', 'bar', 'baz')
- print(rec3)
- print(rec3._my_custom_method())
- print(rec3._replace(q=222))
- print(rec3._replace(q=222)._my_custom_method())
-
- # You can also subclass non-abstract ones...
-
- class MyRecord33(MyRecord3):
- def __str__(self):
- return '< {0!r}, ..., {1!r} >'.format(self.p, self.r)
-
- rec33 = MyRecord33('foo', 'bar', 'baz')
- print(rec33)
- print(rec33._my_custom_method())
- print(rec33._replace(q=222))
- print(rec33._replace(q=222)._my_custom_method())
-
- # ...and even override the magic '_fields' attribute again
-
- class MyRecord345(MyRecord3):
- _fields = 'e f g h i j k'
-
- rec345 = MyRecord345(1, 2, 3, 4, 3, 2, 1)
- print(rec345)
- print(rec345._my_custom_method())
- print(rec345._replace(f=222))
- print(rec345._replace(f=222)._my_custom_method())
-
- # Mixing-in some other classes is also possible:
-
- class MyMixIn(object):
- def method(self):
- return "MyMixIn.method() called"
- def _my_custom_method(self):
- return "MyMixIn._my_custom_method() called"
- def count(self, item):
- return "MyMixIn.count({0}) called".format(item)
- def _asdict(self): # (cannot override a namedtuple method, see below)
- return "MyMixIn._asdict() called"
-
- class MyRecord4(MyRecord33, MyMixIn): # mix-in on the right
- _fields = 'j k l x'
-
- class MyRecord5(MyMixIn, MyRecord33): # mix-in on the left
- _fields = 'j k l x y'
-
- rec4 = MyRecord4(1, 2, 3, 2)
- print(rec4)
- print(rec4.method())
- print(rec4._my_custom_method()) # MyRecord33's
- print(rec4.count(2)) # tuple's
- print(rec4._replace(k=222))
- print(rec4._replace(k=222).method())
- print(rec4._replace(k=222)._my_custom_method()) # MyRecord33's
- print(rec4._replace(k=222).count(8)) # tuple's
-
- rec5 = MyRecord5(1, 2, 3, 2, 1)
- print(rec5)
- print(rec5.method())
- print(rec5._my_custom_method()) # MyMixIn's
- print(rec5.count(2)) # MyMixIn's
- print(rec5._replace(k=222))
- print(rec5._replace(k=222).method())
- print(rec5._replace(k=222)._my_custom_method()) # MyMixIn's
- print(rec5._replace(k=222).count(2)) # MyMixIn's
-
- # Note this behavior: the standard namedtuple methods cannot be
- # overridden by a foreign mix-in -- even if the mix-in is declared
- # as the leftmost base class (but, obviously, you can override them
- # in the defined class or its subclasses):
-
- print(rec4._asdict()) # (returns a dict, not "MyMixIn._asdict() called")
- print(rec5._asdict()) # (returns a dict, not "MyMixIn._asdict() called")
-
- class MyRecord6(MyRecord33):
- _fields = 'j k l x y z'
- def _asdict(self):
- return "MyRecord6._asdict() called"
- rec6 = MyRecord6(1, 2, 3, 1, 2, 3)
- print(rec6._asdict()) # (this returns "MyRecord6._asdict() called")
-
- # All these record classes are real subclasses of namedtuple.abc:
-
- assert issubclass(MyRecord, namedtuple.abc)
- assert issubclass(MyAbstractRecord, namedtuple.abc)
- assert issubclass(AnotherAbstractRecord, namedtuple.abc)
- assert issubclass(MyRecord2, namedtuple.abc)
- assert issubclass(MyRecord3, namedtuple.abc)
- assert issubclass(MyRecord33, namedtuple.abc)
- assert issubclass(MyRecord345, namedtuple.abc)
- assert issubclass(MyRecord4, namedtuple.abc)
- assert issubclass(MyRecord5, namedtuple.abc)
- assert issubclass(MyRecord6, namedtuple.abc)
-
- # ...but abstract ones are not subclasses of tuple
- # (and this is what you probably want):
-
- assert not issubclass(MyAbstractRecord, tuple)
- assert not issubclass(AnotherAbstractRecord, tuple)
-
- assert issubclass(MyRecord, tuple)
- assert issubclass(MyRecord2, tuple)
- assert issubclass(MyRecord3, tuple)
- assert issubclass(MyRecord33, tuple)
- assert issubclass(MyRecord345, tuple)
- assert issubclass(MyRecord4, tuple)
- assert issubclass(MyRecord5, tuple)
- assert issubclass(MyRecord6, tuple)
-
- # Named tuple classes created with namedtuple() factory function
- # (in the "traditional" way) are registered as "virtual" subclasses
- # of namedtuple.abc:
-
- MyTuple = namedtuple('MyTuple', 'a b c')
- mt = MyTuple(1, 2, 3)
- assert issubclass(MyTuple, namedtuple.abc)
- assert isinstance(mt, namedtuple.abc)
diff --git a/yocto-poky/bitbake/lib/bb/parse/__init__.py b/yocto-poky/bitbake/lib/bb/parse/__init__.py
deleted file mode 100644
index 26ae7ead8..000000000
--- a/yocto-poky/bitbake/lib/bb/parse/__init__.py
+++ /dev/null
@@ -1,170 +0,0 @@
-"""
-BitBake Parsers
-
-File parsers for the BitBake build tools.
-
-"""
-
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-handlers = []
-
-import errno
-import logging
-import os
-import stat
-import bb
-import bb.utils
-import bb.siggen
-
-logger = logging.getLogger("BitBake.Parsing")
-
-class ParseError(Exception):
- """Exception raised when parsing fails"""
- def __init__(self, msg, filename, lineno=0):
- self.msg = msg
- self.filename = filename
- self.lineno = lineno
- Exception.__init__(self, msg, filename, lineno)
-
- def __str__(self):
- if self.lineno:
- return "ParseError at %s:%d: %s" % (self.filename, self.lineno, self.msg)
- else:
- return "ParseError in %s: %s" % (self.filename, self.msg)
-
-class SkipRecipe(Exception):
- """Exception raised to skip this recipe"""
-
-class SkipPackage(SkipRecipe):
- """Exception raised to skip this recipe (use SkipRecipe in new code)"""
-
-__mtime_cache = {}
-def cached_mtime(f):
- if f not in __mtime_cache:
- __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
- return __mtime_cache[f]
-
-def cached_mtime_noerror(f):
- if f not in __mtime_cache:
- try:
- __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
- except OSError:
- return 0
- return __mtime_cache[f]
-
-def update_mtime(f):
- try:
- __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
- except OSError:
- if f in __mtime_cache:
- del __mtime_cache[f]
- return 0
- return __mtime_cache[f]
-
-def update_cache(f):
- if f in __mtime_cache:
- logger.debug(1, "Updating mtime cache for %s" % f)
- update_mtime(f)
-
-def mark_dependency(d, f):
- if f.startswith('./'):
- f = "%s/%s" % (os.getcwd(), f[2:])
- deps = (d.getVar('__depends', False) or [])
- s = (f, cached_mtime_noerror(f))
- if s not in deps:
- deps.append(s)
- d.setVar('__depends', deps)
-
-def check_dependency(d, f):
- s = (f, cached_mtime_noerror(f))
- deps = (d.getVar('__depends', False) or [])
- return s in deps
-
-def supports(fn, data):
- """Returns true if we have a handler for this file, false otherwise"""
- for h in handlers:
- if h['supports'](fn, data):
- return 1
- return 0
-
-def handle(fn, data, include = 0):
- """Call the handler that is appropriate for this file"""
- for h in handlers:
- if h['supports'](fn, data):
- with data.inchistory.include(fn):
- return h['handle'](fn, data, include)
- raise ParseError("not a BitBake file", fn)
-
-def init(fn, data):
- for h in handlers:
- if h['supports'](fn):
- return h['init'](data)
-
-def init_parser(d):
- bb.parse.siggen = bb.siggen.init(d)
-
-def resolve_file(fn, d):
- if not os.path.isabs(fn):
- bbpath = d.getVar("BBPATH", True)
- newfn, attempts = bb.utils.which(bbpath, fn, history=True)
- for af in attempts:
- mark_dependency(d, af)
- if not newfn:
- raise IOError(errno.ENOENT, "file %s not found in %s" % (fn, bbpath))
- fn = newfn
-
- mark_dependency(d, fn)
- if not os.path.isfile(fn):
- raise IOError(errno.ENOENT, "file %s not found" % fn)
-
- return fn
-
-# Used by OpenEmbedded metadata
-__pkgsplit_cache__={}
-def vars_from_file(mypkg, d):
- if not mypkg or not mypkg.endswith((".bb", ".bbappend")):
- return (None, None, None)
- if mypkg in __pkgsplit_cache__:
- return __pkgsplit_cache__[mypkg]
-
- myfile = os.path.splitext(os.path.basename(mypkg))
- parts = myfile[0].split('_')
- __pkgsplit_cache__[mypkg] = parts
- if len(parts) > 3:
- raise ParseError("Unable to generate default variables from filename (too many underscores)", mypkg)
- exp = 3 - len(parts)
- tmplist = []
- while exp != 0:
- exp -= 1
- tmplist.append(None)
- parts.extend(tmplist)
- return parts
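-
-# For illustration (the filename here is hypothetical), a recipe filename
-# is split into a [PN, PV, PR] triple, padded with None for missing parts:
-# vars_from_file("net-tools_1.60.bb", d) -> ["net-tools", "1.60", None]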
-
-def get_file_depends(d):
- '''Return the dependent files'''
- dep_files = []
- depends = d.getVar('__base_depends', False) or []
- depends = depends + (d.getVar('__depends', False) or [])
- for (fn, _) in depends:
- dep_files.append(os.path.abspath(fn))
- return " ".join(dep_files)
-
-from bb.parse.parse_py import __version__, ConfHandler, BBHandler
diff --git a/yocto-poky/bitbake/lib/bb/parse/ast.py b/yocto-poky/bitbake/lib/bb/parse/ast.py
deleted file mode 100644
index 5f55af5ef..000000000
--- a/yocto-poky/bitbake/lib/bb/parse/ast.py
+++ /dev/null
@@ -1,476 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
- AbstractSyntaxTree classes for the Bitbake language
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-# Copyright (C) 2009 Holger Hans Peter Freyther
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-from __future__ import absolute_import
-from future_builtins import filter
-import re
-import string
-import logging
-import bb
-import itertools
-from bb import methodpool
-from bb.parse import logger
-
-_bbversions_re = re.compile(r"\[(?P<from>[0-9]+)-(?P<to>[0-9]+)\]")
-
-class StatementGroup(list):
- def eval(self, data):
- for statement in self:
- statement.eval(data)
-
-class AstNode(object):
- def __init__(self, filename, lineno):
- self.filename = filename
- self.lineno = lineno
-
-class IncludeNode(AstNode):
- def __init__(self, filename, lineno, what_file, force):
- AstNode.__init__(self, filename, lineno)
- self.what_file = what_file
- self.force = force
-
- def eval(self, data):
- """
- Include the file and evaluate the statements
- """
- s = data.expand(self.what_file)
- logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s)
-
- # TODO: Cache those includes... maybe not here though
- if self.force:
- bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, "include required")
- else:
- bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, False)
-
-class ExportNode(AstNode):
- def __init__(self, filename, lineno, var):
- AstNode.__init__(self, filename, lineno)
- self.var = var
-
- def eval(self, data):
- data.setVarFlag(self.var, "export", 1, op = 'exported')
-
-class DataNode(AstNode):
- """
- Various data-related updates. For the sake of sanity
- we have one class doing all of this. This means that all
- of it needs to be re-evaluated... we might be able to do
- that faster with multiple classes.
- """
- def __init__(self, filename, lineno, groupd):
- AstNode.__init__(self, filename, lineno)
- self.groupd = groupd
-
- def getFunc(self, key, data):
- if 'flag' in self.groupd and self.groupd['flag'] != None:
- return data.getVarFlag(key, self.groupd['flag'], expand=False, noweakdefault=True)
- else:
- return data.getVar(key, False, noweakdefault=True, parsing=True)
-
- def eval(self, data):
- groupd = self.groupd
- key = groupd["var"]
- loginfo = {
- 'variable': key,
- 'file': self.filename,
- 'line': self.lineno,
- }
- if "exp" in groupd and groupd["exp"] != None:
- data.setVarFlag(key, "export", 1, op = 'exported', **loginfo)
-
- op = "set"
- if "ques" in groupd and groupd["ques"] != None:
- val = self.getFunc(key, data)
- op = "set?"
- if val == None:
- val = groupd["value"]
- elif "colon" in groupd and groupd["colon"] != None:
- e = data.createCopy()
- bb.data.update_data(e)
- op = "immediate"
- val = e.expand(groupd["value"], key + "[:=]")
- elif "append" in groupd and groupd["append"] != None:
- op = "append"
- val = "%s %s" % ((self.getFunc(key, data) or ""), groupd["value"])
- elif "prepend" in groupd and groupd["prepend"] != None:
- op = "prepend"
- val = "%s %s" % (groupd["value"], (self.getFunc(key, data) or ""))
- elif "postdot" in groupd and groupd["postdot"] != None:
- op = "postdot"
- val = "%s%s" % ((self.getFunc(key, data) or ""), groupd["value"])
- elif "predot" in groupd and groupd["predot"] != None:
- op = "predot"
- val = "%s%s" % (groupd["value"], (self.getFunc(key, data) or ""))
- else:
- val = groupd["value"]
-
- flag = None
- if 'flag' in groupd and groupd['flag'] != None:
- flag = groupd['flag']
- elif groupd["lazyques"]:
- flag = "_defaultval"
-
- loginfo['op'] = op
- loginfo['detail'] = groupd["value"]
-
- if flag:
- data.setVarFlag(key, flag, val, **loginfo)
- else:
- data.setVar(key, val, parsing=True, **loginfo)
-
-class MethodNode(AstNode):
- tr_tbl = string.maketrans('/.+-@%&', '_______')
-
- def __init__(self, filename, lineno, func_name, body, python, fakeroot):
- AstNode.__init__(self, filename, lineno)
- self.func_name = func_name
- self.body = body
- self.python = python
- self.fakeroot = fakeroot
-
- def eval(self, data):
- text = '\n'.join(self.body)
- funcname = self.func_name
- if self.func_name == "__anonymous":
- funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(MethodNode.tr_tbl)))
- self.python = True
- text = "def %s(d):\n" % (funcname) + text
- bb.methodpool.insert_method(funcname, text, self.filename, self.lineno - len(self.body))
- anonfuncs = data.getVar('__BBANONFUNCS', False) or []
- anonfuncs.append(funcname)
- data.setVar('__BBANONFUNCS', anonfuncs)
- if data.getVar(funcname, False):
- # clean up old version of this piece of metadata, as its
- # flags could cause problems
- data.delVarFlag(funcname, 'python')
- data.delVarFlag(funcname, 'fakeroot')
- if self.python:
- data.setVarFlag(funcname, "python", "1")
- if self.fakeroot:
- data.setVarFlag(funcname, "fakeroot", "1")
- data.setVarFlag(funcname, "func", 1)
- data.setVar(funcname, text, parsing=True)
- data.setVarFlag(funcname, 'filename', self.filename)
- data.setVarFlag(funcname, 'lineno', str(self.lineno - len(self.body)))
-
-class PythonMethodNode(AstNode):
- def __init__(self, filename, lineno, function, modulename, body):
- AstNode.__init__(self, filename, lineno)
- self.function = function
- self.modulename = modulename
- self.body = body
-
- def eval(self, data):
- # Note we will add root to parsedmethods after having parsed
- # 'this' file. This means we will not parse methods from
- # bb classes twice.
- text = '\n'.join(self.body)
- bb.methodpool.insert_method(self.modulename, text, self.filename, self.lineno - len(self.body) - 1)
- data.setVarFlag(self.function, "func", 1)
- data.setVarFlag(self.function, "python", 1)
- data.setVar(self.function, text, parsing=True)
- data.setVarFlag(self.function, 'filename', self.filename)
- data.setVarFlag(self.function, 'lineno', str(self.lineno - len(self.body) - 1))
-
-class ExportFuncsNode(AstNode):
- def __init__(self, filename, lineno, fns, classname):
- AstNode.__init__(self, filename, lineno)
- self.n = fns.split()
- self.classname = classname
-
- def eval(self, data):
-
- for func in self.n:
- calledfunc = self.classname + "_" + func
-
- if data.getVar(func, False) and not data.getVarFlag(func, 'export_func', False):
- continue
-
- if data.getVar(func, False):
- data.setVarFlag(func, 'python', None)
- data.setVarFlag(func, 'func', None)
-
- for flag in [ "func", "python" ]:
- if data.getVarFlag(calledfunc, flag, False):
- data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag, False))
- for flag in [ "dirs" ]:
- if data.getVarFlag(func, flag, False):
- data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag, False))
- data.setVarFlag(func, "filename", "autogenerated")
- data.setVarFlag(func, "lineno", 1)
-
- if data.getVarFlag(calledfunc, "python", False):
- data.setVar(func, " bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True)
- else:
- if "-" in self.classname:
- bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc))
- data.setVar(func, " " + calledfunc + "\n", parsing=True)
- data.setVarFlag(func, 'export_func', '1')
-
-class AddTaskNode(AstNode):
- def __init__(self, filename, lineno, func, before, after):
- AstNode.__init__(self, filename, lineno)
- self.func = func
- self.before = before
- self.after = after
-
- def eval(self, data):
- bb.build.addtask(self.func, self.before, self.after, data)
-
-class DelTaskNode(AstNode):
- def __init__(self, filename, lineno, func):
- AstNode.__init__(self, filename, lineno)
- self.func = func
-
- def eval(self, data):
- bb.build.deltask(self.func, data)
-
-class BBHandlerNode(AstNode):
- def __init__(self, filename, lineno, fns):
- AstNode.__init__(self, filename, lineno)
- self.hs = fns.split()
-
- def eval(self, data):
- bbhands = data.getVar('__BBHANDLERS', False) or []
- for h in self.hs:
- bbhands.append(h)
- data.setVarFlag(h, "handler", 1)
- data.setVar('__BBHANDLERS', bbhands)
-
-class InheritNode(AstNode):
- def __init__(self, filename, lineno, classes):
- AstNode.__init__(self, filename, lineno)
- self.classes = classes
-
- def eval(self, data):
- bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data)
-
-def handleInclude(statements, filename, lineno, m, force):
- statements.append(IncludeNode(filename, lineno, m.group(1), force))
-
-def handleExport(statements, filename, lineno, m):
- statements.append(ExportNode(filename, lineno, m.group(1)))
-
-def handleData(statements, filename, lineno, groupd):
- statements.append(DataNode(filename, lineno, groupd))
-
-def handleMethod(statements, filename, lineno, func_name, body, python, fakeroot):
- statements.append(MethodNode(filename, lineno, func_name, body, python, fakeroot))
-
-def handlePythonMethod(statements, filename, lineno, funcname, modulename, body):
- statements.append(PythonMethodNode(filename, lineno, funcname, modulename, body))
-
-def handleExportFuncs(statements, filename, lineno, m, classname):
- statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname))
-
-def handleAddTask(statements, filename, lineno, m):
- func = m.group("func")
- before = m.group("before")
- after = m.group("after")
- if func is None:
- return
-
- statements.append(AddTaskNode(filename, lineno, func, before, after))
-
-def handleDelTask(statements, filename, lineno, m):
- func = m.group("func")
- if func is None:
- return
-
- statements.append(DelTaskNode(filename, lineno, func))
-
-def handleBBHandlers(statements, filename, lineno, m):
- statements.append(BBHandlerNode(filename, lineno, m.group(1)))
-
-def handleInherit(statements, filename, lineno, m):
- classes = m.group(1)
- statements.append(InheritNode(filename, lineno, classes))
-
-def finalize(fn, d, variant = None):
- all_handlers = {}
- for var in d.getVar('__BBHANDLERS', False) or []:
- # try to add the handler
- handlerfn = d.getVarFlag(var, "filename", False)
- handlerln = int(d.getVarFlag(var, "lineno", False))
- bb.event.register(var, d.getVar(var, False), (d.getVarFlag(var, "eventmask", True) or "").split(), handlerfn, handlerln)
-
- bb.event.fire(bb.event.RecipePreFinalise(fn), d)
-
- bb.data.expandKeys(d)
- bb.data.update_data(d)
- code = []
- for funcname in d.getVar("__BBANONFUNCS", False) or []:
- code.append("%s(d)" % funcname)
- bb.utils.better_exec("\n".join(code), {"d": d})
- bb.data.update_data(d)
-
- tasklist = d.getVar('__BBTASKS', False) or []
- bb.build.add_tasks(tasklist, d)
-
- bb.parse.siggen.finalise(fn, d, variant)
-
- d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))
-
- bb.event.fire(bb.event.RecipeParsed(fn), d)
-
-def _create_variants(datastores, names, function, onlyfinalise):
- def create_variant(name, orig_d, arg = None):
- if onlyfinalise and name not in onlyfinalise:
- return
- new_d = bb.data.createCopy(orig_d)
- function(arg or name, new_d)
- datastores[name] = new_d
-
- for variant, variant_d in datastores.items():
- for name in names:
- if not variant:
- # Based on main recipe
- create_variant(name, variant_d)
- else:
- create_variant("%s-%s" % (variant, name), variant_d, name)
-
-def _expand_versions(versions):
- def expand_one(version, start, end):
- for i in xrange(start, end + 1):
- ver = _bbversions_re.sub(str(i), version, 1)
- yield ver
-
- versions = iter(versions)
- while True:
- try:
- version = next(versions)
- except StopIteration:
- break
-
- range_ver = _bbversions_re.search(version)
- if not range_ver:
- yield version
- else:
- newversions = expand_one(version, int(range_ver.group("from")),
- int(range_ver.group("to")))
- versions = itertools.chain(newversions, versions)
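-
-# As a sketch of the expansion above (the entry value is illustrative):
-# a BBVERSIONS entry such as "1.0.[1-3]" yields "1.0.1", "1.0.2" and
-# "1.0.3", while entries without a [from-to] range pass through unchanged.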
-
-def multi_finalize(fn, d):
- appends = (d.getVar("__BBAPPEND", True) or "").split()
- for append in appends:
- logger.debug(1, "Appending .bbappend file %s to %s", append, fn)
- bb.parse.BBHandler.handle(append, d, True)
-
- onlyfinalise = d.getVar("__ONLYFINALISE", False)
-
- safe_d = d
- d = bb.data.createCopy(safe_d)
- try:
- finalize(fn, d)
- except bb.parse.SkipRecipe as e:
- d.setVar("__SKIPPED", e.args[0])
- datastores = {"": safe_d}
-
- versions = (d.getVar("BBVERSIONS", True) or "").split()
- if versions:
- pv = orig_pv = d.getVar("PV", True)
- baseversions = {}
-
- def verfunc(ver, d, pv_d = None):
- if pv_d is None:
- pv_d = d
-
- overrides = d.getVar("OVERRIDES", True).split(":")
- pv_d.setVar("PV", ver)
- overrides.append(ver)
- bpv = baseversions.get(ver) or orig_pv
- pv_d.setVar("BPV", bpv)
- overrides.append(bpv)
- d.setVar("OVERRIDES", ":".join(overrides))
-
- versions = list(_expand_versions(versions))
- for pos, version in enumerate(list(versions)):
- try:
- pv, bpv = version.split(":", 2)
- except ValueError:
- pass
- else:
- versions[pos] = pv
- baseversions[pv] = bpv
-
- if pv in versions and not baseversions.get(pv):
- versions.remove(pv)
- else:
- pv = versions.pop()
-
- # This is necessary because our existing main datastore
- # has already been finalized with the old PV; we need one
- # that's been finalized with the new PV.
- d = bb.data.createCopy(safe_d)
- verfunc(pv, d, safe_d)
- try:
- finalize(fn, d)
- except bb.parse.SkipRecipe as e:
- d.setVar("__SKIPPED", e.args[0])
-
- _create_variants(datastores, versions, verfunc, onlyfinalise)
-
- extended = d.getVar("BBCLASSEXTEND", True) or ""
- if extended:
- # the following is to support bbextends with arguments, e.g. multilib
- # an example is as follows:
- # BBCLASSEXTEND = "multilib:lib32"
- # it will create foo-lib32, inheriting multilib.bbclass and set
- # BBEXTENDCURR to "multilib" and BBEXTENDVARIANT to "lib32"
- extendedmap = {}
- variantmap = {}
-
- for ext in extended.split():
- eext = ext.split(':', 2)
- if len(eext) > 1:
- extendedmap[ext] = eext[0]
- variantmap[ext] = eext[1]
- else:
- extendedmap[ext] = ext
-
- pn = d.getVar("PN", True)
- def extendfunc(name, d):
- if name != extendedmap[name]:
- d.setVar("BBEXTENDCURR", extendedmap[name])
- d.setVar("BBEXTENDVARIANT", variantmap[name])
- else:
- d.setVar("PN", "%s-%s" % (pn, name))
- bb.parse.BBHandler.inherit(extendedmap[name], fn, 0, d)
-
- safe_d.setVar("BBCLASSEXTEND", extended)
- _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise)
-
- for variant, variant_d in datastores.iteritems():
- if variant:
- try:
- if not onlyfinalise or variant in onlyfinalise:
- finalize(fn, variant_d, variant)
- except bb.parse.SkipRecipe as e:
- variant_d.setVar("__SKIPPED", e.args[0])
-
- if len(datastores) > 1:
- variants = filter(None, datastores.iterkeys())
- safe_d.setVar("__VARIANTS", " ".join(variants))
-
- datastores[""] = d
- return datastores
diff --git a/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py b/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
deleted file mode 100644
index ef72c3700..000000000
--- a/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ /dev/null
@@ -1,254 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
- class for handling .bb files
-
- Reads a .bb file and obtains its metadata
-
-"""
-
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-from __future__ import absolute_import
-import re, bb, os
-import logging
-import bb.build, bb.utils
-from bb import data
-
-from . import ConfHandler
-from .. import resolve_file, ast, logger, ParseError
-from .ConfHandler import include, init
-
-# For compatibility
-bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
-
-__func_start_regexp__ = re.compile( r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
-__inherit_regexp__ = re.compile( r"inherit\s+(.+)" )
-__export_func_regexp__ = re.compile( r"EXPORT_FUNCTIONS\s+(.+)" )
-__addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
-__deltask_regexp__ = re.compile(r"deltask\s+(?P<func>\w+)")
-__addhandler_regexp__ = re.compile( r"addhandler\s+(.+)" )
-__def_regexp__ = re.compile( r"def\s+(\w+).*:" )
-__python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" )
-
-__infunc__ = []
-__inpython__ = False
-__body__ = []
-__classname__ = ""
-
-cached_statements = {}
-
-def supports(fn, d):
- """Return True if fn has a supported extension"""
- return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
-
-def inherit(files, fn, lineno, d):
- __inherit_cache = d.getVar('__inherit_cache', False) or []
- files = d.expand(files).split()
- for file in files:
- if not os.path.isabs(file) and not file.endswith(".bbclass"):
- file = os.path.join('classes', '%s.bbclass' % file)
-
- if not os.path.isabs(file):
- bbpath = d.getVar("BBPATH", True)
- abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
- for af in attempts:
- if af != abs_fn:
- bb.parse.mark_dependency(d, af)
- if abs_fn:
- file = abs_fn
-
- if not file in __inherit_cache:
- logger.debug(1, "Inheriting %s (from %s:%d)" % (file, fn, lineno))
- __inherit_cache.append( file )
- d.setVar('__inherit_cache', __inherit_cache)
- include(fn, file, lineno, d, "inherit")
- __inherit_cache = d.getVar('__inherit_cache', False) or []
-
-def get_statements(filename, absolute_filename, base_name):
- global cached_statements
-
- try:
- return cached_statements[absolute_filename]
- except KeyError:
- file = open(absolute_filename, 'r')
- statements = ast.StatementGroup()
-
- lineno = 0
- while True:
- lineno = lineno + 1
- s = file.readline()
- if not s: break
- s = s.rstrip()
- feeder(lineno, s, filename, base_name, statements)
- file.close()
- if __inpython__:
- # add a blank line to close out any python definition
- feeder(lineno, "", filename, base_name, statements, eof=True)
-
- if filename.endswith(".bbclass") or filename.endswith(".inc"):
- cached_statements[absolute_filename] = statements
- return statements
-
-def handle(fn, d, include):
- global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__
- __body__ = []
- __infunc__ = []
- __classname__ = ""
- __residue__ = []
-
- base_name = os.path.basename(fn)
- (root, ext) = os.path.splitext(base_name)
- init(d)
-
- if ext == ".bbclass":
- __classname__ = root
- __inherit_cache = d.getVar('__inherit_cache', False) or []
- if not fn in __inherit_cache:
- __inherit_cache.append(fn)
- d.setVar('__inherit_cache', __inherit_cache)
-
- if include != 0:
- oldfile = d.getVar('FILE', False)
- else:
- oldfile = None
-
- abs_fn = resolve_file(fn, d)
-
- if include:
- bb.parse.mark_dependency(d, abs_fn)
-
- # actual loading
- statements = get_statements(fn, abs_fn, base_name)
-
- # DONE WITH PARSING... time to evaluate
- if ext != ".bbclass" and abs_fn != oldfile:
- d.setVar('FILE', abs_fn)
-
- try:
- statements.eval(d)
- except bb.parse.SkipRecipe:
- bb.data.setVar("__SKIPPED", True, d)
- if include == 0:
- return { "" : d }
-
- if __infunc__:
- raise ParseError("Shell function %s is never closed" % __infunc__[0], __infunc__[1], __infunc__[2])
- if __residue__:
- raise ParseError("Leftover unparsed (incomplete?) data %s from %s" % __residue__, fn)
-
- if ext != ".bbclass" and include == 0:
- return ast.multi_finalize(fn, d)
-
- if ext != ".bbclass" and oldfile and abs_fn != oldfile:
- d.setVar("FILE", oldfile)
-
- return d
-
-def feeder(lineno, s, fn, root, statements, eof=False):
- global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, bb, __residue__, __classname__
- if __infunc__:
- if s == '}':
- __body__.append('')
- ast.handleMethod(statements, fn, lineno, __infunc__[0], __body__, __infunc__[3], __infunc__[4])
- __infunc__ = []
- __body__ = []
- else:
- __body__.append(s)
- return
-
- if __inpython__:
- m = __python_func_regexp__.match(s)
- if m and not eof:
- __body__.append(s)
- return
- else:
- ast.handlePythonMethod(statements, fn, lineno, __inpython__,
- root, __body__)
- __body__ = []
- __inpython__ = False
-
- if eof:
- return
-
- if s and s[0] == '#':
- if len(__residue__) != 0 and __residue__[0][0] != "#":
- bb.fatal("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s))
-
- if len(__residue__) != 0 and __residue__[0][0] == "#" and (not s or s[0] != "#"):
- bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s))
-
- if s and s[-1] == '\\':
- __residue__.append(s[:-1])
- return
-
- s = "".join(__residue__) + s
- __residue__ = []
-
- # Skip empty lines
- if s == '':
- return
-
- # Skip comments
- if s[0] == '#':
- return
-
- m = __func_start_regexp__.match(s)
- if m:
- __infunc__ = [m.group("func") or "__anonymous", fn, lineno, m.group("py") is not None, m.group("fr") is not None]
- return
-
- m = __def_regexp__.match(s)
- if m:
- __body__.append(s)
- __inpython__ = m.group(1)
-
- return
-
- m = __export_func_regexp__.match(s)
- if m:
- ast.handleExportFuncs(statements, fn, lineno, m, __classname__)
- return
-
- m = __addtask_regexp__.match(s)
- if m:
- ast.handleAddTask(statements, fn, lineno, m)
- return
-
- m = __deltask_regexp__.match(s)
- if m:
- ast.handleDelTask(statements, fn, lineno, m)
- return
-
- m = __addhandler_regexp__.match(s)
- if m:
- ast.handleBBHandlers(statements, fn, lineno, m)
- return
-
- m = __inherit_regexp__.match(s)
- if m:
- ast.handleInherit(statements, fn, lineno, m)
- return
-
- return ConfHandler.feeder(lineno, s, fn, statements)
-
-# Add us to the handlers list
-from .. import handlers
-handlers.append({'supports': supports, 'handle': handle, 'init': init})
-del handlers
diff --git a/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
deleted file mode 100644
index fbd75b14a..000000000
--- a/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ /dev/null
@@ -1,193 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
- class for handling configuration data files
-
- Reads a .conf file and obtains its metadata
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import errno
-import re
-import os
-import bb.utils
-from bb.parse import ParseError, resolve_file, ast, logger, handle
-
-__config_regexp__ = re.compile( r"""
- ^
- (?P<exp>export\s*)?
- (?P<var>[a-zA-Z0-9\-~_+.${}/]+?)
- (\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?
-
- \s* (
- (?P<colon>:=) |
- (?P<lazyques>\?\?=) |
- (?P<ques>\?=) |
- (?P<append>\+=) |
- (?P<prepend>=\+) |
- (?P<predot>=\.) |
- (?P<postdot>\.=) |
- =
- ) \s*
-
- (?!'[^']*'[^']*'$)
- (?!\"[^\"]*\"[^\"]*\"$)
- (?P<apo>['\"])
- (?P<value>.*)
- (?P=apo)
- $
- """, re.X)
-__include_regexp__ = re.compile( r"include\s+(.+)" )
-__require_regexp__ = re.compile( r"require\s+(.+)" )
-__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/]+)$" )
-
-def init(data):
- topdir = data.getVar('TOPDIR', False)
- if not topdir:
- data.setVar('TOPDIR', os.getcwd())
-
-
-def supports(fn, d):
- return fn[-5:] == ".conf"
-
-def include(parentfn, fn, lineno, data, error_out):
- """
- error_out: A string indicating the verb (e.g. "include", "inherit") to be
- used in a ParseError that will be raised if the file to be included could
- not be included. Specify False to avoid raising an error in this case.
- """
- if parentfn == fn: # prevent infinite recursion
- return None
-
- fn = data.expand(fn)
- parentfn = data.expand(parentfn)
-
- if not os.path.isabs(fn):
- dname = os.path.dirname(parentfn)
- bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
- abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
- if abs_fn and bb.parse.check_dependency(data, abs_fn):
- logger.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
- for af in attempts:
- bb.parse.mark_dependency(data, af)
- if abs_fn:
- fn = abs_fn
- elif bb.parse.check_dependency(data, fn):
- logger.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
-
- try:
- bb.parse.handle(fn, data, True)
- except (IOError, OSError) as exc:
- if exc.errno == errno.ENOENT:
- if error_out:
- raise ParseError("Could not %s file %s" % (error_out, fn), parentfn, lineno)
- logger.debug(2, "CONF file '%s' not found", fn)
- else:
- if error_out:
- raise ParseError("Could not %s file %s: %s" % (error_out, fn, exc.strerror), parentfn, lineno)
- else:
- raise ParseError("Error parsing %s: %s" % (fn, exc.strerror), parentfn, lineno)
-
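-# e.g. include(parentfn, "conf/foo.conf", lineno, data, False) skips a
-# missing file silently, while error_out="require" raises a ParseError
-# of the form "Could not require file ..." ("conf/foo.conf" is illustrative).
-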
-# We have an issue where a UI might want to enforce particular settings such as
-# an empty DISTRO variable. If configuration files do something like assigning
-# a weak default, it turns out to be very difficult to filter out these changes,
-# particularly when the weak default might appear halfway through parsing a chain
-# of configuration files. We therefore let the UIs hook into configuration file
-# parsing. This turns out to be a hard problem to solve any other way.
-confFilters = []
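-
-# As a sketch of that hook: a UI appends a callable taking (fn, data).
-# The filter below is purely illustrative, not part of any shipped UI.
-#
-# def forceEmptyDistro(fn, data):
-#     data.setVar("DISTRO", "")
-#
-# confFilters.append(forceEmptyDistro)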
-
-def handle(fn, data, include):
- init(data)
-
- if include == 0:
- oldfile = None
- else:
- oldfile = data.getVar('FILE', False)
-
- abs_fn = resolve_file(fn, data)
- f = open(abs_fn, 'r')
-
- if include:
- bb.parse.mark_dependency(data, abs_fn)
-
- statements = ast.StatementGroup()
- lineno = 0
- while True:
- lineno = lineno + 1
- s = f.readline()
- if not s:
- break
- w = s.strip()
- # skip empty lines
- if not w:
- continue
- s = s.rstrip()
- while s[-1] == '\\':
- s2 = f.readline().strip()
- lineno = lineno + 1
- if (not s2 or s2[0] != "#") and s[0] == "#":
- bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s))
- s = s[:-1] + s2
- # skip comments
- if s[0] == '#':
- continue
- feeder(lineno, s, abs_fn, statements)
-
- # DONE WITH PARSING... time to evaluate
- data.setVar('FILE', abs_fn)
- statements.eval(data)
- if oldfile:
- data.setVar('FILE', oldfile)
-
- f.close()
-
- for f in confFilters:
- f(fn, data)
-
- return data
-
-def feeder(lineno, s, fn, statements):
- m = __config_regexp__.match(s)
- if m:
- groupd = m.groupdict()
- ast.handleData(statements, fn, lineno, groupd)
- return
-
- m = __include_regexp__.match(s)
- if m:
- ast.handleInclude(statements, fn, lineno, m, False)
- return
-
- m = __require_regexp__.match(s)
- if m:
- ast.handleInclude(statements, fn, lineno, m, True)
- return
-
- m = __export_regexp__.match(s)
- if m:
- ast.handleExport(statements, fn, lineno, m)
- return
-
- raise ParseError("unparsed line: '%s'" % s, fn, lineno);
-
-# Add us to the handlers list
-from bb.parse import handlers
-handlers.append({'supports': supports, 'handle': handle, 'init': init})
-del handlers
diff --git a/yocto-poky/bitbake/lib/bb/parse/parse_py/__init__.py b/yocto-poky/bitbake/lib/bb/parse/parse_py/__init__.py
deleted file mode 100644
index 3e658d0de..000000000
--- a/yocto-poky/bitbake/lib/bb/parse/parse_py/__init__.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake Parsers
-
-File parsers for the BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-from __future__ import absolute_import
-from . import ConfHandler
-from . import BBHandler
-
-__version__ = '1.0'
diff --git a/yocto-poky/bitbake/lib/bb/persist_data.py b/yocto-poky/bitbake/lib/bb/persist_data.py
deleted file mode 100644
index e45042324..000000000
--- a/yocto-poky/bitbake/lib/bb/persist_data.py
+++ /dev/null
@@ -1,218 +0,0 @@
-"""BitBake Persistent Data Store
-
-Used to store data in a central location such that other threads/tasks can
-access them at some future date. Acts as a convenience wrapper around sqlite,
-currently, providing a key/value store accessed by 'domain'.
-"""
-
-# Copyright (C) 2007 Richard Purdie
-# Copyright (C) 2010 Chris Larson <chris_larson@mentor.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import collections
-import logging
-import os.path
-import sys
-import warnings
-from bb.compat import total_ordering
-from collections import Mapping
-
-try:
- import sqlite3
-except ImportError:
- from pysqlite2 import dbapi2 as sqlite3
-
-sqlversion = sqlite3.sqlite_version_info
-if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
- raise Exception("sqlite3 version 3.3.0 or later is required.")
-
-
-logger = logging.getLogger("BitBake.PersistData")
-if hasattr(sqlite3, 'enable_shared_cache'):
- try:
- sqlite3.enable_shared_cache(True)
- except sqlite3.OperationalError:
- pass
-
-
-@total_ordering
-class SQLTable(collections.MutableMapping):
- """Object representing a table/domain in the database"""
- def __init__(self, cachefile, table):
- self.cachefile = cachefile
- self.table = table
- self.cursor = connect(self.cachefile)
-
- self._execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);"
- % table)
-
- def _execute(self, *query):
- """Execute a query, waiting to acquire a lock if necessary"""
- count = 0
- while True:
- try:
- return self.cursor.execute(*query)
- except sqlite3.OperationalError as exc:
- if 'database is locked' in str(exc) and count < 500:
- count = count + 1
- self.cursor.close()
- self.cursor = connect(self.cachefile)
- continue
- raise
-
- def __enter__(self):
- self.cursor.__enter__()
- return self
-
- def __exit__(self, *excinfo):
- self.cursor.__exit__(*excinfo)
-
- def __getitem__(self, key):
- data = self._execute("SELECT * from %s where key=?;" %
- self.table, [key])
- for row in data:
- return row[1]
- raise KeyError(key)
-
- def __delitem__(self, key):
- if key not in self:
- raise KeyError(key)
- self._execute("DELETE from %s where key=?;" % self.table, [key])
-
- def __setitem__(self, key, value):
- if not isinstance(key, basestring):
- raise TypeError('Only string keys are supported')
- elif not isinstance(value, basestring):
- raise TypeError('Only string values are supported')
-
- data = self._execute("SELECT * from %s where key=?;" %
- self.table, [key])
- exists = len(list(data))
- if exists:
- self._execute("UPDATE %s SET value=? WHERE key=?;" % self.table,
- [value, key])
- else:
- self._execute("INSERT into %s(key, value) values (?, ?);" %
- self.table, [key, value])
-
- def __contains__(self, key):
- return key in set(self)
-
- def __len__(self):
- data = self._execute("SELECT COUNT(key) FROM %s;" % self.table)
- for row in data:
- return row[0]
-
- def __iter__(self):
- data = self._execute("SELECT key FROM %s;" % self.table)
- return (row[0] for row in data)
-
- def __lt__(self, other):
- if not isinstance(other, Mapping):
- # NotImplemented is not an exception; return it per the comparison protocol
- return NotImplemented
-
- return len(self) < len(other)
-
- def get_by_pattern(self, pattern):
- data = self._execute("SELECT * FROM %s WHERE key LIKE ?;" %
- self.table, [pattern])
- return [row[1] for row in data]
-
- def values(self):
- return list(self.itervalues())
-
- def itervalues(self):
- data = self._execute("SELECT value FROM %s;" % self.table)
- return (row[0] for row in data)
-
- def items(self):
- return list(self.iteritems())
-
- def iteritems(self):
- return self._execute("SELECT * FROM %s;" % self.table)
-
- def clear(self):
- self._execute("DELETE FROM %s;" % self.table)
-
- def has_key(self, key):
- return key in self
-
-
-class PersistData(object):
- """Deprecated representation of the bitbake persistent data store"""
- def __init__(self, d):
- warnings.warn("Use of PersistData is deprecated. Please use "
- "persist(domain, d) instead.",
- category=DeprecationWarning,
- stacklevel=2)
-
- self.data = persist(d)
- logger.debug(1, "Using '%s' as the persistent data cache",
- self.data.filename)
-
- def addDomain(self, domain):
- """
- Add a domain (pending deprecation)
- """
- return self.data[domain]
-
- def delDomain(self, domain):
- """
- Removes a domain and all the data it contains
- """
- del self.data[domain]
-
- def getKeyValues(self, domain):
- """
- Return a list of key + value pairs for a domain
- """
- return self.data[domain].items()
-
- def getValue(self, domain, key):
- """
- Return the value of a key for a domain
- """
- return self.data[domain][key]
-
- def setValue(self, domain, key, value):
- """
- Sets the value of a key for a domain
- """
- self.data[domain][key] = value
-
- def delValue(self, domain, key):
- """
- Deletes a key/value pair
- """
- del self.data[domain][key]
-
-def connect(database):
- connection = sqlite3.connect(database, timeout=5, isolation_level=None)
- connection.execute("pragma synchronous = off;")
- connection.text_factory = str
- return connection
-
-def persist(domain, d):
- """Convenience factory for SQLTable objects based upon metadata"""
- import bb.utils
- cachedir = (d.getVar("PERSISTENT_DIR", True) or
- d.getVar("CACHE", True))
- if not cachedir:
- logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
- sys.exit(1)
-
- bb.utils.mkdirhier(cachedir)
- cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3")
- return SQLTable(cachefile, domain)
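-
-# A usage sketch ('example-domain' and the keys are illustrative); 'd'
-# must have PERSISTENT_DIR or CACHE set:
-#
-# table = persist('example-domain', d)
-# table['some.url'] = 'deadbeef'
-# assert table['some.url'] == 'deadbeef'
-# matches = table.get_by_pattern('some.%')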
diff --git a/yocto-poky/bitbake/lib/bb/process.py b/yocto-poky/bitbake/lib/bb/process.py
deleted file mode 100644
index 1c07f2d9b..000000000
--- a/yocto-poky/bitbake/lib/bb/process.py
+++ /dev/null
@@ -1,156 +0,0 @@
-import logging
-import signal
-import subprocess
-import errno
-import select
-
-logger = logging.getLogger('BitBake.Process')
-
-def subprocess_setup():
- # Python installs a SIGPIPE handler by default. This is usually not what
- # non-Python subprocesses expect.
- signal.signal(signal.SIGPIPE, signal.SIG_DFL)
-
-class CmdError(RuntimeError):
- def __init__(self, command, msg=None):
- self.command = command
- self.msg = msg
-
- def __str__(self):
- if not isinstance(self.command, basestring):
- cmd = subprocess.list2cmdline(self.command)
- else:
- cmd = self.command
-
- msg = "Execution of '%s' failed" % cmd
- if self.msg:
- msg += ': %s' % self.msg
- return msg
-
-class NotFoundError(CmdError):
- def __str__(self):
- return CmdError.__str__(self) + ": command not found"
-
-class ExecutionError(CmdError):
- def __init__(self, command, exitcode, stdout = None, stderr = None):
- CmdError.__init__(self, command)
- self.exitcode = exitcode
- self.stdout = stdout
- self.stderr = stderr
-
- def __str__(self):
- message = ""
- if self.stderr:
- message += self.stderr
- if self.stdout:
- message += self.stdout
- if message:
- message = ":\n" + message
- return (CmdError.__str__(self) +
- " with exit code %s" % self.exitcode + message)
-
-class Popen(subprocess.Popen):
- defaults = {
- "close_fds": True,
- "preexec_fn": subprocess_setup,
- "stdout": subprocess.PIPE,
- "stderr": subprocess.STDOUT,
- "stdin": subprocess.PIPE,
- "shell": False,
- }
-
- def __init__(self, *args, **kwargs):
- options = dict(self.defaults)
- options.update(kwargs)
- subprocess.Popen.__init__(self, *args, **options)
-
-def _logged_communicate(pipe, log, input, extrafiles):
- if pipe.stdin:
- if input is not None:
- pipe.stdin.write(input)
- pipe.stdin.close()
-
- outdata, errdata = [], []
- rin = []
-
- if pipe.stdout is not None:
- bb.utils.nonblockingfd(pipe.stdout.fileno())
- rin.append(pipe.stdout)
- if pipe.stderr is not None:
- bb.utils.nonblockingfd(pipe.stderr.fileno())
- rin.append(pipe.stderr)
- for fobj, _ in extrafiles:
- bb.utils.nonblockingfd(fobj.fileno())
- rin.append(fobj)
-
- def readextras(selected):
- for fobj, func in extrafiles:
- if fobj in selected:
- try:
- data = fobj.read()
- except IOError as err:
- if err.errno not in (errno.EAGAIN, errno.EWOULDBLOCK):
- # anything other than "would block" is a real error
- raise
- data = None
- if data is not None:
- func(data)
-
- try:
- while pipe.poll() is None:
- rlist = rin
- try:
- r, w, e = select.select(rlist, [], [], 1)
- except OSError as e:
- if e.errno != errno.EINTR:
- raise
- # r/w/e are unbound after an interrupted call; retry the select()
- continue
-
- if pipe.stdout in r:
- data = pipe.stdout.read()
- if data is not None:
- outdata.append(data)
- log.write(data)
-
- if pipe.stderr in r:
- data = pipe.stderr.read()
- if data is not None:
- errdata.append(data)
- log.write(data)
-
- readextras(r)
-
- finally:
- log.flush()
-
- readextras([fobj for fobj, _ in extrafiles])
-
- if pipe.stdout is not None:
- pipe.stdout.close()
- if pipe.stderr is not None:
- pipe.stderr.close()
- return ''.join(outdata), ''.join(errdata)
-
-def run(cmd, input=None, log=None, extrafiles=None, **options):
- """Convenience function to run a command and return its output, raising an
- exception when the command fails"""
-
- if not extrafiles:
- extrafiles = []
-
- if isinstance(cmd, basestring) and not "shell" in options:
- options["shell"] = True
-
- try:
- pipe = Popen(cmd, **options)
- except OSError as exc:
- if exc.errno == 2:
- raise NotFoundError(cmd)
- else:
- raise CmdError(cmd, exc)
-
- if log:
- stdout, stderr = _logged_communicate(pipe, log, input, extrafiles)
- else:
- stdout, stderr = pipe.communicate(input)
-
- if pipe.returncode != 0:
- raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
- return stdout, stderr
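-
-# A usage sketch: a string command runs through the shell, a list is
-# executed directly; failures surface as typed exceptions:
-#
-# try:
-#     stdout, stderr = run(["git", "rev-parse", "HEAD"])
-# except NotFoundError:
-#     pass  # the command does not exist
-# except ExecutionError as exc:
-#     pass  # inspect exc.exitcode, exc.stdout and exc.stderr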
diff --git a/yocto-poky/bitbake/lib/bb/providers.py b/yocto-poky/bitbake/lib/bb/providers.py
deleted file mode 100644
index 563a091fd..000000000
--- a/yocto-poky/bitbake/lib/bb/providers.py
+++ /dev/null
@@ -1,428 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
-# Copyright (C) 2005 Holger Hans Peter Freyther
-# Copyright (C) 2005 ROAD GmbH
-# Copyright (C) 2006 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import re
-import logging
-from bb import data, utils
-from collections import defaultdict
-import bb
-
-logger = logging.getLogger("BitBake.Provider")
-
-class NoProvider(bb.BBHandledException):
- """Exception raised when no provider of a build dependency can be found"""
-
-class NoRProvider(bb.BBHandledException):
- """Exception raised when no provider of a runtime dependency can be found"""
-
-class MultipleRProvider(bb.BBHandledException):
- """Exception raised when multiple providers of a runtime dependency can be found"""
-
-def findProviders(cfgData, dataCache, pkg_pn = None):
- """
- Convenience function to get latest and preferred providers in pkg_pn
- """
-
- if not pkg_pn:
- pkg_pn = dataCache.pkg_pn
-
- # Need to ensure data store is expanded
- localdata = data.createCopy(cfgData)
- bb.data.update_data(localdata)
- bb.data.expandKeys(localdata)
-
- preferred_versions = {}
- latest_versions = {}
-
- for pn in pkg_pn:
- (last_ver, last_file, pref_ver, pref_file) = findBestProvider(pn, localdata, dataCache, pkg_pn)
- preferred_versions[pn] = (pref_ver, pref_file)
- latest_versions[pn] = (last_ver, last_file)
-
- return (latest_versions, preferred_versions)
-
-
-def allProviders(dataCache):
- """
- Find all providers for each pn
- """
- all_providers = defaultdict(list)
- for (fn, pn) in dataCache.pkg_fn.items():
- ver = dataCache.pkg_pepvpr[fn]
- all_providers[pn].append((ver, fn))
- return all_providers
-
-
-def sortPriorities(pn, dataCache, pkg_pn = None):
- """
- Reorder pkg_pn by file priority and default preference
- """
-
- if not pkg_pn:
- pkg_pn = dataCache.pkg_pn
-
- files = pkg_pn[pn]
- priorities = {}
- for f in files:
- priority = dataCache.bbfile_priority[f]
- preference = dataCache.pkg_dp[f]
- if priority not in priorities:
- priorities[priority] = {}
- if preference not in priorities[priority]:
- priorities[priority][preference] = []
- priorities[priority][preference].append(f)
- tmp_pn = []
- for pri in sorted(priorities):
- tmp_pref = []
- for pref in sorted(priorities[pri]):
- tmp_pref.extend(priorities[pri][pref])
- tmp_pn = [tmp_pref] + tmp_pn
-
- return tmp_pn
-
-def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
- """
- Check if the version pe,pv,pr is the preferred one.
- If a preferred version is defined and it ends with '%', then pv has to start with that version once the '%' is removed
- """
- if (pr == preferred_r or preferred_r == None):
- if (pe == preferred_e or preferred_e == None):
- if preferred_v == pv:
- return True
- if preferred_v != None and preferred_v.endswith('%') and pv.startswith(preferred_v[:len(preferred_v)-1]):
- return True
- return False
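-
-# e.g. (illustrative values) a trailing '%' acts as a prefix wildcard:
-# preferredVersionMatch(None, "1.2.3", "r0", None, "1.2.%", None) -> True
-# preferredVersionMatch(None, "1.3.0", "r0", None, "1.2.%", None) -> False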
-
-def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
- """
- Find the first provider in pkg_pn with a PREFERRED_VERSION set.
- """
-
- preferred_file = None
- preferred_ver = None
-
- # pn can contain '_', e.g. gcc-cross-x86_64, but an override cannot,
- # hence we do this manually rather than use OVERRIDES
- preferred_v = cfgData.getVar("PREFERRED_VERSION_pn-%s" % pn, True)
- if not preferred_v:
- preferred_v = cfgData.getVar("PREFERRED_VERSION_%s" % pn, True)
- if not preferred_v:
- preferred_v = cfgData.getVar("PREFERRED_VERSION", True)
-
- if preferred_v:
- m = re.match(r'(\d+:)*(.*)(_.*)*', preferred_v)
- if m:
- if m.group(1):
- preferred_e = m.group(1)[:-1]
- else:
- preferred_e = None
- preferred_v = m.group(2)
- if m.group(3):
- preferred_r = m.group(3)[1:]
- else:
- preferred_r = None
- else:
- preferred_e = None
- preferred_r = None
-
- for file_set in pkg_pn:
- for f in file_set:
- pe, pv, pr = dataCache.pkg_pepvpr[f]
- if preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
- preferred_file = f
- preferred_ver = (pe, pv, pr)
- break
- if preferred_file:
- break
- if preferred_r:
- pv_str = '%s-%s' % (preferred_v, preferred_r)
- else:
- pv_str = preferred_v
- if not (preferred_e is None):
- pv_str = '%s:%s' % (preferred_e, pv_str)
- itemstr = ""
- if item:
- itemstr = " (for item %s)" % item
- if preferred_file is None:
- logger.info("preferred version %s of %s not available%s", pv_str, pn, itemstr)
- available_vers = []
- for file_set in pkg_pn:
- for f in file_set:
- pe, pv, pr = dataCache.pkg_pepvpr[f]
- ver_str = pv
- if pe:
- ver_str = "%s:%s" % (pe, ver_str)
- if not ver_str in available_vers:
- available_vers.append(ver_str)
- if available_vers:
- available_vers.sort()
- logger.info("versions of %s available: %s", pn, ' '.join(available_vers))
- else:
- logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
-
- return (preferred_ver, preferred_file)
-
-
-def findLatestProvider(pn, cfgData, dataCache, file_set):
- """
- Return the highest version of the providers in file_set.
- Take default preferences into account.
- """
- latest = None
- latest_p = 0
- latest_f = None
- for file_name in file_set:
- pe, pv, pr = dataCache.pkg_pepvpr[file_name]
- dp = dataCache.pkg_dp[file_name]
-
- if (latest is None) or ((latest_p == dp) and (utils.vercmp(latest, (pe, pv, pr)) < 0)) or (dp > latest_p):
- latest = (pe, pv, pr)
- latest_f = file_name
- latest_p = dp
-
- return (latest, latest_f)
-
-
-def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
- """
- If there is a PREFERRED_VERSION, find the highest-priority bbfile
- providing that version. If not, find the latest version provided by
- a bbfile in the highest-priority set.
- """
-
- sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn)
- # Find the highest priority provider with a PREFERRED_VERSION set
- (preferred_ver, preferred_file) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item)
- # Find the latest version of the highest priority provider
- (latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0])
-
- if preferred_file is None:
- preferred_file = latest_f
- preferred_ver = latest
-
- return (latest, latest_f, preferred_ver, preferred_file)
-
-
-def _filterProviders(providers, item, cfgData, dataCache):
- """
- Take a list of providers and filter/reorder according to the
- environment variables
- """
- eligible = []
- preferred_versions = {}
- sortpkg_pn = {}
-
- # The order of providers depends on the order of the files on the disk
- # up to here. Sort pkg_pn to make dependency issues reproducible rather
- # than effectively random.
- providers.sort()
-
- # Collate providers by PN
- pkg_pn = {}
- for p in providers:
- pn = dataCache.pkg_fn[p]
- if pn not in pkg_pn:
- pkg_pn[pn] = []
- pkg_pn[pn].append(p)
-
- logger.debug(1, "providers for %s are: %s", item, pkg_pn.keys())
-
- # First add PREFERRED_VERSIONS
- for pn in pkg_pn:
- sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn)
- preferred_versions[pn] = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item)
- if preferred_versions[pn][1]:
- eligible.append(preferred_versions[pn][1])
-
- # Now add latest versions
- for pn in sortpkg_pn:
- if pn in preferred_versions and preferred_versions[pn][1]:
- continue
- preferred_versions[pn] = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[pn][0])
- eligible.append(preferred_versions[pn][1])
-
- if len(eligible) == 0:
- logger.error("no eligible providers for %s", item)
- return 0
-
- # If pn == item, give it a slight default preference
- # This means PREFERRED_PROVIDER_foobar defaults to foobar if available
- for p in providers:
- pn = dataCache.pkg_fn[p]
- if pn != item:
- continue
- (newvers, fn) = preferred_versions[pn]
-        if fn not in eligible:
- continue
- eligible.remove(fn)
- eligible = [fn] + eligible
-
- return eligible
-
-
-def filterProviders(providers, item, cfgData, dataCache):
- """
- Take a list of providers and filter/reorder according to the
- environment variables
- Takes a "normal" target item
- """
-
- eligible = _filterProviders(providers, item, cfgData, dataCache)
-
- prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % item, True)
- if prefervar:
- dataCache.preferred[item] = prefervar
-
- foundUnique = False
- if item in dataCache.preferred:
- for p in eligible:
- pn = dataCache.pkg_fn[p]
- if dataCache.preferred[item] == pn:
- logger.verbose("selecting %s to satisfy %s due to PREFERRED_PROVIDERS", pn, item)
- eligible.remove(p)
- eligible = [p] + eligible
- foundUnique = True
- break
-
- logger.debug(1, "sorted providers for %s are: %s", item, eligible)
-
- return eligible, foundUnique
-
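The reordering idiom used here (and again in the runtime variant below) just promotes one entry to the front of the list while preserving the relative order of the rest; a tiny standalone sketch:

    def promote(eligible, fn):
        # Move fn to the front, keeping the other entries in order
        if fn not in eligible:
            return eligible
        return [fn] + [e for e in eligible if e != fn]

    assert promote(['a', 'b', 'c'], 'b') == ['b', 'a', 'c']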
-def filterProvidersRunTime(providers, item, cfgData, dataCache):
- """
- Take a list of providers and filter/reorder according to the
- environment variables
- Takes a "runtime" target item
- """
-
- eligible = _filterProviders(providers, item, cfgData, dataCache)
-
-    # First try to match any PREFERRED_RPROVIDER entry
- prefervar = cfgData.getVar('PREFERRED_RPROVIDER_%s' % item, True)
- foundUnique = False
- if prefervar:
- for p in eligible:
- pn = dataCache.pkg_fn[p]
- if prefervar == pn:
- logger.verbose("selecting %s to satisfy %s due to PREFERRED_RPROVIDER", pn, item)
- eligible.remove(p)
- eligible = [p] + eligible
- foundUnique = True
- numberPreferred = 1
- break
-
-    # If we didn't find an RPROVIDER entry, try to infer the provider from PREFERRED_PROVIDER entries
-    # by looking through the provides of each eligible recipe and seeing if a PREFERRED_PROVIDER was set.
-    # This is most useful for virtual/ entries rather than having an RPROVIDER per entry.
- if not foundUnique:
- # Should use dataCache.preferred here?
- preferred = []
- preferred_vars = []
- pns = {}
- for p in eligible:
- pns[dataCache.pkg_fn[p]] = p
- for p in eligible:
- pn = dataCache.pkg_fn[p]
- provides = dataCache.pn_provides[pn]
- for provide in provides:
- prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide, True)
- #logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
- if prefervar in pns and pns[prefervar] not in preferred:
- var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
- logger.verbose("selecting %s to satisfy runtime %s due to %s", prefervar, item, var)
- preferred_vars.append(var)
- pref = pns[prefervar]
- eligible.remove(pref)
- eligible = [pref] + eligible
- preferred.append(pref)
- break
-
- numberPreferred = len(preferred)
-
- if numberPreferred > 1:
- logger.error("Trying to resolve runtime dependency %s resulted in conflicting PREFERRED_PROVIDER entries being found.\nThe providers found were: %s\nThe PREFERRED_PROVIDER entries resulting in this conflict were: %s. You could set PREFERRED_RPROVIDER_%s" % (item, preferred, preferred_vars, item))
-
- logger.debug(1, "sorted runtime providers for %s are: %s", item, eligible)
-
- return eligible, numberPreferred
-
-regexp_cache = {}
-
-def getRuntimeProviders(dataCache, rdepend):
- """
- Return any providers of runtime dependency
- """
- rproviders = []
-
- if rdepend in dataCache.rproviders:
- rproviders += dataCache.rproviders[rdepend]
-
- if rdepend in dataCache.packages:
- rproviders += dataCache.packages[rdepend]
-
- if rproviders:
- return rproviders
-
- # Only search dynamic packages if we can't find anything in other variables
- for pattern in dataCache.packages_dynamic:
- pattern = pattern.replace('+', "\+")
- if pattern in regexp_cache:
- regexp = regexp_cache[pattern]
- else:
- try:
- regexp = re.compile(pattern)
- except:
- logger.error("Error parsing regular expression '%s'", pattern)
- raise
- regexp_cache[pattern] = regexp
- if regexp.match(rdepend):
- rproviders += dataCache.packages_dynamic[pattern]
-            logger.debug(1, "Assuming %s is a dynamic package, but it may not exist", rdepend)
-
- return rproviders
-
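The module-level regexp_cache is a simple memoisation of re.compile over the PACKAGES_DYNAMIC patterns, worth having because getRuntimeProviders runs once per runtime dependency. The same idea as a self-contained helper (plain Python, not bitbake API):

    import re

    _regex_cache = {}

    def compile_cached(pattern):
        # Compile each pattern at most once; later calls are dictionary hits
        regex = _regex_cache.get(pattern)
        if regex is None:
            regex = _regex_cache[pattern] = re.compile(pattern)
        return regex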
-
-def buildWorldTargetList(dataCache):
- """
- Build package list for "bitbake world"
- """
- if dataCache.world_target:
- return
-
- logger.debug(1, "collating packages for \"world\"")
- for f in dataCache.possible_world:
- terminal = True
- pn = dataCache.pkg_fn[f]
-
- for p in dataCache.pn_provides[pn]:
- if p.startswith('virtual/'):
- logger.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
- terminal = False
- break
- for pf in dataCache.providers[p]:
- if dataCache.pkg_fn[pf] != pn:
- logger.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
- terminal = False
- break
- if terminal:
- dataCache.world_target.add(pn)
diff --git a/yocto-poky/bitbake/lib/bb/pysh/__init__.py b/yocto-poky/bitbake/lib/bb/pysh/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/yocto-poky/bitbake/lib/bb/pysh/__init__.py
+++ /dev/null
diff --git a/yocto-poky/bitbake/lib/bb/pysh/builtin.py b/yocto-poky/bitbake/lib/bb/pysh/builtin.py
deleted file mode 100644
index b748e4a4f..000000000
--- a/yocto-poky/bitbake/lib/bb/pysh/builtin.py
+++ /dev/null
@@ -1,710 +0,0 @@
-# builtin.py - builtins and utilities definitions for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""Builtin and internal utilities implementations.
-
-- Beware not to use the python interpreter environment as if it were the shell
-environment. For instance, a command's working directory must be explicitly
-handled through env['PWD'] instead of relying on the python working directory.
-"""
-import errno
-import optparse
-import os
-import re
-import subprocess
-import sys
-import time
-
-def has_subprocess_bug():
-    return getattr(subprocess, 'list2cmdline', None) and \
- ( subprocess.list2cmdline(['']) == '' or \
- subprocess.list2cmdline(['foo|bar']) == 'foo|bar')
-
-# Detect python bug 1634343: "subprocess swallows empty arguments under win32"
-# <http://sourceforge.net/tracker/index.php?func=detail&aid=1634343&group_id=5470&atid=105470>
-# Also detect: "[ 1710802 ] subprocess must escape redirection characters under win32"
-# <http://sourceforge.net/tracker/index.php?func=detail&aid=1710802&group_id=5470&atid=105470>
-if has_subprocess_bug():
- import subprocess_fix
- subprocess.list2cmdline = subprocess_fix.list2cmdline
-
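For context: the stock subprocess.list2cmdline only quotes arguments that contain whitespace (or are empty), which is exactly what the probe above exploits. A quick illustration, hedged since the exact output varies across Python versions:

    import subprocess

    # An empty argument must round-trip as '""' to survive cmd.exe parsing
    print(subprocess.list2cmdline(['']))
    # 'foo|bar' is left unquoted by many interpreters, so cmd.exe would see a pipe
    print(subprocess.list2cmdline(['foo|bar']))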
-from sherrors import *
-
-class NonExitingParser(optparse.OptionParser):
- """OptionParser default behaviour upon error is to print the error message and
- exit. Raise a utility error instead.
- """
- def error(self, msg):
- raise UtilityError(msg)
-
-#-------------------------------------------------------------------------------
-# set special builtin
-#-------------------------------------------------------------------------------
-OPT_SET = NonExitingParser(usage="set - set or unset options and positional parameters")
-OPT_SET.add_option( '-f', action='store_true', dest='has_f', default=False,
- help='The shell shall disable pathname expansion.')
-OPT_SET.add_option('-e', action='store_true', dest='has_e', default=False,
- help="""When this option is on, if a simple command fails for any of the \
- reasons listed in Consequences of Shell Errors or returns an exit status \
- value >0, and is not part of the compound list following a while, until, \
- or if keyword, and is not a part of an AND or OR list, and is not a \
- pipeline preceded by the ! reserved word, then the shell shall immediately \
- exit.""")
-OPT_SET.add_option('-x', action='store_true', dest='has_x', default=False,
- help="""The shell shall write to standard error a trace for each command \
- after it expands the command and before it executes it. It is unspecified \
- whether the command that turns tracing off is traced.""")
-
-def builtin_set(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_SET.parse_args(args)
- env = interp.get_env()
-
- if option.has_f:
- env.set_opt('-f')
- if option.has_e:
- env.set_opt('-e')
- if option.has_x:
- env.set_opt('-x')
- return 0
-
-#-------------------------------------------------------------------------------
-# shift special builtin
-#-------------------------------------------------------------------------------
-def builtin_shift(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- params = interp.get_env().get_positional_args()
- if args:
- try:
- n = int(args[0])
- if n > len(params):
- raise ValueError()
- except ValueError:
- return 1
- else:
- n = 1
-
- params[:n] = []
- interp.get_env().set_positional_args(params)
- return 0
-
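A rough usage sketch of the shift builtin, driven through the Interpreter from interp.py further down in this patch (assumes running inside the pysh package on Python 2, and uses the interp.get_env() accessor the builtins themselves rely on):

    import os
    from interp import Interpreter

    interp = Interpreter(os.getcwd())
    env = interp.get_env()
    env.set_positional_args(['a', 'b', 'c'])
    interp.execute_script(script='shift 2')
    print env.get_positional_args()   # ['c']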
-#-------------------------------------------------------------------------------
-# export special builtin
-#-------------------------------------------------------------------------------
-OPT_EXPORT = NonExitingParser(usage="export - set the export attribute for variables")
-OPT_EXPORT.add_option('-p', action='store_true', dest='has_p', default=False)
-
-def builtin_export(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_EXPORT.parse_args(args)
- if option.has_p:
- raise NotImplementedError()
-
- for arg in args:
- try:
- name, value = arg.split('=', 1)
- except ValueError:
- name, value = arg, None
- env = interp.get_env().export(name, value)
-
- return 0
-
-#-------------------------------------------------------------------------------
-# return special builtin
-#-------------------------------------------------------------------------------
-def builtin_return(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- res = 0
- if args:
- try:
- res = int(args[0])
- except ValueError:
- res = 0
- if not 0<=res<=255:
- res = 0
-
- # BUG: should be last executed command exit code
- raise ReturnSignal(res)
-
-#-------------------------------------------------------------------------------
-# trap special builtin
-#-------------------------------------------------------------------------------
-def builtin_trap(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- if len(args) < 2:
- stderr.write('trap: usage: trap [[arg] signal_spec ...]\n')
- return 2
-
- action = args[0]
- for sig in args[1:]:
- try:
- env.traps[sig] = action
- except Exception as e:
- stderr.write('trap: %s\n' % str(e))
- return 0
-
-#-------------------------------------------------------------------------------
-# unset special builtin
-#-------------------------------------------------------------------------------
-OPT_UNSET = NonExitingParser("unset - unset values and attributes of variables and functions")
-OPT_UNSET.add_option( '-f', action='store_true', dest='has_f', default=False)
-OPT_UNSET.add_option( '-v', action='store_true', dest='has_v', default=False)
-
-def builtin_unset(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_UNSET.parse_args(args)
-
- status = 0
- env = interp.get_env()
- for arg in args:
- try:
- if option.has_f:
- env.remove_function(arg)
- else:
- del env[arg]
- except KeyError:
- pass
- except VarAssignmentError:
- status = 1
-
- return status
-
-#-------------------------------------------------------------------------------
-# wait special builtin
-#-------------------------------------------------------------------------------
-def builtin_wait(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return interp.wait([int(arg) for arg in args])
-
-#-------------------------------------------------------------------------------
-# cat utility
-#-------------------------------------------------------------------------------
-def utility_cat(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- if not args:
- args = ['-']
-
- status = 0
- for arg in args:
- if arg == '-':
- data = stdin.read()
- else:
- path = os.path.join(env['PWD'], arg)
- try:
- f = file(path, 'rb')
- try:
- data = f.read()
- finally:
- f.close()
- except IOError as e:
- if e.errno != errno.ENOENT:
- raise
- status = 1
- continue
- stdout.write(data)
- stdout.flush()
- return status
-
-#-------------------------------------------------------------------------------
-# cd utility
-#-------------------------------------------------------------------------------
-OPT_CD = NonExitingParser("cd - change the working directory")
-
-def utility_cd(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_CD.parse_args(args)
- env = interp.get_env()
-
- directory = None
- printdir = False
-    if not args:
-        home = env.get('HOME')
-        if not home:
-            # HOME is unset: behaviour is unspecified, do nothing
-            return 0
-        directory = home
- elif len(args)==1:
- directory = args[0]
- if directory=='-':
- if 'OLDPWD' not in env:
- raise UtilityError("OLDPWD not set")
- printdir = True
- directory = env['OLDPWD']
- else:
- raise UtilityError("too many arguments")
-
- curpath = None
- # Absolute directories will be handled correctly by the os.path.join call.
-    if not directory.startswith('.'):
- cdpaths = env.get('CDPATH', '.').split(';')
- for cdpath in cdpaths:
- p = os.path.join(cdpath, directory)
- if os.path.isdir(p):
- curpath = p
- break
-
-    if curpath is None:
-        curpath = directory
-    curpath = os.path.join(env['PWD'], curpath)
-
- env['OLDPWD'] = env['PWD']
- env['PWD'] = curpath
- if printdir:
- stdout.write('%s\n' % curpath)
- return 0
-
-#-------------------------------------------------------------------------------
-# colon utility
-#-------------------------------------------------------------------------------
-def utility_colon(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- return 0
-
-#-------------------------------------------------------------------------------
-# echo utility
-#-------------------------------------------------------------------------------
-def utility_echo(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- # Echo only takes arguments, no options. Use printf if you need fancy stuff.
- output = ' '.join(args) + '\n'
- stdout.write(output)
- stdout.flush()
- return 0
-
-#-------------------------------------------------------------------------------
-# egrep utility
-#-------------------------------------------------------------------------------
-# egrep is usually a shell script.
-# Unfortunately, pysh does not support shell scripts *with arguments* right now,
-# so the redirection is implemented here, assuming grep is available.
-def utility_egrep(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return run_command('grep', ['-E'] + args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# env utility
-#-------------------------------------------------------------------------------
-def utility_env(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- if args and args[0]=='-i':
- raise NotImplementedError('env: -i option is not implemented')
-
- i = 0
- for arg in args:
- if '=' not in arg:
- break
- # Update the current environment
- name, value = arg.split('=', 1)
- env[name] = value
- i += 1
-
- if args[i:]:
- # Find then execute the specified interpreter
- utility = env.find_in_path(args[i])
- if not utility:
- return 127
- args[i:i+1] = utility
- name = args[i]
- args = args[i+1:]
- try:
- return run_command(name, args, interp, env, stdin, stdout, stderr,
- debugflags)
- except UtilityError:
- stderr.write('env: failed to execute %s' % ' '.join([name]+args))
- return 126
- else:
- for pair in env.get_variables().iteritems():
- stdout.write('%s=%s\n' % pair)
- return 0
-
-#-------------------------------------------------------------------------------
-# exit utility
-#-------------------------------------------------------------------------------
-def utility_exit(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- res = None
- if args:
- try:
- res = int(args[0])
- except ValueError:
- res = None
- if not 0<=res<=255:
- res = None
-
- if res is None:
- # BUG: should be last executed command exit code
- res = 0
-
- raise ExitSignal(res)
-
-#-------------------------------------------------------------------------------
-# fgrep utility
-#-------------------------------------------------------------------------------
-# see egrep
-def utility_fgrep(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return run_command('grep', ['-F'] + args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# gunzip utility
-#-------------------------------------------------------------------------------
-# see egrep
-def utility_gunzip(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return run_command('gzip', ['-d'] + args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# kill utility
-#-------------------------------------------------------------------------------
-def utility_kill(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- for arg in args:
- pid = int(arg)
- status = subprocess.call(['pskill', '/T', str(pid)],
- shell=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- # pskill is asynchronous, hence the stupid polling loop
- while 1:
- p = subprocess.Popen(['pslist', str(pid)],
- shell=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- output = p.communicate()[0]
- if ('process %d was not' % pid) in output:
- break
- time.sleep(1)
- return status
-
-#-------------------------------------------------------------------------------
-# mkdir utility
-#-------------------------------------------------------------------------------
-OPT_MKDIR = NonExitingParser("mkdir - make directories.")
-OPT_MKDIR.add_option('-p', action='store_true', dest='has_p', default=False)
-
-def utility_mkdir(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- # TODO: implement umask
- # TODO: implement proper utility error report
- option, args = OPT_MKDIR.parse_args(args)
- for arg in args:
- path = os.path.join(env['PWD'], arg)
- if option.has_p:
- try:
- os.makedirs(path)
-            except OSError as e:
- if e.errno != errno.EEXIST:
- raise
- else:
- os.mkdir(path)
- return 0
-
-#-------------------------------------------------------------------------------
-# netstat utility
-#-------------------------------------------------------------------------------
-def utility_netstat(name, args, interp, env, stdin, stdout, stderr, debugflags):
- # Do you really expect me to implement netstat ?
- # This empty form is enough for Mercurial tests since it's
- # supposed to generate nothing upon success. Faking this test
- # is not a big deal either.
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- return 0
-
-#-------------------------------------------------------------------------------
-# pwd utility
-#-------------------------------------------------------------------------------
-OPT_PWD = NonExitingParser("pwd - return working directory name")
-OPT_PWD.add_option('-L', action='store_true', dest='has_L', default=True,
- help="""If the PWD environment variable contains an absolute pathname of \
- the current directory that does not contain the filenames dot or dot-dot, \
- pwd shall write this pathname to standard output. Otherwise, the -L option \
- shall behave as the -P option.""")
-OPT_PWD.add_option('-P', action='store_true', dest='has_P', default=False,
- help="""The absolute pathname written shall not contain filenames that, in \
- the context of the pathname, refer to files of type symbolic link.""")
-
-def utility_pwd(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_PWD.parse_args(args)
- stdout.write('%s\n' % env['PWD'])
- return 0
-
-#-------------------------------------------------------------------------------
-# printf utility
-#-------------------------------------------------------------------------------
-RE_UNESCAPE = re.compile(r'(\\x[a-zA-Z0-9]{2}|\\[0-7]{1,3}|\\.)')
-
-def utility_printf(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- def replace(m):
- assert m.group()
- g = m.group()[1:]
- if g.startswith('x'):
- return chr(int(g[1:], 16))
- if len(g) <= 3 and len([c for c in g if c in '01234567']) == len(g):
- # Yay, an octal number
- return chr(int(g, 8))
- return {
- 'a': '\a',
- 'b': '\b',
- 'f': '\f',
- 'n': '\n',
- 'r': '\r',
- 't': '\t',
- 'v': '\v',
- '\\': '\\',
- }.get(g)
-
- # Convert escape sequences
- format = re.sub(RE_UNESCAPE, replace, args[0])
- stdout.write(format % tuple(args[1:]))
- return 0
-
-#-------------------------------------------------------------------------------
-# true utility
-#-------------------------------------------------------------------------------
-def utility_true(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- return 0
-
-#-------------------------------------------------------------------------------
-# sed utility
-#-------------------------------------------------------------------------------
-RE_SED = re.compile(r'^s(.).*\1[a-zA-Z]*$')
-
-# cygwin sed fails with some expressions when they do not end with a single space.
-# See unit tests for details. Interestingly, the same expressions work perfectly
-# in the cygwin shell.
-def utility_sed(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- # Scan pattern arguments and append a space if necessary
- for i in xrange(len(args)):
- if not RE_SED.search(args[i]):
- continue
- args[i] = args[i] + ' '
-
- return run_command(name, args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# sleep utility
-#-------------------------------------------------------------------------------
-def utility_sleep(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- time.sleep(int(args[0]))
- return 0
-
-#-------------------------------------------------------------------------------
-# sort utility
-#-------------------------------------------------------------------------------
-OPT_SORT = NonExitingParser("sort - sort, merge, or sequence check text files")
-
-def utility_sort(name, args, interp, env, stdin, stdout, stderr, debugflags):
-
- def sort(path):
- if path == '-':
- lines = stdin.readlines()
- else:
- try:
- f = file(path)
- try:
- lines = f.readlines()
- finally:
- f.close()
- except IOError as e:
- stderr.write(str(e) + '\n')
- return 1
-
- if lines and lines[-1][-1]!='\n':
- lines[-1] = lines[-1] + '\n'
- return lines
-
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_SORT.parse_args(args)
- alllines = []
-
-    if not args:
- args += ['-']
-
- # Load all files lines
- curdir = os.getcwd()
- try:
- os.chdir(env['PWD'])
- for path in args:
- alllines += sort(path)
- finally:
- os.chdir(curdir)
-
- alllines.sort()
- for line in alllines:
- stdout.write(line)
- return 0
-
-#-------------------------------------------------------------------------------
-# hg utility
-#-------------------------------------------------------------------------------
-
-hgcommands = [
- 'add',
- 'addremove',
- 'commit', 'ci',
- 'debugrename',
- 'debugwalk',
- 'falabala', # Dummy command used in a mercurial test
- 'incoming',
- 'locate',
- 'pull',
- 'push',
- 'qinit',
- 'remove', 'rm',
- 'rename', 'mv',
- 'revert',
- 'showconfig',
- 'status', 'st',
- 'strip',
- ]
-
-def rewriteslashes(name, args):
-    # Several hg commands output file paths; rewrite the separators
- if len(args) > 1 and name.lower().endswith('python') \
- and args[0].endswith('hg'):
- for cmd in hgcommands:
- if cmd in args[1:]:
- return True
-
- # svn output contains many paths with OS specific separators.
- # Normalize these to unix paths.
- base = os.path.basename(name)
- if base.startswith('svn'):
- return True
-
- return False
-
-def rewritehg(output):
- if not output:
- return output
- # Rewrite os specific messages
- output = output.replace(': The system cannot find the file specified',
- ': No such file or directory')
- output = re.sub(': Access is denied.*$', ': Permission denied', output)
- output = output.replace(': No connection could be made because the target machine actively refused it',
- ': Connection refused')
- return output
-
-
-def run_command(name, args, interp, env, stdin, stdout,
- stderr, debugflags):
- # Execute the command
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- hgbin = interp.options().hgbinary
- ishg = hgbin and ('hg' in name or args and 'hg' in args[0])
- unixoutput = 'cygwin' in name or ishg
-
- exec_env = env.get_variables()
- try:
-        # BUG: comparing file descriptors is clearly not a reliable way to tell
-        # whether they point at the same underlying object. But within pysh's
-        # limited scope this is usually right; we do not expect complicated
-        # redirections besides the usual 2>&1.
-        # One case we cannot deal with is when stdout and stderr are
-        # redirected *by the pysh caller*. This is the reason for the
-        # --redirect pysh() option.
-        # We want to know whether they are the same because we sometimes need
-        # to transform the command output, mostly removing CR-LF to ensure that
-        # command output is unix-like. Cygwin utilities are a special case
-        # because they explicitly set their output streams to binary mode, so
-        # we have nothing to do. For all other commands, we have to guess
-        # whether they are sending text data, in which case the transformation
-        # must be done.
-        # Again, the NUL character test is unreliable but should be enough for
-        # the hg tests.
- redirected = stdout.fileno()==stderr.fileno()
- if not redirected:
- p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env,
- stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- else:
- p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env,
- stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- out, err = p.communicate()
- except WindowsError as e:
- raise UtilityError(str(e))
-
- if not unixoutput:
- def encode(s):
- if '\0' in s:
- return s
- return s.replace('\r\n', '\n')
- else:
- encode = lambda s: s
-
- if rewriteslashes(name, args):
- encode1_ = encode
- def encode(s):
- s = encode1_(s)
- s = s.replace('\\\\', '\\')
- s = s.replace('\\', '/')
- return s
-
- if ishg:
- encode2_ = encode
- def encode(s):
- return rewritehg(encode2_(s))
-
- stdout.write(encode(out))
- if not redirected:
- stderr.write(encode(err))
- return p.returncode
-
diff --git a/yocto-poky/bitbake/lib/bb/pysh/interp.py b/yocto-poky/bitbake/lib/bb/pysh/interp.py
deleted file mode 100644
index 25d8c92ec..000000000
--- a/yocto-poky/bitbake/lib/bb/pysh/interp.py
+++ /dev/null
@@ -1,1367 +0,0 @@
-# interp.py - shell interpreter for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""Implement the shell interpreter.
-
-Most references are made to "The Open Group Base Specifications Issue 6".
-<http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html>
-"""
-# TODO: document the fact that input streams must implement fileno() so Popen
-# will work correctly. It requires non-stdin streams to be implemented as files.
-# Still to be tested...
-# DOC: pathsep is used in PATH instead of ':'. Clearly, there are path syntax issues here.
-# TODO: stop command execution upon error.
-# TODO: sort out the filename/io_number mess. It should be possible to use filenames only.
-# TODO: review subshell implementation
-# TODO: test environment cloning for non-special builtins
-# TODO: set -x should not rebuild commands from tokens, assignments/redirections are lost
-# TODO: unit test for variable assignment
-# TODO: test error management wrt error type/utility type
-# TODO: test for binary output everywhere
-# BUG: debug-parsing does not pass log file to PLY. Maybe a PLY upgrade is necessary.
-import base64
-import cPickle as pickle
-import errno
-import glob
-import os
-import re
-import subprocess
-import sys
-import tempfile
-
-try:
- s = set()
- del s
-except NameError:
- from Set import Set as set
-
-import builtin
-from sherrors import *
-import pyshlex
-import pyshyacc
-
-def mappend(func, *args, **kargs):
- """Like map but assume func returns a list. Returned lists are merged into
- a single one.
- """
- return reduce(lambda a,b: a+b, map(func, *args, **kargs), [])
-
-class FileWrapper:
- """File object wrapper to ease debugging.
-
- Allow mode checking and implement file duplication through a simple
- reference counting scheme. Not sure the latter is really useful since
- only real file descriptors can be used.
- """
- def __init__(self, mode, file, close=True):
- if mode not in ('r', 'w', 'a'):
- raise IOError('invalid mode: %s' % mode)
- self._mode = mode
- self._close = close
- if isinstance(file, FileWrapper):
- if file._refcount[0] <= 0:
- raise IOError(0, 'Error')
- self._refcount = file._refcount
- self._refcount[0] += 1
- self._file = file._file
- else:
- self._refcount = [1]
- self._file = file
-
- def dup(self):
- return FileWrapper(self._mode, self, self._close)
-
- def fileno(self):
- """fileno() should be only necessary for input streams."""
- return self._file.fileno()
-
- def read(self, size=-1):
- if self._mode!='r':
- raise IOError(0, 'Error')
- return self._file.read(size)
-
- def readlines(self, *args, **kwargs):
- return self._file.readlines(*args, **kwargs)
-
- def write(self, s):
- if self._mode not in ('w', 'a'):
- raise IOError(0, 'Error')
- return self._file.write(s)
-
- def flush(self):
- self._file.flush()
-
- def close(self):
- if not self._refcount:
- return
- assert self._refcount[0] > 0
-
- self._refcount[0] -= 1
- if self._refcount[0] == 0:
- self._mode = 'c'
- if self._close:
- self._file.close()
- self._refcount = None
-
- def mode(self):
- return self._mode
-
- def __getattr__(self, name):
- if name == 'name':
- self.name = getattr(self._file, name)
- return self.name
- else:
- raise AttributeError(name)
-
- def __del__(self):
- self.close()
-
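A small sketch of the reference-counting behaviour: dup() shares the wrapper's refcount, so the underlying file stays usable until the last wrapper is closed (close=False here so sys.stdout itself survives the demo):

    import sys

    w = FileWrapper('w', sys.stdout, close=False)
    d = w.dup()                # shares w's refcount (now 2)
    w.close()                  # refcount 2 -> 1, file still usable via d
    d.write('still open\n')
    d.close()                  # refcount 1 -> 0, wrapper shut down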
-
-def win32_open_devnull(mode):
- return open('NUL', mode)
-
-
-class Redirections:
- """Stores open files and their mapping to pseudo-sh file descriptor.
- """
-    # BUG: redirections are not handled correctly: 1>&3 2>&3 3>&4 does
-    # not make 1 redirect to 4
- def __init__(self, stdin=None, stdout=None, stderr=None):
- self._descriptors = {}
- if stdin is not None:
- self._add_descriptor(0, stdin)
- if stdout is not None:
- self._add_descriptor(1, stdout)
- if stderr is not None:
- self._add_descriptor(2, stderr)
-
- def add_here_document(self, interp, name, content, io_number=None):
- if io_number is None:
- io_number = 0
-
- if name==pyshlex.unquote_wordtree(name):
- content = interp.expand_here_document(('TOKEN', content))
-
- # Write document content in a temporary file
- tmp = tempfile.TemporaryFile()
- try:
- tmp.write(content)
- tmp.flush()
- tmp.seek(0)
- self._add_descriptor(io_number, FileWrapper('r', tmp))
- except:
- tmp.close()
- raise
-
- def add(self, interp, op, filename, io_number=None):
- if op not in ('<', '>', '>|', '>>', '>&'):
- # TODO: add descriptor duplication and here_documents
- raise RedirectionError('Unsupported redirection operator "%s"' % op)
-
- if io_number is not None:
- io_number = int(io_number)
-
- if (op == '>&' and filename.isdigit()) or filename=='-':
-            # No expansion for file descriptors; quote them if you want a filename
- fullname = filename
- else:
- if filename.startswith('/'):
- # TODO: win32 kludge
- if filename=='/dev/null':
- fullname = 'NUL'
- else:
- # TODO: handle absolute pathnames, they are unlikely to exist on the
- # current platform (win32 for instance).
- raise NotImplementedError()
- else:
- fullname = interp.expand_redirection(('TOKEN', filename))
- if not fullname:
- raise RedirectionError('%s: ambiguous redirect' % filename)
- # Build absolute path based on PWD
- fullname = os.path.join(interp.get_env()['PWD'], fullname)
-
- if op=='<':
- return self._add_input_redirection(interp, fullname, io_number)
- elif op in ('>', '>|'):
- clobber = ('>|'==op)
- return self._add_output_redirection(interp, fullname, io_number, clobber)
- elif op=='>>':
- return self._add_output_appending(interp, fullname, io_number)
- elif op=='>&':
- return self._dup_output_descriptor(fullname, io_number)
-
- def close(self):
- if self._descriptors is not None:
- for desc in self._descriptors.itervalues():
- desc.flush()
- desc.close()
- self._descriptors = None
-
- def stdin(self):
- return self._descriptors[0]
-
- def stdout(self):
- return self._descriptors[1]
-
- def stderr(self):
- return self._descriptors[2]
-
- def clone(self):
- clone = Redirections()
- for desc, fileobj in self._descriptors.iteritems():
- clone._descriptors[desc] = fileobj.dup()
- return clone
-
- def _add_output_redirection(self, interp, filename, io_number, clobber):
- if io_number is None:
- # io_number default to standard output
- io_number = 1
-
- if not clobber and interp.get_env().has_opt('-C') and os.path.isfile(filename):
-            # File already exists in no-clobber mode; bail out
- raise RedirectionError('File "%s" already exists' % filename)
-
- # Open and register
- self._add_file_descriptor(io_number, filename, 'w')
-
- def _add_output_appending(self, interp, filename, io_number):
- if io_number is None:
- io_number = 1
- self._add_file_descriptor(io_number, filename, 'a')
-
- def _add_input_redirection(self, interp, filename, io_number):
- if io_number is None:
- io_number = 0
- self._add_file_descriptor(io_number, filename, 'r')
-
- def _add_file_descriptor(self, io_number, filename, mode):
- try:
- if filename.startswith('/'):
- if filename=='/dev/null':
- f = win32_open_devnull(mode+'b')
- else:
- # TODO: handle absolute pathnames, they are unlikely to exist on the
- # current platform (win32 for instance).
- raise NotImplementedError('cannot open absolute path %s' % repr(filename))
- else:
- f = file(filename, mode+'b')
- except IOError as e:
- raise RedirectionError(str(e))
-
- wrapper = None
- try:
- wrapper = FileWrapper(mode, f)
- f = None
- self._add_descriptor(io_number, wrapper)
- except:
- if f: f.close()
- if wrapper: wrapper.close()
- raise
-
- def _dup_output_descriptor(self, source_fd, dest_fd):
- if source_fd is None:
- source_fd = 1
- self._dup_file_descriptor(source_fd, dest_fd, 'w')
-
- def _dup_file_descriptor(self, source_fd, dest_fd, mode):
- source_fd = int(source_fd)
- if source_fd not in self._descriptors:
- raise RedirectionError('"%s" is not a valid file descriptor' % str(source_fd))
- source = self._descriptors[source_fd]
-
- if source.mode()!=mode:
- raise RedirectionError('Descriptor %s cannot be duplicated in mode "%s"' % (str(source), mode))
-
- if dest_fd=='-':
- # Close the source descriptor
- del self._descriptors[source_fd]
- source.close()
- else:
- dest_fd = int(dest_fd)
- if dest_fd not in self._descriptors:
- raise RedirectionError('Cannot replace file descriptor %s' % str(dest_fd))
-
- dest = self._descriptors[dest_fd]
- if dest.mode()!=mode:
-                raise RedirectionError('Descriptor %s cannot be redirected in mode "%s"' % (str(dest), mode))
-
- self._descriptors[dest_fd] = source.dup()
- dest.close()
-
- def _add_descriptor(self, io_number, file):
- io_number = int(io_number)
-
- if io_number in self._descriptors:
- # Close the current descriptor
- d = self._descriptors[io_number]
- del self._descriptors[io_number]
- d.close()
-
- self._descriptors[io_number] = file
-
- def __str__(self):
- names = [('%d=%r' % (k, getattr(v, 'name', None))) for k,v
- in self._descriptors.iteritems()]
- names = ','.join(names)
- return 'Redirections(%s)' % names
-
- def __del__(self):
- self.close()
-
-def cygwin_to_windows_path(path):
- """Turn /cygdrive/c/foo into c:/foo, or return path if it
- is not a cygwin path.
- """
- if not path.startswith('/cygdrive/'):
- return path
- path = path[len('/cygdrive/'):]
- path = path[:1] + ':' + path[1:]
- return path
-
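Quick sanity checks for the conversion (assumed example paths):

    assert cygwin_to_windows_path('/cygdrive/c/foo') == 'c:/foo'
    assert cygwin_to_windows_path('/tmp/foo') == '/tmp/foo'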
-def win32_to_unix_path(path):
- if path is not None:
- path = path.replace('\\', '/')
- return path
-
-_RE_SHEBANG = re.compile(r'^\#!\s?([^\s]+)(?:\s([^\s]+))?')
-_SHEBANG_CMDS = {
- '/usr/bin/env': 'env',
- '/bin/sh': 'pysh',
- 'python': 'python',
-}
-
-def resolve_shebang(path, ignoreshell=False):
- """Return a list of arguments as shebang interpreter call or an empty list
- if path does not refer to an executable script.
- See <http://www.opengroup.org/austin/docs/austin_51r2.txt>.
-
-    ignoreshell - set to True to ignore sh shebangs; an empty list is returned instead.
- """
- try:
- f = file(path)
- try:
- # At most 80 characters in the first line
- header = f.read(80).splitlines()[0]
- finally:
- f.close()
-
- m = _RE_SHEBANG.search(header)
- if not m:
- return []
- cmd, arg = m.group(1,2)
- if os.path.isfile(cmd):
-            # Keep this one; the hg script, for instance, contains a weird windows
-            # shebang referencing the current python install.
-            cmdfile = os.path.basename(cmd).lower()
-            if cmdfile == 'python.exe':
-                cmd = 'python'
- elif cmd not in _SHEBANG_CMDS:
- raise CommandNotFound('Unknown interpreter "%s" referenced in '\
- 'shebang' % header)
- cmd = _SHEBANG_CMDS.get(cmd)
- if cmd is None or (ignoreshell and cmd == 'pysh'):
- return []
- if arg is None:
- return [cmd, win32_to_unix_path(path)]
- return [cmd, arg, win32_to_unix_path(path)]
- except IOError as e:
- if e.errno!=errno.ENOENT and \
- (e.errno!=errno.EPERM and not os.path.isdir(path)): # Opening a directory raises EPERM
- raise
- return []
-
-def win32_find_in_path(name, path):
- if isinstance(path, str):
- path = path.split(os.pathsep)
-
- exts = os.environ.get('PATHEXT', '').lower().split(os.pathsep)
- for p in path:
- p_name = os.path.join(p, name)
-
- prefix = resolve_shebang(p_name)
- if prefix:
- return prefix
-
- for ext in exts:
- p_name_ext = p_name + ext
- if os.path.exists(p_name_ext):
- return [win32_to_unix_path(p_name_ext)]
- return []
-
-class Traps(dict):
- def __setitem__(self, key, value):
- if key not in ('EXIT',):
- raise NotImplementedError()
- super(Traps, self).__setitem__(key, value)
-
-# IFS whitespace character class
-_IFS_WHITESPACES = (' ', '\t', '\n')
-
-class Environment:
- """Environment holds environment variables, export table, function
- definitions and whatever is defined in 2.12 "Shell Execution Environment",
- redirection excepted.
- """
- def __init__(self, pwd):
- self._opt = set() #Shell options
-
- self._functions = {}
- self._env = {'?': '0', '#': '0'}
- self._exported = set([
- 'HOME', 'IFS', 'PATH'
- ])
-
- # Set environment vars with side-effects
- self._ifs_ws = None # Set of IFS whitespace characters
- self._ifs_re = None # Regular expression used to split between words using IFS classes
- self['IFS'] = ''.join(_IFS_WHITESPACES) #Default environment values
- self['PWD'] = pwd
- self.traps = Traps()
-
- def clone(self, subshell=False):
- env = Environment(self['PWD'])
- env._opt = set(self._opt)
- for k,v in self.get_variables().iteritems():
- if k in self._exported:
- env.export(k,v)
- elif subshell:
- env[k] = v
-
- if subshell:
- env._functions = dict(self._functions)
-
- return env
-
- def __getitem__(self, key):
- if key in ('@', '*', '-', '$'):
- raise NotImplementedError('%s is not implemented' % repr(key))
- return self._env[key]
-
- def get(self, key, defval=None):
- try:
- return self[key]
- except KeyError:
- return defval
-
- def __setitem__(self, key, value):
- if key=='IFS':
- # Update the whitespace/non-whitespace classes
- self._update_ifs(value)
- elif key=='PWD':
- pwd = os.path.abspath(value)
- if not os.path.isdir(pwd):
- raise VarAssignmentError('Invalid directory %s' % value)
- value = pwd
- elif key in ('?', '!'):
- value = str(int(value))
- self._env[key] = value
-
- def __delitem__(self, key):
- if key in ('IFS', 'PWD', '?'):
- raise VarAssignmentError('%s cannot be unset' % key)
- del self._env[key]
-
- def __contains__(self, item):
- return item in self._env
-
- def set_positional_args(self, args):
-        """Set the content of 'args' as positional arguments from 1 to len(args).
-        Return the previous arguments as a list of strings.
- """
- # Save and remove previous arguments
- prevargs = []
- for i in xrange(int(self._env['#'])):
- i = str(i+1)
- prevargs.append(self._env[i])
- del self._env[i]
- self._env['#'] = '0'
-
- #Set new ones
- for i,arg in enumerate(args):
- self._env[str(i+1)] = str(arg)
- self._env['#'] = str(len(args))
-
- return prevargs
-
- def get_positional_args(self):
- return [self._env[str(i+1)] for i in xrange(int(self._env['#']))]
-
- def get_variables(self):
- return dict(self._env)
-
- def export(self, key, value=None):
- if value is not None:
- self[key] = value
- self._exported.add(key)
-
- def get_exported(self):
- return [(k,self._env.get(k)) for k in self._exported]
-
- def split_fields(self, word):
- if not self._ifs_ws or not word:
- return [word]
- return re.split(self._ifs_re, word)
-
- def _update_ifs(self, value):
- """Update the split_fields related variables when IFS character set is
- changed.
- """
- # TODO: handle NULL IFS
-
- # Separate characters in whitespace and non-whitespace
- chars = set(value)
- ws = [c for c in chars if c in _IFS_WHITESPACES]
- nws = [c for c in chars if c not in _IFS_WHITESPACES]
-
- # Keep whitespaces in a string for left and right stripping
- self._ifs_ws = ''.join(ws)
-
- # Build a regexp to split fields
- trailing = '[' + ''.join([re.escape(c) for c in ws]) + ']'
- if nws:
-            # First, the single non-whitespace occurrence.
- nws = '[' + ''.join([re.escape(c) for c in nws]) + ']'
- nws = '(?:' + trailing + '*' + nws + trailing + '*' + '|' + trailing + '+)'
- else:
- # Then mix all parts with quantifiers
- nws = trailing + '+'
- self._ifs_re = re.compile(nws)
-
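For instance, with IFS set to a space plus ':' the generated regular expression splits on either character class. A rough interactive sketch (assumes the Environment constructor is given a valid directory):

    import os

    env = Environment(os.getcwd())
    env['IFS'] = ' :'
    print env.split_fields('a:b c')   # ['a', 'b', 'c']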
- def has_opt(self, opt, val=None):
- return (opt, val) in self._opt
-
- def set_opt(self, opt, val=None):
- self._opt.add((opt, val))
-
- def find_in_path(self, name, pwd=False):
- path = self._env.get('PATH', '').split(os.pathsep)
- if pwd:
- path[:0] = [self['PWD']]
- if os.name == 'nt':
- return win32_find_in_path(name, self._env.get('PATH', ''))
- else:
- raise NotImplementedError()
-
- def define_function(self, name, body):
- if not is_name(name):
- raise ShellSyntaxError('%s is not a valid function name' % repr(name))
- self._functions[name] = body
-
- def remove_function(self, name):
- del self._functions[name]
-
- def is_function(self, name):
- return name in self._functions
-
- def get_function(self, name):
- return self._functions.get(name)
-
-
-name_charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
-name_charset = dict(zip(name_charset,name_charset))
-
-def match_name(s):
-    """Return the longest prefix of s made of characters allowed in names.
-    The prefix may be empty.
- """
- for i,c in enumerate(s):
- if c not in name_charset:
- return s[:i]
- return s
-
-def is_name(s):
- return len([c for c in s if c not in name_charset])<=0
-
-def is_special_param(c):
- return len(c)==1 and c in ('@','*','#','?','-','$','!','0')
-
-def utility_not_implemented(name, *args, **kwargs):
- raise NotImplementedError('%s utility is not implemented' % name)
-
-
-class Utility:
- """Define utilities properties:
- func -- utility callable. See builtin module for utility samples.
- is_special -- see XCU 2.8.
- """
- def __init__(self, func, is_special=0):
- self.func = func
- self.is_special = bool(is_special)
-
-
-def encodeargs(args):
- def encodearg(s):
-        # encodestring wraps its base64 output across lines; join the
-        # chunks back together without the newlines
-        return ''.join(base64.encodestring(s).splitlines())
-
- s = pickle.dumps(args)
- return encodearg(s)
-
-def decodeargs(s):
- s = base64.decodestring(s)
- return pickle.loads(s)
-
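Together these two helpers round-trip arbitrary Python argument lists through pickle plus newline-free base64, so structured arguments can be passed through a command line; a quick check:

    args = ['ls', '-l', 'some dir']
    assert decodeargs(encodeargs(args)) == args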
-
-class GlobError(Exception):
- pass
-
-class Options:
- def __init__(self):
- # True if Mercurial operates with binary streams
- self.hgbinary = True
-
-class Interpreter:
- # Implementation is very basic: the execute() method just makes a DFS on the
- # AST and execute nodes one by one. Nodes are tuple (name,obj) where name
- # is a string identifier and obj the AST element returned by the parser.
- #
-    # Handlers are named after the node identifiers.
- # TODO: check node names and remove the switch in execute with some
- # dynamic getattr() call to find node handlers.
- """Shell interpreter.
-
- The following debugging flags can be passed:
- debug-parsing - enable PLY debugging.
- debug-tree - print the generated AST.
- debug-cmd - trace command execution before word expansion, plus exit status.
- debug-utility - trace utility execution.
- """
-
- # List supported commands.
- COMMANDS = {
- 'cat': Utility(builtin.utility_cat,),
- 'cd': Utility(builtin.utility_cd,),
- ':': Utility(builtin.utility_colon,),
- 'echo': Utility(builtin.utility_echo),
- 'env': Utility(builtin.utility_env),
- 'exit': Utility(builtin.utility_exit),
- 'export': Utility(builtin.builtin_export, is_special=1),
- 'egrep': Utility(builtin.utility_egrep),
- 'fgrep': Utility(builtin.utility_fgrep),
- 'gunzip': Utility(builtin.utility_gunzip),
- 'kill': Utility(builtin.utility_kill),
- 'mkdir': Utility(builtin.utility_mkdir),
- 'netstat': Utility(builtin.utility_netstat),
- 'printf': Utility(builtin.utility_printf),
- 'pwd': Utility(builtin.utility_pwd),
- 'return': Utility(builtin.builtin_return, is_special=1),
- 'sed': Utility(builtin.utility_sed,),
- 'set': Utility(builtin.builtin_set,),
- 'shift': Utility(builtin.builtin_shift,),
- 'sleep': Utility(builtin.utility_sleep,),
- 'sort': Utility(builtin.utility_sort,),
- 'trap': Utility(builtin.builtin_trap, is_special=1),
- 'true': Utility(builtin.utility_true),
- 'unset': Utility(builtin.builtin_unset, is_special=1),
- 'wait': Utility(builtin.builtin_wait, is_special=1),
- }
-
- def __init__(self, pwd, debugflags = [], env=None, redirs=None, stdin=None,
- stdout=None, stderr=None, opts=Options()):
- self._env = env
- if self._env is None:
- self._env = Environment(pwd)
- self._children = {}
-
- self._redirs = redirs
- self._close_redirs = False
-
- if self._redirs is None:
- if stdin is None:
- stdin = sys.stdin
- if stdout is None:
- stdout = sys.stdout
- if stderr is None:
- stderr = sys.stderr
- stdin = FileWrapper('r', stdin, False)
- stdout = FileWrapper('w', stdout, False)
- stderr = FileWrapper('w', stderr, False)
- self._redirs = Redirections(stdin, stdout, stderr)
- self._close_redirs = True
-
- self._debugflags = list(debugflags)
- self._logfile = sys.stderr
- self._options = opts
-
- def close(self):
- """Must be called when the interpreter is no longer used."""
- script = self._env.traps.get('EXIT')
- if script:
- try:
- self.execute_script(script=script)
- except:
- pass
-
- if self._redirs is not None and self._close_redirs:
- self._redirs.close()
- self._redirs = None
-
- def log(self, s):
- self._logfile.write(s)
- self._logfile.flush()
-
- def __getitem__(self, key):
- return self._env[key]
-
- def __setitem__(self, key, value):
- self._env[key] = value
-
- def options(self):
- return self._options
-
- def redirect(self, redirs, ios):
- def add_redir(io):
- if isinstance(io, pyshyacc.IORedirect):
- redirs.add(self, io.op, io.filename, io.io_number)
- else:
- redirs.add_here_document(self, io.name, io.content, io.io_number)
-
- map(add_redir, ios)
- return redirs
-
- def execute_script(self, script=None, ast=None, sourced=False,
- scriptpath=None):
-        """If script is not None, parse the input. Otherwise take the supplied
-        AST. Then execute the AST.
- Return the script exit status.
- """
- try:
- if scriptpath is not None:
- self._env['0'] = os.path.abspath(scriptpath)
-
- if script is not None:
- debug_parsing = ('debug-parsing' in self._debugflags)
- cmds, script = pyshyacc.parse(script, True, debug_parsing)
- if 'debug-tree' in self._debugflags:
- pyshyacc.print_commands(cmds, self._logfile)
- self._logfile.flush()
- else:
- cmds, script = ast, ''
-
- status = 0
- for cmd in cmds:
- try:
- status = self.execute(cmd)
- except ExitSignal as e:
- if sourced:
- raise
- status = int(e.args[0])
- return status
- except ShellError:
- self._env['?'] = 1
- raise
- if 'debug-utility' in self._debugflags or 'debug-cmd' in self._debugflags:
- self.log('returncode ' + str(status)+ '\n')
- return status
- except CommandNotFound as e:
- print >>self._redirs.stderr, str(e)
- self._redirs.stderr.flush()
- # Command not found by non-interactive shell
- # return 127
- raise
- except RedirectionError as e:
- # TODO: should be handled depending on the utility status
- print >>self._redirs.stderr, str(e)
- self._redirs.stderr.flush()
- # Command not found by non-interactive shell
- # return 127
- raise
-
- def dotcommand(self, env, args):
- if len(args) < 1:
- raise ShellError('. expects at least one argument')
- path = args[0]
- if '/' not in path:
- found = env.find_in_path(args[0], True)
- if found:
- path = found[0]
- script = file(path).read()
- return self.execute_script(script=script, sourced=True)
-
- def execute(self, token, redirs=None):
-        """Execute an AST subtree with the supplied redirections overriding the
-        default interpreter ones.
- Return the exit status.
- """
- if not token:
- return 0
-
- if redirs is None:
- redirs = self._redirs
-
- if isinstance(token, list):
- # Commands sequence
- res = 0
- for t in token:
- res = self.execute(t, redirs)
- return res
-
- type, value = token
- status = 0
- if type=='simple_command':
- redirs_copy = redirs.clone()
- try:
- # TODO: define and handle command return values
- # TODO: implement set -e
- status = self._execute_simple_command(value, redirs_copy)
- finally:
- redirs_copy.close()
- elif type=='pipeline':
- status = self._execute_pipeline(value, redirs)
- elif type=='and_or':
- status = self._execute_and_or(value, redirs)
- elif type=='for_clause':
- status = self._execute_for_clause(value, redirs)
- elif type=='while_clause':
- status = self._execute_while_clause(value, redirs)
- elif type=='function_definition':
- status = self._execute_function_definition(value, redirs)
- elif type=='brace_group':
- status = self._execute_brace_group(value, redirs)
- elif type=='if_clause':
- status = self._execute_if_clause(value, redirs)
- elif type=='subshell':
- status = self.subshell(ast=value.cmds, redirs=redirs)
- elif type=='async':
- status = self._asynclist(value)
- elif type=='redirect_list':
- redirs_copy = self.redirect(redirs.clone(), value.redirs)
- try:
- status = self.execute(value.cmd, redirs_copy)
- finally:
- redirs_copy.close()
- else:
- raise NotImplementedError('Unsupported token type ' + type)
-
- if status < 0:
- status = 255
- return status
-
- def _execute_if_clause(self, if_clause, redirs):
- cond_status = self.execute(if_clause.cond, redirs)
- if cond_status==0:
- return self.execute(if_clause.if_cmds, redirs)
- else:
- return self.execute(if_clause.else_cmds, redirs)
-
- def _execute_brace_group(self, group, redirs):
- status = 0
- for cmd in group.cmds:
- status = self.execute(cmd, redirs)
- return status
-
- def _execute_function_definition(self, fundef, redirs):
- self._env.define_function(fundef.name, fundef.body)
- return 0
-
- def _execute_while_clause(self, while_clause, redirs):
- status = 0
- while 1:
- cond_status = 0
- for cond in while_clause.condition:
- cond_status = self.execute(cond, redirs)
-
- if cond_status:
- break
-
- for cmd in while_clause.cmds:
- status = self.execute(cmd, redirs)
-
- return status
-
- def _execute_for_clause(self, for_clause, redirs):
- if not is_name(for_clause.name):
- raise ShellSyntaxError('%s is not a valid name' % repr(for_clause.name))
- items = mappend(self.expand_token, for_clause.items)
-
- status = 0
- for item in items:
- self._env[for_clause.name] = item
- for cmd in for_clause.cmds:
- status = self.execute(cmd, redirs)
- return status
-
- def _execute_and_or(self, or_and, redirs):
- res = self.execute(or_and.left, redirs)
- if (or_and.op=='&&' and res==0) or (or_and.op!='&&' and res!=0):
- res = self.execute(or_and.right, redirs)
- return res
-
- def _execute_pipeline(self, pipeline, redirs):
- if len(pipeline.commands)==1:
- status = self.execute(pipeline.commands[0], redirs)
- else:
- # Execute all commands one after the other
- status = 0
- inpath, outpath = None, None
- try:
-                # Command inputs and outputs cannot really be plugged together
-                # as done by a real shell. Run commands sequentially and chain
-                # their input/output through temporary files.
- tmpfd, inpath = tempfile.mkstemp()
- os.close(tmpfd)
- tmpfd, outpath = tempfile.mkstemp()
- os.close(tmpfd)
-
- inpath = win32_to_unix_path(inpath)
- outpath = win32_to_unix_path(outpath)
-
- for i, cmd in enumerate(pipeline.commands):
- call_redirs = redirs.clone()
- try:
- if i!=0:
- call_redirs.add(self, '<', inpath)
- if i!=len(pipeline.commands)-1:
- call_redirs.add(self, '>', outpath)
-
- status = self.execute(cmd, call_redirs)
-
- # Chain inputs/outputs
- inpath, outpath = outpath, inpath
- finally:
- call_redirs.close()
- finally:
- if inpath: os.remove(inpath)
- if outpath: os.remove(outpath)
-
- if pipeline.reverse_status:
- status = int(not status)
- self._env['?'] = status
- return status
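-
-    # Hedged sketch of the temporary-file chaining above: every command but
-    # the first reads the file the previous command wrote, and the two paths
-    # are swapped after each step. Names and values are illustrative only.
-    def _pipe_chain_sketch(self, inpath, outpath, ncmds):
-        plugs = []
-        for i in range(ncmds):
-            stdin, stdout = None, None
-            if i != 0:
-                stdin = inpath
-            if i != ncmds - 1:
-                stdout = outpath
-            plugs.append((stdin, stdout))
-            inpath, outpath = outpath, inpath
-        return plugs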
-
- def _execute_function(self, name, args, interp, env, stdin, stdout, stderr, *others):
- assert interp is self
-
- func = env.get_function(name)
- #Set positional parameters
- prevargs = None
- try:
- prevargs = env.set_positional_args(args)
- try:
- redirs = Redirections(stdin.dup(), stdout.dup(), stderr.dup())
- try:
- status = self.execute(func, redirs)
- finally:
- redirs.close()
- except ReturnSignal as e:
- status = int(e.args[0])
- env['?'] = status
- return status
- finally:
- #Reset positional parameters
- if prevargs is not None:
- env.set_positional_args(prevargs)
-
- def _execute_simple_command(self, token, redirs):
- """Can raise ReturnSignal when return builtin is called, ExitSignal when
- exit is called, and other shell exceptions upon builtin failures.
- """
- debug_command = 'debug-cmd' in self._debugflags
- if debug_command:
-            self.log('words ' + repr(token.words) + '\n')
-            self.log('assigns ' + repr(token.assigns) + '\n')
-            self.log('redirs ' + repr(token.redirs) + '\n')
-
- is_special = None
- env = self._env
-
- try:
- # Word expansion
- args = []
- for word in token.words:
- args += self.expand_token(word)
- if is_special is None and args:
- is_special = env.is_function(args[0]) or \
- (args[0] in self.COMMANDS and self.COMMANDS[args[0]].is_special)
-
- if debug_command:
-            self.log('_execute_simple_command ' + str(args) + '\n')
-
- if not args:
-            # Redirections happen in a subshell
- redirs = redirs.clone()
- elif not is_special:
- env = self._env.clone()
-
- # Redirections
- self.redirect(redirs, token.redirs)
-
- # Variables assignments
- res = 0
- for type,(k,v) in token.assigns:
- status, expanded = self.expand_variable((k,v))
- if status is not None:
- res = status
- if args:
- env.export(k, expanded)
- else:
- env[k] = expanded
-
- if args and args[0] in ('.', 'source'):
- res = self.dotcommand(env, args[1:])
- elif args:
- if args[0] in self.COMMANDS:
- command = self.COMMANDS[args[0]]
- elif env.is_function(args[0]):
- command = Utility(self._execute_function, is_special=True)
- else:
-                    if '/' not in args[0].replace('\\', '/'):
- cmd = env.find_in_path(args[0])
- if not cmd:
- # TODO: test error code on unknown command => 127
- raise CommandNotFound('Unknown command: "%s"' % args[0])
- else:
- # Handle commands like '/cygdrive/c/foo.bat'
- cmd = cygwin_to_windows_path(args[0])
- if not os.path.exists(cmd):
- raise CommandNotFound('%s: No such file or directory' % args[0])
- shebang = resolve_shebang(cmd)
- if shebang:
- cmd = shebang
- else:
- cmd = [cmd]
- args[0:1] = cmd
- command = Utility(builtin.run_command)
-
- # Command execution
- if 'debug-cmd' in self._debugflags:
- self.log('redirections ' + str(redirs) + '\n')
-
- res = command.func(args[0], args[1:], self, env,
- redirs.stdin(), redirs.stdout(),
- redirs.stderr(), self._debugflags)
-
- if self._env.has_opt('-x'):
- # Trace command execution in shell environment
-                # BUG: it would be hard to reproduce real shell behaviour
-                # since the AST is not annotated with source lines/tokens.
- self._redirs.stdout().write(' '.join(args))
-
- except ReturnSignal:
- raise
- except ShellError as e:
- if is_special or isinstance(e, (ExitSignal,
- ShellSyntaxError, ExpansionError)):
- raise e
- self._redirs.stderr().write(str(e)+'\n')
- return 1
-
- return res
-
- def expand_token(self, word):
- """Expand a word as specified in [2.6 Word Expansions]. Return the list
- of expanded words.
- """
- status, wtrees = self._expand_word(word)
- return map(pyshlex.wordtree_as_string, wtrees)
-
- def expand_variable(self, word):
- """Return a status code (or None if no command expansion occurred)
- and a single word.
- """
- status, wtrees = self._expand_word(word, pathname=False, split=False)
- words = map(pyshlex.wordtree_as_string, wtrees)
- assert len(words)==1
- return status, words[0]
-
- def expand_here_document(self, word):
- """Return the expanded document as a single word. The here document is
- assumed to be unquoted.
- """
- status, wtrees = self._expand_word(word, pathname=False,
- split=False, here_document=True)
- words = map(pyshlex.wordtree_as_string, wtrees)
- assert len(words)==1
- return words[0]
-
- def expand_redirection(self, word):
- """Return a single word."""
- return self.expand_variable(word)[1]
-
- def get_env(self):
- return self._env
-
- def _expand_word(self, token, pathname=True, split=True, here_document=False):
- wtree = pyshlex.make_wordtree(token[1], here_document=here_document)
-
- # TODO: implement tilde expansion
- def expand(wtree):
-            """Return a pseudo wordtree: the tree or its subelements can be
-            empty lists when no value results from the expansion.
- """
- status = None
- for part in wtree:
- if not isinstance(part, list):
- continue
-                if part[0] in ("'", '\\'):
- continue
- elif part[0] in ('`', '$('):
- status, result = self._expand_command(part)
- part[:] = result
- elif part[0] in ('$', '${'):
- part[:] = self._expand_parameter(part, wtree[0]=='"', split)
- elif part[0] in ('', '"'):
- status, result = expand(part)
- part[:] = result
- else:
- raise NotImplementedError('%s expansion is not implemented'
- % part[0])
-            # [] is returned when an expansion results in no field,
-            # like an empty $@
- wtree = [p for p in wtree if p != []]
- if len(wtree) < 3:
- return status, []
- return status, wtree
-
- status, wtree = expand(wtree)
- if len(wtree) == 0:
- return status, wtree
- wtree = pyshlex.normalize_wordtree(wtree)
-
- if split:
- wtrees = self._split_fields(wtree)
- else:
- wtrees = [wtree]
-
- if pathname:
- wtrees = mappend(self._expand_pathname, wtrees)
-
- wtrees = map(self._remove_quotes, wtrees)
- return status, wtrees
-
- def _expand_command(self, wtree):
- # BUG: there is something to do with backslashes and quoted
- # characters here
- command = pyshlex.wordtree_as_string(wtree[1:-1])
- status, output = self.subshell_output(command)
- return status, ['', output, '']
-
- def _expand_parameter(self, wtree, quoted=False, split=False):
- """Return a valid wtree or an empty list when no parameter results."""
- # Get the parameter name
- # TODO: implement weird expansion rules with ':'
- name = pyshlex.wordtree_as_string(wtree[1:-1])
- if not is_name(name) and not is_special_param(name):
- raise ExpansionError('Bad substitution "%s"' % name)
- # TODO: implement special parameters
- if name in ('@', '*'):
- args = self._env.get_positional_args()
- if len(args) == 0:
- return []
- if len(args)<2:
- return ['', ''.join(args), '']
-
- sep = self._env.get('IFS', '')[:1]
- if split and quoted and name=='@':
- # Introduce a new token to tell the caller that these parameters
- # cause a split as specified in 2.5.2
- return ['@'] + args + ['']
- else:
- return ['', sep.join(args), '']
-
- return ['', self._env.get(name, ''), '']
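-
-    # Hedged sketch of the joining branch above, with hypothetical values:
-    # given IFS=':' and positional args ('a', 'b', 'c'), '$*' collapses to
-    # the single field 'a:b:c', wrapped as an unquoted wtree.
-    def _join_positional_sketch(self, args=('a', 'b', 'c'), ifs=':'):
-        sep = ifs[:1]
-        return ['', sep.join(args), '']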
-
- def _split_fields(self, wtree):
- def is_empty(split):
- return split==['', '', '']
-
- def split_positional(quoted):
-            # Return a list of wtrees split according to positional
-            # parameter rules. All remaining '@' groups are removed.
- assert quoted[0]=='"'
-
- splits = [[]]
- for part in quoted:
- if not isinstance(part, list) or part[0]!='@':
- splits[-1].append(part)
- else:
-                    # Empty or single-argument lists were dealt with already
- assert len(part)>3
- # First argument must join with the beginning part of the original word
- splits[-1].append(part[1])
-                    # Create double-quoted expressions for every argument after the first
- for arg in part[2:-1]:
- splits[-1].append('"')
- splits.append(['"', arg])
- return splits
-
-        # At this point, all expansions but pathnames have occurred. Only quoted
-        # and positional sequences remain. Thus, all candidates for field splitting
-        # are in the tree root, or are positional splits ('@') and lie in root
-        # children.
- if not wtree or wtree[0] not in ('', '"'):
- # The whole token is quoted or empty, nothing to split
- return [wtree]
-
- if wtree[0]=='"':
- wtree = ['', wtree, '']
-
- result = [['', '']]
- for part in wtree[1:-1]:
- if isinstance(part, list):
- if part[0]=='"':
- splits = split_positional(part)
- if len(splits)<=1:
- result[-1] += [part, '']
- else:
- # Terminate the current split
- result[-1] += [splits[0], '']
- result += splits[1:-1]
- # Create a new split
- result += [['', splits[-1], '']]
- else:
- result[-1] += [part, '']
- else:
- splits = self._env.split_fields(part)
- if len(splits)<=1:
- # No split
- result[-1][-1] += part
- else:
- # Terminate the current resulting part and create a new one
- result[-1][-1] += splits[0]
- result[-1].append('')
- result += [['', r, ''] for r in splits[1:-1]]
- result += [['', splits[-1]]]
- result[-1].append('')
-
- # Leading and trailing empty groups come from leading/trailing blanks
- if result and is_empty(result[-1]):
- result[-1:] = []
- if result and is_empty(result[0]):
- result[:1] = []
- return result
-
- def _expand_pathname(self, wtree):
- """See [2.6.6 Pathname Expansion]."""
- if self._env.has_opt('-f'):
- return [wtree]
-
-        # All expansions have been performed; only quoted sequences should
-        # remain in the tree. Generate the pattern by folding the tree,
-        # escaping special characters where they appear quoted.
- special_chars = '*?[]'
-
- def make_pattern(wtree):
- subpattern = []
- for part in wtree[1:-1]:
- if isinstance(part, list):
- part = make_pattern(part)
- elif wtree[0]!='':
- for c in part:
- # Meta-characters cannot be quoted
- if c in special_chars:
- raise GlobError()
- subpattern.append(part)
- return ''.join(subpattern)
-
- def pwd_glob(pattern):
- cwd = os.getcwd()
- os.chdir(self._env['PWD'])
- try:
- return glob.glob(pattern)
- finally:
- os.chdir(cwd)
-
- #TODO: check working directory issues here wrt relative patterns
- try:
- pattern = make_pattern(wtree)
- paths = pwd_glob(pattern)
- except GlobError:
-            # BUG: Meta-characters were found in quoted sequences. They should
-            # have been used literally, but this is unsupported by the current
-            # glob module.
- # Instead we consider the whole tree must be used literally and
- # therefore there is no point in globbing. This is wrong when meta
- # characters are mixed with quoted meta in the same pattern like:
- # < foo*"py*" >
- paths = []
-
- if not paths:
- return [wtree]
- return [['', path, ''] for path in paths]
-
- def _remove_quotes(self, wtree):
- """See [2.6.7 Quote Removal]."""
-
- def unquote(wtree):
- unquoted = []
- for part in wtree[1:-1]:
- if isinstance(part, list):
- part = unquote(part)
- unquoted.append(part)
- return ''.join(unquoted)
-
- return ['', unquote(wtree), '']
-
- def subshell(self, script=None, ast=None, redirs=None):
- """Execute the script or AST in a subshell, with inherited redirections
- if redirs is not None.
- """
- if redirs:
- sub_redirs = redirs
- else:
-            # No redirections supplied: clone the interpreter's own
-            sub_redirs = self._redirs.clone()
-
- subshell = None
- try:
- subshell = Interpreter(None, self._debugflags, self._env.clone(True),
- sub_redirs, opts=self._options)
- return subshell.execute_script(script, ast)
- finally:
- if not redirs: sub_redirs.close()
- if subshell: subshell.close()
-
- def subshell_output(self, script):
- """Execute the script in a subshell and return the captured output."""
- # Create temporary file to capture subshell output
- tmpfd, tmppath = tempfile.mkstemp()
- try:
- tmpfile = os.fdopen(tmpfd, 'wb')
- stdout = FileWrapper('w', tmpfile)
-
- redirs = Redirections(self._redirs.stdin().dup(),
- stdout,
- self._redirs.stderr().dup())
- try:
- status = self.subshell(script=script, redirs=redirs)
- finally:
- redirs.close()
- redirs = None
-
- # Extract subshell standard output
- tmpfile = open(tmppath, 'rb')
- try:
- output = tmpfile.read()
- return status, output.rstrip('\n')
- finally:
- tmpfile.close()
- finally:
- os.remove(tmppath)
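-
-    # Hedged usage sketch: this is the capture path used by _expand_command
-    # for `...` and $(...) substitutions. The script content is illustrative.
-    def _subshell_output_sketch(self):
-        status, output = self.subshell_output('echo hello')
-        return status, output    # trailing newlines already stripped above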
-
- def _asynclist(self, cmd):
- args = (self._env.get_variables(), cmd)
- arg = encodeargs(args)
-        assert len(arg) < 30*1024
- cmd = ['pysh.bat', '--ast', '-c', arg]
- p = subprocess.Popen(cmd, cwd=self._env['PWD'])
- self._children[p.pid] = p
- self._env['!'] = p.pid
- return 0
-
- def wait(self, pids=None):
- if not pids:
- pids = self._children.keys()
-
- status = 127
- for pid in pids:
- if pid not in self._children:
- continue
- p = self._children.pop(pid)
- status = p.wait()
-
- return status
-
diff --git a/yocto-poky/bitbake/lib/bb/pysh/lsprof.py b/yocto-poky/bitbake/lib/bb/pysh/lsprof.py
deleted file mode 100644
index b1831c22a..000000000
--- a/yocto-poky/bitbake/lib/bb/pysh/lsprof.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#! /usr/bin/env python
-
-import sys
-from _lsprof import Profiler, profiler_entry
-
-__all__ = ['profile', 'Stats']
-
-def profile(f, *args, **kwds):
- """XXX docstring"""
- p = Profiler()
- p.enable(subcalls=True, builtins=True)
- try:
- f(*args, **kwds)
- finally:
- p.disable()
- return Stats(p.getstats())
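-
-# Hedged usage sketch with a hypothetical workload: profile a call, then
-# sort and print the hottest entries with the Stats class below.
-def _profile_sketch():
-    stats = profile(sorted, range(100000))
-    stats.sort('inlinetime')
-    stats.pprint(top=5)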
-
-
-class Stats(object):
- """XXX docstring"""
-
- def __init__(self, data):
- self.data = data
-
- def sort(self, crit="inlinetime"):
- """XXX docstring"""
- if crit not in profiler_entry.__dict__:
- raise ValueError("Can't sort by %s" % crit)
- self.data.sort(lambda b, a: cmp(getattr(a, crit),
- getattr(b, crit)))
- for e in self.data:
- if e.calls:
- e.calls.sort(lambda b, a: cmp(getattr(a, crit),
- getattr(b, crit)))
-
- def pprint(self, top=None, file=None, limit=None, climit=None):
- """XXX docstring"""
- if file is None:
- file = sys.stdout
- d = self.data
- if top is not None:
- d = d[:top]
- cols = "% 12s %12s %11.4f %11.4f %s\n"
- hcols = "% 12s %12s %12s %12s %s\n"
- cols2 = "+%12s %12s %11.4f %11.4f + %s\n"
- file.write(hcols % ("CallCount", "Recursive", "Total(ms)",
- "Inline(ms)", "module:lineno(function)"))
- count = 0
- for e in d:
- file.write(cols % (e.callcount, e.reccallcount, e.totaltime,
- e.inlinetime, label(e.code)))
- count += 1
- if limit is not None and count == limit:
- return
- ccount = 0
- if e.calls:
- for se in e.calls:
- file.write(cols % ("+%s" % se.callcount, se.reccallcount,
- se.totaltime, se.inlinetime,
- "+%s" % label(se.code)))
- count += 1
- ccount += 1
- if limit is not None and count == limit:
- return
- if climit is not None and ccount == climit:
- break
-
- def freeze(self):
- """Replace all references to code objects with string
- descriptions; this makes it possible to pickle the instance."""
-
- # this code is probably rather ickier than it needs to be!
- for i in range(len(self.data)):
- e = self.data[i]
- if not isinstance(e.code, str):
- self.data[i] = type(e)((label(e.code),) + e[1:])
- if e.calls:
- for j in range(len(e.calls)):
- se = e.calls[j]
- if not isinstance(se.code, str):
- e.calls[j] = type(se)((label(se.code),) + se[1:])
-
-_fn2mod = {}
-
-def label(code):
- if isinstance(code, str):
- return code
- try:
- mname = _fn2mod[code.co_filename]
- except KeyError:
- for k, v in sys.modules.items():
- if v is None:
- continue
- if not hasattr(v, '__file__'):
- continue
- if not isinstance(v.__file__, str):
- continue
- if v.__file__.startswith(code.co_filename):
- mname = _fn2mod[code.co_filename] = k
- break
- else:
- mname = _fn2mod[code.co_filename] = '<%s>'%code.co_filename
-
- return '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name)
-
-
-if __name__ == '__main__':
- import os
- sys.argv = sys.argv[1:]
- if not sys.argv:
- print >> sys.stderr, "usage: lsprof.py <script> <arguments...>"
- sys.exit(2)
- sys.path.insert(0, os.path.abspath(os.path.dirname(sys.argv[0])))
- stats = profile(execfile, sys.argv[0], globals(), locals())
- stats.sort()
- stats.pprint()
diff --git a/yocto-poky/bitbake/lib/bb/pysh/pysh.py b/yocto-poky/bitbake/lib/bb/pysh/pysh.py
deleted file mode 100644
index b4e6145b5..000000000
--- a/yocto-poky/bitbake/lib/bb/pysh/pysh.py
+++ /dev/null
@@ -1,167 +0,0 @@
-# pysh.py - command processing for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-import optparse
-import os
-import sys
-
-import interp
-
-SH_OPT = optparse.OptionParser(prog='pysh', usage="%prog [OPTIONS]", version='0.1')
-SH_OPT.add_option('-c', action='store_true', dest='command_string', default=None,
- help='A string that shall be interpreted by the shell as one or more commands')
-SH_OPT.add_option('--redirect-to', dest='redirect_to', default=None,
- help='Redirect script commands stdout and stderr to the specified file')
-# See utility_command in builtin.py about the reason for this flag.
-SH_OPT.add_option('--redirected', dest='redirected', action='store_true', default=False,
- help='Tell the interpreter that stdout and stderr are actually the same objects, which is really stdout')
-SH_OPT.add_option('--debug-parsing', action='store_true', dest='debug_parsing', default=False,
- help='Trace PLY execution')
-SH_OPT.add_option('--debug-tree', action='store_true', dest='debug_tree', default=False,
- help='Display the generated syntax tree.')
-SH_OPT.add_option('--debug-cmd', action='store_true', dest='debug_cmd', default=False,
- help='Trace command execution before parameters expansion and exit status.')
-SH_OPT.add_option('--debug-utility', action='store_true', dest='debug_utility', default=False,
- help='Trace utility calls, after parameters expansions')
-SH_OPT.add_option('--ast', action='store_true', dest='ast', default=False,
- help='Encoded commands to execute in a subprocess')
-SH_OPT.add_option('--profile', action='store_true', default=False,
- help='Profile pysh run')
-
-
-def split_args(args):
-    # Separate shell arguments from command ones.
-    # Just stop at the first argument not starting with a dash. This is
-    # admittedly broken: it mishandles files starting with a dash and may
-    # swallow option values as the command file. That is not supposed to
-    # happen for now.
- command_index = len(args)
- for i,arg in enumerate(args):
- if not arg.startswith('-'):
- command_index = i
- break
-
- return args[:command_index], args[command_index:]
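-
-# Hedged example of the split rule above, with hypothetical arguments:
-# shell options end at the first non-dash argument.
-def _split_args_sketch():
-    shargs, cmdargs = split_args(['-c', '--ast', 'script.sh', '-x'])
-    assert shargs == ['-c', '--ast']
-    assert cmdargs == ['script.sh', '-x']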
-
-
-def fixenv(env):
- path = env.get('PATH')
- if path is not None:
- parts = path.split(os.pathsep)
- # Remove Windows utilities from PATH, they are useless at best and
- # some of them (find) may be confused with other utilities.
- parts = [p for p in parts if 'system32' not in p.lower()]
- env['PATH'] = os.pathsep.join(parts)
- if env.get('HOME') is None:
- # Several utilities, including cvsps, cannot work without
- # a defined HOME directory.
- env['HOME'] = os.path.expanduser('~')
- return env
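-
-# Hedged example with a hypothetical PATH: system32 entries are dropped and
-# HOME is guaranteed to be defined.
-def _fixenv_sketch():
-    env = fixenv({'PATH': os.pathsep.join([r'C:\Windows\system32', r'C:\bin'])})
-    assert 'system32' not in env['PATH']
-    assert 'HOME' in env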
-
-def _sh(cwd, shargs, cmdargs, options, debugflags=None, env=None):
- if os.environ.get('PYSH_TEXT') != '1':
- import msvcrt
- for fp in (sys.stdin, sys.stdout, sys.stderr):
- msvcrt.setmode(fp.fileno(), os.O_BINARY)
-
- hgbin = os.environ.get('PYSH_HGTEXT') != '1'
-
- if debugflags is None:
- debugflags = []
- if options.debug_parsing: debugflags.append('debug-parsing')
- if options.debug_utility: debugflags.append('debug-utility')
- if options.debug_cmd: debugflags.append('debug-cmd')
- if options.debug_tree: debugflags.append('debug-tree')
-
- if env is None:
- env = fixenv(dict(os.environ))
- if cwd is None:
- cwd = os.getcwd()
-
- if not cmdargs:
- # Nothing to do
- return 0
-
- ast = None
- command_file = None
- if options.command_string:
- input = cmdargs[0]
- if not options.ast:
- input += '\n'
- else:
- args, input = interp.decodeargs(input), None
- env, ast = args
- cwd = env.get('PWD', cwd)
- else:
- command_file = cmdargs[0]
- arguments = cmdargs[1:]
-
- prefix = interp.resolve_shebang(command_file, ignoreshell=True)
- if prefix:
- input = ' '.join(prefix + [command_file] + arguments)
- else:
- # Read commands from file
- f = file(command_file)
- try:
- # Trailing newline to help the parser
- input = f.read() + '\n'
- finally:
- f.close()
-
- redirect = None
- try:
- if options.redirected:
- stdout = sys.stdout
- stderr = stdout
- elif options.redirect_to:
- redirect = open(options.redirect_to, 'wb')
- stdout = redirect
- stderr = redirect
- else:
- stdout = sys.stdout
- stderr = sys.stderr
-
- # TODO: set arguments to environment variables
- opts = interp.Options()
- opts.hgbinary = hgbin
- ip = interp.Interpreter(cwd, debugflags, stdout=stdout, stderr=stderr,
- opts=opts)
- try:
- # Export given environment in shell object
- for k,v in env.iteritems():
- ip.get_env().export(k,v)
- return ip.execute_script(input, ast, scriptpath=command_file)
- finally:
- ip.close()
- finally:
- if redirect is not None:
- redirect.close()
-
-def sh(cwd=None, args=None, debugflags=None, env=None):
- if args is None:
- args = sys.argv[1:]
- shargs, cmdargs = split_args(args)
- options, shargs = SH_OPT.parse_args(shargs)
-
- if options.profile:
- import lsprof
- p = lsprof.Profiler()
- p.enable(subcalls=True)
- try:
- return _sh(cwd, shargs, cmdargs, options, debugflags, env)
- finally:
- p.disable()
- stats = lsprof.Stats(p.getstats())
- stats.sort()
- stats.pprint(top=10, file=sys.stderr, climit=5)
- else:
- return _sh(cwd, shargs, cmdargs, options, debugflags, env)
-
-def main():
- sys.exit(sh())
-
-if __name__=='__main__':
- main()
diff --git a/yocto-poky/bitbake/lib/bb/pysh/pyshlex.py b/yocto-poky/bitbake/lib/bb/pysh/pyshlex.py
deleted file mode 100644
index b30123675..000000000
--- a/yocto-poky/bitbake/lib/bb/pysh/pyshlex.py
+++ /dev/null
@@ -1,888 +0,0 @@
-# pyshlex.py - PLY compatible lexer for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-# TODO:
-# - review all "char in 'abc'" snippets: the empty string can be matched
-# - test line continuations within quoted/expansion strings
-# - eof is buggy wrt sublexers
-# - the lexer cannot really work in pull mode as it would be required to run
-# PLY in pull mode. It was designed to work incrementally and it would not be
-# that hard to enable pull mode.
-import re
-try:
- s = set()
- del s
-except NameError:
- from Set import Set as set
-
-from ply import lex
-from sherrors import *
-
-class NeedMore(Exception):
- pass
-
-def is_blank(c):
- return c in (' ', '\t')
-
-_RE_DIGITS = re.compile(r'^\d+$')
-
-def are_digits(s):
- return _RE_DIGITS.search(s) is not None
-
-_OPERATORS = dict([
- ('&&', 'AND_IF'),
- ('||', 'OR_IF'),
- (';;', 'DSEMI'),
- ('<<', 'DLESS'),
- ('>>', 'DGREAT'),
- ('<&', 'LESSAND'),
- ('>&', 'GREATAND'),
- ('<>', 'LESSGREAT'),
- ('<<-', 'DLESSDASH'),
- ('>|', 'CLOBBER'),
- ('&', 'AMP'),
- (';', 'COMMA'),
- ('<', 'LESS'),
- ('>', 'GREATER'),
- ('(', 'LPARENS'),
- (')', 'RPARENS'),
-])
-
-#Make a function to silence pychecker "Local variable shadows global"
-def make_partial_ops():
- partials = {}
- for k in _OPERATORS:
- for i in range(1, len(k)+1):
- partials[k[:i]] = None
- return partials
-
-_PARTIAL_OPERATORS = make_partial_ops()
-
-def is_partial_op(s):
- """Return True if s matches a non-empty subpart of an operator starting
- at its first character.
- """
- return s in _PARTIAL_OPERATORS
-
-def is_op(s):
-    """Return the operator identifier if s matches an operator, or None
-    otherwise.
- """
- return _OPERATORS.get(s)
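-
-# Hedged sketch of how the lexer grows operators one character at a time
-# (see Lexer._parse_op below): extend while the text is still a partial
-# operator, remembering the longest complete match.
-def _longest_op_sketch(s):
-    best = None
-    for i in range(1, len(s) + 1):
-        if not is_partial_op(s[:i]):
-            break
-        if is_op(s[:i]):
-            best = s[:i]
-    return best    # e.g. _longest_op_sketch('<<-x') == '<<-'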
-
-_RESERVEDS = dict([
- ('if', 'If'),
- ('then', 'Then'),
- ('else', 'Else'),
- ('elif', 'Elif'),
- ('fi', 'Fi'),
- ('do', 'Do'),
- ('done', 'Done'),
- ('case', 'Case'),
- ('esac', 'Esac'),
- ('while', 'While'),
- ('until', 'Until'),
- ('for', 'For'),
- ('{', 'Lbrace'),
- ('}', 'Rbrace'),
- ('!', 'Bang'),
- ('in', 'In'),
- ('|', 'PIPE'),
-])
-
-def get_reserved(s):
- return _RESERVEDS.get(s)
-
-_RE_NAME = re.compile(r'^[0-9a-zA-Z_]+$')
-
-def is_name(s):
- return _RE_NAME.search(s) is not None
-
-def find_chars(seq, chars):
- for i,v in enumerate(seq):
- if v in chars:
- return i,v
- return -1, None
-
-class WordLexer:
-    """WordLexer parses quoted or expansion expressions and returns an
-    expression tree. The input string can be any well-formed sequence
-    beginning with a quoting or expansion character. Embedded expressions are
-    handled recursively. The resulting tree is made of lists and strings.
-    Lists represent quoted or expansion expressions. Each list's first element
-    is the opening separator, its last one the closing separator. In between
-    can be any number of strings or lists for sub-expressions. Non-quoted or
-    non-expansion expressions can be written as strings or as lists with
-    empty strings as starting and ending delimiters.
- """
-
- NAME_CHARSET = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
- NAME_CHARSET = dict(zip(NAME_CHARSET, NAME_CHARSET))
-
- SPECIAL_CHARSET = '@*#?-$!0'
-
-    #Characters which can be escaped depend on the current delimiters
- ESCAPABLE = {
- '`': set(['$', '\\', '`']),
- '"': set(['$', '\\', '`', '"']),
- "'": set(),
- }
-
- def __init__(self, heredoc = False):
- # _buffer is the unprocessed input characters buffer
- self._buffer = []
- # _stack is empty or contains a quoted list being processed
- # (this is the DFS path to the quoted expression being evaluated).
- self._stack = []
- self._escapable = None
- # True when parsing unquoted here documents
- self._heredoc = heredoc
-
- def add(self, data, eof=False):
- """Feed the lexer with more data. If the quoted expression can be
- delimited, return a tuple (expr, remaining) containing the expression
- tree and the unconsumed data.
- Otherwise, raise NeedMore.
- """
- self._buffer += list(data)
- self._parse(eof)
-
- result = self._stack[0]
- remaining = ''.join(self._buffer)
- self._stack = []
- self._buffer = []
- return result, remaining
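-
-    # Hedged usage sketch: a complete single-quoted token yields its tree
-    # plus the unconsumed tail.
-    def _add_usage_sketch(self):
-        tree, remaining = WordLexer().add("'foo' bar", eof=True)
-        assert tree == ["'", 'foo', "'"]
-        assert remaining == ' bar'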
-
- def _is_escapable(self, c, delim=None):
- if delim is None:
- if self._heredoc:
-                # Backslashes work as if they were double quoted in unquoted
-                # here-documents
- delim = '"'
- else:
- if len(self._stack)<=1:
- return True
- delim = self._stack[-2][0]
-
- escapables = self.ESCAPABLE.get(delim, None)
- return escapables is None or c in escapables
-
- def _parse_squote(self, buf, result, eof):
- if not buf:
- raise NeedMore()
- try:
- pos = buf.index("'")
- except ValueError:
- raise NeedMore()
- result[-1] += ''.join(buf[:pos])
- result += ["'"]
- return pos+1, True
-
- def _parse_bquote(self, buf, result, eof):
- if not buf:
- raise NeedMore()
-
- if buf[0]=='\n':
- #Remove line continuations
- result[:] = ['', '', '']
- elif self._is_escapable(buf[0]):
- result[-1] += buf[0]
- result += ['']
- else:
- #Keep as such
- result[:] = ['', '\\'+buf[0], '']
-
- return 1, True
-
- def _parse_dquote(self, buf, result, eof):
- if not buf:
- raise NeedMore()
- pos, sep = find_chars(buf, '$\\`"')
- if pos==-1:
- raise NeedMore()
-
- result[-1] += ''.join(buf[:pos])
- if sep=='"':
- result += ['"']
- return pos+1, True
- else:
- #Keep everything until the separator and defer processing
- return pos, False
-
- def _parse_command(self, buf, result, eof):
- if not buf:
- raise NeedMore()
-
- chars = '$\\`"\''
- if result[0] == '$(':
- chars += ')'
- pos, sep = find_chars(buf, chars)
- if pos == -1:
- raise NeedMore()
-
- result[-1] += ''.join(buf[:pos])
- if (result[0]=='$(' and sep==')') or (result[0]=='`' and sep=='`'):
- result += [sep]
- return pos+1, True
- else:
- return pos, False
-
- def _parse_parameter(self, buf, result, eof):
- if not buf:
- raise NeedMore()
-
- pos, sep = find_chars(buf, '$\\`"\'}')
- if pos==-1:
- raise NeedMore()
-
- result[-1] += ''.join(buf[:pos])
- if sep=='}':
- result += [sep]
- return pos+1, True
- else:
- return pos, False
-
- def _parse_dollar(self, buf, result, eof):
- sep = result[0]
- if sep=='$':
- if not buf:
- #TODO: handle empty $
- raise NeedMore()
- if buf[0]=='(':
- if len(buf)==1:
- raise NeedMore()
-
- if buf[1]=='(':
- result[0] = '$(('
- buf[:2] = []
- else:
- result[0] = '$('
- buf[:1] = []
-
- elif buf[0]=='{':
- result[0] = '${'
- buf[:1] = []
- else:
- if buf[0] in self.SPECIAL_CHARSET:
- result[-1] = buf[0]
- read = 1
- else:
- for read,c in enumerate(buf):
- if c not in self.NAME_CHARSET:
- break
- else:
- if not eof:
- raise NeedMore()
- read += 1
-
- result[-1] += ''.join(buf[0:read])
-
- if not result[-1]:
- result[:] = ['', result[0], '']
- else:
- result += ['']
- return read,True
-
- sep = result[0]
- if sep=='$(':
- parsefunc = self._parse_command
- elif sep=='${':
- parsefunc = self._parse_parameter
- else:
- raise NotImplementedError(sep)
-
- pos, closed = parsefunc(buf, result, eof)
- return pos, closed
-
- def _parse(self, eof):
- buf = self._buffer
- stack = self._stack
- recurse = False
-
- while 1:
- if not stack or recurse:
- if not buf:
- raise NeedMore()
- if buf[0] not in ('"\\`$\''):
- raise ShellSyntaxError('Invalid quoted string sequence')
- stack.append([buf[0], ''])
- buf[:1] = []
- recurse = False
-
- result = stack[-1]
- if result[0]=="'":
- parsefunc = self._parse_squote
- elif result[0]=='\\':
- parsefunc = self._parse_bquote
- elif result[0]=='"':
- parsefunc = self._parse_dquote
- elif result[0]=='`':
- parsefunc = self._parse_command
- elif result[0][0]=='$':
- parsefunc = self._parse_dollar
- else:
- raise NotImplementedError()
-
- read, closed = parsefunc(buf, result, eof)
-
- buf[:read] = []
- if closed:
- if len(stack)>1:
- #Merge in parent expression
- parsed = stack.pop()
- stack[-1] += [parsed]
- stack[-1] += ['']
- else:
- break
- else:
- recurse = True
-
-def normalize_wordtree(wtree):
- """Fold back every literal sequence (delimited with empty strings) into
- parent sequence.
- """
- def normalize(wtree):
- result = []
- for part in wtree[1:-1]:
- if isinstance(part, list):
- part = normalize(part)
- if part[0]=='':
- #Move the part content back at current level
- result += part[1:-1]
- continue
- elif not part:
- #Remove empty strings
- continue
- result.append(part)
- if not result:
- result = ['']
- return [wtree[0]] + result + [wtree[-1]]
-
- return normalize(wtree)
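-
-# Hedged example on a hypothetical tree: literal groups delimited with empty
-# strings are folded back into their parent.
-def _normalize_sketch():
-    tree = ['', 'a', ['', 'b', ''], 'c', '']
-    assert normalize_wordtree(tree) == ['', 'a', 'b', 'c', '']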
-
-
-def make_wordtree(token, here_document=False):
- """Parse a delimited token and return a tree similar to the ones returned by
-    WordLexer. token may contain any combination of expansion/quoted fields
-    and plain (non-expansion) ones.
- """
- tree = ['']
- remaining = token
- delimiters = '\\$`'
- if not here_document:
- delimiters += '\'"'
-
- while 1:
- pos, sep = find_chars(remaining, delimiters)
- if pos==-1:
- tree += [remaining, '']
- return normalize_wordtree(tree)
- tree.append(remaining[:pos])
- remaining = remaining[pos:]
-
- try:
- result, remaining = WordLexer(heredoc = here_document).add(remaining, True)
- except NeedMore:
-            raise ShellSyntaxError('Invalid token "%s"' % token)
- tree.append(result)
-
-
-def wordtree_as_string(wtree):
- """Rewrite an expression tree generated by make_wordtree as string."""
- def visit(node, output):
- for child in node:
- if isinstance(child, list):
- visit(child, output)
- else:
- output.append(child)
-
- output = []
- visit(wtree, output)
- return ''.join(output)
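-
-# Hedged round-trip sketch: make_wordtree splits a token into quoted and
-# literal fields; wordtree_as_string folds it back unchanged.
-def _wordtree_roundtrip_sketch(token='a"b c"d'):
-    tree = make_wordtree(token)    # ['', 'a', ['"', 'b c', '"'], 'd', '']
-    assert wordtree_as_string(tree) == token
-    return tree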
-
-
-def unquote_wordtree(wtree):
- """Fold the word tree while removing quotes everywhere. Other expansion
- sequences are joined as such.
- """
- def unquote(wtree):
- unquoted = []
- if wtree[0] in ('', "'", '"', '\\'):
- wtree = wtree[1:-1]
-
- for part in wtree:
- if isinstance(part, list):
- part = unquote(part)
- unquoted.append(part)
- return ''.join(unquoted)
-
- return unquote(wtree)
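-
-# Hedged example continuing the sketch above: quote removal keeps the quoted
-# content but drops the delimiters.
-def _unquote_sketch():
-    assert unquote_wordtree(make_wordtree('a"b c"d')) == 'ab cd'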
-
-
-class HereDocLexer:
-    """HereDocLexer delimits here-document content, from the starting newline
-    (not included) up to the closing delimiter line (included).
-    """
- def __init__(self, op, delim):
- assert op in ('<<', '<<-')
- if not delim:
- raise ShellSyntaxError('invalid here document delimiter %s' % str(delim))
-
- self._op = op
- self._delim = delim
- self._buffer = []
- self._token = []
-
- def add(self, data, eof):
- """If the here-document was delimited, return a tuple (content, remaining).
- Raise NeedMore() otherwise.
- """
- self._buffer += list(data)
- self._parse(eof)
- token = ''.join(self._token)
- remaining = ''.join(self._buffer)
-        self._token, self._buffer = [], []
- return token, remaining
-
- def _parse(self, eof):
- while 1:
- #Look for first unescaped newline. Quotes may be ignored
- escaped = False
- for i,c in enumerate(self._buffer):
- if escaped:
- escaped = False
- elif c=='\\':
- escaped = True
- elif c=='\n':
- break
- else:
- i = -1
-
- if i==-1 or self._buffer[i]!='\n':
- if not eof:
- raise NeedMore()
- #No more data, maybe the last line is closing delimiter
- line = ''.join(self._buffer)
- eol = ''
- self._buffer[:] = []
- else:
- line = ''.join(self._buffer[:i])
- eol = self._buffer[i]
- self._buffer[:i+1] = []
-
- if self._op=='<<-':
- line = line.lstrip('\t')
-
- if line==self._delim:
- break
-
- self._token += [line, eol]
- if i==-1:
- break
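-
-# Hedged usage sketch: content runs up to the line preceding the delimiter,
-# and the delimiter line itself is consumed.
-def _heredoc_sketch():
-    content, remaining = HereDocLexer('<<', 'EOF').add('foo\nbar\nEOF\ntail', True)
-    assert content == 'foo\nbar\n'
-    assert remaining == 'tail'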
-
-class Token:
- #TODO: check this is still in use
- OPERATOR = 'OPERATOR'
- WORD = 'WORD'
-
- def __init__(self):
- self.value = ''
- self.type = None
-
- def __getitem__(self, key):
- #Behave like a two elements tuple
- if key==0:
- return self.type
- if key==1:
- return self.value
- raise IndexError(key)
-
-
-class HereDoc:
- def __init__(self, op, name=None):
- self.op = op
- self.name = name
- self.pendings = []
-
-TK_COMMA = 'COMMA'
-TK_AMPERSAND = 'AMP'
-TK_OP = 'OP'
-TK_TOKEN = 'TOKEN'
-TK_COMMENT = 'COMMENT'
-TK_NEWLINE = 'NEWLINE'
-TK_IONUMBER = 'IO_NUMBER'
-TK_ASSIGNMENT = 'ASSIGNMENT_WORD'
-TK_HERENAME = 'HERENAME'
-
-class Lexer:
- """Main lexer.
-
- Call add() until the script AST is returned.
- """
-    # Here-document handling makes the whole thing more complex because
-    # here-documents basically force tokens to be reordered: here-content
-    # must come right after the operator and the here-document name, while
-    # some other tokens might be following the here-document expression on
-    # the same line.
- #
- # So, here-doc states are basically:
- # *self._state==ST_NORMAL
- # - self._heredoc.op is None: no here-document
- # - self._heredoc.op is not None but name is: here-document operator matched,
- # waiting for the document name/delimiter
- # - self._heredoc.op and name are not None: here-document is ready, following
- # tokens are being stored and will be pushed again when the document is
- # completely parsed.
- # *self._state==ST_HEREDOC
- # - The here-document is being delimited by self._herelexer. Once it is done
- # the content is pushed in front of the pending token list then all these
- # tokens are pushed once again.
- ST_NORMAL = 'ST_NORMAL'
- ST_OP = 'ST_OP'
- ST_BACKSLASH = 'ST_BACKSLASH'
- ST_QUOTED = 'ST_QUOTED'
- ST_COMMENT = 'ST_COMMENT'
- ST_HEREDOC = 'ST_HEREDOC'
-
- #Match end of backquote strings
- RE_BACKQUOTE_END = re.compile(r'(?<!\\)(`)')
-
- def __init__(self, parent_state = None):
- self._input = []
- self._pos = 0
-
- self._token = ''
- self._type = TK_TOKEN
-
- self._state = self.ST_NORMAL
- self._parent_state = parent_state
- self._wordlexer = None
-
- self._heredoc = HereDoc(None)
- self._herelexer = None
-
-        ### Following attributes are not used for delimiting tokens and can
-        ### safely be changed after here-document detection (see _push_token)
-
- # Count the number of tokens following a 'For' reserved word. Needed to
- # return an 'In' reserved word if it comes in third place.
- self._for_count = None
-
- def add(self, data, eof=False):
- """Feed the lexer with data.
-
- When eof is set to True, returns unconsumed data or raise if the lexer
- is in the middle of a delimiting operation.
- Raise NeedMore otherwise.
- """
- self._input += list(data)
- self._parse(eof)
- self._input[:self._pos] = []
- return ''.join(self._input)
-
- def _parse(self, eof):
- while self._state:
- if self._pos>=len(self._input):
- if not eof:
- raise NeedMore()
- elif self._state not in (self.ST_OP, self.ST_QUOTED, self.ST_HEREDOC):
- #Delimit the current token and leave cleanly
- self._push_token('')
- break
- else:
-                    #Let the sublexers handle the eof themselves
- pass
-
- if self._state==self.ST_NORMAL:
- self._parse_normal()
- elif self._state==self.ST_COMMENT:
- self._parse_comment()
- elif self._state==self.ST_OP:
- self._parse_op(eof)
- elif self._state==self.ST_QUOTED:
- self._parse_quoted(eof)
- elif self._state==self.ST_HEREDOC:
- self._parse_heredoc(eof)
- else:
- assert False, "Unknown state " + str(self._state)
-
- if self._heredoc.op is not None:
- raise ShellSyntaxError('missing here-document delimiter')
-
- def _parse_normal(self):
- c = self._input[self._pos]
- if c=='\n':
- self._push_token(c)
- self._token = c
- self._type = TK_NEWLINE
- self._push_token('')
- self._pos += 1
- elif c in ('\\', '\'', '"', '`', '$'):
- self._state = self.ST_QUOTED
- elif is_partial_op(c):
- self._push_token(c)
-
- self._type = TK_OP
- self._token += c
- self._pos += 1
- self._state = self.ST_OP
- elif is_blank(c):
- self._push_token(c)
-
- #Discard blanks
- self._pos += 1
- elif self._token:
- self._token += c
- self._pos += 1
- elif c=='#':
- self._state = self.ST_COMMENT
- self._type = TK_COMMENT
- self._pos += 1
- else:
- self._pos += 1
- self._token += c
-
- def _parse_op(self, eof):
- assert self._token
-
- while 1:
- if self._pos>=len(self._input):
- if not eof:
- raise NeedMore()
- c = ''
- else:
- c = self._input[self._pos]
-
- op = self._token + c
- if c and is_partial_op(op):
- #Still parsing an operator
- self._token = op
- self._pos += 1
- else:
- #End of operator
- self._push_token(c)
- self._state = self.ST_NORMAL
- break
-
- def _parse_comment(self):
- while 1:
- if self._pos>=len(self._input):
- raise NeedMore()
-
- c = self._input[self._pos]
- if c=='\n':
- #End of comment, do not consume the end of line
- self._state = self.ST_NORMAL
- break
- else:
- self._token += c
- self._pos += 1
-
- def _parse_quoted(self, eof):
- """Precondition: the starting backquote/dollar is still in the input queue."""
- if not self._wordlexer:
- self._wordlexer = WordLexer()
-
- if self._pos<len(self._input):
-            #Transfer input queue characters into the subparser
- input = self._input[self._pos:]
- self._pos += len(input)
-
- wtree, remaining = self._wordlexer.add(input, eof)
- self._wordlexer = None
- self._token += wordtree_as_string(wtree)
-
- #Put unparsed character back in the input queue
- if remaining:
- self._input[self._pos:self._pos] = list(remaining)
- self._state = self.ST_NORMAL
-
- def _parse_heredoc(self, eof):
- assert not self._token
-
- if self._herelexer is None:
- self._herelexer = HereDocLexer(self._heredoc.op, self._heredoc.name)
-
- if self._pos<len(self._input):
-            #Transfer input queue characters into the subparser
- input = self._input[self._pos:]
- self._pos += len(input)
-
- self._token, remaining = self._herelexer.add(input, eof)
-
- #Reset here-document state
- self._herelexer = None
- heredoc, self._heredoc = self._heredoc, HereDoc(None)
- if remaining:
- self._input[self._pos:self._pos] = list(remaining)
- self._state = self.ST_NORMAL
-
- #Push pending tokens
- heredoc.pendings[:0] = [(self._token, self._type, heredoc.name)]
- for token, type, delim in heredoc.pendings:
- self._token = token
- self._type = type
- self._push_token(delim)
-
- def _push_token(self, delim):
- if not self._token:
- return 0
-
- if self._heredoc.op is not None:
- if self._heredoc.name is None:
- #Here-document name
- if self._type!=TK_TOKEN:
- raise ShellSyntaxError("expecting here-document name, got '%s'" % self._token)
- self._heredoc.name = unquote_wordtree(make_wordtree(self._token))
- self._type = TK_HERENAME
- else:
- #Capture all tokens until the newline starting the here-document
- if self._type==TK_NEWLINE:
- assert self._state==self.ST_NORMAL
- self._state = self.ST_HEREDOC
-
- self._heredoc.pendings.append((self._token, self._type, delim))
- self._token = ''
- self._type = TK_TOKEN
- return 1
-
- # BEWARE: do not change parser state from here to the end of the function:
-        # when parsing between a here-document operator and the end of the
-        # line, tokens are stored in self._heredoc.pendings. Therefore, they
-        # will not reach the section below.
-
- #Check operators
- if self._type==TK_OP:
- #False positive because of partial op matching
- op = is_op(self._token)
- if not op:
- self._type = TK_TOKEN
- else:
- #Map to the specific operator
- self._type = op
- if self._token in ('<<', '<<-'):
- #Done here rather than in _parse_op because there is no need
- #to change the parser state since we are still waiting for
- #the here-document name
- if self._heredoc.op is not None:
- raise ShellSyntaxError("syntax error near token '%s'" % self._token)
- assert self._heredoc.op is None
- self._heredoc.op = self._token
-
- if self._type==TK_TOKEN:
- if '=' in self._token and not delim:
- if self._token.startswith('='):
- #Token is a WORD... a TOKEN that is.
- pass
- else:
- prev = self._token[:self._token.find('=')]
- if is_name(prev):
- self._type = TK_ASSIGNMENT
- else:
- #Just a token (unspecified)
- pass
- else:
- reserved = get_reserved(self._token)
- if reserved is not None:
- if reserved=='In' and self._for_count!=2:
- #Sorry, not a reserved word after all
- pass
- else:
- self._type = reserved
- if reserved in ('For', 'Case'):
- self._for_count = 0
- elif are_digits(self._token) and delim in ('<', '>'):
- #Detect IO_NUMBER
- self._type = TK_IONUMBER
- elif self._token==';':
- self._type = TK_COMMA
- elif self._token=='&':
- self._type = TK_AMPERSAND
- elif self._type==TK_COMMENT:
- #Comments are not part of sh grammar, ignore them
- self._token = ''
- self._type = TK_TOKEN
- return 0
-
- if self._for_count is not None:
- #Track token count in 'For' expression to detect 'In' reserved words.
- #Can only be in third position, no need to go beyond
- self._for_count += 1
- if self._for_count==3:
- self._for_count = None
-
- self.on_token((self._token, self._type))
- self._token = ''
- self._type = TK_TOKEN
- return 1
-
- def on_token(self, token):
- raise NotImplementedError
-
-
-tokens = [
- TK_TOKEN,
-# To silence yacc unused token warnings
-# TK_COMMENT,
- TK_NEWLINE,
- TK_IONUMBER,
- TK_ASSIGNMENT,
- TK_HERENAME,
-]
-
-#Add specific operators
-tokens += _OPERATORS.values()
-#Add reserved words
-tokens += _RESERVEDS.values()
-
-class PLYLexer(Lexer):
- """Bridge Lexer and PLY lexer interface."""
- def __init__(self):
- Lexer.__init__(self)
- self._tokens = []
- self._current = 0
- self.lineno = 0
-
- def on_token(self, token):
- value, type = token
-
- self.lineno = 0
- t = lex.LexToken()
- t.value = value
- t.type = type
- t.lexer = self
- t.lexpos = 0
- t.lineno = 0
-
- self._tokens.append(t)
-
- def is_empty(self):
- return not bool(self._tokens)
-
- #PLY compliant interface
- def token(self):
- if self._current>=len(self._tokens):
- return None
- t = self._tokens[self._current]
- self._current += 1
- return t
-
-
-def get_tokens(s):
- """Parse the input string and return a tuple (tokens, unprocessed) where
- tokens is a list of parsed tokens and unprocessed is the part of the input
- string left untouched by the lexer.
- """
- lexer = PLYLexer()
- untouched = lexer.add(s, True)
- tokens = []
- while 1:
- token = lexer.token()
- if token is None:
- break
- tokens.append(token)
-
- tokens = [(t.value, t.type) for t in tokens]
- return tokens, untouched
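-
-# Hedged usage sketch: lexing a simple command line.
-def _get_tokens_sketch():
-    tokens, untouched = get_tokens('echo foo\n')
-    # tokens == [('echo', 'TOKEN'), ('foo', 'TOKEN'), ('\n', 'NEWLINE')]
-    return tokens, untouched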
diff --git a/yocto-poky/bitbake/lib/bb/pysh/pyshyacc.py b/yocto-poky/bitbake/lib/bb/pysh/pyshyacc.py
deleted file mode 100644
index e8e80aac4..000000000
--- a/yocto-poky/bitbake/lib/bb/pysh/pyshyacc.py
+++ /dev/null
@@ -1,779 +0,0 @@
-# pyshyacc.py - PLY grammar definition for pysh
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""PLY grammar file.
-"""
-import os.path
-import sys
-
-import pyshlex
-tokens = pyshlex.tokens
-
-from ply import yacc
-import sherrors
-
-class IORedirect:
- def __init__(self, op, filename, io_number=None):
- self.op = op
- self.filename = filename
- self.io_number = io_number
-
-class HereDocument:
- def __init__(self, op, name, content, io_number=None):
- self.op = op
- self.name = name
- self.content = content
- self.io_number = io_number
-
-def make_io_redirect(p):
- """Make an IORedirect instance from the input 'io_redirect' production."""
- name, io_number, io_target = p
- assert name=='io_redirect'
-
- if io_target[0]=='io_file':
- io_type, io_op, io_file = io_target
- return IORedirect(io_op, io_file, io_number)
- elif io_target[0]=='io_here':
- io_type, io_op, io_name, io_content = io_target
- return HereDocument(io_op, io_name, io_content, io_number)
- else:
- assert False, "Invalid IO redirection token %s" % repr(io_type)
-
-class SimpleCommand:
- """
- assigns contains (name, value) pairs.
- """
- def __init__(self, words, redirs, assigns):
- self.words = list(words)
- self.redirs = list(redirs)
- self.assigns = list(assigns)
-
-class Pipeline:
- def __init__(self, commands, reverse_status=False):
- self.commands = list(commands)
- assert self.commands #Grammar forbids this
- self.reverse_status = reverse_status
-
-class AndOr:
- def __init__(self, op, left, right):
- self.op = str(op)
- self.left = left
- self.right = right
-
-class ForLoop:
- def __init__(self, name, items, cmds):
- self.name = str(name)
- self.items = list(items)
- self.cmds = list(cmds)
-
-class WhileLoop:
- def __init__(self, condition, cmds):
- self.condition = list(condition)
- self.cmds = list(cmds)
-
-class UntilLoop:
- def __init__(self, condition, cmds):
- self.condition = list(condition)
- self.cmds = list(cmds)
-
-class FunDef:
- def __init__(self, name, body):
- self.name = str(name)
- self.body = body
-
-class BraceGroup:
- def __init__(self, cmds):
- self.cmds = list(cmds)
-
-class IfCond:
- def __init__(self, cond, if_cmds, else_cmds):
- self.cond = list(cond)
- self.if_cmds = if_cmds
- self.else_cmds = else_cmds
-
-class Case:
- def __init__(self, name, items):
- self.name = name
- self.items = items
-
-class SubShell:
- def __init__(self, cmds):
- self.cmds = cmds
-
-class RedirectList:
- def __init__(self, cmd, redirs):
- self.cmd = cmd
- self.redirs = list(redirs)
-
-def get_production(productions, ptype):
- """productions must be a list of production tuples like (name, obj) where
- name is the production string identifier.
-    Return the first production named 'ptype'. Raise KeyError if none can be
-    found.
- """
- for production in productions:
- if production is not None and production[0]==ptype:
- return production
- raise KeyError(ptype)
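-
-# Hedged example with hypothetical productions: fetch one production by name
-# from a p[1:] style slice.
-def _get_production_sketch():
-    productions = [('term', 'x'), ('separator', ';')]
-    assert get_production(productions, 'separator') == ('separator', ';')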
-
-#-------------------------------------------------------------------------------
-# PLY grammar definition
-#-------------------------------------------------------------------------------
-
-def p_multiple_commands(p):
- """multiple_commands : newline_sequence
- | complete_command
- | multiple_commands complete_command"""
- if len(p)==2:
- if p[1] is not None:
- p[0] = [p[1]]
- else:
- p[0] = []
- else:
- p[0] = p[1] + [p[2]]
-
-def p_complete_command(p):
- """complete_command : list separator
- | list"""
- if len(p)==3 and p[2] and p[2][1] == '&':
- p[0] = ('async', p[1])
- else:
- p[0] = p[1]
-
-def p_list(p):
- """list : list separator_op and_or
- | and_or"""
- if len(p)==2:
- p[0] = [p[1]]
- else:
- #if p[2]!=';':
- # raise NotImplementedError('AND-OR list asynchronous execution is not implemented')
- p[0] = p[1] + [p[3]]
-
-def p_and_or(p):
- """and_or : pipeline
- | and_or AND_IF linebreak pipeline
- | and_or OR_IF linebreak pipeline"""
- if len(p)==2:
- p[0] = p[1]
- else:
- p[0] = ('and_or', AndOr(p[2], p[1], p[4]))
-
-def p_maybe_bang_word(p):
- """maybe_bang_word : Bang"""
- p[0] = ('maybe_bang_word', p[1])
-
-def p_pipeline(p):
- """pipeline : pipe_sequence
- | bang_word pipe_sequence"""
- if len(p)==3:
- p[0] = ('pipeline', Pipeline(p[2][1:], True))
- else:
- p[0] = ('pipeline', Pipeline(p[1][1:]))
-
-def p_pipe_sequence(p):
- """pipe_sequence : command
- | pipe_sequence PIPE linebreak command"""
- if len(p)==2:
- p[0] = ['pipe_sequence', p[1]]
- else:
- p[0] = p[1] + [p[4]]
-
-def p_command(p):
- """command : simple_command
- | compound_command
- | compound_command redirect_list
- | function_definition"""
-
- if p[1][0] in ( 'simple_command',
- 'for_clause',
- 'while_clause',
- 'until_clause',
- 'case_clause',
- 'if_clause',
- 'function_definition',
- 'subshell',
- 'brace_group',):
- if len(p) == 2:
- p[0] = p[1]
- else:
- p[0] = ('redirect_list', RedirectList(p[1], p[2][1:]))
- else:
- raise NotImplementedError('%s command is not implemented' % repr(p[1][0]))
-
-def p_compound_command(p):
- """compound_command : brace_group
- | subshell
- | for_clause
- | case_clause
- | if_clause
- | while_clause
- | until_clause"""
- p[0] = p[1]
-
-def p_subshell(p):
- """subshell : LPARENS compound_list RPARENS"""
- p[0] = ('subshell', SubShell(p[2][1:]))
-
-def p_compound_list(p):
- """compound_list : term
- | newline_list term
- | term separator
- | newline_list term separator"""
- productions = p[1:]
- try:
- sep = get_production(productions, 'separator')
- if sep[1]!=';':
- raise NotImplementedError()
- except KeyError:
- pass
- term = get_production(productions, 'term')
- p[0] = ['compound_list'] + term[1:]
-
-def p_term(p):
- """term : term separator and_or
- | and_or"""
- if len(p)==2:
- p[0] = ['term', p[1]]
- else:
- if p[2] is not None and p[2][1] == '&':
- p[0] = ['term', ('async', p[1][1:])] + [p[3]]
- else:
- p[0] = p[1] + [p[3]]
-
-def p_maybe_for_word(p):
- # Rearrange 'For' priority wrt TOKEN. See p_for_word
- """maybe_for_word : For"""
- p[0] = ('maybe_for_word', p[1])
-
-def p_for_clause(p):
- """for_clause : for_word name linebreak do_group
- | for_word name linebreak in sequential_sep do_group
- | for_word name linebreak in wordlist sequential_sep do_group"""
- productions = p[1:]
- do_group = get_production(productions, 'do_group')
- try:
- items = get_production(productions, 'in')[1:]
- except KeyError:
- raise NotImplementedError('"in" omission is not implemented')
-
- try:
- items = get_production(productions, 'wordlist')[1:]
- except KeyError:
- items = []
-
- name = p[2]
- p[0] = ('for_clause', ForLoop(name, items, do_group[1:]))
-
-def p_name(p):
- """name : token""" #Was NAME instead of token
- p[0] = p[1]
-
-def p_in(p):
- """in : In"""
- p[0] = ('in', p[1])
-
-def p_wordlist(p):
- """wordlist : wordlist token
- | token"""
- if len(p)==2:
- p[0] = ['wordlist', ('TOKEN', p[1])]
- else:
- p[0] = p[1] + [('TOKEN', p[2])]
-
-def p_case_clause(p):
- """case_clause : Case token linebreak in linebreak case_list Esac
- | Case token linebreak in linebreak case_list_ns Esac
- | Case token linebreak in linebreak Esac"""
- if len(p) < 8:
- items = []
- else:
- items = p[6][1:]
- name = p[2]
- p[0] = ('case_clause', Case(name, [c[1] for c in items]))
-
-def p_case_list_ns(p):
- """case_list_ns : case_list case_item_ns
- | case_item_ns"""
- p_case_list(p)
-
-def p_case_list(p):
- """case_list : case_list case_item
- | case_item"""
- if len(p)==2:
- p[0] = ['case_list', p[1]]
- else:
- p[0] = p[1] + [p[2]]
-
-def p_case_item_ns(p):
- """case_item_ns : pattern RPARENS linebreak
- | pattern RPARENS compound_list linebreak
- | LPARENS pattern RPARENS linebreak
- | LPARENS pattern RPARENS compound_list linebreak"""
- p_case_item(p)
-
-def p_case_item(p):
- """case_item : pattern RPARENS linebreak DSEMI linebreak
- | pattern RPARENS compound_list DSEMI linebreak
- | LPARENS pattern RPARENS linebreak DSEMI linebreak
- | LPARENS pattern RPARENS compound_list DSEMI linebreak"""
- if len(p) < 7:
- name = p[1][1:]
- else:
- name = p[2][1:]
-
- try:
- cmds = get_production(p[1:], "compound_list")[1:]
- except KeyError:
- cmds = []
-
- p[0] = ('case_item', (name, cmds))
-
-def p_pattern(p):
- """pattern : token
- | pattern PIPE token"""
- if len(p)==2:
- p[0] = ['pattern', ('TOKEN', p[1])]
- else:
- p[0] = p[1] + [('TOKEN', p[2])]
-
-def p_maybe_if_word(p):
- # Rearrange 'If' priority wrt TOKEN. See p_if_word
- """maybe_if_word : If"""
- p[0] = ('maybe_if_word', p[1])
-
-def p_maybe_then_word(p):
- # Rearrange 'Then' priority wrt TOKEN. See p_then_word
- """maybe_then_word : Then"""
- p[0] = ('maybe_then_word', p[1])
-
-def p_if_clause(p):
- """if_clause : if_word compound_list then_word compound_list else_part Fi
- | if_word compound_list then_word compound_list Fi"""
- else_part = []
- if len(p)==7:
- else_part = p[5]
- p[0] = ('if_clause', IfCond(p[2][1:], p[4][1:], else_part))
-
-def p_else_part(p):
- """else_part : Elif compound_list then_word compound_list else_part
- | Elif compound_list then_word compound_list
- | Else compound_list"""
- if len(p)==3:
- p[0] = p[2][1:]
- else:
- else_part = []
- if len(p)==6:
- else_part = p[5]
- p[0] = ('elif', IfCond(p[2][1:], p[4][1:], else_part))
-
-def p_while_clause(p):
- """while_clause : While compound_list do_group"""
- p[0] = ('while_clause', WhileLoop(p[2][1:], p[3][1:]))
-
-def p_maybe_until_word(p):
- # Rearrange 'Until' priority wrt TOKEN. See p_until_word
- """maybe_until_word : Until"""
- p[0] = ('maybe_until_word', p[1])
-
-def p_until_clause(p):
- """until_clause : until_word compound_list do_group"""
- p[0] = ('until_clause', UntilLoop(p[2][1:], p[3][1:]))
-
-def p_function_definition(p):
- """function_definition : fname LPARENS RPARENS linebreak function_body"""
- p[0] = ('function_definition', FunDef(p[1], p[5]))
-
-def p_function_body(p):
- """function_body : compound_command
- | compound_command redirect_list"""
- if len(p)!=2:
-        raise NotImplementedError('function redirection lists are not implemented')
- p[0] = p[1]
-
-def p_fname(p):
- """fname : TOKEN""" #Was NAME instead of token
- p[0] = p[1]
-
-def p_brace_group(p):
- """brace_group : Lbrace compound_list Rbrace"""
- p[0] = ('brace_group', BraceGroup(p[2][1:]))
-
-def p_maybe_done_word(p):
- #See p_assignment_word for details.
- """maybe_done_word : Done"""
- p[0] = ('maybe_done_word', p[1])
-
-def p_maybe_do_word(p):
- """maybe_do_word : Do"""
- p[0] = ('maybe_do_word', p[1])
-
-def p_do_group(p):
- """do_group : do_word compound_list done_word"""
- #Do group contains a list of AndOr
- p[0] = ['do_group'] + p[2][1:]
-
-def p_simple_command(p):
- """simple_command : cmd_prefix cmd_word cmd_suffix
- | cmd_prefix cmd_word
- | cmd_prefix
- | cmd_name cmd_suffix
- | cmd_name"""
- words, redirs, assigns = [], [], []
- for e in p[1:]:
- name = e[0]
- if name in ('cmd_prefix', 'cmd_suffix'):
- for sube in e[1:]:
- subname = sube[0]
- if subname=='io_redirect':
- redirs.append(make_io_redirect(sube))
- elif subname=='ASSIGNMENT_WORD':
- assigns.append(sube)
- else:
- words.append(sube)
- elif name in ('cmd_word', 'cmd_name'):
- words.append(e)
-
- cmd = SimpleCommand(words, redirs, assigns)
- p[0] = ('simple_command', cmd)
-
-def p_cmd_name(p):
- """cmd_name : TOKEN"""
- p[0] = ('cmd_name', p[1])
-
-def p_cmd_word(p):
- """cmd_word : token"""
- p[0] = ('cmd_word', p[1])
-
-def p_maybe_assignment_word(p):
- #See p_assignment_word for details.
- """maybe_assignment_word : ASSIGNMENT_WORD"""
- p[0] = ('maybe_assignment_word', p[1])
-
-def p_cmd_prefix(p):
- """cmd_prefix : io_redirect
- | cmd_prefix io_redirect
- | assignment_word
- | cmd_prefix assignment_word"""
- try:
- prefix = get_production(p[1:], 'cmd_prefix')
- except KeyError:
- prefix = ['cmd_prefix']
-
- try:
- value = get_production(p[1:], 'assignment_word')[1]
- value = ('ASSIGNMENT_WORD', value.split('=', 1))
- except KeyError:
- value = get_production(p[1:], 'io_redirect')
- p[0] = prefix + [value]
-
-def p_cmd_suffix(p):
- """cmd_suffix : io_redirect
- | cmd_suffix io_redirect
- | token
- | cmd_suffix token
- | maybe_for_word
- | cmd_suffix maybe_for_word
- | maybe_done_word
- | cmd_suffix maybe_done_word
- | maybe_do_word
- | cmd_suffix maybe_do_word
- | maybe_until_word
- | cmd_suffix maybe_until_word
- | maybe_assignment_word
- | cmd_suffix maybe_assignment_word
- | maybe_if_word
- | cmd_suffix maybe_if_word
- | maybe_then_word
- | cmd_suffix maybe_then_word
- | maybe_bang_word
- | cmd_suffix maybe_bang_word"""
- try:
- suffix = get_production(p[1:], 'cmd_suffix')
- token = p[2]
- except KeyError:
- suffix = ['cmd_suffix']
- token = p[1]
-
- if isinstance(token, tuple):
- if token[0]=='io_redirect':
- p[0] = suffix + [token]
- else:
- #Convert maybe_* to TOKEN if necessary
- p[0] = suffix + [('TOKEN', token[1])]
- else:
- p[0] = suffix + [('TOKEN', token)]
-
-def p_redirect_list(p):
- """redirect_list : io_redirect
- | redirect_list io_redirect"""
- if len(p) == 2:
- p[0] = ['redirect_list', make_io_redirect(p[1])]
- else:
- p[0] = p[1] + [make_io_redirect(p[2])]
-
-def p_io_redirect(p):
- """io_redirect : io_file
- | IO_NUMBER io_file
- | io_here
- | IO_NUMBER io_here"""
- if len(p)==3:
- p[0] = ('io_redirect', p[1], p[2])
- else:
- p[0] = ('io_redirect', None, p[1])
-
-def p_io_file(p):
- #Return the tuple (operator, filename)
- """io_file : LESS filename
- | LESSAND filename
- | GREATER filename
- | GREATAND filename
- | DGREAT filename
- | LESSGREAT filename
- | CLOBBER filename"""
- #Extract the filename string from the filename production
- p[0] = ('io_file', p[1], p[2][1])
-
-def p_filename(p):
- #Return the filename
- """filename : TOKEN"""
- p[0] = ('filename', p[1])
-
-def p_io_here(p):
- """io_here : DLESS here_end
- | DLESSDASH here_end"""
- p[0] = ('io_here', p[1], p[2][1], p[2][2])
-
-def p_here_end(p):
- """here_end : HERENAME TOKEN"""
- p[0] = ('here_document', p[1], p[2])
-
-def p_newline_sequence(p):
- # Nothing in the grammar can handle leading NEWLINE productions, so add
- # this one with the lowest possible priority relative to newline_list.
- """newline_sequence : newline_list"""
- p[0] = None
-
-def p_newline_list(p):
- """newline_list : NEWLINE
- | newline_list NEWLINE"""
- p[0] = None
-
-def p_linebreak(p):
- """linebreak : newline_list
- | empty"""
- p[0] = None
-
-def p_separator_op(p):
- """separator_op : COMMA
- | AMP"""
- p[0] = p[1]
-
-def p_separator(p):
- """separator : separator_op linebreak
- | newline_list"""
- if len(p)==2:
- #Ignore newlines
- p[0] = None
- else:
- #Keep the separator operator
- p[0] = ('separator', p[1])
-
-def p_sequential_sep(p):
- """sequential_sep : COMMA linebreak
- | newline_list"""
- p[0] = None
-
-# Low priority TOKEN => for_word conversion.
-# Let maybe_for_word be used as a token when necessary in higher priority
-# rules.
-def p_for_word(p):
- """for_word : maybe_for_word"""
- p[0] = p[1]
-
-def p_if_word(p):
- """if_word : maybe_if_word"""
- p[0] = p[1]
-
-def p_then_word(p):
- """then_word : maybe_then_word"""
- p[0] = p[1]
-
-def p_done_word(p):
- """done_word : maybe_done_word"""
- p[0] = p[1]
-
-def p_do_word(p):
- """do_word : maybe_do_word"""
- p[0] = p[1]
-
-def p_until_word(p):
- """until_word : maybe_until_word"""
- p[0] = p[1]
-
-def p_assignment_word(p):
- """assignment_word : maybe_assignment_word"""
- p[0] = ('assignment_word', p[1][1])
-
-def p_bang_word(p):
- """bang_word : maybe_bang_word"""
- p[0] = ('bang_word', p[1][1])
-
-def p_token(p):
- """token : TOKEN
- | Fi"""
- p[0] = p[1]
-
-def p_empty(p):
- 'empty :'
- p[0] = None
-
-# Error rule for syntax errors
-def p_error(p):
- msg = []
- w = msg.append
- w('%r\n' % p)
- w('followed by:\n')
- for i in range(5):
- n = yacc.token()
- if not n:
- break
- w(' %r\n' % n)
- raise sherrors.ShellSyntaxError(''.join(msg))
-
-# Build the parser
-try:
- import pyshtables
-except ImportError:
- outputdir = os.path.dirname(__file__)
- if not os.access(outputdir, os.W_OK):
- outputdir = ''
- yacc.yacc(tabmodule = 'pyshtables', outputdir = outputdir, debug = 0)
-else:
- yacc.yacc(tabmodule = 'pysh.pyshtables', write_tables = 0, debug = 0)
-
-
-def parse(input, eof=False, debug=False):
- """Parse a whole script at once and return the generated AST and unconsumed
- data in a tuple.
-
- NOTE: eof is probably meaningless for now, as the parser is unable to work
- in pull mode. It should be set to True.
- """
- lexer = pyshlex.PLYLexer()
- remaining = lexer.add(input, eof)
- if lexer.is_empty():
- return [], remaining
- if debug:
- debug = 2
- return yacc.parse(lexer=lexer, debug=debug), remaining
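A minimal usage sketch of parse(), assuming the module is importable as bb.pysh.pyshyacc (inferred from its path) and running under Python 2 like the module itself:

    from bb.pysh import pyshyacc

    # eof=True because the parser cannot work in pull mode (see NOTE above).
    commands, remaining = pyshyacc.parse('echo hello | wc -c\n', eof=True)
    assert remaining == ''   # the whole input was consumed
    pyshyacc.print_commands(commands)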
-
-#-------------------------------------------------------------------------------
-# AST rendering helpers
-#-------------------------------------------------------------------------------
-
-def format_commands(v):
- """Return a tree made of strings and lists. Make command trees easier to
- display.
- """
- if isinstance(v, list):
- return [format_commands(c) for c in v]
- if isinstance(v, tuple):
- if len(v)==2 and isinstance(v[0], str) and not isinstance(v[1], str):
- if v[0] == 'async':
- return ['AsyncList', map(format_commands, v[1])]
- else:
- #Avoid decomposing tuples like ('pipeline', Pipeline(...))
- return format_commands(v[1])
- return format_commands(list(v))
- elif isinstance(v, IfCond):
- name = ['IfCond']
- name += ['if', map(format_commands, v.cond)]
- name += ['then', map(format_commands, v.if_cmds)]
- name += ['else', map(format_commands, v.else_cmds)]
- return name
- elif isinstance(v, ForLoop):
- name = ['ForLoop']
- name += [repr(v.name)+' in ', map(str, v.items)]
- name += ['commands', map(format_commands, v.cmds)]
- return name
- elif isinstance(v, AndOr):
- return [v.op, format_commands(v.left), format_commands(v.right)]
- elif isinstance(v, Pipeline):
- name = 'Pipeline'
- if v.reverse_status:
- name = '!' + name
- return [name, format_commands(v.commands)]
- elif isinstance(v, Case):
- name = ['Case']
- name += [v.name, format_commands(v.items)]
- return name
- elif isinstance(v, SimpleCommand):
- name = ['SimpleCommand']
- if v.words:
- name += ['words', map(str, v.words)]
- if v.assigns:
- assigns = [tuple(a[1]) for a in v.assigns]
- name += ['assigns', map(str, assigns)]
- if v.redirs:
- name += ['redirs', map(format_commands, v.redirs)]
- return name
- elif isinstance(v, RedirectList):
- name = ['RedirectList']
- if v.redirs:
- name += ['redirs', map(format_commands, v.redirs)]
- name += ['command', format_commands(v.cmd)]
- return name
- elif isinstance(v, IORedirect):
- return ' '.join(map(str, (v.io_number, v.op, v.filename)))
- elif isinstance(v, HereDocument):
- return ' '.join(map(str, (v.io_number, v.op, repr(v.name), repr(v.content))))
- elif isinstance(v, SubShell):
- return ['SubShell', map(format_commands, v.cmds)]
- else:
- return repr(v)
-
-def print_commands(cmds, output=sys.stdout):
- """Pretty print a command tree."""
- def print_tree(cmd, spaces, output):
- if isinstance(cmd, list):
- for c in cmd:
- print_tree(c, spaces + 3, output)
- else:
- print >>output, ' '*spaces + str(cmd)
-
- formatted = format_commands(cmds)
- print_tree(formatted, 0, output)
-
-
-def stringify_commands(cmds):
- """Serialize a command tree as a string.
-
- The returned string is not pretty; it is currently used for unit tests only.
- """
- def stringify(value):
- output = []
- if isinstance(value, list):
- formatted = []
- for v in value:
- formatted.append(stringify(v))
- formatted = ' '.join(formatted)
- output.append(''.join(['<', formatted, '>']))
- else:
- output.append(value)
- return ' '.join(output)
-
- return stringify(format_commands(cmds))
-
-
-def visit_commands(cmds, callable):
- """Visit the command tree and execute callable on every Pipeline and
- SimpleCommand instances.
- """
- if isinstance(cmds, (tuple, list)):
- map(lambda c: visit_commands(c,callable), cmds)
- elif isinstance(cmds, (Pipeline, SimpleCommand)):
- callable(cmds)
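A toy sketch of the visitor above (Python 2; the exact nodes visited depend on the AST shape, since the walk only descends through lists and tuples):

    tree, _ = parse('echo hello\n', eof=True)
    seen = []
    visit_commands(tree, seen.append)
    # 'seen' holds the Pipeline/SimpleCommand instances reachable through
    # the list/tuple spine of the tree.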
diff --git a/yocto-poky/bitbake/lib/bb/pysh/sherrors.py b/yocto-poky/bitbake/lib/bb/pysh/sherrors.py
deleted file mode 100644
index 49d0533de..000000000
--- a/yocto-poky/bitbake/lib/bb/pysh/sherrors.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# sherrors.py - shell errors and signals
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""Define shell exceptions and error codes.
-"""
-
-class ShellError(Exception):
- pass
-
-class ShellSyntaxError(ShellError):
- pass
-
-class UtilityError(ShellError):
- """Raised upon utility syntax error (option or operand error)."""
- pass
-
-class ExpansionError(ShellError):
- pass
-
-class CommandNotFound(ShellError):
- """Specified command was not found."""
- pass
-
-class RedirectionError(ShellError):
- pass
-
-class VarAssignmentError(ShellError):
- """Variable assignment error."""
- pass
-
-class ExitSignal(ShellError):
- """Exit signal."""
- pass
-
-class ReturnSignal(ShellError):
- """Exit signal."""
- pass
diff --git a/yocto-poky/bitbake/lib/bb/pysh/subprocess_fix.py b/yocto-poky/bitbake/lib/bb/pysh/subprocess_fix.py
deleted file mode 100644
index 46eca2280..000000000
--- a/yocto-poky/bitbake/lib/bb/pysh/subprocess_fix.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# subprocess - Subprocesses with accessible I/O streams
-#
-# For more information about this module, see PEP 324.
-#
-# This module should remain compatible with Python 2.2, see PEP 291.
-#
-# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
-#
-# Licensed to PSF under a Contributor Agreement.
-# See http://www.python.org/2.4/license for licensing details.
-
-def list2cmdline(seq):
- """
- Translate a sequence of arguments into a command line
- string, using the same rules as the MS C runtime:
-
- 1) Arguments are delimited by white space, which is either a
- space or a tab.
-
- 2) A string surrounded by double quotation marks is
- interpreted as a single argument, regardless of white space
- contained within. A quoted string can be embedded in an
- argument.
-
- 3) A double quotation mark preceded by a backslash is
- interpreted as a literal double quotation mark.
-
- 4) Backslashes are interpreted literally, unless they
- immediately precede a double quotation mark.
-
- 5) If backslashes immediately precede a double quotation mark,
- every pair of backslashes is interpreted as a literal
- backslash. If the number of backslashes is odd, the last
- backslash escapes the next double quotation mark as
- described in rule 3.
- """
-
- # See
- # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp
- result = []
- needquote = False
- for arg in seq:
- bs_buf = []
-
- # Add a space to separate this argument from the others
- if result:
- result.append(' ')
-
- needquote = (" " in arg) or ("\t" in arg) or ("|" in arg) or arg == ""
- if needquote:
- result.append('"')
-
- for c in arg:
- if c == '\\':
- # Don't know if we need to double yet.
- bs_buf.append(c)
- elif c == '"':
- # Double backslashes.
- result.append('\\' * len(bs_buf)*2)
- bs_buf = []
- result.append('\\"')
- else:
- # Normal char
- if bs_buf:
- result.extend(bs_buf)
- bs_buf = []
- result.append(c)
-
- # Add remaining backslashes, if any.
- if bs_buf:
- result.extend(bs_buf)
-
- if needquote:
- result.extend(bs_buf)
- result.append('"')
-
- return ''.join(result)
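Worked examples of the quoting rules above, doctest-style (the expected outputs follow from the rules rather than from a captured session):

    >>> list2cmdline(['a b', 'c'])   # rules 1-2: embedded space forces quoting
    '"a b" c'
    >>> list2cmdline(['say "hi"'])   # rule 3: quotes get backslash-escaped
    '"say \\"hi\\""'
    >>> list2cmdline(['x\\y'])       # rule 4: a lone backslash stays literal
    'x\\y'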
diff --git a/yocto-poky/bitbake/lib/bb/runqueue.py b/yocto-poky/bitbake/lib/bb/runqueue.py
deleted file mode 100644
index e1b9b2e66..000000000
--- a/yocto-poky/bitbake/lib/bb/runqueue.py
+++ /dev/null
@@ -1,2285 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'RunQueue' implementation
-
-Handles preparation and execution of a queue of tasks
-"""
-
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import copy
-import os
-import sys
-import signal
-import stat
-import fcntl
-import errno
-import logging
-import re
-import bb
-from bb import msg, data, event
-from bb import monitordisk
-import subprocess
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-
-bblogger = logging.getLogger("BitBake")
-logger = logging.getLogger("BitBake.RunQueue")
-
-__find_md5__ = re.compile( r'(?i)(?<![a-z0-9])[a-f0-9]{32}(?![a-z0-9])' )
-
-class RunQueueStats:
- """
- Holds statistics on the tasks handled by the associated runQueue
- """
- def __init__(self, total):
- self.completed = 0
- self.skipped = 0
- self.failed = 0
- self.active = 0
- self.total = total
-
- def copy(self):
- obj = self.__class__(self.total)
- obj.__dict__.update(self.__dict__)
- return obj
-
- def taskFailed(self):
- self.active = self.active - 1
- self.failed = self.failed + 1
-
- def taskCompleted(self, number = 1):
- self.active = self.active - number
- self.completed = self.completed + number
-
- def taskSkipped(self, number = 1):
- self.active = self.active + number
- self.skipped = self.skipped + number
-
- def taskActive(self):
- self.active = self.active + 1
-
-# These values indicate the next step due to be run in the
-# runQueue state machine
-runQueuePrepare = 2
-runQueueSceneInit = 3
-runQueueSceneRun = 4
-runQueueRunInit = 5
-runQueueRunning = 6
-runQueueFailed = 7
-runQueueCleanUp = 8
-runQueueComplete = 9
-
-class RunQueueScheduler(object):
- """
- Control the order tasks are scheduled in.
- """
- name = "basic"
-
- def __init__(self, runqueue, rqdata):
- """
- The default scheduler just returns the first buildable task (the
- priority map is sorted by task number)
- """
- self.rq = runqueue
- self.rqdata = rqdata
- self.numTasks = len(self.rqdata.runq_fnid)
-
- self.prio_map = []
- self.prio_map.extend(range(self.numTasks))
-
- self.buildable = []
- self.stamps = {}
- for taskid in xrange(self.numTasks):
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[taskid]]
- taskname = self.rqdata.runq_task[taskid]
- self.stamps[taskid] = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
- if self.rq.runq_buildable[taskid] == 1:
- self.buildable.append(taskid)
-
- self.rev_prio_map = None
-
- def next_buildable_task(self):
- """
- Return the id of the first task we find that is buildable
- """
- self.buildable = [x for x in self.buildable if not self.rq.runq_running[x] == 1]
- if not self.buildable:
- return None
- if len(self.buildable) == 1:
- taskid = self.buildable[0]
- stamp = self.stamps[taskid]
- if stamp not in self.rq.build_stamps.itervalues():
- return taskid
-
- if not self.rev_prio_map:
- self.rev_prio_map = range(self.numTasks)
- for taskid in xrange(self.numTasks):
- self.rev_prio_map[self.prio_map[taskid]] = taskid
-
- best = None
- bestprio = None
- for taskid in self.buildable:
- prio = self.rev_prio_map[taskid]
- if bestprio is None or bestprio > prio:
- stamp = self.stamps[taskid]
- if stamp in self.rq.build_stamps.itervalues():
- continue
- bestprio = prio
- best = taskid
-
- return best
-
- def next(self):
- """
- Return the id of the task we should build next
- """
- if self.rq.stats.active < self.rq.number_tasks:
- return self.next_buildable_task()
-
- def newbuilable(self, task):
- self.buildable.append(task)
-
-class RunQueueSchedulerSpeed(RunQueueScheduler):
- """
- A scheduler optimised for speed. The priority map is sorted by task weight,
- heavier weighted tasks (tasks needed by the most other tasks) are run first.
- """
- name = "speed"
-
- def __init__(self, runqueue, rqdata):
- """
- The priority map is sorted by task weight.
- """
- RunQueueScheduler.__init__(self, runqueue, rqdata)
-
- sortweight = sorted(copy.deepcopy(self.rqdata.runq_weight))
- copyweight = copy.deepcopy(self.rqdata.runq_weight)
- self.prio_map = []
-
- for weight in sortweight:
- idx = copyweight.index(weight)
- self.prio_map.append(idx)
- copyweight[idx] = -1
-
- self.prio_map.reverse()
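A standalone illustration of the priority-map construction above, using a plain list in place of rqdata.runq_weight (toy data):

    runq_weight = [3, 7, 5]            # toy weights; task 1 is heaviest
    sortweight = sorted(runq_weight)   # [3, 5, 7]
    copyweight = list(runq_weight)
    prio_map = []
    for weight in sortweight:
        idx = copyweight.index(weight)
        prio_map.append(idx)
        copyweight[idx] = -1           # avoid matching a duplicate twice
    prio_map.reverse()
    # prio_map == [1, 2, 0]: heaviest task first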
-
-class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed):
- """
- A scheduler optimised to complete .bb files as quickly as possible. The
- priority map is sorted by task weight, but then reordered so once a given
- .bb file starts to build, it's completed as quickly as possible. This works
- well where disk space is at a premium and classes like OE's rm_work are in
- force.
- """
- name = "completion"
-
- def __init__(self, runqueue, rqdata):
- RunQueueSchedulerSpeed.__init__(self, runqueue, rqdata)
-
- #FIXME - whilst this groups all fnids together it does not reorder the
- #fnid groups optimally.
-
- basemap = copy.deepcopy(self.prio_map)
- self.prio_map = []
- while (len(basemap) > 0):
- entry = basemap.pop(0)
- self.prio_map.append(entry)
- fnid = self.rqdata.runq_fnid[entry]
- todel = []
- for entry in basemap:
- entry_fnid = self.rqdata.runq_fnid[entry]
- if entry_fnid == fnid:
- todel.append(basemap.index(entry))
- self.prio_map.append(entry)
- todel.reverse()
- for idx in todel:
- del basemap[idx]
-
-class RunQueueData:
- """
- BitBake Run Queue implementation
- """
- def __init__(self, rq, cooker, cfgData, dataCache, taskData, targets):
- self.cooker = cooker
- self.dataCache = dataCache
- self.taskData = taskData
- self.targets = targets
- self.rq = rq
- self.warn_multi_bb = False
-
- self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST", True) or ""
- self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
-
- self.reset()
-
- def reset(self):
- self.runq_fnid = []
- self.runq_task = []
- self.runq_depends = []
- self.runq_revdeps = []
- self.runq_hash = []
-
- def runq_depends_names(self, ids):
- import re
- ret = []
- for id in self.runq_depends[ids]:
- nam = os.path.basename(self.get_user_idstring(id))
- nam = re.sub("_[^,]*,", ",", nam)
- ret.extend([nam])
- return ret
-
- def get_task_name(self, task):
- return self.runq_task[task]
-
- def get_task_file(self, task):
- return self.taskData.fn_index[self.runq_fnid[task]]
-
- def get_task_hash(self, task):
- return self.runq_hash[task]
-
- def get_user_idstring(self, task, task_name_suffix = ""):
- fn = self.taskData.fn_index[self.runq_fnid[task]]
- taskname = self.runq_task[task] + task_name_suffix
- return "%s, %s" % (fn, taskname)
-
- def get_short_user_idstring(self, task, task_name_suffix = ""):
- fn = self.taskData.fn_index[self.runq_fnid[task]]
- pn = self.dataCache.pkg_fn[fn]
- taskname = self.runq_task[task] + task_name_suffix
- return "%s:%s" % (pn, taskname)
-
-
- def get_task_id(self, fnid, taskname):
- for listid in xrange(len(self.runq_fnid)):
- if self.runq_fnid[listid] == fnid and self.runq_task[listid] == taskname:
- return listid
- return None
-
- def circular_depchains_handler(self, tasks):
- """
- Some tasks aren't buildable, likely due to circular dependency issues.
- Identify the circular dependencies and print them in a user-readable format.
- """
- from copy import deepcopy
-
- valid_chains = []
- explored_deps = {}
- msgs = []
-
- def chain_reorder(chain):
- """
- Reorder a dependency chain so the lowest task id is first
- """
- lowest = 0
- new_chain = []
- for entry in xrange(len(chain)):
- if chain[entry] < chain[lowest]:
- lowest = entry
- new_chain.extend(chain[lowest:])
- new_chain.extend(chain[:lowest])
- return new_chain
-
- def chain_compare_equal(chain1, chain2):
- """
- Compare two dependency chains and see if they're the same
- """
- if len(chain1) != len(chain2):
- return False
- for index in xrange(len(chain1)):
- if chain1[index] != chain2[index]:
- return False
- return True
-
- def chain_array_contains(chain, chain_array):
- """
- Return True if chain_array contains chain
- """
- for ch in chain_array:
- if chain_compare_equal(ch, chain):
- return True
- return False
-
- def find_chains(taskid, prev_chain):
- prev_chain.append(taskid)
- total_deps = []
- total_deps.extend(self.runq_revdeps[taskid])
- for revdep in self.runq_revdeps[taskid]:
- if revdep in prev_chain:
- idx = prev_chain.index(revdep)
- # To prevent duplicates, reorder the chain to start with the lowest taskid
- # and search through an array of those we've already printed
- chain = prev_chain[idx:]
- new_chain = chain_reorder(chain)
- if not chain_array_contains(new_chain, valid_chains):
- valid_chains.append(new_chain)
- msgs.append("Dependency loop #%d found:\n" % len(valid_chains))
- for dep in new_chain:
- msgs.append(" Task %s (%s) (dependent Tasks %s)\n" % (dep, self.get_user_idstring(dep), self.runq_depends_names(dep)))
- msgs.append("\n")
- if len(valid_chains) > 10:
- msgs.append("Aborted dependency loops search after 10 matches.\n")
- return msgs
- continue
- scan = False
- if revdep not in explored_deps:
- scan = True
- elif revdep in explored_deps[revdep]:
- scan = True
- else:
- for dep in prev_chain:
- if dep in explored_deps[revdep]:
- scan = True
- if scan:
- find_chains(revdep, copy.deepcopy(prev_chain))
- for dep in explored_deps[revdep]:
- if dep not in total_deps:
- total_deps.append(dep)
-
- explored_deps[taskid] = total_deps
-
- for task in tasks:
- find_chains(task, [])
-
- return msgs
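The reordering done by chain_reorder() canonicalises each loop, so chain_array_contains() recognises the same cycle no matter where the walk entered it. A toy check, assuming the nested helper were extracted to module level:

    assert chain_reorder([5, 2, 8]) == [2, 8, 5]
    assert chain_reorder([8, 5, 2]) == [2, 8, 5]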
-
- def calculate_task_weights(self, endpoints):
- """
- Calculate a number representing the "weight" of each task. Heavier weighted tasks
- have more dependencies and hence should be executed sooner for maximum speed.
-
- This function also sanity-checks the task list, finding tasks that are
- not possible to execute due to circular dependencies.
- """
-
- numTasks = len(self.runq_fnid)
- weight = []
- deps_left = []
- task_done = []
-
- for listid in xrange(numTasks):
- task_done.append(False)
- weight.append(1)
- deps_left.append(len(self.runq_revdeps[listid]))
-
- for listid in endpoints:
- weight[listid] = 10
- task_done[listid] = True
-
- while True:
- next_points = []
- for listid in endpoints:
- for revdep in self.runq_depends[listid]:
- weight[revdep] = weight[revdep] + weight[listid]
- deps_left[revdep] = deps_left[revdep] - 1
- if deps_left[revdep] == 0:
- next_points.append(revdep)
- task_done[revdep] = True
- endpoints = next_points
- if len(next_points) == 0:
- break
-
- # Circular dependency sanity check
- problem_tasks = []
- for task in xrange(numTasks):
- if task_done[task] is False or deps_left[task] != 0:
- problem_tasks.append(task)
- logger.debug(2, "Task %s (%s) is not buildable", task, self.get_user_idstring(task))
- logger.debug(2, "(Complete marker was %s and the remaining dependency count was %s)\n", task_done[task], deps_left[task])
-
- if problem_tasks:
- message = "Unbuildable tasks were found.\n"
- message = message + "These are usually caused by circular dependencies and any circular dependency chains found will be printed below. Increase the debug level to see a list of unbuildable tasks.\n\n"
- message = message + "Identifying dependency loops (this may take a short while)...\n"
- logger.error(message)
-
- msgs = self.circular_depchains_handler(problem_tasks)
-
- message = "\n"
- for msg in msgs:
- message = message + msg
- bb.msg.fatal("RunQueue", message)
-
- return weight
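A worked example of the propagation above on a three-task chain where task 2 depends on task 1, which depends on task 0 (toy data in place of the runqueue structures):

    runq_depends = [set(), set([0]), set([1])]   # task i depends on these
    runq_revdeps = [set([1]), set([2]), set()]   # tasks that depend on i
    weight = [1, 1, 1]
    deps_left = [len(runq_revdeps[i]) for i in range(3)]   # [1, 1, 0]
    endpoints = [2]          # nothing depends on task 2
    weight[2] = 10
    while endpoints:
        next_points = []
        for listid in endpoints:
            for dep in runq_depends[listid]:
                weight[dep] += weight[listid]
                deps_left[dep] -= 1
                if deps_left[dep] == 0:
                    next_points.append(dep)
        endpoints = next_points
    # weight == [12, 11, 10]: the task everything depends on is heaviest,
    # so the "speed" scheduler runs it first.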
-
- def prepare(self):
- """
- Turn a set of taskData into a RunQueue and compute data needed
- to optimise the execution order.
- """
-
- runq_build = []
- recursivetasks = {}
- recursiveitasks = {}
- recursivetasksselfref = set()
-
- taskData = self.taskData
-
- if len(taskData.tasks_name) == 0:
- # Nothing to do
- return 0
-
- logger.info("Preparing RunQueue")
-
- # Step A - Work out a list of tasks to run
- #
- # Taskdata gives us a list of possible providers for every build and run
- # target ordered by priority. It also gives information on each of those
- # providers.
- #
- # To create the actual list of tasks to execute we fix the list of
- # providers and then resolve the dependencies into task IDs. This
- # process is repeated for each type of dependency (tdepends, deptask,
- # rdeptask, recrdeptask, idepends).
-
- def add_build_dependencies(depids, tasknames, depends):
- for depid in depids:
- # Won't be in build_targets if ASSUME_PROVIDED
- if depid not in taskData.build_targets:
- continue
- depdata = taskData.build_targets[depid][0]
- if depdata is None:
- continue
- for taskname in tasknames:
- taskid = taskData.gettask_id_fromfnid(depdata, taskname)
- if taskid is not None:
- depends.add(taskid)
-
- def add_runtime_dependencies(depids, tasknames, depends):
- for depid in depids:
- if depid not in taskData.run_targets:
- continue
- depdata = taskData.run_targets[depid][0]
- if depdata is None:
- continue
- for taskname in tasknames:
- taskid = taskData.gettask_id_fromfnid(depdata, taskname)
- if taskid is not None:
- depends.add(taskid)
-
- def add_resolved_dependencies(depids, tasknames, depends):
- for depid in depids:
- for taskname in tasknames:
- taskid = taskData.gettask_id_fromfnid(depid, taskname)
- if taskid is not None:
- depends.add(taskid)
-
- for task in xrange(len(taskData.tasks_name)):
- depends = set()
- fnid = taskData.tasks_fnid[task]
- fn = taskData.fn_index[fnid]
- task_deps = self.dataCache.task_deps[fn]
-
- #logger.debug(2, "Processing %s:%s", fn, taskData.tasks_name[task])
-
- if fnid not in taskData.failed_fnids:
-
- # Resolve task internal dependencies
- #
- # e.g. addtask before X after Y
- depends = set(taskData.tasks_tdepends[task])
-
- # Resolve 'deptask' dependencies
- #
- # e.g. do_sometask[deptask] = "do_someothertask"
- # (makes sure sometask runs after someothertask of all DEPENDS)
- if 'deptask' in task_deps and taskData.tasks_name[task] in task_deps['deptask']:
- tasknames = task_deps['deptask'][taskData.tasks_name[task]].split()
- add_build_dependencies(taskData.depids[fnid], tasknames, depends)
-
- # Resolve 'rdeptask' dependencies
- #
- # e.g. do_sometask[rdeptask] = "do_someothertask"
- # (makes sure sometask runs after someothertask of all RDEPENDS)
- if 'rdeptask' in task_deps and taskData.tasks_name[task] in task_deps['rdeptask']:
- tasknames = task_deps['rdeptask'][taskData.tasks_name[task]].split()
- add_runtime_dependencies(taskData.rdepids[fnid], tasknames, depends)
-
- # Resolve inter-task dependencies
- #
- # e.g. do_sometask[depends] = "targetname:do_someothertask"
- # (makes sure sometask runs after targetname's someothertask)
- idepends = taskData.tasks_idepends[task]
- for (depid, idependtask) in idepends:
- if depid in taskData.build_targets and not depid in taskData.failed_deps:
- # Won't be in build_targets if ASSUME_PROVIDED
- depdata = taskData.build_targets[depid][0]
- if depdata is not None:
- taskid = taskData.gettask_id_fromfnid(depdata, idependtask)
- if taskid is None:
- bb.msg.fatal("RunQueue", "Task %s in %s depends upon non-existent task %s in %s" % (taskData.tasks_name[task], fn, idependtask, taskData.fn_index[depdata]))
- depends.add(taskid)
- irdepends = taskData.tasks_irdepends[task]
- for (depid, idependtask) in irdepends:
- if depid in taskData.run_targets:
- # Won't be in run_targets if ASSUME_PROVIDED
- depdata = taskData.run_targets[depid][0]
- if depdata is not None:
- taskid = taskData.gettask_id_fromfnid(depdata, idependtask)
- if taskid is None:
- bb.msg.fatal("RunQueue", "Task %s in %s rdepends upon non-existent task %s in %s" % (taskData.tasks_name[task], fn, idependtask, taskData.fn_index[depdata]))
- depends.add(taskid)
-
- # Resolve recursive 'recrdeptask' dependencies (Part A)
- #
- # e.g. do_sometask[recrdeptask] = "do_someothertask"
- # (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
- # We cover the recursive part of the dependencies below
- if 'recrdeptask' in task_deps and taskData.tasks_name[task] in task_deps['recrdeptask']:
- tasknames = task_deps['recrdeptask'][taskData.tasks_name[task]].split()
- recursivetasks[task] = tasknames
- add_build_dependencies(taskData.depids[fnid], tasknames, depends)
- add_runtime_dependencies(taskData.rdepids[fnid], tasknames, depends)
- if taskData.tasks_name[task] in tasknames:
- recursivetasksselfref.add(task)
-
- if 'recideptask' in task_deps and taskData.tasks_name[task] in task_deps['recideptask']:
- recursiveitasks[task] = []
- for t in task_deps['recideptask'][taskData.tasks_name[task]].split():
- newdep = taskData.gettask_id_fromfnid(fnid, t)
- recursiveitasks[task].append(newdep)
-
- self.runq_fnid.append(taskData.tasks_fnid[task])
- self.runq_task.append(taskData.tasks_name[task])
- self.runq_depends.append(depends)
- self.runq_revdeps.append(set())
- self.runq_hash.append("")
-
- runq_build.append(0)
-
- # Resolve recursive 'recrdeptask' dependencies (Part B)
- #
- # e.g. do_sometask[recrdeptask] = "do_someothertask"
- # (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
- # We need to do this separately since we need all of self.runq_depends to be complete before this is processed
- extradeps = {}
- for task in recursivetasks:
- extradeps[task] = set(self.runq_depends[task])
- tasknames = recursivetasks[task]
- seendeps = set()
- seenfnid = []
-
- def generate_recdeps(t):
- newdeps = set()
- add_resolved_dependencies([taskData.tasks_fnid[t]], tasknames, newdeps)
- extradeps[task].update(newdeps)
- seendeps.add(t)
- newdeps.add(t)
- for i in newdeps:
- for n in self.runq_depends[i]:
- if n not in seendeps:
- generate_recdeps(n)
- generate_recdeps(task)
-
- if task in recursiveitasks:
- for dep in recursiveitasks[task]:
- generate_recdeps(dep)
-
- # Remove circular references so that do_a[recrdeptask] = "do_a do_b" can work
- for task in recursivetasks:
- extradeps[task].difference_update(recursivetasksselfref)
-
- for task in xrange(len(taskData.tasks_name)):
- # Add in extra dependencies
- if task in extradeps:
- self.runq_depends[task] = extradeps[task]
- # Remove all self references
- if task in self.runq_depends[task]:
- logger.debug(2, "Task %s (%s %s) contains self reference! %s", task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], self.runq_depends[task])
- self.runq_depends[task].remove(task)
-
- # Step B - Mark all active tasks
- #
- # Start with the tasks we were asked to run and mark all dependencies
- # as active too. If the task is to be 'forced', clear its stamp. Once
- # all active tasks are marked, prune the ones we don't need.
-
- logger.verbose("Marking Active Tasks")
-
- def mark_active(listid, depth):
- """
- Mark an item as active along with its depends
- (calls itself recursively)
- """
-
- if runq_build[listid] == 1:
- return
-
- runq_build[listid] = 1
-
- depends = self.runq_depends[listid]
- for depend in depends:
- mark_active(depend, depth+1)
-
- self.target_pairs = []
- for target in self.targets:
- targetid = taskData.getbuild_id(target[0])
-
- if targetid not in taskData.build_targets:
- continue
-
- if targetid in taskData.failed_deps:
- continue
-
- fnid = taskData.build_targets[targetid][0]
- fn = taskData.fn_index[fnid]
- task = target[1]
- parents = False
- if task.endswith('-'):
- parents = True
- task = task[:-1]
-
- self.target_pairs.append((fn, task))
-
- if fnid in taskData.failed_fnids:
- continue
-
- if task not in taskData.tasks_lookup[fnid]:
- import difflib
- close_matches = difflib.get_close_matches(task, taskData.tasks_lookup[fnid], cutoff=0.7)
- if close_matches:
- extra = ". Close matches:\n %s" % "\n ".join(close_matches)
- else:
- extra = ""
- bb.msg.fatal("RunQueue", "Task %s does not exist for target %s%s" % (task, target[0], extra))
-
- # For tasks called "XXXX-", ony run their dependencies
- listid = taskData.tasks_lookup[fnid][task]
- if parents:
- for i in self.runq_depends[listid]:
- mark_active(i, 1)
- else:
- mark_active(listid, 1)
-
- # Step C - Prune all inactive tasks
- #
- # Once all active tasks are marked, prune the ones we don't need.
-
- maps = []
- delcount = 0
- for listid in xrange(len(self.runq_fnid)):
- if runq_build[listid-delcount] == 1:
- maps.append(listid-delcount)
- else:
- del self.runq_fnid[listid-delcount]
- del self.runq_task[listid-delcount]
- del self.runq_depends[listid-delcount]
- del runq_build[listid-delcount]
- del self.runq_revdeps[listid-delcount]
- del self.runq_hash[listid-delcount]
- delcount = delcount + 1
- maps.append(-1)
-
- #
- # Step D - Sanity checks and computation
- #
-
- # Check to make sure we still have tasks to run
- if len(self.runq_fnid) == 0:
- if not taskData.abort:
- bb.msg.fatal("RunQueue", "All buildable tasks have been run but the build is incomplete (--continue mode). Errors for the tasks that failed will have been printed above.")
- else:
- bb.msg.fatal("RunQueue", "No active tasks and not in --continue mode?! Please report this bug.")
-
- logger.verbose("Pruned %s inactive tasks, %s left", delcount, len(self.runq_fnid))
-
- # Remap the dependencies to account for the deleted tasks
- # Check we didn't delete a task we depend on
- for listid in xrange(len(self.runq_fnid)):
- newdeps = []
- origdeps = self.runq_depends[listid]
- for origdep in origdeps:
- if maps[origdep] == -1:
- bb.msg.fatal("RunQueue", "Invalid mapping - Should never happen!")
- newdeps.append(maps[origdep])
- self.runq_depends[listid] = set(newdeps)
-
- logger.verbose("Assign Weightings")
-
- # Generate a list of reverse dependencies to ease future calculations
- for listid in xrange(len(self.runq_fnid)):
- for dep in self.runq_depends[listid]:
- self.runq_revdeps[dep].add(listid)
-
- # Identify tasks at the end of dependency chains
- # Error on circular dependency loops (length two)
- endpoints = []
- for listid in xrange(len(self.runq_fnid)):
- revdeps = self.runq_revdeps[listid]
- if len(revdeps) == 0:
- endpoints.append(listid)
- for dep in revdeps:
- if dep in self.runq_depends[listid]:
- #self.dump_data(taskData)
- bb.msg.fatal("RunQueue", "Task %s (%s) has circular dependency on %s (%s)" % (taskData.fn_index[self.runq_fnid[dep]], self.runq_task[dep], taskData.fn_index[self.runq_fnid[listid]], self.runq_task[listid]))
-
- logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints))
-
- # Calculate task weights
- # Check for higher-length circular dependencies
- self.runq_weight = self.calculate_task_weights(endpoints)
-
- # Sanity Check - Check for multiple tasks building the same provider
- prov_list = {}
- seen_fn = []
- for task in xrange(len(self.runq_fnid)):
- fn = taskData.fn_index[self.runq_fnid[task]]
- if fn in seen_fn:
- continue
- seen_fn.append(fn)
- for prov in self.dataCache.fn_provides[fn]:
- if prov not in prov_list:
- prov_list[prov] = [fn]
- elif fn not in prov_list[prov]:
- prov_list[prov].append(fn)
- for prov in prov_list:
- if len(prov_list[prov]) > 1 and prov not in self.multi_provider_whitelist:
- seen_pn = []
- # If two versions of the same PN are being built it's fatal, we don't support it.
- for fn in prov_list[prov]:
- pn = self.dataCache.pkg_fn[fn]
- if pn not in seen_pn:
- seen_pn.append(pn)
- else:
- bb.fatal("Multiple versions of %s are due to be built (%s). Only one version of a given PN should be built in any given build. You likely need to set PREFERRED_VERSION_%s to select the correct version or don't depend on multiple versions." % (pn, " ".join(prov_list[prov]), pn))
- msg = "Multiple .bb files are due to be built which each provide %s:\n %s" % (prov, "\n ".join(prov_list[prov]))
- #
- # Construct a list of things which uniquely depend on each provider
- # since this may help the user figure out which dependency is triggering this warning
- #
- msg += "\nA list of tasks depending on these providers is shown and may help explain where the dependency comes from."
- deplist = {}
- commondeps = None
- for provfn in prov_list[prov]:
- deps = set()
- for task, fnid in enumerate(self.runq_fnid):
- fn = taskData.fn_index[fnid]
- if fn != provfn:
- continue
- for dep in self.runq_revdeps[task]:
- fn = taskData.fn_index[self.runq_fnid[dep]]
- if fn == provfn:
- continue
- deps.add(self.get_short_user_idstring(dep))
- if not commondeps:
- commondeps = set(deps)
- else:
- commondeps &= deps
- deplist[provfn] = deps
- for provfn in deplist:
- msg += "\n%s has unique dependees:\n %s" % (provfn, "\n ".join(deplist[provfn] - commondeps))
- #
- # Construct a list of provides and runtime providers for each recipe
- # (rprovides has to cover RPROVIDES, PACKAGES, PACKAGES_DYNAMIC)
- #
- msg += "\nIt could be that one recipe provides something the other doesn't and should. The following provider and runtime provider differences may be helpful."
- provide_results = {}
- rprovide_results = {}
- commonprovs = None
- commonrprovs = None
- for provfn in prov_list[prov]:
- provides = set(self.dataCache.fn_provides[provfn])
- rprovides = set()
- for rprovide in self.dataCache.rproviders:
- if provfn in self.dataCache.rproviders[rprovide]:
- rprovides.add(rprovide)
- for package in self.dataCache.packages:
- if provfn in self.dataCache.packages[package]:
- rprovides.add(package)
- for package in self.dataCache.packages_dynamic:
- if provfn in self.dataCache.packages_dynamic[package]:
- rprovides.add(package)
- if not commonprovs:
- commonprovs = set(provides)
- else:
- commonprovs &= provides
- provide_results[provfn] = provides
- if not commonrprovs:
- commonrprovs = set(rprovides)
- else:
- commonrprovs &= rprovides
- rprovide_results[provfn] = rprovides
- #msg += "\nCommon provides:\n %s" % ("\n ".join(commonprovs))
- #msg += "\nCommon rprovides:\n %s" % ("\n ".join(commonrprovs))
- for provfn in prov_list[prov]:
- msg += "\n%s has unique provides:\n %s" % (provfn, "\n ".join(provide_results[provfn] - commonprovs))
- msg += "\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs))
-
- if self.warn_multi_bb:
- logger.warn(msg)
- else:
- logger.error(msg)
-
- # Create a whitelist usable by the stamp checks
- stampfnwhitelist = []
- for entry in self.stampwhitelist.split():
- entryid = self.taskData.getbuild_id(entry)
- if entryid not in self.taskData.build_targets:
- continue
- fnid = self.taskData.build_targets[entryid][0]
- fn = self.taskData.fn_index[fnid]
- stampfnwhitelist.append(fn)
- self.stampfnwhitelist = stampfnwhitelist
-
- # Iterate over the task list looking for tasks with a 'setscene' function
- self.runq_setscene = []
- if not self.cooker.configuration.nosetscene:
- for task in range(len(self.runq_fnid)):
- setscene = taskData.gettask_id(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task] + "_setscene", False)
- if not setscene:
- continue
- self.runq_setscene.append(task)
-
- def invalidate_task(fn, taskname, error_nostamp):
- taskdep = self.dataCache.task_deps[fn]
- fnid = self.taskData.getfn_id(fn)
- if taskname not in taskData.tasks_lookup[fnid]:
- logger.warn("Task %s does not exist, invalidating this task will have no effect" % taskname)
- if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
- if error_nostamp:
- bb.fatal("Task %s is marked nostamp, cannot invalidate this task" % taskname)
- else:
- bb.debug(1, "Task %s is marked nostamp, cannot invalidate this task" % taskname)
- else:
- logger.verbose("Invalidate task %s, %s", taskname, fn)
- bb.parse.siggen.invalidate_task(taskname, self.dataCache, fn)
-
- # Invalidate task if force mode active
- if self.cooker.configuration.force:
- for (fn, target) in self.target_pairs:
- invalidate_task(fn, target, False)
-
- # Invalidate task if invalidate mode active
- if self.cooker.configuration.invalidate_stamp:
- for (fn, target) in self.target_pairs:
- for st in self.cooker.configuration.invalidate_stamp.split(','):
- if not st.startswith("do_"):
- st = "do_%s" % st
- invalidate_task(fn, st, True)
-
- # Create and print to the logs a virtual/xxxx -> PN (fn) table
- virtmap = taskData.get_providermap(prefix="virtual/")
- virtpnmap = {}
- for v in virtmap:
- virtpnmap[v] = self.dataCache.pkg_fn[virtmap[v]]
- bb.debug(2, "%s resolved to: %s (%s)" % (v, virtpnmap[v], virtmap[v]))
- if hasattr(bb.parse.siggen, "tasks_resolved"):
- bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCache)
-
- # Iterate over the task list and call into the siggen code
- dealtwith = set()
- todeal = set(range(len(self.runq_fnid)))
- while len(todeal) > 0:
- for task in todeal.copy():
- if len(self.runq_depends[task] - dealtwith) == 0:
- dealtwith.add(task)
- todeal.remove(task)
- procdep = []
- for dep in self.runq_depends[task]:
- procdep.append(self.taskData.fn_index[self.runq_fnid[dep]] + "." + self.runq_task[dep])
- self.runq_hash[task] = bb.parse.siggen.get_taskhash(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task], procdep, self.dataCache)
-
- bb.parse.siggen.writeout_file_checksum_cache()
- return len(self.runq_fnid)
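The final while loop above only hashes a task once every one of its dependencies already has a hash. A self-contained sketch of the same ordering on toy data, with hashlib.md5 standing in for the siggen task hash:

    import hashlib

    deps = {'a': [], 'b': ['a'], 'c': ['a', 'b']}
    hashes = {}
    todeal = set(deps)
    while todeal:
        for t in sorted(todeal.copy()):
            if all(d in hashes for d in deps[t]):
                data = t + ''.join(hashes[d] for d in sorted(deps[t]))
                hashes[t] = hashlib.md5(data).hexdigest()
                todeal.remove(t)
    # 'c' is hashed last; changing 'a' changes every downstream hash.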
-
- def dump_data(self, taskQueue):
- """
- Dump some debug information on the internal data structures
- """
- logger.debug(3, "run_tasks:")
- for task in xrange(len(self.rqdata.runq_task)):
- logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task,
- taskQueue.fn_index[self.rqdata.runq_fnid[task]],
- self.rqdata.runq_task[task],
- self.rqdata.runq_weight[task],
- self.rqdata.runq_depends[task],
- self.rqdata.runq_revdeps[task])
-
- logger.debug(3, "sorted_tasks:")
- for task1 in xrange(len(self.rqdata.runq_task)):
- if task1 in self.prio_map:
- task = self.prio_map[task1]
- logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task,
- taskQueue.fn_index[self.rqdata.runq_fnid[task]],
- self.rqdata.runq_task[task],
- self.rqdata.runq_weight[task],
- self.rqdata.runq_depends[task],
- self.rqdata.runq_revdeps[task])
-
-class RunQueue:
- def __init__(self, cooker, cfgData, dataCache, taskData, targets):
-
- self.cooker = cooker
- self.cfgData = cfgData
- self.rqdata = RunQueueData(self, cooker, cfgData, dataCache, taskData, targets)
-
- self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY", True) or "perfile"
- self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION", True) or None
- self.setsceneverify = cfgData.getVar("BB_SETSCENE_VERIFY_FUNCTION", True) or None
- self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID", True) or None
-
- self.state = runQueuePrepare
-
- # For disk space monitor
- self.dm = monitordisk.diskMonitor(cfgData)
-
- self.rqexe = None
- self.worker = None
- self.workerpipe = None
- self.fakeworker = None
- self.fakeworkerpipe = None
-
- def _start_worker(self, fakeroot = False, rqexec = None):
- logger.debug(1, "Starting bitbake-worker")
- magic = "decafbad"
- if self.cooker.configuration.profile:
- magic = "decafbadbad"
- if fakeroot:
- magic = magic + "beef"
- fakerootcmd = self.cfgData.getVar("FAKEROOTCMD", True)
- fakerootenv = (self.cfgData.getVar("FAKEROOTBASEENV", True) or "").split()
- env = os.environ.copy()
- for key, value in (var.split('=') for var in fakerootenv):
- env[key] = value
- worker = subprocess.Popen([fakerootcmd, "bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
- else:
- worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
- bb.utils.nonblockingfd(worker.stdout)
- workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec)
-
- workerdata = {
- "taskdeps" : self.rqdata.dataCache.task_deps,
- "fakerootenv" : self.rqdata.dataCache.fakerootenv,
- "fakerootdirs" : self.rqdata.dataCache.fakerootdirs,
- "fakerootnoenv" : self.rqdata.dataCache.fakerootnoenv,
- "sigdata" : bb.parse.siggen.get_taskdata(),
- "runq_hash" : self.rqdata.runq_hash,
- "logdefaultdebug" : bb.msg.loggerDefaultDebugLevel,
- "logdefaultverbose" : bb.msg.loggerDefaultVerbose,
- "logdefaultverboselogs" : bb.msg.loggerVerboseLogs,
- "logdefaultdomain" : bb.msg.loggerDefaultDomains,
- "prhost" : self.cooker.prhost,
- "buildname" : self.cfgData.getVar("BUILDNAME", True),
- "date" : self.cfgData.getVar("DATE", True),
- "time" : self.cfgData.getVar("TIME", True),
- }
-
- worker.stdin.write("<cookerconfig>" + pickle.dumps(self.cooker.configuration) + "</cookerconfig>")
- worker.stdin.write("<workerdata>" + pickle.dumps(workerdata) + "</workerdata>")
- worker.stdin.flush()
-
- return worker, workerpipe
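The writes to worker.stdin above frame each message as a pickled payload between start/end tags, which is how the reader finds message boundaries in the stream. A minimal sketch of that framing (illustrative only, not the bitbake-worker protocol implementation):

    import pickle

    def frame(tag, payload):
        # e.g. "<workerdata>" + pickled dict + "</workerdata>"
        return "<%s>%s</%s>" % (tag, pickle.dumps(payload), tag)

    message = frame("workerdata", {"buildname": "demo"})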
-
- def _teardown_worker(self, worker, workerpipe):
- if not worker:
- return
- logger.debug(1, "Teardown for bitbake-worker")
- try:
- worker.stdin.write("<quit></quit>")
- worker.stdin.flush()
- except IOError:
- pass
- while worker.returncode is None:
- workerpipe.read()
- worker.poll()
- while workerpipe.read():
- continue
- workerpipe.close()
-
- def start_worker(self):
- if self.worker:
- self.teardown_workers()
- self.teardown = False
- self.worker, self.workerpipe = self._start_worker()
-
- def start_fakeworker(self, rqexec):
- if not self.fakeworker:
- self.fakeworker, self.fakeworkerpipe = self._start_worker(True, rqexec)
-
- def teardown_workers(self):
- self.teardown = True
- self._teardown_worker(self.worker, self.workerpipe)
- self.worker = None
- self.workerpipe = None
- self._teardown_worker(self.fakeworker, self.fakeworkerpipe)
- self.fakeworker = None
- self.fakeworkerpipe = None
-
- def read_workers(self):
- self.workerpipe.read()
- if self.fakeworkerpipe:
- self.fakeworkerpipe.read()
-
- def active_fds(self):
- fds = []
- if self.workerpipe:
- fds.append(self.workerpipe.input)
- if self.fakeworkerpipe:
- fds.append(self.fakeworkerpipe.input)
- return fds
-
- def check_stamp_task(self, task, taskname = None, recurse = False, cache = None):
- def get_timestamp(f):
- try:
- if not os.access(f, os.F_OK):
- return None
- return os.stat(f)[stat.ST_MTIME]
- except:
- return None
-
- if self.stamppolicy == "perfile":
- fulldeptree = False
- else:
- fulldeptree = True
- stampwhitelist = []
- if self.stamppolicy == "whitelist":
- stampwhitelist = self.rqdata.stampfnwhitelist
-
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- if taskname is None:
- taskname = self.rqdata.runq_task[task]
-
- stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
-
- # If the stamp is missing, it's not current
- if not os.access(stampfile, os.F_OK):
- logger.debug(2, "Stampfile %s not available", stampfile)
- return False
- # If it's a 'nostamp' task, it's not current
- taskdep = self.rqdata.dataCache.task_deps[fn]
- if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
- logger.debug(2, "%s.%s is nostamp\n", fn, taskname)
- return False
-
- if taskname != "do_setscene" and taskname.endswith("_setscene"):
- return True
-
- if cache is None:
- cache = {}
-
- iscurrent = True
- t1 = get_timestamp(stampfile)
- for dep in self.rqdata.runq_depends[task]:
- if iscurrent:
- fn2 = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[dep]]
- taskname2 = self.rqdata.runq_task[dep]
- stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCache, fn2)
- stampfile3 = bb.build.stampfile(taskname2 + "_setscene", self.rqdata.dataCache, fn2)
- t2 = get_timestamp(stampfile2)
- t3 = get_timestamp(stampfile3)
- if t3 and t3 > t2:
- continue
- if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
- if not t2:
- logger.debug(2, 'Stampfile %s does not exist', stampfile2)
- iscurrent = False
- if t1 < t2:
- logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2)
- iscurrent = False
- if recurse and iscurrent:
- if dep in cache:
- iscurrent = cache[dep]
- if not iscurrent:
- logger.debug(2, 'Stampfile for dependency %s:%s invalid (cached)' % (fn2, taskname2))
- else:
- iscurrent = self.check_stamp_task(dep, recurse=True, cache=cache)
- cache[dep] = iscurrent
- if recurse:
- cache[task] = iscurrent
- return iscurrent
-
- def _execute_runqueue(self):
- """
- Run the tasks in a queue prepared by rqdata.prepare()
- Upon failure, optionally try to recover the build using any alternate providers
- (if the abort on failure configuration option isn't set)
- """
-
- retval = True
-
- if self.state is runQueuePrepare:
- self.rqexe = RunQueueExecuteDummy(self)
- if self.rqdata.prepare() == 0:
- self.state = runQueueComplete
- else:
- self.state = runQueueSceneInit
-
- # we are ready to run, see if any UI client needs the dependency info
- if bb.cooker.CookerFeatures.SEND_DEPENDS_TREE in self.cooker.featureset:
- depgraph = self.cooker.buildDependTree(self, self.rqdata.taskData)
- bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.cooker.data)
-
- if self.state is runQueueSceneInit:
- dump = self.cooker.configuration.dump_signatures
- if dump:
- if 'printdiff' in dump:
- invalidtasks = self.print_diffscenetasks()
- self.dump_signatures(dump)
- if 'printdiff' in dump:
- self.write_diffscenetasks(invalidtasks)
- self.state = runQueueComplete
- else:
- self.start_worker()
- self.rqexe = RunQueueExecuteScenequeue(self)
-
- if self.state in [runQueueSceneRun, runQueueRunning, runQueueCleanUp]:
- self.dm.check(self)
-
- if self.state is runQueueSceneRun:
- retval = self.rqexe.execute()
-
- if self.state is runQueueRunInit:
- if self.cooker.configuration.setsceneonly:
- self.state = runQueueComplete
- else:
- logger.info("Executing RunQueue Tasks")
- self.rqexe = RunQueueExecuteTasks(self)
- self.state = runQueueRunning
-
- if self.state is runQueueRunning:
- retval = self.rqexe.execute()
-
- if self.state is runQueueCleanUp:
- retval = self.rqexe.finish()
-
- if (self.state is runQueueComplete or self.state is runQueueFailed) and self.rqexe:
- self.teardown_workers()
- if self.rqexe.stats.failed:
- logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed + self.rqexe.stats.failed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
- else:
- # Let's avoid the word "failed" if nothing actually did
- logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and all succeeded.", self.rqexe.stats.completed, self.rqexe.stats.skipped)
-
- if self.state is runQueueFailed:
- if not self.rqdata.taskData.tryaltconfigs:
- raise bb.runqueue.TaskFailure(self.rqexe.failed_fnids)
- for fnid in self.rqexe.failed_fnids:
- self.rqdata.taskData.fail_fnid(fnid)
- self.rqdata.reset()
-
- if self.state is runQueueComplete:
- # All done
- return False
-
- # Loop
- return retval
-
- def execute_runqueue(self):
- # Catch unexpected exceptions and ensure we exit when an error occurs, not loop.
- try:
- return self._execute_runqueue()
- except bb.runqueue.TaskFailure:
- raise
- except SystemExit:
- raise
- except bb.BBHandledException:
- try:
- self.teardown_workers()
- except:
- pass
- self.state = runQueueComplete
- raise
- except:
- logger.error("An uncaught exception occured in runqueue, please see the failure below:")
- try:
- self.teardown_workers()
- except:
- pass
- self.state = runQueueComplete
- raise
-
- def finish_runqueue(self, now = False):
- if not self.rqexe:
- self.state = runQueueComplete
- return
-
- if now:
- self.rqexe.finish_now()
- else:
- self.rqexe.finish()
-
- def dump_signatures(self, options):
- done = set()
- bb.note("Reparsing files to collect dependency data")
- for task in range(len(self.rqdata.runq_fnid)):
- if self.rqdata.runq_fnid[task] not in done:
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.collection.get_file_appends(fn), self.cooker.data)
- done.add(self.rqdata.runq_fnid[task])
-
- bb.parse.siggen.dump_sigs(self.rqdata.dataCache, options)
-
- return
-
- def print_diffscenetasks(self):
-
- valid = []
- sq_hash = []
- sq_hashfn = []
- sq_fn = []
- sq_taskname = []
- sq_task = []
- noexec = []
- stamppresent = []
- valid_new = set()
-
- for task in xrange(len(self.rqdata.runq_fnid)):
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- taskname = self.rqdata.runq_task[task]
- taskdep = self.rqdata.dataCache.task_deps[fn]
-
- if 'noexec' in taskdep and taskname in taskdep['noexec']:
- noexec.append(task)
- continue
-
- sq_fn.append(fn)
- sq_hashfn.append(self.rqdata.dataCache.hashfn[fn])
- sq_hash.append(self.rqdata.runq_hash[task])
- sq_taskname.append(taskname)
- sq_task.append(task)
- locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
- try:
- call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=True)"
- valid = bb.utils.better_eval(call, locs)
- # Handle version with no siginfo parameter
- except TypeError:
- call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
- valid = bb.utils.better_eval(call, locs)
- for v in valid:
- valid_new.add(sq_task[v])
-
- # Tasks which are both setscene and noexec never care about dependencies
- # We therefore find tasks which are setscene and noexec and mark their
- # unique dependencies as valid.
- for task in noexec:
- if task not in self.rqdata.runq_setscene:
- continue
- for dep in self.rqdata.runq_depends[task]:
- hasnoexecparents = True
- for dep2 in self.rqdata.runq_revdeps[dep]:
- if dep2 in self.rqdata.runq_setscene and dep2 in noexec:
- continue
- hasnoexecparents = False
- break
- if hasnoexecparents:
- valid_new.add(dep)
-
- invalidtasks = set()
- for task in xrange(len(self.rqdata.runq_fnid)):
- if task not in valid_new and task not in noexec:
- invalidtasks.add(task)
-
- found = set()
- processed = set()
- for task in invalidtasks:
- toprocess = set([task])
- while toprocess:
- next = set()
- for t in toprocess:
- for dep in self.rqdata.runq_depends[t]:
- if dep in invalidtasks:
- found.add(task)
- if dep not in processed:
- processed.add(dep)
- next.add(dep)
- toprocess = next
- if task in found:
- toprocess = set()
-
- tasklist = []
- for task in invalidtasks.difference(found):
- tasklist.append(self.rqdata.get_user_idstring(task))
-
- if tasklist:
- bb.plain("The differences between the current build and any cached tasks start at the following tasks:\n" + "\n".join(tasklist))
-
- return invalidtasks.difference(found)
-
- def write_diffscenetasks(self, invalidtasks):
-
- # Define recursion callback
- def recursecb(key, hash1, hash2):
- hashes = [hash1, hash2]
- hashfiles = bb.siggen.find_siginfo(key, None, hashes, self.cfgData)
-
- recout = []
- if len(hashfiles) == 2:
- out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb)
- recout.extend(list(' ' + l for l in out2))
- else:
- recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
-
- return recout
-
-
- for task in invalidtasks:
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- pn = self.rqdata.dataCache.pkg_fn[fn]
- taskname = self.rqdata.runq_task[task]
- h = self.rqdata.runq_hash[task]
- matches = bb.siggen.find_siginfo(pn, taskname, [], self.cfgData)
- match = None
- for m in matches:
- if h in m:
- match = m
- if match is None:
- bb.fatal("Can't find a task we're supposed to have written out? (hash: %s)?" % h)
- matches = {k : v for k, v in matches.iteritems() if h not in k}
- if matches:
- latestmatch = sorted(matches.keys(), key=lambda f: matches[f])[-1]
- prevh = __find_md5__.search(latestmatch).group(0)
- output = bb.siggen.compare_sigfiles(latestmatch, match, recursecb)
- bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, closest matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output))
-
-class RunQueueExecute:
-
- def __init__(self, rq):
- self.rq = rq
- self.cooker = rq.cooker
- self.cfgData = rq.cfgData
- self.rqdata = rq.rqdata
-
- self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS", True) or 1)
- self.scheduler = self.cfgData.getVar("BB_SCHEDULER", True) or "speed"
-
- self.runq_buildable = []
- self.runq_running = []
- self.runq_complete = []
-
- self.build_stamps = {}
- self.build_stamps2 = []
- self.failed_fnids = []
-
- self.stampcache = {}
-
- rq.workerpipe.setrunqueueexec(self)
- if rq.fakeworkerpipe:
- rq.fakeworkerpipe.setrunqueueexec(self)
-
- if self.number_tasks <= 0:
- bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks)
-
- def runqueue_process_waitpid(self, task, status):
-
- # self.build_stamps[task] may not exist when using a shared work directory.
- if task in self.build_stamps:
- self.build_stamps2.remove(self.build_stamps[task])
- del self.build_stamps[task]
-
- if status != 0:
- self.task_fail(task, status)
- else:
- self.task_complete(task)
- return True
-
- def finish_now(self):
-
- for worker in [self.rq.worker, self.rq.fakeworker]:
- if not worker:
- continue
- try:
- worker.stdin.write("<finishnow></finishnow>")
- worker.stdin.flush()
- except IOError:
- # worker must have died?
- pass
-
- if len(self.failed_fnids) != 0:
- self.rq.state = runQueueFailed
- return
-
- self.rq.state = runQueueComplete
- return
-
- def finish(self):
- self.rq.state = runQueueCleanUp
-
- if self.stats.active > 0:
- bb.event.fire(runQueueExitWait(self.stats.active), self.cfgData)
- self.rq.read_workers()
- return self.rq.active_fds()
-
- if len(self.failed_fnids) != 0:
- self.rq.state = runQueueFailed
- return True
-
- self.rq.state = runQueueComplete
- return True
-
- def check_dependencies(self, task, taskdeps, setscene = False):
- if not self.rq.depvalidate:
- return False
-
- taskdata = {}
- taskdeps.add(task)
- for dep in taskdeps:
- if setscene:
- depid = self.rqdata.runq_setscene[dep]
- else:
- depid = dep
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[depid]]
- pn = self.rqdata.dataCache.pkg_fn[fn]
- taskname = self.rqdata.runq_task[depid]
- taskdata[dep] = [pn, taskname, fn]
- call = self.rq.depvalidate + "(task, taskdata, notneeded, d)"
- locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.expanded_data }
- valid = bb.utils.better_eval(call, locs)
- return valid
-
-class RunQueueExecuteDummy(RunQueueExecute):
- def __init__(self, rq):
- self.rq = rq
- self.stats = RunQueueStats(0)
-
- def finish(self):
- self.rq.state = runQueueComplete
- return
-
-class RunQueueExecuteTasks(RunQueueExecute):
- def __init__(self, rq):
- RunQueueExecute.__init__(self, rq)
-
- self.stats = RunQueueStats(len(self.rqdata.runq_fnid))
-
- self.stampcache = {}
-
- initial_covered = self.rq.scenequeue_covered.copy()
-
- # Mark initial buildable tasks
- for task in xrange(self.stats.total):
- self.runq_running.append(0)
- self.runq_complete.append(0)
- if len(self.rqdata.runq_depends[task]) == 0:
- self.runq_buildable.append(1)
- else:
- self.runq_buildable.append(0)
- if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered):
- self.rq.scenequeue_covered.add(task)
-
- found = True
- while found:
- found = False
- for task in xrange(self.stats.total):
- if task in self.rq.scenequeue_covered:
- continue
- logger.debug(1, 'Considering %s (%s): %s' % (task, self.rqdata.get_user_idstring(task), str(self.rqdata.runq_revdeps[task])))
-
- if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered):
- found = True
- self.rq.scenequeue_covered.add(task)
-
- logger.debug(1, 'Skip list (pre setsceneverify) %s', sorted(self.rq.scenequeue_covered))
-
- # Allow the metadata to elect for setscene tasks to run anyway
- covered_remove = set()
- if self.rq.setsceneverify:
- invalidtasks = []
- for task in xrange(len(self.rqdata.runq_task)):
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- taskname = self.rqdata.runq_task[task]
- taskdep = self.rqdata.dataCache.task_deps[fn]
-
- if 'noexec' in taskdep and taskname in taskdep['noexec']:
- continue
- if self.rq.check_stamp_task(task, taskname + "_setscene", cache=self.stampcache):
- logger.debug(2, 'Setscene stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(task))
- continue
- if self.rq.check_stamp_task(task, taskname, recurse = True, cache=self.stampcache):
- logger.debug(2, 'Normal stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(task))
- continue
- invalidtasks.append(task)
-
- call = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d, invalidtasks=invalidtasks)"
- call2 = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d)"
- locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.expanded_data, "invalidtasks" : invalidtasks }
- # Backwards compatibility with older versions without invalidtasks
- try:
- covered_remove = bb.utils.better_eval(call, locs)
- except TypeError:
- covered_remove = bb.utils.better_eval(call2, locs)
-
- def removecoveredtask(task):
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- taskname = self.rqdata.runq_task[task] + '_setscene'
- bb.build.del_stamp(taskname, self.rqdata.dataCache, fn)
- self.rq.scenequeue_covered.remove(task)
-
- toremove = covered_remove
- for task in toremove:
- logger.debug(1, 'Not skipping task %s due to setsceneverify', task)
- while toremove:
- covered_remove = []
- for task in toremove:
- removecoveredtask(task)
- for deptask in self.rqdata.runq_depends[task]:
- if deptask not in self.rq.scenequeue_covered:
- continue
- if deptask in toremove or deptask in covered_remove or deptask in initial_covered:
- continue
- logger.debug(1, 'Task %s depends on task %s so not skipping' % (task, deptask))
- covered_remove.append(deptask)
- toremove = covered_remove
-
- logger.debug(1, 'Full skip list %s', self.rq.scenequeue_covered)
-
- event.fire(bb.event.StampUpdate(self.rqdata.target_pairs, self.rqdata.dataCache.stamp), self.cfgData)
-
- schedulers = self.get_schedulers()
- for scheduler in schedulers:
- if self.scheduler == scheduler.name:
- self.sched = scheduler(self, self.rqdata)
- logger.debug(1, "Using runqueue scheduler '%s'", scheduler.name)
- break
- else:
- bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
- (self.scheduler, ", ".join(obj.name for obj in schedulers)))
-
- def get_schedulers(self):
- schedulers = set(obj for obj in globals().values()
- if type(obj) is type and
- issubclass(obj, RunQueueScheduler))
-
- user_schedulers = self.cfgData.getVar("BB_SCHEDULERS", True)
- if user_schedulers:
- for sched in user_schedulers.split():
- if not "." in sched:
- bb.note("Ignoring scheduler '%s' from BB_SCHEDULERS: not an import" % sched)
- continue
-
- modname, name = sched.rsplit(".", 1)
- try:
- module = __import__(modname, fromlist=(name,))
- except ImportError as exc:
- logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc))
- raise SystemExit(1)
- else:
- schedulers.add(getattr(module, name))
- return schedulers
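-
- # A custom scheduler can be supplied from the metadata, provided it is an
- # importable "module.Class" path subclassing RunQueueScheduler; a
- # hypothetical example:
- #
- # # mylayer/lib/mysched.py
- # from bb.runqueue import RunQueueScheduler
- # class MyScheduler(RunQueueScheduler):
- # name = "mysched"
- #
- # registered with BB_SCHEDULERS = "mysched.MyScheduler" and then selected
- # via BB_SCHEDULER = "mysched".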
-
- def setbuildable(self, task):
- self.runq_buildable[task] = 1
- self.sched.newbuilable(task)
-
- def task_completeoutright(self, task):
- """
- Mark a task as completed
- Look at the reverse dependencies and mark any task with
- completed dependencies as buildable
- """
- self.runq_complete[task] = 1
- for revdep in self.rqdata.runq_revdeps[task]:
- if self.runq_running[revdep] == 1:
- continue
- if self.runq_buildable[revdep] == 1:
- continue
- alldeps = 1
- for dep in self.rqdata.runq_depends[revdep]:
- if self.runq_complete[dep] != 1:
- alldeps = 0
- if alldeps == 1:
- self.setbuildable(revdep)
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
- taskname = self.rqdata.runq_task[revdep]
- logger.debug(1, "Marking task %s (%s, %s) as buildable", revdep, fn, taskname)
-
- def task_complete(self, task):
- self.stats.taskCompleted()
- bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
- self.task_completeoutright(task)
-
- def task_fail(self, task, exitcode):
- """
- Called when a task has failed
- Updates the state engine with the failure
- """
- self.stats.taskFailed()
- fnid = self.rqdata.runq_fnid[task]
- self.failed_fnids.append(fnid)
- bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq), self.cfgData)
- if self.rqdata.taskData.abort:
- self.rq.state = runQueueCleanUp
-
- def task_skip(self, task, reason):
- self.runq_running[task] = 1
- self.setbuildable(task)
- bb.event.fire(runQueueTaskSkipped(task, self.stats, self.rq, reason), self.cfgData)
- self.task_completeoutright(task)
- self.stats.taskCompleted()
- self.stats.taskSkipped()
-
- def execute(self):
- """
- Run the tasks in a queue prepared by rqdata.prepare()
- """
-
- self.rq.read_workers()
-
- if self.stats.total == 0:
- # nothing to do
- self.rq.state = runQueueCleanUp
-
- task = self.sched.next()
- if task is not None:
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- taskname = self.rqdata.runq_task[task]
-
- if task in self.rq.scenequeue_covered:
- logger.debug(2, "Setscene covered task %s (%s)", task,
- self.rqdata.get_user_idstring(task))
- self.task_skip(task, "covered")
- return True
-
- if self.rq.check_stamp_task(task, taskname, cache=self.stampcache):
- logger.debug(2, "Stamp current task %s (%s)", task,
- self.rqdata.get_user_idstring(task))
- self.task_skip(task, "existing")
- return True
-
- taskdep = self.rqdata.dataCache.task_deps[fn]
- if 'noexec' in taskdep and taskname in taskdep['noexec']:
- startevent = runQueueTaskStarted(task, self.stats, self.rq,
- noexec=True)
- bb.event.fire(startevent, self.cfgData)
- self.runq_running[task] = 1
- self.stats.taskActive()
- if not self.cooker.configuration.dry_run:
- bb.build.make_stamp(taskname, self.rqdata.dataCache, fn)
- self.task_complete(task)
- return True
- else:
- startevent = runQueueTaskStarted(task, self.stats, self.rq)
- bb.event.fire(startevent, self.cfgData)
-
- taskdepdata = self.build_taskdepdata(task)
-
- taskdep = self.rqdata.dataCache.task_deps[fn]
- if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
- if not self.rq.fakeworker:
- try:
- self.rq.start_fakeworker(self)
- except OSError as exc:
- logger.critical("Failed to spawn fakeroot worker to run %s:%s: %s" % (fn, taskname, str(exc)))
- self.rq.state = runQueueFailed
- return True
- self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + "</runtask>")
- self.rq.fakeworker.stdin.flush()
- else:
- self.rq.worker.stdin.write("<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + "</runtask>")
- self.rq.worker.stdin.flush()
-
- self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
- self.build_stamps2.append(self.build_stamps[task])
- self.runq_running[task] = 1
- self.stats.taskActive()
- if self.stats.active < self.number_tasks:
- return True
-
- if self.stats.active > 0:
- self.rq.read_workers()
- return self.rq.active_fds()
-
- if len(self.failed_fnids) != 0:
- self.rq.state = runQueueFailed
- return True
-
- # Sanity Checks
- for task in xrange(self.stats.total):
- if self.runq_buildable[task] == 0:
- logger.error("Task %s never buildable!", task)
- if self.runq_running[task] == 0:
- logger.error("Task %s never ran!", task)
- if self.runq_complete[task] == 0:
- logger.error("Task %s never completed!", task)
- self.rq.state = runQueueComplete
-
- return True
-
- def build_taskdepdata(self, task):
- taskdepdata = {}
- next = self.rqdata.runq_depends[task]
- next.add(task)
- while next:
- additional = []
- for revdep in next:
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
- pn = self.rqdata.dataCache.pkg_fn[fn]
- taskname = self.rqdata.runq_task[revdep]
- deps = self.rqdata.runq_depends[revdep]
- provides = self.rqdata.dataCache.fn_provides[fn]
- taskdepdata[revdep] = [pn, taskname, fn, deps, provides]
- for revdep2 in deps:
- if revdep2 not in taskdepdata:
- additional.append(revdep2)
- next = additional
-
- #bb.note("Task %s: " % task + str(taskdepdata).replace("], ", "],\n"))
- return taskdepdata
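-
- # The result is a flat map over the task's transitive dependency cone which
- # the worker makes available to the running task as BB_TASKDEPDATA, e.g.
- # (illustrative values only):
- # { 4: ['quilt-native', 'do_compile', fn, set([2, 3]), ['quilt-native']], ... }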
-
-class RunQueueExecuteScenequeue(RunQueueExecute):
- def __init__(self, rq):
- RunQueueExecute.__init__(self, rq)
-
- self.scenequeue_covered = set()
- self.scenequeue_notcovered = set()
- self.scenequeue_notneeded = set()
-
- # If we don't have any setscene functions, skip this step
- if len(self.rqdata.runq_setscene) == 0:
- rq.scenequeue_covered = set()
- rq.state = runQueueRunInit
- return
-
- self.stats = RunQueueStats(len(self.rqdata.runq_setscene))
-
- sq_revdeps = []
- sq_revdeps_new = []
- sq_revdeps_squash = []
- self.sq_harddeps = {}
-
- # We need to construct a dependency graph for the setscene functions. Intermediate
- # dependencies between the setscene tasks only complicate the code. This code
- # therefore aims to collapse the huge runqueue dependency tree into a smaller one
- # only containing the setscene functions.
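- #
- # Worked example (illustrative): for
- # a:do_populate_sysroot -> b:do_configure -> ... -> b:do_populate_sysroot,
- # where only the do_populate_sysroot tasks are setscene-capable, the
- # intermediate tasks are folded away and the squashed graph keeps a single
- # edge between the two setscene tasks.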
-
- for task in xrange(self.stats.total):
- self.runq_running.append(0)
- self.runq_complete.append(0)
- self.runq_buildable.append(0)
-
- # First process the chains up to the first setscene task.
- endpoints = {}
- for task in xrange(len(self.rqdata.runq_fnid)):
- sq_revdeps.append(copy.copy(self.rqdata.runq_revdeps[task]))
- sq_revdeps_new.append(set())
- if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene:
- endpoints[task] = set()
-
- # Secondly process the chains between setscene tasks.
- for task in self.rqdata.runq_setscene:
- for dep in self.rqdata.runq_depends[task]:
- if dep not in endpoints:
- endpoints[dep] = set()
- endpoints[dep].add(task)
-
- def process_endpoints(endpoints):
- newendpoints = {}
- for point, task in endpoints.items():
- tasks = set()
- if task:
- tasks |= task
- if sq_revdeps_new[point]:
- tasks |= sq_revdeps_new[point]
- sq_revdeps_new[point] = set()
- if point in self.rqdata.runq_setscene:
- sq_revdeps_new[point] = tasks
- tasks = set()
- for dep in self.rqdata.runq_depends[point]:
- if point in sq_revdeps[dep]:
- sq_revdeps[dep].remove(point)
- if tasks:
- sq_revdeps_new[dep] |= tasks
- if (len(sq_revdeps[dep]) == 0 or len(sq_revdeps_new[dep]) != 0) and dep not in self.rqdata.runq_setscene:
- newendpoints[dep] = task
- if len(newendpoints) != 0:
- process_endpoints(newendpoints)
-
- process_endpoints(endpoints)
-
- # Build a list of setscene tasks which are "unskippable"
- # These are direct endpoints referenced by the build
- endpoints2 = {}
- sq_revdeps2 = []
- sq_revdeps_new2 = []
- def process_endpoints2(endpoints):
- newendpoints = {}
- for point, task in endpoints.items():
- tasks = set([point])
- if task:
- tasks |= task
- if sq_revdeps_new2[point]:
- tasks |= sq_revdeps_new2[point]
- sq_revdeps_new2[point] = set()
- if point in self.rqdata.runq_setscene:
- sq_revdeps_new2[point] = tasks
- for dep in self.rqdata.runq_depends[point]:
- if point in sq_revdeps2[dep]:
- sq_revdeps2[dep].remove(point)
- if tasks:
- sq_revdeps_new2[dep] |= tasks
- if (len(sq_revdeps2[dep]) == 0 or len(sq_revdeps_new2[dep]) != 0) and dep not in self.rqdata.runq_setscene:
- newendpoints[dep] = tasks
- if len(newendpoints) != 0:
- process_endpoints2(newendpoints)
- for task in xrange(len(self.rqdata.runq_fnid)):
- sq_revdeps2.append(copy.copy(self.rqdata.runq_revdeps[task]))
- sq_revdeps_new2.append(set())
- if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene:
- endpoints2[task] = set()
- process_endpoints2(endpoints2)
- self.unskippable = []
- for task in self.rqdata.runq_setscene:
- if sq_revdeps_new2[task]:
- self.unskippable.append(self.rqdata.runq_setscene.index(task))
-
- for task in xrange(len(self.rqdata.runq_fnid)):
- if task in self.rqdata.runq_setscene:
- deps = set()
- for dep in sq_revdeps_new[task]:
- deps.add(self.rqdata.runq_setscene.index(dep))
- sq_revdeps_squash.append(deps)
- elif len(sq_revdeps_new[task]) != 0:
- bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, aborting. Please report this problem.")
-
- # Resolve setscene inter-task dependencies
- # e.g. do_sometask_setscene[depends] = "targetname:do_someothertask_setscene"
- # Note that anything explicitly depended upon will have its reverse dependencies removed to avoid circular dependencies
- for task in self.rqdata.runq_setscene:
- realid = self.rqdata.taskData.gettask_id(self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]], self.rqdata.runq_task[task] + "_setscene", False)
- idepends = self.rqdata.taskData.tasks_idepends[realid]
- for (depid, idependtask) in idepends:
- if depid not in self.rqdata.taskData.build_targets:
- continue
-
- depdata = self.rqdata.taskData.build_targets[depid][0]
- if depdata is None:
- continue
- dep = self.rqdata.taskData.fn_index[depdata]
- taskid = self.rqdata.get_task_id(self.rqdata.taskData.getfn_id(dep), idependtask.replace("_setscene", ""))
- if taskid is None:
- bb.msg.fatal("RunQueue", "Task %s_setscene depends upon non-existent task %s:%s" % (self.rqdata.get_user_idstring(task), dep, idependtask))
-
- if not self.rqdata.runq_setscene.index(taskid) in self.sq_harddeps:
- self.sq_harddeps[self.rqdata.runq_setscene.index(taskid)] = set()
- self.sq_harddeps[self.rqdata.runq_setscene.index(taskid)].add(self.rqdata.runq_setscene.index(task))
-
- sq_revdeps_squash[self.rqdata.runq_setscene.index(task)].add(self.rqdata.runq_setscene.index(taskid))
- # Have to zero this to avoid circular dependencies
- sq_revdeps_squash[self.rqdata.runq_setscene.index(taskid)] = set()
-
- for task in self.sq_harddeps:
- for dep in self.sq_harddeps[task]:
- sq_revdeps_squash[dep].add(task)
-
- #for task in xrange(len(sq_revdeps_squash)):
- # realtask = self.rqdata.runq_setscene[task]
- # bb.warn("Task %s: %s_setscene is %s " % (task, self.rqdata.get_user_idstring(realtask) , sq_revdeps_squash[task]))
-
- self.sq_deps = []
- self.sq_revdeps = sq_revdeps_squash
- self.sq_revdeps2 = copy.deepcopy(self.sq_revdeps)
-
- for task in xrange(len(self.sq_revdeps)):
- self.sq_deps.append(set())
- for task in xrange(len(self.sq_revdeps)):
- for dep in self.sq_revdeps[task]:
- self.sq_deps[dep].add(task)
-
- for task in xrange(len(self.sq_revdeps)):
- if len(self.sq_revdeps[task]) == 0:
- self.runq_buildable[task] = 1
-
- self.outrightfail = []
- if self.rq.hashvalidate:
- sq_hash = []
- sq_hashfn = []
- sq_fn = []
- sq_taskname = []
- sq_task = []
- noexec = []
- stamppresent = []
- for task in xrange(len(self.sq_revdeps)):
- realtask = self.rqdata.runq_setscene[task]
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
- taskname = self.rqdata.runq_task[realtask]
- taskdep = self.rqdata.dataCache.task_deps[fn]
-
- if 'noexec' in taskdep and taskname in taskdep['noexec']:
- noexec.append(task)
- self.task_skip(task)
- bb.build.make_stamp(taskname + "_setscene", self.rqdata.dataCache, fn)
- continue
-
- if self.rq.check_stamp_task(realtask, taskname + "_setscene", cache=self.stampcache):
- logger.debug(2, 'Setscene stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(realtask))
- stamppresent.append(task)
- self.task_skip(task)
- continue
-
- if self.rq.check_stamp_task(realtask, taskname, recurse = True, cache=self.stampcache):
- logger.debug(2, 'Normal stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(realtask))
- stamppresent.append(task)
- self.task_skip(task)
- continue
-
- sq_fn.append(fn)
- sq_hashfn.append(self.rqdata.dataCache.hashfn[fn])
- sq_hash.append(self.rqdata.runq_hash[realtask])
- sq_taskname.append(taskname)
- sq_task.append(task)
- call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
- locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
- valid = bb.utils.better_eval(call, locs)
-
- valid_new = stamppresent
- for v in valid:
- valid_new.append(sq_task[v])
-
- for task in xrange(len(self.sq_revdeps)):
- if task not in valid_new and task not in noexec:
- realtask = self.rqdata.runq_setscene[task]
- logger.debug(2, 'No package found, so skipping setscene task %s',
- self.rqdata.get_user_idstring(realtask))
- self.outrightfail.append(task)
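-
- # The validation callback above (typically named via BB_HASHCHECK_FUNCTION)
- # receives parallel lists and is expected to return the indices of entries
- # whose setscene artefacts exist; a minimal sketch, assuming a hypothetical
- # artefact_exists() helper:
- #
- # def my_hash_check(sq_fn, sq_task, sq_hash, sq_hashfn, d):
- # return [i for i in xrange(len(sq_hash)) if artefact_exists(sq_hash[i])]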
-
- logger.info('Executing SetScene Tasks')
-
- self.rq.state = runQueueSceneRun
-
- def scenequeue_updatecounters(self, task, fail = False):
- for dep in self.sq_deps[task]:
- if fail and task in self.sq_harddeps and dep in self.sq_harddeps[task]:
- realtask = self.rqdata.runq_setscene[task]
- realdep = self.rqdata.runq_setscene[dep]
- logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (self.rqdata.get_user_idstring(realtask), self.rqdata.get_user_idstring(realdep)))
- self.scenequeue_updatecounters(dep, fail)
- continue
- if task not in self.sq_revdeps2[dep]:
- # May already have been removed by the fail case above
- continue
- self.sq_revdeps2[dep].remove(task)
- if len(self.sq_revdeps2[dep]) == 0:
- self.runq_buildable[dep] = 1
-
- def task_completeoutright(self, task):
- """
- Mark a task as completed
- Look at the reverse dependencies and mark any task with
- completed dependencies as buildable
- """
-
- index = self.rqdata.runq_setscene[task]
- logger.debug(1, 'Found task %s which could be accelerated',
- self.rqdata.get_user_idstring(index))
-
- self.scenequeue_covered.add(task)
- self.scenequeue_updatecounters(task)
-
- def task_complete(self, task):
- self.stats.taskCompleted()
- bb.event.fire(sceneQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
- self.task_completeoutright(task)
-
- def task_fail(self, task, result):
- self.stats.taskFailed()
- bb.event.fire(sceneQueueTaskFailed(task, self.stats, result, self), self.cfgData)
- self.scenequeue_notcovered.add(task)
- self.scenequeue_updatecounters(task, True)
-
- def task_failoutright(self, task):
- self.runq_running[task] = 1
- self.runq_buildable[task] = 1
- self.stats.taskCompleted()
- self.stats.taskSkipped()
- index = self.rqdata.runq_setscene[task]
- self.scenequeue_notcovered.add(task)
- self.scenequeue_updatecounters(task, True)
-
- def task_skip(self, task):
- self.runq_running[task] = 1
- self.runq_buildable[task] = 1
- self.task_completeoutright(task)
- self.stats.taskCompleted()
- self.stats.taskSkipped()
-
- def execute(self):
- """
- Run the tasks in a queue prepared by prepare_runqueue
- """
-
- self.rq.read_workers()
-
- task = None
- if self.stats.active < self.number_tasks:
- # Find the next setscene to run
- for nexttask in xrange(self.stats.total):
- if self.runq_buildable[nexttask] == 1 and self.runq_running[nexttask] != 1:
- if nexttask in self.unskippable:
- logger.debug(2, "Setscene task %s is unskippable" % self.rqdata.get_user_idstring(self.rqdata.runq_setscene[nexttask]))
- if nexttask not in self.unskippable and len(self.sq_revdeps[nexttask]) > 0 and self.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sq_revdeps[nexttask], True):
- realtask = self.rqdata.runq_setscene[nexttask]
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
- foundtarget = False
- for target in self.rqdata.target_pairs:
- if target[0] == fn and target[1] == self.rqdata.runq_task[realtask]:
- foundtarget = True
- break
- if not foundtarget:
- logger.debug(2, "Skipping setscene for task %s" % self.rqdata.get_user_idstring(self.rqdata.runq_setscene[nexttask]))
- self.task_skip(nexttask)
- self.scenequeue_notneeded.add(nexttask)
- return True
- if nexttask in self.outrightfail:
- self.task_failoutright(nexttask)
- return True
- task = nexttask
- break
- if task is not None:
- realtask = self.rqdata.runq_setscene[task]
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
-
- taskname = self.rqdata.runq_task[realtask] + "_setscene"
- if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask], recurse = True, cache=self.stampcache):
- logger.debug(2, 'Stamp for underlying task %s(%s) is current, so skipping setscene variant',
- task, self.rqdata.get_user_idstring(realtask))
- self.task_failoutright(task)
- return True
-
- if self.cooker.configuration.force:
- for target in self.rqdata.target_pairs:
- if target[0] == fn and target[1] == self.rqdata.runq_task[realtask]:
- self.task_failoutright(task)
- return True
-
- if self.rq.check_stamp_task(realtask, taskname, cache=self.stampcache):
- logger.debug(2, 'Setscene stamp current task %s(%s), so skip it and its dependencies',
- task, self.rqdata.get_user_idstring(realtask))
- self.task_skip(task)
- return True
-
- startevent = sceneQueueTaskStarted(task, self.stats, self.rq)
- bb.event.fire(startevent, self.cfgData)
-
- taskdep = self.rqdata.dataCache.task_deps[fn]
- if 'fakeroot' in taskdep and taskname in taskdep['fakeroot']:
- if not self.rq.fakeworker:
- self.rq.start_fakeworker(self)
- self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + "</runtask>")
- self.rq.fakeworker.stdin.flush()
- else:
- self.rq.worker.stdin.write("<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + "</runtask>")
- self.rq.worker.stdin.flush()
-
- self.runq_running[task] = 1
- self.stats.taskActive()
- if self.stats.active < self.number_tasks:
- return True
-
- if self.stats.active > 0:
- self.rq.read_workers()
- return self.rq.active_fds()
-
- #for task in xrange(self.stats.total):
- # if self.runq_running[task] != 1:
- # buildable = self.runq_buildable[task]
- # revdeps = self.sq_revdeps[task]
- # bb.warn("Found we didn't run %s %s %s %s" % (task, buildable, str(revdeps), self.rqdata.get_user_idstring(self.rqdata.runq_setscene[task])))
-
- # Convert scenequeue_covered task numbers into full taskgraph ids
- oldcovered = self.scenequeue_covered
- self.rq.scenequeue_covered = set()
- for task in oldcovered:
- self.rq.scenequeue_covered.add(self.rqdata.runq_setscene[task])
-
- logger.debug(1, 'We can skip tasks %s', sorted(self.rq.scenequeue_covered))
-
- self.rq.state = runQueueRunInit
-
- completeevent = sceneQueueComplete(self.stats, self.rq)
- bb.event.fire(completeevent, self.cfgData)
-
- return True
-
- def runqueue_process_waitpid(self, task, status):
- task = self.rq.rqdata.runq_setscene.index(task)
-
- RunQueueExecute.runqueue_process_waitpid(self, task, status)
-
-class TaskFailure(Exception):
- """
- Exception raised when a task in a runqueue fails
- """
- def __init__(self, x):
- self.args = x
-
-
-class runQueueExitWait(bb.event.Event):
- """
- Event when waiting for task processes to exit
- """
-
- def __init__(self, remain):
- self.remain = remain
- self.message = "Waiting for %s active tasks to finish" % remain
- bb.event.Event.__init__(self)
-
-class runQueueEvent(bb.event.Event):
- """
- Base runQueue event class
- """
- def __init__(self, task, stats, rq):
- self.taskid = task
- self.taskstring = rq.rqdata.get_user_idstring(task)
- self.taskname = rq.rqdata.get_task_name(task)
- self.taskfile = rq.rqdata.get_task_file(task)
- self.taskhash = rq.rqdata.get_task_hash(task)
- self.stats = stats.copy()
- bb.event.Event.__init__(self)
-
-class sceneQueueEvent(runQueueEvent):
- """
- Base sceneQueue event class
- """
- def __init__(self, task, stats, rq, noexec=False):
- runQueueEvent.__init__(self, task, stats, rq)
- realtask = rq.rqdata.runq_setscene[task]
- self.taskstring = rq.rqdata.get_user_idstring(realtask, "_setscene")
- self.taskname = rq.rqdata.get_task_name(realtask) + "_setscene"
- self.taskfile = rq.rqdata.get_task_file(realtask)
- self.taskhash = rq.rqdata.get_task_hash(realtask)
-
-class runQueueTaskStarted(runQueueEvent):
- """
- Event notifying a task was started
- """
- def __init__(self, task, stats, rq, noexec=False):
- runQueueEvent.__init__(self, task, stats, rq)
- self.noexec = noexec
-
-class sceneQueueTaskStarted(sceneQueueEvent):
- """
- Event notifying a setscene task was started
- """
- def __init__(self, task, stats, rq, noexec=False):
- sceneQueueEvent.__init__(self, task, stats, rq)
- self.noexec = noexec
-
-class runQueueTaskFailed(runQueueEvent):
- """
- Event notifying a task failed
- """
- def __init__(self, task, stats, exitcode, rq):
- runQueueEvent.__init__(self, task, stats, rq)
- self.exitcode = exitcode
-
-class sceneQueueTaskFailed(sceneQueueEvent):
- """
- Event notifying a setscene task failed
- """
- def __init__(self, task, stats, exitcode, rq):
- sceneQueueEvent.__init__(self, task, stats, rq)
- self.exitcode = exitcode
-
-class sceneQueueComplete(sceneQueueEvent):
- """
- Event when all the sceneQueue tasks are complete
- """
- def __init__(self, stats, rq):
- self.stats = stats.copy()
- bb.event.Event.__init__(self)
-
-class runQueueTaskCompleted(runQueueEvent):
- """
- Event notifying a task completed
- """
-
-class sceneQueueTaskCompleted(sceneQueueEvent):
- """
- Event notifying a setscene task completed
- """
-
-class runQueueTaskSkipped(runQueueEvent):
- """
- Event notifying a task was skipped
- """
- def __init__(self, task, stats, rq, reason):
- runQueueEvent.__init__(self, task, stats, rq)
- self.reason = reason
-
-class runQueuePipe():
- """
- Abstraction for a pipe between a worker thread and the server
- """
- def __init__(self, pipein, pipeout, d, rq, rqexec):
- self.input = pipein
- if pipeout:
- pipeout.close()
- bb.utils.nonblockingfd(self.input)
- self.queue = ""
- self.d = d
- self.rq = rq
- self.rqexec = rqexec
-
- def setrunqueueexec(self, rqexec):
- self.rqexec = rqexec
-
- def read(self):
- for w in [self.rq.worker, self.rq.fakeworker]:
- if not w:
- continue
- w.poll()
- if w.returncode is not None and not self.rq.teardown:
- name = None
- if self.rq.worker and w.pid == self.rq.worker.pid:
- name = "Worker"
- elif self.rq.fakeworker and w.pid == self.rq.fakeworker.pid:
- name = "Fakeroot"
- bb.error("%s process (%s) exited unexpectedly (%s), shutting down..." % (name, w.pid, str(w.returncode)))
- self.rq.finish_runqueue(True)
-
- start = len(self.queue)
- try:
- self.queue = self.queue + self.input.read(102400)
- except (OSError, IOError) as e:
- if e.errno != errno.EAGAIN:
- raise
- end = len(self.queue)
- found = True
- while found and len(self.queue):
- found = False
- index = self.queue.find("</event>")
- while index != -1 and self.queue.startswith("<event>"):
- try:
- event = pickle.loads(self.queue[7:index])
- except ValueError as e:
- bb.msg.fatal("RunQueue", "failed load pickle '%s': '%s'" % (e, self.queue[7:index]))
- bb.event.fire_from_worker(event, self.d)
- found = True
- self.queue = self.queue[index+8:]
- index = self.queue.find("</event>")
- index = self.queue.find("</exitcode>")
- while index != -1 and self.queue.startswith("<exitcode>"):
- try:
- task, status = pickle.loads(self.queue[10:index])
- except ValueError as e:
- bb.msg.fatal("RunQueue", "failed load pickle '%s': '%s'" % (e, self.queue[10:index]))
- self.rqexec.runqueue_process_waitpid(task, status)
- found = True
- self.queue = self.queue[index+11:]
- index = self.queue.find("</exitcode>")
- return (end > start)
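-
- # The stream parsed above is a simple framed protocol written by the worker
- # process: pickled payloads wrapped in <event>...</event> and
- # <exitcode>...</exitcode> markers. A conforming writer is essentially:
- #
- # pipe.write("<event>" + pickle.dumps(event) + "</event>")
- # pipe.write("<exitcode>" + pickle.dumps((task, status)) + "</exitcode>")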
-
- def close(self):
- while self.read():
- continue
- if len(self.queue) > 0:
- print("Warning, worker left partial message: %s" % self.queue)
- self.input.close()
diff --git a/yocto-poky/bitbake/lib/bb/server/__init__.py b/yocto-poky/bitbake/lib/bb/server/__init__.py
deleted file mode 100644
index 538a633fe..000000000
--- a/yocto-poky/bitbake/lib/bb/server/__init__.py
+++ /dev/null
@@ -1,99 +0,0 @@
-#
-# BitBake Base Server Code
-#
-# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
-# Copyright (C) 2006 - 2008 Richard Purdie
-# Copyright (C) 2013 Alexandru Damian
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-""" Base code for Bitbake server process
-
-Have a common base for that all Bitbake server classes ensures a consistent
-approach to the interface, and minimize risks associated with code duplication.
-
-"""
-
-""" BaseImplServer() the base class for all XXServer() implementations.
-
- These classes contain the actual code that runs the server side, i.e.
- listens for the commands and executes them. Although these implementations
- contain all the data of the original bitbake command, i.e the cooker instance,
- they may well run on a different process or even machine.
-
-"""
-
-class BaseImplServer():
- def __init__(self):
- self._idlefuns = {}
-
- def addcooker(self, cooker):
- self.cooker = cooker
-
- def register_idle_function(self, function, data):
- """Register a function to be called while the server is idle"""
- assert hasattr(function, '__call__')
- self._idlefuns[function] = data
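-
- # Idle functions follow a small contract (see ProcessServer.idle_commands
- # and XMLRPCServer._serve_forever): return False to be unregistered, True
- # to be polled again without sleeping, a float to request that sleep, or a
- # list of file descriptors to select() on. A minimal sketch:
- #
- # def my_idle(server, data, abort):
- # if abort or nothing_left_to_do(): # nothing_left_to_do() is hypothetical
- # return False
- # return 0.1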
-
-
-
-""" BitBakeBaseServerConnection class is the common ancestor to all
- BitBakeServerConnection classes.
-
- These classes control the remote server. The only command currently
- implemented is the terminate() command.
-
-"""
-
-class BitBakeBaseServerConnection():
- def __init__(self, serverImpl):
- pass
-
- def terminate(self):
- pass
-
- def setupEventQueue(self):
- pass
-
-
-""" BitBakeBaseServer class is the common ancestor to all Bitbake servers
-
- Derive this class in order to implement a BitBakeServer which is the
- controlling stub for the actual server implementation
-
-"""
-class BitBakeBaseServer(object):
- def initServer(self):
- self.serverImpl = None # we ensure a runtime crash if not overloaded
- self.connection = None
- return
-
- def addcooker(self, cooker):
- self.cooker = cooker
- self.serverImpl.addcooker(cooker)
-
- def getServerIdleCB(self):
- return self.serverImpl.register_idle_function
-
- def saveConnectionDetails(self):
- return
-
- def detach(self):
- return
-
- def establishConnection(self, featureset):
- raise "Must redefine the %s.establishConnection()" % self.__class__.__name__
-
- def endSession(self):
- self.connection.terminate()
diff --git a/yocto-poky/bitbake/lib/bb/server/process.py b/yocto-poky/bitbake/lib/bb/server/process.py
deleted file mode 100644
index a3078a873..000000000
--- a/yocto-poky/bitbake/lib/bb/server/process.py
+++ /dev/null
@@ -1,268 +0,0 @@
-#
-# BitBake Process based server.
-#
-# Copyright (C) 2010 Bob Foerster <robert@erafx.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""
- This module implements a multiprocessing.Process based server for bitbake.
-"""
-
-import bb
-import bb.event
-import itertools
-import logging
-import multiprocessing
-import os
-import signal
-import sys
-import time
-import select
-from Queue import Empty
-from multiprocessing import Event, Process, util, Queue, Pipe, queues, Manager
-
-from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
-
-logger = logging.getLogger('BitBake')
-
-class ServerCommunicator():
- def __init__(self, connection, event_handle, server):
- self.connection = connection
- self.event_handle = event_handle
- self.server = server
-
- def runCommand(self, command):
- # @todo try/except
- self.connection.send(command)
-
- if not self.server.is_alive():
- raise SystemExit
-
- while True:
- # don't let the user ctrl-c while we're waiting for a response
- try:
- for idx in range(0, 4): # up to four 5s polls, 20s in total
- if self.connection.poll(5):
- return self.connection.recv()
- else:
- bb.warn("Timeout while attempting to communicate with bitbake server")
- bb.fatal("Gave up; Too many tries: timeout while attempting to communicate with bitbake server")
- except KeyboardInterrupt:
- pass
-
- def getEventHandle(self):
- return self.event_handle.value
-
-class EventAdapter():
- """
- Adapter to wrap our event queue since the caller (bb.event) expects to
- call a send() method, but our actual queue only has put()
- """
- def __init__(self, queue):
- self.queue = queue
-
- def send(self, event):
- try:
- self.queue.put(event)
- except Exception as err:
- print("EventAdapter puked: %s" % str(err))
-
-
-class ProcessServer(Process, BaseImplServer):
- profile_filename = "profile.log"
- profile_processed_filename = "profile.log.processed"
-
- def __init__(self, command_channel, event_queue, featurelist):
- BaseImplServer.__init__(self)
- Process.__init__(self)
- self.command_channel = command_channel
- self.event_queue = event_queue
- self.event = EventAdapter(event_queue)
- self.featurelist = featurelist
- self.quit = False
-
- self.quitin, self.quitout = Pipe()
- self.event_handle = multiprocessing.Value("i")
-
- def run(self):
- for event in bb.event.ui_queue:
- self.event_queue.put(event)
- self.event_handle.value = bb.event.register_UIHhandler(self, True)
-
- bb.cooker.server_main(self.cooker, self.main)
-
- def main(self):
- # Ignore SIGINT within the server, as all SIGINT handling is done by
- # the UI and communicated to us
- self.quitin.close()
- signal.signal(signal.SIGINT, signal.SIG_IGN)
- bb.utils.set_process_name("Cooker")
- while not self.quit:
- try:
- if self.command_channel.poll():
- command = self.command_channel.recv()
- self.runCommand(command)
- if self.quitout.poll():
- self.quitout.recv()
- self.quit = True
- try:
- self.runCommand(["stateForceShutdown"])
- except:
- pass
-
- self.idle_commands(.1, [self.command_channel, self.quitout])
- except Exception:
- logger.exception('Running command %s', command)
-
- self.event_queue.close()
- bb.event.unregister_UIHhandler(self.event_handle.value)
- self.command_channel.close()
- self.cooker.shutdown(True)
- self.quitout.close()
-
- def idle_commands(self, delay, fds=None):
- nextsleep = delay
- if not fds:
- fds = []
-
- for function, data in self._idlefuns.items():
- try:
- retval = function(self, data, False)
- if retval is False:
- del self._idlefuns[function]
- nextsleep = None
- elif retval is True:
- nextsleep = None
- elif isinstance(retval, float):
- if (retval < nextsleep):
- nextsleep = retval
- elif nextsleep is None:
- continue
- else:
- fds = fds + retval
- except SystemExit:
- raise
- except Exception as exc:
- if not isinstance(exc, bb.BBHandledException):
- logger.exception('Running idle function')
- del self._idlefuns[function]
- self.quit = True
-
- if nextsleep is not None:
- select.select(fds,[],[],nextsleep)
-
- def runCommand(self, command):
- """
- Run a cooker command on the server
- """
- self.command_channel.send(self.cooker.command.runCommand(command))
-
- def stop(self):
- self.quitin.send("quit")
- self.quitin.close()
-
-class BitBakeProcessServerConnection(BitBakeBaseServerConnection):
- def __init__(self, serverImpl, ui_channel, event_queue):
- self.procserver = serverImpl
- self.ui_channel = ui_channel
- self.event_queue = event_queue
- self.connection = ServerCommunicator(self.ui_channel, self.procserver.event_handle, self.procserver)
- self.events = self.event_queue
- self.terminated = False
-
- def sigterm_terminate(self):
- bb.error("UI received SIGTERM")
- self.terminate()
-
- def terminate(self):
- if self.terminated:
- return
- self.terminated = True
- def flushevents():
- while True:
- try:
- event = self.event_queue.get(block=False)
- except (Empty, IOError):
- break
- if isinstance(event, logging.LogRecord):
- logger.handle(event)
-
- signal.signal(signal.SIGINT, signal.SIG_IGN)
- self.procserver.stop()
-
- while self.procserver.is_alive():
- flushevents()
- self.procserver.join(0.1)
-
- self.ui_channel.close()
- self.event_queue.close()
- self.event_queue.setexit()
-
-# Wrap Queue to provide API which isn't server implementation specific
-class ProcessEventQueue(multiprocessing.queues.Queue):
- def __init__(self, maxsize):
- multiprocessing.queues.Queue.__init__(self, maxsize)
- self.exit = False
- bb.utils.set_process_name("ProcessEQueue")
-
- def setexit(self):
- self.exit = True
-
- def waitEvent(self, timeout):
- if self.exit:
- sys.exit(1)
- try:
- if not self.server.is_alive():
- self.setexit()
- return None
- return self.get(True, timeout)
- except Empty:
- return None
-
- def getEvent(self):
- try:
- if not self.server.is_alive():
- self.setexit()
- return None
- return self.get(False)
- except Empty:
- return None
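-
- # UI front-ends consume this queue through the server-agnostic API above,
- # roughly (sketch; handle_event() is hypothetical):
- #
- # event = server_connection.events.waitEvent(0.25)
- # if event is not None:
- # handle_event(event)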
-
-
-class BitBakeServer(BitBakeBaseServer):
- def initServer(self, single_use=True):
- # establish communication channels. We use bidirectional pipes for
- # ui <--> server command/response pairs
- # and a queue for server -> ui event notifications
- #
- self.ui_channel, self.server_channel = Pipe()
- self.event_queue = ProcessEventQueue(0)
- self.serverImpl = ProcessServer(self.server_channel, self.event_queue, None)
- self.event_queue.server = self.serverImpl
-
- def detach(self):
- self.serverImpl.start()
- return
-
- def establishConnection(self, featureset):
-
- self.connection = BitBakeProcessServerConnection(self.serverImpl, self.ui_channel, self.event_queue)
-
- _, error = self.connection.connection.runCommand(["setFeatures", featureset])
- if error:
- logger.error("Unable to set the cooker to the correct featureset: %s" % error)
- raise BaseException(error)
- signal.signal(signal.SIGTERM, lambda i, s: self.connection.sigterm_terminate())
- return self.connection
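-
-# The expected lifecycle of this server, as driven by bitbake's main entry
-# point, is roughly (sketch): initServer() -> addcooker() -> detach() ->
-# establishConnection(featureset) -> ... -> endSession().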
diff --git a/yocto-poky/bitbake/lib/bb/server/xmlrpc.py b/yocto-poky/bitbake/lib/bb/server/xmlrpc.py
deleted file mode 100644
index ace1cf646..000000000
--- a/yocto-poky/bitbake/lib/bb/server/xmlrpc.py
+++ /dev/null
@@ -1,390 +0,0 @@
-#
-# BitBake XMLRPC Server
-#
-# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
-# Copyright (C) 2006 - 2008 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""
- This module implements an xmlrpc server for BitBake.
-
- Use this by deriving a class from BitBakeXMLRPCServer and then adding
- methods which you want to "export" via XMLRPC. If the methods have the
- prefix xmlrpc_, then registering those function will happen automatically,
- if not, you need to call register_function.
-
- Use register_idle_function() to add a function which the xmlrpc server
- calls from within serve_forever when no requests are pending. Make sure
- that those functions are non-blocking or else you will introduce latency
- in the server's main loop.
-"""
-
-import bb
-import xmlrpclib, sys
-from bb import daemonize
-from bb.ui import uievent
-import hashlib, time
-import socket
-import os, signal
-import threading
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-
-DEBUG = False
-
-from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
-import inspect, select, httplib
-
-from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
-
-class BBTransport(xmlrpclib.Transport):
- def __init__(self, timeout):
- self.timeout = timeout
- self.connection_token = None
- xmlrpclib.Transport.__init__(self)
-
- # Modified from default to pass timeout to HTTPConnection
- def make_connection(self, host):
- #return an existing connection if possible. This allows
- #HTTP/1.1 keep-alive.
- if self._connection and host == self._connection[0]:
- return self._connection[1]
-
- # create a HTTP connection object from a host descriptor
- chost, self._extra_headers, x509 = self.get_host_info(host)
- #store the host argument along with the connection object
- self._connection = host, httplib.HTTPConnection(chost, timeout=self.timeout)
- return self._connection[1]
-
- def set_connection_token(self, token):
- self.connection_token = token
-
- def send_content(self, h, body):
- if self.connection_token:
- h.putheader("Bitbake-token", self.connection_token)
- xmlrpclib.Transport.send_content(self, h, body)
-
-def _create_server(host, port, timeout = 60):
- t = BBTransport(timeout)
- s = xmlrpclib.ServerProxy("http://%s:%d/" % (host, port), transport=t, allow_none=True)
- return s, t
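-
-# Usage sketch: callers keep both the proxy and its transport so the
-# connection token can be attached to subsequent requests, e.g.:
-#
-# s, t = _create_server("localhost", 8200) # host/port are illustrative
-# t.set_connection_token(s.addClient())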
-
-class BitBakeServerCommands():
-
- def __init__(self, server):
- self.server = server
- self.has_client = False
-
- def registerEventHandler(self, host, port):
- """
- Register a remote UI Event Handler
- """
- s, t = _create_server(host, port)
-
- # we don't allow connections if the cooker is running
- if (self.cooker.state in [bb.cooker.state.parsing, bb.cooker.state.running]):
- return None, "Cooker is busy: %s" % bb.cooker.state.get_name(self.cooker.state)
-
- self.event_handle = bb.event.register_UIHhandler(s, True)
- return self.event_handle, 'OK'
-
- def unregisterEventHandler(self, handlerNum):
- """
- Unregister a remote UI Event Handler
- """
- return bb.event.unregister_UIHhandler(handlerNum)
-
- def runCommand(self, command):
- """
- Run a cooker command on the server
- """
- return self.cooker.command.runCommand(command, self.server.readonly)
-
- def getEventHandle(self):
- return self.event_handle
-
- def terminateServer(self):
- """
- Trigger the server to quit
- """
- self.server.quit = True
- print("Server (cooker) exiting")
- return
-
- def addClient(self):
- if self.has_client:
- return None
- token = hashlib.md5(str(time.time())).hexdigest()
- self.server.set_connection_token(token)
- self.has_client = True
- return token
-
- def removeClient(self):
- if self.has_client:
- self.server.set_connection_token(None)
- self.has_client = False
- if self.server.single_use:
- self.server.quit = True
-
-# This request handler checks if the request has a "Bitbake-token" header
-# field (this comes from the client side) and compares it with its internal
-# "Bitbake-token" field (this comes from the server). If the two are not
-# equal, it is assumed that a client is trying to connect to the server
-# while another client is connected to the server. In this case, a 503 error
-# ("service unavailable") is returned to the client.
-class BitBakeXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
- def __init__(self, request, client_address, server):
- self.server = server
- SimpleXMLRPCRequestHandler.__init__(self, request, client_address, server)
-
- def do_POST(self):
- try:
- remote_token = self.headers["Bitbake-token"]
- except:
- remote_token = None
- if remote_token != self.server.connection_token and remote_token != "observer":
- self.report_503()
- else:
- if remote_token == "observer":
- self.server.readonly = True
- else:
- self.server.readonly = False
- SimpleXMLRPCRequestHandler.do_POST(self)
-
- def report_503(self):
- self.send_response(503)
- response = 'No more clients allowed'
- self.send_header("Content-type", "text/plain")
- self.send_header("Content-length", str(len(response)))
- self.end_headers()
- self.wfile.write(response)
-
-
-class XMLRPCProxyServer(BaseImplServer):
- """ not a real working server, but a stub for a proxy server connection
-
- """
- def __init__(self, host, port):
- self.host = host
- self.port = port
-
-class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
- # remove this when you're done with debugging
- # allow_reuse_address = True
-
- def __init__(self, interface, single_use=False):
- """
- Constructor
- """
- BaseImplServer.__init__(self)
- self.single_use = single_use
- # Use auto port configuration
- if (interface[1] == -1):
- interface = (interface[0], 0)
- SimpleXMLRPCServer.__init__(self, interface,
- requestHandler=BitBakeXMLRPCRequestHandler,
- logRequests=False, allow_none=True)
- self.host, self.port = self.socket.getsockname()
- self.connection_token = None
- #self.register_introspection_functions()
- self.commands = BitBakeServerCommands(self)
- self.autoregister_all_functions(self.commands, "")
- self.interface = interface
-
- def addcooker(self, cooker):
- BaseImplServer.addcooker(self, cooker)
- self.commands.cooker = cooker
-
- def autoregister_all_functions(self, context, prefix):
- """
- Convenience method for registering all functions in the scope
- of this class that start with a common prefix
- """
- methodlist = inspect.getmembers(context, inspect.ismethod)
- for name, method in methodlist:
- if name.startswith(prefix):
- self.register_function(method, name[len(prefix):])
-
-
- def serve_forever(self):
- # Start the actual XMLRPC server
- bb.cooker.server_main(self.cooker, self._serve_forever)
-
- def _serve_forever(self):
- """
- Serve Requests. Overloaded to honor a quit command
- """
- self.quit = False
- while not self.quit:
- fds = [self]
- nextsleep = 0.1
- for function, data in self._idlefuns.items():
- retval = None
- try:
- retval = function(self, data, False)
- if retval is False:
- del self._idlefuns[function]
- elif retval is True:
- nextsleep = 0
- elif isinstance(retval, float):
- if (retval < nextsleep):
- nextsleep = retval
- else:
- fds = fds + retval
- except SystemExit:
- raise
- except:
- import traceback
- traceback.print_exc()
- if retval is None:
- # the function's execution failed; delete it
- del self._idlefuns[function]
-
- socktimeout = self.socket.gettimeout() or nextsleep
- socktimeout = min(socktimeout, nextsleep)
- # Mirror what BaseServer handle_request would do
- try:
- fd_sets = select.select(fds, [], [], socktimeout)
- if fd_sets[0] and self in fd_sets[0]:
- self._handle_request_noblock()
- except IOError:
- # we ignore interrupted calls
- pass
-
- # Tell idle functions we're exiting
- for function, data in self._idlefuns.items():
- try:
- retval = function(self, data, True)
- except:
- pass
- self.server_close()
- return
-
- def set_connection_token(self, token):
- self.connection_token = token
-
-class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):
- def __init__(self, serverImpl, clientinfo=("localhost", 0), observer_only = False, featureset = None):
- self.connection, self.transport = _create_server(serverImpl.host, serverImpl.port)
- self.clientinfo = clientinfo
- self.serverImpl = serverImpl
- self.observer_only = observer_only
- if featureset:
- self.featureset = featureset
- else:
- self.featureset = []
-
- def connect(self, token = None):
- if token is None:
- if self.observer_only:
- token = "observer"
- else:
- token = self.connection.addClient()
-
- if token is None:
- return None
-
- self.transport.set_connection_token(token)
- return self
-
- def setupEventQueue(self):
- self.events = uievent.BBUIEventQueue(self.connection, self.clientinfo)
- for event in bb.event.ui_queue:
- self.events.queue_event(event)
-
- _, error = self.connection.runCommand(["setFeatures", self.featureset])
- if error:
- # disconnect the client, we can't make the setFeature work
- self.connection.removeClient()
- # no need to log it here, the error shall be sent to the client
- raise BaseException(error)
-
- def removeClient(self):
- if not self.observer_only:
- self.connection.removeClient()
-
- def terminate(self):
- # Don't wait for server indefinitely
- import socket
- socket.setdefaulttimeout(2)
- try:
- self.events.system_quit()
- except:
- pass
- try:
- self.connection.removeClient()
- except:
- pass
-
-class BitBakeServer(BitBakeBaseServer):
- def initServer(self, interface = ("localhost", 0), single_use = False):
- self.interface = interface
- self.serverImpl = XMLRPCServer(interface, single_use)
-
- def detach(self):
- daemonize.createDaemon(self.serverImpl.serve_forever, "bitbake-cookerdaemon.log")
- del self.cooker
-
- def establishConnection(self, featureset):
- self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, self.interface, False, featureset)
- return self.connection.connect()
-
- def set_connection_token(self, token):
- self.connection.transport.set_connection_token(token)
-
-class BitBakeXMLRPCClient(BitBakeBaseServer):
-
- def __init__(self, observer_only = False, token = None):
- self.token = token
-
- self.observer_only = observer_only
- # if we need extra caches, just tell the server to load them all
- pass
-
- def saveConnectionDetails(self, remote):
- self.remote = remote
-
- def establishConnection(self, featureset):
- # The format of "remote" must be "server:port"
- try:
- [host, port] = self.remote.split(":")
- port = int(port)
- except Exception as e:
- bb.warn("Failed to read remote definition (%s)" % str(e))
- raise e
-
- # We need our IP for the server connection. We get the IP
- # by trying to connect with the server
- try:
- s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
- s.connect((host, port))
- ip = s.getsockname()[0]
- s.close()
- except Exception as e:
- bb.warn("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
- raise e
- try:
- self.serverImpl = XMLRPCProxyServer(host, port)
- self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, (ip, 0), self.observer_only, featureset)
- return self.connection.connect(self.token)
- except Exception as e:
- bb.warn("Could not connect to server at %s:%s (%s)" % (host, port, str(e)))
- raise e
-
- def endSession(self):
- self.connection.removeClient()
diff --git a/yocto-poky/bitbake/lib/bb/shell.py b/yocto-poky/bitbake/lib/bb/shell.py
deleted file mode 100644
index 1dd8d54bd..000000000
--- a/yocto-poky/bitbake/lib/bb/shell.py
+++ /dev/null
@@ -1,820 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-##########################################################################
-#
-# Copyright (C) 2005-2006 Michael 'Mickey' Lauer <mickey@Vanille.de>
-# Copyright (C) 2005-2006 Vanille Media
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-##########################################################################
-#
-# Thanks to:
-# * Holger Freyther <zecke@handhelds.org>
-# * Justin Patrin <papercrane@reversefold.com>
-#
-##########################################################################
-
-"""
-BitBake Shell
-
-IDEAS:
- * list defined tasks per package
- * list classes
- * toggle force
- * command to reparse just one (or more) bbfile(s)
- * automatic check if reparsing is necessary (inotify?)
- * frontend for bb file manipulation
- * more shell-like features:
- - output control, i.e. pipe output into grep, sort, etc.
- - job control, i.e. bring running commands into background and foreground
- * start parsing in background right after startup
- * ncurses interface
-
-PROBLEMS:
- * force doesn't always work
- * readline completion for commands with more than one parameter
-
-"""
-
-##########################################################################
-# Import and setup global variables
-##########################################################################
-
-from __future__ import print_function
-from functools import reduce
-try:
- set
-except NameError:
- from sets import Set as set
-import sys, os, readline, socket, httplib, urllib, commands, popen2, shlex, Queue, fnmatch
-from bb import data, parse, build, cache, taskdata, runqueue, providers as Providers
-
-__version__ = "0.5.3.1"
-__credits__ = """BitBake Shell Version %s (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de>
-Type 'help' for more information, press CTRL-D to exit.""" % __version__
-
-cmds = {}
-leave_mainloop = False
-last_exception = None
-cooker = None
-parsed = False
-debug = os.environ.get( "BBSHELL_DEBUG", "" )
-
-##########################################################################
-# Class BitBakeShellCommands
-##########################################################################
-
-class BitBakeShellCommands:
- """This class contains the valid commands for the shell"""
-
- def __init__( self, shell ):
- """Register all the commands"""
- self._shell = shell
- for attr in BitBakeShellCommands.__dict__:
- if not attr.startswith( "_" ):
- if attr.endswith( "_" ):
- command = attr[:-1].lower()
- else:
- command = attr[:].lower()
- method = getattr( BitBakeShellCommands, attr )
- debugOut( "registering command '%s'" % command )
- # scan number of arguments
- usage = getattr( method, "usage", "" )
- if usage != "<...>":
- numArgs = len( usage.split() )
- else:
- numArgs = -1
- shell.registerCommand( command, method, numArgs, "%s %s" % ( command, usage ), method.__doc__ )
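-
- # Every public method above is auto-registered as a shell command: a
- # trailing underscore is stripped (so exit_ registers as "exit") and the
- # optional .usage attribute drives the argument-count check. A hypothetical
- # new command would follow the same pattern:
- #
- # def frob( self, params ):
- # """Frob a providee"""
- # self.build( params, "frob" )
- # frob.usage = "<providee>"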
-
- def _checkParsed( self ):
- if not parsed:
- print("SHELL: This command needs to parse bbfiles...")
- self.parse( None )
-
- def _findProvider( self, item ):
- self._checkParsed()
- # Need to use taskData for this information
- preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 )
- if not preferred: preferred = item
- try:
- lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status)
- except KeyError:
- if item in cooker.status.providers:
- pf = cooker.status.providers[item][0]
- else:
- pf = None
- return pf
-
- def alias( self, params ):
- """Register a new name for a command"""
- new, old = params
- if not old in cmds:
- print("ERROR: Command '%s' not known" % old)
- else:
- cmds[new] = cmds[old]
- print("OK")
- alias.usage = "<alias> <command>"
-
- def buffer( self, params ):
- """Dump specified output buffer"""
- index = params[0]
- print(self._shell.myout.buffer( int( index ) ))
- buffer.usage = "<index>"
-
- def buffers( self, params ):
- """Show the available output buffers"""
- commands = self._shell.myout.bufferedCommands()
- if not commands:
- print("SHELL: No buffered commands available yet. Start doing something.")
- else:
- print("="*35, "Available Output Buffers", "="*27)
- for index, cmd in enumerate( commands ):
- print("| %s %s" % ( str( index ).ljust( 3 ), cmd ))
- print("="*88)
-
- def build( self, params, cmd = "build" ):
- """Build a providee"""
- global last_exception
- globexpr = params[0]
- self._checkParsed()
- names = globfilter( cooker.status.pkg_pn, globexpr )
- if len( names ) == 0: names = [ globexpr ]
- print("SHELL: Building %s" % ' '.join( names ))
-
- td = taskdata.TaskData(cooker.configuration.abort)
- localdata = data.createCopy(cooker.configuration.data)
- data.update_data(localdata)
- data.expandKeys(localdata)
-
- try:
- tasks = []
- for name in names:
- td.add_provider(localdata, cooker.status, name)
- providers = td.get_provider(name)
-
- if len(providers) == 0:
- raise Providers.NoProvider
-
- tasks.append([name, "do_%s" % cmd])
-
- td.add_unresolved(localdata, cooker.status)
-
- rq = runqueue.RunQueue(cooker, localdata, cooker.status, td, tasks)
- rq.prepare_runqueue()
- rq.execute_runqueue()
-
- except Providers.NoProvider:
- print("ERROR: No Provider")
- last_exception = Providers.NoProvider
-
- except runqueue.TaskFailure as fnids:
-            last_exception = fnids
-
- except build.FuncFailed as e:
- print("ERROR: Couldn't build '%s'" % names)
- last_exception = e
-
-
- build.usage = "<providee>"
-
- def clean( self, params ):
- """Clean a providee"""
- self.build( params, "clean" )
- clean.usage = "<providee>"
-
- def compile( self, params ):
- """Execute 'compile' on a providee"""
- self.build( params, "compile" )
- compile.usage = "<providee>"
-
- def configure( self, params ):
- """Execute 'configure' on a providee"""
- self.build( params, "configure" )
- configure.usage = "<providee>"
-
- def install( self, params ):
- """Execute 'install' on a providee"""
- self.build( params, "install" )
- install.usage = "<providee>"
-
- def edit( self, params ):
- """Call $EDITOR on a providee"""
- name = params[0]
- bbfile = self._findProvider( name )
- if bbfile is not None:
- os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), bbfile ) )
- else:
- print("ERROR: Nothing provides '%s'" % name)
- edit.usage = "<providee>"
-
- def environment( self, params ):
- """Dump out the outer BitBake environment"""
- cooker.showEnvironment()
-
- def exit_( self, params ):
- """Leave the BitBake Shell"""
- debugOut( "setting leave_mainloop to true" )
- global leave_mainloop
- leave_mainloop = True
-
- def fetch( self, params ):
- """Fetch a providee"""
- self.build( params, "fetch" )
- fetch.usage = "<providee>"
-
- def fileBuild( self, params, cmd = "build" ):
- """Parse and build a .bb file"""
- global last_exception
- name = params[0]
- bf = completeFilePath( name )
- print("SHELL: Calling '%s' on '%s'" % ( cmd, bf ))
-
- try:
- cooker.buildFile(bf, cmd)
- except parse.ParseError:
- print("ERROR: Unable to open or parse '%s'" % bf)
- except build.FuncFailed as e:
- print("ERROR: Couldn't build '%s'" % name)
- last_exception = e
-
- fileBuild.usage = "<bbfile>"
-
- def fileClean( self, params ):
- """Clean a .bb file"""
- self.fileBuild( params, "clean" )
- fileClean.usage = "<bbfile>"
-
- def fileEdit( self, params ):
- """Call $EDITOR on a .bb file"""
- name = params[0]
- os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), completeFilePath( name ) ) )
- fileEdit.usage = "<bbfile>"
-
- def fileRebuild( self, params ):
- """Rebuild (clean & build) a .bb file"""
- self.fileBuild( params, "rebuild" )
- fileRebuild.usage = "<bbfile>"
-
- def fileReparse( self, params ):
- """(re)Parse a bb file"""
- bbfile = params[0]
- print("SHELL: Parsing '%s'" % bbfile)
- parse.update_mtime( bbfile )
- cooker.parser.reparse(bbfile)
- if False: #fromCache:
- print("SHELL: File has not been updated, not reparsing")
- else:
- print("SHELL: Parsed")
- fileReparse.usage = "<bbfile>"
-
- def abort( self, params ):
- """Toggle abort task execution flag (see bitbake -k)"""
- cooker.configuration.abort = not cooker.configuration.abort
- print("SHELL: Abort Flag is now '%s'" % repr( cooker.configuration.abort ))
-
- def force( self, params ):
- """Toggle force task execution flag (see bitbake -f)"""
- cooker.configuration.force = not cooker.configuration.force
- print("SHELL: Force Flag is now '%s'" % repr( cooker.configuration.force ))
-
- def help( self, params ):
- """Show a comprehensive list of commands and their purpose"""
- print("="*30, "Available Commands", "="*30)
- for cmd in sorted(cmds):
- function, numparams, usage, helptext = cmds[cmd]
- print("| %s | %s" % (usage.ljust(30), helptext))
- print("="*78)
-
- def lastError( self, params ):
- """Show the reason or log that was produced by the last BitBake event exception"""
- if last_exception is None:
- print("SHELL: No Errors yet (Phew)...")
- else:
- reason, event = last_exception.args
- print("SHELL: Reason for the last error: '%s'" % reason)
- if ':' in reason:
-                msg, filename = reason.split( ':', 1 )
- filename = filename.strip()
- print("SHELL: Dumping log file for last error:")
- try:
- print(open( filename ).read())
- except IOError:
- print("ERROR: Couldn't open '%s'" % filename)
-
- def match( self, params ):
- """Dump all files or providers matching a glob expression"""
- what, globexpr = params
- if what == "files":
- self._checkParsed()
- for key in globfilter( cooker.status.pkg_fn, globexpr ): print(key)
- elif what == "providers":
- self._checkParsed()
- for key in globfilter( cooker.status.pkg_pn, globexpr ): print(key)
- else:
-            print("Usage: match %s" % self.match.usage)
- match.usage = "<files|providers> <glob>"
-
- def new( self, params ):
- """Create a new .bb file and open the editor"""
- dirname, filename = params
- packages = '/'.join( data.getVar( "BBFILES", cooker.configuration.data, 1 ).split('/')[:-2] )
- fulldirname = "%s/%s" % ( packages, dirname )
-
- if not os.path.exists( fulldirname ):
- print("SHELL: Creating '%s'" % fulldirname)
- os.mkdir( fulldirname )
- if os.path.exists( fulldirname ) and os.path.isdir( fulldirname ):
- if os.path.exists( "%s/%s" % ( fulldirname, filename ) ):
- print("SHELL: ERROR: %s/%s already exists" % ( fulldirname, filename ))
- return False
- print("SHELL: Creating '%s/%s'" % ( fulldirname, filename ))
- newpackage = open( "%s/%s" % ( fulldirname, filename ), "w" )
- print("""DESCRIPTION = ""
-SECTION = ""
-AUTHOR = ""
-HOMEPAGE = ""
-MAINTAINER = ""
-LICENSE = "GPL"
-PR = "r0"
-
-SRC_URI = ""
-
-#inherit base
-
-#do_configure() {
-#
-#}
-
-#do_compile() {
-#
-#}
-
-#do_stage() {
-#
-#}
-
-#do_install() {
-#
-#}
-""", file=newpackage)
- newpackage.close()
-        os.system( "%s %s/%s" % ( os.environ.get( "EDITOR", "vi" ), fulldirname, filename ) )
- new.usage = "<directory> <filename>"
-
- def package( self, params ):
- """Execute 'package' on a providee"""
- self.build( params, "package" )
- package.usage = "<providee>"
-
- def pasteBin( self, params ):
- """Send a command + output buffer to the pastebin at http://rafb.net/paste"""
- index = params[0]
- contents = self._shell.myout.buffer( int( index ) )
- sendToPastebin( "output of " + params[0], contents )
- pasteBin.usage = "<index>"
-
- def pasteLog( self, params ):
- """Send the last event exception error log (if there is one) to http://rafb.net/paste"""
- if last_exception is None:
- print("SHELL: No Errors yet (Phew)...")
- else:
- reason, event = last_exception.args
- print("SHELL: Reason for the last error: '%s'" % reason)
- if ':' in reason:
-                msg, filename = reason.split( ':', 1 )
- filename = filename.strip()
- print("SHELL: Pasting log file to pastebin...")
-
- file = open( filename ).read()
- sendToPastebin( "contents of " + filename, file )
-
- def patch( self, params ):
- """Execute 'patch' command on a providee"""
- self.build( params, "patch" )
- patch.usage = "<providee>"
-
- def parse( self, params ):
- """(Re-)parse .bb files and calculate the dependency graph"""
- cooker.status = cache.CacheData(cooker.caches_array)
- ignore = data.getVar("ASSUME_PROVIDED", cooker.configuration.data, 1) or ""
- cooker.status.ignored_dependencies = set( ignore.split() )
- cooker.handleCollections( data.getVar("BBFILE_COLLECTIONS", cooker.configuration.data, 1) )
-
- (filelist, masked) = cooker.collect_bbfiles()
- cooker.parse_bbfiles(filelist, masked, cooker.myProgressCallback)
- cooker.buildDepgraph()
- global parsed
- parsed = True
- print()
-
- def reparse( self, params ):
- """(re)Parse a providee's bb file"""
- bbfile = self._findProvider( params[0] )
- if bbfile is not None:
- print("SHELL: Found bbfile '%s' for '%s'" % ( bbfile, params[0] ))
- self.fileReparse( [ bbfile ] )
- else:
- print("ERROR: Nothing provides '%s'" % params[0])
- reparse.usage = "<providee>"
-
- def getvar( self, params ):
- """Dump the contents of an outer BitBake environment variable"""
- var = params[0]
- value = data.getVar( var, cooker.configuration.data, 1 )
- print(value)
- getvar.usage = "<variable>"
-
- def peek( self, params ):
- """Dump contents of variable defined in providee's metadata"""
- name, var = params
- bbfile = self._findProvider( name )
- if bbfile is not None:
- the_data = cache.Cache.loadDataFull(bbfile, cooker.configuration.data)
- value = the_data.getVar( var, 1 )
- print(value)
- else:
- print("ERROR: Nothing provides '%s'" % name)
- peek.usage = "<providee> <variable>"
-
- def poke( self, params ):
- """Set contents of variable defined in providee's metadata"""
- name, var, value = params
- bbfile = self._findProvider( name )
- if bbfile is not None:
- print("ERROR: Sorry, this functionality is currently broken")
- #d = cooker.pkgdata[bbfile]
- #data.setVar( var, value, d )
-
- # mark the change semi persistant
- #cooker.pkgdata.setDirty(bbfile, d)
- #print "OK"
- else:
- print("ERROR: Nothing provides '%s'" % name)
- poke.usage = "<providee> <variable> <value>"
-
- def print_( self, params ):
- """Dump all files or providers"""
- what = params[0]
- if what == "files":
- self._checkParsed()
- for key in cooker.status.pkg_fn: print(key)
- elif what == "providers":
- self._checkParsed()
- for key in cooker.status.providers: print(key)
- else:
- print("Usage: print %s" % self.print_.usage)
- print_.usage = "<files|providers>"
-
- def python( self, params ):
- """Enter the expert mode - an interactive BitBake Python Interpreter"""
- sys.ps1 = "EXPERT BB>>> "
- sys.ps2 = "EXPERT BB... "
- import code
- interpreter = code.InteractiveConsole( dict( globals() ) )
- interpreter.interact( "SHELL: Expert Mode - BitBake Python %s\nType 'help' for more information, press CTRL-D to switch back to BBSHELL." % sys.version )
-
- def showdata( self, params ):
- """Execute 'showdata' on a providee"""
- cooker.showEnvironment(None, params)
- showdata.usage = "<providee>"
-
- def setVar( self, params ):
- """Set an outer BitBake environment variable"""
- var, value = params
- data.setVar( var, value, cooker.configuration.data )
- print("OK")
- setVar.usage = "<variable> <value>"
-
- def rebuild( self, params ):
- """Clean and rebuild a .bb file or a providee"""
- self.build( params, "clean" )
- self.build( params, "build" )
- rebuild.usage = "<providee>"
-
- def shell( self, params ):
- """Execute a shell command and dump the output"""
- if params != "":
- print(commands.getoutput( " ".join( params ) ))
- shell.usage = "<...>"
-
- def stage( self, params ):
- """Execute 'stage' on a providee"""
- self.build( params, "populate_staging" )
- stage.usage = "<providee>"
-
- def status( self, params ):
- """<just for testing>"""
- print("-" * 78)
- print("building list = '%s'" % cooker.building_list)
- print("build path = '%s'" % cooker.build_path)
- print("consider_msgs_cache = '%s'" % cooker.consider_msgs_cache)
- print("build stats = '%s'" % cooker.stats)
- if last_exception is not None: print("last_exception = '%s'" % repr( last_exception.args ))
- print("memory output contents = '%s'" % self._shell.myout._buffer)
-
- def test( self, params ):
- """<just for testing>"""
- print("testCommand called with '%s'" % params)
-
- def unpack( self, params ):
- """Execute 'unpack' on a providee"""
- self.build( params, "unpack" )
- unpack.usage = "<providee>"
-
- def which( self, params ):
- """Computes the providers for a given providee"""
- # Need to use taskData for this information
- item = params[0]
-
- self._checkParsed()
-
- preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 )
- if not preferred: preferred = item
-
- try:
- lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status)
- except KeyError:
- lv, lf, pv, pf = (None,)*4
-
- try:
- providers = cooker.status.providers[item]
- except KeyError:
- print("SHELL: ERROR: Nothing provides", preferred)
- else:
- for provider in providers:
- if provider == pf: provider = " (***) %s" % provider
- else: provider = " %s" % provider
- print(provider)
- which.usage = "<providee>"
-
-##########################################################################
-# Common helper functions
-##########################################################################
-
-def completeFilePath( bbfile ):
- """Get the complete bbfile path"""
- if not cooker.status: return bbfile
- if not cooker.status.pkg_fn: return bbfile
- for key in cooker.status.pkg_fn:
- if key.endswith( bbfile ):
- return key
- return bbfile
-
-def sendToPastebin( desc, content ):
-    """Send content to the pastebin at http://rafb.net/paste"""
- mydata = {}
- mydata["lang"] = "Plain Text"
- mydata["desc"] = desc
- mydata["cvt_tabs"] = "No"
- mydata["nick"] = "%s@%s" % ( os.environ.get( "USER", "unknown" ), socket.gethostname() or "unknown" )
- mydata["text"] = content
- params = urllib.urlencode( mydata )
- headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
-
- host = "rafb.net"
- conn = httplib.HTTPConnection( "%s:80" % host )
- conn.request("POST", "/paste/paste.php", params, headers )
-
- response = conn.getresponse()
- conn.close()
-
- if response.status == 302:
- location = response.getheader( "location" ) or "unknown"
- print("SHELL: Pasted to http://%s%s" % ( host, location ))
- else:
- print("ERROR: %s %s" % ( response.status, response.reason ))
-
-def completer( text, state ):
- """Return a possible readline completion"""
- debugOut( "completer called with text='%s', state='%d'" % ( text, state ) )
-
- if state == 0:
- line = readline.get_line_buffer()
- if " " in line:
- line = line.split()
- # we are in second (or more) argument
- if line[0] in cmds and hasattr( cmds[line[0]][0], "usage" ): # known command and usage
- u = getattr( cmds[line[0]][0], "usage" ).split()[0]
- if u == "<variable>":
- allmatches = cooker.configuration.data.keys()
- elif u == "<bbfile>":
- if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
- else: allmatches = [ x.split("/")[-1] for x in cooker.status.pkg_fn ]
- elif u == "<providee>":
- if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
- else: allmatches = cooker.status.providers.iterkeys()
- else: allmatches = [ "(No tab completion available for this command)" ]
- else: allmatches = [ "(No tab completion available for this command)" ]
- else:
- # we are in first argument
- allmatches = cmds.iterkeys()
-
- completer.matches = [ x for x in allmatches if x[:len(text)] == text ]
- #print "completer.matches = '%s'" % completer.matches
- if len( completer.matches ) > state:
- return completer.matches[state]
- else:
- return None
-
-def debugOut( text ):
- if debug:
- sys.stderr.write( "( %s )\n" % text )
-
-def columnize( alist, width = 80 ):
- """
- A word-wrap function that preserves existing line breaks
- and most spaces in the text. Expects that existing line
- breaks are posix newlines (\n).
- """
- return reduce(lambda line, word, width=width: '%s%s%s' %
- (line,
- ' \n'[(len(line[line.rfind('\n')+1:])
- + len(word.split('\n', 1)[0]
- ) >= width)],
- word),
- alist
- )
-
-def globfilter( names, pattern ):
- return fnmatch.filter( names, pattern )
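
The reduce() expression in columnize() above is dense; the following is a minimal, behaviour-equivalent sketch (illustrative only, not part of the original file, and assuming the words themselves contain no newlines):

    # Plain-loop equivalent of columnize(): start a new output line whenever
    # appending the next word would reach the width limit.
    def columnize_loop(alist, width=80):
        line = alist[0]
        for word in alist[1:]:
            cur = len(line) - line.rfind('\n') - 1   # length of current line
            sep = '\n' if cur + len(word) >= width else ' '
            line = line + sep + word
        return line

    words = ("lorem ipsum dolor sit amet " * 6).split()
    print(columnize_loop(words, width=30))
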
-
-##########################################################################
-# Class MemoryOutput
-##########################################################################
-
-class MemoryOutput:
- """File-like output class buffering the output of the last 10 commands"""
- def __init__( self, delegate ):
- self.delegate = delegate
- self._buffer = []
- self.text = []
- self._command = None
-
- def startCommand( self, command ):
- self._command = command
- self.text = []
- def endCommand( self ):
- if self._command is not None:
- if len( self._buffer ) == 10: del self._buffer[0]
- self._buffer.append( ( self._command, self.text ) )
- def removeLast( self ):
- if self._buffer:
- del self._buffer[ len( self._buffer ) - 1 ]
- self.text = []
- self._command = None
- def lastBuffer( self ):
- if self._buffer:
- return self._buffer[ len( self._buffer ) -1 ][1]
- def bufferedCommands( self ):
- return [ cmd for cmd, output in self._buffer ]
- def buffer( self, i ):
- if i < len( self._buffer ):
- return "BB>> %s\n%s" % ( self._buffer[i][0], "".join( self._buffer[i][1] ) )
-        else: return "ERROR: Invalid buffer number. Buffer needs to be in the range 0..%d" % ( len( self._buffer ) - 1 )
- def write( self, text ):
- if self._command is not None and text != "BB>> ": self.text.append( text )
- if self.delegate is not None: self.delegate.write( text )
- def flush( self ):
- return self.delegate.flush()
- def fileno( self ):
- return self.delegate.fileno()
- def isatty( self ):
- return self.delegate.isatty()
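
A short usage sketch (illustrative only, assuming the MemoryOutput class above) of how the shell tees stdout so that 'buffer <n>' can replay a command's output later:

    import sys

    myout = MemoryOutput(sys.stdout)     # delegate = the real terminal
    sys.stdout = myout
    myout.startCommand("demo")
    print("hello")                       # reaches the terminal AND the buffer
    myout.endCommand()
    sys.stdout = myout.delegate
    print(myout.buffer(0))               # -> "BB>> demo\nhello\n"
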
-
-##########################################################################
-# Class BitBakeShell
-##########################################################################
-
-class BitBakeShell:
-
- def __init__( self ):
- """Register commands and set up readline"""
- self.commandQ = Queue.Queue()
- self.commands = BitBakeShellCommands( self )
- self.myout = MemoryOutput( sys.stdout )
- self.historyfilename = os.path.expanduser( "~/.bbsh_history" )
- self.startupfilename = os.path.expanduser( "~/.bbsh_startup" )
-
- readline.set_completer( completer )
- readline.set_completer_delims( " " )
- readline.parse_and_bind("tab: complete")
-
- try:
- readline.read_history_file( self.historyfilename )
- except IOError:
- pass # It doesn't exist yet.
-
- print(__credits__)
-
- def cleanup( self ):
- """Write readline history and clean up resources"""
- debugOut( "writing command history" )
- try:
- readline.write_history_file( self.historyfilename )
- except:
- print("SHELL: Unable to save command history")
-
- def registerCommand( self, command, function, numparams = 0, usage = "", helptext = "" ):
- """Register a command"""
- if usage == "": usage = command
- if helptext == "": helptext = function.__doc__ or "<not yet documented>"
- cmds[command] = ( function, numparams, usage, helptext )
-
- def processCommand( self, command, params ):
- """Process a command. Check number of params and print a usage string, if appropriate"""
- debugOut( "processing command '%s'..." % command )
- try:
- function, numparams, usage, helptext = cmds[command]
- except KeyError:
- print("SHELL: ERROR: '%s' command is not a valid command." % command)
- self.myout.removeLast()
- else:
-            if numparams != -1 and len( params ) != numparams:
- print("Usage: '%s'" % usage)
- return
-
- result = function( self.commands, params )
- debugOut( "result was '%s'" % result )
-
- def processStartupFile( self ):
- """Read and execute all commands found in $HOME/.bbsh_startup"""
- if os.path.exists( self.startupfilename ):
- startupfile = open( self.startupfilename, "r" )
- for cmdline in startupfile:
- debugOut( "processing startup line '%s'" % cmdline )
- if not cmdline:
- continue
- if "|" in cmdline:
- print("ERROR: '|' in startup file is not allowed. Ignoring line")
- continue
- self.commandQ.put( cmdline.strip() )
-
- def main( self ):
- """The main command loop"""
- while not leave_mainloop:
- try:
- if self.commandQ.empty():
- sys.stdout = self.myout.delegate
- cmdline = raw_input( "BB>> " )
- sys.stdout = self.myout
- else:
- cmdline = self.commandQ.get()
- if cmdline:
- allCommands = cmdline.split( ';' )
- for command in allCommands:
- pipecmd = None
- #
- # special case for expert mode
- if command == 'python':
- sys.stdout = self.myout.delegate
- self.processCommand( command, "" )
- sys.stdout = self.myout
- else:
- self.myout.startCommand( command )
- if '|' in command: # disable output
- command, pipecmd = command.split( '|' )
- delegate = self.myout.delegate
- self.myout.delegate = None
- tokens = shlex.split( command, True )
- self.processCommand( tokens[0], tokens[1:] or "" )
- self.myout.endCommand()
- if pipecmd is not None: # restore output
- self.myout.delegate = delegate
-
- pipe = popen2.Popen4( pipecmd )
- pipe.tochild.write( "\n".join( self.myout.lastBuffer() ) )
- pipe.tochild.close()
- sys.stdout.write( pipe.fromchild.read() )
- #
- except EOFError:
- print()
- return
- except KeyboardInterrupt:
- print()
-
-##########################################################################
-# Start function - called from the BitBake command line utility
-##########################################################################
-
-def start( aCooker ):
- global cooker
- cooker = aCooker
- bbshell = BitBakeShell()
- bbshell.processStartupFile()
- bbshell.main()
- bbshell.cleanup()
-
-if __name__ == "__main__":
- print("SHELL: Sorry, this program should only be called by BitBake.")
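
The shell's command table is built by introspection rather than by hand: BitBakeShellCommands.__init__ scans the class dictionary, strips a trailing underscore used to dodge Python keywords ('exit', 'print'), and derives the expected argument count from each method's 'usage' attribute. A self-contained sketch of the same pattern (hypothetical Commands class, standard library only):

    class Commands:
        def exit_(self, params):
            """Leave the shell"""
            raise SystemExit
        exit_.usage = ""

        def echo(self, params):
            """Print the parameters"""
            print(" ".join(params))
        echo.usage = "<...>"

    registry = {}
    for attr in Commands.__dict__:
        if attr.startswith("_"):
            continue
        # trailing '_' avoids clashing with Python keywords
        command = (attr[:-1] if attr.endswith("_") else attr).lower()
        method = getattr(Commands, attr)
        usage = getattr(method, "usage", "")
        numargs = -1 if usage == "<...>" else len(usage.split())
        registry[command] = (method, numargs, method.__doc__)

    print(sorted(registry))    # ['echo', 'exit']
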
diff --git a/yocto-poky/bitbake/lib/bb/siggen.py b/yocto-poky/bitbake/lib/bb/siggen.py
deleted file mode 100644
index 88fc0f1d5..000000000
--- a/yocto-poky/bitbake/lib/bb/siggen.py
+++ /dev/null
@@ -1,601 +0,0 @@
-import hashlib
-import logging
-import os
-import re
-import tempfile
-import bb.data
-from bb.checksum import FileChecksumCache
-
-logger = logging.getLogger('BitBake.SigGen')
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
-
-def init(d):
- siggens = [obj for obj in globals().itervalues()
- if type(obj) is type and issubclass(obj, SignatureGenerator)]
-
- desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop"
- for sg in siggens:
- if desired == sg.name:
- return sg(d)
- else:
- logger.error("Invalid signature generator '%s', using default 'noop'\n"
- "Available generators: %s", desired,
- ', '.join(obj.name for obj in siggens))
- return SignatureGenerator(d)
-
-class SignatureGenerator(object):
-    """
-    Noop signature generator: every task hashes to the constant "0".
-    """
- name = "noop"
-
- def __init__(self, data):
- self.taskhash = {}
- self.runtaskdeps = {}
- self.file_checksum_values = {}
- self.taints = {}
-
-    def finalise(self, fn, d, variant):
- return
-
- def get_taskhash(self, fn, task, deps, dataCache):
- return "0"
-
- def writeout_file_checksum_cache(self):
- """Write/update the file checksum cache onto disk"""
- return
-
- def stampfile(self, stampbase, file_name, taskname, extrainfo):
- return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
-
- def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
- return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
-
- def dump_sigtask(self, fn, task, stampbase, runtime):
- return
-
- def invalidate_task(self, task, d, fn):
- bb.build.del_stamp(task, d, fn)
-
- def dump_sigs(self, dataCache, options):
- return
-
- def get_taskdata(self):
- return (self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints)
-
- def set_taskdata(self, data):
- self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints = data
-
-
-class SignatureGeneratorBasic(SignatureGenerator):
-    """
-    Basic signature generator: hashes each task together with the values of
-    the variables it (transitively) depends on.
-    """
- name = "basic"
-
- def __init__(self, data):
- self.basehash = {}
- self.taskhash = {}
- self.taskdeps = {}
- self.runtaskdeps = {}
- self.file_checksum_values = {}
- self.taints = {}
- self.gendeps = {}
- self.lookupcache = {}
-        self.pkgnameextract = re.compile(r"(?P<fn>.*)\..*")
- self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST", True) or "").split())
- self.taskwhitelist = None
- self.init_rundepcheck(data)
- checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE", True)
- if checksum_cache_file:
- self.checksum_cache = FileChecksumCache()
- self.checksum_cache.init_cache(data, checksum_cache_file)
- else:
- self.checksum_cache = None
-
- def init_rundepcheck(self, data):
- self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None
- if self.taskwhitelist:
- self.twl = re.compile(self.taskwhitelist)
- else:
- self.twl = None
-
- def _build_data(self, fn, d):
-
- tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d)
-
- taskdeps = {}
- basehash = {}
-
- for task in tasklist:
- data = lookupcache[task]
-
- if data is None:
- bb.error("Task %s from %s seems to be empty?!" % (task, fn))
- data = ''
-
- gendeps[task] -= self.basewhitelist
- newdeps = gendeps[task]
- seen = set()
- while newdeps:
- nextdeps = newdeps
- seen |= nextdeps
- newdeps = set()
- for dep in nextdeps:
- if dep in self.basewhitelist:
- continue
- gendeps[dep] -= self.basewhitelist
- newdeps |= gendeps[dep]
- newdeps -= seen
-
- alldeps = sorted(seen)
- for dep in alldeps:
- data = data + dep
- var = lookupcache[dep]
- if var is not None:
- data = data + str(var)
- self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest()
- taskdeps[task] = alldeps
-
- self.taskdeps[fn] = taskdeps
- self.gendeps[fn] = gendeps
- self.lookupcache[fn] = lookupcache
-
- return taskdeps
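
The while-loop in _build_data() above computes the transitive closure of each task's variable dependencies, pruned by the whitelist. A standalone sketch of the same fixed-point loop (not from the original module; toy gendeps mapping assumed):

    # Transitive closure over a {variable: set(dependencies)} mapping,
    # skipping whitelisted names.
    def closure(start, gendeps, whitelist):
        seen = set()
        newdeps = set(start) - whitelist
        while newdeps:
            nextdeps = newdeps
            seen |= nextdeps
            newdeps = set()
            for dep in nextdeps:
                newdeps |= gendeps.get(dep, set()) - whitelist
            newdeps -= seen
        return sorted(seen)

    gendeps = {"A": {"B"}, "B": {"C", "PATH"}, "C": set()}
    print(closure({"A"}, gendeps, whitelist={"PATH"}))   # ['A', 'B', 'C']
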
-
- def finalise(self, fn, d, variant):
-
- if variant:
- fn = "virtual:" + variant + ":" + fn
-
- try:
- taskdeps = self._build_data(fn, d)
- except:
- bb.warn("Error during finalise of %s" % fn)
- raise
-
- #Slow but can be useful for debugging mismatched basehashes
- #for task in self.taskdeps[fn]:
- # self.dump_sigtask(fn, task, d.getVar("STAMP", True), False)
-
- for task in taskdeps:
- d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task])
-
- def rundep_check(self, fn, recipename, task, dep, depname, dataCache):
- # Return True if we should keep the dependency, False to drop it
- # We only manipulate the dependencies for packages not in the whitelist
- if self.twl and not self.twl.search(recipename):
- # then process the actual dependencies
- if self.twl.search(depname):
- return False
- return True
-
- def read_taint(self, fn, task, stampbase):
- taint = None
- try:
- with open(stampbase + '.' + task + '.taint', 'r') as taintf:
- taint = taintf.read()
- except IOError:
- pass
- return taint
-
- def get_taskhash(self, fn, task, deps, dataCache):
- k = fn + "." + task
- data = dataCache.basetaskhash[k]
- self.runtaskdeps[k] = []
- self.file_checksum_values[k] = []
- recipename = dataCache.pkg_fn[fn]
-
- for dep in sorted(deps, key=clean_basepath):
- depname = dataCache.pkg_fn[self.pkgnameextract.search(dep).group('fn')]
- if not self.rundep_check(fn, recipename, task, dep, depname, dataCache):
- continue
- if dep not in self.taskhash:
-                bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
- data = data + self.taskhash[dep]
- self.runtaskdeps[k].append(dep)
-
- if task in dataCache.file_checksums[fn]:
- if self.checksum_cache:
- checksums = self.checksum_cache.get_checksums(dataCache.file_checksums[fn][task], recipename)
- else:
- checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
- for (f,cs) in checksums:
- self.file_checksum_values[k].append((f,cs))
- if cs:
- data = data + cs
-
- taskdep = dataCache.task_deps[fn]
- if 'nostamp' in taskdep and task in taskdep['nostamp']:
- # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
- import uuid
- taint = str(uuid.uuid4())
- data = data + taint
- self.taints[k] = "nostamp:" + taint
-
- taint = self.read_taint(fn, task, dataCache.stamp[fn])
- if taint:
- data = data + taint
- self.taints[k] = taint
- logger.warn("%s is tainted from a forced run" % k)
-
- h = hashlib.md5(data).hexdigest()
- self.taskhash[k] = h
- #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
- return h
-
- def writeout_file_checksum_cache(self):
- """Write/update the file checksum cache onto disk"""
- if self.checksum_cache:
- self.checksum_cache.save_extras()
- self.checksum_cache.save_merge()
- else:
- bb.fetch2.fetcher_parse_save()
- bb.fetch2.fetcher_parse_done()
-
- def dump_sigtask(self, fn, task, stampbase, runtime):
-
- k = fn + "." + task
- referencestamp = stampbase
- if isinstance(runtime, str) and runtime.startswith("customfile"):
- sigfile = stampbase
- referencestamp = runtime[11:]
- elif runtime and k in self.taskhash:
- sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[k]
- else:
- sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[k]
-
- bb.utils.mkdirhier(os.path.dirname(sigfile))
-
- data = {}
- data['task'] = task
- data['basewhitelist'] = self.basewhitelist
- data['taskwhitelist'] = self.taskwhitelist
- data['taskdeps'] = self.taskdeps[fn][task]
- data['basehash'] = self.basehash[k]
- data['gendeps'] = {}
- data['varvals'] = {}
- data['varvals'][task] = self.lookupcache[fn][task]
- for dep in self.taskdeps[fn][task]:
- if dep in self.basewhitelist:
- continue
- data['gendeps'][dep] = self.gendeps[fn][dep]
- data['varvals'][dep] = self.lookupcache[fn][dep]
-
- if runtime and k in self.taskhash:
- data['runtaskdeps'] = self.runtaskdeps[k]
- data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[k]]
- data['runtaskhashes'] = {}
- for dep in data['runtaskdeps']:
- data['runtaskhashes'][dep] = self.taskhash[dep]
- data['taskhash'] = self.taskhash[k]
-
- taint = self.read_taint(fn, task, referencestamp)
- if taint:
- data['taint'] = taint
-
- if runtime and k in self.taints:
- if 'nostamp:' in self.taints[k]:
- data['taint'] = self.taints[k]
-
- fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
- try:
- with os.fdopen(fd, "wb") as stream:
-                pickle.dump(data, stream, -1)
- stream.flush()
- os.chmod(tmpfile, 0664)
- os.rename(tmpfile, sigfile)
- except (OSError, IOError) as err:
- try:
- os.unlink(tmpfile)
- except OSError:
- pass
- raise err
-
- computed_basehash = calc_basehash(data)
- if computed_basehash != self.basehash[k]:
-            bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[k], k))
- if k in self.taskhash:
- computed_taskhash = calc_taskhash(data)
- if computed_taskhash != self.taskhash[k]:
-                bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[k], k))
-
-
- def dump_sigs(self, dataCache, options):
- for fn in self.taskdeps:
- for task in self.taskdeps[fn]:
- k = fn + "." + task
- if k not in self.taskhash:
- continue
- if dataCache.basetaskhash[k] != self.basehash[k]:
- bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % k)
- bb.error("The mismatched hashes were %s and %s" % (dataCache.basetaskhash[k], self.basehash[k]))
- self.dump_sigtask(fn, task, dataCache.stamp[fn], True)
-
-class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
- name = "basichash"
-
- def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False):
- if taskname != "do_setscene" and taskname.endswith("_setscene"):
- k = fn + "." + taskname[:-9]
- else:
- k = fn + "." + taskname
- if clean:
- h = "*"
- elif k in self.taskhash:
- h = self.taskhash[k]
- else:
- # If k is not in basehash, then error
- h = self.basehash[k]
- return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
-
- def stampcleanmask(self, stampbase, fn, taskname, extrainfo):
- return self.stampfile(stampbase, fn, taskname, extrainfo, clean=True)
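
stampfile() above embeds the task hash in the stamp file name, so a changed hash invalidates the stamp by construction, while stampcleanmask() substitutes '*' to build a glob matching the stamp regardless of which hash it was written with. An illustrative expansion (all values hypothetical; the real stampbase comes from the STAMP variable):

    stampbase = "tmp/stamps/i586-poky-linux/zlib/1.2.8-r0"
    taskname, extrainfo = "do_compile", ""
    h = "0123456789abcdef0123456789abcdef"          # taskhash[k]

    stamp = ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
    mask  = ("%s.%s.%s.%s" % (stampbase, taskname, "*", extrainfo)).rstrip('.')
    # stamp: .../1.2.8-r0.do_compile.0123456789abcdef0123456789abcdef
    # mask:  .../1.2.8-r0.do_compile.*
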
-
- def invalidate_task(self, task, d, fn):
- bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task))
- bb.build.write_taint(task, d, fn)
-
-def dump_this_task(outfile, d):
- import bb.parse
- fn = d.getVar("BB_FILENAME", True)
- task = "do_" + d.getVar("BB_CURRENTTASK", True)
- referencestamp = bb.build.stamp_internal(task, d, None, True)
- bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp)
-
-def clean_basepath(a):
- b = a.rsplit("/", 2)[1] + a.rsplit("/", 2)[2]
- if a.startswith("virtual:"):
- b = b + ":" + a.rsplit(":", 1)[0]
- return b
-
-def clean_basepaths(a):
- b = {}
- for x in a:
- b[clean_basepath(x)] = a[x]
- return b
-
-def clean_basepaths_list(a):
- b = []
- for x in a:
- b.append(clean_basepath(x))
- return b
-
-def compare_sigfiles(a, b, recursecb = None):
- output = []
-
- p1 = pickle.Unpickler(open(a, "rb"))
- a_data = p1.load()
- p2 = pickle.Unpickler(open(b, "rb"))
- b_data = p2.load()
-
- def dict_diff(a, b, whitelist=set()):
- sa = set(a.keys())
- sb = set(b.keys())
- common = sa & sb
- changed = set()
- for i in common:
- if a[i] != b[i] and i not in whitelist:
- changed.add(i)
- added = sb - sa
- removed = sa - sb
- return changed, added, removed
-
- def file_checksums_diff(a, b):
- from collections import Counter
- # Handle old siginfo format
- if isinstance(a, dict):
- a = [(os.path.basename(f), cs) for f, cs in a.items()]
- if isinstance(b, dict):
- b = [(os.path.basename(f), cs) for f, cs in b.items()]
- # Compare lists, ensuring we can handle duplicate filenames if they exist
- removedcount = Counter(a)
- removedcount.subtract(b)
- addedcount = Counter(b)
- addedcount.subtract(a)
- added = []
- for x in b:
- if addedcount[x] > 0:
- addedcount[x] -= 1
- added.append(x)
- removed = []
- changed = []
- for x in a:
- if removedcount[x] > 0:
- removedcount[x] -= 1
- for y in added:
- if y[0] == x[0]:
- changed.append((x[0], x[1], y[1]))
- added.remove(y)
- break
- else:
- removed.append(x)
- added = [x[0] for x in added]
- removed = [x[0] for x in removed]
- return changed, added, removed
-
- if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
- output.append("basewhitelist changed from '%s' to '%s'" % (a_data['basewhitelist'], b_data['basewhitelist']))
- if a_data['basewhitelist'] and b_data['basewhitelist']:
- output.append("changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist']))
-
- if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
- output.append("taskwhitelist changed from '%s' to '%s'" % (a_data['taskwhitelist'], b_data['taskwhitelist']))
- if a_data['taskwhitelist'] and b_data['taskwhitelist']:
- output.append("changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist']))
-
- if a_data['taskdeps'] != b_data['taskdeps']:
- output.append("Task dependencies changed from:\n%s\nto:\n%s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))
-
- if a_data['basehash'] != b_data['basehash']:
- output.append("basehash changed from %s to %s" % (a_data['basehash'], b_data['basehash']))
-
- changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basewhitelist'] & b_data['basewhitelist'])
- if changed:
- for dep in changed:
- output.append("List of dependencies for variable %s changed from '%s' to '%s'" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
- if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
- output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
- if added:
- for dep in added:
- output.append("Dependency on variable %s was added" % (dep))
- if removed:
- for dep in removed:
-            output.append("Dependency on variable %s was removed" % (dep))
-
-
- changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
- if changed:
- for dep in changed:
- output.append("Variable %s value changed from '%s' to '%s'" % (dep, a_data['varvals'][dep], b_data['varvals'][dep]))
-
- changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
- if changed:
- for f, old, new in changed:
- output.append("Checksum for file %s changed from %s to %s" % (f, old, new))
- if added:
- for f in added:
- output.append("Dependency on checksum of file %s was added" % (f))
- if removed:
- for f in removed:
- output.append("Dependency on checksum of file %s was removed" % (f))
-
-
- if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
- changed = ["Number of task dependencies changed"]
- else:
- changed = []
- for idx, task in enumerate(a_data['runtaskdeps']):
- a = a_data['runtaskdeps'][idx]
- b = b_data['runtaskdeps'][idx]
- if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
- changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))
-
- if changed:
- output.append("runtaskdeps changed from %s to %s" % (clean_basepaths_list(a_data['runtaskdeps']), clean_basepaths_list(b_data['runtaskdeps'])))
- output.append("\n".join(changed))
-
-
- if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
- a = a_data['runtaskhashes']
- b = b_data['runtaskhashes']
- changed, added, removed = dict_diff(a, b)
- if added:
- for dep in added:
- bdep_found = False
- if removed:
- for bdep in removed:
- if b[dep] == a[bdep]:
- #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
- bdep_found = True
- if not bdep_found:
- output.append("Dependency on task %s was added with hash %s" % (clean_basepath(dep), b[dep]))
- if removed:
- for dep in removed:
- adep_found = False
- if added:
- for adep in added:
- if b[adep] == a[dep]:
- #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
- adep_found = True
- if not adep_found:
- output.append("Dependency on task %s was removed with hash %s" % (clean_basepath(dep), a[dep]))
- if changed:
- for dep in changed:
- output.append("Hash for dependent task %s changed from %s to %s" % (clean_basepath(dep), a[dep], b[dep]))
-                if callable(recursecb):
-                    # If a dependent hash changed, might as well print the line above and then defer to the changes in
-                    # that hash since in all likelihood, they're the same changes this task also saw.
-                    recout = recursecb(dep, a[dep], b[dep])
-                    if recout:
-                        output = [output[-1]] + recout
-
- a_taint = a_data.get('taint', None)
- b_taint = b_data.get('taint', None)
- if a_taint != b_taint:
- output.append("Taint (by forced/invalidated task) changed from %s to %s" % (a_taint, b_taint))
-
- return output
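
compare_sigfiles() is what drives tools such as bitbake-diffsigs; typical use (paths hypothetical) is simply:

    for line in compare_sigfiles("zlib.do_compile.sigdata.old",
                                 "zlib.do_compile.sigdata.new"):
        print(line)
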
-
-
-def calc_basehash(sigdata):
- task = sigdata['task']
- basedata = sigdata['varvals'][task]
-
- if basedata is None:
- basedata = ''
-
- alldeps = sigdata['taskdeps']
- for dep in alldeps:
- basedata = basedata + dep
- val = sigdata['varvals'][dep]
- if val is not None:
- basedata = basedata + str(val)
-
- return hashlib.md5(basedata).hexdigest()
-
-def calc_taskhash(sigdata):
- data = sigdata['basehash']
-
- for dep in sigdata['runtaskdeps']:
- data = data + sigdata['runtaskhashes'][dep]
-
- for c in sigdata['file_checksum_values']:
- data = data + c[1]
-
- if 'taint' in sigdata:
- if 'nostamp:' in sigdata['taint']:
- data = data + sigdata['taint'][8:]
- else:
- data = data + sigdata['taint']
-
- return hashlib.md5(data).hexdigest()
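
A worked toy example (hypothetical values, Python 2 spelling to match the module) of how the two helpers above rebuild hashes from a dumped signature dictionary. With no runtime dependencies, file checksums or taint, the task hash degenerates to the md5 of the base hash itself:

    import hashlib

    sigdata = {
        'task': 'do_compile',
        'taskdeps': ['CC', 'CFLAGS'],
        'varvals': {'do_compile': '${CC} ${CFLAGS} -o foo foo.c',
                    'CC': 'gcc', 'CFLAGS': '-O2'},
        'runtaskdeps': [], 'runtaskhashes': {}, 'file_checksum_values': [],
    }

    # calc_basehash(): concatenate the task body with each dependency's
    # name and value, then md5 the result.
    basedata = sigdata['varvals'][sigdata['task']]
    for dep in sigdata['taskdeps']:
        basedata = basedata + dep + str(sigdata['varvals'][dep])
    sigdata['basehash'] = hashlib.md5(basedata).hexdigest()

    # calc_taskhash(): basehash plus dependency hashes, checksums and taint
    # (all empty here).
    taskhash = hashlib.md5(sigdata['basehash']).hexdigest()
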
-
-
-def dump_sigfile(a):
- output = []
-
- p1 = pickle.Unpickler(open(a, "rb"))
- a_data = p1.load()
-
- output.append("basewhitelist: %s" % (a_data['basewhitelist']))
-
- output.append("taskwhitelist: %s" % (a_data['taskwhitelist']))
-
- output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))
-
- output.append("basehash: %s" % (a_data['basehash']))
-
- for dep in a_data['gendeps']:
- output.append("List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep]))
-
- for dep in a_data['varvals']:
- output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep]))
-
- if 'runtaskdeps' in a_data:
- output.append("Tasks this task depends on: %s" % (a_data['runtaskdeps']))
-
- if 'file_checksum_values' in a_data:
- output.append("This task depends on the checksums of files: %s" % (a_data['file_checksum_values']))
-
- if 'runtaskhashes' in a_data:
- for dep in a_data['runtaskhashes']:
- output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))
-
- if 'taint' in a_data:
- output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])
-
- if 'task' in a_data:
- computed_basehash = calc_basehash(a_data)
- output.append("Computed base hash is %s and from file %s" % (computed_basehash, a_data['basehash']))
- else:
- output.append("Unable to compute base hash")
-
- computed_taskhash = calc_taskhash(a_data)
- output.append("Computed task hash is %s" % computed_taskhash)
-
- return output
diff --git a/yocto-poky/bitbake/lib/bb/taskdata.py b/yocto-poky/bitbake/lib/bb/taskdata.py
deleted file mode 100644
index 9ae52d77d..000000000
--- a/yocto-poky/bitbake/lib/bb/taskdata.py
+++ /dev/null
@@ -1,690 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'TaskData' implementation
-
-Task data collection and handling
-
-"""
-
-# Copyright (C) 2006 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import logging
-import re
-import bb
-
-logger = logging.getLogger("BitBake.TaskData")
-
-def re_match_strings(target, strings):
- """
-    Return True if the string 'target' equals any of the given strings,
-    or matches any of them when the string is treated as a regular expression
- """
- return any(name == target or re.match(name, target)
- for name in strings)
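
For example (assuming the function above), entries may be literal names or regular expressions, which is how the ignored_dependencies taken from ASSUME_PROVIDED are matched:

    ignored = ["virtual/libc", "gcc.*"]
    print(re_match_strings("virtual/libc", ignored))   # True (exact match)
    print(re_match_strings("gcc-cross", ignored))      # True (regex match)
    print(re_match_strings("zlib", ignored))           # False
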
-
-class TaskData:
- """
- BitBake Task Data implementation
- """
- def __init__(self, abort = True, tryaltconfigs = False, skiplist = None, allowincomplete = False):
- self.build_names_index = []
- self.run_names_index = []
- self.fn_index = []
-
- self.build_targets = {}
- self.run_targets = {}
-
- self.external_targets = []
-
- self.tasks_fnid = []
- self.tasks_name = []
- self.tasks_tdepends = []
- self.tasks_idepends = []
- self.tasks_irdepends = []
- # Cache to speed up task ID lookups
- self.tasks_lookup = {}
-
- self.depids = {}
- self.rdepids = {}
-
- self.consider_msgs_cache = []
-
- self.failed_deps = []
- self.failed_rdeps = []
- self.failed_fnids = []
-
- self.abort = abort
- self.tryaltconfigs = tryaltconfigs
- self.allowincomplete = allowincomplete
-
- self.skiplist = skiplist
-
- def getbuild_id(self, name):
- """
- Return an ID number for the build target name.
- If it doesn't exist, create one.
- """
- if not name in self.build_names_index:
- self.build_names_index.append(name)
- return len(self.build_names_index) - 1
-
- return self.build_names_index.index(name)
-
- def getrun_id(self, name):
- """
- Return an ID number for the run target name.
- If it doesn't exist, create one.
- """
- if not name in self.run_names_index:
- self.run_names_index.append(name)
- return len(self.run_names_index) - 1
-
- return self.run_names_index.index(name)
-
- def getfn_id(self, name):
- """
- Return an ID number for the filename.
- If it doesn't exist, create one.
- """
- if not name in self.fn_index:
- self.fn_index.append(name)
- return len(self.fn_index) - 1
-
- return self.fn_index.index(name)
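
The three getters above intern names into append-only lists, with the list index serving as the ID; the lookup itself is O(n) via list.index. A compact sketch of the pattern (dict-backed for O(1) lookup, otherwise equivalent; not from the original module):

    names, ids = [], {}

    def intern_name(name):
        if name not in ids:
            ids[name] = len(names)
            names.append(name)
        return ids[name]

    assert intern_name("zlib") == 0
    assert intern_name("openssl") == 1
    assert intern_name("zlib") == 0            # existing names keep their ID
    assert names[intern_name("openssl")] == "openssl"
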
-
- def gettask_ids(self, fnid):
- """
- Return an array of the ID numbers matching a given fnid.
- """
- ids = []
- if fnid in self.tasks_lookup:
- for task in self.tasks_lookup[fnid]:
- ids.append(self.tasks_lookup[fnid][task])
- return ids
-
- def gettask_id_fromfnid(self, fnid, task):
- """
- Return an ID number for the task matching fnid and task.
- """
- if fnid in self.tasks_lookup:
- if task in self.tasks_lookup[fnid]:
- return self.tasks_lookup[fnid][task]
-
- return None
-
- def gettask_id(self, fn, task, create = True):
- """
- Return an ID number for the task matching fn and task.
- If it doesn't exist, create one by default.
- Optionally return None instead.
- """
- fnid = self.getfn_id(fn)
-
- if fnid in self.tasks_lookup:
- if task in self.tasks_lookup[fnid]:
- return self.tasks_lookup[fnid][task]
-
- if not create:
- return None
-
- self.tasks_name.append(task)
- self.tasks_fnid.append(fnid)
- self.tasks_tdepends.append([])
- self.tasks_idepends.append([])
- self.tasks_irdepends.append([])
-
- listid = len(self.tasks_name) - 1
-
- if fnid not in self.tasks_lookup:
- self.tasks_lookup[fnid] = {}
- self.tasks_lookup[fnid][task] = listid
-
- return listid
-
- def add_tasks(self, fn, dataCache):
- """
- Add tasks for a given fn to the database
- """
-
- task_deps = dataCache.task_deps[fn]
-
- fnid = self.getfn_id(fn)
-
- if fnid in self.failed_fnids:
- bb.msg.fatal("TaskData", "Trying to re-add a failed file? Something is broken...")
-
- # Check if we've already seen this fn
- if fnid in self.tasks_fnid:
- return
-
- self.add_extra_deps(fn, dataCache)
-
- for task in task_deps['tasks']:
-
- # Work out task dependencies
- parentids = []
- for dep in task_deps['parents'][task]:
- if dep not in task_deps['tasks']:
-                    bb.debug(2, "Not adding dependency of %s on %s since %s does not exist" % (task, dep, dep))
- continue
- parentid = self.gettask_id(fn, dep)
- parentids.append(parentid)
- taskid = self.gettask_id(fn, task)
- self.tasks_tdepends[taskid].extend(parentids)
-
- # Touch all intertask dependencies
- if 'depends' in task_deps and task in task_deps['depends']:
- ids = []
- for dep in task_deps['depends'][task].split():
- if dep:
- if ":" not in dep:
-                            bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain the ':' character.\nTask 'depends' should be specified in the form 'packagename:task'" % (fn, dep))
- ids.append(((self.getbuild_id(dep.split(":")[0])), dep.split(":")[1]))
- self.tasks_idepends[taskid].extend(ids)
- if 'rdepends' in task_deps and task in task_deps['rdepends']:
- ids = []
- for dep in task_deps['rdepends'][task].split():
- if dep:
- if ":" not in dep:
-                            bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain the ':' character.\nTask 'rdepends' should be specified in the form 'packagename:task'" % (fn, dep))
- ids.append(((self.getrun_id(dep.split(":")[0])), dep.split(":")[1]))
- self.tasks_irdepends[taskid].extend(ids)
-
-
- # Work out build dependencies
- if not fnid in self.depids:
- dependids = {}
- for depend in dataCache.deps[fn]:
- dependids[self.getbuild_id(depend)] = None
- self.depids[fnid] = dependids.keys()
- logger.debug(2, "Added dependencies %s for %s", str(dataCache.deps[fn]), fn)
-
- # Work out runtime dependencies
- if not fnid in self.rdepids:
- rdependids = {}
- rdepends = dataCache.rundeps[fn]
- rrecs = dataCache.runrecs[fn]
- rdependlist = []
- rreclist = []
- for package in rdepends:
- for rdepend in rdepends[package]:
- rdependlist.append(rdepend)
- rdependids[self.getrun_id(rdepend)] = None
- for package in rrecs:
- for rdepend in rrecs[package]:
- rreclist.append(rdepend)
- rdependids[self.getrun_id(rdepend)] = None
- if rdependlist:
- logger.debug(2, "Added runtime dependencies %s for %s", str(rdependlist), fn)
- if rreclist:
- logger.debug(2, "Added runtime recommendations %s for %s", str(rreclist), fn)
- self.rdepids[fnid] = rdependids.keys()
-
- for dep in self.depids[fnid]:
- if dep in self.failed_deps:
- self.fail_fnid(fnid)
- return
- for dep in self.rdepids[fnid]:
- if dep in self.failed_rdeps:
- self.fail_fnid(fnid)
- return
-
- def add_extra_deps(self, fn, dataCache):
- func = dataCache.extradepsfunc.get(fn, None)
- if func:
- bb.providers.buildWorldTargetList(dataCache)
- pn = dataCache.pkg_fn[fn]
- params = {'deps': dataCache.deps[fn],
- 'world_target': dataCache.world_target,
- 'pkg_pn': dataCache.pkg_pn,
- 'self_pn': pn}
- funcname = '_%s_calculate_extra_depends' % pn.replace('-', '_')
- paramlist = ','.join(params.keys())
- func = 'def %s(%s):\n%s\n\n%s(%s)' % (funcname, paramlist, func, funcname, paramlist)
- bb.utils.better_exec(func, params)
-
-
- def have_build_target(self, target):
- """
- Have we a build target matching this name?
- """
- targetid = self.getbuild_id(target)
-
- if targetid in self.build_targets:
- return True
- return False
-
- def have_runtime_target(self, target):
- """
- Have we a runtime target matching this name?
- """
- targetid = self.getrun_id(target)
-
- if targetid in self.run_targets:
- return True
- return False
-
- def add_build_target(self, fn, item):
- """
- Add a build target.
- If already present, append the provider fn to the list
- """
- targetid = self.getbuild_id(item)
- fnid = self.getfn_id(fn)
-
- if targetid in self.build_targets:
- if fnid in self.build_targets[targetid]:
- return
- self.build_targets[targetid].append(fnid)
- return
- self.build_targets[targetid] = [fnid]
-
- def add_runtime_target(self, fn, item):
- """
- Add a runtime target.
- If already present, append the provider fn to the list
- """
- targetid = self.getrun_id(item)
- fnid = self.getfn_id(fn)
-
- if targetid in self.run_targets:
- if fnid in self.run_targets[targetid]:
- return
- self.run_targets[targetid].append(fnid)
- return
- self.run_targets[targetid] = [fnid]
-
- def mark_external_target(self, item):
- """
- Mark a build target as being externally requested
- """
- targetid = self.getbuild_id(item)
-
- if targetid not in self.external_targets:
- self.external_targets.append(targetid)
-
- def get_unresolved_build_targets(self, dataCache):
- """
-        Return a list of build targets whose providers
-        are unknown.
- """
- unresolved = []
- for target in self.build_names_index:
- if re_match_strings(target, dataCache.ignored_dependencies):
- continue
- if self.build_names_index.index(target) in self.failed_deps:
- continue
- if not self.have_build_target(target):
- unresolved.append(target)
- return unresolved
-
- def get_unresolved_run_targets(self, dataCache):
- """
-        Return a list of runtime targets whose providers
-        are unknown.
- """
- unresolved = []
- for target in self.run_names_index:
- if re_match_strings(target, dataCache.ignored_dependencies):
- continue
- if self.run_names_index.index(target) in self.failed_rdeps:
- continue
- if not self.have_runtime_target(target):
- unresolved.append(target)
- return unresolved
-
- def get_provider(self, item):
- """
- Return a list of providers of item
- """
- targetid = self.getbuild_id(item)
-
- return self.build_targets[targetid]
-
- def get_dependees(self, itemid):
- """
- Return a list of targets which depend on item
- """
- dependees = []
- for fnid in self.depids:
- if itemid in self.depids[fnid]:
- dependees.append(fnid)
- return dependees
-
- def get_dependees_str(self, item):
- """
-        Return a list of the targets which depend on item, as user readable strings
- """
- itemid = self.getbuild_id(item)
- dependees = []
- for fnid in self.depids:
- if itemid in self.depids[fnid]:
- dependees.append(self.fn_index[fnid])
- return dependees
-
- def get_rdependees(self, itemid):
- """
- Return a list of targets which depend on runtime item
- """
- dependees = []
- for fnid in self.rdepids:
- if itemid in self.rdepids[fnid]:
- dependees.append(fnid)
- return dependees
-
- def get_rdependees_str(self, item):
- """
-        Return a list of the targets which depend on runtime item, as user readable strings
- """
- itemid = self.getrun_id(item)
- dependees = []
- for fnid in self.rdepids:
- if itemid in self.rdepids[fnid]:
- dependees.append(self.fn_index[fnid])
- return dependees
-
- def get_reasons(self, item, runtime=False):
- """
- Get the reason(s) for an item not being provided, if any
- """
- reasons = []
- if self.skiplist:
- for fn in self.skiplist:
- skipitem = self.skiplist[fn]
- if skipitem.pn == item:
- reasons.append("%s was skipped: %s" % (skipitem.pn, skipitem.skipreason))
- elif runtime and item in skipitem.rprovides:
- reasons.append("%s RPROVIDES %s but was skipped: %s" % (skipitem.pn, item, skipitem.skipreason))
- elif not runtime and item in skipitem.provides:
- reasons.append("%s PROVIDES %s but was skipped: %s" % (skipitem.pn, item, skipitem.skipreason))
- return reasons
-
- def get_close_matches(self, item, provider_list):
- import difflib
- if self.skiplist:
- skipped = []
- for fn in self.skiplist:
- skipped.append(self.skiplist[fn].pn)
- full_list = provider_list + skipped
- else:
- full_list = provider_list
- return difflib.get_close_matches(item, full_list, cutoff=0.7)
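
difflib does the heavy lifting here: candidates scoring below the 0.7 similarity cutoff are dropped, which is what turns a typo into a useful suggestion. For instance:

    import difflib
    print(difflib.get_close_matches("zlibb", ["zlib", "libzip", "glib"],
                                    cutoff=0.7))       # ['zlib']
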
-
- def add_provider(self, cfgData, dataCache, item):
- try:
- self.add_provider_internal(cfgData, dataCache, item)
- except bb.providers.NoProvider:
- if self.abort:
- raise
- self.remove_buildtarget(self.getbuild_id(item))
-
- self.mark_external_target(item)
-
- def add_provider_internal(self, cfgData, dataCache, item):
- """
- Add the providers of item to the task data
-        Mark entries that were specifically added externally, as opposed to
-        dependencies added internally during dependency resolution
- """
-
- if re_match_strings(item, dataCache.ignored_dependencies):
- return
-
- if not item in dataCache.providers:
- close_matches = self.get_close_matches(item, dataCache.providers.keys())
- # Is it in RuntimeProviders ?
- all_p = bb.providers.getRuntimeProviders(dataCache, item)
- for fn in all_p:
- new = dataCache.pkg_fn[fn] + " RPROVIDES " + item
- if new not in close_matches:
- close_matches.append(new)
- bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item), reasons=self.get_reasons(item), close_matches=close_matches), cfgData)
- raise bb.providers.NoProvider(item)
-
- if self.have_build_target(item):
- return
-
- all_p = dataCache.providers[item]
-
- eligible, foundUnique = bb.providers.filterProviders(all_p, item, cfgData, dataCache)
- eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]
-
- if not eligible:
- bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item), reasons=["No eligible PROVIDERs exist for '%s'" % item]), cfgData)
- raise bb.providers.NoProvider(item)
-
-        if len(eligible) > 1 and not foundUnique:
- if item not in self.consider_msgs_cache:
- providers_list = []
- for fn in eligible:
- providers_list.append(dataCache.pkg_fn[fn])
- bb.event.fire(bb.event.MultipleProviders(item, providers_list), cfgData)
- self.consider_msgs_cache.append(item)
-
- for fn in eligible:
- fnid = self.getfn_id(fn)
- if fnid in self.failed_fnids:
- continue
- logger.debug(2, "adding %s to satisfy %s", fn, item)
- self.add_build_target(fn, item)
- self.add_tasks(fn, dataCache)
-
-
- #item = dataCache.pkg_fn[fn]
-
- def add_rprovider(self, cfgData, dataCache, item):
- """
- Add the runtime providers of item to the task data
- (takes item names from RDEPENDS/PACKAGES namespace)
- """
-
- if re_match_strings(item, dataCache.ignored_dependencies):
- return
-
- if self.have_runtime_target(item):
- return
-
- all_p = bb.providers.getRuntimeProviders(dataCache, item)
-
- if not all_p:
- bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item), reasons=self.get_reasons(item, True)), cfgData)
- raise bb.providers.NoRProvider(item)
-
- eligible, numberPreferred = bb.providers.filterProvidersRunTime(all_p, item, cfgData, dataCache)
- eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]
-
- if not eligible:
- bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item), reasons=["No eligible RPROVIDERs exist for '%s'" % item]), cfgData)
- raise bb.providers.NoRProvider(item)
-
- if len(eligible) > 1 and numberPreferred == 0:
- if item not in self.consider_msgs_cache:
- providers_list = []
- for fn in eligible:
- providers_list.append(dataCache.pkg_fn[fn])
- bb.event.fire(bb.event.MultipleProviders(item, providers_list, runtime=True), cfgData)
- self.consider_msgs_cache.append(item)
-
- if numberPreferred > 1:
- if item not in self.consider_msgs_cache:
- providers_list = []
- for fn in eligible:
- providers_list.append(dataCache.pkg_fn[fn])
- bb.event.fire(bb.event.MultipleProviders(item, providers_list, runtime=True), cfgData)
- self.consider_msgs_cache.append(item)
- raise bb.providers.MultipleRProvider(item)
-
- # run through the list until we find one that we can build
- for fn in eligible:
- fnid = self.getfn_id(fn)
- if fnid in self.failed_fnids:
- continue
- logger.debug(2, "adding '%s' to satisfy runtime '%s'", fn, item)
- self.add_runtime_target(fn, item)
- self.add_tasks(fn, dataCache)
-
- def fail_fnid(self, fnid, missing_list=None):
- """
- Mark a file as failed (unbuildable)
- Remove any references from build and runtime provider lists
-
- missing_list, A list of missing requirements for this target
- """
- if fnid in self.failed_fnids:
- return
- if not missing_list:
- missing_list = []
- logger.debug(1, "File '%s' is unbuildable, removing...", self.fn_index[fnid])
- self.failed_fnids.append(fnid)
- for target in self.build_targets:
- if fnid in self.build_targets[target]:
- self.build_targets[target].remove(fnid)
- if len(self.build_targets[target]) == 0:
- self.remove_buildtarget(target, missing_list)
- for target in self.run_targets:
- if fnid in self.run_targets[target]:
- self.run_targets[target].remove(fnid)
- if len(self.run_targets[target]) == 0:
- self.remove_runtarget(target, missing_list)
-
- def remove_buildtarget(self, targetid, missing_list=None):
- """
- Mark a build target as failed (unbuildable)
- Trigger removal of any files that have this as a dependency
- """
- if not missing_list:
- missing_list = [self.build_names_index[targetid]]
- else:
- missing_list = [self.build_names_index[targetid]] + missing_list
- logger.verbose("Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.build_names_index[targetid], missing_list)
- self.failed_deps.append(targetid)
- dependees = self.get_dependees(targetid)
- for fnid in dependees:
- self.fail_fnid(fnid, missing_list)
- for taskid in xrange(len(self.tasks_idepends)):
- idepends = self.tasks_idepends[taskid]
- for (idependid, idependtask) in idepends:
- if idependid == targetid:
- self.fail_fnid(self.tasks_fnid[taskid], missing_list)
-
- if self.abort and targetid in self.external_targets:
- target = self.build_names_index[targetid]
- logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
- raise bb.providers.NoProvider(target)
-
- def remove_runtarget(self, targetid, missing_list=None):
- """
- Mark a run target as failed (unbuildable)
- Trigger removal of any files that have this as a dependency
- """
- if not missing_list:
- missing_list = [self.run_names_index[targetid]]
- else:
- missing_list = [self.run_names_index[targetid]] + missing_list
-
- logger.info("Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.run_names_index[targetid], missing_list)
- self.failed_rdeps.append(targetid)
- dependees = self.get_rdependees(targetid)
- for fnid in dependees:
- self.fail_fnid(fnid, missing_list)
- for taskid in xrange(len(self.tasks_irdepends)):
- irdepends = self.tasks_irdepends[taskid]
- for (idependid, idependtask) in irdepends:
- if idependid == targetid:
- self.fail_fnid(self.tasks_fnid[taskid], missing_list)
-
- def add_unresolved(self, cfgData, dataCache):
- """
- Resolve all unresolved build and runtime targets
- """
- logger.info("Resolving any missing task queue dependencies")
- while True:
- added = 0
- for target in self.get_unresolved_build_targets(dataCache):
- try:
- self.add_provider_internal(cfgData, dataCache, target)
- added = added + 1
- except bb.providers.NoProvider:
- targetid = self.getbuild_id(target)
- if self.abort and targetid in self.external_targets and not self.allowincomplete:
- raise
- if not self.allowincomplete:
- self.remove_buildtarget(targetid)
- for target in self.get_unresolved_run_targets(dataCache):
- try:
- self.add_rprovider(cfgData, dataCache, target)
- added = added + 1
- except (bb.providers.NoRProvider, bb.providers.MultipleRProvider):
- self.remove_runtarget(self.getrun_id(target))
- logger.debug(1, "Resolved " + str(added) + " extra dependencies")
- if added == 0:
- break
- # self.dump_data()
-
- def get_providermap(self, prefix=None):
- provmap = {}
- for name in self.build_names_index:
- if prefix and not name.startswith(prefix):
- continue
- if self.have_build_target(name):
- provider = self.get_provider(name)
- if provider:
- provmap[name] = self.fn_index[provider[0]]
- return provmap
-
- def dump_data(self):
- """
- Dump some debug information on the internal data structures
- """
- logger.debug(3, "build_names:")
- logger.debug(3, ", ".join(self.build_names_index))
-
- logger.debug(3, "run_names:")
- logger.debug(3, ", ".join(self.run_names_index))
-
- logger.debug(3, "build_targets:")
- for buildid in xrange(len(self.build_names_index)):
- target = self.build_names_index[buildid]
- targets = "None"
- if buildid in self.build_targets:
- targets = self.build_targets[buildid]
- logger.debug(3, " (%s)%s: %s", buildid, target, targets)
-
- logger.debug(3, "run_targets:")
- for runid in xrange(len(self.run_names_index)):
- target = self.run_names_index[runid]
- targets = "None"
- if runid in self.run_targets:
- targets = self.run_targets[runid]
- logger.debug(3, " (%s)%s: %s", runid, target, targets)
-
- logger.debug(3, "tasks:")
- for task in xrange(len(self.tasks_name)):
- logger.debug(3, " (%s)%s - %s: %s",
- task,
- self.fn_index[self.tasks_fnid[task]],
- self.tasks_name[task],
- self.tasks_tdepends[task])
-
- logger.debug(3, "dependency ids (per fn):")
- for fnid in self.depids:
- logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.depids[fnid])
-
- logger.debug(3, "runtime dependency ids (per fn):")
- for fnid in self.rdepids:
- logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.rdepids[fnid])
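# Aside: the fail_fnid()/remove_buildtarget() pair above implements a
# failure cascade: when every provider of a target is unbuildable, the
# target fails, which in turn fails every file depending on it. A
# minimal standalone sketch of that idea (hypothetical names, plain
# dicts instead of the bb.taskdata index structures):

def cascade_failure(providers, dependees, failed_fn, failed_targets=None):
    """providers: target -> list of candidate files;
    dependees: target -> list of files requiring that target."""
    if failed_targets is None:
        failed_targets = set()
    for target, fns in providers.items():
        if failed_fn in fns:
            fns.remove(failed_fn)
            if not fns and target not in failed_targets:
                failed_targets.add(target)      # no providers left
                for fn in dependees.get(target, []):
                    cascade_failure(providers, dependees, fn, failed_targets)
    return failed_targets

providers = {"libfoo": ["foo_1.0.bb"], "bar": ["bar_2.0.bb"]}
dependees = {"libfoo": ["bar_2.0.bb"]}
# Failing foo_1.0.bb removes libfoo's only provider, which then fails
# bar_2.0.bb and hence the "bar" target as well:
assert cascade_failure(providers, dependees, "foo_1.0.bb") == {"libfoo", "bar"}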
diff --git a/yocto-poky/bitbake/lib/bb/tests/__init__.py b/yocto-poky/bitbake/lib/bb/tests/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/yocto-poky/bitbake/lib/bb/tests/__init__.py
+++ /dev/null
diff --git a/yocto-poky/bitbake/lib/bb/tests/codeparser.py b/yocto-poky/bitbake/lib/bb/tests/codeparser.py
deleted file mode 100644
index bb820e403..000000000
--- a/yocto-poky/bitbake/lib/bb/tests/codeparser.py
+++ /dev/null
@@ -1,380 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake Test for codeparser.py
-#
-# Copyright (C) 2010 Chris Larson
-# Copyright (C) 2012 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-
-import unittest
-import logging
-import bb
-
-logger = logging.getLogger('BitBake.TestCodeParser')
-
-# bb.data references bb.parse but can't directly import due to circular dependencies.
-# Hack around it for now :(
-import bb.parse
-import bb.data
-
-class ReferenceTest(unittest.TestCase):
- def setUp(self):
- self.d = bb.data.init()
-
- def setEmptyVars(self, varlist):
- for k in varlist:
- self.d.setVar(k, "")
-
- def setValues(self, values):
- for k, v in values.items():
- self.d.setVar(k, v)
-
- def assertReferences(self, refs):
- self.assertEqual(self.references, refs)
-
- def assertExecs(self, execs):
- self.assertEqual(self.execs, execs)
-
-class VariableReferenceTest(ReferenceTest):
-
- def parseExpression(self, exp):
- parsedvar = self.d.expandWithRefs(exp, None)
- self.references = parsedvar.references
-
- def test_simple_reference(self):
- self.setEmptyVars(["FOO"])
- self.parseExpression("${FOO}")
- self.assertReferences(set(["FOO"]))
-
- def test_nested_reference(self):
- self.setEmptyVars(["BAR"])
- self.d.setVar("FOO", "BAR")
- self.parseExpression("${${FOO}}")
- self.assertReferences(set(["FOO", "BAR"]))
-
- def test_python_reference(self):
- self.setEmptyVars(["BAR"])
- self.parseExpression("${@bb.data.getVar('BAR', d, True) + 'foo'}")
- self.assertReferences(set(["BAR"]))
-
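# Aside: a much-simplified sketch of what these tests pin down. The real
# expandWithRefs() tracks every variable a value references; a flat
# regex version (which ignores nesting and ${@...} python snippets, both
# covered above) looks roughly like this:
import re

def find_references(value):
    return set(re.findall(r"\$\{(\w+)\}", value))

assert find_references("${FOO} and ${BAR}") == set(["FOO", "BAR"])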
-class ShellReferenceTest(ReferenceTest):
-
- def parseExpression(self, exp):
- parsedvar = self.d.expandWithRefs(exp, None)
- parser = bb.codeparser.ShellParser("ParserTest", logger)
- parser.parse_shell(parsedvar.value)
-
- self.references = parsedvar.references
- self.execs = parser.execs
-
- def test_quotes_inside_assign(self):
- self.parseExpression('foo=foo"bar"baz')
- self.assertReferences(set([]))
-
- def test_quotes_inside_arg(self):
- self.parseExpression('sed s#"bar baz"#"alpha beta"#g')
- self.assertExecs(set(["sed"]))
-
- def test_arg_continuation(self):
- self.parseExpression("sed -i -e s,foo,bar,g \\\n *.pc")
- self.assertExecs(set(["sed"]))
-
- def test_dollar_in_quoted(self):
- self.parseExpression('sed -i -e "foo$" *.pc')
- self.assertExecs(set(["sed"]))
-
- def test_quotes_inside_arg_continuation(self):
- self.setEmptyVars(["bindir", "D", "libdir"])
- self.parseExpression("""
-sed -i -e s#"moc_location=.*$"#"moc_location=${bindir}/moc4"# \\
--e s#"uic_location=.*$"#"uic_location=${bindir}/uic4"# \\
-${D}${libdir}/pkgconfig/*.pc
-""")
- self.assertReferences(set(["bindir", "D", "libdir"]))
-
- def test_assign_subshell_expansion(self):
- self.parseExpression("foo=$(echo bar)")
- self.assertExecs(set(["echo"]))
-
- def test_shell_unexpanded(self):
- self.setEmptyVars(["QT_BASE_NAME"])
- self.parseExpression('echo "${QT_BASE_NAME}"')
- self.assertExecs(set(["echo"]))
- self.assertReferences(set(["QT_BASE_NAME"]))
-
- def test_incomplete_varexp_single_quotes(self):
- self.parseExpression("sed -i -e 's:IP{:I${:g' $pc")
- self.assertExecs(set(["sed"]))
-
-
- def test_until(self):
- self.parseExpression("until false; do echo true; done")
- self.assertExecs(set(["false", "echo"]))
- self.assertReferences(set())
-
- def test_case(self):
- self.parseExpression("""
-case $foo in
-*)
-bar
-;;
-esac
-""")
- self.assertExecs(set(["bar"]))
- self.assertReferences(set())
-
- def test_assign_exec(self):
- self.parseExpression("a=b c='foo bar' alpha 1 2 3")
- self.assertExecs(set(["alpha"]))
-
- def test_redirect_to_file(self):
- self.setEmptyVars(["foo"])
- self.parseExpression("echo foo >${foo}/bar")
- self.assertExecs(set(["echo"]))
- self.assertReferences(set(["foo"]))
-
- def test_heredoc(self):
- self.setEmptyVars(["theta"])
- self.parseExpression("""
-cat <<END
-alpha
-beta
-${theta}
-END
-""")
- self.assertReferences(set(["theta"]))
-
- def test_redirect_from_heredoc(self):
- v = ["B", "SHADOW_MAILDIR", "SHADOW_MAILFILE", "SHADOW_UTMPDIR", "SHADOW_LOGDIR", "bindir"]
- self.setEmptyVars(v)
- self.parseExpression("""
-cat <<END >${B}/cachedpaths
-shadow_cv_maildir=${SHADOW_MAILDIR}
-shadow_cv_mailfile=${SHADOW_MAILFILE}
-shadow_cv_utmpdir=${SHADOW_UTMPDIR}
-shadow_cv_logdir=${SHADOW_LOGDIR}
-shadow_cv_passwd_dir=${bindir}
-END
-""")
- self.assertReferences(set(v))
- self.assertExecs(set(["cat"]))
-
-# def test_incomplete_command_expansion(self):
-# self.assertRaises(reftracker.ShellSyntaxError, reftracker.execs,
-# bbvalue.shparse("cp foo`", self.d), self.d)
-
-# def test_rogue_dollarsign(self):
-# self.setValues({"D" : "/tmp"})
-# self.parseExpression("install -d ${D}$")
-# self.assertReferences(set(["D"]))
-# self.assertExecs(set(["install"]))
-
-
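# Aside: the executed-command extraction these tests exercise can be
# approximated for simple commands by skipping leading VAR=value
# assignments and taking the next word. A hypothetical sketch (the real
# pysh-based ShellParser also handles pipelines, subshells, heredocs
# and redirections, as the cases above show):
import re
import shlex

def first_exec(command):
    for word in shlex.split(command):
        if re.match(r"^\w+=", word):   # skip environment-style assignments
            continue
        return word
    return None

assert first_exec("a=b c='foo bar' alpha 1 2 3") == "alpha"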
-class PythonReferenceTest(ReferenceTest):
-
- def setUp(self):
- self.d = bb.data.init()
- if hasattr(bb.utils, "_context"):
- self.context = bb.utils._context
- else:
- import __builtin__
- self.context = __builtin__.__dict__
-
- def parseExpression(self, exp):
- parsedvar = self.d.expandWithRefs(exp, None)
- parser = bb.codeparser.PythonParser("ParserTest", logger)
- parser.parse_python(parsedvar.value)
-
- self.references = parsedvar.references | parser.references
- self.execs = parser.execs
-
- @staticmethod
- def indent(value):
-        """Python snippets have to be indented; standalone Python values
-        don't. These unit tests exercise snippets."""
- return " " + value
-
- def test_getvar_reference(self):
- self.parseExpression("bb.data.getVar('foo', d, True)")
- self.assertReferences(set(["foo"]))
- self.assertExecs(set())
-
- def test_getvar_computed_reference(self):
- self.parseExpression("bb.data.getVar('f' + 'o' + 'o', d, True)")
- self.assertReferences(set())
- self.assertExecs(set())
-
- def test_getvar_exec_reference(self):
- self.parseExpression("eval('bb.data.getVar(\"foo\", d, True)')")
- self.assertReferences(set())
- self.assertExecs(set(["eval"]))
-
- def test_var_reference(self):
- self.context["foo"] = lambda x: x
- self.setEmptyVars(["FOO"])
- self.parseExpression("foo('${FOO}')")
- self.assertReferences(set(["FOO"]))
- self.assertExecs(set(["foo"]))
- del self.context["foo"]
-
- def test_var_exec(self):
- for etype in ("func", "task"):
- self.d.setVar("do_something", "echo 'hi mom! ${FOO}'")
- self.d.setVarFlag("do_something", etype, True)
- self.parseExpression("bb.build.exec_func('do_something', d)")
- self.assertReferences(set([]))
- self.assertExecs(set(["do_something"]))
-
- def test_function_reference(self):
- self.context["testfunc"] = lambda msg: bb.msg.note(1, None, msg)
- self.d.setVar("FOO", "Hello, World!")
- self.parseExpression("testfunc('${FOO}')")
- self.assertReferences(set(["FOO"]))
- self.assertExecs(set(["testfunc"]))
- del self.context["testfunc"]
-
- def test_qualified_function_reference(self):
- self.parseExpression("time.time()")
- self.assertExecs(set(["time.time"]))
-
- def test_qualified_function_reference_2(self):
- self.parseExpression("os.path.dirname('/foo/bar')")
- self.assertExecs(set(["os.path.dirname"]))
-
- def test_qualified_function_reference_nested(self):
- self.parseExpression("time.strftime('%Y%m%d',time.gmtime())")
- self.assertExecs(set(["time.strftime", "time.gmtime"]))
-
- def test_function_reference_chained(self):
- self.context["testget"] = lambda: "\tstrip me "
- self.parseExpression("testget().strip()")
- self.assertExecs(set(["testget"]))
- del self.context["testget"]
-
-
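# Aside: the qualified-call tracking tested above can be reproduced with
# the stdlib ast module: walk the tree, and for each Call reassemble the
# dotted name from Attribute/Name nodes. A minimal sketch (the real
# PythonParser also records d.getVar()-style variable references):
import ast

def called_names(code):
    execs = set()
    for node in ast.walk(ast.parse(code)):
        if isinstance(node, ast.Call):
            parts, func = [], node.func
            while isinstance(func, ast.Attribute):
                parts.insert(0, func.attr)
                func = func.value
            if isinstance(func, ast.Name):
                execs.add(".".join([func.id] + parts))
    return execs

assert called_names("time.strftime('%Y%m%d', time.gmtime())") == \
    set(["time.strftime", "time.gmtime"])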
-class DependencyReferenceTest(ReferenceTest):
-
- pydata = """
-bb.data.getVar('somevar', d, True)
-def test(d):
- foo = 'bar %s' % 'foo'
-def test2(d):
- d.getVar(foo, True)
- d.getVar('bar', False)
- test2(d)
-
-def a():
- \"\"\"some
- stuff
- \"\"\"
- return "heh"
-
-test(d)
-
-bb.data.expand(bb.data.getVar("something", False, d), d)
-bb.data.expand("${inexpand} somethingelse", d)
-bb.data.getVar(a(), d, False)
-"""
-
- def test_python(self):
- self.d.setVar("FOO", self.pydata)
- self.setEmptyVars(["inexpand", "a", "test2", "test"])
- self.d.setVarFlags("FOO", {
- "func": True,
- "python": True,
- "lineno": 1,
- "filename": "example.bb",
- })
-
- deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
-
- self.assertEquals(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))
-
-
- shelldata = """
-foo () {
-bar
-}
-{
-echo baz
-$(heh)
-eval `moo`
-}
-a=b
-c=d
-(
-true && false
-test -f foo
-testval=something
-$testval
-) || aiee
-! inverted
-echo ${somevar}
-
-case foo in
-bar)
-echo bar
-;;
-baz)
-echo baz
-;;
-foo*)
-echo foo
-;;
-esac
-"""
-
- def test_shell(self):
- execs = ["bar", "echo", "heh", "moo", "true", "aiee"]
- self.d.setVar("somevar", "heh")
- self.d.setVar("inverted", "echo inverted...")
- self.d.setVarFlag("inverted", "func", True)
- self.d.setVar("FOO", self.shelldata)
- self.d.setVarFlags("FOO", {"func": True})
- self.setEmptyVars(execs)
-
- deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
-
- self.assertEquals(deps, set(["somevar", "inverted"] + execs))
-
-
- def test_vardeps(self):
- self.d.setVar("oe_libinstall", "echo test")
- self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
- self.d.setVarFlag("FOO", "vardeps", "oe_libinstall")
-
- deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
-
- self.assertEquals(deps, set(["oe_libinstall"]))
-
- def test_vardeps_expand(self):
- self.d.setVar("oe_libinstall", "echo test")
- self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
- self.d.setVarFlag("FOO", "vardeps", "${@'oe_libinstall'}")
-
- deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
-
- self.assertEquals(deps, set(["oe_libinstall"]))
-
- #Currently no wildcard support
- #def test_vardeps_wildcards(self):
- # self.d.setVar("oe_libinstall", "echo test")
- # self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
- # self.d.setVarFlag("FOO", "vardeps", "oe_*")
- # self.assertEquals(deps, set(["oe_libinstall"]))
-
-
diff --git a/yocto-poky/bitbake/lib/bb/tests/cow.py b/yocto-poky/bitbake/lib/bb/tests/cow.py
deleted file mode 100644
index 35c5841f3..000000000
--- a/yocto-poky/bitbake/lib/bb/tests/cow.py
+++ /dev/null
@@ -1,136 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake Tests for Copy-on-Write (cow.py)
-#
-# Copyright 2006 Holger Freyther <freyther@handhelds.org>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-
-import unittest
-import os
-
-class COWTestCase(unittest.TestCase):
- """
- Test case for the COW module from mithro
- """
-
- def testGetSet(self):
- """
- Test and set
- """
- from bb.COW import COWDictBase
- a = COWDictBase.copy()
-
- self.assertEquals(False, a.has_key('a'))
-
- a['a'] = 'a'
- a['b'] = 'b'
- self.assertEquals(True, a.has_key('a'))
- self.assertEquals(True, a.has_key('b'))
- self.assertEquals('a', a['a'] )
- self.assertEquals('b', a['b'] )
-
- def testCopyCopy(self):
- """
- Test the copy of copies
- """
-
- from bb.COW import COWDictBase
-
- # create two COW dict 'instances'
- b = COWDictBase.copy()
- c = COWDictBase.copy()
-
- # assign some keys to one instance, some keys to another
- b['a'] = 10
- b['c'] = 20
- c['a'] = 30
-
- # test separation of the two instances
- self.assertEquals(False, c.has_key('c'))
- self.assertEquals(30, c['a'])
- self.assertEquals(10, b['a'])
-
- # test copy
- b_2 = b.copy()
- c_2 = c.copy()
-
- self.assertEquals(False, c_2.has_key('c'))
- self.assertEquals(10, b_2['a'])
-
- b_2['d'] = 40
- self.assertEquals(False, c_2.has_key('d'))
- self.assertEquals(True, b_2.has_key('d'))
- self.assertEquals(40, b_2['d'])
- self.assertEquals(False, b.has_key('d'))
- self.assertEquals(False, c.has_key('d'))
-
- c_2['d'] = 30
- self.assertEquals(True, c_2.has_key('d'))
- self.assertEquals(True, b_2.has_key('d'))
- self.assertEquals(30, c_2['d'])
- self.assertEquals(40, b_2['d'])
- self.assertEquals(False, b.has_key('d'))
- self.assertEquals(False, c.has_key('d'))
-
- # test copy of the copy
- c_3 = c_2.copy()
- b_3 = b_2.copy()
- b_3_2 = b_2.copy()
-
- c_3['e'] = 4711
- self.assertEquals(4711, c_3['e'])
- self.assertEquals(False, c_2.has_key('e'))
- self.assertEquals(False, b_3.has_key('e'))
- self.assertEquals(False, b_3_2.has_key('e'))
- self.assertEquals(False, b_2.has_key('e'))
-
- b_3['e'] = 'viel'
- self.assertEquals('viel', b_3['e'])
- self.assertEquals(4711, c_3['e'])
- self.assertEquals(False, c_2.has_key('e'))
- self.assertEquals(True, b_3.has_key('e'))
- self.assertEquals(False, b_3_2.has_key('e'))
- self.assertEquals(False, b_2.has_key('e'))
-
- def testCow(self):
- from bb.COW import COWDictBase
- c = COWDictBase.copy()
- c['123'] = 1027
- c['other'] = 4711
- c['d'] = { 'abc' : 10, 'bcd' : 20 }
-
- copy = c.copy()
-
- self.assertEquals(1027, c['123'])
- self.assertEquals(4711, c['other'])
- self.assertEquals({'abc':10, 'bcd':20}, c['d'])
- self.assertEquals(1027, copy['123'])
- self.assertEquals(4711, copy['other'])
- self.assertEquals({'abc':10, 'bcd':20}, copy['d'])
-
- # cow it now
- copy['123'] = 1028
- copy['other'] = 4712
- copy['d']['abc'] = 20
-
-
- self.assertEquals(1027, c['123'])
- self.assertEquals(4711, c['other'])
- self.assertEquals({'abc':10, 'bcd':20}, c['d'])
- self.assertEquals(1028, copy['123'])
- self.assertEquals(4712, copy['other'])
- self.assertEquals({'abc':20, 'bcd':20}, copy['d'])
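# Aside: the layering behaviour tested above (writes to a copy never
# leak into the parent, reads fall through) can be modelled with Python
# 3's collections.ChainMap, where new_child() plays the role of
# COWDictBase.copy(). Note that bb.COW goes further: as the final
# assertions show, even copy['d']['abc'] = 20 on a nested dict stays
# confined to the copy.
from collections import ChainMap

parent = ChainMap({"a": 10, "c": 20})
child = parent.new_child()   # cheap "copy": writes land in a new top layer
child["a"] = 30
assert parent["a"] == 10 and child["a"] == 30
assert child["c"] == 20      # unwritten keys read through to the parent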
diff --git a/yocto-poky/bitbake/lib/bb/tests/data.py b/yocto-poky/bitbake/lib/bb/tests/data.py
deleted file mode 100644
index 12232305c..000000000
--- a/yocto-poky/bitbake/lib/bb/tests/data.py
+++ /dev/null
@@ -1,446 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake Tests for the Data Store (data.py/data_smart.py)
-#
-# Copyright (C) 2010 Chris Larson
-# Copyright (C) 2012 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-
-import unittest
-import bb
-import bb.data
-import bb.parse
-import logging
-
-class LogRecord():
- def __enter__(self):
- logs = []
- class LogHandler(logging.Handler):
- def emit(self, record):
- logs.append(record)
- logger = logging.getLogger("BitBake")
- handler = LogHandler()
- self.handler = handler
- logger.addHandler(handler)
- return logs
- def __exit__(self, type, value, traceback):
- logger = logging.getLogger("BitBake")
- logger.removeHandler(self.handler)
- return
-
-def logContains(item, logs):
- for l in logs:
- m = l.getMessage()
- if item in m:
- return True
- return False
-
-class DataExpansions(unittest.TestCase):
- def setUp(self):
- self.d = bb.data.init()
- self.d["foo"] = "value_of_foo"
- self.d["bar"] = "value_of_bar"
- self.d["value_of_foo"] = "value_of_'value_of_foo'"
-
- def test_one_var(self):
- val = self.d.expand("${foo}")
- self.assertEqual(str(val), "value_of_foo")
-
- def test_indirect_one_var(self):
- val = self.d.expand("${${foo}}")
- self.assertEqual(str(val), "value_of_'value_of_foo'")
-
- def test_indirect_and_another(self):
- val = self.d.expand("${${foo}} ${bar}")
- self.assertEqual(str(val), "value_of_'value_of_foo' value_of_bar")
-
- def test_python_snippet(self):
- val = self.d.expand("${@5*12}")
- self.assertEqual(str(val), "60")
-
- def test_expand_in_python_snippet(self):
- val = self.d.expand("${@'boo ' + '${foo}'}")
- self.assertEqual(str(val), "boo value_of_foo")
-
- def test_python_snippet_getvar(self):
- val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
- self.assertEqual(str(val), "value_of_foo value_of_bar")
-
- def test_python_unexpanded(self):
- self.d.setVar("bar", "${unsetvar}")
- val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
- self.assertEqual(str(val), "${@d.getVar('foo', True) + ' ${unsetvar}'}")
-
- def test_python_snippet_syntax_error(self):
- self.d.setVar("FOO", "${@foo = 5}")
- self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
-
- def test_python_snippet_runtime_error(self):
- self.d.setVar("FOO", "${@int('test')}")
- self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
-
- def test_python_snippet_error_path(self):
- self.d.setVar("FOO", "foo value ${BAR}")
- self.d.setVar("BAR", "bar value ${@int('test')}")
- self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
-
- def test_value_containing_value(self):
- val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
- self.assertEqual(str(val), "value_of_foo value_of_bar")
-
- def test_reference_undefined_var(self):
- val = self.d.expand("${undefinedvar} meh")
- self.assertEqual(str(val), "${undefinedvar} meh")
-
- def test_double_reference(self):
- self.d.setVar("BAR", "bar value")
- self.d.setVar("FOO", "${BAR} foo ${BAR}")
- val = self.d.getVar("FOO", True)
- self.assertEqual(str(val), "bar value foo bar value")
-
- def test_direct_recursion(self):
- self.d.setVar("FOO", "${FOO}")
- self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
-
- def test_indirect_recursion(self):
- self.d.setVar("FOO", "${BAR}")
- self.d.setVar("BAR", "${BAZ}")
- self.d.setVar("BAZ", "${FOO}")
- self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
-
- def test_recursion_exception(self):
- self.d.setVar("FOO", "${BAR}")
- self.d.setVar("BAR", "${${@'FOO'}}")
- self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
-
- def test_incomplete_varexp_single_quotes(self):
- self.d.setVar("FOO", "sed -i -e 's:IP{:I${:g' $pc")
- val = self.d.getVar("FOO", True)
- self.assertEqual(str(val), "sed -i -e 's:IP{:I${:g' $pc")
-
- def test_nonstring(self):
- self.d.setVar("TEST", 5)
- val = self.d.getVar("TEST", True)
- self.assertEqual(str(val), "5")
-
- def test_rename(self):
- self.d.renameVar("foo", "newfoo")
- self.assertEqual(self.d.getVar("newfoo", False), "value_of_foo")
- self.assertEqual(self.d.getVar("foo", False), None)
-
- def test_deletion(self):
- self.d.delVar("foo")
- self.assertEqual(self.d.getVar("foo", False), None)
-
- def test_keys(self):
- keys = self.d.keys()
- self.assertEqual(keys, ['value_of_foo', 'foo', 'bar'])
-
- def test_keys_deletion(self):
- newd = bb.data.createCopy(self.d)
- newd.delVar("bar")
- keys = newd.keys()
- self.assertEqual(keys, ['value_of_foo', 'foo'])
-
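# Aside: a toy expander showing the semantics DataExpansions pins down:
# substitution repeats until stable (so ${${foo}} resolves in two
# passes) and unset references stay verbatim. Hypothetical helper, not
# the real DataSmart code, which also evaluates ${@...} python:
import re

def expand(value, d):
    pattern = re.compile(r"\$\{(\w+)\}")
    while True:
        new = pattern.sub(lambda m: d.get(m.group(1), m.group(0)), value)
        if new == value:
            return new
        value = new

d = {"foo": "value_of_foo", "value_of_foo": "value_of_'value_of_foo'"}
assert expand("${${foo}}", d) == "value_of_'value_of_foo'"
assert expand("${undefinedvar} meh", d) == "${undefinedvar} meh"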
-class TestNestedExpansions(unittest.TestCase):
- def setUp(self):
- self.d = bb.data.init()
- self.d["foo"] = "foo"
- self.d["bar"] = "bar"
- self.d["value_of_foobar"] = "187"
-
- def test_refs(self):
- val = self.d.expand("${value_of_${foo}${bar}}")
- self.assertEqual(str(val), "187")
-
- #def test_python_refs(self):
- # val = self.d.expand("${@${@3}**2 + ${@4}**2}")
- # self.assertEqual(str(val), "25")
-
- def test_ref_in_python_ref(self):
- val = self.d.expand("${@'${foo}' + 'bar'}")
- self.assertEqual(str(val), "foobar")
-
- def test_python_ref_in_ref(self):
- val = self.d.expand("${${@'f'+'o'+'o'}}")
- self.assertEqual(str(val), "foo")
-
- def test_deep_nesting(self):
- depth = 100
- val = self.d.expand("${" * depth + "foo" + "}" * depth)
- self.assertEqual(str(val), "foo")
-
- #def test_deep_python_nesting(self):
- # depth = 50
- # val = self.d.expand("${@" * depth + "1" + "+1}" * depth)
- # self.assertEqual(str(val), str(depth + 1))
-
- def test_mixed(self):
- val = self.d.expand("${value_of_${@('${foo}'+'bar')[0:3]}${${@'BAR'.lower()}}}")
- self.assertEqual(str(val), "187")
-
- def test_runtime(self):
- val = self.d.expand("${${@'value_of' + '_f'+'o'+'o'+'b'+'a'+'r'}}")
- self.assertEqual(str(val), "187")
-
-class TestMemoize(unittest.TestCase):
- def test_memoized(self):
- d = bb.data.init()
- d.setVar("FOO", "bar")
- self.assertTrue(d.getVar("FOO", False) is d.getVar("FOO", False))
-
- def test_not_memoized(self):
- d1 = bb.data.init()
- d2 = bb.data.init()
- d1.setVar("FOO", "bar")
- d2.setVar("FOO", "bar2")
- self.assertTrue(d1.getVar("FOO", False) is not d2.getVar("FOO", False))
-
- def test_changed_after_memoized(self):
- d = bb.data.init()
- d.setVar("foo", "value of foo")
- self.assertEqual(str(d.getVar("foo", False)), "value of foo")
- d.setVar("foo", "second value of foo")
- self.assertEqual(str(d.getVar("foo", False)), "second value of foo")
-
- def test_same_value(self):
- d = bb.data.init()
- d.setVar("foo", "value of")
- d.setVar("bar", "value of")
- self.assertEqual(d.getVar("foo", False),
- d.getVar("bar", False))
-
-class TestConcat(unittest.TestCase):
- def setUp(self):
- self.d = bb.data.init()
- self.d.setVar("FOO", "foo")
- self.d.setVar("VAL", "val")
- self.d.setVar("BAR", "bar")
-
- def test_prepend(self):
- self.d.setVar("TEST", "${VAL}")
- self.d.prependVar("TEST", "${FOO}:")
- self.assertEqual(self.d.getVar("TEST", True), "foo:val")
-
- def test_append(self):
- self.d.setVar("TEST", "${VAL}")
- self.d.appendVar("TEST", ":${BAR}")
- self.assertEqual(self.d.getVar("TEST", True), "val:bar")
-
- def test_multiple_append(self):
- self.d.setVar("TEST", "${VAL}")
- self.d.prependVar("TEST", "${FOO}:")
- self.d.appendVar("TEST", ":val2")
- self.d.appendVar("TEST", ":${BAR}")
- self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar")
-
-class TestConcatOverride(unittest.TestCase):
- def setUp(self):
- self.d = bb.data.init()
- self.d.setVar("FOO", "foo")
- self.d.setVar("VAL", "val")
- self.d.setVar("BAR", "bar")
-
- def test_prepend(self):
- self.d.setVar("TEST", "${VAL}")
- self.d.setVar("TEST_prepend", "${FOO}:")
- bb.data.update_data(self.d)
- self.assertEqual(self.d.getVar("TEST", True), "foo:val")
-
- def test_append(self):
- self.d.setVar("TEST", "${VAL}")
- self.d.setVar("TEST_append", ":${BAR}")
- bb.data.update_data(self.d)
- self.assertEqual(self.d.getVar("TEST", True), "val:bar")
-
- def test_multiple_append(self):
- self.d.setVar("TEST", "${VAL}")
- self.d.setVar("TEST_prepend", "${FOO}:")
- self.d.setVar("TEST_append", ":val2")
- self.d.setVar("TEST_append", ":${BAR}")
- bb.data.update_data(self.d)
- self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar")
-
- def test_append_unset(self):
- self.d.setVar("TEST_prepend", "${FOO}:")
- self.d.setVar("TEST_append", ":val2")
- self.d.setVar("TEST_append", ":${BAR}")
- bb.data.update_data(self.d)
- self.assertEqual(self.d.getVar("TEST", True), "foo::val2:bar")
-
- def test_remove(self):
- self.d.setVar("TEST", "${VAL} ${BAR}")
- self.d.setVar("TEST_remove", "val")
- bb.data.update_data(self.d)
- self.assertEqual(self.d.getVar("TEST", True), "bar")
-
- def test_doubleref_remove(self):
- self.d.setVar("TEST", "${VAL} ${BAR}")
- self.d.setVar("TEST_remove", "val")
- self.d.setVar("TEST_TEST", "${TEST} ${TEST}")
- bb.data.update_data(self.d)
- self.assertEqual(self.d.getVar("TEST_TEST", True), "bar bar")
-
- def test_empty_remove(self):
- self.d.setVar("TEST", "")
- self.d.setVar("TEST_remove", "val")
- bb.data.update_data(self.d)
- self.assertEqual(self.d.getVar("TEST", True), "")
-
- def test_remove_expansion(self):
- self.d.setVar("BAR", "Z")
- self.d.setVar("TEST", "${BAR}/X Y")
- self.d.setVar("TEST_remove", "${BAR}/X")
- bb.data.update_data(self.d)
- self.assertEqual(self.d.getVar("TEST", True), "Y")
-
- def test_remove_expansion_items(self):
- self.d.setVar("TEST", "A B C D")
- self.d.setVar("BAR", "B D")
- self.d.setVar("TEST_remove", "${BAR}")
- bb.data.update_data(self.d)
- self.assertEqual(self.d.getVar("TEST", True), "A C")
-
-class TestOverrides(unittest.TestCase):
- def setUp(self):
- self.d = bb.data.init()
- self.d.setVar("OVERRIDES", "foo:bar:local")
- self.d.setVar("TEST", "testvalue")
-
- def test_no_override(self):
- bb.data.update_data(self.d)
- self.assertEqual(self.d.getVar("TEST", True), "testvalue")
-
- def test_one_override(self):
- self.d.setVar("TEST_bar", "testvalue2")
- bb.data.update_data(self.d)
- self.assertEqual(self.d.getVar("TEST", True), "testvalue2")
-
- def test_one_override_unset(self):
- self.d.setVar("TEST2_bar", "testvalue2")
- bb.data.update_data(self.d)
- self.assertEqual(self.d.getVar("TEST2", True), "testvalue2")
- self.assertItemsEqual(self.d.keys(), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar'])
-
- def test_multiple_override(self):
- self.d.setVar("TEST_bar", "testvalue2")
- self.d.setVar("TEST_local", "testvalue3")
- self.d.setVar("TEST_foo", "testvalue4")
- bb.data.update_data(self.d)
- self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
- self.assertItemsEqual(self.d.keys(), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local'])
-
- def test_multiple_combined_overrides(self):
- self.d.setVar("TEST_local_foo_bar", "testvalue3")
- bb.data.update_data(self.d)
- self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
-
- def test_multiple_overrides_unset(self):
- self.d.setVar("TEST2_local_foo_bar", "testvalue3")
- bb.data.update_data(self.d)
- self.assertEqual(self.d.getVar("TEST2", True), "testvalue3")
-
- def test_keyexpansion_override(self):
- self.d.setVar("LOCAL", "local")
- self.d.setVar("TEST_bar", "testvalue2")
- self.d.setVar("TEST_${LOCAL}", "testvalue3")
- self.d.setVar("TEST_foo", "testvalue4")
- bb.data.update_data(self.d)
- bb.data.expandKeys(self.d)
- self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
-
- def test_rename_override(self):
- self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a")
- self.d.setVar("OVERRIDES", "class-target")
- bb.data.update_data(self.d)
- self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools")
- self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools", True), "a")
-
- def test_underscore_override(self):
- self.d.setVar("TEST_bar", "testvalue2")
- self.d.setVar("TEST_some_val", "testvalue3")
- self.d.setVar("TEST_foo", "testvalue4")
- self.d.setVar("OVERRIDES", "foo:bar:some_val")
- self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
-
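# Aside: the override resolution exercised above, as a hypothetical
# sketch. OVERRIDES is an ordered list in which later entries win, and
# VAR_<override> replaces VAR when that override is active; the real
# datastore also handles combined overrides (VAR_a_b) and the
# _append/_prepend/_remove operators from TestConcatOverride:
def apply_overrides(d, overrides):
    for o in overrides.split(":"):        # later entries take priority
        for key in list(d):
            if key.endswith("_" + o):
                d[key[: -len("_" + o)]] = d[key]
    return d

d = {"TEST": "testvalue", "TEST_bar": "testvalue2", "TEST_local": "testvalue3"}
apply_overrides(d, "foo:bar:local")
assert d["TEST"] == "testvalue3"          # "local" is last, so it wins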
-class TestKeyExpansion(unittest.TestCase):
- def setUp(self):
- self.d = bb.data.init()
- self.d.setVar("FOO", "foo")
- self.d.setVar("BAR", "foo")
-
- def test_keyexpand(self):
- self.d.setVar("VAL_${FOO}", "A")
- self.d.setVar("VAL_${BAR}", "B")
- with LogRecord() as logs:
- bb.data.expandKeys(self.d)
- self.assertTrue(logContains("Variable key VAL_${FOO} (A) replaces original key VAL_foo (B)", logs))
- self.assertEqual(self.d.getVar("VAL_foo", True), "A")
-
-class TestFlags(unittest.TestCase):
- def setUp(self):
- self.d = bb.data.init()
- self.d.setVar("foo", "value of foo")
- self.d.setVarFlag("foo", "flag1", "value of flag1")
- self.d.setVarFlag("foo", "flag2", "value of flag2")
-
- def test_setflag(self):
- self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1")
- self.assertEqual(self.d.getVarFlag("foo", "flag2", False), "value of flag2")
-
- def test_delflag(self):
- self.d.delVarFlag("foo", "flag2")
- self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1")
- self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None)
-
-
-class Contains(unittest.TestCase):
- def setUp(self):
- self.d = bb.data.init()
- self.d.setVar("SOMEFLAG", "a b c")
-
- def test_contains(self):
- self.assertTrue(bb.utils.contains("SOMEFLAG", "a", True, False, self.d))
- self.assertTrue(bb.utils.contains("SOMEFLAG", "b", True, False, self.d))
- self.assertTrue(bb.utils.contains("SOMEFLAG", "c", True, False, self.d))
-
- self.assertTrue(bb.utils.contains("SOMEFLAG", "a b", True, False, self.d))
- self.assertTrue(bb.utils.contains("SOMEFLAG", "b c", True, False, self.d))
- self.assertTrue(bb.utils.contains("SOMEFLAG", "c a", True, False, self.d))
-
- self.assertTrue(bb.utils.contains("SOMEFLAG", "a b c", True, False, self.d))
- self.assertTrue(bb.utils.contains("SOMEFLAG", "c b a", True, False, self.d))
-
- self.assertFalse(bb.utils.contains("SOMEFLAG", "x", True, False, self.d))
- self.assertFalse(bb.utils.contains("SOMEFLAG", "a x", True, False, self.d))
- self.assertFalse(bb.utils.contains("SOMEFLAG", "x c b", True, False, self.d))
- self.assertFalse(bb.utils.contains("SOMEFLAG", "x c b a", True, False, self.d))
-
- def test_contains_any(self):
- self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a", True, False, self.d))
- self.assertTrue(bb.utils.contains_any("SOMEFLAG", "b", True, False, self.d))
- self.assertTrue(bb.utils.contains_any("SOMEFLAG", "c", True, False, self.d))
-
- self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a b", True, False, self.d))
- self.assertTrue(bb.utils.contains_any("SOMEFLAG", "b c", True, False, self.d))
- self.assertTrue(bb.utils.contains_any("SOMEFLAG", "c a", True, False, self.d))
-
- self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a x", True, False, self.d))
- self.assertTrue(bb.utils.contains_any("SOMEFLAG", "x c", True, False, self.d))
-
- self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x", True, False, self.d))
- self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x y z", True, False, self.d))
diff --git a/yocto-poky/bitbake/lib/bb/tests/fetch.py b/yocto-poky/bitbake/lib/bb/tests/fetch.py
deleted file mode 100644
index 4ba688bfe..000000000
--- a/yocto-poky/bitbake/lib/bb/tests/fetch.py
+++ /dev/null
@@ -1,812 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake Tests for the Fetcher (fetch2/)
-#
-# Copyright (C) 2012 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-
-import unittest
-import tempfile
-import subprocess
-import os
-from bb.fetch2 import URI
-from bb.fetch2 import FetchMethod
-import bb
-
-class URITest(unittest.TestCase):
- test_uris = {
- "http://www.google.com/index.html" : {
- 'uri': 'http://www.google.com/index.html',
- 'scheme': 'http',
- 'hostname': 'www.google.com',
- 'port': None,
- 'hostport': 'www.google.com',
- 'path': '/index.html',
- 'userinfo': '',
- 'username': '',
- 'password': '',
- 'params': {},
- 'query': {},
- 'relative': False
- },
- "http://www.google.com/index.html;param1=value1" : {
- 'uri': 'http://www.google.com/index.html;param1=value1',
- 'scheme': 'http',
- 'hostname': 'www.google.com',
- 'port': None,
- 'hostport': 'www.google.com',
- 'path': '/index.html',
- 'userinfo': '',
- 'username': '',
- 'password': '',
- 'params': {
- 'param1': 'value1'
- },
- 'query': {},
- 'relative': False
- },
- "http://www.example.org/index.html?param1=value1" : {
- 'uri': 'http://www.example.org/index.html?param1=value1',
- 'scheme': 'http',
- 'hostname': 'www.example.org',
- 'port': None,
- 'hostport': 'www.example.org',
- 'path': '/index.html',
- 'userinfo': '',
- 'username': '',
- 'password': '',
- 'params': {},
- 'query': {
- 'param1': 'value1'
- },
- 'relative': False
- },
- "http://www.example.org/index.html?qparam1=qvalue1;param2=value2" : {
- 'uri': 'http://www.example.org/index.html?qparam1=qvalue1;param2=value2',
- 'scheme': 'http',
- 'hostname': 'www.example.org',
- 'port': None,
- 'hostport': 'www.example.org',
- 'path': '/index.html',
- 'userinfo': '',
- 'username': '',
- 'password': '',
- 'params': {
- 'param2': 'value2'
- },
- 'query': {
- 'qparam1': 'qvalue1'
- },
- 'relative': False
- },
- "http://www.example.com:8080/index.html" : {
- 'uri': 'http://www.example.com:8080/index.html',
- 'scheme': 'http',
- 'hostname': 'www.example.com',
- 'port': 8080,
- 'hostport': 'www.example.com:8080',
- 'path': '/index.html',
- 'userinfo': '',
- 'username': '',
- 'password': '',
- 'params': {},
- 'query': {},
- 'relative': False
- },
- "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : {
- 'uri': 'cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg',
- 'scheme': 'cvs',
- 'hostname': 'cvs.handhelds.org',
- 'port': None,
- 'hostport': 'cvs.handhelds.org',
- 'path': '/cvs',
- 'userinfo': 'anoncvs',
- 'username': 'anoncvs',
- 'password': '',
- 'params': {
- 'module': 'familiar/dist/ipkg'
- },
- 'query': {},
- 'relative': False
- },
- "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg": {
- 'uri': 'cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg',
- 'scheme': 'cvs',
- 'hostname': 'cvs.handhelds.org',
- 'port': None,
- 'hostport': 'cvs.handhelds.org',
- 'path': '/cvs',
- 'userinfo': 'anoncvs:anonymous',
- 'username': 'anoncvs',
- 'password': 'anonymous',
- 'params': {
- 'tag': 'V0-99-81',
- 'module': 'familiar/dist/ipkg'
- },
- 'query': {},
- 'relative': False
- },
- "file://example.diff": { # NOTE: Not RFC compliant!
- 'uri': 'file:example.diff',
- 'scheme': 'file',
- 'hostname': '',
- 'port': None,
- 'hostport': '',
- 'path': 'example.diff',
- 'userinfo': '',
- 'username': '',
- 'password': '',
- 'params': {},
- 'query': {},
- 'relative': True
- },
- "file:example.diff": { # NOTE: RFC compliant version of the former
- 'uri': 'file:example.diff',
- 'scheme': 'file',
- 'hostname': '',
- 'port': None,
- 'hostport': '',
- 'path': 'example.diff',
-            'userinfo': '',
- 'username': '',
- 'password': '',
- 'params': {},
- 'query': {},
- 'relative': True
- },
- "file:///tmp/example.diff": {
- 'uri': 'file:///tmp/example.diff',
- 'scheme': 'file',
- 'hostname': '',
- 'port': None,
- 'hostport': '',
- 'path': '/tmp/example.diff',
-            'userinfo': '',
- 'username': '',
- 'password': '',
- 'params': {},
- 'query': {},
- 'relative': False
- },
- "git:///path/example.git": {
- 'uri': 'git:///path/example.git',
- 'scheme': 'git',
- 'hostname': '',
- 'port': None,
- 'hostport': '',
- 'path': '/path/example.git',
-            'userinfo': '',
- 'username': '',
- 'password': '',
- 'params': {},
- 'query': {},
- 'relative': False
- },
- "git:path/example.git": {
- 'uri': 'git:path/example.git',
- 'scheme': 'git',
- 'hostname': '',
- 'port': None,
- 'hostport': '',
- 'path': 'path/example.git',
-            'userinfo': '',
- 'username': '',
- 'password': '',
- 'params': {},
- 'query': {},
- 'relative': True
- },
- "git://example.net/path/example.git": {
- 'uri': 'git://example.net/path/example.git',
- 'scheme': 'git',
- 'hostname': 'example.net',
- 'port': None,
- 'hostport': 'example.net',
- 'path': '/path/example.git',
-            'userinfo': '',
- 'username': '',
- 'password': '',
- 'params': {},
- 'query': {},
- 'relative': False
- },
- "http://somesite.net;someparam=1": {
- 'uri': 'http://somesite.net;someparam=1',
- 'scheme': 'http',
- 'hostname': 'somesite.net',
- 'port': None,
- 'hostport': 'somesite.net',
- 'path': '',
-            'userinfo': '',
- 'username': '',
- 'password': '',
- 'params': {"someparam" : "1"},
- 'query': {},
- 'relative': False
- },
- "file://somelocation;someparam=1": {
- 'uri': 'file:somelocation;someparam=1',
- 'scheme': 'file',
- 'hostname': '',
- 'port': None,
- 'hostport': '',
- 'path': 'somelocation',
-            'userinfo': '',
- 'username': '',
- 'password': '',
- 'params': {"someparam" : "1"},
- 'query': {},
- 'relative': True
- }
-
- }
-
- def test_uri(self):
- for test_uri, ref in self.test_uris.items():
- uri = URI(test_uri)
-
- self.assertEqual(str(uri), ref['uri'])
-
- # expected attributes
- self.assertEqual(uri.scheme, ref['scheme'])
-
- self.assertEqual(uri.userinfo, ref['userinfo'])
- self.assertEqual(uri.username, ref['username'])
- self.assertEqual(uri.password, ref['password'])
-
- self.assertEqual(uri.hostname, ref['hostname'])
- self.assertEqual(uri.port, ref['port'])
- self.assertEqual(uri.hostport, ref['hostport'])
-
- self.assertEqual(uri.path, ref['path'])
- self.assertEqual(uri.params, ref['params'])
-
- self.assertEqual(uri.relative, ref['relative'])
-
- def test_dict(self):
- for test in self.test_uris.values():
- uri = URI()
-
- self.assertEqual(uri.scheme, '')
- self.assertEqual(uri.userinfo, '')
- self.assertEqual(uri.username, '')
- self.assertEqual(uri.password, '')
- self.assertEqual(uri.hostname, '')
- self.assertEqual(uri.port, None)
- self.assertEqual(uri.path, '')
- self.assertEqual(uri.params, {})
-
-
- uri.scheme = test['scheme']
- self.assertEqual(uri.scheme, test['scheme'])
-
- uri.userinfo = test['userinfo']
- self.assertEqual(uri.userinfo, test['userinfo'])
- self.assertEqual(uri.username, test['username'])
- self.assertEqual(uri.password, test['password'])
-
- # make sure changing the values doesn't do anything unexpected
- uri.username = 'changeme'
- self.assertEqual(uri.username, 'changeme')
- self.assertEqual(uri.password, test['password'])
- uri.password = 'insecure'
- self.assertEqual(uri.username, 'changeme')
- self.assertEqual(uri.password, 'insecure')
-
- # reset back after our trickery
- uri.userinfo = test['userinfo']
- self.assertEqual(uri.userinfo, test['userinfo'])
- self.assertEqual(uri.username, test['username'])
- self.assertEqual(uri.password, test['password'])
-
- uri.hostname = test['hostname']
- self.assertEqual(uri.hostname, test['hostname'])
- self.assertEqual(uri.hostport, test['hostname'])
-
- uri.port = test['port']
- self.assertEqual(uri.port, test['port'])
- self.assertEqual(uri.hostport, test['hostport'])
-
- uri.path = test['path']
- self.assertEqual(uri.path, test['path'])
-
- uri.params = test['params']
- self.assertEqual(uri.params, test['params'])
-
- uri.query = test['query']
- self.assertEqual(uri.query, test['query'])
-
- self.assertEqual(str(uri), test['uri'])
-
- uri.params = {}
- self.assertEqual(uri.params, {})
- self.assertEqual(str(uri), (str(uri).split(";"))[0])
-
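# Aside: the ;key=value segments checked throughout URITest are BitBake
# fetcher parameters, distinct from ?query arguments. A rough,
# hypothetical split on the first ';' looks like:
def split_params(url):
    base, _, rest = url.partition(";")
    params = dict(p.split("=", 1) for p in rest.split(";") if p)
    return base, params

base, params = split_params(
    "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg")
assert base == "cvs://anoncvs@cvs.handhelds.org/cvs"
assert params == {"module": "familiar/dist/ipkg"}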
-class FetcherTest(unittest.TestCase):
-
- def setUp(self):
- self.origdir = os.getcwd()
- self.d = bb.data.init()
- self.tempdir = tempfile.mkdtemp()
- self.dldir = os.path.join(self.tempdir, "download")
- os.mkdir(self.dldir)
- self.d.setVar("DL_DIR", self.dldir)
- self.unpackdir = os.path.join(self.tempdir, "unpacked")
- os.mkdir(self.unpackdir)
- persistdir = os.path.join(self.tempdir, "persistdata")
- self.d.setVar("PERSISTENT_DIR", persistdir)
-
- def tearDown(self):
- os.chdir(self.origdir)
- bb.utils.prunedir(self.tempdir)
-
-class MirrorUriTest(FetcherTest):
-
- replaceuris = {
- ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "http://somewhere.org/somedir/")
- : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz",
- ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
- : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
- ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
- : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
- ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http")
- : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
- ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake")
- : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890",
- ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache")
- : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
- ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/")
- : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
- ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/somedir3")
- : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
- ("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz")
- : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
- ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://www.apache.org/dist", "http://archive.apache.org/dist")
- : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2",
- ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/")
- : "file:///somepath/downloads/subversion-1.7.1.tar.bz2",
- ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
- : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
- ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
- : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
- ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http")
- : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
-
- #Renaming files doesn't work
- #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz"
- #("file://sstate-xyz.tgz", "file://.*/.*", "file:///somewhere/1234/sstate-cache") : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
- }
-
- mirrorvar = "http://.*/.* file:///somepath/downloads/ \n" \
- "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n" \
- "https://.*/.* file:///someotherpath/downloads/ \n" \
- "http://.*/.* file:///someotherpath/downloads/ \n"
-
- def test_urireplace(self):
- for k, v in self.replaceuris.items():
- ud = bb.fetch.FetchData(k[0], self.d)
- ud.setup_localpath(self.d)
- mirrors = bb.fetch2.mirror_from_string("%s %s" % (k[1], k[2]))
- newuris, uds = bb.fetch2.build_mirroruris(ud, mirrors, self.d)
- self.assertEqual([v], newuris)
-
- def test_urilist1(self):
- fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
- mirrors = bb.fetch2.mirror_from_string(self.mirrorvar)
- uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
- self.assertEqual(uris, ['file:///somepath/downloads/bitbake-1.0.tar.gz', 'file:///someotherpath/downloads/bitbake-1.0.tar.gz'])
-
- def test_urilist2(self):
- # Catch https:// -> files:// bug
- fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
- mirrors = bb.fetch2.mirror_from_string(self.mirrorvar)
- uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
- self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz'])
-
- def test_mirror_of_mirror(self):
- # Test if mirror of a mirror works
- mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/ \n"
- mirrorvar = mirrorvar + " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/ \n"
- fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
- mirrors = bb.fetch2.mirror_from_string(mirrorvar)
- uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
- self.assertEqual(uris, ['file:///somepath/downloads/bitbake-1.0.tar.gz',
- 'file:///someotherpath/downloads/bitbake-1.0.tar.gz',
- 'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz',
- 'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz'])
-
- recmirrorvar = "https://.*/[^/]* http://AAAA/A/A/A/ \n" \
- "https://.*/[^/]* https://BBBB/B/B/B/ \n"
-
- def test_recursive(self):
- fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
- mirrors = bb.fetch2.mirror_from_string(self.recmirrorvar)
- uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
- self.assertEqual(uris, ['http://AAAA/A/A/A/bitbake/bitbake-1.0.tar.gz',
- 'https://BBBB/B/B/B/bitbake/bitbake-1.0.tar.gz',
- 'http://AAAA/A/A/A/B/B/bitbake/bitbake-1.0.tar.gz'])
-
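# Aside: the core of the mirror mapping tested above is "regex plus
# replacement": a URI matching the pattern is rewritten, and a
# replacement ending in '/' keeps the original basename. A simplified,
# hypothetical version (the real build_mirroruris() also carries
# fetcher parameters and expands BASENAME/MIRRORNAME):
import re

def mirror_uri(uri, pattern, replacement):
    if not re.match(pattern, uri):
        return None
    if replacement.endswith("/"):
        return replacement + uri.rsplit("/", 1)[-1]   # keep the basename
    return re.sub(pattern, replacement, uri)

assert mirror_uri(
    "http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2",
    r"http://.*/.*", "file:///somepath/downloads/"
) == "file:///somepath/downloads/subversion-1.7.1.tar.bz2"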
-class FetcherLocalTest(FetcherTest):
- def setUp(self):
- def touch(fn):
- with file(fn, 'a'):
- os.utime(fn, None)
-
- super(FetcherLocalTest, self).setUp()
- self.localsrcdir = os.path.join(self.tempdir, 'localsrc')
- os.makedirs(self.localsrcdir)
- touch(os.path.join(self.localsrcdir, 'a'))
- touch(os.path.join(self.localsrcdir, 'b'))
- os.makedirs(os.path.join(self.localsrcdir, 'dir'))
- touch(os.path.join(self.localsrcdir, 'dir', 'c'))
- touch(os.path.join(self.localsrcdir, 'dir', 'd'))
- os.makedirs(os.path.join(self.localsrcdir, 'dir', 'subdir'))
- touch(os.path.join(self.localsrcdir, 'dir', 'subdir', 'e'))
- self.d.setVar("FILESPATH", self.localsrcdir)
-
- def fetchUnpack(self, uris):
- fetcher = bb.fetch.Fetch(uris, self.d)
- fetcher.download()
- fetcher.unpack(self.unpackdir)
- flst = []
- for root, dirs, files in os.walk(self.unpackdir):
- for f in files:
- flst.append(os.path.relpath(os.path.join(root, f), self.unpackdir))
- flst.sort()
- return flst
-
- def test_local(self):
- tree = self.fetchUnpack(['file://a', 'file://dir/c'])
- self.assertEqual(tree, ['a', 'dir/c'])
-
- def test_local_wildcard(self):
- tree = self.fetchUnpack(['file://a', 'file://dir/*'])
- self.assertEqual(tree, ['a', 'dir/c', 'dir/d', 'dir/subdir/e'])
-
- def test_local_dir(self):
- tree = self.fetchUnpack(['file://a', 'file://dir'])
- self.assertEqual(tree, ['a', 'dir/c', 'dir/d', 'dir/subdir/e'])
-
- def test_local_subdir(self):
- tree = self.fetchUnpack(['file://dir/subdir'])
- self.assertEqual(tree, ['dir/subdir/e'])
-
- def test_local_subdir_file(self):
- tree = self.fetchUnpack(['file://dir/subdir/e'])
- self.assertEqual(tree, ['dir/subdir/e'])
-
- def test_local_subdirparam(self):
- tree = self.fetchUnpack(['file://a;subdir=bar', 'file://dir;subdir=foo/moo'])
- self.assertEqual(tree, ['bar/a', 'foo/moo/dir/c', 'foo/moo/dir/d', 'foo/moo/dir/subdir/e'])
-
- def test_local_deepsubdirparam(self):
- tree = self.fetchUnpack(['file://dir/subdir/e;subdir=bar'])
- self.assertEqual(tree, ['bar/dir/subdir/e'])
-
-class FetcherNetworkTest(FetcherTest):
-
- if os.environ.get("BB_SKIP_NETTESTS") == "yes":
- print("Unset BB_SKIP_NETTESTS to run network tests")
- else:
- def test_fetch(self):
- fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
- fetcher.download()
- self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
- self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.1.tar.gz"), 57892)
- self.d.setVar("BB_NO_NETWORK", "1")
- fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
- fetcher.download()
- fetcher.unpack(self.unpackdir)
- self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.0/")), 9)
- self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.1/")), 9)
-
- def test_fetch_mirror(self):
- self.d.setVar("MIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
- fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
- fetcher.download()
- self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
-
- def test_fetch_mirror_of_mirror(self):
- self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ \n http://invalid2.yoctoproject.org/.* http://downloads.yoctoproject.org/releases/bitbake")
- fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
- fetcher.download()
- self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
-
- def test_fetch_file_mirror_of_mirror(self):
- self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ \n file:///some1where/.* file://some2where/ \n file://some2where/.* http://downloads.yoctoproject.org/releases/bitbake")
- fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
- os.mkdir(self.dldir + "/some2where")
- fetcher.download()
- self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
-
- def test_fetch_premirror(self):
- self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
- fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
- fetcher.download()
- self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
-
- def gitfetcher(self, url1, url2):
- def checkrevision(self, fetcher):
- fetcher.unpack(self.unpackdir)
- revision = bb.process.run("git rev-parse HEAD", shell=True, cwd=self.unpackdir + "/git")[0].strip()
- self.assertEqual(revision, "270a05b0b4ba0959fe0624d2a4885d7b70426da5")
-
- self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1")
- self.d.setVar("SRCREV", "270a05b0b4ba0959fe0624d2a4885d7b70426da5")
- fetcher = bb.fetch.Fetch([url1], self.d)
- fetcher.download()
- checkrevision(self, fetcher)
- # Wipe out the dldir clone and the unpacked source, turn off the network and check mirror tarball works
- bb.utils.prunedir(self.dldir + "/git2/")
- bb.utils.prunedir(self.unpackdir)
- self.d.setVar("BB_NO_NETWORK", "1")
- fetcher = bb.fetch.Fetch([url2], self.d)
- fetcher.download()
- checkrevision(self, fetcher)
-
- def test_gitfetch(self):
- url1 = url2 = "git://git.openembedded.org/bitbake"
- self.gitfetcher(url1, url2)
-
- def test_gitfetch_goodsrcrev(self):
- # SRCREV is set and matches the rev= parameter
- url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5"
- self.gitfetcher(url1, url2)
-
- def test_gitfetch_badsrcrev(self):
- # SRCREV is set but does not match the rev= parameter
- url1 = url2 = "git://git.openembedded.org/bitbake;rev=dead05b0b4ba0959fe0624d2a4885d7b70426da5"
- self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
-
- def test_gitfetch_tagandrev(self):
- # Specifying both rev= and tag= in the URL is not allowed
- url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5"
- self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
-
- def test_gitfetch_premirror(self):
- url1 = "git://git.openembedded.org/bitbake"
- url2 = "git://someserver.org/bitbake"
- self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n")
- self.gitfetcher(url1, url2)
-
- def test_gitfetch_premirror2(self):
- url1 = url2 = "git://someserver.org/bitbake"
- self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n")
- self.gitfetcher(url1, url2)
-
- def test_gitfetch_premirror3(self):
- realurl = "git://git.openembedded.org/bitbake"
- dummyurl = "git://someserver.org/bitbake"
- self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git")
- os.chdir(self.tempdir)
- bb.process.run("git clone %s %s 2> /dev/null" % (realurl, self.sourcedir), shell=True)
- self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file \n" % (dummyurl, self.sourcedir))
- self.gitfetcher(dummyurl, dummyurl)
-
- def test_git_submodule(self):
- fetcher = bb.fetch.Fetch(["gitsm://git.yoctoproject.org/git-submodule-test;rev=f12e57f2edf0aa534cf1616fa983d165a92b0842"], self.d)
- fetcher.download()
- # Previous cwd has been deleted
- os.chdir(os.path.dirname(self.unpackdir))
- fetcher.unpack(self.unpackdir)
-
-
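-# BB_ALLOWED_NETWORKS is a whitespace-separated list of hosts the fetcher
-# may contact. As the tests below exercise, host matching ignores case,
-# ignores any port in the URL, and a leading "*." matches any prefix of
-# the host name. An illustrative setting (example host names only):
-#
-#   BB_ALLOWED_NETWORKS = "*.someserver.org git.example.com"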
-class TrustedNetworksTest(FetcherTest):
- def test_trusted_network(self):
- # Ensure trusted_network returns True when the host IS in the list.
- url = "git://Someserver.org/foo;rev=1"
- self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org someserver.org server2.org server3.org")
- self.assertTrue(bb.fetch.trusted_network(self.d, url))
-
- def test_wild_trusted_network(self):
- # Ensure trusted_network returns True when the *.host IS in the list.
- url = "git://Someserver.org/foo;rev=1"
- self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
- self.assertTrue(bb.fetch.trusted_network(self.d, url))
-
- def test_prefix_wild_trusted_network(self):
- # Ensure trusted_network returns True when the prefix matches *.host.
- url = "git://git.Someserver.org/foo;rev=1"
- self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
- self.assertTrue(bb.fetch.trusted_network(self.d, url))
-
- def test_two_prefix_wild_trusted_network(self):
- # Ensure trusted_network returns True when a multi-level prefix matches *.host.
- url = "git://something.git.Someserver.org/foo;rev=1"
- self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
- self.assertTrue(bb.fetch.trusted_network(self.d, url))
-
- def test_port_trusted_network(self):
- # Ensure trusted_network returns True, even if the url specifies a port.
- url = "git://someserver.org:8080/foo;rev=1"
- self.d.setVar("BB_ALLOWED_NETWORKS", "someserver.org")
- self.assertTrue(bb.fetch.trusted_network(self.d, url))
-
- def test_untrusted_network(self):
- # Ensure trusted_network returns False when the host is NOT in the list.
- url = "git://someserver.org/foo;rev=1"
- self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
- self.assertFalse(bb.fetch.trusted_network(self.d, url))
-
- def test_wild_untrusted_network(self):
- # Ensure trusted_network returns False when the host is NOT in the list.
- url = "git://*.someserver.org/foo;rev=1"
- self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
- self.assertFalse(bb.fetch.trusted_network(self.d, url))
-
-class URLHandle(unittest.TestCase):
-
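- # Each value is the (scheme, host, path, user, password, parameters)
- # tuple that bb.fetch.decodeurl produces and encodeurl consumes; the
- # tests below round-trip every entry.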
- datatable = {
- "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
- "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
- "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'}),
- "git://git.openembedded.org/bitbake;branch=@foo" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo'}),
- "file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}),
- }
- # we require a pathname to encodeurl but users can still pass such urls to
- # decodeurl and we need to handle them
- decodedata = datatable.copy()
- decodedata.update({
- "http://somesite.net;someparam=1": ('http', 'somesite.net', '', '', '', {'someparam': '1'}),
- })
-
- def test_decodeurl(self):
- for k, v in self.decodedata.items():
- result = bb.fetch.decodeurl(k)
- self.assertEqual(result, v)
-
- def test_encodeurl(self):
- for k, v in self.datatable.items():
- result = bb.fetch.encodeurl(v)
- self.assertEqual(result, k)
-
-class FetchLatestVersionTest(FetcherTest):
-
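- # Each key below is (PN, URI, SRCREV, UPSTREAM_CHECK_GITTAGREGEX),
- # matching the variables that test_git_latest_versionstring sets up;
- # the value is the lowest upstream version the check may report.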
- test_git_uris = {
- # version pattern "X.Y.Z"
- ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
- : "1.99.4",
- # version pattern "vX.Y"
- ("mtd-utils", "git://git.infradead.org/mtd-utils.git", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "")
- : "1.5.0",
- # version pattern "pkg_name-X.Y"
- ("presentproto", "git://anongit.freedesktop.org/git/xorg/proto/presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
- : "1.0",
- # version pattern "pkg_name-vX.Y.Z"
- ("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
- : "1.4.0",
- # combination version pattern
- ("sysprof", "git://git.gnome.org/sysprof", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
- : "1.2.0",
- ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "")
- : "2014.01",
- # version pattern "yyyymmdd"
- ("mobile-broadband-provider-info", "git://git.gnome.org/mobile-broadband-provider-info", "4ed19e11c2975105b71b956440acdb25d46a347d", "")
- : "20120614",
- # packages with a valid UPSTREAM_CHECK_GITTAGREGEX
- ("xf86-video-omap", "git://anongit.freedesktop.org/xorg/driver/xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
- : "0.4.3",
- ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))")
- : "11.0.0",
- ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
- : "1.3.59",
- ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
- : "3.82+dbg0.9",
- }
-
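- # Each key below is (PN, URI, UPSTREAM_CHECK_URI, UPSTREAM_CHECK_REGEX),
- # matching test_wget_latest_versionstring; again the value is a lower
- # bound on the version the upstream check is expected to find.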
- test_wget_uris = {
- # packages with versions inside directory name
- ("util-linux", "http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2", "", "")
- : "2.24.2",
- ("enchant", "http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz", "", "")
- : "1.6.0",
- ("cmake", "http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz", "", "")
- : "2.8.12.1",
- # packages with versions only in current directory
- ("eglic", "http://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "")
- : "2.19",
- ("gnu-config", "http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "")
- : "20120814",
- # packages with "99" in the name of possible version
- ("pulseaudio", "http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz", "", "")
- : "5.0",
- ("xserver-xorg", "http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2", "", "")
- : "1.15.1",
- # packages with valid UPSTREAM_CHECK_URI and UPSTREAM_CHECK_REGEX
- ("cups", "http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2", "https://github.com/apple/cups/releases", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
- : "2.0.0",
- ("db", "http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz", "http://www.oracle.com/technetwork/products/berkeleydb/downloads/index-082944.html", "http://download.oracle.com/otn/berkeley-db/(?P<name>db-)(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz")
- : "6.1.19",
- }
- if os.environ.get("BB_SKIP_NETTESTS") == "yes":
- print("Unset BB_SKIP_NETTESTS to run network tests")
- else:
- def test_git_latest_versionstring(self):
- for k, v in self.test_git_uris.items():
- self.d.setVar("PN", k[0])
- self.d.setVar("SRCREV", k[2])
- self.d.setVar("UPSTREAM_CHECK_GITTAGREGEX", k[3])
- ud = bb.fetch2.FetchData(k[1], self.d)
- pupver = ud.method.latest_versionstring(ud, self.d)
- verstring = pupver[0]
- r = bb.utils.vercmp_string(v, verstring)
- self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
-
- def test_wget_latest_versionstring(self):
- for k, v in self.test_wget_uris.items():
- self.d.setVar("PN", k[0])
- self.d.setVar("UPSTREAM_CHECK_URI", k[2])
- self.d.setVar("UPSTREAM_CHECK_REGEX", k[3])
- ud = bb.fetch2.FetchData(k[1], self.d)
- pupver = ud.method.latest_versionstring(ud, self.d)
- verstring = pupver[0]
- r = bb.utils.vercmp_string(v, verstring)
- self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
-
-
-class FetchCheckStatusTest(FetcherTest):
- test_wget_uris = ["http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2",
- "http://www.cups.org/software/ipptool/ipptool-20130731-linux-ubuntu-i686.tar.gz",
- "http://www.cups.org/",
- "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
- "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
- "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
- "https://yoctoproject.org/",
- "https://yoctoproject.org/documentation",
- "http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
- "http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
- "ftp://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
- "ftp://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
- "ftp://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz",
- # GitHub releases are hosted on Amazon S3, which doesn't support HEAD
- "https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz"
- ]
-
- if os.environ.get("BB_SKIP_NETTESTS") == "yes":
- print("Unset BB_SKIP_NETTESTS to run network tests")
- else:
-
- def test_wget_checkstatus(self):
- fetch = bb.fetch2.Fetch(self.test_wget_uris, self.d)
- for u in self.test_wget_uris:
- ud = fetch.ud[u]
- m = ud.method
- ret = m.checkstatus(fetch, ud, self.d)
- self.assertTrue(ret, msg="URI %s, can't check status" % (u))
-
-
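- # Same checks as above, but sharing a FetchConnectionCache so
- # successive checkstatus() calls can reuse connections; the cache
- # must be closed explicitly afterwards.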
- def test_wget_checkstatus_connection_cache(self):
- from bb.fetch2 import FetchConnectionCache
-
- connection_cache = FetchConnectionCache()
- fetch = bb.fetch2.Fetch(self.test_wget_uris, self.d,
- connection_cache = connection_cache)
-
- for u in self.test_wget_uris:
- ud = fetch.ud[u]
- m = ud.method
- ret = m.checkstatus(fetch, ud, self.d)
- self.assertTrue(ret, msg="URI %s, can't check status" % (u))
-
- connection_cache.close_connections()
diff --git a/yocto-poky/bitbake/lib/bb/tests/parse.py b/yocto-poky/bitbake/lib/bb/tests/parse.py
deleted file mode 100644
index 6beb76a48..000000000
--- a/yocto-poky/bitbake/lib/bb/tests/parse.py
+++ /dev/null
@@ -1,147 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake Test for lib/bb/parse/
-#
-# Copyright (C) 2015 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-
-import unittest
-import tempfile
-import logging
-import bb
-import os
-
-logger = logging.getLogger('BitBake.TestParse')
-
-import bb.parse
-import bb.data
-import bb.siggen
-
-class ParseTest(unittest.TestCase):
-
- testfile = """
-A = "1"
-B = "2"
-do_install() {
- echo "hello"
-}
-
-C = "3"
-"""
-
- def setUp(self):
- self.d = bb.data.init()
- bb.parse.siggen = bb.siggen.init(self.d)
-
- def parsehelper(self, content, suffix = ".bb"):
-
- f = tempfile.NamedTemporaryFile(suffix = suffix)
- f.write(content)
- f.flush()
- os.chdir(os.path.dirname(f.name))
- return f
-
- def test_parse_simple(self):
- f = self.parsehelper(self.testfile)
- d = bb.parse.handle(f.name, self.d)['']
- self.assertEqual(d.getVar("A", True), "1")
- self.assertEqual(d.getVar("B", True), "2")
- self.assertEqual(d.getVar("C", True), "3")
-
- def test_parse_incomplete_function(self):
- testfileB = self.testfile.replace("}", "")
- f = self.parsehelper(testfileB)
- with self.assertRaises(bb.parse.ParseError):
- d = bb.parse.handle(f.name, self.d)['']
-
- overridetest = """
-RRECOMMENDS_${PN} = "a"
-RRECOMMENDS_${PN}_libc = "b"
-OVERRIDES = "libc:${PN}"
-PN = "gtk+"
-"""
-
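- # VAR_<override> assignments take effect when <override> appears in
- # OVERRIDES; the test checks this both before and after
- # bb.data.expandKeys() expands the ${PN} inside the variable names.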
- def test_parse_overrides(self):
- f = self.parsehelper(self.overridetest)
- d = bb.parse.handle(f.name, self.d)['']
- self.assertEqual(d.getVar("RRECOMMENDS", True), "b")
- bb.data.expandKeys(d)
- self.assertEqual(d.getVar("RRECOMMENDS", True), "b")
- d.setVar("RRECOMMENDS_gtk+", "c")
- self.assertEqual(d.getVar("RRECOMMENDS", True), "c")
-
- overridetest2 = """
-EXTRA_OECONF = ""
-EXTRA_OECONF_class-target = "b"
-EXTRA_OECONF_append = " c"
-"""
-
- def test_parse_overrides2(self):
- f = self.parsehelper(self.overridetest2)
- d = bb.parse.handle(f.name, self.d)['']
- d.appendVar("EXTRA_OECONF", " d")
- d.setVar("OVERRIDES", "class-target")
- self.assertEqual(d.getVar("EXTRA_OECONF", True), "b c d")
-
- overridetest3 = """
-DESCRIPTION = "A"
-DESCRIPTION_${PN}-dev = "${DESCRIPTION} B"
-PN = "bc"
-"""
-
- def test_parse_combinations(self):
- f = self.parsehelper(self.overridetest3)
- d = bb.parse.handle(f.name, self.d)['']
- bb.data.expandKeys(d)
- self.assertEqual(d.getVar("DESCRIPTION_bc-dev", True), "A B")
- d.setVar("DESCRIPTION", "E")
- d.setVar("DESCRIPTION_bc-dev", "C D")
- d.setVar("OVERRIDES", "bc-dev")
- self.assertEqual(d.getVar("DESCRIPTION", True), "C D")
-
-
- classextend = """
-VAR_var_override1 = "B"
-EXTRA = ":override1"
-OVERRIDES = "nothing${EXTRA}"
-
-BBCLASSEXTEND = "###CLASS###"
-"""
- classextend_bbclass = """
-EXTRA = ""
-python () {
- d.renameVar("VAR_var", "VAR_var2")
-}
-"""
-
- #
- # Test based upon a real world data corruption issue. One
- # data store changing a variable poked through into a different data
- # store. This test case replicates that issue where the value 'B' would
- # become unset/disappear.
- #
- def test_parse_classextend_contamination(self):
- cls = self.parsehelper(self.classextend_bbclass, suffix=".bbclass")
- #clsname = os.path.basename(cls.name).replace(".bbclass", "")
- self.classextend = self.classextend.replace("###CLASS###", cls.name)
- f = self.parsehelper(self.classextend)
- alldata = bb.parse.handle(f.name, self.d)
- d1 = alldata['']
- d2 = alldata[cls.name]
- self.assertEqual(d1.getVar("VAR_var", True), "B")
- self.assertEqual(d2.getVar("VAR_var", True), None)
-
diff --git a/yocto-poky/bitbake/lib/bb/tests/utils.py b/yocto-poky/bitbake/lib/bb/tests/utils.py
deleted file mode 100644
index 2f4ccf3c6..000000000
--- a/yocto-poky/bitbake/lib/bb/tests/utils.py
+++ /dev/null
@@ -1,603 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake Tests for utils.py
-#
-# Copyright (C) 2012 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-
-import unittest
-import bb
-import os
-import tempfile
-import re
-
-class VerCmpString(unittest.TestCase):
-
- def test_vercmpstring(self):
- result = bb.utils.vercmp_string('1', '2')
- self.assertTrue(result < 0)
- result = bb.utils.vercmp_string('2', '1')
- self.assertTrue(result > 0)
- result = bb.utils.vercmp_string('1', '1.0')
- self.assertTrue(result < 0)
- result = bb.utils.vercmp_string('1', '1.1')
- self.assertTrue(result < 0)
- result = bb.utils.vercmp_string('1.1', '1_p2')
- self.assertTrue(result < 0)
- result = bb.utils.vercmp_string('1.0', '1.0+1.1-beta1')
- self.assertTrue(result < 0)
- result = bb.utils.vercmp_string('1.1', '1.0+1.1-beta1')
- self.assertTrue(result > 0)
-
- def test_explode_dep_versions(self):
- correctresult = {"foo" : ["= 1.10"]}
- result = bb.utils.explode_dep_versions2("foo (= 1.10)")
- self.assertEqual(result, correctresult)
- result = bb.utils.explode_dep_versions2("foo (=1.10)")
- self.assertEqual(result, correctresult)
- result = bb.utils.explode_dep_versions2("foo ( = 1.10)")
- self.assertEqual(result, correctresult)
- result = bb.utils.explode_dep_versions2("foo ( =1.10)")
- self.assertEqual(result, correctresult)
- result = bb.utils.explode_dep_versions2("foo ( = 1.10 )")
- self.assertEqual(result, correctresult)
- result = bb.utils.explode_dep_versions2("foo ( =1.10 )")
- self.assertEqual(result, correctresult)
-
- def test_vercmp_string_op(self):
- compareops = [('1', '1', '=', True),
- ('1', '1', '==', True),
- ('1', '1', '!=', False),
- ('1', '1', '>', False),
- ('1', '1', '<', False),
- ('1', '1', '>=', True),
- ('1', '1', '<=', True),
- ('1', '0', '=', False),
- ('1', '0', '==', False),
- ('1', '0', '!=', True),
- ('1', '0', '>', True),
- ('1', '0', '<', False),
- ('1', '0', '>>', True),
- ('1', '0', '<<', False),
- ('1', '0', '>=', True),
- ('1', '0', '<=', False),
- ('0', '1', '=', False),
- ('0', '1', '==', False),
- ('0', '1', '!=', True),
- ('0', '1', '>', False),
- ('0', '1', '<', True),
- ('0', '1', '>>', False),
- ('0', '1', '<<', True),
- ('0', '1', '>=', False),
- ('0', '1', '<=', True)]
-
- for arg1, arg2, op, correctresult in compareops:
- result = bb.utils.vercmp_string_op(arg1, arg2, op)
- self.assertEqual(result, correctresult, 'vercmp_string_op("%s", "%s", "%s") != %s' % (arg1, arg2, op, correctresult))
-
- # Check that clearly invalid operator raises an exception
- self.assertRaises(bb.utils.VersionStringException, bb.utils.vercmp_string_op, '0', '0', '$')
-
-
-class Path(unittest.TestCase):
- def test_unsafe_delete_path(self):
- checkitems = [('/', True),
- ('//', True),
- ('///', True),
- (os.getcwd().count(os.sep) * ('..' + os.sep), True),
- (os.environ.get('HOME', '/home/test'), True),
- ('/home/someone', True),
- ('/home/other/', True),
- ('/home/other/subdir', False),
- ('', False)]
- for arg1, correctresult in checkitems:
- result = bb.utils._check_unsafe_delete_path(arg1)
- self.assertEqual(result, correctresult, '_check_unsafe_delete_path("%s") != %s' % (arg1, correctresult))
-
-
-class EditMetadataFile(unittest.TestCase):
- _origfile = """
-# A comment
-HELLO = "oldvalue"
-
-THIS = "that"
-
-# Another comment
-NOCHANGE = "samevalue"
-OTHER = 'anothervalue'
-
-MULTILINE = "a1 \\
- a2 \\
- a3"
-
-MULTILINE2 := " \\
- b1 \\
- b2 \\
- b3 \\
- "
-
-
-MULTILINE3 = " \\
- c1 \\
- c2 \\
- c3 \\
-"
-
-do_functionname() {
- command1 ${VAL1} ${VAL2}
- command2 ${VAL3} ${VAL4}
-}
-"""
- def _testeditfile(self, varvalues, compareto, dummyvars=None):
- if dummyvars is None:
- dummyvars = []
- with tempfile.NamedTemporaryFile('w', delete=False) as tf:
- tf.write(self._origfile)
- tf.close()
- try:
- varcalls = []
- def handle_file(varname, origvalue, op, newlines):
- self.assertIn(varname, varvalues, 'Callback called for variable %s not in the list!' % varname)
- self.assertNotIn(varname, dummyvars, 'Callback called for variable %s in dummy list!' % varname)
- varcalls.append(varname)
- return varvalues[varname]
-
- bb.utils.edit_metadata_file(tf.name, varvalues.keys(), handle_file)
- with open(tf.name) as f:
- modfile = f.readlines()
- # Ensure the output matches the expected output
- self.assertEqual(compareto.splitlines(True), modfile)
- # Ensure the callback function was called for every variable we asked for
- # (plus allow testing behaviour when a requested variable is not present)
- self.assertEqual(sorted(varvalues.keys()), sorted(varcalls + dummyvars))
- finally:
- os.remove(tf.name)
-
-
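- # The tuples in varvalues follow the four-element form used by the
- # bb.utils.edit_metadata() callback protocol; judging by the cases
- # below, the fields are (new value, operation, indentation,
- # single-line flag).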
- def test_edit_metadata_file_nochange(self):
- # Test file doesn't get modified with nothing to do
- self._testeditfile({}, self._origfile)
- # Test file doesn't get modified with only dummy variables
- self._testeditfile({'DUMMY1': ('should_not_set', None, 0, True),
- 'DUMMY2': ('should_not_set_again', None, 0, True)}, self._origfile, dummyvars=['DUMMY1', 'DUMMY2'])
- # Test file doesn't get modified with some of the same values
- self._testeditfile({'THIS': ('that', None, 0, True),
- 'OTHER': ('anothervalue', None, 0, True),
- 'MULTILINE3': (' c1 c2 c3 ', None, 4, False)}, self._origfile)
-
- def test_edit_metadata_file_1(self):
-
- newfile1 = """
-# A comment
-HELLO = "newvalue"
-
-THIS = "that"
-
-# Another comment
-NOCHANGE = "samevalue"
-OTHER = 'anothervalue'
-
-MULTILINE = "a1 \\
- a2 \\
- a3"
-
-MULTILINE2 := " \\
- b1 \\
- b2 \\
- b3 \\
- "
-
-
-MULTILINE3 = " \\
- c1 \\
- c2 \\
- c3 \\
-"
-
-do_functionname() {
- command1 ${VAL1} ${VAL2}
- command2 ${VAL3} ${VAL4}
-}
-"""
- self._testeditfile({'HELLO': ('newvalue', None, 4, True)}, newfile1)
-
-
- def test_edit_metadata_file_2(self):
-
- newfile2 = """
-# A comment
-HELLO = "oldvalue"
-
-THIS = "that"
-
-# Another comment
-NOCHANGE = "samevalue"
-OTHER = 'anothervalue'
-
-MULTILINE = " \\
- d1 \\
- d2 \\
- d3 \\
- "
-
-MULTILINE2 := " \\
- b1 \\
- b2 \\
- b3 \\
- "
-
-
-MULTILINE3 = "nowsingle"
-
-do_functionname() {
- command1 ${VAL1} ${VAL2}
- command2 ${VAL3} ${VAL4}
-}
-"""
- self._testeditfile({'MULTILINE': (['d1','d2','d3'], None, 4, False),
- 'MULTILINE3': ('nowsingle', None, 4, True),
- 'NOTPRESENT': (['a', 'b'], None, 4, False)}, newfile2, dummyvars=['NOTPRESENT'])
-
-
- def test_edit_metadata_file_3(self):
-
- newfile3 = """
-# A comment
-HELLO = "oldvalue"
-
-# Another comment
-NOCHANGE = "samevalue"
-OTHER = "yetanothervalue"
-
-MULTILINE = "e1 \\
- e2 \\
- e3 \\
- "
-
-MULTILINE2 := "f1 \\
-\tf2 \\
-\t"
-
-
-MULTILINE3 = " \\
- c1 \\
- c2 \\
- c3 \\
-"
-
-do_functionname() {
- othercommand_one a b c
- othercommand_two d e f
-}
-"""
-
- self._testeditfile({'do_functionname()': (['othercommand_one a b c', 'othercommand_two d e f'], None, 4, False),
- 'MULTILINE2': (['f1', 'f2'], None, '\t', True),
- 'MULTILINE': (['e1', 'e2', 'e3'], None, -1, True),
- 'THIS': (None, None, 0, False),
- 'OTHER': ('yetanothervalue', None, 0, True)}, newfile3)
-
-
- def test_edit_metadata_file_4(self):
-
- newfile4 = """
-# A comment
-HELLO = "oldvalue"
-
-THIS = "that"
-
-# Another comment
-OTHER = 'anothervalue'
-
-MULTILINE = "a1 \\
- a2 \\
- a3"
-
-MULTILINE2 := " \\
- b1 \\
- b2 \\
- b3 \\
- "
-
-
-"""
-
- self._testeditfile({'NOCHANGE': (None, None, 0, False),
- 'MULTILINE3': (None, None, 0, False),
- 'THIS': ('that', None, 0, False),
- 'do_functionname()': (None, None, 0, False)}, newfile4)
-
-
- def test_edit_metadata(self):
- newfile5 = """
-# A comment
-HELLO = "hithere"
-
-# A new comment
-THIS += "that"
-
-# Another comment
-NOCHANGE = "samevalue"
-OTHER = 'anothervalue'
-
-MULTILINE = "a1 \\
- a2 \\
- a3"
-
-MULTILINE2 := " \\
- b1 \\
- b2 \\
- b3 \\
- "
-
-
-MULTILINE3 = " \\
- c1 \\
- c2 \\
- c3 \\
-"
-
-NEWVAR = "value"
-
-do_functionname() {
- command1 ${VAL1} ${VAL2}
- command2 ${VAL3} ${VAL4}
-}
-"""
-
-
- def handle_var(varname, origvalue, op, newlines):
- if varname == 'THIS':
- newlines.append('# A new comment\n')
- elif varname == 'do_functionname()':
- newlines.append('NEWVAR = "value"\n')
- newlines.append('\n')
- valueitem = varvalues.get(varname, None)
- if valueitem:
- return valueitem
- else:
- return (origvalue, op, 0, True)
-
- varvalues = {'HELLO': ('hithere', None, 0, True), 'THIS': ('that', '+=', 0, True)}
- varlist = ['HELLO', 'THIS', 'do_functionname()']
- (updated, newlines) = bb.utils.edit_metadata(self._origfile.splitlines(True), varlist, handle_var)
- self.assertTrue(updated, 'List should be updated but isn\'t')
- self.assertEqual(newlines, newfile5.splitlines(True))
-
- # Make sure the orig value matches what we expect it to be
- def test_edit_metadata_origvalue(self):
- origfile = """
-MULTILINE = " stuff \\
- morestuff"
-"""
- expected_value = "stuff morestuff"
- global value_in_callback
- value_in_callback = ""
-
- def handle_var(varname, origvalue, op, newlines):
- global value_in_callback
- value_in_callback = origvalue
- return (origvalue, op, -1, False)
-
- bb.utils.edit_metadata(origfile.splitlines(True),
- ['MULTILINE'],
- handle_var)
-
- testvalue = re.sub('\s+', ' ', value_in_callback.strip())
- self.assertEqual(expected_value, testvalue)
-
-class EditBbLayersConf(unittest.TestCase):
-
- def _test_bblayers_edit(self, before, after, add, remove, notadded, notremoved):
- with tempfile.NamedTemporaryFile('w', delete=False) as tf:
- tf.write(before)
- tf.close()
- try:
- actual_notadded, actual_notremoved = bb.utils.edit_bblayers_conf(tf.name, add, remove)
- with open(tf.name) as f:
- actual_after = f.readlines()
- self.assertEqual(after.splitlines(True), actual_after)
- self.assertEqual(notadded, actual_notadded)
- self.assertEqual(notremoved, actual_notremoved)
- finally:
- os.remove(tf.name)
-
-
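- # bb.utils.edit_bblayers_conf() accepts a single path or a list for
- # both add and remove, tolerates trailing slashes, and reports back
- # the layers it could not add (already present) or could not remove
- # (not listed); the cases below check that via notadded/notremoved.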
- def test_bblayers_remove(self):
- before = r"""
-# A comment
-
-BBPATH = "${TOPDIR}"
-BBFILES ?= ""
-BBLAYERS = " \
- /home/user/path/layer1 \
- /home/user/path/layer2 \
- /home/user/path/subpath/layer3 \
- /home/user/path/layer4 \
- "
-"""
- after = r"""
-# A comment
-
-BBPATH = "${TOPDIR}"
-BBFILES ?= ""
-BBLAYERS = " \
- /home/user/path/layer1 \
- /home/user/path/subpath/layer3 \
- /home/user/path/layer4 \
- "
-"""
- self._test_bblayers_edit(before, after,
- None,
- '/home/user/path/layer2',
- [],
- [])
-
-
- def test_bblayers_add(self):
- before = r"""
-# A comment
-
-BBPATH = "${TOPDIR}"
-BBFILES ?= ""
-BBLAYERS = " \
- /home/user/path/layer1 \
- /home/user/path/layer2 \
- /home/user/path/subpath/layer3 \
- /home/user/path/layer4 \
- "
-"""
- after = r"""
-# A comment
-
-BBPATH = "${TOPDIR}"
-BBFILES ?= ""
-BBLAYERS = " \
- /home/user/path/layer1 \
- /home/user/path/layer2 \
- /home/user/path/subpath/layer3 \
- /home/user/path/layer4 \
- /other/path/to/layer5 \
- "
-"""
- self._test_bblayers_edit(before, after,
- '/other/path/to/layer5/',
- None,
- [],
- [])
-
-
- def test_bblayers_add_remove(self):
- before = r"""
-# A comment
-
-BBPATH = "${TOPDIR}"
-BBFILES ?= ""
-BBLAYERS = " \
- /home/user/path/layer1 \
- /home/user/path/layer2 \
- /home/user/path/subpath/layer3 \
- /home/user/path/layer4 \
- "
-"""
- after = r"""
-# A comment
-
-BBPATH = "${TOPDIR}"
-BBFILES ?= ""
-BBLAYERS = " \
- /home/user/path/layer1 \
- /home/user/path/layer2 \
- /home/user/path/layer4 \
- /other/path/to/layer5 \
- "
-"""
- self._test_bblayers_edit(before, after,
- ['/other/path/to/layer5', '/home/user/path/layer2/'], '/home/user/path/subpath/layer3/',
- ['/home/user/path/layer2'],
- [])
-
-
- def test_bblayers_add_remove_home(self):
- before = r"""
-# A comment
-
-BBPATH = "${TOPDIR}"
-BBFILES ?= ""
-BBLAYERS = " \
- ~/path/layer1 \
- ~/path/layer2 \
- ~/otherpath/layer3 \
- ~/path/layer4 \
- "
-"""
- after = r"""
-# A comment
-
-BBPATH = "${TOPDIR}"
-BBFILES ?= ""
-BBLAYERS = " \
- ~/path/layer2 \
- ~/path/layer4 \
- ~/path2/layer5 \
- "
-"""
- self._test_bblayers_edit(before, after,
- [os.environ['HOME'] + '/path/layer4', '~/path2/layer5'],
- [os.environ['HOME'] + '/otherpath/layer3', '~/path/layer1', '~/path/notinlist'],
- [os.environ['HOME'] + '/path/layer4'],
- ['~/path/notinlist'])
-
-
- def test_bblayers_add_remove_plusequals(self):
- before = r"""
-# A comment
-
-BBPATH = "${TOPDIR}"
-BBFILES ?= ""
-BBLAYERS += " \
- /home/user/path/layer1 \
- /home/user/path/layer2 \
- "
-"""
- after = r"""
-# A comment
-
-BBPATH = "${TOPDIR}"
-BBFILES ?= ""
-BBLAYERS += " \
- /home/user/path/layer2 \
- /home/user/path/layer3 \
- "
-"""
- self._test_bblayers_edit(before, after,
- '/home/user/path/layer3',
- '/home/user/path/layer1',
- [],
- [])
-
-
- def test_bblayers_add_remove_plusequals2(self):
- before = r"""
-# A comment
-
-BBPATH = "${TOPDIR}"
-BBFILES ?= ""
-BBLAYERS += " \
- /home/user/path/layer1 \
- /home/user/path/layer2 \
- /home/user/path/layer3 \
- "
-BBLAYERS += "/home/user/path/layer4"
-BBLAYERS += "/home/user/path/layer5"
-"""
- after = r"""
-# A comment
-
-BBPATH = "${TOPDIR}"
-BBFILES ?= ""
-BBLAYERS += " \
- /home/user/path/layer2 \
- /home/user/path/layer3 \
- "
-BBLAYERS += "/home/user/path/layer5"
-BBLAYERS += "/home/user/otherpath/layer6"
-"""
- self._test_bblayers_edit(before, after,
- ['/home/user/otherpath/layer6', '/home/user/path/layer3'], ['/home/user/path/layer1', '/home/user/path/layer4', '/home/user/path/layer7'],
- ['/home/user/path/layer3'],
- ['/home/user/path/layer7'])
diff --git a/yocto-poky/bitbake/lib/bb/tinfoil.py b/yocto-poky/bitbake/lib/bb/tinfoil.py
deleted file mode 100644
index 7aa653f1a..000000000
--- a/yocto-poky/bitbake/lib/bb/tinfoil.py
+++ /dev/null
@@ -1,105 +0,0 @@
-# tinfoil: a simple wrapper around cooker for bitbake-based command-line utilities
-#
-# Copyright (C) 2012 Intel Corporation
-# Copyright (C) 2011 Mentor Graphics Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import logging
-import warnings
-import os
-import sys
-
-import bb.cache
-import bb.cooker
-import bb.providers
-import bb.utils
-from bb.cooker import state, BBCooker, CookerFeatures
-from bb.cookerdata import CookerConfiguration, ConfigParameters
-import bb.fetch2
-
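-# A minimal usage sketch (illustrative only; the MACHINE lookup is just
-# an example):
-#
-#   import bb.tinfoil
-#   tinfoil = bb.tinfoil.Tinfoil()
-#   tinfoil.prepare(config_only=True)
-#   print(tinfoil.config_data.getVar("MACHINE", True))
-#   tinfoil.shutdown()
-#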
-class Tinfoil:
- def __init__(self, output=sys.stdout, tracking=False):
- # Needed to avoid deprecation warnings with python 2.6
- warnings.filterwarnings("ignore", category=DeprecationWarning)
-
- # Set up logging
- self.logger = logging.getLogger('BitBake')
- self._log_hdlr = logging.StreamHandler(output)
- bb.msg.addDefaultlogFilter(self._log_hdlr)
- format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
- if output.isatty():
- format.enable_color()
- self._log_hdlr.setFormatter(format)
- self.logger.addHandler(self._log_hdlr)
-
- self.config = CookerConfiguration()
- configparams = TinfoilConfigParameters(parse_only=True)
- self.config.setConfigParameters(configparams)
- self.config.setServerRegIdleCallback(self.register_idle_function)
- features = []
- if tracking:
- features.append(CookerFeatures.BASEDATASTORE_TRACKING)
- self.cooker = BBCooker(self.config, features)
- self.config_data = self.cooker.data
- bb.providers.logger.setLevel(logging.ERROR)
- self.cooker_data = None
-
- def register_idle_function(self, function, data):
- pass
-
- def parseRecipes(self):
- sys.stderr.write("Parsing recipes..")
- self.logger.setLevel(logging.WARNING)
-
- try:
- while self.cooker.state in (state.initial, state.parsing):
- self.cooker.updateCache()
- except KeyboardInterrupt:
- self.cooker.shutdown()
- self.cooker.updateCache()
- sys.exit(2)
-
- self.logger.setLevel(logging.INFO)
- sys.stderr.write("done.\n")
-
- self.cooker_data = self.cooker.recipecache
-
- def prepare(self, config_only = False):
- if not self.cooker_data:
- if config_only:
- self.cooker.parseConfiguration()
- self.cooker_data = self.cooker.recipecache
- else:
- self.parseRecipes()
-
- def shutdown(self):
- self.cooker.shutdown(force=True)
- self.cooker.post_serve()
- self.cooker.unlockBitbake()
- self.logger.removeHandler(self._log_hdlr)
-
-class TinfoilConfigParameters(ConfigParameters):
-
- def __init__(self, **options):
- self.initial_options = options
- super(TinfoilConfigParameters, self).__init__()
-
- def parseCommandLine(self, argv=sys.argv):
- class DummyOptions:
- def __init__(self, initial_options):
- for key, val in initial_options.items():
- setattr(self, key, val)
-
- return DummyOptions(self.initial_options), None
diff --git a/yocto-poky/bitbake/lib/bb/ui/__init__.py b/yocto-poky/bitbake/lib/bb/ui/__init__.py
deleted file mode 100644
index a4805ed02..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# BitBake UI Implementation
-#
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
diff --git a/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py b/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py
deleted file mode 100644
index 93979054d..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py
+++ /dev/null
@@ -1,1521 +0,0 @@
-#
-# BitBake ToasterUI Implementation
-#
-# Copyright (C) 2013 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import sys
-import bb
-import re
-import os
-
-os.environ["DJANGO_SETTINGS_MODULE"] = "toaster.toastermain.settings"
-
-
-import django
-from django.utils import timezone
-
-
-def _configure_toaster():
- """ Add toaster to sys path for importing modules
- """
- sys.path.append(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'toaster'))
-_configure_toaster()
-
-django.setup()
-
-from orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
-from orm.models import Target_Image_File, BuildArtifact
-from orm.models import Variable, VariableHistory
-from orm.models import Package, Package_File, Target_Installed_Package, Target_File
-from orm.models import Task_Dependency, Package_Dependency
-from orm.models import Recipe_Dependency, Provides
-from orm.models import Project, CustomImagePackage, CustomImageRecipe
-
-from bldcontrol.models import BuildEnvironment, BuildRequest
-
-from bb.msg import BBLogFormatter as formatter
-from django.db import models
-from pprint import pformat
-import logging
-from datetime import datetime, timedelta
-
-from django.db import transaction, connection
-
-# pylint: disable=invalid-name
-# the logger name is standard throughout BitBake
-logger = logging.getLogger("ToasterLogger")
-
-
-class NotExisting(Exception):
- pass
-
-class ORMWrapper(object):
- """ This class creates the dictionaries needed to store information in the database
- following the format defined by the Django models. It is also used to save this
- information in the database.
- """
-
- def __init__(self):
- self.layer_version_objects = []
- self.layer_version_built = []
- self.task_objects = {}
- self.recipe_objects = {}
-
- @staticmethod
- def _build_key(**kwargs):
- key = "0"
- for k in sorted(kwargs.keys()):
- if isinstance(kwargs[k], models.Model):
- key += "-%d" % kwargs[k].id
- else:
- key += "-%s" % str(kwargs[k])
- return key
-
-
- def _cached_get_or_create(self, clazz, **kwargs):
- """ This is a memory-cached get_or_create. We assume that the objects will not be created in the
- database through any other means.
- """
-
- assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"
-
- key = ORMWrapper._build_key(**kwargs)
- dictname = "objects_%s" % clazz.__name__
- if dictname not in vars(self).keys():
- vars(self)[dictname] = {}
-
- created = False
- if key not in vars(self)[dictname].keys():
- vars(self)[dictname][key], created = \
- clazz.objects.get_or_create(**kwargs)
-
- return (vars(self)[dictname][key], created)
-
-
- def _cached_get(self, clazz, **kwargs):
- """ This is a memory-cached get. We assume that the objects will not change in the database between gets.
- """
- assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"
-
- key = ORMWrapper._build_key(**kwargs)
- dictname = "objects_%s" % clazz.__name__
-
- if dictname not in vars(self).keys():
- vars(self)[dictname] = {}
-
- if key not in vars(self)[dictname].keys():
- vars(self)[dictname][key] = clazz.objects.get(**kwargs)
-
- return vars(self)[dictname][key]
-
- def _timestamp_to_datetime(self, secs):
- """
- Convert timestamp in seconds to Python datetime
- """
- return datetime(1970, 1, 1) + timedelta(seconds=secs)
-
- # pylint: disable=no-self-use
- # we disable detection of no self use in functions because the methods actually work on the object
- # even if they don't touch self anywhere
-
- # pylint: disable=bad-continuation
- # we do not follow the python conventions for continuation indentation due to long lines here
-
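- # Throughout this class "brbe" identifies a managed build as the
- # string "<BuildRequest pk>:<BuildEnvironment pk>", which is why it
- # is split on ":" below.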
- def create_build_object(self, build_info, brbe, project_id):
- assert 'machine' in build_info
- assert 'distro' in build_info
- assert 'distro_version' in build_info
- assert 'started_on' in build_info
- assert 'cooker_log_path' in build_info
- assert 'build_name' in build_info
- assert 'bitbake_version' in build_info
-
- prj = None
- buildrequest = None
- if brbe is not None: # this build was triggered by a request from a user
- logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
- br, _ = brbe.split(":")
- buildrequest = BuildRequest.objects.get(pk = br)
- prj = buildrequest.project
-
- elif project_id is not None: # this build was triggered by an external system for a specific project
- prj = Project.objects.get(pk = project_id)
- logger.debug(1, "buildinfohelper: project is %s" % prj)
-
- else: # this build was triggered by a legacy system, or command line interactive mode
- prj = Project.objects.get_or_create_default_project()
- logger.debug(1, "buildinfohelper: project is not specified, defaulting to %s" % prj)
-
-
- if buildrequest is not None:
- build = buildrequest.build
- logger.info("Updating existing build, with %s", build_info)
- build.project = prj
- build.machine=build_info['machine']
- build.distro=build_info['distro']
- build.distro_version=build_info['distro_version']
- build.cooker_log_path=build_info['cooker_log_path']
- build.build_name=build_info['build_name']
- build.bitbake_version=build_info['bitbake_version']
- build.save()
-
- else:
- build = Build.objects.create(
- project = prj,
- machine=build_info['machine'],
- distro=build_info['distro'],
- distro_version=build_info['distro_version'],
- started_on=build_info['started_on'],
- completed_on=build_info['started_on'],
- cooker_log_path=build_info['cooker_log_path'],
- build_name=build_info['build_name'],
- bitbake_version=build_info['bitbake_version'])
-
- logger.debug(1, "buildinfohelper: build is created %s" % build)
-
- if buildrequest is not None:
- buildrequest.build = build
- buildrequest.save()
-
- return build
-
- @staticmethod
- def get_or_create_targets(target_info):
- result = []
- for target in target_info['targets']:
- task = ''
- if ':' in target:
- target, task = target.split(':', 1)
- if task.startswith('do_'):
- task = task[3:]
- if task == 'build':
- task = ''
- obj, created = Target.objects.get_or_create(build=target_info['build'],
- target=target)
- if created:
- obj.is_image = False
- if task:
- obj.task = task
- obj.save()
- result.append(obj)
- return result
-
- def update_build_object(self, build, errors, warnings, taskfailures):
- assert isinstance(build,Build)
- assert isinstance(errors, int)
- assert isinstance(warnings, int)
-
- if build.outcome == Build.CANCELLED:
- return
- try:
- if build.buildrequest.state == BuildRequest.REQ_CANCELLING:
- return
- except AttributeError:
- # We may not have a buildrequest if this is a command line build
- pass
-
- outcome = Build.SUCCEEDED
- if errors or taskfailures:
- outcome = Build.FAILED
-
- build.completed_on = timezone.now()
- build.outcome = outcome
- build.save()
-
- def update_target_set_license_manifest(self, target, license_manifest_path):
- target.license_manifest_path = license_manifest_path
- target.save()
-
- def update_task_object(self, build, task_name, recipe_name, task_stats):
- """
- Find the task for build which matches the recipe and task name
- to be stored
- """
- task_to_update = Task.objects.get(
- build = build,
- task_name = task_name,
- recipe__name = recipe_name
- )
-
- if 'started' in task_stats and 'ended' in task_stats:
- task_to_update.started = self._timestamp_to_datetime(task_stats['started'])
- task_to_update.ended = self._timestamp_to_datetime(task_stats['ended'])
- task_to_update.elapsed_time = (task_stats['ended'] - task_stats['started'])
- task_to_update.cpu_time_user = task_stats.get('cpu_time_user')
- task_to_update.cpu_time_system = task_stats.get('cpu_time_system')
- if 'disk_io_read' in task_stats and 'disk_io_write' in task_stats:
- task_to_update.disk_io_read = task_stats['disk_io_read']
- task_to_update.disk_io_write = task_stats['disk_io_write']
- task_to_update.disk_io = task_stats['disk_io_read'] + task_stats['disk_io_write']
-
- task_to_update.save()
-
- def get_update_task_object(self, task_information, must_exist = False):
- assert 'build' in task_information
- assert 'recipe' in task_information
- assert 'task_name' in task_information
-
- # we use must_exist info for database look-up optimization
- task_object, created = self._cached_get_or_create(Task,
- build=task_information['build'],
- recipe=task_information['recipe'],
- task_name=task_information['task_name']
- )
- if created and must_exist:
- task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
- raise NotExisting("Task object created when expected to exist", task_information)
-
- object_changed = False
- for v in vars(task_object):
- if v in task_information.keys():
- if vars(task_object)[v] != task_information[v]:
- vars(task_object)[v] = task_information[v]
- object_changed = True
-
- # update setscene-related information if the task has a setscene
- if task_object.outcome == Task.OUTCOME_COVERED and 1 == task_object.get_related_setscene().count():
- task_object.outcome = Task.OUTCOME_CACHED
- object_changed = True
-
- outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
- recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
- if outcome_task_setscene == Task.OUTCOME_SUCCESS:
- task_object.sstate_result = Task.SSTATE_RESTORED
- object_changed = True
- elif outcome_task_setscene == Task.OUTCOME_FAILED:
- task_object.sstate_result = Task.SSTATE_FAILED
- object_changed = True
-
- if object_changed:
- task_object.save()
- return task_object
-
-
- def get_update_recipe_object(self, recipe_information, must_exist = False):
- assert 'layer_version' in recipe_information
- assert 'file_path' in recipe_information
- assert 'pathflags' in recipe_information
-
- assert not recipe_information['file_path'].startswith("/") # we should have layer-relative paths at all times
-
-
- def update_recipe_obj(recipe_object):
- object_changed = False
- for v in vars(recipe_object):
- if v in recipe_information.keys():
- object_changed = True
- vars(recipe_object)[v] = recipe_information[v]
-
- if object_changed:
- recipe_object.save()
-
- recipe, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
- file_path=recipe_information['file_path'], pathflags = recipe_information['pathflags'])
-
- update_recipe_obj(recipe)
-
- built_recipe = None
- # Create a copy of the recipe for historical purposes and update it
- for built_layer in self.layer_version_built:
- if built_layer.layer == recipe_information['layer_version'].layer:
- built_recipe, c = self._cached_get_or_create(Recipe,
- layer_version=built_layer,
- file_path=recipe_information['file_path'],
- pathflags = recipe_information['pathflags'])
- update_recipe_obj(built_recipe)
- break
-
-
- # If we're in analysis mode or if this is a custom recipe
- # then we are wholly responsible for the data
- # and therefore we return the 'real' recipe rather than the build
- # history copy of the recipe.
- if recipe_information['layer_version'].build is not None and \
- recipe_information['layer_version'].build.project == \
- Project.objects.get_or_create_default_project():
- return recipe
-
- if built_recipe is None:
- return recipe
-
- return built_recipe
-
- def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
- if isinstance(layer_obj, Layer_Version):
- # Special case the toaster-custom-images layer which is created
- # on the fly so don't update the values which may cause the layer
- # to be duplicated on a future get_or_create
- if layer_obj.layer.name == CustomImageRecipe.LAYER_NAME:
- return layer_obj
- # We already found our layer version for this build so just
- # update it with the new build information
- logger.debug("We found our layer from toaster")
- layer_obj.local_path = layer_version_information['local_path']
- layer_obj.save()
- self.layer_version_objects.append(layer_obj)
-
- # create a new copy of this layer version as a snapshot for
- # historical purposes
- layer_copy, c = Layer_Version.objects.get_or_create(
- build=build_obj,
- layer=layer_obj.layer,
- up_branch=layer_obj.up_branch,
- branch=layer_version_information['branch'],
- commit=layer_version_information['commit'],
- local_path=layer_version_information['local_path'],
- )
-
- logger.info("created new historical layer version %d",
- layer_copy.pk)
-
- self.layer_version_built.append(layer_copy)
-
- return layer_obj
-
- assert isinstance(build_obj, Build)
- assert isinstance(layer_obj, Layer)
- assert 'branch' in layer_version_information
- assert 'commit' in layer_version_information
- assert 'priority' in layer_version_information
- assert 'local_path' in layer_version_information
-
- # If we're doing a command line build then associate this new layer with the
- # project to avoid it 'contaminating' toaster data
- project = None
- if build_obj.project == Project.objects.get_or_create_default_project():
- project = build_obj.project
-
- layer_version_object, _ = Layer_Version.objects.get_or_create(
- build = build_obj,
- layer = layer_obj,
- branch = layer_version_information['branch'],
- commit = layer_version_information['commit'],
- priority = layer_version_information['priority'],
- local_path = layer_version_information['local_path'],
- project=project)
-
- self.layer_version_objects.append(layer_version_object)
-
- return layer_version_object
-
- def get_update_layer_object(self, layer_information, brbe):
- assert 'name' in layer_information
- assert 'layer_index_url' in layer_information
-
- if brbe is None:
- layer_object, _ = Layer.objects.get_or_create(
- name=layer_information['name'],
- layer_index_url=layer_information['layer_index_url'])
- return layer_object
- else:
- # we are under managed mode; we must match the layer used in the Project Layer
- br_id, be_id = brbe.split(":")
-
- # find layer by checkout path;
- from bldcontrol import bbcontroller
- bc = bbcontroller.getBuildEnvironmentController(pk = be_id)
-
- # we might have a race condition here, as the project layers may change between the build trigger and the actual build execution
- # but we can only match on the layer name, so the worst thing can happen is a mis-identification of the layer, not a total failure
-
- # note that this is different
- buildrequest = BuildRequest.objects.get(pk = br_id)
- for brl in buildrequest.brlayer_set.all():
- localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
- # we get a relative path, unless running in HEAD mode where the path is absolute
- if not localdirname.startswith("/"):
- localdirname = os.path.join(bc.be.sourcedir, localdirname)
- #logger.debug(1, "Localdirname %s lcal_path %s" % (localdirname, layer_information['local_path']))
- if localdirname.startswith(layer_information['local_path']):
- # If the build request came from toaster this field
- # should contain the information from the layer_version
- # That created this build request.
- if brl.layer_version:
- return brl.layer_version
-
- # we matched the BRLayer, but we need the layer_version that generated this BR; reverse of the Project.schedule_build()
- #logger.debug(1, "Matched %s to BRlayer %s" % (pformat(layer_information["local_path"]), localdirname))
-
- for pl in buildrequest.project.projectlayer_set.filter(layercommit__layer__name = brl.name):
- if pl.layercommit.layer.vcs_url == brl.giturl :
- layer = pl.layercommit.layer
- layer.save()
- return layer
-
- raise NotExisting("Unidentified layer %s" % pformat(layer_information))
-
-
- def save_target_file_information(self, build_obj, target_obj, filedata):
- assert isinstance(build_obj, Build)
- assert isinstance(target_obj, Target)
- dirs = filedata['dirs']
- files = filedata['files']
- syms = filedata['syms']
-
- # always create the root directory as a special case;
- # note that this is never displayed, so the owner, group,
- # size, permission are irrelevant
- tf_obj = Target_File.objects.create(target = target_obj,
- path = '/',
- size = 0,
- owner = '',
- group = '',
- permission = '',
- inodetype = Target_File.ITYPE_DIRECTORY)
- tf_obj.save()
-
- # insert directories, ordered by name depth
- for d in sorted(dirs, key=lambda x:len(x[-1].split("/"))):
- (user, group, size) = d[1:4]
- permission = d[0][1:]
- path = d[4].lstrip(".")
-
- # we already created the root directory, so ignore any
- # entry for it
- if len(path) == 0:
- continue
-
- parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
- if len(parent_path) == 0:
- parent_path = "/"
- parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
- tf_obj = Target_File.objects.create(
- target = target_obj,
- path = unicode(path, 'utf-8'),
- size = size,
- inodetype = Target_File.ITYPE_DIRECTORY,
- permission = permission,
- owner = user,
- group = group,
- directory = parent_obj)
-
-
- # we insert files
- for d in files:
- (user, group, size) = d[1:4]
- permission = d[0][1:]
- path = d[4].lstrip(".")
- parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
- inodetype = Target_File.ITYPE_REGULAR
- if d[0].startswith('b'):
- inodetype = Target_File.ITYPE_BLOCK
- if d[0].startswith('c'):
- inodetype = Target_File.ITYPE_CHARACTER
- if d[0].startswith('p'):
- inodetype = Target_File.ITYPE_FIFO
-
- tf_obj = Target_File.objects.create(
- target = target_obj,
- path = unicode(path, 'utf-8'),
- size = size,
- inodetype = inodetype,
- permission = permission,
- owner = user,
- group = group)
- parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
- tf_obj.directory = parent_obj
- tf_obj.save()
-
- # we insert symlinks
- for d in syms:
- (user, group, size) = d[1:4]
- permission = d[0][1:]
- path = d[4].lstrip(".")
- filetarget_path = d[6]
-
- parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
- if not filetarget_path.startswith("/"):
- # we have a relative path, get a normalized absolute one
- filetarget_path = parent_path + "/" + filetarget_path
- fcp = filetarget_path.split("/")
- fcpl = []
- for i in fcp:
- if i == "..":
- fcpl.pop()
- else:
- fcpl.append(i)
- filetarget_path = "/".join(fcpl)
-
- try:
- filetarget_obj = Target_File.objects.get(
- target = target_obj,
- path = unicode(filetarget_path, 'utf-8'))
- except Target_File.DoesNotExist:
- # we might have an invalid link; there is no way to detect this, so just set it to None
- filetarget_obj = None
-
- parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
-
- tf_obj = Target_File.objects.create(
- target = target_obj,
- path = unicode(path, 'utf-8'),
- size = size,
- inodetype = Target_File.ITYPE_SYMLINK,
- permission = permission,
- owner = user,
- group = group,
- directory = parent_obj,
- sym_target = filetarget_obj)
-
-
- def save_target_package_information(self, build_obj, target_obj, packagedict, pkgpnmap, recipes, built_package=False):
- assert isinstance(build_obj, Build)
- assert isinstance(target_obj, Target)
-
- errormsg = ""
- for p in packagedict:
- # The search name switches round the installed name vs the package name;
- # by default the installed name == the package name
- searchname = p
- if p not in pkgpnmap:
- logger.warning("Image packages list contains %p, but is"
- " missing from all packages list where the"
- " metadata comes from. Skipping...", p)
- continue
-
- if 'OPKGN' in pkgpnmap[p].keys():
- searchname = pkgpnmap[p]['OPKGN']
-
- built_recipe = recipes[pkgpnmap[p]['PN']]
-
- if built_package:
- packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
- recipe = built_recipe
- else:
- packagedict[p]['object'], created = \
- CustomImagePackage.objects.get_or_create(name=searchname)
- # Clear the Package_Dependency objects as we're going to update
- # the CustomImagePackage with the latest dependency information
- packagedict[p]['object'].package_dependencies_target.all().delete()
- packagedict[p]['object'].package_dependencies_source.all().delete()
- try:
- recipe = self._cached_get(
- Recipe,
- name=built_recipe.name,
- layer_version__build=None,
- layer_version__up_branch=
- built_recipe.layer_version.up_branch,
- file_path=built_recipe.file_path,
- version=built_recipe.version
- )
- except (Recipe.DoesNotExist,
- Recipe.MultipleObjectsReturned) as e:
- logger.info("We did not find one recipe for the"
- "configuration data package %s %s" % (p, e))
- continue
-
- if created or packagedict[p]['object'].size == -1: # save the data any way we can, not just if it was not created here; bug [YOCTO #6887]
- # fill in everything we can from the runtime-reverse package data
- try:
- packagedict[p]['object'].recipe = recipe
- packagedict[p]['object'].version = pkgpnmap[p]['PV']
- packagedict[p]['object'].installed_name = p
- packagedict[p]['object'].revision = pkgpnmap[p]['PR']
- packagedict[p]['object'].license = pkgpnmap[p]['LICENSE']
- packagedict[p]['object'].section = pkgpnmap[p]['SECTION']
- packagedict[p]['object'].summary = pkgpnmap[p]['SUMMARY']
- packagedict[p]['object'].description = pkgpnmap[p]['DESCRIPTION']
- packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])
-
- # no files are recorded for this package yet, so save the file information now
- packagefile_objects = []
- for targetpath in pkgpnmap[p]['FILES_INFO']:
- targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
- packagefile_objects.append(Package_File( package = packagedict[p]['object'],
- path = targetpath,
- size = targetfilesize))
- if len(packagefile_objects):
- Package_File.objects.bulk_create(packagefile_objects)
- except KeyError as e:
- errormsg += " stpi: Key error, package %s key %s \n" % ( p, e )
-
- # save disk installed size
- packagedict[p]['object'].installed_size = packagedict[p]['size']
- packagedict[p]['object'].save()
-
- if built_package:
- Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])
-
- packagedeps_objs = []
- for p in packagedict:
- for (px,deptype) in packagedict[p]['depends']:
- if deptype == 'depends':
- tdeptype = Package_Dependency.TYPE_TRDEPENDS
- elif deptype == 'recommends':
- tdeptype = Package_Dependency.TYPE_TRECOMMENDS
- else:
- # skip unknown dependency types so tdeptype is never left stale or unset
- continue
-
- try:
- packagedeps_objs.append(Package_Dependency(
- package = packagedict[p]['object'],
- depends_on = packagedict[px]['object'],
- dep_type = tdeptype,
- target = target_obj))
- except KeyError as e:
- logger.warn("Could not add dependency to the package %s "
- "because %s is an unknown package", p, px)
-
- if len(packagedeps_objs) > 0:
- Package_Dependency.objects.bulk_create(packagedeps_objs)
- else:
- logger.info("No package dependencies created")
-
- if len(errormsg) > 0:
- logger.warn("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg)
-
- def save_target_image_file_information(self, target_obj, file_name, file_size):
- Target_Image_File.objects.create( target = target_obj,
- file_name = file_name,
- file_size = file_size)
-
- def save_artifact_information(self, build_obj, file_name, file_size):
- # we skip the image files from other builds
- if Target_Image_File.objects.filter(file_name = file_name).count() > 0:
- return
-
- # do not update artifacts found in other builds
- if BuildArtifact.objects.filter(file_name = file_name).count() > 0:
- return
-
- BuildArtifact.objects.create(build = build_obj, file_name = file_name, file_size = file_size)
-
- def create_logmessage(self, log_information):
- assert 'build' in log_information
- assert 'level' in log_information
- assert 'message' in log_information
-
- log_object = LogMessage.objects.create(
- build = log_information['build'],
- level = log_information['level'],
- message = log_information['message'])
-
- # copy across any additional fields supplied in log_information
- for v in vars(log_object):
- if v in log_information.keys():
- vars(log_object)[v] = log_information[v]
-
- return log_object.save()
-
-
- def save_build_package_information(self, build_obj, package_info, recipes,
- built_package):
- # assert isinstance(build_obj, Build)
-
- # create and save the object
- pname = package_info['PKG']
- built_recipe = recipes[package_info['PN']]
- if 'OPKGN' in package_info.keys():
- pname = package_info['OPKGN']
-
- if built_package:
- bp_object, _ = Package.objects.get_or_create( build = build_obj,
- name = pname )
- recipe = built_recipe
- else:
- bp_object, created = \
- CustomImagePackage.objects.get_or_create(name=pname)
- try:
- recipe = self._cached_get(Recipe,
- name=built_recipe.name,
- layer_version__build=None,
- file_path=built_recipe.file_path,
- version=built_recipe.version)
-
- except (Recipe.DoesNotExist, Recipe.MultipleObjectsReturned):
- logger.debug("We did not find one recipe for the configuration"
- "data package %s" % pname)
- return
-
- bp_object.installed_name = package_info['PKG']
- bp_object.recipe = recipe
- bp_object.version = package_info['PKGV']
- bp_object.revision = package_info['PKGR']
- bp_object.summary = package_info['SUMMARY']
- bp_object.description = package_info['DESCRIPTION']
- bp_object.size = int(package_info['PKGSIZE'])
- bp_object.section = package_info['SECTION']
- bp_object.license = package_info['LICENSE']
- bp_object.save()
-
- # save any attached file information
- packagefile_objects = []
- for path in package_info['FILES_INFO']:
- packagefile_objects.append(Package_File( package = bp_object,
- path = path,
- size = package_info['FILES_INFO'][path] ))
- if len(packagefile_objects):
- Package_File.objects.bulk_create(packagefile_objects)
-
- def _po_byname(p):
- if built_package:
- pkg, created = Package.objects.get_or_create(build=build_obj,
- name=p)
- else:
- pkg, created = CustomImagePackage.objects.get_or_create(name=p)
-
- if created:
- pkg.size = -1
- pkg.save()
- return pkg
-
- packagedeps_objs = []
- # save soft dependency information
- if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
- for p in bb.utils.explode_deps(package_info['RDEPENDS']):
- packagedeps_objs.append(Package_Dependency( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
- if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
- for p in bb.utils.explode_deps(package_info['RPROVIDES']):
- packagedeps_objs.append(Package_Dependency( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
- if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
- for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
- packagedeps_objs.append(Package_Dependency( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
- if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
- for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
- packagedeps_objs.append(Package_Dependency( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
- if 'RREPLACES' in package_info and package_info['RREPLACES']:
- for p in bb.utils.explode_deps(package_info['RREPLACES']):
- packagedeps_objs.append(Package_Dependency( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
- if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
- for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
- packagedeps_objs.append(Package_Dependency( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))
-
- if len(packagedeps_objs) > 0:
- Package_Dependency.objects.bulk_create(packagedeps_objs)
-
- return bp_object
-
- def save_build_variables(self, build_obj, vardump):
- assert isinstance(build_obj, Build)
-
- for k in vardump:
- desc = vardump[k]['doc']
- if desc is None:
- var_words = k.split('_')
- root_var = "_".join([word for word in var_words if word.isupper()])
- if root_var and root_var != k and root_var in vardump:
- desc = vardump[root_var]['doc']
- if desc is None:
- desc = ''
- if len(desc):
- HelpText.objects.get_or_create(build=build_obj,
- area=HelpText.VARIABLE,
- key=k, text=desc)
- if not bool(vardump[k]['func']):
- value = vardump[k]['v']
- if value is None:
- value = ''
- variable_obj = Variable.objects.create( build = build_obj,
- variable_name = k,
- variable_value = value,
- description = desc)
-
- varhist_objects = []
- for vh in vardump[k]['history']:
- if 'documentation.conf' not in vh['file']:
- varhist_objects.append(VariableHistory( variable = variable_obj,
- file_name = vh['file'],
- line_number = vh['line'],
- operation = vh['op']))
- if len(varhist_objects):
- VariableHistory.objects.bulk_create(varhist_objects)
-
-
-class MockEvent(object):
- """ This object is used to create event, for which normal event-processing methods can
- be used, out of data that is not coming via an actual event
- """
- def __init__(self):
- self.msg = None
- self.levelno = None
- self.taskname = None
- self.taskhash = None
- self.pathname = None
- self.lineno = None
-
-
-class BuildInfoHelper(object):
- """ This class gathers the build information from the server and sends it
- towards the ORM wrapper for storing in the database
- It is instantiated once per build
- Keeps in memory all data that needs matching before writing it to the database
- """
-
- # pylint: disable=protected-access
- # the code looks into the protected variables of the event; there is no easy way around this
- # pylint: disable=bad-continuation
- # we do not follow the Python conventions for continuation indentation here, due to long lines
-
- def __init__(self, server, has_build_history = False, brbe = None):
- self.internal_state = {}
- self.internal_state['taskdata'] = {}
- self.internal_state['targets'] = []
- self.task_order = 0
- self.autocommit_step = 1
- self.server = server
- # we use manual transactions if the database doesn't autocommit on us
- if not connection.features.autocommits_when_autocommit_is_off:
- transaction.set_autocommit(False)
- self.orm_wrapper = ORMWrapper()
- self.has_build_history = has_build_history
- self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
-
- # this is set for Toaster-triggered builds by localhostbecontroller
- # via toasterui
- self.brbe = brbe
-
- self.project = None
-
- logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))
-
-
- ###################
- ## methods to convert event/external info into objects that the ORM layer uses
-
-
- def _get_build_information(self, build_log_path):
- build_info = {}
- build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
- build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
- build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
- build_info['started_on'] = timezone.now()
- build_info['completed_on'] = timezone.now()
- build_info['cooker_log_path'] = build_log_path
- build_info['build_name'] = self.server.runCommand(["getVariable", "BUILDNAME"])[0]
- build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
- build_info['project'] = self.project = self.server.runCommand(["getVariable", "TOASTER_PROJECT"])[0]
- return build_info
-
- def _get_task_information(self, event, recipe):
- assert 'taskname' in vars(event)
-
- task_information = {}
- task_information['build'] = self.internal_state['build']
- task_information['outcome'] = Task.OUTCOME_NA
- task_information['recipe'] = recipe
- task_information['task_name'] = event.taskname
- try:
- # some tasks don't come with a hash, and that's OK
- task_information['sstate_checksum'] = event.taskhash
- except AttributeError:
- pass
- return task_information
-
- def _get_layer_version_for_path(self, path):
- assert path.startswith("/")
- assert 'build' in self.internal_state
-
- def _slkey_interactive(layer_version):
- assert isinstance(layer_version, Layer_Version)
- return len(layer_version.local_path)
-
- # Heuristic: we always match a recipe to the deepest layer path among the discovered layers
- for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive):
- # we can match to the recipe file path
- if path.startswith(lvo.local_path):
- return lvo
-
- # if we get here, we didn't read the layers correctly; dump whatever information we have to the error log
- logger.warn("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)
-
- # mock up a stand-in layer object
- unknown_layer, _ = Layer.objects.get_or_create(name="Unidentified layer", layer_index_url="")
- unknown_layer_version_obj, _ = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])
-
- # append it so we don't run into this error again and again
- self.orm_wrapper.layer_version_objects.append(unknown_layer_version_obj)
-
- return unknown_layer_version_obj
-
- def _get_recipe_information_from_taskfile(self, taskfile):
- localfilepath = taskfile.split(":")[-1]
- filepath_flags = ":".join(sorted(taskfile.split(":")[:-1]))
- layer_version_obj = self._get_layer_version_for_path(localfilepath)
-
- recipe_info = {}
- recipe_info['layer_version'] = layer_version_obj
- recipe_info['file_path'] = localfilepath
- recipe_info['pathflags'] = filepath_flags
-
- if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
- recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
- else:
- raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))
-
- return recipe_info
-
- def _get_path_information(self, task_object):
- assert isinstance(task_object, Task)
- build_stats_format = "{tmpdir}/buildstats/{buildname}/{package}/"
- build_stats_path = []
-
- for t in self.internal_state['targets']:
- buildname = self.internal_state['build'].build_name
- pe, pv = task_object.recipe.version.split(":",1)
- if len(pe) > 0:
- package = task_object.recipe.name + "-" + pe + "_" + pv
- else:
- package = task_object.recipe.name + "-" + pv
-
- build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir,
- buildname=buildname,
- package=package))
-
- return build_stats_path
-
-
- ################################
- ## externally available methods to store information
- @staticmethod
- def _get_data_from_event(event):
- evdata = None
- if '_localdata' in vars(event):
- evdata = event._localdata
- elif 'data' in vars(event):
- evdata = event.data
- else:
- raise Exception("Event with neither _localdata or data properties")
- return evdata
-
- def store_layer_info(self, event):
- layerinfos = BuildInfoHelper._get_data_from_event(event)
- self.internal_state['lvs'] = {}
- for layer in layerinfos:
- try:
- self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
- self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path']
- except NotExisting as nee:
- logger.warn("buildinfohelper: cannot identify layer exception:%s ", nee)
-
-
- def store_started_build(self, event, build_log_path):
- assert '_pkgs' in vars(event)
- build_information = self._get_build_information(build_log_path)
-
- # Update brbe and project as they can be changed for every build
- self.project = build_information['project']
-
- build_obj = self.orm_wrapper.create_build_object(build_information, self.brbe, self.project)
-
- self.internal_state['build'] = build_obj
-
- # save layer version information for this build
- if 'lvs' not in self.internal_state:
- logger.error("Layer version information not found; check if the bitbake server was configured to inherit toaster.bbclass.")
- else:
- for layer_obj in self.internal_state['lvs']:
- self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])
-
- del self.internal_state['lvs']
-
- # create target information
- target_information = {}
- target_information['targets'] = event._pkgs
- target_information['build'] = build_obj
-
- self.internal_state['targets'] = self.orm_wrapper.get_or_create_targets(target_information)
-
- # Save build configuration
- data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]
-
- # convert the paths from absolute to relative to either the build directory or layer checkouts
- path_prefixes = []
-
- if self.brbe is not None:
- _, be_id = self.brbe.split(":")
- be = BuildEnvironment.objects.get(pk = be_id)
- path_prefixes.append(be.builddir)
-
- for layer in sorted(self.orm_wrapper.layer_version_objects, key = lambda x:len(x.local_path), reverse=True):
- path_prefixes.append(layer.local_path)
-
- # we strip the prefixes
- for k in data:
- if not bool(data[k]['func']):
- for vh in data[k]['history']:
- if not 'documentation.conf' in vh['file']:
- abs_file_name = vh['file']
- for pp in path_prefixes:
- if abs_file_name.startswith(pp + "/"):
- vh['file']=abs_file_name[len(pp + "/"):]
- break
-
- # save the variables
- self.orm_wrapper.save_build_variables(build_obj, data)
-
- return self.brbe
-
-
- def update_target_image_file(self, event):
- evdata = BuildInfoHelper._get_data_from_event(event)
-
- for t in self.internal_state['targets']:
- if t.is_image:
- output_files = list(evdata.viewkeys())
- for output in output_files:
- if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
- self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])
-
- def update_artifact_image_file(self, event):
- evdata = BuildInfoHelper._get_data_from_event(event)
- for artifact_path in evdata.keys():
- self.orm_wrapper.save_artifact_information(self.internal_state['build'], artifact_path, evdata[artifact_path])
-
- def update_build_information(self, event, errors, warnings, taskfailures):
- if 'build' in self.internal_state:
- self.orm_wrapper.update_build_object(self.internal_state['build'], errors, warnings, taskfailures)
-
-
- def store_license_manifest_path(self, event):
- deploy_dir = BuildInfoHelper._get_data_from_event(event)['deploy_dir']
- image_name = BuildInfoHelper._get_data_from_event(event)['image_name']
- path = deploy_dir + "/licenses/" + image_name + "/license.manifest"
- for target in self.internal_state['targets']:
- if target.target in image_name:
- self.orm_wrapper.update_target_set_license_manifest(target, path)
-
-
- def store_started_task(self, event):
- assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
- assert 'taskfile' in vars(event)
- localfilepath = event.taskfile.split(":")[-1]
- assert localfilepath.startswith("/")
-
- identifier = event.taskfile + ":" + event.taskname
-
- recipe_information = self._get_recipe_information_from_taskfile(event.taskfile)
- recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
-
- task_information = self._get_task_information(event, recipe)
- task_information['outcome'] = Task.OUTCOME_NA
-
- if isinstance(event, bb.runqueue.runQueueTaskSkipped):
- assert 'reason' in vars(event)
- task_information['task_executed'] = False
- if event.reason == "covered":
- task_information['outcome'] = Task.OUTCOME_COVERED
- if event.reason == "existing":
- task_information['outcome'] = Task.OUTCOME_PREBUILT
- else:
- task_information['task_executed'] = True
- if 'noexec' in vars(event) and event.noexec:
- task_information['task_executed'] = False
- task_information['outcome'] = Task.OUTCOME_EMPTY
- task_information['script_type'] = Task.CODING_NA
-
- # do not assign order numbers to scene tasks
- if not isinstance(event, bb.runqueue.sceneQueueTaskStarted):
- self.task_order += 1
- task_information['order'] = self.task_order
-
- self.orm_wrapper.get_update_task_object(task_information)
-
- self.internal_state['taskdata'][identifier] = {
- 'outcome': task_information['outcome'],
- }
-
-
- def store_tasks_stats(self, event):
- task_data = BuildInfoHelper._get_data_from_event(event)
-
- for (task_file, task_name, task_stats, recipe_name) in task_data:
- build = self.internal_state['build']
- self.orm_wrapper.update_task_object(build, task_name, recipe_name, task_stats)
-
- def update_and_store_task(self, event):
- assert 'taskfile' in vars(event)
- localfilepath = event.taskfile.split(":")[-1]
- assert localfilepath.startswith("/")
-
- identifier = event.taskfile + ":" + event.taskname
- if identifier not in self.internal_state['taskdata']:
- if isinstance(event, bb.build.TaskBase):
- # we do a bit of guessing
- candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
- if len(candidates) == 1:
- identifier = candidates[0]
-
- assert identifier in self.internal_state['taskdata']
- identifierlist = identifier.split(":")
- realtaskfile = ":".join(identifierlist[0:len(identifierlist)-1])
- recipe_information = self._get_recipe_information_from_taskfile(realtaskfile)
- recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
- task_information = self._get_task_information(event,recipe)
-
- task_information['outcome'] = self.internal_state['taskdata'][identifier]['outcome']
-
- if 'logfile' in vars(event):
- task_information['logfile'] = event.logfile
-
- if '_message' in vars(event):
- task_information['message'] = event._message
-
- if 'taskflags' in vars(event):
- # with TaskStarted, we get even more information
- if event.taskflags.get('python') == '1':
- task_information['script_type'] = Task.CODING_PYTHON
- else:
- task_information['script_type'] = Task.CODING_SHELL
-
- if task_information['outcome'] == Task.OUTCOME_NA:
- if isinstance(event, (bb.runqueue.runQueueTaskCompleted, bb.runqueue.sceneQueueTaskCompleted)):
- task_information['outcome'] = Task.OUTCOME_SUCCESS
- del self.internal_state['taskdata'][identifier]
-
- if isinstance(event, (bb.runqueue.runQueueTaskFailed, bb.runqueue.sceneQueueTaskFailed)):
- task_information['outcome'] = Task.OUTCOME_FAILED
- del self.internal_state['taskdata'][identifier]
-
- if not connection.features.autocommits_when_autocommit_is_off:
- # we force a sync point here, to get the progress bar to show
- if self.autocommit_step % 3 == 0:
- transaction.set_autocommit(True)
- transaction.set_autocommit(False)
- self.autocommit_step += 1
-
- self.orm_wrapper.get_update_task_object(task_information, True) # must exist
-
-
- def store_missed_state_tasks(self, event):
- for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:
-
- # identifier = fn + taskname + "_setscene"
- recipe_information = self._get_recipe_information_from_taskfile(fn)
- recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
- mevent = MockEvent()
- mevent.taskname = taskname
- mevent.taskhash = taskhash
- task_information = self._get_task_information(mevent,recipe)
-
- task_information['start_time'] = timezone.now()
- task_information['outcome'] = Task.OUTCOME_NA
- task_information['sstate_checksum'] = taskhash
- task_information['sstate_result'] = Task.SSTATE_MISS
- task_information['path_to_sstate_obj'] = sstatefile
-
- self.orm_wrapper.get_update_task_object(task_information)
-
- for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:
-
- # identifier = fn + taskname + "_setscene"
- recipe_information = self._get_recipe_information_from_taskfile(fn)
- recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
- mevent = MockEvent()
- mevent.taskname = taskname
- mevent.taskhash = taskhash
- task_information = self._get_task_information(mevent,recipe)
-
- task_information['path_to_sstate_obj'] = sstatefile
-
- self.orm_wrapper.get_update_task_object(task_information)
-
-
- def store_target_package_data(self, event):
- # for all image targets
- for target in self.internal_state['targets']:
- if target.is_image:
- pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
- imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'].get(target.target, {})
- filedata = BuildInfoHelper._get_data_from_event(event)['filedata'].get(target.target, {})
-
- try:
- self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'], built_package=True)
- self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata.copy(), pkgdata, self.internal_state['recipes'], built_package=False)
- except KeyError as e:
- logger.warn("KeyError in save_target_package_information"
- "%s ", e)
-
- try:
- self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
- except KeyError as e:
- logger.warn("KeyError in save_target_file_information"
- "%s ", e)
-
-
- def store_dependency_information(self, event):
- assert '_depgraph' in vars(event)
- assert 'layer-priorities' in event._depgraph
- assert 'pn' in event._depgraph
- assert 'tdepends' in event._depgraph
-
- errormsg = ""
-
- # save layer version priorities
- if 'layer-priorities' in event._depgraph:
- for lv in event._depgraph['layer-priorities']:
- (_, path, _, priority) = lv
- layer_version_obj = self._get_layer_version_for_path(path[1:]) # paths start with a "^"; strip it
- assert layer_version_obj is not None
- layer_version_obj.priority = priority
- layer_version_obj.save()
-
- # save recipe information
- self.internal_state['recipes'] = {}
- for pn in event._depgraph['pn']:
-
- file_name = event._depgraph['pn'][pn]['filename'].split(":")[-1]
- pathflags = ":".join(sorted(event._depgraph['pn'][pn]['filename'].split(":")[:-1]))
- layer_version_obj = self._get_layer_version_for_path(file_name)
-
- assert layer_version_obj is not None
-
- recipe_info = {}
- recipe_info['name'] = pn
- recipe_info['layer_version'] = layer_version_obj
-
- if 'version' in event._depgraph['pn'][pn]:
- recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
-
- if 'summary' in event._depgraph['pn'][pn]:
- recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
-
- if 'license' in event._depgraph['pn'][pn]:
- recipe_info['license'] = event._depgraph['pn'][pn]['license']
-
- if 'description' in event._depgraph['pn'][pn]:
- recipe_info['description'] = event._depgraph['pn'][pn]['description']
-
- if 'section' in event._depgraph['pn'][pn]:
- recipe_info['section'] = event._depgraph['pn'][pn]['section']
-
- if 'homepage' in event._depgraph['pn'][pn]:
- recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
-
- if 'bugtracker' in event._depgraph['pn'][pn]:
- recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
-
- recipe_info['file_path'] = file_name
- recipe_info['pathflags'] = pathflags
-
- if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
- recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
- else:
- raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))
-
- recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
- recipe.is_image = False
- if 'inherits' in event._depgraph['pn'][pn].keys():
- for cls in event._depgraph['pn'][pn]['inherits']:
- if cls.endswith('/image.bbclass'):
- recipe.is_image = True
- recipe_info['is_image'] = True
- # Save the is_image state to the relevant recipe objects
- self.orm_wrapper.get_update_recipe_object(recipe_info)
- break
- if recipe.is_image:
- for t in self.internal_state['targets']:
- if pn == t.target:
- t.is_image = True
- t.save()
- self.internal_state['recipes'][pn] = recipe
-
- # we will not get recipes for dependencies that are listed in ASSUME_PROVIDED
-
- assume_provided = self.server.runCommand(["getVariable", "ASSUME_PROVIDED"])[0].split()
-
- # save build-time recipe dependencies
- recipedeps_objects = []
- for recipe in event._depgraph['depends']:
- target = self.internal_state['recipes'][recipe]
- for dep in event._depgraph['depends'][recipe]:
- if dep in assume_provided:
- continue
- via = None
- if 'providermap' in event._depgraph and dep in event._depgraph['providermap']:
- deprecipe = event._depgraph['providermap'][dep][0]
- dependency = self.internal_state['recipes'][deprecipe]
- via = Provides.objects.get_or_create(name=dep,
- recipe=dependency)[0]
- elif dep in self.internal_state['recipes']:
- dependency = self.internal_state['recipes'][dep]
- else:
- errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep)
- continue
- recipe_dep = Recipe_Dependency(recipe=target,
- depends_on=dependency,
- via=via,
- dep_type=Recipe_Dependency.TYPE_DEPENDS)
- recipedeps_objects.append(recipe_dep)
-
- Recipe_Dependency.objects.bulk_create(recipedeps_objects)
-
- # save all task information
- def _save_a_task(taskdesc):
- spec = re.split(r'\.', taskdesc)
- pn = ".".join(spec[0:-1])
- taskname = spec[-1]
- e = event
- e.taskname = pn
- recipe = self.internal_state['recipes'][pn]
- task_info = self._get_task_information(e, recipe)
- task_info['task_name'] = taskname
- task_obj = self.orm_wrapper.get_update_task_object(task_info)
- return task_obj
-
- # create tasks
- tasks = {}
- for taskdesc in event._depgraph['tdepends']:
- tasks[taskdesc] = _save_a_task(taskdesc)
-
- # create dependencies between tasks
- taskdeps_objects = []
- for taskdesc in event._depgraph['tdepends']:
- target = tasks[taskdesc]
- for taskdep in event._depgraph['tdepends'][taskdesc]:
- if taskdep not in tasks:
- # task info was not collected previously, so fetch and save it now
- dep = _save_a_task(taskdep)
- else:
- dep = tasks[taskdep]
- taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
- Task_Dependency.objects.bulk_create(taskdeps_objects)
-
- if len(errormsg) > 0:
- logger.warn("buildinfohelper: dependency info not identify recipes: \n%s", errormsg)
-
-
- def store_build_package_information(self, event):
- package_info = BuildInfoHelper._get_data_from_event(event)
- self.orm_wrapper.save_build_package_information(
- self.internal_state['build'],
- package_info,
- self.internal_state['recipes'],
- built_package=True)
-
- self.orm_wrapper.save_build_package_information(
- self.internal_state['build'],
- package_info,
- self.internal_state['recipes'],
- built_package=False)
-
- def _store_build_done(self, errorcode):
- logger.info("Build exited with errorcode %d", errorcode)
- br_id, be_id = self.brbe.split(":")
- be = BuildEnvironment.objects.get(pk = be_id)
- be.lock = BuildEnvironment.LOCK_LOCK
- be.save()
- br = BuildRequest.objects.get(pk = br_id)
-
- # if we're 'done' because we got cancelled, update the build outcome
- if br.state == BuildRequest.REQ_CANCELLING:
- logger.info("Build cancelled")
- br.build.outcome = Build.CANCELLED
- br.build.save()
- self.internal_state['build'] = br.build
- errorcode = 0
-
- if errorcode == 0:
- # request archival of the project artifacts
- br.state = BuildRequest.REQ_COMPLETED
- else:
- br.state = BuildRequest.REQ_FAILED
- br.save()
-
-
- def store_log_error(self, text):
- mockevent = MockEvent()
- mockevent.levelno = formatter.ERROR
- mockevent.msg = text
- mockevent.pathname = '-- None'
- mockevent.lineno = LogMessage.ERROR
- self.store_log_event(mockevent)
-
- def store_log_exception(self, text, backtrace = ""):
- mockevent = MockEvent()
- mockevent.levelno = -1
- mockevent.msg = text
- mockevent.pathname = backtrace
- mockevent.lineno = -1
- self.store_log_event(mockevent)
-
-
- def store_log_event(self, event):
- if event.levelno < formatter.WARNING:
- return
-
- if 'args' in vars(event):
- event.msg = event.msg % event.args
-
- if 'build' not in self.internal_state:
- if self.brbe is None:
- if 'backlog' not in self.internal_state:
- self.internal_state['backlog'] = []
- self.internal_state['backlog'].append(event)
- return
- else: # we're under Toaster control, the build is already created
- br, _ = self.brbe.split(":")
- buildrequest = BuildRequest.objects.get(pk = br)
- self.internal_state['build'] = buildrequest.build
-
- if 'build' in self.internal_state and 'backlog' in self.internal_state:
- # if we have a backlog of events, do our best to save them here
- if len(self.internal_state['backlog']):
- tempevent = self.internal_state['backlog'].pop()
- logger.debug(1, "buildinfohelper: Saving stored event %s " % tempevent)
- self.store_log_event(tempevent)
- else:
- logger.info("buildinfohelper: All events saved")
- del self.internal_state['backlog']
-
- log_information = {}
- log_information['build'] = self.internal_state['build']
- if event.levelno == formatter.CRITICAL:
- log_information['level'] = LogMessage.CRITICAL
- elif event.levelno == formatter.ERROR:
- log_information['level'] = LogMessage.ERROR
- elif event.levelno == formatter.WARNING:
- log_information['level'] = LogMessage.WARNING
- elif event.levelno == -2: # toaster self-logging
- log_information['level'] = -2
- else:
- log_information['level'] = LogMessage.INFO
-
- log_information['message'] = event.msg
- log_information['pathname'] = event.pathname
- log_information['lineno'] = event.lineno
- logger.info("Logging error 2: %s", log_information)
-
- self.orm_wrapper.create_logmessage(log_information)
-
- def close(self, errorcode):
- if self.brbe is not None:
- self._store_build_done(errorcode)
-
- if 'backlog' in self.internal_state:
- if 'build' in self.internal_state:
- # we save missed events in the database for the current build
- tempevent = self.internal_state['backlog'].pop()
- self.store_log_event(tempevent)
- else:
- # we have no build, and we still have events; something has gone amazingly wrong
- for event in self.internal_state['backlog']:
- logger.error("UNSAVED log: %s", event.msg)
-
- if not connection.features.autocommits_when_autocommit_is_off:
- transaction.set_autocommit(True)
-
- # unset the brbe; this is to prevent subsequent command-line builds
- # being incorrectly attached to the previous Toaster-triggered build;
- # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=9021
- self.brbe = None
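
The "deepest layer path wins" heuristic used by _get_layer_version_for_path above can be
sketched in isolation. The following is a minimal approximation; LayerStub and the sample
paths are illustrative stand-ins for the real Layer_Version ORM objects:

    # Match a recipe path to the deepest known layer checkout.
    class LayerStub(object):
        def __init__(self, name, local_path):
            self.name = name
            self.local_path = local_path

    def match_layer(path, layer_versions):
        # Longest local_path first, so a nested layer wins over its parent.
        for lvo in sorted(layer_versions, key=lambda l: len(l.local_path), reverse=True):
            if path.startswith(lvo.local_path):
                return lvo
        return None  # the helper above falls back to an "Unidentified layer" record

    layers = [LayerStub("meta", "/srv/poky/meta"),
              LayerStub("meta-yocto-bsp", "/srv/poky/meta-yocto-bsp")]
    lv = match_layer("/srv/poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.1.bb", layers)
    assert lv.name == "meta-yocto-bsp"

Sorting by path length before matching is what keeps a recipe inside meta-yocto-bsp from
being attributed to meta, whose checkout path is a prefix of it.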
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/__init__.py b/yocto-poky/bitbake/lib/bb/ui/crumbs/__init__.py
deleted file mode 100644
index b7cbe1a4f..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Gtk+ UI pieces for BitBake
-#
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/__init__.py b/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/__init__.py
+++ /dev/null
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py b/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py
deleted file mode 100644
index c679f9a07..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011-2012 Intel Corporation
-#
-# Authored by Joshua Lock <josh@linux.intel.com>
-# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-
-"""
-The following are convenience classes for implementing GNOME HIG compliant
-BitBake GUIs.
-In summary: spacing = 12px, border-width = 6px.
-"""
-
-class CrumbsDialog(gtk.Dialog):
- """
- A GNOME HIG compliant dialog widget.
- Add buttons with gtk.Dialog.add_button or gtk.Dialog.add_buttons
- """
- def __init__(self, title="", parent=None, flags=0, buttons=None):
- super(CrumbsDialog, self).__init__(title, parent, flags, buttons)
-
- self.set_property("has-separator", False) # note: deprecated in 2.22
-
- self.set_border_width(6)
- self.vbox.set_property("spacing", 12)
- self.action_area.set_property("spacing", 12)
- self.action_area.set_property("border-width", 6)
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py b/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py
deleted file mode 100644
index 3b998e463..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011-2012 Intel Corporation
-#
-# Authored by Joshua Lock <josh@linux.intel.com>
-# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import glib
-import gtk
-from bb.ui.crumbs.hobwidget import HobIconChecker
-from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
-
-"""
-The following are convenience classes for implementing GNOME HIG compliant
-BitBake GUIs.
-In summary: spacing = 12px, border-width = 6px.
-"""
-
-class CrumbsMessageDialog(gtk.MessageDialog):
- """
- A GNOME HIG compliant dialog widget.
- Add buttons with gtk.Dialog.add_button or gtk.Dialog.add_buttons
- """
- def __init__(self, parent = None, label="", dialog_type = gtk.MESSAGE_QUESTION, msg=""):
- super(CrumbsMessageDialog, self).__init__(None,
- gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
- dialog_type,
- gtk.BUTTONS_NONE,
- None)
-
- self.set_skip_taskbar_hint(False)
-
- self.set_markup(label)
-
- if 0 <= len(msg) < 300:
- self.format_secondary_markup(msg)
- else:
- vbox = self.get_message_area()
- vbox.set_border_width(1)
- vbox.set_property("spacing", 12)
- self.textWindow = gtk.ScrolledWindow()
- self.textWindow.set_shadow_type(gtk.SHADOW_IN)
- self.textWindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- self.msgView = gtk.TextView()
- self.msgView.set_editable(False)
- self.msgView.set_wrap_mode(gtk.WRAP_WORD)
- self.msgView.set_cursor_visible(False)
- self.msgView.set_size_request(300, 300)
- self.buf = gtk.TextBuffer()
- self.buf.set_text(msg)
- self.msgView.set_buffer(self.buf)
- self.textWindow.add(self.msgView)
- self.msgView.show()
- vbox.add(self.textWindow)
- self.textWindow.show()
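
The distinctive logic here is the 300-character threshold: short messages go into the
dialog's secondary markup, longer ones into a scrolled text view. A usage sketch, again
assuming PyGTK 2 and bb.ui.crumbs are importable:

    import gtk
    from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog

    # A short msg (< 300 chars) is shown via format_secondary_markup;
    # a longer one would be placed in the scrolled gtk.TextView instead.
    dlg = CrumbsMessageDialog(parent=None, label="<b>Build failed</b>",
                              dialog_type=gtk.MESSAGE_ERROR,
                              msg="See the cooker log for details.")
    dlg.add_button("Close", gtk.RESPONSE_OK)
    dlg.run()
    dlg.destroy()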
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py b/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py
deleted file mode 100644
index a13fff906..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py
+++ /dev/null
@@ -1,219 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011-2012 Intel Corporation
-#
-# Authored by Joshua Lock <josh@linux.intel.com>
-# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import glob
-import gtk
-import gobject
-import os
-import re
-import shlex
-import subprocess
-import tempfile
-from bb.ui.crumbs.hobwidget import hic, HobButton
-from bb.ui.crumbs.progressbar import HobProgressBar
-import bb.ui.crumbs.utils
-import bb.process
-from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
-from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
-
-"""
-The following are convenience classes for implementing GNOME HIG compliant
-BitBake GUIs.
-In summary: spacing = 12px, border-width = 6px.
-"""
-
-class DeployImageDialog (CrumbsDialog):
-
- __dummy_usb__ = "--select a usb drive--"
-
- def __init__(self, title, image_path, parent, flags, buttons=None, standalone=False):
- super(DeployImageDialog, self).__init__(title, parent, flags, buttons)
-
- self.image_path = image_path
- self.standalone = standalone
-
- self.create_visual_elements()
- self.connect("response", self.response_cb)
-
- def create_visual_elements(self):
- self.set_size_request(600, 400)
- label = gtk.Label()
- label.set_alignment(0.0, 0.5)
- markup = "<span font_desc='12'>The image to be written into usb drive:</span>"
- label.set_markup(markup)
- self.vbox.pack_start(label, expand=False, fill=False, padding=2)
-
- table = gtk.Table(2, 10, False)
- table.set_col_spacings(5)
- table.set_row_spacings(5)
- self.vbox.pack_start(table, expand=True, fill=True)
-
- scroll = gtk.ScrolledWindow()
- scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
- scroll.set_shadow_type(gtk.SHADOW_IN)
- tv = gtk.TextView()
- tv.set_editable(False)
- tv.set_wrap_mode(gtk.WRAP_WORD)
- tv.set_cursor_visible(False)
- self.buf = gtk.TextBuffer()
- self.buf.set_text(self.image_path)
- tv.set_buffer(self.buf)
- scroll.add(tv)
- table.attach(scroll, 0, 10, 0, 1)
-
- # There are two ways to use DeployImageDialog:
- # one is invoked by Hob when the 'Deploy Image' button is clicked,
- # the other is invoked by a standalone script.
- # The following block of code handles the latter case. It adds a 'Select Image'
- # button and emits a signal when the button is clicked.
- if self.standalone:
- gobject.signal_new("select_image_clicked", self, gobject.SIGNAL_RUN_FIRST,
- gobject.TYPE_NONE, ())
- icon = gtk.Image()
- pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_IMAGES_DISPLAY_FILE)
- icon.set_from_pixbuf(pix_buffer)
- button = gtk.Button("Select Image")
- button.set_image(icon)
- #button.set_size_request(140, 50)
- table.attach(button, 9, 10, 1, 2, gtk.FILL, 0, 0, 0)
- button.connect("clicked", self.select_image_button_clicked_cb)
-
- separator = gtk.HSeparator()
- self.vbox.pack_start(separator, expand=False, fill=False, padding=10)
-
- self.usb_desc = gtk.Label()
- self.usb_desc.set_alignment(0.0, 0.5)
- markup = "<span font_desc='12'>You haven't chosen any USB drive.</span>"
- self.usb_desc.set_markup(markup)
-
- self.usb_combo = gtk.combo_box_new_text()
- self.usb_combo.connect("changed", self.usb_combo_changed_cb)
- model = self.usb_combo.get_model()
- model.clear()
- self.usb_combo.append_text(self.__dummy_usb__)
- for usb in self.find_all_usb_devices():
- self.usb_combo.append_text("/dev/" + usb)
- self.usb_combo.set_active(0)
- self.vbox.pack_start(self.usb_combo, expand=False, fill=False)
- self.vbox.pack_start(self.usb_desc, expand=False, fill=False, padding=2)
-
- self.progress_bar = HobProgressBar()
- self.vbox.pack_start(self.progress_bar, expand=False, fill=False)
- separator = gtk.HSeparator()
- self.vbox.pack_start(separator, expand=False, fill=True, padding=10)
-
- self.vbox.show_all()
- self.progress_bar.hide()
-
- def set_image_text_buffer(self, image_path):
- self.buf.set_text(image_path)
-
- def set_image_path(self, image_path):
- self.image_path = image_path
-
- def popen_read(self, cmd):
- tmpout, errors = bb.process.run(cmd)
- return tmpout.strip()
-
- def find_all_usb_devices(self):
- usb_devs = [ os.readlink(u)
- for u in glob.glob('/dev/disk/by-id/usb*')
- if not re.search(r'part\d+', u) ]
- return [ '%s' % u[u.rfind('/')+1:] for u in usb_devs ]
-
- def get_usb_info(self, dev):
- return "%s %s" % \
- (self.popen_read('cat /sys/class/block/%s/device/vendor' % dev),
- self.popen_read('cat /sys/class/block/%s/device/model' % dev))
-
- def select_image_button_clicked_cb(self, button):
- self.emit('select_image_clicked')
-
- def usb_combo_changed_cb(self, usb_combo):
- combo_item = self.usb_combo.get_active_text()
- if not combo_item or combo_item == self.__dummy_usb__:
- markup = "<span font_desc='12'>You haven't chosen any USB drive.</span>"
- self.usb_desc.set_markup(markup)
- else:
- markup = "<span font_desc='12'>" + self.get_usb_info(combo_item.lstrip("/dev/")) + "</span>"
- self.usb_desc.set_markup(markup)
-
- def response_cb(self, dialog, response_id):
- if response_id == gtk.RESPONSE_YES:
- lbl = ''
- msg = ''
- combo_item = self.usb_combo.get_active_text()
- if combo_item and combo_item != self.__dummy_usb__ and self.image_path:
- cmdline = bb.ui.crumbs.utils.which_terminal()
- if cmdline:
- tmpfile = tempfile.NamedTemporaryFile()
- cmdline += "\"sudo dd if=" + self.image_path + \
- " of=" + combo_item + " && sync; echo $? > " + tmpfile.name + "\""
- subprocess.call(shlex.split(cmdline))
-
- if int(tmpfile.readline().strip()) == 0:
- lbl = "<b>Deploy image successfully.</b>"
- else:
- lbl = "<b>Failed to deploy image.</b>"
- msg = "Please check image <b>%s</b> exists and USB device <b>%s</b> is writable." % (self.image_path, combo_item)
- tmpfile.close()
- else:
- if not self.image_path:
- lbl = "<b>No selection made.</b>"
- msg = "You have not selected an image to deploy."
- else:
- lbl = "<b>No selection made.</b>"
- msg = "You have not selected a USB device."
- if len(lbl):
- crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO, msg)
- button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
- HobButton.style_button(button)
- crumbs_dialog.run()
- crumbs_dialog.destroy()
-
- def update_progress_bar(self, title, fraction, status=None):
- self.progress_bar.update(fraction)
- self.progress_bar.set_title(title)
- self.progress_bar.set_rcstyle(status)
-
- def write_file(self, ifile, ofile):
- self.progress_bar.reset()
- self.progress_bar.show()
-
- f_from = os.open(ifile, os.O_RDONLY)
- f_to = os.open(ofile, os.O_WRONLY)
-
- total_size = os.stat(ifile).st_size
- written_size = 0
-
- while True:
- buf = os.read(f_from, 1024*1024)
- if not buf:
- break
- os.write(f_to, buf)
- written_size += len(buf) # count the bytes actually read, not the nominal chunk size
- self.update_progress_bar("Writing to USB:", written_size * 1.0 / total_size)
-
- self.update_progress_bar("Writing completed:", 1.0)
- os.close(f_from)
- os.close(f_to)
- self.progress_bar.hide()
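
The core of write_file is a chunked copy with progress reporting, and the same pattern
can be reused outside GTK. In this sketch, report() is a hypothetical stand-in for the
progress-bar update, and ofile is assumed to already exist (e.g. a device node):

    import os

    def copy_with_progress(ifile, ofile, report, chunk=1024 * 1024):
        total = os.stat(ifile).st_size
        written = 0
        f_from = os.open(ifile, os.O_RDONLY)
        f_to = os.open(ofile, os.O_WRONLY)
        try:
            while True:
                buf = os.read(f_from, chunk)
                if not buf:
                    break
                os.write(f_to, buf)
                written += len(buf)                     # bytes actually read
                report(min(written * 1.0 / total, 1.0)) # clamp the fraction to 1.0
        finally:
            os.close(f_from)                            # close both ends even on error
            os.close(f_to)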
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py b/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py
deleted file mode 100644
index 21216adc9..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py
+++ /dev/null
@@ -1,172 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011-2012 Intel Corporation
-#
-# Authored by Joshua Lock <josh@linux.intel.com>
-# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-import gobject
-import os
-from bb.ui.crumbs.hobwidget import HobViewTable, HobInfoButton, HobButton, HobAltButton
-from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
-from bb.ui.crumbs.hig.layerselectiondialog import LayerSelectionDialog
-
-"""
-The following are convenience classes for implementing GNOME HIG compliant
-BitBake GUIs.
-In summary: spacing = 12px, border-width = 6px.
-"""
-
-class ImageSelectionDialog (CrumbsDialog):
-
- __columns__ = [{
- 'col_name' : 'Image name',
- 'col_id' : 0,
- 'col_style': 'text',
- 'col_min' : 400,
- 'col_max' : 400
- }, {
- 'col_name' : 'Select',
- 'col_id' : 1,
- 'col_style': 'radio toggle',
- 'col_min' : 160,
- 'col_max' : 160
- }]
-
-
- def __init__(self, image_folder, image_types, title, parent, flags, buttons=None, image_extension = {}):
- super(ImageSelectionDialog, self).__init__(title, parent, flags, buttons)
- self.connect("response", self.response_cb)
-
- self.image_folder = image_folder
- self.image_types = image_types
- self.image_list = []
- self.image_names = []
- self.image_extension = image_extension
-
- # create visual elements on the dialog
- self.create_visual_elements()
-
- self.image_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_BOOLEAN)
- self.fill_image_store()
-
- def create_visual_elements(self):
- hbox = gtk.HBox(False, 6)
-
- self.vbox.pack_start(hbox, expand=False, fill=False)
-
- entry = gtk.Entry()
- entry.set_text(self.image_folder)
- table = gtk.Table(1, 10, True)
- table.set_size_request(560, -1)
- hbox.pack_start(table, expand=False, fill=False)
- table.attach(entry, 0, 9, 0, 1)
- image = gtk.Image()
- image.set_from_stock(gtk.STOCK_OPEN, gtk.ICON_SIZE_BUTTON)
- open_button = gtk.Button()
- open_button.set_image(image)
- open_button.connect("clicked", self.select_path_cb, self, entry)
- table.attach(open_button, 9, 10, 0, 1)
-
- self.image_table = HobViewTable(self.__columns__, "Images")
- self.image_table.set_size_request(-1, 300)
- self.image_table.connect("toggled", self.toggled_cb)
- self.image_table.connect_group_selection(self.table_selected_cb)
- self.image_table.connect("row-activated", self.row_actived_cb)
- self.vbox.pack_start(self.image_table, expand=True, fill=True)
-
- self.show_all()
-
- def change_image_cb(self, model, path, columnid):
- if not model:
- return
- iter = model.get_iter_first()
- while iter:
- rowpath = model.get_path(iter)
- model[rowpath][columnid] = False
- iter = model.iter_next(iter)
-
- model[path][columnid] = True
-
- def toggled_cb(self, table, cell, path, columnid, tree):
- model = tree.get_model()
- self.change_image_cb(model, path, columnid)
-
- def table_selected_cb(self, selection):
- model, paths = selection.get_selected_rows()
- if paths:
- self.change_image_cb(model, paths[0], 1)
-
- def row_activated_cb(self, tab, model, path):
- self.change_image_cb(model, path, 1)
- self.emit('response', gtk.RESPONSE_YES)
-
- def select_path_cb(self, action, parent, entry):
- dialog = gtk.FileChooserDialog("", parent,
- gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
- text = entry.get_text()
- dialog.set_current_folder(text if len(text) > 0 else os.getcwd())
- button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
- HobAltButton.style_button(button)
- button = dialog.add_button("Open", gtk.RESPONSE_YES)
- HobButton.style_button(button)
- response = dialog.run()
- if response == gtk.RESPONSE_YES:
- path = dialog.get_filename()
- entry.set_text(path)
- self.image_folder = path
- self.fill_image_store()
-
- dialog.destroy()
-
- def fill_image_store(self):
- self.image_list = []
- self.image_store.clear()
- imageset = set()
- for root, dirs, files in os.walk(self.image_folder):
- # ignore the subdirectories
- dirs[:] = []
- for f in files:
- for image_type in self.image_types:
- if image_type in self.image_extension:
- real_types = self.image_extension[image_type]
- else:
- real_types = [image_type]
- for real_image_type in real_types:
- if f.endswith('.' + real_image_type):
- imageset.add(f.rsplit('.' + real_image_type)[0].rsplit('.rootfs')[0])
- self.image_list.append(f)
-
- for image in imageset:
- self.image_store.set(self.image_store.append(), 0, image, 1, False)
-
- self.image_table.set_model(self.image_store)
-
- def response_cb(self, dialog, response_id):
- self.image_names = []
- if response_id == gtk.RESPONSE_YES:
- iter = self.image_store.get_iter_first()
- while iter:
- path = self.image_store.get_path(iter)
- if self.image_store[path][1]:
- for f in self.image_list:
- if f.startswith(self.image_store[path][0] + '.'):
- self.image_names.append(f)
- break
- iter = self.image_store.iter_next(iter)
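
The scan performed by fill_image_store can also be expressed as a plain function. A
minimal sketch, with the folder and image types as illustrative inputs:

    import os

    def scan_images(folder, image_types, image_extension=None):
        """Collect base image names and matching files in the top level of folder."""
        image_extension = image_extension or {}
        names, matches = set(), []
        for root, dirs, files in os.walk(folder):
            dirs[:] = []        # do not descend into subdirectories, as the dialog does
            for f in files:
                for image_type in image_types:
                    for real_type in image_extension.get(image_type, [image_type]):
                        if f.endswith('.' + real_type):
                            names.add(f.rsplit('.' + real_type)[0].rsplit('.rootfs')[0])
                            matches.append(f)
        return sorted(names), matches

    # e.g. scan_images("/srv/images", ["ext4", "tar.bz2"])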
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py b/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py
deleted file mode 100644
index 52d57b673..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py
+++ /dev/null
@@ -1,298 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011-2012 Intel Corporation
-#
-# Authored by Joshua Lock <josh@linux.intel.com>
-# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-import gobject
-import os
-import tempfile
-from bb.ui.crumbs.hobwidget import hic, HobButton, HobAltButton
-from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
-from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
-
-"""
-The following are convenience classes for implementing GNOME HIG compliant
-BitBake GUIs.
-In summary: spacing = 12px, border-width = 6px.
-"""
-
-class CellRendererPixbufActivatable(gtk.CellRendererPixbuf):
- """
- A custom CellRenderer implementation which is activatable
- so that we can handle user clicks
- """
- __gsignals__ = { 'clicked' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- (gobject.TYPE_STRING,)), }
-
- def __init__(self):
- gtk.CellRendererPixbuf.__init__(self)
- self.set_property('mode', gtk.CELL_RENDERER_MODE_ACTIVATABLE)
- self.set_property('follow-state', True)
-
- """
- Respond to a user click on a cell
- """
- def do_activate(self, even, widget, path, background_area, cell_area, flags):
- self.emit('clicked', path)
-
-#
-# LayerSelectionDialog
-#
-class LayerSelectionDialog (CrumbsDialog):
-
- TARGETS = [
- ("MY_TREE_MODEL_ROW", gtk.TARGET_SAME_WIDGET, 0),
- ("text/plain", 0, 1),
- ("TEXT", 0, 2),
- ("STRING", 0, 3),
- ]
-
- def gen_label_widget(self, content):
- label = gtk.Label()
- label.set_alignment(0, 0)
- label.set_markup(content)
- label.show()
- return label
-
- def layer_widget_toggled_cb(self, cell, path, layer_store):
- name = layer_store[path][0]
- toggle = not layer_store[path][1]
- layer_store[path][1] = toggle
-
- def layer_widget_add_clicked_cb(self, action, layer_store, parent):
- dialog = gtk.FileChooserDialog("Add new layer", parent,
- gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
- button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
- HobAltButton.style_button(button)
- button = dialog.add_button("Open", gtk.RESPONSE_YES)
- HobButton.style_button(button)
- label = gtk.Label("Select the layer you wish to add")
- label.show()
- dialog.set_extra_widget(label)
- response = dialog.run()
- path = dialog.get_filename()
- dialog.destroy()
-
- lbl = "<b>Error</b>"
- msg = "Unable to load layer <i>%s</i> because " % path
- if response == gtk.RESPONSE_YES:
- layers = []
- it = layer_store.get_iter_first()
- while it:
- layers.append(layer_store.get_value(it, 0))
- it = layer_store.iter_next(it)
-
- if not path:
- msg += "it is an invalid path."
- elif not os.path.exists(path+"/conf/layer.conf"):
- msg += "there is no layer.conf inside the directory."
- elif path in layers:
- msg += "it is already in loaded layers."
- else:
- layer_store.append([path])
- return
- dialog = CrumbsMessageDialog(parent, lbl, gtk.MESSAGE_ERROR, msg)
- dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_OK)
- response = dialog.run()
- dialog.destroy()
-
- def layer_widget_del_clicked_cb(self, action, tree_selection, layer_store):
- model, iter = tree_selection.get_selected()
- if iter:
- layer_store.remove(iter)
-
-
- def gen_layer_widget(self, layers, layers_avail, window, tooltip=""):
- hbox = gtk.HBox(False, 6)
-
- layer_tv = gtk.TreeView()
- layer_tv.set_rules_hint(True)
- layer_tv.set_headers_visible(False)
- tree_selection = layer_tv.get_selection()
- tree_selection.set_mode(gtk.SELECTION_SINGLE)
-
-        # Enable drag and drop of rows, including row moves
- dnd_internal_target = ''
- dnd_targets = [(dnd_internal_target, gtk.TARGET_SAME_WIDGET, 0)]
- layer_tv.enable_model_drag_source( gtk.gdk.BUTTON1_MASK,
- dnd_targets,
- gtk.gdk.ACTION_MOVE)
- layer_tv.enable_model_drag_dest(dnd_targets,
- gtk.gdk.ACTION_MOVE)
- layer_tv.connect("drag_data_get", self.drag_data_get_cb)
- layer_tv.connect("drag_data_received", self.drag_data_received_cb)
-
-        col0 = gtk.TreeViewColumn('Path')
- cell0 = gtk.CellRendererText()
- cell0.set_padding(5,2)
- col0.pack_start(cell0, True)
- col0.set_cell_data_func(cell0, self.draw_layer_path_cb)
- layer_tv.append_column(col0)
-
- scroll = gtk.ScrolledWindow()
- scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
- scroll.set_shadow_type(gtk.SHADOW_IN)
- scroll.add(layer_tv)
-
- table_layer = gtk.Table(2, 10, False)
- hbox.pack_start(table_layer, expand=True, fill=True)
-
- table_layer.attach(scroll, 0, 10, 0, 1)
-
- layer_store = gtk.ListStore(gobject.TYPE_STRING)
- for layer in layers:
- layer_store.append([layer])
-
- col1 = gtk.TreeViewColumn('Enabled')
- layer_tv.append_column(col1)
-
- cell1 = CellRendererPixbufActivatable()
- cell1.set_fixed_size(-1,35)
- cell1.connect("clicked", self.del_cell_clicked_cb, layer_store)
- col1.pack_start(cell1, True)
- col1.set_cell_data_func(cell1, self.draw_delete_button_cb, layer_tv)
-
- add_button = gtk.Button()
- add_button.set_relief(gtk.RELIEF_NONE)
- box = gtk.HBox(False, 6)
- box.show()
- add_button.add(box)
- add_button.connect("enter-notify-event", self.add_hover_cb)
- add_button.connect("leave-notify-event", self.add_leave_cb)
- self.im = gtk.Image()
- self.im.set_from_file(hic.ICON_INDI_ADD_FILE)
- self.im.show()
- box.pack_start(self.im, expand=False, fill=False, padding=6)
- lbl = gtk.Label("Add layer")
- lbl.set_alignment(0.0, 0.5)
- lbl.show()
- box.pack_start(lbl, expand=True, fill=True, padding=6)
- add_button.connect("clicked", self.layer_widget_add_clicked_cb, layer_store, window)
- table_layer.attach(add_button, 0, 10, 1, 2, gtk.EXPAND | gtk.FILL, 0, 0, 6)
- layer_tv.set_model(layer_store)
-
- hbox.show_all()
-
- return hbox, layer_store
-
- def drag_data_get_cb(self, treeview, context, selection, target_id, etime):
- treeselection = treeview.get_selection()
- model, iter = treeselection.get_selected()
- data = model.get_value(iter, 0)
- selection.set(selection.target, 8, data)
-
- def drag_data_received_cb(self, treeview, context, x, y, selection, info, etime):
- model = treeview.get_model()
- data = selection.data
- drop_info = treeview.get_dest_row_at_pos(x, y)
- if drop_info:
- path, position = drop_info
- iter = model.get_iter(path)
- if (position == gtk.TREE_VIEW_DROP_BEFORE or position == gtk.TREE_VIEW_DROP_INTO_OR_BEFORE):
- model.insert_before(iter, [data])
- else:
- model.insert_after(iter, [data])
- else:
- model.append([data])
- if context.action == gtk.gdk.ACTION_MOVE:
- context.finish(True, True, etime)
- return
-
- def add_hover_cb(self, button, event):
- self.im.set_from_file(hic.ICON_INDI_ADD_HOVER_FILE)
-
- def add_leave_cb(self, button, event):
- self.im.set_from_file(hic.ICON_INDI_ADD_FILE)
-
- def __init__(self, title, layers, layers_non_removable, all_layers, parent, flags, buttons=None):
- super(LayerSelectionDialog, self).__init__(title, parent, flags, buttons)
-
- # class members from other objects
- self.layers = layers
- self.layers_non_removable = layers_non_removable
- self.all_layers = all_layers
- self.layers_changed = False
-
- # icon for remove button in TreeView
- im = gtk.Image()
- im.set_from_file(hic.ICON_INDI_REMOVE_FILE)
- self.rem_icon = im.get_pixbuf()
-
- # class members for internal use
- self.layer_store = None
-
- # create visual elements on the dialog
- self.create_visual_elements()
- self.connect("response", self.response_cb)
-
- def create_visual_elements(self):
- layer_widget, self.layer_store = self.gen_layer_widget(self.layers, self.all_layers, self, None)
- layer_widget.set_size_request(450, 250)
- self.vbox.pack_start(layer_widget, expand=True, fill=True)
- self.show_all()
-
- def response_cb(self, dialog, response_id):
- model = self.layer_store
- it = model.get_iter_first()
- layers = []
- while it:
- layers.append(model.get_value(it, 0))
- it = model.iter_next(it)
-
- self.layers_changed = (self.layers != layers)
- self.layers = layers
-
- """
-    A custom cell_data_func to draw a delete 'button' in the TreeView for layers
-    other than the meta layer, whose deletion is prevented so that the user
-    can't shoot themselves in the foot too badly.
- """
- def draw_delete_button_cb(self, col, cell, model, it, tv):
- path = model.get_value(it, 0)
- if path in self.layers_non_removable:
- cell.set_sensitive(False)
- cell.set_property('pixbuf', None)
- cell.set_property('mode', gtk.CELL_RENDERER_MODE_INERT)
- else:
- cell.set_property('pixbuf', self.rem_icon)
- cell.set_sensitive(True)
- cell.set_property('mode', gtk.CELL_RENDERER_MODE_ACTIVATABLE)
-
- return True
-
- """
- A custom cell_data_func to write an extra message into the layer path cell
- for the meta layer. We should inform the user that they can't remove it for
- their own safety.
- """
- def draw_layer_path_cb(self, col, cell, model, it):
- path = model.get_value(it, 0)
- if path in self.layers_non_removable:
- cell.set_property('markup', "<b>It cannot be removed</b>\n%s" % path)
- else:
- cell.set_property('text', path)
-
- def del_cell_clicked_cb(self, cell, path, model):
- it = model.get_iter_from_string(path)
- model.remove(it)
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/propertydialog.py b/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/propertydialog.py
deleted file mode 100644
index 09b9ce6de..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/propertydialog.py
+++ /dev/null
@@ -1,437 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011-2013 Intel Corporation
-#
-# Authored by Andrei Dinu <andrei.adrianx.dinu@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import string
-import gtk
-import gobject
-import os
-import tempfile
-import glib
-from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
-from bb.ui.crumbs.hig.settingsuihelper import SettingsUIHelper
-from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
-from bb.ui.crumbs.hig.layerselectiondialog import LayerSelectionDialog
-
-"""
-The following are convenience classes for implementing GNOME HIG compliant
-BitBake GUIs.
-In summary: spacing = 12px, border-width = 6px.
-"""
-
-class PropertyDialog(CrumbsDialog):
-
- def __init__(self, title, parent, information, flags, buttons=None):
-
- super(PropertyDialog, self).__init__(title, parent, flags, buttons)
-
- self.properties = information
-
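-        # The type of the information argument selects the layout: a recipe is
-        # passed as a 10-key dict, a package as a 5-key dict, and anything else
-        # (a '*'-separated markup string) is shown as plain information.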
- if len(self.properties) == 10:
- self.create_recipe_visual_elements()
- elif len(self.properties) == 5:
- self.create_package_visual_elements()
- else:
- self.create_information_visual_elements()
-
-
- def create_information_visual_elements(self):
-
- HOB_ICON_BASE_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), ("icons/"))
- ICON_PACKAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_display.png'))
-
- self.set_resizable(False)
-
- self.table = gtk.Table(1,1,False)
- self.table.set_row_spacings(0)
- self.table.set_col_spacings(0)
-
- self.image = gtk.Image()
- self.image.set_from_file(ICON_PACKAGES_DISPLAY_FILE)
- self.image.set_property("xalign",0)
- #self.vbox.add(self.image)
-
- image_info = self.properties.split("*")[0]
- info = self.properties.split("*")[1]
-
- vbox = gtk.VBox(True, spacing=30)
-
- self.label_short = gtk.Label()
- self.label_short.set_line_wrap(False)
- self.label_short.set_markup(image_info)
- self.label_short.set_property("xalign", 0)
-
- self.info_label = gtk.Label()
- self.info_label.set_line_wrap(True)
- self.info_label.set_markup(info)
- self.info_label.set_property("yalign", 0.5)
-
- self.table.attach(self.image, 0,1,0,1, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=5,ypadding=5)
- self.table.attach(self.label_short, 0,1,0,1, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=40,ypadding=5)
- self.table.attach(self.info_label, 0,1,1,2, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=40,ypadding=10)
-
- self.vbox.add(self.table)
- self.connect('delete-event', lambda w, e: self.destroy() or True)
-
- def treeViewTooltip( self, widget, e, tooltips, cell, emptyText="" ):
- try:
- (path,col,x,y) = widget.get_path_at_pos( int(e.x), int(e.y) )
- it = widget.get_model().get_iter(path)
- value = widget.get_model().get_value(it,cell)
- if value in self.tooltip_items:
- tooltips.set_tip(widget, self.tooltip_items[value])
- tooltips.enable()
- else:
- tooltips.set_tip(widget, emptyText)
-        except Exception:
- tooltips.set_tip(widget, emptyText)
-
-
- def create_package_visual_elements(self):
-
- import json
-
- name = self.properties['name']
- binb = self.properties['binb']
- size = self.properties['size']
- recipe = self.properties['recipe']
- file_list = json.loads(self.properties['files_list'])
-
- files_temp = ''
- paths_temp = ''
- files_binb = []
- paths_binb = []
-
- self.tooltip_items = {}
-
- self.set_resizable(False)
-
-        # keep only the part of the recipe field before any '+'
- recipe = recipe.split("+")[0]
-
- vbox = gtk.VBox(True,spacing = 0)
-
- ###################################### NAME ROW + COL #################################
-
- self.label_short = gtk.Label()
- self.label_short.set_size_request(300,-1)
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Name: </span>" + name)
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
-
- ###################################### SIZE ROW + COL ######################################
-
- self.label_short = gtk.Label()
- self.label_short.set_size_request(300,-1)
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Size: </span>" + size)
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
-
- ##################################### RECIPE ROW + COL #########################################
-
- self.label_short = gtk.Label()
- self.label_short.set_size_request(300,-1)
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Recipe: </span>" + recipe)
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
-
- ##################################### BINB ROW + COL #######################################
-
- if binb != '':
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Brought in by: </span>")
- self.label_short.set_property("xalign", 0)
-
- self.label_info = gtk.Label()
- self.label_info.set_size_request(300,-1)
- self.label_info.set_selectable(True)
- self.label_info.set_line_wrap(True)
- self.label_info.set_markup(binb)
- self.label_info.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
- self.vbox.add(self.label_info)
-
- #################################### FILES BROUGHT BY PACKAGES ###################################
-
- if file_list:
-
- self.textWindow = gtk.ScrolledWindow()
- self.textWindow.set_shadow_type(gtk.SHADOW_IN)
- self.textWindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- self.textWindow.set_size_request(100, 170)
-
- packagefiles_store = gtk.ListStore(str)
-
- self.packagefiles_tv = gtk.TreeView()
- self.packagefiles_tv.set_rules_hint(True)
- self.packagefiles_tv.set_headers_visible(True)
- self.textWindow.add(self.packagefiles_tv)
-
- self.cell1 = gtk.CellRendererText()
- col1 = gtk.TreeViewColumn('Package files', self.cell1)
- col1.set_cell_data_func(self.cell1, self.regex_field)
- self.packagefiles_tv.append_column(col1)
-
- items = file_list.keys()
- items.sort()
- for item in items:
- fullpath = item
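-                # Middle-ellipsize long paths: drop the middle character until
-                # 35 characters remain, then replace the middle with '...'; the
-                # full path stays available as a tooltip via tooltip_items.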
- while len(item) > 35:
- item = item[:len(item)/2] + "" + item[len(item)/2+1:]
- if len(item) == 35:
- item = item[:len(item)/2] + "..." + item[len(item)/2+3:]
- self.tooltip_items[item] = fullpath
-
- packagefiles_store.append([str(item)])
-
- self.packagefiles_tv.set_model(packagefiles_store)
-
- tips = gtk.Tooltips()
- tips.set_tip(self.packagefiles_tv, "")
- self.packagefiles_tv.connect("motion-notify-event", self.treeViewTooltip, tips, 0)
- self.packagefiles_tv.set_events(gtk.gdk.POINTER_MOTION_MASK)
-
- self.vbox.add(self.textWindow)
-
- self.vbox.show_all()
-
-
- def regex_field(self, column, cell, model, iter):
- cell.set_property('text', model.get_value(iter, 0))
- return
-
-
- def create_recipe_visual_elements(self):
-
- summary = self.properties['summary']
- name = self.properties['name']
- version = self.properties['version']
- revision = self.properties['revision']
- binb = self.properties['binb']
- group = self.properties['group']
- license = self.properties['license']
- homepage = self.properties['homepage']
- bugtracker = self.properties['bugtracker']
- description = self.properties['description']
-
- self.set_resizable(False)
-
-        # tidy up the version string and the summary
- version = version.split(":")[1]
- if len(version) > 30:
- version = version.split("+")[0]
- else:
- version = version.split("-")[0]
- license = license.replace("&" , "and")
- if (homepage == ''):
- homepage = 'unknown'
- if (bugtracker == ''):
- bugtracker = 'unknown'
- summary = summary.split("+")[0]
-
- #calculating the rows needed for the table
- binb_items_count = len(binb.split(','))
- binb_items = binb.split(',')
-
- vbox = gtk.VBox(False,spacing = 0)
-
- ######################################## SUMMARY LABEL #########################################
-
- if summary != '':
- self.label_short = gtk.Label()
- self.label_short.set_width_chars(37)
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<b>" + summary + "</b>")
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
-
- ########################################## NAME ROW + COL #######################################
-
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Name: </span>" + name)
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
-
- ####################################### VERSION ROW + COL ####################################
-
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Version: </span>" + version)
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
-
- ##################################### REVISION ROW + COL #####################################
-
- self.label_short = gtk.Label()
- self.label_short.set_line_wrap(True)
- self.label_short.set_selectable(True)
- self.label_short.set_markup("<span weight=\"bold\">Revision: </span>" + revision)
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
-
- ################################## GROUP ROW + COL ############################################
-
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Group: </span>" + group)
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
-
- ################################# HOMEPAGE ROW + COL ############################################
-
- if homepage != 'unknown':
- self.label_info = gtk.Label()
- self.label_info.set_selectable(True)
- self.label_info.set_line_wrap(True)
- if len(homepage) > 35:
- self.label_info.set_markup("<a href=\"" + homepage + "\">" + homepage[0:35] + "..." + "</a>")
- else:
- self.label_info.set_markup("<a href=\"" + homepage + "\">" + homepage[0:60] + "</a>")
-
- self.label_info.set_property("xalign", 0)
-
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<b>Homepage: </b>")
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
- self.vbox.add(self.label_info)
-
- ################################# BUGTRACKER ROW + COL ###########################################
-
- if bugtracker != 'unknown':
- self.label_info = gtk.Label()
- self.label_info.set_selectable(True)
- self.label_info.set_line_wrap(True)
- if len(bugtracker) > 35:
- self.label_info.set_markup("<a href=\"" + bugtracker + "\">" + bugtracker[0:35] + "..." + "</a>")
- else:
- self.label_info.set_markup("<a href=\"" + bugtracker + "\">" + bugtracker[0:60] + "</a>")
- self.label_info.set_property("xalign", 0)
-
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<b>Bugtracker: </b>")
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
- self.vbox.add(self.label_info)
-
- ################################# LICENSE ROW + COL ############################################
-
- self.label_info = gtk.Label()
- self.label_info.set_selectable(True)
- self.label_info.set_line_wrap(True)
- self.label_info.set_markup(license)
- self.label_info.set_property("xalign", 0)
-
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">License: </span>")
- self.label_short.set_property("xalign", 0)
-
- self.vbox.add(self.label_short)
- self.vbox.add(self.label_info)
-
- ################################### BINB ROW+COL #############################################
-
- if binb != '':
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Brought in by: </span>")
- self.label_short.set_property("xalign", 0)
- self.vbox.add(self.label_short)
- self.label_info = gtk.Label()
- self.label_info.set_selectable(True)
- self.label_info.set_width_chars(36)
- if len(binb) > 200:
- scrolled_window = gtk.ScrolledWindow()
- scrolled_window.set_policy(gtk.POLICY_NEVER,gtk.POLICY_ALWAYS)
- scrolled_window.set_size_request(100,100)
- self.label_info.set_markup(binb)
- self.label_info.set_padding(6,6)
- self.label_info.set_alignment(0,0)
- self.label_info.set_line_wrap(True)
- scrolled_window.add_with_viewport(self.label_info)
- self.vbox.add(scrolled_window)
- else:
- self.label_info.set_markup(binb)
- self.label_info.set_property("xalign", 0)
- self.label_info.set_line_wrap(True)
- self.vbox.add(self.label_info)
-
- ################################ DESCRIPTION TAG ROW #################################################
-
- self.label_short = gtk.Label()
- self.label_short.set_line_wrap(True)
- self.label_short.set_markup("<span weight=\"bold\">Description </span>")
- self.label_short.set_property("xalign", 0)
- self.vbox.add(self.label_short)
-
- ################################ DESCRIPTION INFORMATION ROW ##########################################
-
- hbox = gtk.HBox(True,spacing = 0)
-
- self.label_short = gtk.Label()
- self.label_short.set_selectable(True)
- self.label_short.set_width_chars(36)
- if len(description) > 200:
- scrolled_window = gtk.ScrolledWindow()
- scrolled_window.set_policy(gtk.POLICY_NEVER,gtk.POLICY_ALWAYS)
- scrolled_window.set_size_request(100,100)
- self.label_short.set_markup(description)
- self.label_short.set_padding(6,6)
- self.label_short.set_alignment(0,0)
- self.label_short.set_line_wrap(True)
- scrolled_window.add_with_viewport(self.label_short)
- self.vbox.add(scrolled_window)
- else:
- self.label_short.set_markup(description)
- self.label_short.set_property("xalign", 0)
- self.label_short.set_line_wrap(True)
- self.vbox.add(self.label_short)
-
- self.vbox.show_all()
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py b/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py
deleted file mode 100644
index e0285c93c..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py
+++ /dev/null
@@ -1,122 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011-2012 Intel Corporation
-#
-# Authored by Joshua Lock <josh@linux.intel.com>
-# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-import os
-from bb.ui.crumbs.hobwidget import HobInfoButton, HobButton, HobAltButton
-
-"""
-The following are convenience classes for implementing GNOME HIG compliant
-BitBake GUIs.
-In summary: spacing = 12px, border-width = 6px.
-"""
-
-class SettingsUIHelper():
-
- def gen_label_widget(self, content):
- label = gtk.Label()
- label.set_alignment(0, 0)
- label.set_markup(content)
- label.show()
- return label
-
- def gen_label_info_widget(self, content, tooltip):
- table = gtk.Table(1, 10, False)
- label = self.gen_label_widget(content)
- info = HobInfoButton(tooltip, self)
- table.attach(label, 0, 1, 0, 1, xoptions=gtk.FILL)
- table.attach(info, 1, 2, 0, 1, xoptions=gtk.FILL, xpadding=10)
- return table
-
- def gen_spinner_widget(self, content, lower, upper, tooltip=""):
- hbox = gtk.HBox(False, 12)
- adjust = gtk.Adjustment(value=content, lower=lower, upper=upper, step_incr=1)
- spinner = gtk.SpinButton(adjustment=adjust, climb_rate=1, digits=0)
-
- spinner.set_value(content)
- hbox.pack_start(spinner, expand=False, fill=False)
-
- info = HobInfoButton(tooltip, self)
- hbox.pack_start(info, expand=False, fill=False)
-
- hbox.show_all()
- return hbox, spinner
-
- def gen_combo_widget(self, curr_item, all_item, tooltip=""):
- hbox = gtk.HBox(False, 12)
- combo = gtk.combo_box_new_text()
- hbox.pack_start(combo, expand=False, fill=False)
-
- index = 0
- for item in all_item or []:
- combo.append_text(item)
- if item == curr_item:
- combo.set_active(index)
- index += 1
-
- info = HobInfoButton(tooltip, self)
- hbox.pack_start(info, expand=False, fill=False)
-
- hbox.show_all()
- return hbox, combo
-
- def entry_widget_select_path_cb(self, action, parent, entry):
- dialog = gtk.FileChooserDialog("", parent,
- gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
- text = entry.get_text()
- dialog.set_current_folder(text if len(text) > 0 else os.getcwd())
- button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
- HobAltButton.style_button(button)
- button = dialog.add_button("Open", gtk.RESPONSE_YES)
- HobButton.style_button(button)
- response = dialog.run()
- if response == gtk.RESPONSE_YES:
- path = dialog.get_filename()
- entry.set_text(path)
-
- dialog.destroy()
-
- def gen_entry_widget(self, content, parent, tooltip="", need_button=True):
- hbox = gtk.HBox(False, 12)
- entry = gtk.Entry()
- entry.set_text(content)
- entry.set_size_request(350,30)
-
- if need_button:
- table = gtk.Table(1, 10, False)
- hbox.pack_start(table, expand=True, fill=True)
- table.attach(entry, 0, 9, 0, 1, xoptions=gtk.SHRINK)
- image = gtk.Image()
- image.set_from_stock(gtk.STOCK_OPEN,gtk.ICON_SIZE_BUTTON)
- open_button = gtk.Button()
- open_button.set_image(image)
- open_button.connect("clicked", self.entry_widget_select_path_cb, parent, entry)
- table.attach(open_button, 9, 10, 0, 1, xoptions=gtk.SHRINK)
- else:
- hbox.pack_start(entry, expand=True, fill=True)
-
- if tooltip != "":
- info = HobInfoButton(tooltip, self)
- hbox.pack_start(info, expand=False, fill=False)
-
- hbox.show_all()
- return hbox, entry
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/hobcolor.py b/yocto-poky/bitbake/lib/bb/ui/crumbs/hobcolor.py
deleted file mode 100644
index 3316542a2..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/hobcolor.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2012 Intel Corporation
-#
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-class HobColors:
- WHITE = "#ffffff"
- PALE_GREEN = "#aaffaa"
- ORANGE = "#eb8e68"
- PALE_RED = "#ffaaaa"
- GRAY = "#aaaaaa"
- LIGHT_GRAY = "#dddddd"
- SLIGHT_DARK = "#5f5f5f"
- DARK = "#3c3b37"
- BLACK = "#000000"
- PALE_BLUE = "#53b8ff"
- DEEP_RED = "#aa3e3e"
- KHAKI = "#fff68f"
-
- OK = WHITE
- RUNNING = PALE_GREEN
- WARNING = ORANGE
- ERROR = PALE_RED
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/hobwidget.py b/yocto-poky/bitbake/lib/bb/ui/crumbs/hobwidget.py
deleted file mode 100644
index 2b969c146..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/hobwidget.py
+++ /dev/null
@@ -1,904 +0,0 @@
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011-2012 Intel Corporation
-#
-# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-import gtk
-import gobject
-import os
-import os.path
-import sys
-import pango, pangocairo
-import cairo
-import math
-
-from bb.ui.crumbs.hobcolor import HobColors
-from bb.ui.crumbs.persistenttooltip import PersistentTooltip
-
-class hwc:
-
- MAIN_WIN_WIDTH = 1024
- MAIN_WIN_HEIGHT = 700
-
-class hic:
-
- HOB_ICON_BASE_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), ("ui/icons/"))
-
- ICON_RCIPE_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('recipe/recipe_display.png'))
- ICON_RCIPE_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('recipe/recipe_hover.png'))
- ICON_PACKAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('packages/packages_display.png'))
- ICON_PACKAGES_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('packages/packages_hover.png'))
- ICON_LAYERS_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('layers/layers_display.png'))
- ICON_LAYERS_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('layers/layers_hover.png'))
- ICON_IMAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('images/images_display.png'))
- ICON_IMAGES_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('images/images_hover.png'))
- ICON_SETTINGS_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('settings/settings_display.png'))
- ICON_SETTINGS_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('settings/settings_hover.png'))
- ICON_INFO_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_display.png'))
- ICON_INFO_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_hover.png'))
- ICON_INDI_CONFIRM_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/confirmation.png'))
- ICON_INDI_ERROR_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/denied.png'))
- ICON_INDI_REMOVE_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/remove.png'))
- ICON_INDI_REMOVE_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/remove-hover.png'))
- ICON_INDI_ADD_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/add.png'))
- ICON_INDI_ADD_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/add-hover.png'))
- ICON_INDI_REFRESH_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/refresh.png'))
- ICON_INDI_ALERT_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/alert.png'))
- ICON_INDI_TICK_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/tick.png'))
- ICON_INDI_INFO_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/info.png'))
-
-class HobViewTable (gtk.VBox):
- """
- A VBox to contain the table for different recipe views and package view
- """
- __gsignals__ = {
- "toggled" : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- (gobject.TYPE_PYOBJECT,
- gobject.TYPE_STRING,
- gobject.TYPE_INT,
- gobject.TYPE_PYOBJECT,)),
- "row-activated" : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- (gobject.TYPE_PYOBJECT,
- gobject.TYPE_PYOBJECT,)),
- "cell-fadeinout-stopped" : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- (gobject.TYPE_PYOBJECT,
- gobject.TYPE_PYOBJECT,
- gobject.TYPE_PYOBJECT,)),
- }
-
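-    # For illustration, a columns spec is a list of dicts along these lines
-    # (keys other than col_name/col_id are optional):
-    #   [{'col_name': 'Recipe', 'col_id': 0, 'col_style': 'text', 'expand': 'yes'},
-    #    {'col_name': 'Included', 'col_id': 1, 'col_style': 'check toggle'}]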
- def __init__(self, columns, name):
- gtk.VBox.__init__(self, False, 6)
- self.table_tree = gtk.TreeView()
- self.table_tree.set_headers_visible(True)
- self.table_tree.set_headers_clickable(True)
- self.table_tree.set_rules_hint(True)
- self.table_tree.set_enable_tree_lines(True)
- self.table_tree.get_selection().set_mode(gtk.SELECTION_SINGLE)
- self.toggle_columns = []
- self.table_tree.connect("row-activated", self.row_activated_cb)
- self.top_bar = None
- self.tab_name = name
-
- for i, column in enumerate(columns):
- col_name = column['col_name']
- col = gtk.TreeViewColumn(col_name)
- col.set_clickable(True)
- col.set_resizable(True)
- if self.tab_name.startswith('Included'):
-                if col_name != 'Included':
- col.set_sort_column_id(column['col_id'])
- else:
- col.set_sort_column_id(column['col_id'])
- if 'col_min' in column.keys():
- col.set_min_width(column['col_min'])
- if 'col_max' in column.keys():
- col.set_max_width(column['col_max'])
- if 'expand' in column.keys():
- col.set_expand(True)
- self.table_tree.append_column(col)
-
- if (not 'col_style' in column.keys()) or column['col_style'] == 'text':
- cell = gtk.CellRendererText()
- col.pack_start(cell, True)
- col.set_attributes(cell, text=column['col_id'])
- if 'col_t_id' in column.keys():
- col.add_attribute(cell, 'font', column['col_t_id'])
- elif column['col_style'] == 'check toggle':
- cell = HobCellRendererToggle()
- cell.set_property('activatable', True)
- cell.connect("toggled", self.toggled_cb, i, self.table_tree)
- cell.connect_render_state_changed(self.stop_cell_fadeinout_cb, self.table_tree)
- self.toggle_id = i
- col.pack_end(cell, True)
- col.set_attributes(cell, active=column['col_id'])
- self.toggle_columns.append(col_name)
- if 'col_group' in column.keys():
- col.set_cell_data_func(cell, self.set_group_number_cb)
- elif column['col_style'] == 'radio toggle':
- cell = gtk.CellRendererToggle()
- cell.set_property('activatable', True)
- cell.set_radio(True)
- cell.connect("toggled", self.toggled_cb, i, self.table_tree)
- self.toggle_id = i
- col.pack_end(cell, True)
- col.set_attributes(cell, active=column['col_id'])
- self.toggle_columns.append(col_name)
- elif column['col_style'] == 'binb':
- cell = gtk.CellRendererText()
- col.pack_start(cell, True)
- col.set_cell_data_func(cell, self.display_binb_cb, column['col_id'])
- if 'col_t_id' in column.keys():
- col.add_attribute(cell, 'font', column['col_t_id'])
-
- self.scroll = gtk.ScrolledWindow()
- self.scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
- self.scroll.add(self.table_tree)
-
- self.pack_end(self.scroll, True, True, 0)
-
- def add_no_result_bar(self, entry):
- color = HobColors.KHAKI
- self.top_bar = gtk.EventBox()
- self.top_bar.set_size_request(-1, 70)
- self.top_bar.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
- self.top_bar.set_flags(gtk.CAN_DEFAULT)
- self.top_bar.grab_default()
-
- no_result_tab = gtk.Table(5, 20, True)
- self.top_bar.add(no_result_tab)
-
- label = gtk.Label()
- label.set_alignment(0.0, 0.5)
- title = "No results matching your search"
- label.set_markup("<span size='x-large'><b>%s</b></span>" % title)
- no_result_tab.attach(label, 1, 14, 1, 4)
-
- clear_button = HobButton("Clear search")
- clear_button.set_tooltip_text("Clear search query")
- clear_button.connect('clicked', self.set_search_entry_clear_cb, entry)
- no_result_tab.attach(clear_button, 16, 19, 1, 4)
-
- self.pack_start(self.top_bar, False, True, 12)
- self.top_bar.show_all()
-
- def set_search_entry_clear_cb(self, button, search):
- if search.get_editable() == True:
- search.set_text("")
- search.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
- search.grab_focus()
-
- def display_binb_cb(self, col, cell, model, it, col_id):
- binb = model.get_value(it, col_id)
- # Just display the first item
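-        # e.g. "User Selected, zlib, openssl" is shown as "zlib (+2)", while
-        # "zlib, openssl" is shown as "zlib (+1)"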
- if binb:
- bin = binb.split(', ')
- total_no = len(bin)
- if total_no > 1 and bin[0] == "User Selected":
- if total_no > 2:
- present_binb = bin[1] + ' (+' + str(total_no - 1) + ')'
- else:
- present_binb = bin[1]
- else:
- if total_no > 1:
- present_binb = bin[0] + ' (+' + str(total_no - 1) + ')'
- else:
- present_binb = bin[0]
- cell.set_property('text', present_binb)
- else:
- cell.set_property('text', "")
- return True
-
- def set_model(self, tree_model):
- self.table_tree.set_model(tree_model)
-
- def toggle_default(self):
- model = self.table_tree.get_model()
- if not model:
- return
- iter = model.get_iter_first()
- if iter:
- rowpath = model.get_path(iter)
- model[rowpath][self.toggle_id] = True
-
- def toggled_cb(self, cell, path, columnid, tree):
- self.emit("toggled", cell, path, columnid, tree)
-
- def row_activated_cb(self, tree, path, view_column):
- if not view_column.get_title() in self.toggle_columns:
- self.emit("row-activated", tree.get_model(), path)
-
- def stop_cell_fadeinout_cb(self, ctrl, cell, tree):
- self.emit("cell-fadeinout-stopped", ctrl, cell, tree)
-
- def set_group_number_cb(self, col, cell, model, iter):
- if model and (model.iter_parent(iter) == None):
- cell.cell_attr["number_of_children"] = model.iter_n_children(iter)
- else:
- cell.cell_attr["number_of_children"] = 0
-
- def connect_group_selection(self, cb_func):
- self.table_tree.get_selection().connect("changed", cb_func)
-
-"""
-A method to calculate a softened value for the colour of a widget when in the
-provided state.
-
-widget: the widget whose style to use
-state: the state of the widget to use the style for
-
-Returns a string value representing the softened colour
-"""
-def soften_color(widget, state=gtk.STATE_NORMAL):
-    # this colour munging routine is heavily inspired by gdu_util_get_mix_color()
- # from gnome-disk-utility:
- # http://git.gnome.org/browse/gnome-disk-utility/tree/src/gdu-gtk/gdu-gtk.c?h=gnome-3-0
- blend = 0.7
- style = widget.get_style()
- color = style.text[state]
- color.red = color.red * blend + style.base[state].red * (1.0 - blend)
- color.green = color.green * blend + style.base[state].green * (1.0 - blend)
- color.blue = color.blue * blend + style.base[state].blue * (1.0 - blend)
- return color.to_string()
-
-class BaseHobButton(gtk.Button):
- """
- A gtk.Button subclass which follows the visual design of Hob for primary
- action buttons
-
- label: the text to display as the button's label
- """
- def __init__(self, label):
- gtk.Button.__init__(self, label)
- HobButton.style_button(self)
-
- @staticmethod
- def style_button(button):
- style = button.get_style()
- style = gtk.rc_get_style_by_paths(gtk.settings_get_default(), 'gtk-button', 'gtk-button', gobject.TYPE_NONE)
-
- button.set_flags(gtk.CAN_DEFAULT)
- button.grab_default()
-
-# label = "<span size='x-large'><b>%s</b></span>" % gobject.markup_escape_text(button.get_label())
- label = button.get_label()
- button.set_label(label)
- button.child.set_use_markup(True)
-
-class HobButton(BaseHobButton):
- """
- A gtk.Button subclass which follows the visual design of Hob for primary
- action buttons
-
- label: the text to display as the button's label
- """
- def __init__(self, label):
- BaseHobButton.__init__(self, label)
- HobButton.style_button(self)
-
-class HobAltButton(BaseHobButton):
- """
-    A gtk.Button subclass which has no relief, and so is more discreet
- """
- def __init__(self, label):
- BaseHobButton.__init__(self, label)
- HobAltButton.style_button(self)
-
- """
- A callback for the state-changed event to ensure the text is displayed
- differently when the widget is not sensitive
- """
- @staticmethod
- def desensitise_on_state_change_cb(button, state):
- if not button.get_property("sensitive"):
- HobAltButton.set_text(button, False)
- else:
- HobAltButton.set_text(button, True)
-
- """
- Set the button label with an appropriate colour for the current widget state
- """
- @staticmethod
- def set_text(button, sensitive=True):
- if sensitive:
- colour = HobColors.PALE_BLUE
- else:
- colour = HobColors.LIGHT_GRAY
- button.set_label("<span size='large' color='%s'><b>%s</b></span>" % (colour, gobject.markup_escape_text(button.text)))
- button.child.set_use_markup(True)
-
-class HobImageButton(gtk.Button):
- """
- A gtk.Button with an icon and two rows of text, the second of which is
- displayed in a blended colour.
-
- primary_text: the main button label
- secondary_text: optional second line of text
- icon_path: path to the icon file to display on the button
- """
- def __init__(self, primary_text, secondary_text="", icon_path="", hover_icon_path=""):
- gtk.Button.__init__(self)
- self.set_relief(gtk.RELIEF_NONE)
-
- self.icon_path = icon_path
- self.hover_icon_path = hover_icon_path
-
- hbox = gtk.HBox(False, 10)
- hbox.show()
- self.add(hbox)
- self.icon = gtk.Image()
- self.icon.set_from_file(self.icon_path)
- self.icon.set_alignment(0.5, 0.0)
- self.icon.show()
- if self.hover_icon_path and len(self.hover_icon_path):
- self.connect("enter-notify-event", self.set_hover_icon_cb)
- self.connect("leave-notify-event", self.set_icon_cb)
- hbox.pack_start(self.icon, False, False, 0)
- label = gtk.Label()
- label.set_alignment(0.0, 0.5)
- colour = soften_color(label)
- mark = "<span size='x-large'>%s</span>\n<span size='medium' fgcolor='%s' weight='ultralight'>%s</span>" % (primary_text, colour, secondary_text)
- label.set_markup(mark)
- label.show()
- hbox.pack_start(label, True, True, 0)
-
- def set_hover_icon_cb(self, widget, event):
- self.icon.set_from_file(self.hover_icon_path)
-
- def set_icon_cb(self, widget, event):
- self.icon.set_from_file(self.icon_path)
-
-class HobInfoButton(gtk.EventBox):
- """
- This class implements a button-like widget per the Hob visual and UX designs
- which will display a persistent tooltip, with the contents of tip_markup, when
- clicked.
-
- tip_markup: the Pango Markup to be displayed in the persistent tooltip
- """
- def __init__(self, tip_markup, parent=None):
- gtk.EventBox.__init__(self)
- self.image = gtk.Image()
- self.image.set_from_file(
- hic.ICON_INFO_DISPLAY_FILE)
- self.image.show()
- self.add(self.image)
- self.tip_markup = tip_markup
- self.my_parent = parent
-
- self.set_events(gtk.gdk.BUTTON_RELEASE |
- gtk.gdk.ENTER_NOTIFY_MASK |
- gtk.gdk.LEAVE_NOTIFY_MASK)
-
- self.connect("button-release-event", self.button_release_cb)
- self.connect("enter-notify-event", self.mouse_in_cb)
- self.connect("leave-notify-event", self.mouse_out_cb)
-
- """
-    When the mouse click is released, emulate a button-click and show the associated
- PersistentTooltip
- """
- def button_release_cb(self, widget, event):
- from bb.ui.crumbs.hig.propertydialog import PropertyDialog
- self.dialog = PropertyDialog(title = '',
- parent = self.my_parent,
- information = self.tip_markup,
- flags = gtk.DIALOG_DESTROY_WITH_PARENT
- | gtk.DIALOG_NO_SEPARATOR)
-
- button = self.dialog.add_button("Close", gtk.RESPONSE_CANCEL)
- HobAltButton.style_button(button)
- button.connect("clicked", lambda w: self.dialog.destroy())
- self.dialog.show_all()
- self.dialog.run()
-
- """
- Change to the prelight image when the mouse enters the widget
- """
- def mouse_in_cb(self, widget, event):
- self.image.set_from_file(hic.ICON_INFO_HOVER_FILE)
-
- """
-    Change to the stock image when the mouse leaves the widget
- """
- def mouse_out_cb(self, widget, event):
- self.image.set_from_file(hic.ICON_INFO_DISPLAY_FILE)
-
-class HobIndicator(gtk.DrawingArea):
- def __init__(self, count):
- gtk.DrawingArea.__init__(self)
- # Set no window for transparent background
- self.set_has_window(False)
- self.set_size_request(38,38)
- # We need to pass through button clicks
- self.add_events(gtk.gdk.BUTTON_PRESS_MASK | gtk.gdk.BUTTON_RELEASE_MASK)
-
- self.connect('expose-event', self.expose)
-
- self.count = count
- self.color = HobColors.GRAY
-
- def expose(self, widget, event):
- if self.count and self.count > 0:
- ctx = widget.window.cairo_create()
-
- x, y, w, h = self.allocation
-
- ctx.set_operator(cairo.OPERATOR_OVER)
- ctx.set_source_color(gtk.gdk.color_parse(self.color))
- ctx.translate(w/2, h/2)
- ctx.arc(x, y, min(w,h)/2 - 2, 0, 2*math.pi)
- ctx.fill_preserve()
-
- layout = self.create_pango_layout(str(self.count))
- textw, texth = layout.get_pixel_size()
- x = (w/2)-(textw/2) + x
- y = (h/2) - (texth/2) + y
- ctx.move_to(x, y)
- self.window.draw_layout(self.style.light_gc[gtk.STATE_NORMAL], int(x), int(y), layout)
-
- def set_count(self, count):
- self.count = count
-
- def set_active(self, active):
- if active:
- self.color = HobColors.DEEP_RED
- else:
- self.color = HobColors.GRAY
-
-class HobTabLabel(gtk.HBox):
- def __init__(self, text, count=0):
- gtk.HBox.__init__(self, False, 0)
- self.indicator = HobIndicator(count)
- self.indicator.show()
- self.pack_end(self.indicator, False, False)
- self.lbl = gtk.Label(text)
- self.lbl.set_alignment(0.0, 0.5)
- self.lbl.show()
- self.pack_end(self.lbl, True, True, 6)
-
- def set_count(self, count):
- self.indicator.set_count(count)
-
- def set_active(self, active=True):
- self.indicator.set_active(active)
-
-class HobNotebook(gtk.Notebook):
- def __init__(self):
- gtk.Notebook.__init__(self)
- self.set_property('homogeneous', True)
-
- self.pages = []
-
- self.search = None
- self.search_focus = False
- self.page_changed = False
-
- self.connect("switch-page", self.page_changed_cb)
-
- self.show_all()
-
- def page_changed_cb(self, nb, page, page_num):
- for p, lbl in enumerate(self.pages):
- if p == page_num:
- lbl.set_active()
- else:
- lbl.set_active(False)
-
- if self.search:
- self.page_changed = True
- self.reset_entry(self.search, page_num)
-
- def append_page(self, child, tab_label, tab_tooltip=None):
- label = HobTabLabel(tab_label)
- if tab_tooltip:
- label.set_tooltip_text(tab_tooltip)
- label.set_active(False)
- self.pages.append(label)
- gtk.Notebook.append_page(self, child, label)
-
- def set_entry(self, names, tips):
- self.search = gtk.Entry()
- self.search_names = names
- self.search_tips = tips
- style = self.search.get_style()
- style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.GRAY, False, False)
- self.search.set_style(style)
- self.search.set_text(names[0])
- self.search.set_tooltip_text(self.search_tips[0])
- self.search.props.has_tooltip = True
-
- self.search.set_editable(False)
- self.search.set_icon_from_stock(gtk.ENTRY_ICON_SECONDARY, gtk.STOCK_CLEAR)
- self.search.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
- self.search.connect("icon-release", self.set_search_entry_clear_cb)
- self.search.set_width_chars(30)
- self.search.show()
-
- self.search.connect("focus-in-event", self.set_search_entry_editable_cb)
- self.search.connect("focus-out-event", self.set_search_entry_reset_cb)
- self.set_action_widget(self.search, gtk.PACK_END)
-
- def show_indicator_icon(self, title, number):
- for child in self.pages:
- if child.lbl.get_label() == title:
- child.set_count(number)
-
- def hide_indicator_icon(self, title):
- for child in self.pages:
- if child.lbl.get_label() == title:
- child.set_count(0)
-
- def set_search_entry_editable_cb(self, search, event):
- self.search_focus = True
- search.set_editable(True)
- text = search.get_text()
- if text in self.search_names:
- search.set_text("")
- style = self.search.get_style()
- style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.BLACK, False, False)
- search.set_style(style)
-
- def set_search_entry_reset_cb(self, search, event):
- page_num = self.get_current_page()
- text = search.get_text()
- if not text:
- self.reset_entry(search, page_num)
-
- def reset_entry(self, entry, page_num):
- style = entry.get_style()
- style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.GRAY, False, False)
- entry.set_style(style)
- entry.set_text(self.search_names[page_num])
- entry.set_tooltip_text(self.search_tips[page_num])
- entry.set_editable(False)
- entry.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
-
- def set_search_entry_clear_cb(self, search, icon_pos, event):
- if search.get_editable() == True:
- search.set_text("")
- search.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
- search.grab_focus()
-
- def set_page(self, title):
- for child in self.pages:
- if child.lbl.get_label() == title:
- child.grab_focus()
- self.set_current_page(self.pages.index(child))
- return
-
-class HobWarpCellRendererText(gtk.CellRendererText):
- def __init__(self, col_number):
- gtk.CellRendererText.__init__(self)
- self.set_property("wrap-mode", pango.WRAP_WORD_CHAR)
- self.set_property("wrap-width", 300) # default value wrap width is 300
- self.col_n = col_number
-
- def do_render(self, window, widget, background_area, cell_area, expose_area, flags):
- if widget:
- self.props.wrap_width = self.get_resized_wrap_width(widget, widget.get_column(self.col_n))
- return gtk.CellRendererText.do_render(self, window, widget, background_area, cell_area, expose_area, flags)
-
- def get_resized_wrap_width(self, treeview, column):
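-        # Wrap to whatever width is left for this column once the other columns
-        # and the horizontal separators have been subtracted from the treeview.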
- otherCols = []
- for col in treeview.get_columns():
- if col != column:
- otherCols.append(col)
- adjwidth = treeview.allocation.width - sum(c.get_width() for c in otherCols)
- adjwidth -= treeview.style_get_property("horizontal-separator") * 4
- if self.props.wrap_width == adjwidth or adjwidth <= 0:
- adjwidth = self.props.wrap_width
- return adjwidth
-
-gobject.type_register(HobWarpCellRendererText)
-
-class HobIconChecker(hic):
- def set_hob_icon_to_stock_icon(self, file_path, stock_id=""):
- try:
- pixbuf = gtk.gdk.pixbuf_new_from_file(file_path)
- except Exception, e:
- return None
-
- if stock_id and (gtk.icon_factory_lookup_default(stock_id) == None):
- icon_factory = gtk.IconFactory()
- icon_factory.add_default()
- icon_factory.add(stock_id, gtk.IconSet(pixbuf))
- gtk.stock_add([(stock_id, '_label', 0, 0, '')])
-
- return icon_factory.lookup(stock_id)
-
- return None
-
- """
-    To keep Hob icons consistent and avoid icons varying with the system or GTK
-    version, some 'gtk' stock icons are replaced with 'hob' icons.
-    This function checks stock_name and, where a hob replacement exists, returns
-    the hob id in place of the gtk id; otherwise it returns stock_name unchanged.
- """
- def check_stock_icon(self, stock_name=""):
- HOB_CHECK_STOCK_NAME = {
- ('hic-dialog-info', 'gtk-dialog-info', 'dialog-info') : self.ICON_INDI_INFO_FILE,
- ('hic-ok', 'gtk-ok', 'ok') : self.ICON_INDI_TICK_FILE,
- ('hic-dialog-error', 'gtk-dialog-error', 'dialog-error') : self.ICON_INDI_ERROR_FILE,
- ('hic-dialog-warning', 'gtk-dialog-warning', 'dialog-warning') : self.ICON_INDI_ALERT_FILE,
- ('hic-task-refresh', 'gtk-execute', 'execute') : self.ICON_INDI_REFRESH_FILE,
- }
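-        # e.g. check_stock_icon('gtk-ok') returns 'hic-ok', registering the hob
-        # tick icon in the default icon factory on first use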
- valid_stock_id = stock_name
- if stock_name:
- for names, path in HOB_CHECK_STOCK_NAME.iteritems():
- if stock_name in names:
- valid_stock_id = names[0]
- if not gtk.icon_factory_lookup_default(valid_stock_id):
- self.set_hob_icon_to_stock_icon(path, valid_stock_id)
-
- return valid_stock_id
-
-class HobCellRendererController(gobject.GObject):
- (MODE_CYCLE_RUNNING, MODE_ONE_SHORT) = range(2)
- __gsignals__ = {
- "run-timer-stopped" : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- }
- def __init__(self, runningmode=MODE_CYCLE_RUNNING, is_draw_row=False):
- gobject.GObject.__init__(self)
- self.timeout_id = None
- self.current_angle_pos = 0.0
- self.step_angle = 0.0
- self.tree_headers_height = 0
- self.running_cell_areas = []
- self.running_mode = runningmode
- self.is_queue_draw_row_area = is_draw_row
- self.force_stop_enable = False
-
- def is_active(self):
- if self.timeout_id:
- return True
- else:
- return False
-
- def reset_run(self):
- self.force_stop()
- self.running_cell_areas = []
- self.current_angle_pos = 0.0
- self.step_angle = 0.0
-
-    ''' time_iterval: timer tick in ms (1~1000), used as the basic interval for the timer
-        init_usrdata: the value the progress indicator starts at
-        min_usrdata: the minimum of the user data range
-        max_usrdata: the maximum of the user data range
-        step: how far to advance on each tick
-        Note: init_usrdata should lie between min_usrdata and max_usrdata,
-        max_usrdata should be > min_usrdata, and step should be < (max_usrdata - min_usrdata)
-    '''
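-    # e.g. start_run(200, 0, 0, 1000, 150, tree) ticks every 200ms and advances
-    # the position by 15% of the full cycle per tick (see do_render usage below)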
- def start_run(self, time_iterval, init_usrdata, min_usrdata, max_usrdata, step, tree):
- if (not time_iterval) or (not max_usrdata):
- return
- usr_range = (max_usrdata - min_usrdata) * 1.0
- self.current_angle_pos = (init_usrdata * 1.0) / usr_range
- self.step_angle = (step * 1) / usr_range
- self.timeout_id = gobject.timeout_add(int(time_iterval),
- self.make_image_on_progressing_cb, tree)
- self.tree_headers_height = self.get_treeview_headers_height(tree)
- self.force_stop_enable = False
-
- def force_stop(self):
- self.emit("run-timer-stopped")
- self.force_stop_enable = True
- if self.timeout_id:
- if gobject.source_remove(self.timeout_id):
- self.timeout_id = None
-
- def on_draw_pixbuf_cb(self, pixbuf, cr, x, y, img_width, img_height, do_refresh=True):
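-        # Centre the pixbuf at (x + r, y + r) and, when refreshing, rotate the
-        # cairo context by the current angle so the icon appears to spin.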
- if pixbuf:
- r = max(img_width/2, img_height/2)
- cr.translate(x + r, y + r)
- if do_refresh:
- cr.rotate(2 * math.pi * self.current_angle_pos)
-
- cr.set_source_pixbuf(pixbuf, -img_width/2, -img_height/2)
- cr.paint()
-
- def on_draw_fadeinout_cb(self, cr, color, x, y, width, height, do_fadeout=True):
- if do_fadeout:
- alpha = self.current_angle_pos * 0.8
- else:
- alpha = (1.0 - self.current_angle_pos) * 0.8
-
- cr.set_source_rgba(color.red, color.green, color.blue, alpha)
- cr.rectangle(x, y, width, height)
- cr.fill()
-
- def get_treeview_headers_height(self, tree):
- if tree and (tree.get_property("headers-visible") == True):
- height = tree.get_allocation().height - tree.get_bin_window().get_size()[1]
- return height
-
- return 0
-
- def make_image_on_progressing_cb(self, tree):
- self.current_angle_pos += self.step_angle
- if self.running_mode == self.MODE_CYCLE_RUNNING:
- if (self.current_angle_pos >= 1):
- self.current_angle_pos = 0
- else:
- if self.current_angle_pos > 1:
- self.force_stop()
- return False
-
- if self.is_queue_draw_row_area:
- for path in self.running_cell_areas:
- rect = tree.get_cell_area(path, tree.get_column(0))
- row_x, _, row_width, _ = tree.get_visible_rect()
- tree.queue_draw_area(row_x, rect.y + self.tree_headers_height, row_width, rect.height)
- else:
- for rect in self.running_cell_areas:
- tree.queue_draw_area(rect.x, rect.y + self.tree_headers_height, rect.width, rect.height)
-
- return (not self.force_stop_enable)
-
- def append_running_cell_area(self, cell_area):
- if cell_area and (cell_area not in self.running_cell_areas):
- self.running_cell_areas.append(cell_area)
-
- def remove_running_cell_area(self, cell_area):
- if cell_area in self.running_cell_areas:
- self.running_cell_areas.remove(cell_area)
- if not self.running_cell_areas:
- self.reset_run()
-
-gobject.type_register(HobCellRendererController)
-
-class HobCellRendererPixbuf(gtk.CellRendererPixbuf):
- def __init__(self):
- gtk.CellRendererPixbuf.__init__(self)
- self.control = HobCellRendererController()
-        # icon checker used to map gtk stock icons to hob icons
- self.checker = HobIconChecker()
- self.set_property("stock-size", gtk.ICON_SIZE_DND)
-
- def get_pixbuf_from_stock_icon(self, widget, stock_id="", size=gtk.ICON_SIZE_DIALOG):
- if widget and stock_id and gtk.icon_factory_lookup_default(stock_id):
- return widget.render_icon(stock_id, size)
-
- return None
-
- def set_icon_name_to_id(self, new_name):
- if new_name and type(new_name) == str:
-            # check whether the name needs to be mapped to a hob icon
- name = self.checker.check_stock_icon(new_name)
- if name.startswith("hic") or name.startswith("gtk"):
- stock_id = name
- else:
- stock_id = 'gtk-' + name
-
- return stock_id
-
-    ''' Render the cell; the "icon-name" property takes priority.
-        'hic-task-refresh' animates the pixbuf; anything else draws a static
-        pixbuf taken from the pixbuf property or the stock image.
-    '''
-    def do_render(self, window, tree, background_area, cell_area, expose_area, flags):
- if (not self.control) or (not tree):
- return
-
- x, y, w, h = self.on_get_size(tree, cell_area)
- x += cell_area.x
- y += cell_area.y
- w -= 2 * self.get_property("xpad")
- h -= 2 * self.get_property("ypad")
-
- stock_id = ""
- if self.props.icon_name:
- stock_id = self.set_icon_name_to_id(self.props.icon_name)
- elif self.props.stock_id:
- stock_id = self.props.stock_id
- elif self.props.pixbuf:
- pix = self.props.pixbuf
- else:
- return
-
- if stock_id:
- pix = self.get_pixbuf_from_stock_icon(tree, stock_id, self.props.stock_size)
- if stock_id == 'hic-task-refresh':
- self.control.append_running_cell_area(cell_area)
- if self.control.is_active():
- self.control.on_draw_pixbuf_cb(pix, window.cairo_create(), x, y, w, h, True)
- else:
- self.control.start_run(200, 0, 0, 1000, 150, tree)
- else:
- self.control.remove_running_cell_area(cell_area)
- self.control.on_draw_pixbuf_cb(pix, window.cairo_create(), x, y, w, h, False)
-
- def on_get_size(self, widget, cell_area):
- if self.props.icon_name or self.props.pixbuf or self.props.stock_id:
- w, h = gtk.icon_size_lookup(self.props.stock_size)
- calc_width = self.get_property("xpad") * 2 + w
- calc_height = self.get_property("ypad") * 2 + h
- x_offset = 0
- y_offset = 0
- if cell_area and w > 0 and h > 0:
- x_offset = self.get_property("xalign") * (cell_area.width - calc_width - self.get_property("xpad"))
- y_offset = self.get_property("yalign") * (cell_area.height - calc_height - self.get_property("ypad"))
-
- return x_offset, y_offset, w, h
-
- return 0, 0, 0, 0
-
-gobject.type_register(HobCellRendererPixbuf)
-
-class HobCellRendererToggle(gtk.CellRendererToggle):
- def __init__(self):
- gtk.CellRendererToggle.__init__(self)
- self.ctrl = HobCellRendererController(is_draw_row=True)
- self.ctrl.running_mode = self.ctrl.MODE_ONE_SHORT
- self.cell_attr = {"fadeout": False, "number_of_children": 0}
-
- def do_render(self, window, widget, background_area, cell_area, expose_area, flags):
- if (not self.ctrl) or (not widget):
- return
-
- if flags & gtk.CELL_RENDERER_SELECTED:
- state = gtk.STATE_SELECTED
- else:
- state = gtk.STATE_NORMAL
-
- if self.ctrl.is_active():
- path = widget.get_path_at_pos(cell_area.x + cell_area.width/2, cell_area.y + cell_area.height/2)
-            # cell_area coordinates can be negative (e.g. while dragging the
-            # scroll bar) and fall outside the tree container, in which case
-            # no path can be found
- if not path: return
- path = path[0]
- if path in self.ctrl.running_cell_areas:
- cr = window.cairo_create()
- color = widget.get_style().base[state]
-
- row_x, _, row_width, _ = widget.get_visible_rect()
- border_y = self.get_property("ypad")
- self.ctrl.on_draw_fadeinout_cb(cr, color, row_x, cell_area.y - border_y, row_width, \
- cell_area.height + border_y * 2, self.cell_attr["fadeout"])
- # draw number of a group
- if self.cell_attr["number_of_children"]:
- text = "%d pkg" % self.cell_attr["number_of_children"]
- pangolayout = widget.create_pango_layout(text)
- textw, texth = pangolayout.get_pixel_size()
- x = cell_area.x + (cell_area.width/2) - (textw/2)
- y = cell_area.y + (cell_area.height/2) - (texth/2)
-
- widget.style.paint_layout(window, state, True, cell_area, widget, "checkbox", x, y, pangolayout)
- else:
- return gtk.CellRendererToggle.do_render(self, window, widget, background_area, cell_area, expose_area, flags)
-
- '''delay: total fade time in ms (normally 1000ms)
- cell_list: which cells need to be rendered
- '''
- def fadeout(self, tree, delay, cell_list=None):
- if (delay < 200) or (not tree):
- return
- self.cell_attr["fadeout"] = True
- self.ctrl.running_cell_areas = cell_list
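- # tick every 200ms over the requested delay; the exact start_run() parameters follow HobCellRendererController's interface, defined earlier in this file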
- self.ctrl.start_run(200, 0, 0, delay, (delay * 200 / 1000), tree)
-
- def connect_render_state_changed(self, func, usrdata=None):
- if not func:
- return
- if usrdata:
- self.ctrl.connect("run-timer-stopped", func, self, usrdata)
- else:
- self.ctrl.connect("run-timer-stopped", func, self)
-
-gobject.type_register(HobCellRendererToggle)
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/persistenttooltip.py b/yocto-poky/bitbake/lib/bb/ui/crumbs/persistenttooltip.py
deleted file mode 100644
index 927c19429..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/persistenttooltip.py
+++ /dev/null
@@ -1,186 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2012 Intel Corporation
-#
-# Authored by Joshua Lock <josh@linux.intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gobject
-import gtk
-try:
- import gconf
-except:
- pass
-
-class PersistentTooltip(gtk.Window):
- """
- A tooltip which persists once shown until the user dismisses it with the Esc
- key or by clicking the close button.
-
- # FIXME: the PersistentTooltip should be dismissed when the user clicks anywhere off
- # it. We can't do this with focus-out-event because being modal ensures we keep focus?
-
- markup: some Pango text markup to display in the tooltip
- """
- def __init__(self, markup, parent_win=None):
- gtk.Window.__init__(self, gtk.WINDOW_POPUP)
-
- # Inherit the system theme for a tooltip
- style = gtk.rc_get_style_by_paths(gtk.settings_get_default(),
- 'gtk-tooltip', 'gtk-tooltip', gobject.TYPE_NONE)
- self.set_style(style)
-
- # The placement of the close button on the tip should reflect how the
- # window manager of the user's system places close buttons. Try to read
- # the metacity gconf key to determine whether the close button is on the
- # left or the right.
- # If we can't determine the user's configuration, we default to close
- # buttons being on the right.
- __button_right = True
- try:
- client = gconf.client_get_default()
- order = client.get_string("/apps/metacity/general/button_layout")
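- # button_layout names buttons on each side of a colon (e.g. "close,minimize,maximize:"); a trailing colon means everything, including close, sits on the left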
- if order and order.endswith(":"):
- __button_right = False
- except NameError:
- pass
-
- # We need to ensure we're only shown once
- self.shown = False
-
- # We don't want any WM decorations
- self.set_decorated(False)
- # We don't want to show in the taskbar or window switcher
- self.set_skip_pager_hint(True)
- self.set_skip_taskbar_hint(True)
- # We must be modal to ensure we grab focus when presented from a gtk.Dialog
- self.set_modal(True)
-
- self.set_border_width(0)
- self.set_position(gtk.WIN_POS_MOUSE)
- self.set_opacity(0.95)
-
- # Ensure a reasonable minimum size
- self.set_geometry_hints(self, 100, 50)
-
- # Set this window as a transient window for the parent (main window)
- if parent_win:
- self.set_transient_for(parent_win)
- self.set_destroy_with_parent(True)
- # Draw our label and close buttons
- hbox = gtk.HBox(False, 0)
- hbox.show()
- self.add(hbox)
-
- img = gtk.Image()
- img.set_from_stock(gtk.STOCK_CLOSE, gtk.ICON_SIZE_BUTTON)
-
- self.button = gtk.Button()
- self.button.set_image(img)
- self.button.connect("clicked", self._dismiss_cb)
- self.button.set_flags(gtk.CAN_DEFAULT)
- self.button.grab_focus()
- self.button.show()
- vbox = gtk.VBox(False, 0)
- vbox.show()
- vbox.pack_start(self.button, False, False, 0)
- if __button_right:
- hbox.pack_end(vbox, True, True, 0)
- else:
- hbox.pack_start(vbox, True, True, 0)
-
- self.set_default(self.button)
-
- bin = gtk.HBox(True, 6)
- bin.set_border_width(6)
- bin.show()
- self.label = gtk.Label()
- self.label.set_line_wrap(True)
- # We want to match the colours of the normal tooltips, as dictated by
- # the user's gtk+-2.0 theme, wherever possible - on some systems this
- # requires explicitly setting a fg_color for the label which matches the
- # tooltip_fg_color
- settings = gtk.settings_get_default()
- colours = settings.get_property('gtk-color-scheme').split('\n')
- # remove any empty lines, there's likely to be a trailing one after
- # calling split on a dictionary-like string
- colours = filter(None, colours)
- for col in colours:
- item, val = col.split(': ')
- if item == 'tooltip_fg_color':
- style = self.label.get_style()
- style.fg[gtk.STATE_NORMAL] = gtk.gdk.color_parse(val)
- self.label.set_style(style)
- break # we only care for the tooltip_fg_color
-
- self.label.set_markup(markup)
- self.label.show()
- bin.add(self.label)
- hbox.pack_end(bin, True, True, 6)
-
- # add the original URL display for user reference
- if 'a href' in markup:
- hbox.set_tooltip_text(self.get_markup_url(markup))
- hbox.show()
-
- self.connect("key-press-event", self._catch_esc_cb)
-
- """
- Callback when the PersistentTooltip's close button is clicked.
- Hides the PersistentTooltip.
- """
- def _dismiss_cb(self, button):
- self.hide()
- return True
-
- """
- Callback when the Esc key is detected. Hides the PersistentTooltip.
- """
- def _catch_esc_cb(self, widget, event):
- keyname = gtk.gdk.keyval_name(event.keyval)
- if keyname == "Escape":
- self.hide()
- return True
-
- """
- Called to present the PersistentTooltip.
- Overrides the superclasses show() method to include state tracking.
- """
- def show(self):
- if not self.shown:
- self.shown = True
- gtk.Window.show(self)
-
- """
- Called to hide the PersistentTooltip.
- Overrides the superclasses hide() method to include state tracking.
- """
- def hide(self):
- self.shown = False
- gtk.Window.hide(self)
-
- """
- Called to get the hyperlink URL from markup text.
- """
- def get_markup_url(self, markup):
- url = "http:"
- if markup and type(markup) == str:
- s = markup
- if 'http:' in s:
- import re
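- # grab the first http URL; the character class ends the match at a comma, space or double quote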
- url = re.search('(http:[^,\\ "]+)', s).group(0)
-
- return url
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/progress.py b/yocto-poky/bitbake/lib/bb/ui/crumbs/progress.py
deleted file mode 100644
index 1d28a111b..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/progress.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import gtk
-
-class ProgressBar(gtk.Dialog):
- def __init__(self, parent):
-
- gtk.Dialog.__init__(self, flags=(gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT))
- self.set_title("Parsing metadata, please wait...")
- self.set_default_size(500, 0)
- self.set_transient_for(parent)
- self.progress = gtk.ProgressBar()
- self.vbox.pack_start(self.progress)
- self.show_all()
-
- def set_text(self, msg):
- self.progress.set_text(msg)
-
- def update(self, x, y):
- self.progress.set_fraction(float(x)/float(y))
- self.progress.set_text("%2d %%" % (x*100/y))
-
- def pulse(self):
- self.progress.set_text("Loading...")
- self.progress.pulse()
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/progressbar.py b/yocto-poky/bitbake/lib/bb/ui/crumbs/progressbar.py
deleted file mode 100644
index 3e2c660e4..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/progressbar.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2011 Intel Corporation
-#
-# Authored by Shane Wang <shane.wang@intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-from bb.ui.crumbs.hobcolor import HobColors
-
-class HobProgressBar (gtk.ProgressBar):
- def __init__(self):
- gtk.ProgressBar.__init__(self)
- self.set_rcstyle(True)
- self.percentage = 0
-
- def set_rcstyle(self, status):
- rcstyle = gtk.RcStyle()
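- # indices into fg/bg are GTK states: 2 is gtk.STATE_PRELIGHT, 3 is gtk.STATE_SELECTED; any status other than "stop" or "fail" (including the True passed from __init__ and reset) falls through to the running colour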
- rcstyle.fg[2] = gtk.gdk.Color(HobColors.BLACK)
- if status == "stop":
- rcstyle.bg[3] = gtk.gdk.Color(HobColors.WARNING)
- elif status == "fail":
- rcstyle.bg[3] = gtk.gdk.Color(HobColors.ERROR)
- else:
- rcstyle.bg[3] = gtk.gdk.Color(HobColors.RUNNING)
- self.modify_style(rcstyle)
-
- def set_title(self, text=None):
- if not text:
- text = ""
- text += " %.0f%%" % self.percentage
- self.set_text(text)
-
- def set_stop_title(self, text=None):
- if not text:
- text = ""
- self.set_text(text)
-
- def reset(self):
- self.set_fraction(0)
- self.set_text("")
- self.set_rcstyle(True)
- self.percentage = 0
-
- def update(self, fraction):
- self.percentage = int(fraction * 100)
- self.set_fraction(fraction)
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/puccho.glade b/yocto-poky/bitbake/lib/bb/ui/crumbs/puccho.glade
deleted file mode 100644
index d7553a6e1..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/puccho.glade
+++ /dev/null
@@ -1,606 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<!DOCTYPE glade-interface SYSTEM "glade-2.0.dtd">
-<!--Generated with glade3 3.4.5 on Mon Nov 10 12:24:12 2008 -->
-<glade-interface>
- <widget class="GtkDialog" id="build_dialog">
- <property name="title" translatable="yes">Start a build</property>
- <property name="window_position">GTK_WIN_POS_CENTER_ON_PARENT</property>
- <property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
- <property name="has_separator">False</property>
- <child internal-child="vbox">
- <widget class="GtkVBox" id="dialog-vbox1">
- <property name="visible">True</property>
- <property name="spacing">2</property>
- <child>
- <widget class="GtkTable" id="build_table">
- <property name="visible">True</property>
- <property name="border_width">6</property>
- <property name="n_rows">7</property>
- <property name="n_columns">3</property>
- <property name="column_spacing">5</property>
- <property name="row_spacing">6</property>
- <child>
- <widget class="GtkAlignment" id="status_alignment">
- <property name="visible">True</property>
- <property name="left_padding">12</property>
- <child>
- <widget class="GtkHBox" id="status_hbox">
- <property name="spacing">6</property>
- <child>
- <widget class="GtkImage" id="status_image">
- <property name="visible">True</property>
- <property name="no_show_all">True</property>
- <property name="xalign">0</property>
- <property name="stock">gtk-dialog-error</property>
- </widget>
- <packing>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="status_label">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">If you see this text something is wrong...</property>
- <property name="use_markup">True</property>
- <property name="use_underline">True</property>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">2</property>
- <property name="bottom_attach">3</property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label2">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">&lt;b&gt;Build configuration&lt;/b&gt;</property>
- <property name="use_markup">True</property>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">3</property>
- <property name="bottom_attach">4</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkComboBox" id="image_combo">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">2</property>
- <property name="top_attach">6</property>
- <property name="bottom_attach">7</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="image_label">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="xalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Image:</property>
- </widget>
- <packing>
- <property name="top_attach">6</property>
- <property name="bottom_attach">7</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkComboBox" id="distribution_combo">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">2</property>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="distribution_label">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="xalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Distribution:</property>
- </widget>
- <packing>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkComboBox" id="machine_combo">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">2</property>
- <property name="top_attach">4</property>
- <property name="bottom_attach">5</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="machine_label">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="xalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Machine:</property>
- </widget>
- <packing>
- <property name="top_attach">4</property>
- <property name="bottom_attach">5</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkButton" id="refresh_button">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-refresh</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- <packing>
- <property name="left_attach">2</property>
- <property name="right_attach">3</property>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkEntry" id="location_entry">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="width_chars">32</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">2</property>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label3">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Location:</property>
- </widget>
- <packing>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label1">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">&lt;b&gt;Repository&lt;/b&gt;</property>
- <property name="use_markup">True</property>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment1">
- <property name="visible">True</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="left_attach">2</property>
- <property name="right_attach">3</property>
- <property name="top_attach">4</property>
- <property name="bottom_attach">5</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment2">
- <property name="visible">True</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="left_attach">2</property>
- <property name="right_attach">3</property>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment3">
- <property name="visible">True</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="left_attach">2</property>
- <property name="right_attach">3</property>
- <property name="top_attach">6</property>
- <property name="bottom_attach">7</property>
- <property name="y_options"></property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- <child internal-child="action_area">
- <widget class="GtkHButtonBox" id="dialog-action_area1">
- <property name="visible">True</property>
- <property name="layout_style">GTK_BUTTONBOX_END</property>
- <child>
- <placeholder/>
- </child>
- <child>
- <placeholder/>
- </child>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="expand">False</property>
- <property name="pack_type">GTK_PACK_END</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- <widget class="GtkDialog" id="dialog2">
- <property name="window_position">GTK_WIN_POS_CENTER_ON_PARENT</property>
- <property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
- <property name="has_separator">False</property>
- <child internal-child="vbox">
- <widget class="GtkVBox" id="dialog-vbox2">
- <property name="visible">True</property>
- <property name="spacing">2</property>
- <child>
- <widget class="GtkTable" id="table2">
- <property name="visible">True</property>
- <property name="border_width">6</property>
- <property name="n_rows">7</property>
- <property name="n_columns">3</property>
- <property name="column_spacing">6</property>
- <property name="row_spacing">6</property>
- <child>
- <widget class="GtkLabel" id="label7">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">&lt;b&gt;Repositories&lt;/b&gt;</property>
- <property name="use_markup">True</property>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment4">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="left_padding">12</property>
- <child>
- <widget class="GtkScrolledWindow" id="scrolledwindow1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <child>
- <widget class="GtkTreeView" id="treeview1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="headers_clickable">True</property>
- </widget>
- </child>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">2</property>
- <property name="bottom_attach">3</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkEntry" id="entry1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">3</property>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label9">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">&lt;b&gt;Additional packages&lt;/b&gt;</property>
- <property name="use_markup">True</property>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">4</property>
- <property name="bottom_attach">5</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment6">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="xscale">0</property>
- <child>
- <widget class="GtkLabel" id="label8">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="yalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Location: </property>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment7">
- <property name="visible">True</property>
- <property name="xalign">1</property>
- <property name="xscale">0</property>
- <child>
- <widget class="GtkHButtonBox" id="hbuttonbox1">
- <property name="visible">True</property>
- <property name="spacing">5</property>
- <child>
- <widget class="GtkButton" id="button7">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-remove</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- </child>
- <child>
- <widget class="GtkButton" id="button6">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-edit</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- <child>
- <widget class="GtkButton" id="button5">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-add</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- <packing>
- <property name="position">2</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">3</property>
- <property name="top_attach">3</property>
- <property name="bottom_attach">4</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment5">
- <property name="visible">True</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="top_attach">3</property>
- <property name="bottom_attach">4</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label10">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="yalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Search:</property>
- </widget>
- <packing>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkEntry" id="entry2">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">3</property>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment8">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="left_padding">12</property>
- <child>
- <widget class="GtkScrolledWindow" id="scrolledwindow2">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <child>
- <widget class="GtkTreeView" id="treeview2">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="headers_clickable">True</property>
- </widget>
- </child>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">6</property>
- <property name="bottom_attach">7</property>
- <property name="y_options"></property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- <child internal-child="action_area">
- <widget class="GtkHButtonBox" id="dialog-action_area2">
- <property name="visible">True</property>
- <property name="layout_style">GTK_BUTTONBOX_END</property>
- <child>
- <widget class="GtkButton" id="button4">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-close</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="expand">False</property>
- <property name="pack_type">GTK_PACK_END</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- <widget class="GtkWindow" id="main_window">
- <child>
- <widget class="GtkVBox" id="main_window_vbox">
- <property name="visible">True</property>
- <child>
- <widget class="GtkToolbar" id="main_toolbar">
- <property name="visible">True</property>
- <child>
- <widget class="GtkToolButton" id="main_toolbutton_build">
- <property name="visible">True</property>
- <property name="label" translatable="yes">Build</property>
- <property name="stock_id">gtk-execute</property>
- </widget>
- <packing>
- <property name="expand">False</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="expand">False</property>
- </packing>
- </child>
- <child>
- <widget class="GtkVPaned" id="vpaned1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <child>
- <widget class="GtkScrolledWindow" id="results_scrolledwindow">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="resize">False</property>
- <property name="shrink">True</property>
- </packing>
- </child>
- <child>
- <widget class="GtkScrolledWindow" id="progress_scrolledwindow">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="resize">True</property>
- <property name="shrink">True</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
-</glade-interface>
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/runningbuild.py b/yocto-poky/bitbake/lib/bb/ui/crumbs/runningbuild.py
deleted file mode 100644
index 16a955d2b..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/runningbuild.py
+++ /dev/null
@@ -1,551 +0,0 @@
-
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2008 Intel Corporation
-#
-# Authored by Rob Bradford <rob@linux.intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import gtk
-import gobject
-import logging
-import time
-import urllib
-import urllib2
-import pango
-import bb
-from bb.ui.crumbs.hobcolor import HobColors
-from bb.ui.crumbs.hobwidget import HobWarpCellRendererText, HobCellRendererPixbuf
-
-class RunningBuildModel (gtk.TreeStore):
- (COL_LOG, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_COLOR, COL_NUM_ACTIVE) = range(7)
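- # COL_LOG marks rows carrying a full log ('pastebin'), COL_ICON holds a stock icon name, COL_COLOR a HobColors background, and COL_NUM_ACTIVE the number of running child tasks under a parent row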
-
- def __init__ (self):
- gtk.TreeStore.__init__ (self,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_INT)
-
- def failure_model_filter(self, model, it):
- color = model.get(it, self.COL_COLOR)[0]
- if not color:
- return False
- if color == HobColors.ERROR or color == HobColors.WARNING:
- return True
- return False
-
- def failure_model(self):
- model = self.filter_new()
- model.set_visible_func(self.failure_model_filter)
- return model
-
- def foreach_cell_func(self, model, path, iter, usr_data=None):
- if model.get_value(iter, self.COL_ICON) == "gtk-execute":
- model.set(iter, self.COL_ICON, "")
-
- def close_task_refresh(self):
- self.foreach(self.foreach_cell_func, None)
-
-class RunningBuild (gobject.GObject):
- __gsignals__ = {
- 'build-started' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'build-succeeded' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'build-failed' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'build-complete' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'build-aborted' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'task-started' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- (gobject.TYPE_PYOBJECT,)),
- 'log-error' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'log-warning' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'disk-full' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'no-provider' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- (gobject.TYPE_PYOBJECT,)),
- 'log' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- (gobject.TYPE_STRING, gobject.TYPE_PYOBJECT,)),
- }
- pids_to_task = {}
- tasks_to_iter = {}
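- # class-level maps: pid -> (package, task) and (package, task) -> tree iter; reset() clears both between builds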
-
- def __init__ (self, sequential=False):
- gobject.GObject.__init__ (self)
- self.model = RunningBuildModel()
- self.sequential = sequential
- self.buildaborted = False
-
- def reset (self):
- self.pids_to_task.clear()
- self.tasks_to_iter.clear()
- self.model.clear()
-
- def handle_event (self, event, pbar=None):
- # Handle an event from the event queue; this may result in updating
- # the model and thus the UI, or it may tell us that the build has
- # finished (successfully or not, as the case may be).
-
- parent = None
- pid = 0
- package = None
- task = None
-
- # If we have a pid attached to this message/event try and get the
- # (package, task) pair for it. If we get that then get the parent iter
- # for the message.
- if hasattr(event, 'pid'):
- pid = event.pid
- if hasattr(event, 'process'):
- pid = event.process
-
- if pid and pid in self.pids_to_task:
- (package, task) = self.pids_to_task[pid]
- parent = self.tasks_to_iter[(package, task)]
-
- if(isinstance(event, logging.LogRecord)):
- if event.taskpid == 0 or event.levelno > logging.INFO:
- self.emit("log", "handle", event)
- # FIXME: this is a hack! More info in Yocto #1433
- # http://bugzilla.pokylinux.org/show_bug.cgi?id=1433, temporarily
- # mask the error message as it's not informative for the user.
- if event.msg.startswith("Execution of event handler 'run_buildstats' failed"):
- return
-
- if (event.levelno < logging.INFO or
- event.msg.startswith("Running task")):
- return # don't add these to the list
-
- if event.levelno >= logging.ERROR:
- icon = "dialog-error"
- color = HobColors.ERROR
- self.emit("log-error")
- elif event.levelno >= logging.WARNING:
- icon = "dialog-warning"
- color = HobColors.WARNING
- self.emit("log-warning")
- else:
- icon = None
- color = HobColors.OK
-
- # if we know which package we belong to, we'll append onto its list.
- # otherwise, we'll jump to the top of the master list
- if self.sequential or not parent:
- tree_add = self.model.append
- else:
- tree_add = self.model.prepend
- tree_add(parent,
- (None,
- package,
- task,
- event.getMessage(),
- icon,
- color,
- 0))
-
- # if there are warnings while processing a package
- # (parent), mark the task with warning color;
- # in case there are errors, the updates will be
- # handled on TaskFailed.
- if color == HobColors.WARNING and parent:
- self.model.set(parent, self.model.COL_COLOR, color)
- if task: # then we have a parent (package); update its color
- self.model.set(self.tasks_to_iter[(package, None)], self.model.COL_COLOR, color)
-
- elif isinstance(event, bb.build.TaskStarted):
- (package, task) = (event._package, event._task)
-
- # Save out this PID.
- self.pids_to_task[pid] = (package, task)
-
- # Check if we already have this package in our model. If so then
- # that can be the parent for the task. Otherwise we create a new
- # top level for the package.
- if ((package, None) in self.tasks_to_iter):
- parent = self.tasks_to_iter[(package, None)]
- else:
- if self.sequential:
- add = self.model.append
- else:
- add = self.model.prepend
- parent = add(None, (None,
- package,
- None,
- "Package: %s" % (package),
- None,
- HobColors.OK,
- 0))
- self.tasks_to_iter[(package, None)] = parent
-
- # Because this parent package now has an active child, mark it
- # as such.
- self.model.set(parent, self.model.COL_ICON, "gtk-execute")
- parent_color = self.model.get(parent, self.model.COL_COLOR)[0]
- if parent_color != HobColors.ERROR and parent_color != HobColors.WARNING:
- self.model.set(parent, self.model.COL_COLOR, HobColors.RUNNING)
-
- # Add an entry in the model for this task
- i = self.model.append (parent, (None,
- package,
- task,
- "Task: %s" % (task),
- "gtk-execute",
- HobColors.RUNNING,
- 0))
-
- # update the parent's active task count
- num_active = self.model.get(parent, self.model.COL_NUM_ACTIVE)[0] + 1
- self.model.set(parent, self.model.COL_NUM_ACTIVE, num_active)
-
- # Save out the iter so that we can find it when we have a message
- # that we need to attach to a task.
- self.tasks_to_iter[(package, task)] = i
-
- elif isinstance(event, bb.build.TaskBase):
- self.emit("log", "info", event._message)
- current = self.tasks_to_iter[(package, task)]
- parent = self.tasks_to_iter[(package, None)]
-
- # remove this task from the parent's active count
- num_active = self.model.get(parent, self.model.COL_NUM_ACTIVE)[0] - 1
- self.model.set(parent, self.model.COL_NUM_ACTIVE, num_active)
-
- if isinstance(event, bb.build.TaskFailed):
- # Mark the task and parent as failed
- icon = "dialog-error"
- color = HobColors.ERROR
-
- logfile = event.logfile
- if logfile and os.path.exists(logfile):
- with open(logfile) as f:
- logdata = f.read()
- self.model.append(current, ('pastebin', None, None, logdata, 'gtk-error', HobColors.OK, 0))
-
- for i in (current, parent):
- self.model.set(i, self.model.COL_ICON, icon,
- self.model.COL_COLOR, color)
- else:
- # Mark the parent package and the task as inactive,
- # but make sure to preserve error, warnings and active
- # states
- parent_color = self.model.get(parent, self.model.COL_COLOR)[0]
- task_color = self.model.get(current, self.model.COL_COLOR)[0]
-
- # Mark the task as inactive
- self.model.set(current, self.model.COL_ICON, None)
- if task_color != HobColors.ERROR:
- if task_color == HobColors.WARNING:
- self.model.set(current, self.model.COL_ICON, 'dialog-warning')
- else:
- self.model.set(current, self.model.COL_COLOR, HobColors.OK)
-
- # Mark the parent as inactive
- if parent_color != HobColors.ERROR:
- if parent_color == HobColors.WARNING:
- self.model.set(parent, self.model.COL_ICON, "dialog-warning")
- else:
- self.model.set(parent, self.model.COL_ICON, None)
- if num_active == 0:
- self.model.set(parent, self.model.COL_COLOR, HobColors.OK)
-
- # Clear the iters and the pids since when the task goes away the
- # pid will no longer be used for messages
- del self.tasks_to_iter[(package, task)]
- del self.pids_to_task[pid]
-
- elif isinstance(event, bb.event.BuildStarted):
-
- self.emit("build-started")
- self.model.prepend(None, (None,
- None,
- None,
- "Build Started (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
- None,
- HobColors.OK,
- 0))
- if pbar:
- pbar.update(0, self.progress_total)
- pbar.set_title(bb.event.getName(event))
-
- elif isinstance(event, bb.event.BuildCompleted):
- failures = int (event._failures)
- self.model.prepend(None, (None,
- None,
- None,
- "Build Completed (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
- None,
- HobColors.OK,
- 0))
-
- # Emit the appropriate signal depending on the number of failures
- if self.buildaborted:
- self.emit ("build-aborted")
- self.buildaborted = False
- elif (failures >= 1):
- self.emit ("build-failed")
- else:
- self.emit ("build-succeeded")
- # Emit a generic "build-complete" signal for things wishing to
- # handle when the build is finished
- self.emit("build-complete")
- # reset all cells' icon indicators
- self.model.close_task_refresh()
- if pbar:
- pbar.set_text(event.msg)
-
- elif isinstance(event, bb.event.DiskFull):
- self.buildaborted = True
- self.emit("disk-full")
-
- elif isinstance(event, bb.command.CommandFailed):
- self.emit("log", "error", "Command execution failed: %s" % (event.error))
- if event.error.startswith("Exited with"):
- # If the command fails with an exit code we're done, emit the
- # generic signal for the UI to notify the user
- self.emit("build-complete")
- # reset all cells' icon indicators
- self.model.close_task_refresh()
-
- elif isinstance(event, bb.event.CacheLoadStarted) and pbar:
- pbar.set_title("Loading cache")
- self.progress_total = event.total
- pbar.update(0, self.progress_total)
- elif isinstance(event, bb.event.CacheLoadProgress) and pbar:
- pbar.update(event.current, self.progress_total)
- elif isinstance(event, bb.event.CacheLoadCompleted) and pbar:
- pbar.update(self.progress_total, self.progress_total)
- pbar.hide()
- elif isinstance(event, bb.event.ParseStarted) and pbar:
- if event.total == 0:
- return
- pbar.set_title("Processing recipes")
- self.progress_total = event.total
- pbar.update(0, self.progress_total)
- elif isinstance(event, bb.event.ParseProgress) and pbar:
- pbar.update(event.current, self.progress_total)
- elif isinstance(event, bb.event.ParseCompleted) and pbar:
- pbar.hide()
- # use runqueue events wherever possible to update the progress bar
- elif isinstance(event, bb.runqueue.runQueueTaskFailed):
- self.emit("log", "error", "Task %s (%s) failed with exit code '%s'" % (event.taskid, event.taskstring, event.exitcode))
- elif isinstance(event, bb.runqueue.sceneQueueTaskFailed):
- self.emit("log", "warn", "Setscene task %s (%s) failed with exit code '%s' - real task will be run instead" \
- % (event.taskid, event.taskstring, event.exitcode))
- elif isinstance(event, (bb.runqueue.runQueueTaskStarted, bb.runqueue.sceneQueueTaskStarted)):
- if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
- self.emit("log", "info", "Running setscene task %d of %d (%s)" % \
- (event.stats.completed + event.stats.active + event.stats.failed + 1,
- event.stats.total, event.taskstring))
- else:
- if event.noexec:
- tasktype = 'noexec task'
- else:
- tasktype = 'task'
- self.emit("log", "info", "Running %s %s of %s (ID: %s, %s)" % \
- (tasktype, event.stats.completed + event.stats.active + event.stats.failed + 1,
- event.stats.total, event.taskid, event.taskstring))
- message = {}
- message["eventname"] = bb.event.getName(event)
- num_of_completed = event.stats.completed + event.stats.failed
- message["current"] = num_of_completed
- message["total"] = event.stats.total
- message["title"] = ""
- message["task"] = event.taskstring
- self.emit("task-started", message)
- elif isinstance(event, bb.event.MultipleProviders):
- self.emit("log", "info", "multiple providers are available for %s%s (%s)" \
- % (event._is_runtime and "runtime " or "", event._item, ", ".join(event._candidates)))
- self.emit("log", "info", "consider defining a PREFERRED_PROVIDER entry to match %s" % (event._item))
- elif isinstance(event, bb.event.NoProvider):
- msg = ""
- if event._runtime:
- r = "R"
- else:
- r = ""
-
- extra = ''
- if not event._reasons:
- if event._close_matches:
- extra = ". Close matches:\n %s" % '\n '.join(event._close_matches)
-
- if event._dependees:
- msg = "Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)%s\n" % (r, event._item, ", ".join(event._dependees), r, extra)
- else:
- msg = "Nothing %sPROVIDES '%s'%s\n" % (r, event._item, extra)
- if event._reasons:
- for reason in event._reasons:
- msg += ("%s\n" % reason)
- self.emit("no-provider", msg)
- self.emit("log", "error", msg)
- elif isinstance(event, bb.event.LogExecTTY):
- icon = "dialog-warning"
- color = HobColors.WARNING
- if self.sequential or not parent:
- tree_add = self.model.append
- else:
- tree_add = self.model.prepend
- tree_add(parent,
- (None,
- package,
- task,
- event.msg,
- icon,
- color,
- 0))
- else:
- if not isinstance(event, (bb.event.BuildBase,
- bb.event.StampUpdate,
- bb.event.ConfigParsed,
- bb.event.RecipeParsed,
- bb.event.RecipePreFinalise,
- bb.runqueue.runQueueEvent,
- bb.runqueue.runQueueExitWait,
- bb.event.OperationStarted,
- bb.event.OperationCompleted,
- bb.event.OperationProgress)):
- self.emit("log", "error", "Unknown event: %s" % (event.error if hasattr(event, 'error') else 'error'))
-
- return
-
-
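-# POST the log text to pastebin.com's legacy public API; the response body is the URL of the new paste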
-def do_pastebin(text):
- url = 'http://pastebin.com/api_public.php'
- params = {'paste_code': text, 'paste_format': 'text'}
-
- req = urllib2.Request(url, urllib.urlencode(params))
- response = urllib2.urlopen(req)
- paste_url = response.read()
-
- return paste_url
-
-
-class RunningBuildTreeView (gtk.TreeView):
- __gsignals__ = {
- "button_press_event" : "override"
- }
- def __init__ (self, readonly=False, hob=False):
- gtk.TreeView.__init__ (self)
- self.readonly = readonly
-
- # The icon that indicates whether we're building or failed.
- # the 'hob' flag is needed because this code is shared by UIs other than hob
- if hob:
- renderer = HobCellRendererPixbuf ()
- else:
- renderer = gtk.CellRendererPixbuf()
- col = gtk.TreeViewColumn ("Status", renderer)
- col.add_attribute (renderer, "icon-name", 4)
- self.append_column (col)
-
- # The message of the build.
- # the 'hob' flag is needed because this code is shared by UIs other than hob
- if hob:
- self.message_renderer = HobWarpCellRendererText (col_number=1)
- else:
- self.message_renderer = gtk.CellRendererText ()
- self.message_column = gtk.TreeViewColumn ("Message", self.message_renderer, text=3)
- self.message_column.add_attribute(self.message_renderer, 'background', 5)
- self.message_renderer.set_property('editable', (not self.readonly))
- self.append_column (self.message_column)
-
- def do_button_press_event(self, event):
- gtk.TreeView.do_button_press_event(self, event)
-
- if event.button == 3:
- selection = super(RunningBuildTreeView, self).get_selection()
- (model, it) = selection.get_selected()
- if it is not None:
- can_paste = model.get(it, model.COL_LOG)[0]
- if can_paste == 'pastebin':
- # build a simple menu with a pastebin option
- menu = gtk.Menu()
- menuitem = gtk.MenuItem("Copy")
- menu.append(menuitem)
- menuitem.connect("activate", self.clipboard_handler, (model, it))
- menuitem.show()
- menuitem = gtk.MenuItem("Send log to pastebin")
- menu.append(menuitem)
- menuitem.connect("activate", self.pastebin_handler, (model, it))
- menuitem.show()
- menu.show()
- menu.popup(None, None, None, event.button, event.time)
-
- def _add_to_clipboard(self, clipping):
- """
- Add the contents of clipping to the system clipboard.
- """
- clipboard = gtk.clipboard_get()
- clipboard.set_text(clipping)
- clipboard.store()
-
- def pastebin_handler(self, widget, data):
- """
- Send the log data to pastebin, then add the new paste url to the
- clipboard.
- """
- (model, it) = data
- paste_url = do_pastebin(model.get(it, model.COL_MESSAGE)[0])
-
- # @todo Provide visual feedback to the user that it is done and that
- # it worked.
- print paste_url
-
- self._add_to_clipboard(paste_url)
-
- def clipboard_handler(self, widget, data):
- """
- """
- (model, it) = data
- message = model.get(it, model.COL_MESSAGE)[0]
-
- self._add_to_clipboard(message)
-
-class BuildFailureTreeView(gtk.TreeView):
-
- def __init__ (self):
- gtk.TreeView.__init__(self)
- self.set_rules_hint(False)
- self.set_headers_visible(False)
- self.get_selection().set_mode(gtk.SELECTION_SINGLE)
-
- # The icon that indicates whether we're building or failed.
- renderer = HobCellRendererPixbuf ()
- col = gtk.TreeViewColumn ("Status", renderer)
- col.add_attribute (renderer, "icon-name", RunningBuildModel.COL_ICON)
- self.append_column (col)
-
- # The message of the build.
- self.message_renderer = HobWarpCellRendererText (col_number=1)
- self.message_column = gtk.TreeViewColumn ("Message", self.message_renderer, text=RunningBuildModel.COL_MESSAGE, background=RunningBuildModel.COL_COLOR)
- self.append_column (self.message_column)
diff --git a/yocto-poky/bitbake/lib/bb/ui/crumbs/utils.py b/yocto-poky/bitbake/lib/bb/ui/crumbs/utils.py
deleted file mode 100644
index 939864fa6..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/crumbs/utils.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#
-# BitBake UI Utils
-#
-# Copyright (C) 2012 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-# This utility method looks for xterm or vte and returns the
-# first that exists. Currently we are keeping this simple, but
-# we will likely move the oe.terminal implementation into
-# bitbake, which will allow more flexibility.
-
-import os
-import bb
-
-def which_terminal():
- term = bb.utils.which(os.environ["PATH"], "xterm")
- if term:
- return term + " -e "
- term = bb.utils.which(os.environ["PATH"], "vte")
- if term:
- return term + " -c "
- return None
diff --git a/yocto-poky/bitbake/lib/bb/ui/depexp.py b/yocto-poky/bitbake/lib/bb/ui/depexp.py
deleted file mode 100644
index 240aafc3e..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/depexp.py
+++ /dev/null
@@ -1,333 +0,0 @@
-#
-# BitBake Graphical GTK based Dependency Explorer
-#
-# Copyright (C) 2007 Ross Burton
-# Copyright (C) 2007 - 2008 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import sys
-import gobject
-import gtk
-import Queue
-import threading
-import xmlrpclib
-import bb
-import bb.event
-from bb.ui.crumbs.progressbar import HobProgressBar
-
-# Package Model
-(COL_PKG_NAME) = (0)
-
-# Dependency Model
-(TYPE_DEP, TYPE_RDEP) = (0, 1)
-(COL_DEP_TYPE, COL_DEP_PARENT, COL_DEP_PACKAGE) = (0, 1, 2)
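-# each row of the depends model is (dependency type, the package that depends, the package depended upon)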
-
-
-class PackageDepView(gtk.TreeView):
- def __init__(self, model, dep_type, label):
- gtk.TreeView.__init__(self)
- self.current = None
- self.dep_type = dep_type
- self.filter_model = model.filter_new()
- self.filter_model.set_visible_func(self._filter)
- self.set_model(self.filter_model)
- #self.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
- self.append_column(gtk.TreeViewColumn(label, gtk.CellRendererText(), text=COL_DEP_PACKAGE))
-
- def _filter(self, model, iter):
- (this_type, package) = model.get(iter, COL_DEP_TYPE, COL_DEP_PARENT)
- if this_type != self.dep_type: return False
- return package == self.current
-
- def set_current_package(self, package):
- self.current = package
- self.filter_model.refilter()
-
-
-class PackageReverseDepView(gtk.TreeView):
- def __init__(self, model, label):
- gtk.TreeView.__init__(self)
- self.current = None
- self.filter_model = model.filter_new()
- self.filter_model.set_visible_func(self._filter)
- self.set_model(self.filter_model)
- self.append_column(gtk.TreeViewColumn(label, gtk.CellRendererText(), text=COL_DEP_PARENT))
-
- def _filter(self, model, iter):
- package = model.get_value(iter, COL_DEP_PACKAGE)
- return package == self.current
-
- def set_current_package(self, package):
- self.current = package
- self.filter_model.refilter()
-
-
-class DepExplorer(gtk.Window):
- def __init__(self):
- gtk.Window.__init__(self)
- self.set_title("Dependency Explorer")
- self.set_default_size(500, 500)
- self.connect("delete-event", gtk.main_quit)
-
- # Create the data models
- self.pkg_model = gtk.ListStore(gobject.TYPE_STRING)
- self.pkg_model.set_sort_column_id(COL_PKG_NAME, gtk.SORT_ASCENDING)
- self.depends_model = gtk.ListStore(gobject.TYPE_INT, gobject.TYPE_STRING, gobject.TYPE_STRING)
- self.depends_model.set_sort_column_id(COL_DEP_PACKAGE, gtk.SORT_ASCENDING)
-
- pane = gtk.HPaned()
- pane.set_position(250)
- self.add(pane)
-
- # The master list of packages
- scrolled = gtk.ScrolledWindow()
- scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- scrolled.set_shadow_type(gtk.SHADOW_IN)
-
- self.pkg_treeview = gtk.TreeView(self.pkg_model)
- self.pkg_treeview.get_selection().connect("changed", self.on_cursor_changed)
- column = gtk.TreeViewColumn("Package", gtk.CellRendererText(), text=COL_PKG_NAME)
- self.pkg_treeview.append_column(column)
- pane.add1(scrolled)
- scrolled.add(self.pkg_treeview)
-
- box = gtk.VBox(homogeneous=True, spacing=4)
-
- # Runtime Depends
- scrolled = gtk.ScrolledWindow()
- scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- scrolled.set_shadow_type(gtk.SHADOW_IN)
- self.rdep_treeview = PackageDepView(self.depends_model, TYPE_RDEP, "Runtime Depends")
- self.rdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
- scrolled.add(self.rdep_treeview)
- box.add(scrolled)
-
- # Build Depends
- scrolled = gtk.ScrolledWindow()
- scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- scrolled.set_shadow_type(gtk.SHADOW_IN)
- self.dep_treeview = PackageDepView(self.depends_model, TYPE_DEP, "Build Depends")
- self.dep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
- scrolled.add(self.dep_treeview)
- box.add(scrolled)
- pane.add2(box)
-
- # Reverse Depends
- scrolled = gtk.ScrolledWindow()
- scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- scrolled.set_shadow_type(gtk.SHADOW_IN)
- self.revdep_treeview = PackageReverseDepView(self.depends_model, "Reverse Depends")
- self.revdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PARENT)
- scrolled.add(self.revdep_treeview)
- box.add(scrolled)
- pane.add2(box)
-
- self.show_all()
-
- def on_package_activated(self, treeview, path, column, data_col):
- model = treeview.get_model()
- package = model.get_value(model.get_iter(path), data_col)
-
- pkg_path = []
- def finder(model, path, iter, needle):
- package = model.get_value(iter, COL_PKG_NAME)
- if package == needle:
- pkg_path.append(path)
- return True
- else:
- return False
- self.pkg_model.foreach(finder, package)
- if pkg_path:
- self.pkg_treeview.get_selection().select_path(pkg_path[0])
- self.pkg_treeview.scroll_to_cell(pkg_path[0])
-
- def on_cursor_changed(self, selection):
- (model, it) = selection.get_selected()
- if it is None:
- current_package = None
- else:
- current_package = model.get_value(it, COL_PKG_NAME)
- self.rdep_treeview.set_current_package(current_package)
- self.dep_treeview.set_current_package(current_package)
- self.revdep_treeview.set_current_package(current_package)
-
-
- def parse(self, depgraph):
- for package in depgraph["pn"]:
- self.pkg_model.insert(0, (package,))
-
- for package in depgraph["depends"]:
- for depend in depgraph["depends"][package]:
- self.depends_model.insert (0, (TYPE_DEP, package, depend))
-
- for package in depgraph["rdepends-pn"]:
- for rdepend in depgraph["rdepends-pn"][package]:
- self.depends_model.insert (0, (TYPE_RDEP, package, rdepend))
-
-
-class gtkthread(threading.Thread):
- quit = threading.Event()
- def __init__(self, shutdown):
- threading.Thread.__init__(self)
- self.setDaemon(True)
- self.shutdown = shutdown
-
- def run(self):
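- # initialise GObject/GDK threading before gtk.main() so other threads can take the GDK lock via threads_enter()/threads_leave()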
- gobject.threads_init()
- gtk.gdk.threads_init()
- gtk.main()
- gtkthread.quit.set()
-
-
-def main(server, eventHandler, params):
- try:
- params.updateFromServer(server)
- cmdline = params.parseActions()
- if not cmdline:
- print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
- return 1
- if 'msg' in cmdline and cmdline['msg']:
- print(cmdline['msg'])
- return 1
- cmdline = cmdline['action']
- if not cmdline or cmdline[0] != "generateDotGraph":
- print("This UI requires the -g option")
- return 1
- ret, error = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]])
- if error:
- print("Error running command '%s': %s" % (cmdline, error))
- return 1
- elif ret != True:
- print("Error running command '%s': returned %s" % (cmdline, ret))
- return 1
- except xmlrpclib.Fault as x:
- print("XMLRPC Fault getting commandline:\n %s" % x)
- return
-
- try:
- gtk.init_check()
- except RuntimeError:
- sys.stderr.write("Please set DISPLAY variable before running this command \n")
- return
-
- shutdown = 0
-
- gtkgui = gtkthread(shutdown)
- gtkgui.start()
-
- gtk.gdk.threads_enter()
- dep = DepExplorer()
- bardialog = gtk.Dialog(parent=dep,
- flags=gtk.DIALOG_MODAL|gtk.DIALOG_DESTROY_WITH_PARENT)
- bardialog.set_default_size(400, 50)
- pbar = HobProgressBar()
- bardialog.vbox.pack_start(pbar)
- bardialog.show_all()
- bardialog.connect("delete-event", gtk.main_quit)
- gtk.gdk.threads_leave()
-
- progress_total = 0
- while True:
- try:
- event = eventHandler.waitEvent(0.25)
- if gtkthread.quit.isSet():
- _, error = server.runCommand(["stateForceShutdown"])
- if error:
- print('Unable to cleanly stop: %s' % error)
- break
-
- if event is None:
- continue
-
- if isinstance(event, bb.event.CacheLoadStarted):
- progress_total = event.total
- gtk.gdk.threads_enter()
- bardialog.set_title("Loading Cache")
- pbar.update(0)
- gtk.gdk.threads_leave()
-
- if isinstance(event, bb.event.CacheLoadProgress):
- x = event.current
- gtk.gdk.threads_enter()
- pbar.update(x * 1.0 / progress_total)
- pbar.set_title('')
- gtk.gdk.threads_leave()
- continue
-
- if isinstance(event, bb.event.CacheLoadCompleted):
- bardialog.hide()
- continue
-
- if isinstance(event, bb.event.ParseStarted):
- progress_total = event.total
- if progress_total == 0:
- continue
- gtk.gdk.threads_enter()
- pbar.update(0)
- bardialog.set_title("Processing recipes")
-
- gtk.gdk.threads_leave()
-
- if isinstance(event, bb.event.ParseProgress):
- x = event.current
- gtk.gdk.threads_enter()
- pbar.update(x * 1.0 / progress_total)
- pbar.set_title('')
- gtk.gdk.threads_leave()
- continue
-
- if isinstance(event, bb.event.ParseCompleted):
- bardialog.hide()
- continue
-
- if isinstance(event, bb.event.DepTreeGenerated):
- gtk.gdk.threads_enter()
- dep.parse(event._depgraph)
- gtk.gdk.threads_leave()
-
- if isinstance(event, bb.command.CommandCompleted):
- continue
-
- if isinstance(event, bb.command.CommandFailed):
- print("Command execution failed: %s" % event.error)
- return event.exitcode
-
- if isinstance(event, bb.command.CommandExit):
- return event.exitcode
-
- if isinstance(event, bb.cooker.CookerExit):
- break
-
- continue
- except EnvironmentError as ioerror:
- # ignore interrupted io
- if ioerror.args[0] == 4:
- pass
- except KeyboardInterrupt:
- if shutdown == 2:
- print("\nThird Keyboard Interrupt, exit.\n")
- break
- if shutdown == 1:
- print("\nSecond Keyboard Interrupt, stopping...\n")
- _, error = server.runCommand(["stateForceShutdown"])
- if error:
- print('Unable to cleanly stop: %s' % error)
- if shutdown == 0:
- print("\nKeyboard Interrupt, closing down...\n")
- _, error = server.runCommand(["stateShutdown"])
- if error:
- print('Unable to cleanly shutdown: %s' % error)
- shutdown = shutdown + 1
- pass
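
The KeyboardInterrupt handler above escalates across three states: the first Ctrl-C requests a clean "stateShutdown", the second forces "stateForceShutdown", and the third breaks out of the event loop. A minimal sketch of the same escalation pattern, with a hypothetical run_command() standing in for server.runCommand():

    def handle_interrupt(shutdown, run_command):
        # shutdown: 0 = running, 1 = clean stop requested, 2 = forced stop
        if shutdown == 2:
            print("\nThird Keyboard Interrupt, exit.\n")
            return shutdown, True   # True: caller should leave the event loop
        if shutdown == 1:
            print("\nSecond Keyboard Interrupt, stopping...\n")
            run_command(["stateForceShutdown"])
        else:
            print("\nKeyboard Interrupt, closing down...\n")
            run_command(["stateShutdown"])
        return shutdown + 1, False
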
diff --git a/yocto-poky/bitbake/lib/bb/ui/goggle.py b/yocto-poky/bitbake/lib/bb/ui/goggle.py
deleted file mode 100644
index f4ee7b41a..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/goggle.py
+++ /dev/null
@@ -1,121 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2008 Intel Corporation
-#
-# Authored by Rob Bradford <rob@linux.intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gobject
-import gtk
-import xmlrpclib
-from bb.ui.crumbs.runningbuild import RunningBuildTreeView, RunningBuild
-from bb.ui.crumbs.progress import ProgressBar
-
-import Queue
-
-
-def event_handle_idle_func (eventHandler, build, pbar):
-
- # Consume as many messages as we can in the time available to us
- event = eventHandler.getEvent()
- while event:
- build.handle_event (event, pbar)
- event = eventHandler.getEvent()
-
- return True
-
-def scroll_tv_cb (model, path, iter, view):
- view.scroll_to_cell (path)
-
-
-# @todo hook these into the GUI so the user has feedback...
-def running_build_failed_cb (running_build):
- pass
-
-
-def running_build_succeeded_cb (running_build):
- pass
-
-
-class MainWindow (gtk.Window):
- def __init__ (self):
- gtk.Window.__init__ (self, gtk.WINDOW_TOPLEVEL)
-
- # Setup tree view and the scrolled window
- scrolled_window = gtk.ScrolledWindow ()
- self.add (scrolled_window)
- self.cur_build_tv = RunningBuildTreeView()
- self.connect("delete-event", gtk.main_quit)
- self.set_default_size(640, 480)
- scrolled_window.add (self.cur_build_tv)
-
-
-def main (server, eventHandler, params):
- gobject.threads_init()
- gtk.gdk.threads_init()
-
- window = MainWindow ()
- window.show_all ()
- pbar = ProgressBar(window)
- pbar.connect("delete-event", gtk.main_quit)
-
- # Create the object for the current build
- running_build = RunningBuild ()
- window.cur_build_tv.set_model (running_build.model)
- running_build.model.connect("row-inserted", scroll_tv_cb, window.cur_build_tv)
- running_build.connect ("build-succeeded", running_build_succeeded_cb)
- running_build.connect ("build-failed", running_build_failed_cb)
-
- try:
- params.updateFromServer(server)
- cmdline = params.parseActions()
- if not cmdline:
- print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
- return 1
- if 'msg' in cmdline and cmdline['msg']:
- print(cmdline['msg'])
- return 1
- cmdline = cmdline['action']
- ret, error = server.runCommand(cmdline)
- if error:
- print("Error running command '%s': %s" % (cmdline, error))
- return 1
- elif ret != True:
- print("Error running command '%s': returned %s" % (cmdline, ret))
- return 1
- except xmlrpclib.Fault as x:
- print("XMLRPC Fault getting commandline:\n %s" % x)
- return 1
-
- # Use a timeout function for probing the event queue to find out if we
- # have a message waiting for us.
- gobject.timeout_add (100,
- event_handle_idle_func,
- eventHandler,
- running_build,
- pbar)
-
- try:
- gtk.main()
- except EnvironmentError as ioerror:
- # ignore interrupted io
- if ioerror.args[0] == 4:
- pass
- except KeyboardInterrupt:
- pass
- finally:
- server.runCommand(["stateForceShutdown"])
-
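
Rather than draining BitBake events on a dedicated thread, the goggle UI polls its event queue from a 100 ms glib timeout, so all model updates happen on the GTK main loop. A minimal sketch of that polling pattern using only the standard library, with a Queue.Queue standing in for the BitBake event handler:

    import Queue

    def drain_events(q, handle):
        # Consume everything currently queued; return True so glib keeps
        # the timeout source installed (returning False would remove it).
        while True:
            try:
                event = q.get_nowait()
            except Queue.Empty:
                return True
            handle(event)

    # With PyGTK this would be wired up roughly as:
    #   gobject.timeout_add(100, drain_events, event_queue, handler)
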
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/images/images_display.png b/yocto-poky/bitbake/lib/bb/ui/icons/images/images_display.png
deleted file mode 100644
index a7f87101a..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/images/images_display.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/images/images_hover.png b/yocto-poky/bitbake/lib/bb/ui/icons/images/images_hover.png
deleted file mode 100644
index 2d9cd99b8..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/images/images_hover.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/add-hover.png b/yocto-poky/bitbake/lib/bb/ui/icons/indicators/add-hover.png
deleted file mode 100644
index 526df770d..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/add-hover.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/add.png b/yocto-poky/bitbake/lib/bb/ui/icons/indicators/add.png
deleted file mode 100644
index 31e7090d6..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/add.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/alert.png b/yocto-poky/bitbake/lib/bb/ui/icons/indicators/alert.png
deleted file mode 100644
index d1c6f55a2..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/alert.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/confirmation.png b/yocto-poky/bitbake/lib/bb/ui/icons/indicators/confirmation.png
deleted file mode 100644
index 3a5402d1e..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/confirmation.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/denied.png b/yocto-poky/bitbake/lib/bb/ui/icons/indicators/denied.png
deleted file mode 100644
index ee35c7def..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/denied.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/error.png b/yocto-poky/bitbake/lib/bb/ui/icons/indicators/error.png
deleted file mode 100644
index d06a8c151..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/error.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/info.png b/yocto-poky/bitbake/lib/bb/ui/icons/indicators/info.png
deleted file mode 100644
index ee8e8d846..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/info.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/issues.png b/yocto-poky/bitbake/lib/bb/ui/icons/indicators/issues.png
deleted file mode 100644
index b0c746133..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/issues.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/refresh.png b/yocto-poky/bitbake/lib/bb/ui/icons/indicators/refresh.png
deleted file mode 100644
index eb6c419db..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/refresh.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/remove-hover.png b/yocto-poky/bitbake/lib/bb/ui/icons/indicators/remove-hover.png
deleted file mode 100644
index aa57c6998..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/remove-hover.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/remove.png b/yocto-poky/bitbake/lib/bb/ui/icons/indicators/remove.png
deleted file mode 100644
index 05c3c293d..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/remove.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/tick.png b/yocto-poky/bitbake/lib/bb/ui/icons/indicators/tick.png
deleted file mode 100644
index beaad361c..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/indicators/tick.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/info/info_display.png b/yocto-poky/bitbake/lib/bb/ui/icons/info/info_display.png
deleted file mode 100644
index 5afbba29f..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/info/info_display.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/info/info_hover.png b/yocto-poky/bitbake/lib/bb/ui/icons/info/info_hover.png
deleted file mode 100644
index f9d294dfa..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/info/info_hover.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/layers/layers_display.png b/yocto-poky/bitbake/lib/bb/ui/icons/layers/layers_display.png
deleted file mode 100644
index b7f9053a9..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/layers/layers_display.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/layers/layers_hover.png b/yocto-poky/bitbake/lib/bb/ui/icons/layers/layers_hover.png
deleted file mode 100644
index 0bf3ce0db..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/layers/layers_hover.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/packages/packages_display.png b/yocto-poky/bitbake/lib/bb/ui/icons/packages/packages_display.png
deleted file mode 100644
index f5d0a5064..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/packages/packages_display.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/packages/packages_hover.png b/yocto-poky/bitbake/lib/bb/ui/icons/packages/packages_hover.png
deleted file mode 100644
index c081165f3..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/packages/packages_hover.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/recipe/recipe_display.png b/yocto-poky/bitbake/lib/bb/ui/icons/recipe/recipe_display.png
deleted file mode 100644
index e9809bc7d..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/recipe/recipe_display.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/recipe/recipe_hover.png b/yocto-poky/bitbake/lib/bb/ui/icons/recipe/recipe_hover.png
deleted file mode 100644
index 7e48da9af..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/recipe/recipe_hover.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/settings/settings_display.png b/yocto-poky/bitbake/lib/bb/ui/icons/settings/settings_display.png
deleted file mode 100644
index 88c464db0..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/settings/settings_display.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/settings/settings_hover.png b/yocto-poky/bitbake/lib/bb/ui/icons/settings/settings_hover.png
deleted file mode 100644
index d92a0bf2c..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/settings/settings_hover.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/templates/templates_display.png b/yocto-poky/bitbake/lib/bb/ui/icons/templates/templates_display.png
deleted file mode 100644
index 153c7afb6..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/templates/templates_display.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/icons/templates/templates_hover.png b/yocto-poky/bitbake/lib/bb/ui/icons/templates/templates_hover.png
deleted file mode 100644
index afb7165fe..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/icons/templates/templates_hover.png
+++ /dev/null
Binary files differ
diff --git a/yocto-poky/bitbake/lib/bb/ui/knotty.py b/yocto-poky/bitbake/lib/bb/ui/knotty.py
deleted file mode 100644
index 268562770..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/knotty.py
+++ /dev/null
@@ -1,594 +0,0 @@
-#
-# BitBake (No)TTY UI Implementation
-#
-# Handling output to TTYs or files (no TTY)
-#
-# Copyright (C) 2006-2012 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-from __future__ import division
-
-import os
-import sys
-import xmlrpclib
-import logging
-import progressbar
-import signal
-import bb.msg
-import time
-import fcntl
-import struct
-import copy
-import atexit
-from bb.ui import uihelper
-
-featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS]
-
-logger = logging.getLogger("BitBake")
-interactive = sys.stdout.isatty()
-
-class BBProgress(progressbar.ProgressBar):
- def __init__(self, msg, maxval):
- self.msg = msg
- widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), ' ',
- progressbar.ETA()]
-
- try:
- self._resize_default = signal.getsignal(signal.SIGWINCH)
- except:
- self._resize_default = None
- progressbar.ProgressBar.__init__(self, maxval, [self.msg + ": "] + widgets, fd=sys.stdout)
-
- def _handle_resize(self, signum, frame):
- progressbar.ProgressBar._handle_resize(self, signum, frame)
- if self._resize_default:
- self._resize_default(signum, frame)
- def finish(self):
- progressbar.ProgressBar.finish(self)
- if self._resize_default:
- signal.signal(signal.SIGWINCH, self._resize_default)
-
-class NonInteractiveProgress(object):
- fobj = sys.stdout
-
- def __init__(self, msg, maxval):
- self.msg = msg
- self.maxval = maxval
-
- def start(self):
- self.fobj.write("%s..." % self.msg)
- self.fobj.flush()
- return self
-
- def update(self, value):
- pass
-
- def finish(self):
- self.fobj.write("done.\n")
- self.fobj.flush()
-
-def new_progress(msg, maxval):
- if interactive:
- return BBProgress(msg, maxval)
- else:
- return NonInteractiveProgress(msg, maxval)
-
-def pluralise(singular, plural, qty):
- if qty == 1:
- return singular % qty
- else:
- return plural % qty
-
-
-class InteractConsoleLogFilter(logging.Filter):
- def __init__(self, tf, format):
- self.tf = tf
- self.format = format
-
- def filter(self, record):
- if record.levelno == self.format.NOTE and (record.msg.startswith("Running") or record.msg.startswith("recipe ")):
- return False
- self.tf.clearFooter()
- return True
-
-class TerminalFilter(object):
- rows = 25
- columns = 80
-
- def sigwinch_handle(self, signum, frame):
- self.rows, self.columns = self.getTerminalColumns()
- if self._sigwinch_default:
- self._sigwinch_default(signum, frame)
-
- def getTerminalColumns(self):
- def ioctl_GWINSZ(fd):
- try:
- cr = struct.unpack('hh', fcntl.ioctl(fd, self.termios.TIOCGWINSZ, '1234'))
- except:
- return None
- return cr
- cr = ioctl_GWINSZ(sys.stdout.fileno())
- if not cr:
- try:
- fd = os.open(os.ctermid(), os.O_RDONLY)
- cr = ioctl_GWINSZ(fd)
- os.close(fd)
- except:
- pass
- if not cr:
- try:
- cr = (os.environ['LINES'], os.environ['COLUMNS'])
- except:
- cr = (25, 80)
- return cr
-
- def __init__(self, main, helper, console, errconsole, format):
- self.main = main
- self.helper = helper
- self.cuu = None
- self.stdinbackup = None
- self.interactive = sys.stdout.isatty()
- self.footer_present = False
- self.lastpids = []
-
- if not self.interactive:
- return
-
- try:
- import curses
- except ImportError:
- sys.exit("FATAL: The knotty ui could not load the required curses python module.")
-
- import termios
- self.curses = curses
- self.termios = termios
- try:
- fd = sys.stdin.fileno()
- self.stdinbackup = termios.tcgetattr(fd)
- new = copy.deepcopy(self.stdinbackup)
- new[3] = new[3] & ~termios.ECHO
- termios.tcsetattr(fd, termios.TCSADRAIN, new)
- curses.setupterm()
- if curses.tigetnum("colors") > 2:
- format.enable_color()
- self.ed = curses.tigetstr("ed")
- if self.ed:
- self.cuu = curses.tigetstr("cuu")
- try:
- self._sigwinch_default = signal.getsignal(signal.SIGWINCH)
- signal.signal(signal.SIGWINCH, self.sigwinch_handle)
- except:
- pass
- self.rows, self.columns = self.getTerminalColumns()
- except:
- self.cuu = None
- if not self.cuu:
- self.interactive = False
- bb.note("Unable to use interactive mode for this terminal, using fallback")
- return
- console.addFilter(InteractConsoleLogFilter(self, format))
- errconsole.addFilter(InteractConsoleLogFilter(self, format))
-
- def clearFooter(self):
- if self.footer_present:
- lines = self.footer_present
- sys.stdout.write(self.curses.tparm(self.cuu, lines))
- sys.stdout.write(self.curses.tparm(self.ed))
- self.footer_present = False
-
- def updateFooter(self):
- if not self.cuu:
- return
- activetasks = self.helper.running_tasks
- failedtasks = self.helper.failed_tasks
- runningpids = self.helper.running_pids
- if self.footer_present and (self.lastcount == self.helper.tasknumber_current) and (self.lastpids == runningpids):
- return
- if self.footer_present:
- self.clearFooter()
- if (not self.helper.tasknumber_total or self.helper.tasknumber_current == self.helper.tasknumber_total) and not len(activetasks):
- return
- tasks = []
- for t in runningpids:
- tasks.append("%s (pid %s)" % (activetasks[t]["title"], t))
-
- if self.main.shutdown:
- content = "Waiting for %s running tasks to finish:" % len(activetasks)
- elif not len(activetasks):
- content = "No currently running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
- else:
- content = "Currently %s running tasks (%s of %s):" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total)
- print(content)
- lines = 1 + int(len(content) / (self.columns + 1))
- for tasknum, task in enumerate(tasks[:(self.rows - 2)]):
- content = "%s: %s" % (tasknum, task)
- print(content)
- lines = lines + 1 + int(len(content) / (self.columns + 1))
- self.footer_present = lines
- self.lastpids = runningpids[:]
- self.lastcount = self.helper.tasknumber_current
-
- def finish(self):
- if self.stdinbackup:
- fd = sys.stdin.fileno()
- self.termios.tcsetattr(fd, self.termios.TCSADRAIN, self.stdinbackup)
-
-def _log_settings_from_server(server):
- # Get values of variables which control our output
- includelogs, error = server.runCommand(["getVariable", "BBINCLUDELOGS"])
- if error:
- logger.error("Unable to get the value of BBINCLUDELOGS variable: %s" % error)
- raise BaseException(error)
- loglines, error = server.runCommand(["getVariable", "BBINCLUDELOGS_LINES"])
- if error:
- logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error)
- raise BaseException(error)
- consolelogfile, error = server.runCommand(["getSetVariable", "BB_CONSOLELOG"])
- if error:
- logger.error("Unable to get the value of BB_CONSOLELOG variable: %s" % error)
- raise BaseException(error)
- return includelogs, loglines, consolelogfile
-
-_evt_list = [ "bb.runqueue.runQueueExitWait", "bb.event.LogExecTTY", "logging.LogRecord",
- "bb.build.TaskFailed", "bb.build.TaskBase", "bb.event.ParseStarted",
- "bb.event.ParseProgress", "bb.event.ParseCompleted", "bb.event.CacheLoadStarted",
- "bb.event.CacheLoadProgress", "bb.event.CacheLoadCompleted", "bb.command.CommandFailed",
- "bb.command.CommandExit", "bb.command.CommandCompleted", "bb.cooker.CookerExit",
- "bb.event.MultipleProviders", "bb.event.NoProvider", "bb.runqueue.sceneQueueTaskStarted",
- "bb.runqueue.runQueueTaskStarted", "bb.runqueue.runQueueTaskFailed", "bb.runqueue.sceneQueueTaskFailed",
- "bb.event.BuildBase", "bb.build.TaskStarted", "bb.build.TaskSucceeded", "bb.build.TaskFailedSilent"]
-
-def main(server, eventHandler, params, tf = TerminalFilter):
-
- includelogs, loglines, consolelogfile = _log_settings_from_server(server)
-
- if sys.stdin.isatty() and sys.stdout.isatty():
- log_exec_tty = True
- else:
- log_exec_tty = False
-
- helper = uihelper.BBUIHelper()
-
- console = logging.StreamHandler(sys.stdout)
- errconsole = logging.StreamHandler(sys.stderr)
- format_str = "%(levelname)s: %(message)s"
- format = bb.msg.BBLogFormatter(format_str)
- bb.msg.addDefaultlogFilter(console, bb.msg.BBLogFilterStdOut)
- bb.msg.addDefaultlogFilter(errconsole, bb.msg.BBLogFilterStdErr)
- console.setFormatter(format)
- errconsole.setFormatter(format)
- logger.addHandler(console)
- logger.addHandler(errconsole)
-
- bb.utils.set_process_name("KnottyUI")
-
- if params.options.remote_server and params.options.kill_server:
- server.terminateServer()
- return
-
- if consolelogfile and not params.options.show_environment and not params.options.show_versions:
- bb.utils.mkdirhier(os.path.dirname(consolelogfile))
- conlogformat = bb.msg.BBLogFormatter(format_str)
- consolelog = logging.FileHandler(consolelogfile)
- bb.msg.addDefaultlogFilter(consolelog)
- consolelog.setFormatter(conlogformat)
- logger.addHandler(consolelog)
-
- llevel, debug_domains = bb.msg.constructLogOptions()
- server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list])
-
- universe = False
- if not params.observe_only:
- params.updateFromServer(server)
- params.updateToServer(server, os.environ.copy())
- cmdline = params.parseActions()
- if not cmdline:
- print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
- return 1
- if 'msg' in cmdline and cmdline['msg']:
- logger.error(cmdline['msg'])
- return 1
- if cmdline['action'][0] == "buildTargets" and "universe" in cmdline['action'][1]:
- universe = True
-
- ret, error = server.runCommand(cmdline['action'])
- if error:
- logger.error("Command '%s' failed: %s" % (cmdline, error))
- return 1
- elif ret != True:
- logger.error("Command '%s' failed: returned %s" % (cmdline, ret))
- return 1
-
-
- parseprogress = None
- cacheprogress = None
- main.shutdown = 0
- interrupted = False
- return_value = 0
- errors = 0
- warnings = 0
- taskfailures = []
-
- termfilter = tf(main, helper, console, errconsole, format)
- atexit.register(termfilter.finish)
-
- while True:
- try:
- event = eventHandler.waitEvent(0)
- if event is None:
- if main.shutdown > 1:
- break
- termfilter.updateFooter()
- event = eventHandler.waitEvent(0.25)
- if event is None:
- continue
- helper.eventHandler(event)
- if isinstance(event, bb.runqueue.runQueueExitWait):
- if not main.shutdown:
- main.shutdown = 1
- continue
- if isinstance(event, bb.event.LogExecTTY):
- if log_exec_tty:
- tries = event.retries
- while tries:
- print("Trying to run: %s" % event.prog)
- if os.system(event.prog) == 0:
- break
- time.sleep(event.sleep_delay)
- tries -= 1
- if tries:
- continue
- logger.warn(event.msg)
- continue
-
- if isinstance(event, logging.LogRecord):
- if event.levelno >= format.ERROR:
- errors = errors + 1
- return_value = 1
- elif event.levelno == format.WARNING:
- warnings = warnings + 1
-
- if event.taskpid != 0:
- # For "normal" logging conditions, don't show note logs from tasks
- # but do show them if the user has changed the default log level to
- # include verbose/debug messages
- if event.levelno <= format.NOTE and (event.levelno < llevel or (event.levelno == format.NOTE and llevel != format.VERBOSE)):
- continue
-
- # Prefix task messages with recipe/task
- if event.taskpid in helper.running_tasks:
- taskinfo = helper.running_tasks[event.taskpid]
- event.msg = taskinfo['title'] + ': ' + event.msg
- if hasattr(event, 'fn'):
- event.msg = event.fn + ': ' + event.msg
- logger.handle(event)
- continue
-
- if isinstance(event, bb.build.TaskFailedSilent):
- logger.warn("Logfile for failed setscene task is %s" % event.logfile)
- continue
- if isinstance(event, bb.build.TaskFailed):
- return_value = 1
- logfile = event.logfile
- if logfile and os.path.exists(logfile):
- termfilter.clearFooter()
- bb.error("Logfile of failure stored in: %s" % logfile)
- if includelogs and not event.errprinted:
- print("Log data follows:")
- f = open(logfile, "r")
- lines = []
- while True:
- l = f.readline()
- if l == '':
- break
- l = l.rstrip()
- if loglines:
- lines.append(' | %s' % l)
- if len(lines) > int(loglines):
- lines.pop(0)
- else:
- print('| %s' % l)
- f.close()
- if lines:
- for line in lines:
- print(line)
- if isinstance(event, bb.build.TaskBase):
- logger.info(event._message)
- continue
- if isinstance(event, bb.event.ParseStarted):
- if event.total == 0:
- continue
- parseprogress = new_progress("Parsing recipes", event.total).start()
- continue
- if isinstance(event, bb.event.ParseProgress):
- parseprogress.update(event.current)
- continue
- if isinstance(event, bb.event.ParseCompleted):
- if not parseprogress:
- continue
-
- parseprogress.finish()
- print(("Parsing of %d .bb files complete (%d cached, %d parsed). %d targets, %d skipped, %d masked, %d errors."
- % ( event.total, event.cached, event.parsed, event.virtuals, event.skipped, event.masked, event.errors)))
- continue
-
- if isinstance(event, bb.event.CacheLoadStarted):
- cacheprogress = new_progress("Loading cache", event.total).start()
- continue
- if isinstance(event, bb.event.CacheLoadProgress):
- cacheprogress.update(event.current)
- continue
- if isinstance(event, bb.event.CacheLoadCompleted):
- cacheprogress.finish()
- print("Loaded %d entries from dependency cache." % event.num_entries)
- continue
-
- if isinstance(event, bb.command.CommandFailed):
- return_value = event.exitcode
- if event.error:
- errors = errors + 1
- logger.error("Command execution failed: %s", event.error)
- main.shutdown = 2
- continue
- if isinstance(event, bb.command.CommandExit):
- if not return_value:
- return_value = event.exitcode
- continue
- if isinstance(event, (bb.command.CommandCompleted, bb.cooker.CookerExit)):
- main.shutdown = 2
- continue
- if isinstance(event, bb.event.MultipleProviders):
- logger.info("multiple providers are available for %s%s (%s)", event._is_runtime and "runtime " or "",
- event._item,
- ", ".join(event._candidates))
- rtime = ""
- if event._is_runtime:
- rtime = "R"
- logger.info("consider defining a PREFERRED_%sPROVIDER entry to match %s" % (rtime, event._item))
- continue
- if isinstance(event, bb.event.NoProvider):
- if event._runtime:
- r = "R"
- else:
- r = ""
-
- extra = ''
- if not event._reasons:
- if event._close_matches:
- extra = ". Close matches:\n %s" % '\n '.join(event._close_matches)
-
- # For universe builds, only show these as warnings, not errors
- h = logger.warning
- if not universe:
- return_value = 1
- errors = errors + 1
- h = logger.error
-
- if event._dependees:
- h("Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)%s", r, event._item, ", ".join(event._dependees), r, extra)
- else:
- h("Nothing %sPROVIDES '%s'%s", r, event._item, extra)
- if event._reasons:
- for reason in event._reasons:
- h("%s", reason)
- continue
-
- if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
- logger.info("Running setscene task %d of %d (%s)" % (event.stats.completed + event.stats.active + event.stats.failed + 1, event.stats.total, event.taskstring))
- continue
-
- if isinstance(event, bb.runqueue.runQueueTaskStarted):
- if event.noexec:
- tasktype = 'noexec task'
- else:
- tasktype = 'task'
- logger.info("Running %s %s of %s (ID: %s, %s)",
- tasktype,
- event.stats.completed + event.stats.active +
- event.stats.failed + 1,
- event.stats.total, event.taskid, event.taskstring)
- continue
-
- if isinstance(event, bb.runqueue.runQueueTaskFailed):
- return_value = 1
- taskfailures.append(event.taskstring)
- logger.error("Task %s (%s) failed with exit code '%s'",
- event.taskid, event.taskstring, event.exitcode)
- continue
-
- if isinstance(event, bb.runqueue.sceneQueueTaskFailed):
- logger.warn("Setscene task %s (%s) failed with exit code '%s' - real task will be run instead",
- event.taskid, event.taskstring, event.exitcode)
- continue
-
- if isinstance(event, bb.event.DepTreeGenerated):
- continue
-
- # ignore
- if isinstance(event, (bb.event.BuildBase,
- bb.event.MetadataEvent,
- bb.event.StampUpdate,
- bb.event.ConfigParsed,
- bb.event.RecipeParsed,
- bb.event.RecipePreFinalise,
- bb.runqueue.runQueueEvent,
- bb.event.OperationStarted,
- bb.event.OperationCompleted,
- bb.event.OperationProgress,
- bb.event.DiskFull)):
- continue
-
- logger.error("Unknown event: %s", event)
-
- except EnvironmentError as ioerror:
- termfilter.clearFooter()
- # ignore interrupted io
- if ioerror.args[0] == 4:
- continue
- sys.stderr.write(str(ioerror))
- if not params.observe_only:
- _, error = server.runCommand(["stateForceShutdown"])
- main.shutdown = 2
- except KeyboardInterrupt:
- termfilter.clearFooter()
- if params.observe_only:
- print("\nKeyboard Interrupt, exiting observer...")
- main.shutdown = 2
- if not params.observe_only and main.shutdown == 1:
- print("\nSecond Keyboard Interrupt, stopping...\n")
- _, error = server.runCommand(["stateForceShutdown"])
- if error:
- logger.error("Unable to cleanly stop: %s" % error)
- if not params.observe_only and main.shutdown == 0:
- print("\nKeyboard Interrupt, closing down...\n")
- interrupted = True
- _, error = server.runCommand(["stateShutdown"])
- if error:
- logger.error("Unable to cleanly shutdown: %s" % error)
- main.shutdown = main.shutdown + 1
- pass
- except Exception as e:
- import traceback
- sys.stderr.write(traceback.format_exc())
- if not params.observe_only:
- _, error = server.runCommand(["stateForceShutdown"])
- main.shutdown = 2
- return_value = 1
- try:
- summary = ""
- if taskfailures:
- summary += pluralise("\nSummary: %s task failed:",
- "\nSummary: %s tasks failed:", len(taskfailures))
- for failure in taskfailures:
- summary += "\n %s" % failure
- if warnings:
- summary += pluralise("\nSummary: There was %s WARNING message shown.",
- "\nSummary: There were %s WARNING messages shown.", warnings)
- if return_value and errors:
- summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.",
- "\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors)
- if summary:
- print(summary)
-
- if interrupted:
- print("Execution was interrupted, returning a non-zero exit code.")
- if return_value == 0:
- return_value = 1
- except IOError as e:
- import errno
- if e.errno == errno.EPIPE:
- pass
-
- return return_value
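
The task footer above is repainted in place by emitting the terminfo "cuu" capability to move the cursor up over the previous footer and "ed" to erase to the end of the screen, then rewriting the lines. A minimal sketch of that repaint trick, assuming a terminfo-capable terminal on stdout (Python 2, where tigetstr/tparm return plain strings):

    import curses
    import sys

    def make_footer():
        curses.setupterm()
        cuu = curses.tigetstr("cuu")  # move cursor up N lines
        ed = curses.tigetstr("ed")    # erase from cursor to end of screen
        state = {"lines": 0}

        def clear():
            if state["lines"]:
                sys.stdout.write(curses.tparm(cuu, state["lines"]))
                sys.stdout.write(curses.tparm(ed))
                state["lines"] = 0

        def paint(lines):
            clear()
            for line in lines:
                sys.stdout.write(line + "\n")
            state["lines"] = len(lines)
            sys.stdout.flush()

        return paint, clear
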
diff --git a/yocto-poky/bitbake/lib/bb/ui/ncurses.py b/yocto-poky/bitbake/lib/bb/ui/ncurses.py
deleted file mode 100644
index 9589a77d7..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/ncurses.py
+++ /dev/null
@@ -1,373 +0,0 @@
-#
-# BitBake Curses UI Implementation
-#
-# Implements an ncurses frontend for the BitBake utility.
-#
-# Copyright (C) 2006 Michael 'Mickey' Lauer
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""
- We have the following windows:
-
- 1.) Main Window: Shows what we are ultimately building and how far we are. Includes status bar
- 2.) Thread Activity Window: Shows one status line for every concurrent bitbake thread.
- 3.) Command Line Window: Contains an interactive command line where you can interact w/ Bitbake.
-
- Basic window layout is like this:
-
- |---------------------------------------------------------|
- | <Main Window> | <Thread Activity Window> |
- | | 0: foo do_compile complete|
- | Building Gtk+-2.6.10 | 1: bar do_patch complete |
- | Status: 60% | ... |
- | | ... |
- | | ... |
- |---------------------------------------------------------|
- |<Command Line Window> |
- |>>> which virtual/kernel |
- |openzaurus-kernel |
- |>>> _ |
- |---------------------------------------------------------|
-
-"""
-
-
-from __future__ import division
-import logging
-import os, sys, itertools, time, subprocess
-
-try:
- import curses
-except ImportError:
- sys.exit("FATAL: The ncurses ui could not load the required curses python module.")
-
-import bb
-import xmlrpclib
-from bb import ui
-from bb.ui import uihelper
-
- parsespin = itertools.cycle( '|/-\\' )
-
-X = 0
-Y = 1
-WIDTH = 2
-HEIGHT = 3
-
-MAXSTATUSLENGTH = 32
-
-class NCursesUI:
- """
- NCurses UI Class
- """
- class Window:
- """Base Window Class"""
- def __init__( self, x, y, width, height, fg=curses.COLOR_BLACK, bg=curses.COLOR_WHITE ):
- self.win = curses.newwin( height, width, y, x )
- self.dimensions = ( x, y, width, height )
- """
- if curses.has_colors():
- color = 1
- curses.init_pair( color, fg, bg )
- self.win.bkgdset( ord(' '), curses.color_pair(color) )
- else:
- self.win.bkgdset( ord(' '), curses.A_BOLD )
- """
- self.erase()
- self.setScrolling()
- self.win.noutrefresh()
-
- def erase( self ):
- self.win.erase()
-
- def setScrolling( self, b = True ):
- self.win.scrollok( b )
- self.win.idlok( b )
-
- def setBoxed( self ):
- self.boxed = True
- self.win.box()
- self.win.noutrefresh()
-
- def setText( self, x, y, text, *args ):
- self.win.addstr( y, x, text, *args )
- self.win.noutrefresh()
-
- def appendText( self, text, *args ):
- self.win.addstr( text, *args )
- self.win.noutrefresh()
-
- def drawHline( self, y ):
- self.win.hline( y, 0, curses.ACS_HLINE, self.dimensions[WIDTH] )
- self.win.noutrefresh()
-
- class DecoratedWindow( Window ):
- """Base class for windows with a box and a title bar"""
- def __init__( self, title, x, y, width, height, fg=curses.COLOR_BLACK, bg=curses.COLOR_WHITE ):
- NCursesUI.Window.__init__( self, x+1, y+3, width-2, height-4, fg, bg )
- self.decoration = NCursesUI.Window( x, y, width, height, fg, bg )
- self.decoration.setBoxed()
- self.decoration.win.hline( 2, 1, curses.ACS_HLINE, width-2 )
- self.setTitle( title )
-
- def setTitle( self, title ):
- self.decoration.setText( 1, 1, title.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
-
- #-------------------------------------------------------------------------#
-# class TitleWindow( Window ):
- #-------------------------------------------------------------------------#
-# """Title Window"""
-# def __init__( self, x, y, width, height ):
-# NCursesUI.Window.__init__( self, x, y, width, height )
-# version = bb.__version__
-# title = "BitBake %s" % version
-# credit = "(C) 2003-2007 Team BitBake"
-# #self.win.hline( 2, 1, curses.ACS_HLINE, width-2 )
-# self.win.border()
-# self.setText( 1, 1, title.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
-# self.setText( 1, 2, credit.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
-
- #-------------------------------------------------------------------------#
- class ThreadActivityWindow( DecoratedWindow ):
- #-------------------------------------------------------------------------#
- """Thread Activity Window"""
- def __init__( self, x, y, width, height ):
- NCursesUI.DecoratedWindow.__init__( self, "Thread Activity", x, y, width, height )
-
- def setStatus( self, thread, text ):
- line = "%02d: %s" % ( thread, text )
- width = self.dimensions[WIDTH]
- if ( len(line) > width ):
- line = line[:width-3] + "..."
- else:
- line = line.ljust( width )
- self.setText( 0, thread, line )
-
- #-------------------------------------------------------------------------#
- class MainWindow( DecoratedWindow ):
- #-------------------------------------------------------------------------#
- """Main Window"""
- def __init__( self, x, y, width, height ):
- self.StatusPosition = width - MAXSTATUSLENGTH
- NCursesUI.DecoratedWindow.__init__( self, None, x, y, width, height )
- curses.nl()
-
- def setTitle( self, title ):
- title = "BitBake %s" % bb.__version__
- self.decoration.setText( 2, 1, title, curses.A_BOLD )
- self.decoration.setText( self.StatusPosition - 8, 1, "Status:", curses.A_BOLD )
-
- def setStatus(self, status):
- while len(status) < MAXSTATUSLENGTH:
- status = status + " "
- self.decoration.setText( self.StatusPosition, 1, status, curses.A_BOLD )
-
-
- #-------------------------------------------------------------------------#
- class ShellOutputWindow( DecoratedWindow ):
- #-------------------------------------------------------------------------#
- """Interactive Command Line Output"""
- def __init__( self, x, y, width, height ):
- NCursesUI.DecoratedWindow.__init__( self, "Command Line Window", x, y, width, height )
-
- #-------------------------------------------------------------------------#
- class ShellInputWindow( Window ):
- #-------------------------------------------------------------------------#
- """Interactive Command Line Input"""
- def __init__( self, x, y, width, height ):
- NCursesUI.Window.__init__( self, x, y, width, height )
-
-# TODO: move this back to the top of the file: from curses.textpad import Textbox
-# self.textbox = Textbox( self.win )
-# t = threading.Thread()
-# t.run = self.textbox.edit
-# t.start()
-
- #-------------------------------------------------------------------------#
- def main(self, stdscr, server, eventHandler, params):
- #-------------------------------------------------------------------------#
- height, width = stdscr.getmaxyx()
-
- # for now split it like this:
- # MAIN_y + THREAD_y = 2/3 screen at the top
- # MAIN_x = 2/3 left, THREAD_y = 1/3 right
- # CLI_y = 1/3 of screen at the bottom
- # CLI_x = full
-
- main_left = 0
- main_top = 0
- main_height = ( height // 3 * 2 )
- main_width = ( width // 3 ) * 2
- clo_left = main_left
- clo_top = main_top + main_height
- clo_height = height - main_height - main_top - 1
- clo_width = width
- cli_left = main_left
- cli_top = clo_top + clo_height
- cli_height = 1
- cli_width = width
- thread_left = main_left + main_width
- thread_top = main_top
- thread_height = main_height
- thread_width = width - main_width
-
- #tw = self.TitleWindow( 0, 0, width, main_top )
- mw = self.MainWindow( main_left, main_top, main_width, main_height )
- taw = self.ThreadActivityWindow( thread_left, thread_top, thread_width, thread_height )
- clo = self.ShellOutputWindow( clo_left, clo_top, clo_width, clo_height )
- cli = self.ShellInputWindow( cli_left, cli_top, cli_width, cli_height )
- cli.setText( 0, 0, "BB>" )
-
- mw.setStatus("Idle")
-
- helper = uihelper.BBUIHelper()
- shutdown = 0
-
- try:
- params.updateFromServer(server)
- cmdline = params.parseActions()
- if not cmdline:
- print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
- return 1
- if 'msg' in cmdline and cmdline['msg']:
- print(cmdline['msg'])
- return 1
- cmdline = cmdline['action']
- ret, error = server.runCommand(cmdline)
- if error:
- print("Error running command '%s': %s" % (cmdline, error))
- return
- elif ret != True:
- print("Couldn't get default commandlind! %s" % ret)
- return
- except xmlrpclib.Fault as x:
- print("XMLRPC Fault getting commandline:\n %s" % x)
- return
-
- exitflag = False
- while not exitflag:
- try:
- event = eventHandler.waitEvent(0.25)
- if not event:
- continue
-
- helper.eventHandler(event)
- if isinstance(event, bb.build.TaskBase):
- mw.appendText("NOTE: %s\n" % event._message)
- if isinstance(event, logging.LogRecord):
- mw.appendText(logging.getLevelName(event.levelno) + ': ' + event.getMessage() + '\n')
-
- if isinstance(event, bb.event.CacheLoadStarted):
- self.parse_total = event.total
- if isinstance(event, bb.event.CacheLoadProgress):
- x = event.current
- y = self.parse_total
- mw.setStatus("Loading Cache: %s [%2d %%]" % ( next(parsespin), x*100/y ) )
- if isinstance(event, bb.event.CacheLoadCompleted):
- mw.setStatus("Idle")
- mw.appendText("Loaded %d entries from dependency cache.\n"
- % ( event.num_entries))
-
- if isinstance(event, bb.event.ParseStarted):
- self.parse_total = event.total
- if isinstance(event, bb.event.ParseProgress):
- x = event.current
- y = self.parse_total
- mw.setStatus("Parsing Recipes: %s [%2d %%]" % ( next(parsespin), x*100/y ) )
- if isinstance(event, bb.event.ParseCompleted):
- mw.setStatus("Idle")
- mw.appendText("Parsing finished. %d cached, %d parsed, %d skipped, %d masked.\n"
- % ( event.cached, event.parsed, event.skipped, event.masked ))
-
-# if isinstance(event, bb.build.TaskFailed):
-# if event.logfile:
-# if data.getVar("BBINCLUDELOGS", d):
-# bb.error("log data follows (%s)" % logfile)
-# number_of_lines = data.getVar("BBINCLUDELOGS_LINES", d)
-# if number_of_lines:
-# subprocess.call('tail -n%s %s' % (number_of_lines, logfile), shell=True)
-# else:
-# f = open(logfile, "r")
-# while True:
-# l = f.readline()
-# if l == '':
-# break
-# l = l.rstrip()
-# print '| %s' % l
-# f.close()
-# else:
-# bb.error("see log in %s" % logfile)
-
- if isinstance(event, bb.command.CommandCompleted):
- # stop so the user can see the result of the build, but
- # also allow them to now exit with a single ^C
- shutdown = 2
- if isinstance(event, bb.command.CommandFailed):
- mw.appendText("Command execution failed: %s" % event.error)
- time.sleep(2)
- exitflag = True
- if isinstance(event, bb.command.CommandExit):
- exitflag = True
- if isinstance(event, bb.cooker.CookerExit):
- exitflag = True
-
- if isinstance(event, bb.event.LogExecTTY):
- mw.appendText('WARN: ' + event.msg + '\n')
- if helper.needUpdate:
- activetasks, failedtasks = helper.getTasks()
- taw.erase()
- taw.setText(0, 0, "")
- if activetasks:
- taw.appendText("Active Tasks:\n")
- for task in activetasks.itervalues():
- taw.appendText(task["title"] + '\n')
- if failedtasks:
- taw.appendText("Failed Tasks:\n")
- for task in failedtasks:
- taw.appendText(task["title"] + '\n')
-
- curses.doupdate()
- except EnvironmentError as ioerror:
- # ignore interrupted io
- if ioerror.args[0] == 4:
- pass
-
- except KeyboardInterrupt:
- if shutdown == 2:
- mw.appendText("Third Keyboard Interrupt, exit.\n")
- exitflag = True
- if shutdown == 1:
- mw.appendText("Second Keyboard Interrupt, stopping...\n")
- _, error = server.runCommand(["stateForceShutdown"])
- if error:
- print("Unable to cleanly stop: %s" % error)
- if shutdown == 0:
- mw.appendText("Keyboard Interrupt, closing down...\n")
- _, error = server.runCommand(["stateShutdown"])
- if error:
- print("Unable to cleanly shutdown: %s" % error)
- shutdown = shutdown + 1
- pass
-
-def main(server, eventHandler, params):
- if not os.isatty(sys.stdout.fileno()):
- print("FATAL: Unable to run 'ncurses' UI without a TTY.")
- return
- ui = NCursesUI()
- try:
- curses.wrapper(ui.main, server, eventHandler, params)
- except:
- import traceback
- traceback.print_exc()
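
Everything above runs inside curses.wrapper(), which initialises the screen and restores the terminal even when the UI raises, and each loop iteration stages window changes with noutrefresh() before flushing them in one pass with curses.doupdate(). A minimal sketch of that structure, with illustrative window names and sizes:

    import curses

    def ui_main(stdscr):
        height, width = stdscr.getmaxyx()
        log_win = curses.newwin(height - 1, width, 0, 0)
        status_win = curses.newwin(1, width, height - 1, 0)
        log_win.scrollok(True)

        for i in range(5):
            log_win.addstr("tick %d\n" % i)
            status_win.erase()
            status_win.addstr(0, 0, "Status: %d/5" % (i + 1))
            # Stage both windows, then flush to the terminal once.
            log_win.noutrefresh()
            status_win.noutrefresh()
            curses.doupdate()
            curses.napms(200)

    curses.wrapper(ui_main)  # terminal state is restored on exit or error
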
diff --git a/yocto-poky/bitbake/lib/bb/ui/toasterui.py b/yocto-poky/bitbake/lib/bb/ui/toasterui.py
deleted file mode 100644
index 6bf4c1f03..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/toasterui.py
+++ /dev/null
@@ -1,465 +0,0 @@
-#
-# BitBake ToasterUI Implementation
-# based on (No)TTY UI Implementation by Richard Purdie
-#
-# Handling output to TTYs or files (no TTY)
-#
-# Copyright (C) 2006-2012 Richard Purdie
-# Copyright (C) 2013 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-from __future__ import division
-import time
-import sys
-try:
- import bb
-except RuntimeError as exc:
- sys.exit(str(exc))
-
-from bb.ui import uihelper
-from bb.ui.buildinfohelper import BuildInfoHelper
-
-import bb.msg
-import logging
-import os
-
-# pylint: disable=invalid-name
-# module properties for UI modules are read by bitbake and the contract should not be broken
-
-
-featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.SEND_DEPENDS_TREE, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING, bb.cooker.CookerFeatures.SEND_SANITYEVENTS]
-
-logger = logging.getLogger("ToasterLogger")
-interactive = sys.stdout.isatty()
-
-def _log_settings_from_server(server):
- # Get values of variables which control our output
- includelogs, error = server.runCommand(["getVariable", "BBINCLUDELOGS"])
- if error:
- logger.error("Unable to get the value of BBINCLUDELOGS variable: %s", error)
- raise BaseException(error)
- loglines, error = server.runCommand(["getVariable", "BBINCLUDELOGS_LINES"])
- if error:
- logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s", error)
- raise BaseException(error)
- consolelogfile, error = server.runCommand(["getVariable", "BB_CONSOLELOG"])
- if error:
- logger.error("Unable to get the value of BB_CONSOLELOG variable: %s", error)
- raise BaseException(error)
- return consolelogfile
-
-# create a log file for a single build and direct the logger at it;
-# log file name is timestamped to the millisecond (depending
-# on system clock accuracy) to ensure it doesn't overlap with
-# other log file names
-#
-# returns (log file, path to log file) for a build
-def _open_build_log(log_dir):
- format_str = "%(levelname)s: %(message)s"
-
- now = time.time()
- now_ms = int((now - int(now)) * 1000)
- time_str = time.strftime('build_%Y%m%d_%H%M%S', time.localtime(now))
- log_file_name = time_str + ('.%d.log' % now_ms)
- build_log_file_path = os.path.join(log_dir, log_file_name)
-
- build_log = logging.FileHandler(build_log_file_path)
-
- logformat = bb.msg.BBLogFormatter(format_str)
- build_log.setFormatter(logformat)
-
- bb.msg.addDefaultlogFilter(build_log)
- logger.addHandler(build_log)
-
- return (build_log, build_log_file_path)
-
-# stop logging to the build log if it exists
-def _close_build_log(build_log):
- if build_log:
- build_log.flush()
- build_log.close()
- logger.removeHandler(build_log)
-
-_evt_list = [
- "bb.build.TaskBase",
- "bb.build.TaskFailed",
- "bb.build.TaskFailedSilent",
- "bb.build.TaskStarted",
- "bb.build.TaskSucceeded",
- "bb.command.CommandCompleted",
- "bb.command.CommandExit",
- "bb.command.CommandFailed",
- "bb.cooker.CookerExit",
- "bb.event.BuildCompleted",
- "bb.event.BuildStarted",
- "bb.event.CacheLoadCompleted",
- "bb.event.CacheLoadProgress",
- "bb.event.CacheLoadStarted",
- "bb.event.ConfigParsed",
- "bb.event.DepTreeGenerated",
- "bb.event.LogExecTTY",
- "bb.event.MetadataEvent",
- "bb.event.MultipleProviders",
- "bb.event.NoProvider",
- "bb.event.ParseCompleted",
- "bb.event.ParseProgress",
- "bb.event.RecipeParsed",
- "bb.event.SanityCheck",
- "bb.event.SanityCheckPassed",
- "bb.event.TreeDataPreparationCompleted",
- "bb.event.TreeDataPreparationStarted",
- "bb.runqueue.runQueueTaskCompleted",
- "bb.runqueue.runQueueTaskFailed",
- "bb.runqueue.runQueueTaskSkipped",
- "bb.runqueue.runQueueTaskStarted",
- "bb.runqueue.sceneQueueTaskCompleted",
- "bb.runqueue.sceneQueueTaskFailed",
- "bb.runqueue.sceneQueueTaskStarted",
- "logging.LogRecord"]
-
-def main(server, eventHandler, params):
- # set to a logging.FileHandler instance when a build starts;
- # see _open_build_log()
- build_log = None
-
- # set to the log path when a build starts
- build_log_file_path = None
-
- helper = uihelper.BBUIHelper()
-
- # TODO don't use log output to determine when bitbake has started
- #
- # WARNING: this log handler cannot be removed, as localhostbecontroller
- # relies on output in the toaster_ui.log file to determine whether
- # the bitbake server has started, which only happens if
- # this logger is setup here (see the TODO in the loop below)
- console = logging.StreamHandler(sys.stdout)
- format_str = "%(levelname)s: %(message)s"
- formatter = bb.msg.BBLogFormatter(format_str)
- bb.msg.addDefaultlogFilter(console)
- console.setFormatter(formatter)
- logger.addHandler(console)
- logger.setLevel(logging.INFO)
- llevel, debug_domains = bb.msg.constructLogOptions()
- result, error = server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list])
- if not result or error:
- logger.error("can't set event mask: %s", error)
- return 1
-
- # verify and warn
- build_history_enabled = True
- inheritlist, _ = server.runCommand(["getVariable", "INHERIT"])
-
- if not "buildhistory" in inheritlist.split(" "):
- logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
- build_history_enabled = False
-
- if not params.observe_only:
- params.updateFromServer(server)
- params.updateToServer(server, os.environ.copy())
- cmdline = params.parseActions()
- if not cmdline:
- print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
- return 1
- if 'msg' in cmdline and cmdline['msg']:
- logger.error(cmdline['msg'])
- return 1
-
- ret, error = server.runCommand(cmdline['action'])
- if error:
- logger.error("Command '%s' failed: %s" % (cmdline, error))
- return 1
- elif ret != True:
- logger.error("Command '%s' failed: returned %s" % (cmdline, ret))
- return 1
-
- # set to 1 when toasterui needs to shut down
- main.shutdown = 0
-
- interrupted = False
- return_value = 0
- errors = 0
- warnings = 0
- taskfailures = []
- first = True
-
- buildinfohelper = BuildInfoHelper(server, build_history_enabled,
- os.getenv('TOASTER_BRBE'))
-
- # write our own log files into bitbake's log directory;
- # we're only interested in the path to the parent directory of
- # this file, as we're writing our own logs into the same directory
- consolelogfile = _log_settings_from_server(server)
- log_dir = os.path.dirname(consolelogfile)
- bb.utils.mkdirhier(log_dir)
-
- while True:
- try:
- event = eventHandler.waitEvent(0.25)
- if first:
- first = False
-
- # TODO don't use log output to determine when bitbake has started
- #
- # this is the line localhostbecontroller needs to
- # see in toaster_ui.log which it uses to decide whether
- # the bitbake server has started...
- logger.info("ToasterUI waiting for events")
-
- if event is None:
- if main.shutdown > 0:
- # if shutting down, close any open build log first
- _close_build_log(build_log)
-
- break
- continue
-
- helper.eventHandler(event)
-
- # pylint: disable=protected-access
- # the code will look into the protected variables of the event; no easy way around this
-
- # we treat ParseStarted as the first event of toaster-triggered
- # builds; that way we get the Build Configuration included in the log
- # and any errors that occur before BuildStarted is fired
- if isinstance(event, bb.event.ParseStarted):
- if not (build_log and build_log_file_path):
- build_log, build_log_file_path = _open_build_log(log_dir)
- continue
-
- if isinstance(event, bb.event.BuildStarted):
- if not (build_log and build_log_file_path):
- build_log, build_log_file_path = _open_build_log(log_dir)
-
- buildinfohelper.store_started_build(event, build_log_file_path)
- continue
-
- if isinstance(event, (bb.build.TaskStarted, bb.build.TaskSucceeded, bb.build.TaskFailedSilent)):
- buildinfohelper.update_and_store_task(event)
- logger.info("Logfile for task %s", event.logfile)
- continue
-
- if isinstance(event, bb.build.TaskBase):
- logger.info(event._message)
-
- if isinstance(event, bb.event.LogExecTTY):
- logger.info(event.msg)
- continue
-
- if isinstance(event, logging.LogRecord):
- if event.levelno == -1:
- event.levelno = formatter.ERROR
-
- buildinfohelper.store_log_event(event)
-
- if event.levelno >= formatter.ERROR:
- errors = errors + 1
- elif event.levelno == formatter.WARNING:
- warnings = warnings + 1
-
- # For "normal" logging conditions, don't show note logs from tasks
- # but do show them if the user has changed the default log level to
- # include verbose/debug messages
- if event.taskpid != 0 and event.levelno <= formatter.NOTE:
- continue
-
- logger.handle(event)
- continue
-
- if isinstance(event, bb.build.TaskFailed):
- buildinfohelper.update_and_store_task(event)
- logfile = event.logfile
- if logfile and os.path.exists(logfile):
- bb.error("Logfile of failure stored in: %s" % logfile)
- continue
-
- # these events are unprocessed now, but may be used in the future to log
- # timing and error information from the parsing phase in Toaster
- if isinstance(event, (bb.event.SanityCheckPassed, bb.event.SanityCheck)):
- continue
- if isinstance(event, bb.event.ParseProgress):
- continue
- if isinstance(event, bb.event.ParseCompleted):
- continue
- if isinstance(event, bb.event.CacheLoadStarted):
- continue
- if isinstance(event, bb.event.CacheLoadProgress):
- continue
- if isinstance(event, bb.event.CacheLoadCompleted):
- continue
- if isinstance(event, bb.event.MultipleProviders):
- logger.info("multiple providers are available for %s%s (%s)", event._is_runtime and "runtime " or "",
- event._item,
- ", ".join(event._candidates))
- logger.info("consider defining a PREFERRED_PROVIDER entry to match %s", event._item)
- continue
-
- if isinstance(event, bb.event.NoProvider):
- errors = errors + 1
- if event._runtime:
- r = "R"
- else:
- r = ""
-
- if event._dependees:
- text = "Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)" % (r, event._item, ", ".join(event._dependees), r)
- else:
- text = "Nothing %sPROVIDES '%s'" % (r, event._item)
-
- logger.error(text)
- if event._reasons:
- for reason in event._reasons:
- logger.error("%s", reason)
- text += reason
- buildinfohelper.store_log_error(text)
- continue
-
- if isinstance(event, bb.event.ConfigParsed):
- continue
- if isinstance(event, bb.event.RecipeParsed):
- continue
-
- # end of saved events
-
- if isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped)):
- buildinfohelper.store_started_task(event)
- continue
-
- if isinstance(event, bb.runqueue.runQueueTaskCompleted):
- buildinfohelper.update_and_store_task(event)
- continue
-
- if isinstance(event, bb.runqueue.runQueueTaskFailed):
- buildinfohelper.update_and_store_task(event)
- taskfailures.append(event.taskstring)
- logger.error("Task %s (%s) failed with exit code '%s'",
- event.taskid, event.taskstring, event.exitcode)
- continue
-
- if isinstance(event, (bb.runqueue.sceneQueueTaskCompleted, bb.runqueue.sceneQueueTaskFailed)):
- buildinfohelper.update_and_store_task(event)
- continue
-
-
- if isinstance(event, (bb.event.TreeDataPreparationStarted, bb.event.TreeDataPreparationCompleted)):
- continue
-
- if isinstance(event, (bb.event.BuildCompleted, bb.command.CommandFailed)):
-
- errorcode = 0
- if isinstance(event, bb.command.CommandFailed):
- errors += 1
- errorcode = 1
- logger.error("Command execution failed: %s", event.error)
-
- # turn off logging to the current build log
- _close_build_log(build_log)
-
- # reset ready for next BuildStarted
- build_log = None
-
- # update the build info helper on BuildCompleted, not on CommandXXX
- buildinfohelper.update_build_information(event, errors, warnings, taskfailures)
-
- brbe = buildinfohelper.brbe
- buildinfohelper.close(errorcode)
-
- # we start a new build info
- if params.observe_only:
- logger.debug("ToasterUI prepared for new build")
- errors = 0
- warnings = 0
- taskfailures = []
- buildinfohelper = BuildInfoHelper(server, build_history_enabled)
- else:
- main.shutdown = 1
-
- logger.info("ToasterUI build done, brbe: %s", brbe)
- continue
-
- if isinstance(event, (bb.command.CommandCompleted,
- bb.command.CommandFailed,
- bb.command.CommandExit)):
- if params.observe_only:
- errorcode = 0
- else:
- main.shutdown = 1
-
- continue
-
- if isinstance(event, bb.event.MetadataEvent):
- if event.type == "SinglePackageInfo":
- buildinfohelper.store_build_package_information(event)
- elif event.type == "LayerInfo":
- buildinfohelper.store_layer_info(event)
- elif event.type == "BuildStatsList":
- buildinfohelper.store_tasks_stats(event)
- elif event.type == "ImagePkgList":
- buildinfohelper.store_target_package_data(event)
- elif event.type == "MissedSstate":
- buildinfohelper.store_missed_state_tasks(event)
- elif event.type == "ImageFileSize":
- buildinfohelper.update_target_image_file(event)
- elif event.type == "ArtifactFileSize":
- buildinfohelper.update_artifact_image_file(event)
- elif event.type == "LicenseManifestPath":
- buildinfohelper.store_license_manifest_path(event)
- elif event.type == "SetBRBE":
- buildinfohelper.brbe = buildinfohelper._get_data_from_event(event)
- elif event.type == "OSErrorException":
- logger.error(event)
- else:
- logger.error("Unprocessed MetadataEvent %s ", str(event))
- continue
-
- if isinstance(event, bb.cooker.CookerExit):
- # shutdown when bitbake server shuts down
- main.shutdown = 1
- continue
-
- if isinstance(event, bb.event.DepTreeGenerated):
- buildinfohelper.store_dependency_information(event)
- continue
-
- logger.warn("Unknown event: %s", event)
- return_value += 1
-
- except EnvironmentError as ioerror:
- # ignore interrupted io
- if ioerror.args[0] == 4:
- pass
- except KeyboardInterrupt:
- main.shutdown = 1
- except Exception as e:
- # print errors to log
- import traceback
- from pprint import pformat
- exception_data = traceback.format_exc()
- logger.error("%s\n%s" , e, exception_data)
-
- # save them to database, if possible; if it fails, we already logged to console.
- try:
- buildinfohelper.store_log_exception("%s\n%s" % (str(e), exception_data))
- except Exception as ce:
- logger.error("CRITICAL - Failed to to save toaster exception to the database: %s", str(ce))
-
- # make sure we return with an error
- return_value += 1
-
- if interrupted and return_value == 0:
- return_value += 1
-
- logger.warn("Return value is %d", return_value)
- return return_value
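The long isinstance() chain above is effectively a hand-written dispatch table mapping event classes to handlers. A minimal runnable sketch of the same pattern (class and handler names here are illustrative, not part of the Toaster code):

    class EventDispatcher(object):
        """Dispatch an event to the first handler registered for its class."""
        def __init__(self):
            self.handlers = []  # ordered list of (event_class, callback)

        def register(self, event_class, callback):
            self.handlers.append((event_class, callback))

        def dispatch(self, event):
            for event_class, callback in self.handlers:
                if isinstance(event, event_class):
                    return callback(event)
            print("Unknown event: %s" % event)  # mirrors the logger.warn above

    dispatcher = EventDispatcher()
    dispatcher.register(str, lambda e: "handled %s" % e)
    print(dispatcher.dispatch("TaskStarted"))  # -> handled TaskStarted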
diff --git a/yocto-poky/bitbake/lib/bb/ui/uievent.py b/yocto-poky/bitbake/lib/bb/ui/uievent.py
deleted file mode 100644
index df093c53c..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/uievent.py
+++ /dev/null
@@ -1,161 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
-# Copyright (C) 2006 - 2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-
-"""
-Use this class to fork off a thread to receive event callbacks from the bitbake
-server and queue them for the UI to process. This process must be used to avoid
-client/server deadlocks.
-"""
-
-import socket, threading, pickle, collections
-from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
-
-class BBUIEventQueue:
- def __init__(self, BBServer, clientinfo=("localhost", 0)):
-
- self.eventQueue = []
- self.eventQueueLock = threading.Lock()
- self.eventQueueNotify = threading.Event()
-
- self.BBServer = BBServer
- self.clientinfo = clientinfo
-
- server = UIXMLRPCServer(self.clientinfo)
- self.host, self.port = server.socket.getsockname()
-
- server.register_function( self.system_quit, "event.quit" )
- server.register_function( self.send_event, "event.sendpickle" )
- server.socket.settimeout(1)
-
- self.EventHandle = None
-
- # the event handler registration may fail here if the cooker is in an
- # invalid state; this is a transient situation, so we retry a few
- # times before giving up
-
- for count_tries in range(5):
- ret = self.BBServer.registerEventHandler(self.host, self.port)
-
- if isinstance(ret, collections.Iterable):
- self.EventHandle, error = ret
- else:
- self.EventHandle = ret
- error = ""
-
- if self.EventHandle is not None:
- break
-
- errmsg = "Could not register UI event handler. Error: %s, host %s, "\
- "port %d" % (error, self.host, self.port)
- bb.warn("%s, retry" % errmsg)
-
- import time
- time.sleep(1)
- else:
- raise Exception(errmsg)
-
- self.server = server
-
- self.t = threading.Thread()
- self.t.setDaemon(True)
- self.t.run = self.startCallbackHandler
- self.t.start()
-
- def getEvent(self):
-
- self.eventQueueLock.acquire()
-
- if len(self.eventQueue) == 0:
- self.eventQueueLock.release()
- return None
-
- item = self.eventQueue.pop(0)
-
- if len(self.eventQueue) == 0:
- self.eventQueueNotify.clear()
-
- self.eventQueueLock.release()
- return item
-
- def waitEvent(self, delay):
- self.eventQueueNotify.wait(delay)
- return self.getEvent()
-
- def queue_event(self, event):
- self.eventQueueLock.acquire()
- self.eventQueue.append(event)
- self.eventQueueNotify.set()
- self.eventQueueLock.release()
-
- def send_event(self, event):
- self.queue_event(pickle.loads(event))
-
- def startCallbackHandler(self):
-
- self.server.timeout = 1
- bb.utils.set_process_name("UIEventQueue")
- while not self.server.quit:
- try:
- self.server.handle_request()
- except Exception as e:
- import traceback
- logger.error("BBUIEventQueue.startCallbackHandler: Exception while trying to handle request: %s\n%s" % (e, traceback.format_exc(e)))
-
- self.server.server_close()
-
- def system_quit( self ):
- """
- Shut down the callback thread
- """
- try:
- self.BBServer.unregisterEventHandler(self.EventHandle)
- except:
- pass
- self.server.quit = True
-
-class UIXMLRPCServer (SimpleXMLRPCServer):
-
- def __init__( self, interface ):
- self.quit = False
- SimpleXMLRPCServer.__init__( self,
- interface,
- requestHandler=SimpleXMLRPCRequestHandler,
- logRequests=False, allow_none=True)
-
- def get_request(self):
- while not self.quit:
- try:
- sock, addr = self.socket.accept()
- sock.settimeout(1)
- return (sock, addr)
- except socket.timeout:
- pass
- return (None, None)
-
- def close_request(self, request):
- if request is None:
- return
- SimpleXMLRPCServer.close_request(self, request)
-
- def process_request(self, request, client_address):
- if request is None:
- return
- SimpleXMLRPCServer.process_request(self, request, client_address)
-
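For context, a front end typically drains this queue by polling waitEvent() with a short timeout. A runnable sketch of that consumer pattern, using a stand-in queue (BBUIEventQueue itself needs a live bitbake server):

    import threading

    class FakeEventQueue(object):
        """Stand-in with the same waitEvent() contract as BBUIEventQueue."""
        def __init__(self, events):
            self.events = list(events)
            self.notify = threading.Event()
            self.notify.set()

        def waitEvent(self, delay):
            self.notify.wait(delay)          # returns after delay or when set
            return self.events.pop(0) if self.events else None

    eventq = FakeEventQueue(["TaskStarted", "TaskSucceeded"])
    while True:
        event = eventq.waitEvent(0.25)       # None means "timed out, retry"
        if event is None:
            break                            # a real UI loops until shutdown
        print("handling %s" % event)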
diff --git a/yocto-poky/bitbake/lib/bb/ui/uihelper.py b/yocto-poky/bitbake/lib/bb/ui/uihelper.py
deleted file mode 100644
index db70b763f..000000000
--- a/yocto-poky/bitbake/lib/bb/ui/uihelper.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
-# Copyright (C) 2006 - 2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import bb.build
-
-class BBUIHelper:
- def __init__(self):
- self.needUpdate = False
- self.running_tasks = {}
- # Running PIDs preserves the order tasks were executed in
- self.running_pids = []
- self.failed_tasks = []
- self.tasknumber_current = 0
- self.tasknumber_total = 0
-
- def eventHandler(self, event):
- if isinstance(event, bb.build.TaskStarted):
- self.running_tasks[event.pid] = { 'title' : "%s %s" % (event._package, event._task) }
- self.running_pids.append(event.pid)
- self.needUpdate = True
- if isinstance(event, bb.build.TaskSucceeded):
- del self.running_tasks[event.pid]
- self.running_pids.remove(event.pid)
- self.needUpdate = True
- if isinstance(event, bb.build.TaskFailedSilent):
- del self.running_tasks[event.pid]
- self.running_pids.remove(event.pid)
- # Don't add to the failed tasks list since this is e.g. a setscene task failure
- self.needUpdate = True
- if isinstance(event, bb.build.TaskFailed):
- del self.running_tasks[event.pid]
- self.running_pids.remove(event.pid)
- self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)})
- self.needUpdate = True
- if isinstance(event, bb.runqueue.runQueueTaskStarted) or isinstance(event, bb.runqueue.sceneQueueTaskStarted):
- self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed + 1
- self.tasknumber_total = event.stats.total
- self.needUpdate = True
-
- def getTasks(self):
- self.needUpdate = False
- return (self.running_tasks, self.failed_tasks)
-
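The helper is designed for poll-style front ends: feed every event through eventHandler() and redraw only when needUpdate is set (getTasks() resets the flag). A sketch of that loop, where get_next_event() and redraw() are hypothetical front-end functions:

    # helper = BBUIHelper()
    # while not shutdown_requested:
    #     event = get_next_event()           # e.g. BBUIEventQueue.waitEvent()
    #     if event is None:
    #         continue
    #     helper.eventHandler(event)
    #     if helper.needUpdate:
    #         running, failed = helper.getTasks()   # also clears needUpdate
    #         redraw(running, failed)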
diff --git a/yocto-poky/bitbake/lib/bb/utils.py b/yocto-poky/bitbake/lib/bb/utils.py
deleted file mode 100644
index 3544bbe17..000000000
--- a/yocto-poky/bitbake/lib/bb/utils.py
+++ /dev/null
@@ -1,1453 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake Utility Functions
-"""
-
-# Copyright (C) 2004 Michael Lauer
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import re, fcntl, os, string, stat, shutil, time
-import sys
-import errno
-import logging
-import bb
-import bb.msg
-import multiprocessing
-import fcntl
-import subprocess
-import glob
-import fnmatch
-import traceback
-import errno
-import signal
-import ast
-from commands import getstatusoutput
-from contextlib import contextmanager
-from ctypes import cdll
-
-
-logger = logging.getLogger("BitBake.Util")
-
-def clean_context():
- return {
- "os": os,
- "bb": bb,
- "time": time,
- }
-
-def get_context():
- return _context
-
-
-def set_context(ctx):
- _context = ctx
-
-# Context used in better_exec, eval
-_context = clean_context()
-
-class VersionStringException(Exception):
- """Exception raised when an invalid version specification is found"""
-
-def explode_version(s):
- r = []
- alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$')
- numeric_regexp = re.compile('^(\d+)(.*)$')
- while (s != ''):
- if s[0] in string.digits:
- m = numeric_regexp.match(s)
- r.append((0, int(m.group(1))))
- s = m.group(2)
- continue
- if s[0] in string.letters:
- m = alpha_regexp.match(s)
- r.append((1, m.group(1)))
- s = m.group(2)
- continue
- if s[0] == '~':
- r.append((-1, s[0]))
- else:
- r.append((2, s[0]))
- s = s[1:]
- return r
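For example, explode_version() tags each chunk so that numeric, alphabetic, separator and '~' chunks compare in the intended order:

    # Each tuple is (type, chunk): 0 = numeric, 1 = alphabetic,
    # 2 = separator, -1 = '~' (sorts before everything else).
    print(explode_version("1.2rc3"))
    # -> [(0, 1), (2, '.'), (0, 2), (1, 'rc'), (0, 3)]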
-
-def split_version(s):
- """Split a version string into its constituent parts (PE, PV, PR)"""
- s = s.strip(" <>=")
- e = 0
- if s.count(':'):
- e = int(s.split(":")[0])
- s = s.split(":")[1]
- r = ""
- if s.count('-'):
- r = s.rsplit("-", 1)[1]
- s = s.rsplit("-", 1)[0]
- v = s
- return (e, v, r)
-
-def vercmp_part(a, b):
- va = explode_version(a)
- vb = explode_version(b)
- while True:
- if va == []:
- (oa, ca) = (0, None)
- else:
- (oa, ca) = va.pop(0)
- if vb == []:
- (ob, cb) = (0, None)
- else:
- (ob, cb) = vb.pop(0)
- if (oa, ca) == (0, None) and (ob, cb) == (0, None):
- return 0
- if oa < ob:
- return -1
- elif oa > ob:
- return 1
- elif ca < cb:
- return -1
- elif ca > cb:
- return 1
-
-def vercmp(ta, tb):
- (ea, va, ra) = ta
- (eb, vb, rb) = tb
-
- r = int(ea or 0) - int(eb or 0)
- if (r == 0):
- r = vercmp_part(va, vb)
- if (r == 0):
- r = vercmp_part(ra, rb)
- return r
-
-def vercmp_string(a, b):
- ta = split_version(a)
- tb = split_version(b)
- return vercmp(ta, tb)
-
-def vercmp_string_op(a, b, op):
- """
- Compare two versions and check if the specified comparison operator matches the result of the comparison.
- This function is fairly liberal about what operators it will accept since there are a variety of styles
- depending on the context.
- """
- res = vercmp_string(a, b)
- if op in ('=', '=='):
- return res == 0
- elif op == '<=':
- return res <= 0
- elif op == '>=':
- return res >= 0
- elif op in ('>', '>>'):
- return res > 0
- elif op in ('<', '<<'):
- return res < 0
- elif op == '!=':
- return res != 0
- else:
- raise VersionStringException('Unsupported comparison operator "%s"' % op)
-
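Callers usually reach these through vercmp_string_op(); a few examples of the pieces working together:

    print(split_version("2:1.2.3-r4"))             # -> (2, '1.2.3', 'r4')
    print(vercmp_string_op("1.2.3", "1.2", ">="))  # -> True
    # '~' sorts low, Debian-style: 1.0~rc1 precedes 1.0
    print(vercmp_string_op("1.0~rc1", "1.0", "<")) # -> True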
-def explode_deps(s):
- """
- Take an RDEPENDS style string of format:
- "DEPEND1 (optional version) DEPEND2 (optional version) ..."
- and return a list of dependencies.
- Version information is ignored.
- """
- r = []
- l = s.split()
- flag = False
- for i in l:
- if i[0] == '(':
- flag = True
- #j = []
- if not flag:
- r.append(i)
- #else:
- # j.append(i)
- if flag and i.endswith(')'):
- flag = False
- # Ignore version
- #r[-1] += ' ' + ' '.join(j)
- return r
-
-def explode_dep_versions2(s):
- """
- Take an RDEPENDS style string of format:
- "DEPEND1 (optional version) DEPEND2 (optional version) ..."
- and return a dictionary of dependencies and versions.
- """
- r = {}
- l = s.replace(",", "").split()
- lastdep = None
- lastcmp = ""
- lastver = ""
- incmp = False
- inversion = False
- for i in l:
- if i[0] == '(':
- incmp = True
- i = i[1:].strip()
- if not i:
- continue
-
- if incmp:
- incmp = False
- inversion = True
- # This list is based on behavior and supported comparisons from deb, opkg and rpm.
- #
- # Even though =<, <<, ==, !=, =>, and >> may not be supported,
- # we list each possibly valid item.
- # The build system is responsible for validation of what it supports.
- if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
- lastcmp = i[0:2]
- i = i[2:]
- elif i.startswith(('<', '>', '=')):
- lastcmp = i[0:1]
- i = i[1:]
- else:
- # This is an unsupported case!
- raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
- lastcmp = (i or "")
- i = ""
- i = i.strip()
- if not i:
- continue
-
- if inversion:
- if i.endswith(')'):
- i = i[:-1] or ""
- inversion = False
- if lastver and i:
- lastver += " "
- if i:
- lastver += i
- if lastdep not in r:
- r[lastdep] = []
- r[lastdep].append(lastcmp + " " + lastver)
- continue
-
- #if not inversion:
- lastdep = i
- lastver = ""
- lastcmp = ""
- if not (i in r and r[i]):
- r[lastdep] = []
-
- return r
-
-def explode_dep_versions(s):
- r = explode_dep_versions2(s)
- for d in r:
- if not r[d]:
- r[d] = None
- continue
- if len(r[d]) > 1:
- bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values. explode_dep_versions cannot cope with this." % (d, s))
- r[d] = r[d][0]
- return r
-
-def join_deps(deps, commasep=True):
- """
- Take the result from explode_dep_versions and generate a dependency string
- """
- result = []
- for dep in deps:
- if deps[dep]:
- if isinstance(deps[dep], list):
- for v in deps[dep]:
- result.append(dep + " (" + v + ")")
- else:
- result.append(dep + " (" + deps[dep] + ")")
- else:
- result.append(dep)
- if commasep:
- return ", ".join(result)
- else:
- return " ".join(result)
-
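For example, explode_dep_versions2() and join_deps() round-trip a dependency string (dictionary ordering may vary):

    deps = explode_dep_versions2("foo (>= 1.2) bar")
    print(deps)              # -> {'foo': ['>= 1.2'], 'bar': []}
    print(join_deps(deps))   # -> "foo (>= 1.2), bar"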
-def _print_trace(body, line):
- """
- Print the Environment of a Text Body
- """
- error = []
- # print the environment of the method
- min_line = max(1, line-4)
- max_line = min(line + 4, len(body))
- for i in range(min_line, max_line + 1):
- if line == i:
- error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
- else:
- error.append(' %.4d:%s' % (i, body[i-1].rstrip()))
- return error
-
-def better_compile(text, file, realfile, mode = "exec", lineno = 0):
- """
- A better compile method. This method
- will print the offending lines.
- """
- try:
- cache = bb.methodpool.compile_cache(text)
- if cache:
- return cache
- # We can't adjust the line numbers for compile(), but we can pad with blank lines so the numbering matches
- text2 = "\n" * int(lineno) + text
- code = compile(text2, realfile, mode)
- bb.methodpool.compile_cache_add(text, code)
- return code
- except Exception as e:
- error = []
- # split the text into lines again
- body = text.split('\n')
- error.append("Error in compiling python function in %s, line %s:\n" % (realfile, lineno))
- if hasattr(e, "lineno"):
- error.append("The code lines resulting in this error were:")
- error.extend(_print_trace(body, e.lineno))
- else:
- error.append("The function causing this error was:")
- for line in body:
- error.append(line)
- error.append("%s: %s" % (e.__class__.__name__, str(e)))
-
- logger.error("\n".join(error))
-
- e = bb.BBHandledException(e)
- raise e
-
-def _print_exception(t, value, tb, realfile, text, context):
- error = []
- try:
- exception = traceback.format_exception_only(t, value)
- error.append('Error executing a python function in %s:\n' % realfile)
-
- # Strip 'us' from the stack (better_exec call) unless that was where the
- # error came from
- if tb.tb_next is not None:
- tb = tb.tb_next
-
- textarray = text.split('\n')
-
- linefailed = tb.tb_lineno
-
- tbextract = traceback.extract_tb(tb)
- tbformat = traceback.format_list(tbextract)
- error.append("The stack trace of python calls that resulted in this exception/failure was:")
- error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
- error.extend(_print_trace(textarray, linefailed))
-
- # See if this is a function we constructed and has calls back into other functions in
- # "text". If so, try and improve the context of the error by diving down the trace
- level = 0
- nexttb = tb.tb_next
- while nexttb is not None and (level+1) < len(tbextract):
- error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
- if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
- # The code was possibly in the string we compiled ourselves
- error.extend(_print_trace(textarray, tbextract[level+1][1]))
- elif tbextract[level+1][0].startswith("/"):
- # The code looks like it might be in a file, try and load it
- try:
- with open(tbextract[level+1][0], "r") as f:
- text = f.readlines()
- error.extend(_print_trace(text, tbextract[level+1][1]))
- except:
- error.append(tbformat[level+1])
- else:
- error.append(tbformat[level+1])
- nexttb = tb.tb_next
- level = level + 1
-
- error.append("Exception: %s" % ''.join(exception))
- finally:
- logger.error("\n".join(error))
-
-def better_exec(code, context, text = None, realfile = "<code>", pythonexception=False):
- """
- Similar to better_compile, better_exec will
- print the lines that are responsible for the
- error.
- """
- import bb.parse
- if not text:
- text = code
- if not hasattr(code, "co_filename"):
- code = better_compile(code, realfile, realfile)
- try:
- exec(code, get_context(), context)
- except (bb.BBHandledException, bb.parse.SkipRecipe, bb.build.FuncFailed, bb.data_smart.ExpansionError):
- # Error already shown so passthrough, no need for traceback
- raise
- except Exception as e:
- if pythonexception:
- raise
- (t, value, tb) = sys.exc_info()
- try:
- _print_exception(t, value, tb, realfile, text, context)
- except Exception as e:
- logger.error("Exception handler error: %s" % str(e))
-
- e = bb.BBHandledException(e)
- raise e
-
-def simple_exec(code, context):
- exec(code, get_context(), context)
-
-def better_eval(source, locals):
- return eval(source, get_context(), locals)
-
-@contextmanager
-def fileslocked(files):
- """Context manager for locking and unlocking file locks."""
- locks = []
- if files:
- for lockfile in files:
- locks.append(bb.utils.lockfile(lockfile))
-
- yield
-
- for lock in locks:
- bb.utils.unlockfile(lock)
-
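Usage is a plain with-block; note that in this version the unlock calls are skipped if the body raises, since the yield is not wrapped in try/finally. A minimal sketch, assuming a bitbake environment where bb.utils is importable:

    # Both lock files are held for the duration of the block.
    with bb.utils.fileslocked(["/tmp/demo1.lock", "/tmp/demo2.lock"]):
        pass  # critical section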
-@contextmanager
-def timeout(seconds):
- def timeout_handler(signum, frame):
- pass
-
- original_handler = signal.signal(signal.SIGALRM, timeout_handler)
-
- try:
- signal.alarm(seconds)
- yield
- finally:
- signal.alarm(0)
- signal.signal(signal.SIGALRM, original_handler)
-
-def lockfile(name, shared=False, retry=True, block=False):
- """
- Use the specified file as a lock file, return when the lock has
- been acquired. Returns a variable to pass to unlockfile().
- Parameters:
- retry: True to re-try locking if it fails, False otherwise
- block: True to block until the lock succeeds, False otherwise
- The retry and block parameters are kind of equivalent unless you
- consider the possibility of sending a signal to the process to break
- out - at which point you want block=True rather than retry=True.
- """
- dirname = os.path.dirname(name)
- mkdirhier(dirname)
-
- if not os.access(dirname, os.W_OK):
- logger.error("Unable to acquire lock '%s', directory is not writable",
- name)
- sys.exit(1)
-
- op = fcntl.LOCK_EX
- if shared:
- op = fcntl.LOCK_SH
- if not retry and not block:
- op = op | fcntl.LOCK_NB
-
- while True:
- # If we leave the lockfiles lying around there is no problem
- # but we should clean up after ourselves. This gives potential
- # for races though. To work around this, when we acquire the lock
- # we check the file we locked was still the lock file on disk.
- # by comparing inode numbers. If they don't match or the lockfile
- # no longer exists, we start again.
-
- # This implementation is unfair since the last person to request the
- # lock is the most likely to win it.
-
- try:
- lf = open(name, 'a+')
- fileno = lf.fileno()
- fcntl.flock(fileno, op)
- statinfo = os.fstat(fileno)
- if os.path.exists(lf.name):
- statinfo2 = os.stat(lf.name)
- if statinfo.st_ino == statinfo2.st_ino:
- return lf
- lf.close()
- except Exception:
- try:
- lf.close()
- except Exception:
- pass
- pass
- if not retry:
- return None
-
-def unlockfile(lf):
- """
- Unlock a file locked using lockfile()
- """
- try:
- # If we had a shared lock, we need to promote to exclusive before
- # removing the lockfile. Attempt this, ignore failures.
- fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
- os.unlink(lf.name)
- except (IOError, OSError):
- pass
- fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
- lf.close()
-
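A typical pairing of lockfile()/unlockfile(), again assuming bb.utils is importable; the second call shows the non-blocking form, which returns None if the lock is already held elsewhere:

    lock = bb.utils.lockfile("/tmp/demo.lock")      # blocks until acquired
    try:
        pass  # critical section
    finally:
        bb.utils.unlockfile(lock)

    lock = bb.utils.lockfile("/tmp/demo.lock", retry=False, block=False)
    if lock is not None:                            # None: lock held elsewhere
        bb.utils.unlockfile(lock)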
-def md5_file(filename):
- """
- Return the hex string representation of the MD5 checksum of filename.
- """
- try:
- import hashlib
- m = hashlib.md5()
- except ImportError:
- import md5
- m = md5.new()
-
- with open(filename, "rb") as f:
- for line in f:
- m.update(line)
- return m.hexdigest()
-
-def sha256_file(filename):
- """
- Return the hex string representation of the 256-bit SHA checksum of
- filename. On Python 2.4 this will return None, so callers will need to
- handle that by either skipping SHA checks, or running a standalone sha256sum
- binary.
- """
- try:
- import hashlib
- except ImportError:
- return None
-
- s = hashlib.sha256()
- with open(filename, "rb") as f:
- for line in f:
- s.update(line)
- return s.hexdigest()
-
-def sha1_file(filename):
- """
- Return the hex string representation of the SHA1 checksum of the filename
- """
- try:
- import hashlib
- except ImportError:
- return None
-
- s = hashlib.sha1()
- with open(filename, "rb") as f:
- for line in f:
- s.update(line)
- return s.hexdigest()
-
-def preserved_envvars_exported():
- """Variables which are taken from the environment and placed in and exported
- from the metadata"""
- return [
- 'BB_TASKHASH',
- 'HOME',
- 'LOGNAME',
- 'PATH',
- 'PWD',
- 'SHELL',
- 'TERM',
- 'USER',
- ]
-
-def preserved_envvars():
- """Variables which are taken from the environment and placed in the metadata"""
- v = [
- 'BBPATH',
- 'BB_PRESERVE_ENV',
- 'BB_ENV_WHITELIST',
- 'BB_ENV_EXTRAWHITE',
- ]
- return v + preserved_envvars_exported()
-
-def filter_environment(good_vars):
- """
- Create a pristine environment for bitbake. This will remove variables that
- are not known and may influence the build in a negative way.
- """
-
- removed_vars = {}
- for key in os.environ.keys():
- if key in good_vars:
- continue
-
- removed_vars[key] = os.environ[key]
- os.unsetenv(key)
- del os.environ[key]
-
- if removed_vars:
- logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))
-
- return removed_vars
-
-def approved_variables():
- """
- Determine and return the list of whitelisted variables which are approved
- to remain in the environment.
- """
- if 'BB_PRESERVE_ENV' in os.environ:
- return os.environ.keys()
- approved = []
- if 'BB_ENV_WHITELIST' in os.environ:
- approved = os.environ['BB_ENV_WHITELIST'].split()
- approved.extend(['BB_ENV_WHITELIST'])
- else:
- approved = preserved_envvars()
- if 'BB_ENV_EXTRAWHITE' in os.environ:
- approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
- if 'BB_ENV_EXTRAWHITE' not in approved:
- approved.extend(['BB_ENV_EXTRAWHITE'])
- return approved
-
-def clean_environment():
- """
- Clean up any spurious environment variables. This will remove any
- variables the user hasn't chosen to preserve.
- """
- if 'BB_PRESERVE_ENV' not in os.environ:
- good_vars = approved_variables()
- return filter_environment(good_vars)
-
- return {}
-
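The three controls interact as follows: BB_PRESERVE_ENV keeps everything, BB_ENV_WHITELIST replaces the default variable list, and BB_ENV_EXTRAWHITE extends it. For example, to let a custom variable through (a sketch, assuming these functions are in scope):

    import os
    os.environ["BB_ENV_EXTRAWHITE"] = "MYVAR"   # extend the default list
    os.environ["MYVAR"] = "value"
    print("MYVAR" in approved_variables())      # -> True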
-def empty_environment():
- """
- Remove all variables from the environment.
- """
- for s in os.environ.keys():
- os.unsetenv(s)
- del os.environ[s]
-
-def build_environment(d):
- """
- Build an environment from all exported variables.
- """
- import bb.data
- for var in bb.data.keys(d):
- export = d.getVarFlag(var, "export", False)
- if export:
- os.environ[var] = d.getVar(var, True) or ""
-
-def _check_unsafe_delete_path(path):
- """
- Basic safeguard against recursively deleting something we shouldn't. If it returns True,
- the caller should raise an exception with an appropriate message.
- NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
- with potentially disastrous results.
- """
- extra = ''
- # HOME might not be /home/something, so in case we can get it, check against it
- homedir = os.environ.get('HOME', '')
- if homedir:
- extra = '|%s' % homedir
- if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
- return True
- return False
-
-def remove(path, recurse=False):
- """Equivalent to rm -f or rm -rf"""
- if not path:
- return
- if recurse:
- for name in glob.glob(path):
- if _check_unsafe_delete_path(path):
- raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
- # shutil.rmtree(name) would be ideal but its too slow
- subprocess.call(['rm', '-rf'] + glob.glob(path))
- return
- for name in glob.glob(path):
- try:
- os.unlink(name)
- except OSError as exc:
- if exc.errno != errno.ENOENT:
- raise
-
-def prunedir(topdir):
- # Delete everything reachable from the directory named in 'topdir'.
- # CAUTION: This is dangerous!
- if _check_unsafe_delete_path(topdir):
- raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
- for root, dirs, files in os.walk(topdir, topdown = False):
- for name in files:
- os.remove(os.path.join(root, name))
- for name in dirs:
- if os.path.islink(os.path.join(root, name)):
- os.remove(os.path.join(root, name))
- else:
- os.rmdir(os.path.join(root, name))
- os.rmdir(topdir)
-
-#
-# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
-# but that's possibly insane and suffixes is probably going to be small
-#
-def prune_suffix(var, suffixes, d):
- # See if var ends with any of the suffixes listed and
- # remove it if found
- for suffix in suffixes:
- if var.endswith(suffix):
- return var.replace(suffix, "")
- return var
-
-def mkdirhier(directory):
- """Create a directory like 'mkdir -p', but does not complain if
- directory already exists like os.makedirs
- """
-
- try:
- os.makedirs(directory)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise e
-
-def movefile(src, dest, newmtime = None, sstat = None):
- """Moves a file from src to dest, preserving all permissions and
- attributes; mtime will be preserved even when moving across
- filesystems. Returns true on success and false on failure. Move is
- atomic.
- """
-
- #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
- try:
- if not sstat:
- sstat = os.lstat(src)
- except Exception as e:
- print("movefile: Stating source file failed...", e)
- return None
-
- destexists = 1
- try:
- dstat = os.lstat(dest)
- except:
- dstat = os.lstat(os.path.dirname(dest))
- destexists = 0
-
- if destexists:
- if stat.S_ISLNK(dstat[stat.ST_MODE]):
- try:
- os.unlink(dest)
- destexists = 0
- except Exception as e:
- pass
-
- if stat.S_ISLNK(sstat[stat.ST_MODE]):
- try:
- target = os.readlink(src)
- if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
- os.unlink(dest)
- os.symlink(target, dest)
- #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
- os.unlink(src)
- return os.lstat(dest)
- except Exception as e:
- print("movefile: failed to properly create symlink:", dest, "->", target, e)
- return None
-
- renamefailed = 1
- if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
- try:
- # os.rename needs to know the dest path ending with file name
- # so append the file name to a path only if it's a dir specified
- srcfname = os.path.basename(src)
- destpath = os.path.join(dest, srcfname) if os.path.isdir(dest) \
- else dest
- os.rename(src, destpath)
- renamefailed = 0
- except Exception as e:
- if e[0] != errno.EXDEV:
- # Some random error.
- print("movefile: Failed to move", src, "to", dest, e)
- return None
- # Invalid cross-device-link 'bind' mounted or actually Cross-Device
-
- if renamefailed:
- didcopy = 0
- if stat.S_ISREG(sstat[stat.ST_MODE]):
- try: # For safety copy then move it over.
- shutil.copyfile(src, dest + "#new")
- os.rename(dest + "#new", dest)
- didcopy = 1
- except Exception as e:
- print('movefile: copy', src, '->', dest, 'failed.', e)
- return None
- else:
- # we don't yet handle special files, so we need to fall back to /bin/mv
- a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
- if a[0] != 0:
- print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
- return None # failure
- try:
- if didcopy:
- os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
- os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
- os.unlink(src)
- except Exception as e:
- print("movefile: Failed to chown/chmod/unlink", dest, e)
- return None
-
- if newmtime:
- os.utime(dest, (newmtime, newmtime))
- else:
- os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
- newmtime = sstat[stat.ST_MTIME]
- return newmtime
-
-def copyfile(src, dest, newmtime = None, sstat = None):
- """
- Copies a file from src to dest, preserving all permissions and
- attributes; mtime will be preserved even when moving across
- filesystems. Returns true on success and false on failure.
- """
- #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
- try:
- if not sstat:
- sstat = os.lstat(src)
- except Exception as e:
- logger.warn("copyfile: stat of %s failed (%s)" % (src, e))
- return False
-
- destexists = 1
- try:
- dstat = os.lstat(dest)
- except:
- dstat = os.lstat(os.path.dirname(dest))
- destexists = 0
-
- if destexists:
- if stat.S_ISLNK(dstat[stat.ST_MODE]):
- try:
- os.unlink(dest)
- destexists = 0
- except Exception as e:
- pass
-
- if stat.S_ISLNK(sstat[stat.ST_MODE]):
- try:
- target = os.readlink(src)
- if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
- os.unlink(dest)
- os.symlink(target, dest)
- #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
- return os.lstat(dest)
- except Exception as e:
- logger.warn("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
- return False
-
- if stat.S_ISREG(sstat[stat.ST_MODE]):
- try:
- srcchown = False
- if not os.access(src, os.R_OK):
- # Make sure we can read it
- srcchown = True
- os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)
-
- # For safety copy then move it over.
- shutil.copyfile(src, dest + "#new")
- os.rename(dest + "#new", dest)
- except Exception as e:
- logger.warn("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
- return False
- finally:
- if srcchown:
- os.chmod(src, sstat[stat.ST_MODE])
- os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
-
- else:
- # we don't yet handle special files, so we need to fall back to /bin/cp
- a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
- if a[0] != 0:
- logger.warn("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
- return False # failure
- try:
- os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
- os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
- except Exception as e:
- logger.warn("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
- return False
-
- if newmtime:
- os.utime(dest, (newmtime, newmtime))
- else:
- os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
- newmtime = sstat[stat.ST_MTIME]
- return newmtime
-
-def which(path, item, direction = 0, history = False):
- """
- Locate a file in a PATH
- """
-
- hist = []
- paths = (path or "").split(':')
- if direction != 0:
- paths.reverse()
-
- for p in paths:
- next = os.path.join(p, item)
- hist.append(next)
- if os.path.exists(next):
- if not os.path.isabs(next):
- next = os.path.abspath(next)
- if history:
- return next, hist
- return next
-
- if history:
- return "", hist
- return ""
-
-def to_boolean(string, default=None):
- if not string:
- return default
-
- normalized = string.lower()
- if normalized in ("y", "yes", "1", "true"):
- return True
- elif normalized in ("n", "no", "0", "false"):
- return False
- else:
- raise ValueError("Invalid value for to_boolean: %s" % string)
-
-def contains(variable, checkvalues, truevalue, falsevalue, d):
- """Check if a variable contains all the values specified.
-
- Arguments:
-
- variable -- the variable name. This will be fetched and expanded (using
- d.getVar(variable, True)) and then split into a set().
-
- checkvalues -- if this is a string it is split on whitespace into a set(),
- otherwise coerced directly into a set().
-
- truevalue -- the value to return if checkvalues is a subset of variable.
-
- falsevalue -- the value to return if variable is empty or if checkvalues is
- not a subset of variable.
-
- d -- the data store.
- """
-
- val = d.getVar(variable, True)
- if not val:
- return falsevalue
- val = set(val.split())
- if isinstance(checkvalues, basestring):
- checkvalues = set(checkvalues.split())
- else:
- checkvalues = set(checkvalues)
- if checkvalues.issubset(val):
- return truevalue
- return falsevalue
-
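This is the helper behind the common ${@bb.utils.contains(...)} idiom in recipes. A runnable sketch with a minimal stand-in datastore (FakeData is hypothetical, only for illustration):

    class FakeData(object):
        """Minimal stand-in for the datastore: only getVar() is needed."""
        def getVar(self, var, expand=True):
            return "alsa x11 opengl"

    d = FakeData()
    # Recipe idiom: ${@bb.utils.contains('DISTRO_FEATURES', 'x11',
    #                                    '--with-x', '--without-x', d)}
    print(contains("DISTRO_FEATURES", "x11", "--with-x", "--without-x", d))
    # -> --with-x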
-def contains_any(variable, checkvalues, truevalue, falsevalue, d):
- val = d.getVar(variable, True)
- if not val:
- return falsevalue
- val = set(val.split())
- if isinstance(checkvalues, basestring):
- checkvalues = set(checkvalues.split())
- else:
- checkvalues = set(checkvalues)
- if checkvalues & val:
- return truevalue
- return falsevalue
-
-def cpu_count():
- return multiprocessing.cpu_count()
-
-def nonblockingfd(fd):
- fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
-
-def process_profilelog(fn, pout = None):
- # Either call with a list of filenames and set pout or a filename and optionally pout.
- if not pout:
- pout = fn + '.processed'
- pout = open(pout, 'w')
-
- import pstats
- if isinstance(fn, list):
- p = pstats.Stats(*fn, stream=pout)
- else:
- p = pstats.Stats(fn, stream=pout)
- p.sort_stats('time')
- p.print_stats()
- p.print_callers()
- p.sort_stats('cumulative')
- p.print_stats()
-
- pout.flush()
- pout.close()
-
-#
-# Was present to work around multiprocessing pool bugs in python < 2.7.3
-#
-def multiprocessingpool(*args, **kwargs):
-
- import multiprocessing.pool
- #import multiprocessing.util
- #multiprocessing.util.log_to_stderr(10)
- # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
- # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
- def wrapper(func):
- def wrap(self, timeout=None):
- return func(self, timeout=timeout if timeout is not None else 1e100)
- return wrap
- multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)
-
- return multiprocessing.Pool(*args, **kwargs)
-
-def exec_flat_python_func(func, *args, **kwargs):
- """Execute a flat python function (defined with def funcname(args):...)"""
- # Prepare a small piece of python code which calls the requested function
- # To do this we need to prepare two things - a set of variables we can use to pass
- # the values of arguments into the calling function, and the list of arguments for
- # the function being called
- context = {}
- funcargs = []
- # Handle unnamed arguments
- aidx = 1
- for arg in args:
- argname = 'arg_%s' % aidx
- context[argname] = arg
- funcargs.append(argname)
- aidx += 1
- # Handle keyword arguments
- context.update(kwargs)
- funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.iterkeys()])
- code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
- comp = bb.utils.better_compile(code, '<string>', '<string>')
- bb.utils.better_exec(comp, context, code, '<string>')
- return context['retval']
-
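For example (a sketch assuming a bitbake environment, since better_compile/better_exec use the method pool), a builtin can be called by its flat name:

    print(bb.utils.exec_flat_python_func("max", 3, 7))   # -> 7
    print(bb.utils.exec_flat_python_func("sorted", [3, 1, 2], reverse=True))
    # -> [3, 2, 1]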
-def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
- """Edit lines from a recipe or config file and modify one or more
- specified variable values set in the file using a specified callback
- function. Lines are expected to have trailing newlines.
- Parameters:
- meta_lines: lines from the file; can be a list or an iterable
- (e.g. file pointer)
- variables: a list of variable names to look for. Functions
- may also be specified, but must be specified with '()' at
- the end of the name. Note that the function doesn't have
- any intrinsic understanding of _append, _prepend, _remove,
- or overrides, so these are considered as part of the name.
- These values go into a regular expression, so regular
- expression syntax is allowed.
- varfunc: callback function called for every variable matching
- one of the entries in the variables parameter. The function
- should take four arguments:
- varname: name of variable matched
- origvalue: current value in file
- op: the operator (e.g. '+=')
- newlines: list of lines up to this point. You can use
- this to prepend lines before this variable setting
- if you wish.
- and should return a three-element tuple:
- newvalue: new value to substitute in, or None to drop
- the variable setting entirely. (If the removal
- results in two consecutive blank lines, one of the
- blank lines will also be dropped).
- newop: the operator to use - if you specify None here,
- the original operation will be used.
- indent: number of spaces to indent multi-line entries,
- or -1 to indent up to the level of the assignment
- and opening quote, or a string to use as the indent.
- minbreak: True to allow the first element of a
- multi-line value to continue on the same line as
- the assignment, False to indent before the first
- element.
- match_overrides: True to match items with _overrides on the end,
- False otherwise
- Returns a tuple:
- updated:
- True if changes were made, False otherwise.
- newlines:
- Lines after processing
- """
-
- var_res = {}
- if match_overrides:
- override_re = '(_[a-zA-Z0-9-_$(){}]+)?'
- else:
- override_re = ''
- for var in variables:
- if var.endswith('()'):
- var_res[var] = re.compile('^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
- else:
- var_res[var] = re.compile('^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))
-
- updated = False
- varset_start = ''
- varlines = []
- newlines = []
- in_var = None
- full_value = ''
- var_end = ''
-
- def handle_var_end():
- prerun_newlines = newlines[:]
- op = varset_start[len(in_var):].strip()
- (newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
- changed = (prerun_newlines != newlines)
-
- if newvalue is None:
- # Drop the value
- return True
- elif newvalue != full_value or (newop not in [None, op]):
- if newop not in [None, op]:
- # Callback changed the operator
- varset_new = "%s %s" % (in_var, newop)
- else:
- varset_new = varset_start
-
- if isinstance(indent, (int, long)):
- if indent == -1:
- indentspc = ' ' * (len(varset_new) + 2)
- else:
- indentspc = ' ' * indent
- else:
- indentspc = indent
- if in_var.endswith('()'):
- # A function definition
- if isinstance(newvalue, list):
- newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
- else:
- if not newvalue.startswith('\n'):
- newvalue = '\n' + newvalue
- if not newvalue.endswith('\n'):
- newvalue = newvalue + '\n'
- newlines.append('%s {%s}\n' % (varset_new, newvalue))
- else:
- # Normal variable
- if isinstance(newvalue, list):
- if not newvalue:
- # Empty list -> empty string
- newlines.append('%s ""\n' % varset_new)
- elif minbreak:
- # First item on first line
- if len(newvalue) == 1:
- newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
- else:
- newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
- for item in newvalue[1:]:
- newlines.append('%s%s \\\n' % (indentspc, item))
- newlines.append('%s"\n' % indentspc)
- else:
- # No item on first line
- newlines.append('%s " \\\n' % varset_new)
- for item in newvalue:
- newlines.append('%s%s \\\n' % (indentspc, item))
- newlines.append('%s"\n' % indentspc)
- else:
- newlines.append('%s "%s"\n' % (varset_new, newvalue))
- return True
- else:
- # Put the old lines back where they were
- newlines.extend(varlines)
- # If newlines was touched by the function, we'll need to return True
- return changed
-
- checkspc = False
-
- for line in meta_lines:
- if in_var:
- value = line.rstrip()
- varlines.append(line)
- if in_var.endswith('()'):
- full_value += '\n' + value
- else:
- full_value += value[:-1]
- if value.endswith(var_end):
- if in_var.endswith('()'):
- if full_value.count('{') - full_value.count('}') >= 0:
- continue
- full_value = full_value[:-1]
- if handle_var_end():
- updated = True
- checkspc = True
- in_var = None
- else:
- skip = False
- for (varname, var_re) in var_res.iteritems():
- res = var_re.match(line)
- if res:
- isfunc = varname.endswith('()')
- if isfunc:
- splitvalue = line.split('{', 1)
- var_end = '}'
- else:
- var_end = res.groups()[-1]
- splitvalue = line.split(var_end, 1)
- varset_start = splitvalue[0].rstrip()
- value = splitvalue[1].rstrip()
- if not isfunc and value.endswith('\\'):
- value = value[:-1]
- full_value = value
- varlines = [line]
- in_var = res.group(1)
- if isfunc:
- in_var += '()'
- if value.endswith(var_end):
- full_value = full_value[:-1]
- if handle_var_end():
- updated = True
- checkspc = True
- in_var = None
- skip = True
- break
- if not skip:
- if checkspc:
- checkspc = False
- if newlines and newlines[-1] == '\n' and line == '\n':
- # Squash blank line if there are two consecutive blanks after a removal
- continue
- newlines.append(line)
- return (updated, newlines)
-
-
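A concrete varfunc makes the callback contract easier to see; this sketch upper-cases the value of DESCRIPTION and leaves other variables untouched:

    lines = ['DESCRIPTION = "a demo recipe"\n', 'LICENSE = "MIT"\n']

    def upcase_description(varname, origvalue, op, newlines):
        # Return (newvalue, newop, indent, minbreak); newop=None keeps '='.
        return (origvalue.upper(), None, -1, True)

    updated, newlines = edit_metadata(lines, ["DESCRIPTION"],
                                      upcase_description)
    print(updated)       # -> True
    print(newlines[0])   # -> DESCRIPTION = "A DEMO RECIPE"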
-def edit_metadata_file(meta_file, variables, varfunc):
- """Edit a recipe or config file and modify one or more specified
- variable values set in the file using a specified callback function.
- The file is only written to if the value(s) actually change.
- This is basically the file version of edit_metadata(), see that
- function's description for parameter/usage information.
- Returns True if the file was written to, False otherwise.
- """
- with open(meta_file, 'r') as f:
- (updated, newlines) = edit_metadata(f, variables, varfunc)
- if updated:
- with open(meta_file, 'w') as f:
- f.writelines(newlines)
- return updated
-
-
-def edit_bblayers_conf(bblayers_conf, add, remove):
- """Edit bblayers.conf, adding and/or removing layers
- Parameters:
- bblayers_conf: path to bblayers.conf file to edit
- add: layer path (or list of layer paths) to add; None or empty
- list to add nothing
- remove: layer path (or list of layer paths) to remove; None or
- empty list to remove nothing
- Returns a tuple:
- notadded: list of layers specified to be added but weren't
- (because they were already in the list)
- notremoved: list of layers that were specified to be removed
- but weren't (because they weren't in the list)
- """
-
- import fnmatch
-
- def remove_trailing_sep(pth):
- if pth and pth[-1] == os.sep:
- pth = pth[:-1]
- return pth
-
- approved = bb.utils.approved_variables()
- def canonicalise_path(pth):
- pth = remove_trailing_sep(pth)
- if 'HOME' in approved and '~' in pth:
- pth = os.path.expanduser(pth)
- return pth
-
- def layerlist_param(value):
- if not value:
- return []
- elif isinstance(value, list):
- return [remove_trailing_sep(x) for x in value]
- else:
- return [remove_trailing_sep(value)]
-
- addlayers = layerlist_param(add)
- removelayers = layerlist_param(remove)
-
- # Need to use a list here because we can't set non-local variables from a callback in python 2.x
- bblayercalls = []
- removed = []
- plusequals = False
- orig_bblayers = []
-
- def handle_bblayers_firstpass(varname, origvalue, op, newlines):
- bblayercalls.append(op)
- if op == '=':
- del orig_bblayers[:]
- orig_bblayers.extend([canonicalise_path(x) for x in origvalue.split()])
- return (origvalue, None, 2, False)
-
- def handle_bblayers(varname, origvalue, op, newlines):
- updated = False
- bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
- if removelayers:
- for removelayer in removelayers:
- for layer in bblayers:
- if fnmatch.fnmatch(canonicalise_path(layer), canonicalise_path(removelayer)):
- updated = True
- bblayers.remove(layer)
- removed.append(removelayer)
- break
- if addlayers and not plusequals:
- for addlayer in addlayers:
- if addlayer not in bblayers:
- updated = True
- bblayers.append(addlayer)
- del addlayers[:]
-
- if updated:
- if op == '+=' and not bblayers:
- bblayers = None
- return (bblayers, None, 2, False)
- else:
- return (origvalue, None, 2, False)
-
- with open(bblayers_conf, 'r') as f:
- (_, newlines) = edit_metadata(f, ['BBLAYERS'], handle_bblayers_firstpass)
-
- if not bblayercalls:
- raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)
-
- # Try to do the "smart" thing depending on how the user has laid out
- # their bblayers.conf file
- if bblayercalls.count('+=') > 1:
- plusequals = True
-
- removelayers_canon = [canonicalise_path(layer) for layer in removelayers]
- notadded = []
- for layer in addlayers:
- layer_canon = canonicalise_path(layer)
- if layer_canon in orig_bblayers and not layer_canon in removelayers_canon:
- notadded.append(layer)
- notadded_canon = [canonicalise_path(layer) for layer in notadded]
- addlayers[:] = [layer for layer in addlayers if canonicalise_path(layer) not in notadded_canon]
-
- (updated, newlines) = edit_metadata(newlines, ['BBLAYERS'], handle_bblayers)
- if addlayers:
- # Still need to add these
- for addlayer in addlayers:
- newlines.append('BBLAYERS += "%s"\n' % addlayer)
- updated = True
-
- if updated:
- with open(bblayers_conf, 'w') as f:
- f.writelines(newlines)
-
- notremoved = list(set(removelayers) - set(removed))
-
- return (notadded, notremoved)
-
-
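Typical usage adds and removes layers in one pass; the return value reports what was already present or already absent. A sketch with hypothetical paths:

    notadded, notremoved = bb.utils.edit_bblayers_conf(
        "conf/bblayers.conf",
        add="/srv/layers/meta-demo",     # layer(s) to append to BBLAYERS
        remove="/srv/layers/meta-old")   # layer(s) to drop; globs allowed
    print(notadded, notremoved)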
-def get_file_layer(filename, d):
- """Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
- collections = (d.getVar('BBFILE_COLLECTIONS', True) or '').split()
- collection_res = {}
- for collection in collections:
- collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection, True) or ''
-
- def path_to_layer(path):
- # Use longest path so we handle nested layers
- matchlen = 0
- match = None
- for collection, regex in collection_res.iteritems():
- if len(regex) > matchlen and re.match(regex, path):
- matchlen = len(regex)
- match = collection
- return match
-
- result = None
- bbfiles = (d.getVar('BBFILES', True) or '').split()
- bbfilesmatch = False
- for bbfilesentry in bbfiles:
- if fnmatch.fnmatch(filename, bbfilesentry):
- bbfilesmatch = True
- result = path_to_layer(bbfilesentry)
-
- if not bbfilesmatch:
- # Probably a bbclass
- result = path_to_layer(filename)
-
- return result
-
-
-# Constant taken from http://linux.die.net/include/linux/prctl.h
-PR_SET_PDEATHSIG = 1
-
-class PrCtlError(Exception):
- pass
-
-def signal_on_parent_exit(signame):
- """
- Trigger signame to be sent when the parent process dies
- """
- signum = getattr(signal, signame)
- # http://linux.die.net/man/2/prctl
- result = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
- if result != 0:
- raise PrCtlError('prctl failed with error code %s' % result)
-
-#
-# Manually call the ioprio syscall. We could depend on other libs like psutil
-# however this gets us enough of what we need to bitbake for now without the
-# dependency
-#
-_unamearch = os.uname()[4]
-IOPRIO_WHO_PROCESS = 1
-IOPRIO_CLASS_SHIFT = 13
-
-def ioprio_set(who, cls, value):
- NR_ioprio_set = None
- if _unamearch == "x86_64":
- NR_ioprio_set = 251
- elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
- NR_ioprio_set = 289
-
- if NR_ioprio_set:
- ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
- rc = cdll['libc.so.6'].syscall(NR_ioprio_set, IOPRIO_WHO_PROCESS, who, ioprio)
- if rc != 0:
- raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
- else:
- bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
-
-def set_process_name(name):
- from ctypes import cdll, byref, create_string_buffer
- # This is nice to have for debugging, not essential
- try:
- libc = cdll.LoadLibrary('libc.so.6')
- buff = create_string_buffer(len(name)+1)
- buff.value = name
- libc.prctl(15, byref(buff), 0, 0, 0)
- except:
- pass
-
-# export common proxies variables from datastore to environment
-def export_proxies(d):
- import os
-
- variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
- 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY']
- exported = False
-
- for v in variables:
- if v in os.environ.keys():
- exported = True
- else:
- v_proxy = d.getVar(v, True)
- if v_proxy is not None:
- os.environ[v] = v_proxy
- exported = True
-
- return exported