Diffstat (limited to 'poky/bitbake/lib/bb')
-rw-r--r--  poky/bitbake/lib/bb/build.py                      |    3
-rw-r--r--  poky/bitbake/lib/bb/cache.py                      |    2
-rw-r--r--  poky/bitbake/lib/bb/codeparser.py                 |    9
-rw-r--r--  poky/bitbake/lib/bb/cooker.py                     |   55
-rw-r--r--  poky/bitbake/lib/bb/cookerdata.py                 |    4
-rw-r--r--  poky/bitbake/lib/bb/data.py                       |    4
-rw-r--r--  poky/bitbake/lib/bb/data_smart.py                 |   20
-rw-r--r--  poky/bitbake/lib/bb/fetch2/__init__.py            |   28
-rw-r--r--  poky/bitbake/lib/bb/fetch2/git.py                 |   22
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gitsm.py               |   20
-rw-r--r--  poky/bitbake/lib/bb/fetch2/hg.py                  |    2
-rw-r--r--  poky/bitbake/lib/bb/fetch2/npm.py                 |    2
-rw-r--r--  poky/bitbake/lib/bb/fetch2/wget.py                |   77
-rwxr-xr-x  poky/bitbake/lib/bb/main.py                       |    5
-rw-r--r--  poky/bitbake/lib/bb/monitordisk.py                |   12
-rw-r--r--  poky/bitbake/lib/bb/parse/ast.py                  |    2
-rw-r--r--  poky/bitbake/lib/bb/parse/parse_py/BBHandler.py   |   27
-rw-r--r--  poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py |    2
-rw-r--r--  poky/bitbake/lib/bb/persist_data.py               |  222
-rw-r--r--  poky/bitbake/lib/bb/providers.py                  |    4
-rw-r--r--  poky/bitbake/lib/bb/pysh/builtin.py               |  710
-rw-r--r--  poky/bitbake/lib/bb/pysh/interp.py                | 1367
-rw-r--r--  poky/bitbake/lib/bb/pysh/lsprof.py                |  116
-rw-r--r--  poky/bitbake/lib/bb/pysh/pysh.py                  |  167
-rw-r--r--  poky/bitbake/lib/bb/pysh/pyshlex.py               |    5
-rw-r--r--  poky/bitbake/lib/bb/pysh/pyshyacc.py              |   17
-rw-r--r--  poky/bitbake/lib/bb/pysh/sherrors.py              |   26
-rw-r--r--  poky/bitbake/lib/bb/pysh/subprocess_fix.py        |   77
-rw-r--r--  poky/bitbake/lib/bb/runqueue.py                   |   92
-rw-r--r--  poky/bitbake/lib/bb/siggen.py                     |   28
-rw-r--r--  poky/bitbake/lib/bb/tests/codeparser.py           |    7
-rw-r--r--  poky/bitbake/lib/bb/tests/data.py                 |    9
-rw-r--r--  poky/bitbake/lib/bb/tests/fetch.py                |   27
-rw-r--r--  poky/bitbake/lib/bb/tests/persist_data.py         |  142
-rw-r--r--  poky/bitbake/lib/bb/tests/utils.py                |    4
-rw-r--r--  poky/bitbake/lib/bb/utils.py                      |   47
36 files changed, 609 insertions(+), 2754 deletions(-)
diff --git a/poky/bitbake/lib/bb/build.py b/poky/bitbake/lib/bb/build.py
index 3e2a94edb..7571421d7 100644
--- a/poky/bitbake/lib/bb/build.py
+++ b/poky/bitbake/lib/bb/build.py
@@ -304,9 +304,10 @@ def exec_func_python(func, d, runfile, cwd=None, pythonexception=False):
utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated", pythonexception=pythonexception)
except (bb.parse.SkipRecipe, bb.build.FuncFailed):
raise
- except:
+ except Exception as e:
if pythonexception:
raise
+ logger.error(str(e))
raise FuncFailed(func, None)
finally:
bb.debug(2, "Python function %s finished" % func)
diff --git a/poky/bitbake/lib/bb/cache.py b/poky/bitbake/lib/bb/cache.py
index 258d679dc..65c514b90 100644
--- a/poky/bitbake/lib/bb/cache.py
+++ b/poky/bitbake/lib/bb/cache.py
@@ -97,7 +97,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
self.skipreason = self.getvar('__SKIPPED', metadata)
if self.skipreason:
- self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename,metadata)[0]
+ self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename,metadata)[0]
self.skipped = True
self.provides = self.depvar('PROVIDES', metadata)
self.rprovides = self.depvar('RPROVIDES', metadata)
diff --git a/poky/bitbake/lib/bb/codeparser.py b/poky/bitbake/lib/bb/codeparser.py
index ddd1b97dc..ac995a6a1 100644
--- a/poky/bitbake/lib/bb/codeparser.py
+++ b/poky/bitbake/lib/bb/codeparser.py
@@ -33,7 +33,7 @@ from bb.cache import MultiProcessCache
logger = logging.getLogger('BitBake.CodeParser')
def bbhash(s):
- return hashlib.md5(s.encode("utf-8")).hexdigest()
+ return hashlib.sha256(s.encode("utf-8")).hexdigest()
def check_indent(codestr):
"""If the code is indented, add a top level piece of code to 'remove' the indentation"""
@@ -140,7 +140,7 @@ class CodeParserCache(MultiProcessCache):
# so that an existing cache gets invalidated. Additionally you'll need
# to increment __cache_version__ in cache.py in order to ensure that old
# recipe caches don't trigger "Taskhash mismatch" errors.
- CACHE_VERSION = 10
+ CACHE_VERSION = 11
def __init__(self):
MultiProcessCache.__init__(self)
@@ -368,8 +368,9 @@ class ShellParser():
def _parse_shell(self, value):
try:
tokens, _ = pyshyacc.parse(value, eof=True, debug=False)
- except pyshlex.NeedMore:
- raise sherrors.ShellSyntaxError("Unexpected EOF")
+ except Exception:
+ bb.error('Error during parse shell code, the last 5 lines are:\n%s' % '\n'.join(value.split('\n')[-5:]))
+ raise
self.process_tokens(tokens)
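
This hunk, together with the later data.py and data_smart.py hunks, swaps md5 for sha256 as the cache and taskhash digest, which is why CACHE_VERSION is bumped: every cached key changes shape. A minimal sketch of the effect (plain hashlib, no BitBake required):

    import hashlib

    def bbhash(s):
        # sha256 hexdigest is 64 chars; the old md5 digest was 32
        return hashlib.sha256(s.encode("utf-8")).hexdigest()

    print(len(hashlib.md5(b"do_compile").hexdigest()))  # 32
    print(len(bbhash("do_compile")))                    # 64
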
diff --git a/poky/bitbake/lib/bb/cooker.py b/poky/bitbake/lib/bb/cooker.py
index adc41014e..9ccaa79f5 100644
--- a/poky/bitbake/lib/bb/cooker.py
+++ b/poky/bitbake/lib/bb/cooker.py
@@ -1216,8 +1216,8 @@ class BBCooker:
continue
elif regex == "":
parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
+ cre = re.compile('^NULL$')
errors = False
- continue
else:
try:
cre = re.compile(regex)
@@ -1890,35 +1890,6 @@ class ParsingFailure(Exception):
self.recipe = recipe
Exception.__init__(self, realexception, recipe)
-class Feeder(multiprocessing.Process):
- def __init__(self, jobs, to_parsers, quit):
- self.quit = quit
- self.jobs = jobs
- self.to_parsers = to_parsers
- multiprocessing.Process.__init__(self)
-
- def run(self):
- while True:
- try:
- quit = self.quit.get_nowait()
- except queue.Empty:
- pass
- else:
- if quit == 'cancel':
- self.to_parsers.cancel_join_thread()
- break
-
- try:
- job = self.jobs.pop()
- except IndexError:
- break
-
- try:
- self.to_parsers.put(job, timeout=0.5)
- except queue.Full:
- self.jobs.insert(0, job)
- continue
-
class Parser(multiprocessing.Process):
def __init__(self, jobs, results, quit, init, profile):
self.jobs = jobs
@@ -1965,11 +1936,8 @@ class Parser(multiprocessing.Process):
result = pending.pop()
else:
try:
- job = self.jobs.get(timeout=0.25)
- except queue.Empty:
- continue
-
- if job is None:
+ job = self.jobs.pop()
+ except IndexError:
break
result = self.parse(*job)
@@ -2053,14 +2021,15 @@ class CookerParser(object):
multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
- self.feeder_quit = multiprocessing.Queue(maxsize=1)
self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
- self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
self.result_queue = multiprocessing.Queue()
- self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
- self.feeder.start()
+
+ def chunkify(lst,n):
+ return [lst[i::n] for i in range(n)]
+ self.jobs = chunkify(self.willparse, self.num_processes)
+
for i in range(0, self.num_processes):
- parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
+ parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
parser.start()
self.process_names.append(parser.name)
self.processes.append(parser)
@@ -2081,25 +2050,19 @@ class CookerParser(object):
self.total)
bb.event.fire(event, self.cfgdata)
- self.feeder_quit.put(None)
for process in self.processes:
self.parser_quit.put(None)
else:
- self.feeder_quit.put('cancel')
-
self.parser_quit.cancel_join_thread()
for process in self.processes:
self.parser_quit.put(None)
- self.jobs.cancel_join_thread()
-
for process in self.processes:
if force:
process.join(.1)
process.terminate()
else:
process.join()
- self.feeder.join()
sync = threading.Thread(target=self.bb_cache.sync)
sync.start()
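
The Feeder process is gone: instead of streaming jobs through a multiprocessing.Queue, the recipe list is split up front with chunkify() and each Parser pops from its own plain list. A runnable sketch of the round-robin split used above:

    def chunkify(lst, n):
        # Element i lands in chunk i % n, so work is spread round-robin
        return [lst[i::n] for i in range(n)]

    print(chunkify(list(range(10)), 3))
    # [[0, 3, 6, 9], [1, 4, 7], [2, 5, 8]]
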
diff --git a/poky/bitbake/lib/bb/cookerdata.py b/poky/bitbake/lib/bb/cookerdata.py
index 5df66e617..09412e28c 100644
--- a/poky/bitbake/lib/bb/cookerdata.py
+++ b/poky/bitbake/lib/bb/cookerdata.py
@@ -391,7 +391,11 @@ class CookerDataBuilder(object):
bb.fatal("BBFILES_DYNAMIC entries must be of the form <collection name>:<filename pattern>, not:\n %s" % "\n ".join(invalid))
layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split())
+ collections_tmp = collections[:]
for c in collections:
+ collections_tmp.remove(c)
+ if c in collections_tmp:
+ bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c)
compat = set((data.getVar("LAYERSERIES_COMPAT_%s" % c) or "").split())
if compat and not (compat & layerseries):
bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)"
diff --git a/poky/bitbake/lib/bb/data.py b/poky/bitbake/lib/bb/data.py
index d66d98cc8..29c238803 100644
--- a/poky/bitbake/lib/bb/data.py
+++ b/poky/bitbake/lib/bb/data.py
@@ -322,8 +322,6 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
if varflags.get("python"):
value = d.getVarFlag(key, "_content", False)
parser = bb.codeparser.PythonParser(key, logger)
- if value and "\t" in value:
- logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE")))
parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
deps = deps | parser.references
deps = deps | (keys & parser.execs)
@@ -438,7 +436,7 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
if var is not None:
data = data + str(var)
k = fn + "." + task
- basehash[k] = hashlib.md5(data.encode("utf-8")).hexdigest()
+ basehash[k] = hashlib.sha256(data.encode("utf-8")).hexdigest()
taskdeps[task] = alldeps
return taskdeps, basehash
diff --git a/poky/bitbake/lib/bb/data_smart.py b/poky/bitbake/lib/bb/data_smart.py
index 6b94fc4b4..07db7be97 100644
--- a/poky/bitbake/lib/bb/data_smart.py
+++ b/poky/bitbake/lib/bb/data_smart.py
@@ -39,10 +39,11 @@ from bb.COW import COWDictBase
logger = logging.getLogger("BitBake.Data")
__setvar_keyword__ = ["_append", "_prepend", "_remove"]
-__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>[^A-Z]*))?$')
-__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t :]+}")
+__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>[^A-Z]*))?$')
+__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~]+?}")
__expand_python_regexp__ = re.compile(r"\${@.+?}")
-__whitespace_split__ = re.compile('(\s)')
+__whitespace_split__ = re.compile(r'(\s)')
+__override_regexp__ = re.compile(r'[a-z0-9]+')
def infer_caller_details(loginfo, parent = False, varval = True):
"""Save the caller the trouble of specifying everything."""
@@ -122,7 +123,11 @@ class VariableParse:
connector = self.d["_remote_data"]
return connector.expandPythonRef(self.varname, code, self.d)
- codeobj = compile(code.strip(), self.varname or "<expansion>", "eval")
+ if self.varname:
+ varname = 'Var <%s>' % self.varname
+ else:
+ varname = '<expansion>'
+ codeobj = compile(code.strip(), varname, "eval")
parser = bb.codeparser.PythonParser(self.varname, logger)
parser.parse_python(code)
@@ -427,7 +432,8 @@ class DataSmart(MutableMapping):
except bb.parse.SkipRecipe:
raise
except Exception as exc:
- raise ExpansionError(varname, s, exc) from exc
+ tb = sys.exc_info()[2]
+ raise ExpansionError(varname, s, exc).with_traceback(tb) from exc
varparse.value = s
@@ -592,7 +598,7 @@ class DataSmart(MutableMapping):
# aka pay the cookie monster
override = var[var.rfind('_')+1:]
shortvar = var[:var.rfind('_')]
- while override and override.islower():
+ while override and __override_regexp__.match(override):
if shortvar not in self.overridedata:
self.overridedata[shortvar] = []
if [var, override] not in self.overridedata[shortvar]:
@@ -1068,4 +1074,4 @@ class DataSmart(MutableMapping):
data.update({i:value})
data_str = str([(k, data[k]) for k in sorted(data.keys())])
- return hashlib.md5(data_str.encode("utf-8")).hexdigest()
+ return hashlib.sha256(data_str.encode("utf-8")).hexdigest()
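
The new __expand_var_regexp__ whitelists the characters a variable reference may contain instead of blacklisting a few separators, so malformed references no longer expand. A quick comparison of the two patterns (sketch):

    import re

    old = re.compile(r"\${[^{}@\n\t :]+}")
    new = re.compile(r"\${[a-zA-Z0-9\-_+./~]+?}")

    s = "${PN} and ${BAD|NAME}"
    print(old.findall(s))  # ['${PN}', '${BAD|NAME}'] - '|' slipped through
    print(new.findall(s))  # ['${PN}']                - only valid names match
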
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 709372e16..8fecc809d 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -524,7 +524,7 @@ def fetcher_parse_save():
def fetcher_parse_done():
_checksum_cache.save_merge()
-def fetcher_compare_revisions():
+def fetcher_compare_revisions(d):
"""
Compare the revisions in the persistant cache with current values and
return true/false on whether they've changed.
@@ -777,7 +777,8 @@ def get_srcrev(d, method_name='sortable_revision'):
#
format = d.getVar('SRCREV_FORMAT')
if not format:
- raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
+ raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.\n"\
+ "The SCMs are:\n%s" % '\n'.join(scms))
name_to_rev = {}
seenautoinc = False
@@ -858,7 +859,10 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
# Disable pseudo as it may affect ssh, potentially causing it to hang.
cmd = 'export PSEUDO_DISABLED=1; ' + cmd
- logger.debug(1, "Running %s", cmd)
+ if workdir:
+ logger.debug(1, "Running '%s' in %s" % (cmd, workdir))
+ else:
+ logger.debug(1, "Running %s", cmd)
success = False
error_message = ""
@@ -894,7 +898,7 @@ def check_network_access(d, info, url):
log remote network access, and error if BB_NO_NETWORK is set or the given
URI is untrusted
"""
- if d.getVar("BB_NO_NETWORK") == "1":
+ if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
raise NetworkAccess(url, info)
elif not trusted_network(d, url):
raise UntrustedUrl(url, info)
@@ -1027,7 +1031,7 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
raise
except IOError as e:
- if e.errno in [os.errno.ESTALE]:
+ if e.errno in [errno.ESTALE]:
logger.warning("Stale Error Observed %s." % ud.url)
return False
raise
@@ -1094,7 +1098,7 @@ def trusted_network(d, url):
BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
Note: modifies SRC_URI & mirrors.
"""
- if d.getVar('BB_NO_NETWORK') == "1":
+ if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
return True
pkgname = d.expand(d.getVar('PN', False))
@@ -1403,7 +1407,7 @@ class FetchMethod(object):
Fetch urls
Assumes localpath was called first
"""
- raise NoMethodError(url)
+ raise NoMethodError(urldata.url)
def unpack(self, urldata, rootdir, data):
iterate = False
@@ -1547,7 +1551,7 @@ class FetchMethod(object):
Check the status of a URL
Assumes localpath was called first
"""
- logger.info("URL %s could not be checked for status since no method exists.", url)
+ logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
return True
def latest_revision(self, ud, d, name):
@@ -1555,7 +1559,7 @@ class FetchMethod(object):
Look in the cache for the latest revision, if not present ask the SCM.
"""
if not hasattr(self, "_latest_revision"):
- raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
+ raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
key = self.generate_revision_key(ud, d, name)
@@ -1638,7 +1642,7 @@ class Fetch(object):
urls = self.urls
network = self.d.getVar("BB_NO_NETWORK")
- premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY") == "1")
+ premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))
for u in urls:
ud = self.ud[u]
@@ -1716,7 +1720,7 @@ class Fetch(object):
update_stamp(ud, self.d)
except IOError as e:
- if e.errno in [os.errno.ESTALE]:
+ if e.errno in [errno.ESTALE]:
logger.error("Stale Error Observed %s." % u)
raise ChecksumError("Stale Error Detected")
@@ -1786,7 +1790,7 @@ class Fetch(object):
for url in urls:
if url not in self.ud:
- self.ud[url] = FetchData(url, d)
+ self.ud[url] = FetchData(url, self.d)
ud = self.ud[url]
ud.setup_localpath(self.d)
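
Several hunks here replace the literal comparison d.getVar("BB_NO_NETWORK") == "1" with bb.utils.to_boolean(), so values like "true" behave as expected. A stand-in with the same shape (assumption: the real helper accepts the usual true/false spellings and raises on anything else):

    def to_boolean(string, default=False):
        # Hypothetical mirror of bb.utils.to_boolean, for illustration only
        if not string:
            return default
        if string.lower() in ("1", "yes", "true"):
            return True
        if string.lower() in ("0", "no", "false"):
            return False
        raise ValueError("Invalid boolean value '%s'" % string)

    print(to_boolean("1"), to_boolean("TRUE"), to_boolean(None))  # True True False
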
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index 59a2ee8f8..8185bf4db 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -199,7 +199,7 @@ class Git(FetchMethod):
depth_default = 1
ud.shallow_depths = collections.defaultdict(lambda: depth_default)
- revs_default = d.getVar("BB_GIT_SHALLOW_REVS", True)
+ revs_default = d.getVar("BB_GIT_SHALLOW_REVS")
ud.shallow_revs = []
ud.branches = {}
for pos, name in enumerate(ud.names):
@@ -318,7 +318,7 @@ class Git(FetchMethod):
def try_premirror(self, ud, d):
# If we don't do this, updating an existing checkout with only premirrors
# is not possible
- if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
+ if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
return True
if os.path.exists(ud.clonedir):
return False
@@ -522,9 +522,17 @@ class Git(FetchMethod):
def clean(self, ud, d):
""" clean the git directory """
- bb.utils.remove(ud.localpath, True)
- bb.utils.remove(ud.fullmirror)
- bb.utils.remove(ud.fullmirror + ".done")
+ to_remove = [ud.localpath, ud.fullmirror, ud.fullmirror + ".done"]
+ # The localpath is a symlink to clonedir when it is cloned from a
+ # mirror, so remove both of them.
+ if os.path.islink(ud.localpath):
+ clonedir = os.path.realpath(ud.localpath)
+ to_remove.append(clonedir)
+
+ for r in to_remove:
+ if os.path.exists(r):
+ bb.note('Removing %s' % r)
+ bb.utils.remove(r, True)
def supports_srcrev(self):
return True
@@ -615,7 +623,7 @@ class Git(FetchMethod):
"""
pupver = ('', '')
- tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or "(?P<pver>([0-9][\.|_]?)+)")
+ tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
try:
output = self._lsremote(ud, d, "refs/tags/*")
except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
@@ -630,7 +638,7 @@ class Git(FetchMethod):
tag_head = line.split("/")[-1]
# Ignore non-released branches
- m = re.search("(alpha|beta|rc|final)+", tag_head)
+ m = re.search(r"(alpha|beta|rc|final)+", tag_head)
if m:
continue
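
clean() now handles the mirror case: when the checkout came from a mirror, ud.localpath is a symlink to the real clone directory, and removing only the link would leave the clone behind. A sketch of the path resolution:

    import os

    def paths_to_clean(localpath, fullmirror):
        to_remove = [localpath, fullmirror, fullmirror + ".done"]
        if os.path.islink(localpath):
            # The link target is the actual clone; remove it too
            to_remove.append(os.path.realpath(localpath))
        return to_remove
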
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index b21fed266..32389130b 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -147,6 +147,23 @@ class GitSM(Git):
return submodules != []
+ def need_update(self, ud, d):
+ if Git.need_update(self, ud, d):
+ return True
+
+ try:
+ # Check for the nugget dropped by the download operation
+ known_srcrevs = runfetchcmd("%s config --get-all bitbake.srcrev" % \
+ (ud.basecmd), d, workdir=ud.clonedir)
+
+ if ud.revisions[ud.names[0]] not in known_srcrevs.split():
+ return True
+ except bb.fetch2.FetchError:
+ # No srcrev nuggets, so this is new and needs to be updated
+ return True
+
+ return False
+
def download(self, ud, d):
def download_submodule(ud, url, module, modpath, d):
url += ";bareclone=1;nobranch=1"
@@ -157,6 +174,9 @@ class GitSM(Git):
try:
newfetch = Fetch([url], d, cache=False)
newfetch.download()
+ # Drop a nugget to add each of the srcrevs we've fetched (used by need_update)
+ runfetchcmd("%s config --add bitbake.srcrev %s" % \
+ (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
except Exception as e:
logger.error('gitsm: submodule download failed: %s %s' % (type(e).__name__, str(e)))
raise
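
The "nugget" is simply an extra multi-valued key in the bare clone's git config: each fetched SRCREV is appended with git config --add, and need_update() later reads them all back with --get-all. Outside BitBake the same round trip looks like this (sketch using plain subprocess):

    import subprocess

    def record_srcrev(clonedir, srcrev):
        subprocess.check_call(
            ["git", "config", "--add", "bitbake.srcrev", srcrev], cwd=clonedir)

    def known_srcrevs(clonedir):
        out = subprocess.check_output(
            ["git", "config", "--get-all", "bitbake.srcrev"], cwd=clonedir)
        return out.decode().split()
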
diff --git a/poky/bitbake/lib/bb/fetch2/hg.py b/poky/bitbake/lib/bb/fetch2/hg.py
index 936d04311..5a2985e16 100644
--- a/poky/bitbake/lib/bb/fetch2/hg.py
+++ b/poky/bitbake/lib/bb/fetch2/hg.py
@@ -99,7 +99,7 @@ class Hg(FetchMethod):
def try_premirror(self, ud, d):
# If we don't do this, updating an existing checkout with only premirrors
# is not possible
- if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
+ if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
return True
if os.path.exists(ud.moddir):
return False
diff --git a/poky/bitbake/lib/bb/fetch2/npm.py b/poky/bitbake/lib/bb/fetch2/npm.py
index 408dfc3d0..65bf5a364 100644
--- a/poky/bitbake/lib/bb/fetch2/npm.py
+++ b/poky/bitbake/lib/bb/fetch2/npm.py
@@ -226,7 +226,7 @@ class Npm(FetchMethod):
self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest, False)
return
outputurl = "invalid"
- if ('resolved' not in data) or (not data['resolved'].startswith('http')):
+ if ('resolved' not in data) or (not data['resolved'].startswith('http://') and not data['resolved'].startswith('https://')):
# will be the case for ${PN}
fetchcmd = "npm view %s@%s dist.tarball --registry %s" % (pkg, version, ud.registry)
logger.debug(2, "Found this matching URL: %s" % str(fetchcmd))
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
index 8f505b6de..3bb3e3bb0 100644
--- a/poky/bitbake/lib/bb/fetch2/wget.py
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -33,11 +33,14 @@ import logging
import errno
import bb
import bb.progress
+import socket
+import http.client
import urllib.request, urllib.parse, urllib.error
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
+from bb.fetch2 import FetchConnectionCache
from bb.utils import export_proxies
from bs4 import BeautifulSoup
from bs4 import SoupStrainer
@@ -132,10 +135,6 @@ class Wget(FetchMethod):
return True
def checkstatus(self, fetch, ud, d, try_again=True):
- import urllib.request, urllib.error, urllib.parse, socket, http.client
- from urllib.response import addinfourl
- from bb.fetch2 import FetchConnectionCache
-
class HTTPConnectionCache(http.client.HTTPConnection):
if fetch.connection_cache:
def connect(self):
@@ -168,7 +167,7 @@ class Wget(FetchMethod):
"""
host = req.host
if not host:
- raise urlllib2.URLError('no host given')
+ raise urllib.error.URLError('no host given')
h = http_class(host, timeout=req.timeout) # will parse host:port
h.set_debuglevel(self._debuglevel)
@@ -185,7 +184,7 @@ class Wget(FetchMethod):
# request.
# Don't close connection when connection_cache is enabled,
- if fetch.connection_cache is None:
+ if fetch.connection_cache is None:
headers["Connection"] = "close"
else:
headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0
@@ -252,7 +251,7 @@ class Wget(FetchMethod):
pass
closed = False
- resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
+ resp = urllib.response.addinfourl(fp_dummy(), r.msg, req.get_full_url())
resp.code = r.status
resp.msg = r.reason
@@ -271,17 +270,16 @@ class Wget(FetchMethod):
fp.read()
fp.close()
- newheaders = dict((k,v) for k,v in list(req.headers.items())
+ newheaders = dict((k, v) for k, v in list(req.headers.items())
if k.lower() not in ("content-length", "content-type"))
return self.parent.open(urllib.request.Request(req.get_full_url(),
headers=newheaders,
origin_req_host=req.origin_req_host,
unverifiable=True))
- """
- Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
- Forbidden when they actually mean 405 Method Not Allowed.
- """
+
+ # Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
+ # Forbidden when they actually mean 405 Method Not Allowed.
http_error_403 = http_error_405
@@ -292,15 +290,15 @@ class Wget(FetchMethod):
"""
def redirect_request(self, req, fp, code, msg, headers, newurl):
newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
- newreq.get_method = lambda: req.get_method()
+ newreq.get_method = req.get_method
return newreq
exported_proxies = export_proxies(d)
handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
- if export_proxies:
+ if exported_proxies:
handlers.append(urllib.request.ProxyHandler())
handlers.append(CacheHTTPHandler())
- # XXX: Since Python 2.7.9 ssl cert validation is enabled by default
+ # Since Python 2.7.9 ssl cert validation is enabled by default
# see PEP-0476, this causes verification errors on some https servers
# so disable by default.
import ssl
@@ -319,19 +317,19 @@ class Wget(FetchMethod):
'''Adds Basic auth to http request, pass in login:password as string'''
import base64
encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
- authheader = "Basic %s" % encodeuser
+ authheader = "Basic %s" % encodeuser
r.add_header("Authorization", authheader)
- if ud.user:
- add_basic_auth(ud.user, r)
+ if ud.user and ud.pswd:
+ add_basic_auth(ud.user + ':' + ud.pswd, r)
try:
- import netrc, urllib.parse
+ import netrc
n = netrc.netrc()
login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname)
add_basic_auth("%s:%s" % (login, password), r)
except (TypeError, ImportError, IOError, netrc.NetrcParseError):
- pass
+ pass
with opener.open(r) as response:
pass
@@ -396,18 +394,14 @@ class Wget(FetchMethod):
(oldpn, oldpv, oldsuffix) = old
(newpn, newpv, newsuffix) = new
- """
- Check for a new suffix type that we have never heard of before
- """
- if (newsuffix):
+ # Check for a new suffix type that we have never heard of before
+ if newsuffix:
m = self.suffix_regex_comp.search(newsuffix)
if not m:
bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
return False
- """
- Not our package so ignore it
- """
+ # Not our package so ignore it
if oldpn != newpn:
return False
@@ -473,15 +467,14 @@ class Wget(FetchMethod):
return ""
- def _check_latest_version_by_dir(self, dirver, package, package_regex,
- current_version, ud, d):
+ def _check_latest_version_by_dir(self, dirver, package, package_regex, current_version, ud, d):
"""
- Scan every directory in order to get upstream version.
+ Scan every directory in order to get upstream version.
"""
version_dir = ['', '', '']
version = ['', '', '']
- dirver_regex = re.compile("(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))")
+ dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))")
s = dirver_regex.search(dirver)
if s:
version_dir[1] = s.group('ver')
@@ -541,26 +534,26 @@ class Wget(FetchMethod):
gst-fluendo-mp3
"""
# match most patterns which uses "-" as separator to version digits
- pn_prefix1 = "[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
+ pn_prefix1 = r"[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
# a loose pattern such as for unzip552.tar.gz
- pn_prefix2 = "[a-zA-Z]+"
+ pn_prefix2 = r"[a-zA-Z]+"
# a loose pattern such as for 80325-quicky-0.4.tar.gz
- pn_prefix3 = "[0-9]+[-]?[a-zA-Z]+"
+ pn_prefix3 = r"[0-9]+[-]?[a-zA-Z]+"
# Save the Package Name (pn) Regex for use later
- pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)
+ pn_regex = r"(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)
# match version
- pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
+ pver_regex = r"(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
# match arch
parch_regex = "-source|_all_"
# src.rpm extension was added only for rpm package. Can be removed if the rpm
# packaged will always be considered as having to be manually upgraded
- psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
+ psuffix_regex = r"(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
# match name, version and archive type of a package
- package_regex_comp = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
+ package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
% (pn_regex, pver_regex, parch_regex, psuffix_regex))
self.suffix_regex_comp = re.compile(psuffix_regex)
@@ -572,7 +565,7 @@ class Wget(FetchMethod):
version = self._parse_path(package_regex_comp, package)
if version:
package_custom_regex_comp = re.compile(
- "(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
+ r"(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
(re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
else:
package_custom_regex_comp = None
@@ -589,7 +582,7 @@ class Wget(FetchMethod):
current_version = ['', d.getVar('PV'), '']
"""possible to have no version in pkg name, such as spectrum-fw"""
- if not re.search("\d+", package):
+ if not re.search(r"\d+", package):
current_version[1] = re.sub('_', '.', current_version[1])
current_version[1] = re.sub('-', '.', current_version[1])
return (current_version[1], '')
@@ -607,13 +600,13 @@ class Wget(FetchMethod):
# search for version matches on folders inside the path, like:
# "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
- dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
+ dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
m = dirver_regex.search(path)
if m:
pn = d.getVar('PN')
dirver = m.group('dirver')
- dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
+ dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn)))
if not dirver_pn_regex.search(dirver):
return (self._check_latest_version_by_dir(dirver,
package, package_regex, current_version, ud, d), '')
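
Most of the wget.py churn converts regex literals to raw strings. Besides silencing Python 3's invalid-escape deprecation warnings, raw strings avoid cases where a sequence is a real string escape and silently changes the pattern, e.g. \b:

    import re

    # In a plain string "\b" is a backspace character; in a raw string it is
    # the regex word-boundary assertion.
    print(re.search("\bver\b", "ver 1.0"))   # None (searches for \x08)
    print(re.search(r"\bver\b", "ver 1.0"))  # <re.Match ...> matching 'ver'
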
diff --git a/poky/bitbake/lib/bb/main.py b/poky/bitbake/lib/bb/main.py
index 7dc953da6..41dd3b9e0 100755
--- a/poky/bitbake/lib/bb/main.py
+++ b/poky/bitbake/lib/bb/main.py
@@ -475,10 +475,11 @@ def setup_bitbake(configParams, configuration, extrafeatures=None):
if not retries:
raise
retries -= 1
+ tryno = 8 - retries
if isinstance(e, (bb.server.process.ProcessTimeout, BrokenPipeError)):
- logger.info("Retrying server connection...")
+ logger.info("Retrying server connection (#%d)..." % tryno)
else:
- logger.info("Retrying server connection... (%s)" % traceback.format_exc())
+ logger.info("Retrying server connection (#%d)... (%s)" % (tryno, traceback.format_exc()))
if not retries:
bb.fatal("Unable to connect to bitbake server, or start one")
if retries < 5:
diff --git a/poky/bitbake/lib/bb/monitordisk.py b/poky/bitbake/lib/bb/monitordisk.py
index 833cd3d34..2ad1e6156 100644
--- a/poky/bitbake/lib/bb/monitordisk.py
+++ b/poky/bitbake/lib/bb/monitordisk.py
@@ -28,16 +28,16 @@ def convertGMK(unit):
""" Convert the space unit G, M, K, the unit is case-insensitive """
- unitG = re.match('([1-9][0-9]*)[gG]\s?$', unit)
+ unitG = re.match(r'([1-9][0-9]*)[gG]\s?$', unit)
if unitG:
return int(unitG.group(1)) * (1024 ** 3)
- unitM = re.match('([1-9][0-9]*)[mM]\s?$', unit)
+ unitM = re.match(r'([1-9][0-9]*)[mM]\s?$', unit)
if unitM:
return int(unitM.group(1)) * (1024 ** 2)
- unitK = re.match('([1-9][0-9]*)[kK]\s?$', unit)
+ unitK = re.match(r'([1-9][0-9]*)[kK]\s?$', unit)
if unitK:
return int(unitK.group(1)) * 1024
- unitN = re.match('([1-9][0-9]*)\s?$', unit)
+ unitN = re.match(r'([1-9][0-9]*)\s?$', unit)
if unitN:
return int(unitN.group(1))
else:
@@ -83,7 +83,7 @@ def getDiskData(BBDirs, configuration):
for pathSpaceInode in BBDirs.split():
# The input format is: "dir,space,inode", dir is a must, space
# and inode are optional
- pathSpaceInodeRe = re.match('([^,]*),([^,]*),([^,]*),?(.*)', pathSpaceInode)
+ pathSpaceInodeRe = re.match(r'([^,]*),([^,]*),([^,]*),?(.*)', pathSpaceInode)
if not pathSpaceInodeRe:
printErr("Invalid value in BB_DISKMON_DIRS: %s" % pathSpaceInode)
return None
@@ -147,7 +147,7 @@ def getInterval(configuration):
else:
# The disk space or inode interval is optional, but it should
# have a correct value once it is specified
- intervalRe = re.match('([^,]*),?\s*(.*)', interval)
+ intervalRe = re.match(r'([^,]*),?\s*(.*)', interval)
if intervalRe:
intervalSpace = intervalRe.group(1)
if intervalSpace:
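
convertGMK() itself is unchanged apart from the raw-string patterns; for reference, it maps case-insensitive G/M/K suffixes to byte counts. A usage sketch, assuming poky/bitbake/lib is on sys.path:

    from bb.monitordisk import convertGMK

    for unit in ("1G", "512m", "100K", "4096"):
        print(unit, convertGMK(unit))
    # 1G 1073741824 / 512m 536870912 / 100K 102400 / 4096 4096
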
diff --git a/poky/bitbake/lib/bb/parse/ast.py b/poky/bitbake/lib/bb/parse/ast.py
index 9d20c323f..6d7c80b34 100644
--- a/poky/bitbake/lib/bb/parse/ast.py
+++ b/poky/bitbake/lib/bb/parse/ast.py
@@ -178,7 +178,7 @@ class MethodNode(AstNode):
funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(MethodNode.tr_tbl)))
self.python = True
text = "def %s(d):\n" % (funcname) + text
- bb.methodpool.insert_method(funcname, text, self.filename, self.lineno - len(self.body))
+ bb.methodpool.insert_method(funcname, text, self.filename, self.lineno - len(self.body) - 1)
anonfuncs = data.getVar('__BBANONFUNCS', False) or []
anonfuncs.append(funcname)
data.setVar('__BBANONFUNCS', anonfuncs)
diff --git a/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py b/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
index e5039e3bd..9dba5f233 100644
--- a/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -38,14 +38,15 @@ from .ConfHandler import include, init
# For compatibility
bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
-__func_start_regexp__ = re.compile( r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
-__inherit_regexp__ = re.compile( r"inherit\s+(.+)" )
-__export_func_regexp__ = re.compile( r"EXPORT_FUNCTIONS\s+(.+)" )
-__addtask_regexp__ = re.compile("addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
-__deltask_regexp__ = re.compile("deltask\s+(?P<func>\w+)")
-__addhandler_regexp__ = re.compile( r"addhandler\s+(.+)" )
-__def_regexp__ = re.compile( r"def\s+(\w+).*:" )
-__python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" )
+__func_start_regexp__ = re.compile(r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
+__inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
+__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
+__addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
+__deltask_regexp__ = re.compile(r"deltask\s+(?P<func>\w+)")
+__addhandler_regexp__ = re.compile(r"addhandler\s+(.+)" )
+__def_regexp__ = re.compile(r"def\s+(\w+).*:" )
+__python_func_regexp__ = re.compile(r"(\s+.*)|(^$)|(^#)" )
+__python_tab_regexp__ = re.compile(r" *\t")
__infunc__ = []
__inpython__ = False
@@ -160,6 +161,16 @@ def handle(fn, d, include):
def feeder(lineno, s, fn, root, statements, eof=False):
global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, bb, __residue__, __classname__
+
+ # Check tabs in python functions:
+ # - def py_funcname(): covered by __inpython__
+ # - python(): covered by '__anonymous' == __infunc__[0]
+ # - python funcname(): covered by __infunc__[3]
+ if __inpython__ or (__infunc__ and ('__anonymous' == __infunc__[0] or __infunc__[3])):
+ tab = __python_tab_regexp__.match(s)
+ if tab:
+ bb.warn('python should use 4 spaces indentation, but found tabs in %s, line %s' % (root, lineno))
+
if __infunc__:
if s == '}':
__body__.append('')
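
The new tab check fires only inside Python scopes (def-style functions, anonymous python blocks, and named python functions) and matches indentation that begins with optional spaces followed by a tab. The pattern's behavior in isolation:

    import re

    __python_tab_regexp__ = re.compile(r" *\t")

    for line in ("    x = 1", "\tx = 1", "  \tx = 1"):
        print(repr(line), bool(__python_tab_regexp__.match(line)))
    # '    x = 1' False / '\tx = 1' True / '  \tx = 1' True
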
diff --git a/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index 9d3ebe16f..ea49f8ca9 100644
--- a/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -147,7 +147,7 @@ def handle(fn, data, include):
continue
s = s.rstrip()
while s[-1] == '\\':
- s2 = f.readline().strip()
+ s2 = f.readline().rstrip()
lineno = lineno + 1
if (not s2 or s2 and s2[0] != "#") and s[0] == "#" :
bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s))
diff --git a/poky/bitbake/lib/bb/persist_data.py b/poky/bitbake/lib/bb/persist_data.py
index bef701861..0d44100f1 100644
--- a/poky/bitbake/lib/bb/persist_data.py
+++ b/poky/bitbake/lib/bb/persist_data.py
@@ -29,6 +29,7 @@ import warnings
from bb.compat import total_ordering
from collections import Mapping
import sqlite3
+import contextlib
sqlversion = sqlite3.sqlite_version_info
if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
@@ -36,84 +37,181 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
logger = logging.getLogger("BitBake.PersistData")
-if hasattr(sqlite3, 'enable_shared_cache'):
- try:
- sqlite3.enable_shared_cache(True)
- except sqlite3.OperationalError:
- pass
-
@total_ordering
class SQLTable(collections.MutableMapping):
+ class _Decorators(object):
+ @staticmethod
+ def retry(*, reconnect=True):
+ """
+ Decorator that restarts a function if a database locked sqlite
+ exception occurs. If reconnect is True, the database connection
+ will be closed and reopened each time a failure occurs
+ """
+ def retry_wrapper(f):
+ def wrap_func(self, *args, **kwargs):
+ # Reconnect if necessary
+ if self.connection is None and reconnect:
+ self.reconnect()
+
+ count = 0
+ while True:
+ try:
+ return f(self, *args, **kwargs)
+ except sqlite3.OperationalError as exc:
+ if count < 500 and ('is locked' in str(exc) or 'locking protocol' in str(exc)):
+ count = count + 1
+ if reconnect:
+ self.reconnect()
+ continue
+ raise
+ return wrap_func
+ return retry_wrapper
+
+ @staticmethod
+ def transaction(f):
+ """
+ Decorator that starts a database transaction and creates a database
+ cursor for performing queries. If no exception is thrown, the
+ database results are commited. If an exception occurs, the database
+ is rolled back. In all cases, the cursor is closed after the
+ function ends.
+
+ Note that the cursor is passed as an extra argument to the function
+ after `self` and before any of the normal arguments
+ """
+ def wrap_func(self, *args, **kwargs):
+ # Context manager will COMMIT the database on success,
+ # or ROLLBACK on an exception
+ with self.connection:
+ # Automatically close the cursor when done
+ with contextlib.closing(self.connection.cursor()) as cursor:
+ return f(self, cursor, *args, **kwargs)
+ return wrap_func
+
"""Object representing a table/domain in the database"""
def __init__(self, cachefile, table):
self.cachefile = cachefile
self.table = table
- self.cursor = connect(self.cachefile)
-
- self._execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);"
- % table)
-
- def _execute(self, *query):
- """Execute a query, waiting to acquire a lock if necessary"""
- count = 0
- while True:
- try:
- return self.cursor.execute(*query)
- except sqlite3.OperationalError as exc:
- if 'database is locked' in str(exc) and count < 500:
- count = count + 1
+
+ self.connection = None
+ self._execute_single("CREATE TABLE IF NOT EXISTS %s(key TEXT PRIMARY KEY NOT NULL, value TEXT);" % table)
+
+ @_Decorators.retry(reconnect=False)
+ @_Decorators.transaction
+ def _setup_database(self, cursor):
+ cursor.execute("pragma synchronous = off;")
+ # Enable WAL and keep the autocheckpoint length small (the default is
+ # usually 1000). Persistent caches are usually read-mostly, so keeping
+ # this short will keep readers running quickly
+ cursor.execute("pragma journal_mode = WAL;")
+ cursor.execute("pragma wal_autocheckpoint = 100;")
+
+ def reconnect(self):
+ if self.connection is not None:
+ self.connection.close()
+ self.connection = sqlite3.connect(self.cachefile, timeout=5)
+ self.connection.text_factory = str
+ self._setup_database()
+
+ @_Decorators.retry()
+ @_Decorators.transaction
+ def _execute_single(self, cursor, *query):
+ """
+ Executes a single query and discards the results. This correctly closes
+ the database cursor when finished
+ """
+ cursor.execute(*query)
+
+ @_Decorators.retry()
+ def _row_iter(self, f, *query):
+ """
+ Helper function that returns a row iterator. Each time __next__ is
+ called on the iterator, the provided function is evaluated to determine
+ the return value
+ """
+ class CursorIter(object):
+ def __init__(self, cursor):
+ self.cursor = cursor
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ row = self.cursor.fetchone()
+ if row is None:
self.cursor.close()
- self.cursor = connect(self.cachefile)
- continue
- raise
+ raise StopIteration
+ return f(row)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, typ, value, traceback):
+ self.cursor.close()
+ return False
+
+ cursor = self.connection.cursor()
+ try:
+ cursor.execute(*query)
+ return CursorIter(cursor)
+ except:
+ cursor.close()
def __enter__(self):
- self.cursor.__enter__()
+ self.connection.__enter__()
return self
def __exit__(self, *excinfo):
- self.cursor.__exit__(*excinfo)
-
- def __getitem__(self, key):
- data = self._execute("SELECT * from %s where key=?;" %
- self.table, [key])
- for row in data:
+ self.connection.__exit__(*excinfo)
+
+ @_Decorators.retry()
+ @_Decorators.transaction
+ def __getitem__(self, cursor, key):
+ cursor.execute("SELECT * from %s where key=?;" % self.table, [key])
+ row = cursor.fetchone()
+ if row is not None:
return row[1]
raise KeyError(key)
- def __delitem__(self, key):
+ @_Decorators.retry()
+ @_Decorators.transaction
+ def __delitem__(self, cursor, key):
if key not in self:
raise KeyError(key)
- self._execute("DELETE from %s where key=?;" % self.table, [key])
+ cursor.execute("DELETE from %s where key=?;" % self.table, [key])
- def __setitem__(self, key, value):
+ @_Decorators.retry()
+ @_Decorators.transaction
+ def __setitem__(self, cursor, key, value):
if not isinstance(key, str):
raise TypeError('Only string keys are supported')
elif not isinstance(value, str):
raise TypeError('Only string values are supported')
- data = self._execute("SELECT * from %s where key=?;" %
- self.table, [key])
- exists = len(list(data))
- if exists:
- self._execute("UPDATE %s SET value=? WHERE key=?;" % self.table,
- [value, key])
+ cursor.execute("SELECT * from %s where key=?;" % self.table, [key])
+ row = cursor.fetchone()
+ if row is not None:
+ cursor.execute("UPDATE %s SET value=? WHERE key=?;" % self.table, [value, key])
else:
- self._execute("INSERT into %s(key, value) values (?, ?);" %
- self.table, [key, value])
-
- def __contains__(self, key):
- return key in set(self)
-
- def __len__(self):
- data = self._execute("SELECT COUNT(key) FROM %s;" % self.table)
- for row in data:
+ cursor.execute("INSERT into %s(key, value) values (?, ?);" % self.table, [key, value])
+
+ @_Decorators.retry()
+ @_Decorators.transaction
+ def __contains__(self, cursor, key):
+ cursor.execute('SELECT * from %s where key=?;' % self.table, [key])
+ return cursor.fetchone() is not None
+
+ @_Decorators.retry()
+ @_Decorators.transaction
+ def __len__(self, cursor):
+ cursor.execute("SELECT COUNT(key) FROM %s;" % self.table)
+ row = cursor.fetchone()
+ if row is not None:
return row[0]
def __iter__(self):
- data = self._execute("SELECT key FROM %s;" % self.table)
- return (row[0] for row in data)
+ return self._row_iter(lambda row: row[0], "SELECT key from %s;" % self.table)
def __lt__(self, other):
if not isinstance(other, Mapping):
@@ -122,25 +220,27 @@ class SQLTable(collections.MutableMapping):
return len(self) < len(other)
def get_by_pattern(self, pattern):
- data = self._execute("SELECT * FROM %s WHERE key LIKE ?;" %
- self.table, [pattern])
- return [row[1] for row in data]
+ return self._row_iter(lambda row: row[1], "SELECT * FROM %s WHERE key LIKE ?;" %
+ self.table, [pattern])
def values(self):
return list(self.itervalues())
def itervalues(self):
- data = self._execute("SELECT value FROM %s;" % self.table)
- return (row[0] for row in data)
+ return self._row_iter(lambda row: row[0], "SELECT value FROM %s;" %
+ self.table)
def items(self):
return list(self.iteritems())
def iteritems(self):
- return self._execute("SELECT * FROM %s;" % self.table)
+ return self._row_iter(lambda row: (row[0], row[1]), "SELECT * FROM %s;" %
+ self.table)
- def clear(self):
- self._execute("DELETE FROM %s;" % self.table)
+ @_Decorators.retry()
+ @_Decorators.transaction
+ def clear(self, cursor):
+ cursor.execute("DELETE FROM %s;" % self.table)
def has_key(self, key):
return key in self
@@ -194,12 +294,6 @@ class PersistData(object):
"""
del self.data[domain][key]
-def connect(database):
- connection = sqlite3.connect(database, timeout=5, isolation_level=None)
- connection.execute("pragma synchronous = off;")
- connection.text_factory = str
- return connection
-
def persist(domain, d):
"""Convenience factory for SQLTable objects based upon metadata"""
import bb.utils
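
The rewritten SQLTable leans on two building blocks: sqlite3's connection context manager (commit on success, rollback on exception) and contextlib.closing() for the cursor, with WAL journaling enabled for concurrent readers. A self-contained sketch of that pattern (table and key names here are illustrative):

    import contextlib
    import sqlite3

    conn = sqlite3.connect(":memory:", timeout=5)
    # WAL benefits file-backed databases; shown here for illustration
    conn.execute("pragma journal_mode = WAL;")

    with conn:  # COMMIT on success, ROLLBACK on exception
        with contextlib.closing(conn.cursor()) as cursor:
            cursor.execute(
                "CREATE TABLE IF NOT EXISTS cache(key TEXT PRIMARY KEY NOT NULL, value TEXT);")
            cursor.execute("INSERT INTO cache(key, value) VALUES (?, ?);", ("k", "v"))

    print(conn.execute("SELECT value FROM cache WHERE key=?;", ("k",)).fetchone())  # ('v',)
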
diff --git a/poky/bitbake/lib/bb/providers.py b/poky/bitbake/lib/bb/providers.py
index c2aa98c06..f496d84d1 100644
--- a/poky/bitbake/lib/bb/providers.py
+++ b/poky/bitbake/lib/bb/providers.py
@@ -129,7 +129,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
preferred_v = cfgData.getVar("PREFERRED_VERSION")
if preferred_v:
- m = re.match('(\d+:)*(.*)(_.*)*', preferred_v)
+ m = re.match(r'(\d+:)*(.*)(_.*)*', preferred_v)
if m:
if m.group(1):
preferred_e = m.group(1)[:-1]
@@ -384,7 +384,7 @@ def getRuntimeProviders(dataCache, rdepend):
# Only search dynamic packages if we can't find anything in other variables
for pattern in dataCache.packages_dynamic:
- pattern = pattern.replace('+', "\+")
+ pattern = pattern.replace(r'+', r"\+")
if pattern in regexp_cache:
regexp = regexp_cache[pattern]
else:
diff --git a/poky/bitbake/lib/bb/pysh/builtin.py b/poky/bitbake/lib/bb/pysh/builtin.py
deleted file mode 100644
index a8814dc33..000000000
--- a/poky/bitbake/lib/bb/pysh/builtin.py
+++ /dev/null
@@ -1,710 +0,0 @@
-# builtin.py - builtins and utilities definitions for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""Builtin and internal utilities implementations.
-
-- Beware not to use python interpreter environment as if it were the shell
-environment. For instance, commands working directory must be explicitely handled
-through env['PWD'] instead of relying on python working directory.
-"""
-import errno
-import optparse
-import os
-import re
-import subprocess
-import sys
-import time
-
-def has_subprocess_bug():
- return getattr(subprocess, 'list2cmdline') and \
- ( subprocess.list2cmdline(['']) == '' or \
- subprocess.list2cmdline(['foo|bar']) == 'foo|bar')
-
-# Detect python bug 1634343: "subprocess swallows empty arguments under win32"
-# <http://sourceforge.net/tracker/index.php?func=detail&aid=1634343&group_id=5470&atid=105470>
-# Also detect: "[ 1710802 ] subprocess must escape redirection characters under win32"
-# <http://sourceforge.net/tracker/index.php?func=detail&aid=1710802&group_id=5470&atid=105470>
-if has_subprocess_bug():
- import subprocess_fix
- subprocess.list2cmdline = subprocess_fix.list2cmdline
-
-from sherrors import *
-
-class NonExitingParser(optparse.OptionParser):
- """OptionParser default behaviour upon error is to print the error message and
- exit. Raise a utility error instead.
- """
- def error(self, msg):
- raise UtilityError(msg)
-
-#-------------------------------------------------------------------------------
-# set special builtin
-#-------------------------------------------------------------------------------
-OPT_SET = NonExitingParser(usage="set - set or unset options and positional parameters")
-OPT_SET.add_option( '-f', action='store_true', dest='has_f', default=False,
- help='The shell shall disable pathname expansion.')
-OPT_SET.add_option('-e', action='store_true', dest='has_e', default=False,
- help="""When this option is on, if a simple command fails for any of the \
- reasons listed in Consequences of Shell Errors or returns an exit status \
- value >0, and is not part of the compound list following a while, until, \
- or if keyword, and is not a part of an AND or OR list, and is not a \
- pipeline preceded by the ! reserved word, then the shell shall immediately \
- exit.""")
-OPT_SET.add_option('-x', action='store_true', dest='has_x', default=False,
- help="""The shell shall write to standard error a trace for each command \
- after it expands the command and before it executes it. It is unspecified \
- whether the command that turns tracing off is traced.""")
-
-def builtin_set(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_SET.parse_args(args)
- env = interp.get_env()
-
- if option.has_f:
- env.set_opt('-f')
- if option.has_e:
- env.set_opt('-e')
- if option.has_x:
- env.set_opt('-x')
- return 0
-
-#-------------------------------------------------------------------------------
-# shift special builtin
-#-------------------------------------------------------------------------------
-def builtin_shift(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- params = interp.get_env().get_positional_args()
- if args:
- try:
- n = int(args[0])
- if n > len(params):
- raise ValueError()
- except ValueError:
- return 1
- else:
- n = 1
-
- params[:n] = []
- interp.get_env().set_positional_args(params)
- return 0
-
-#-------------------------------------------------------------------------------
-# export special builtin
-#-------------------------------------------------------------------------------
-OPT_EXPORT = NonExitingParser(usage="set - set or unset options and positional parameters")
-OPT_EXPORT.add_option('-p', action='store_true', dest='has_p', default=False)
-
-def builtin_export(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_EXPORT.parse_args(args)
- if option.has_p:
- raise NotImplementedError()
-
- for arg in args:
- try:
- name, value = arg.split('=', 1)
- except ValueError:
- name, value = arg, None
- env = interp.get_env().export(name, value)
-
- return 0
-
-#-------------------------------------------------------------------------------
-# return special builtin
-#-------------------------------------------------------------------------------
-def builtin_return(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- res = 0
- if args:
- try:
- res = int(args[0])
- except ValueError:
- res = 0
- if not 0<=res<=255:
- res = 0
-
- # BUG: should be last executed command exit code
- raise ReturnSignal(res)
-
-#-------------------------------------------------------------------------------
-# trap special builtin
-#-------------------------------------------------------------------------------
-def builtin_trap(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- if len(args) < 2:
- stderr.write('trap: usage: trap [[arg] signal_spec ...]\n')
- return 2
-
- action = args[0]
- for sig in args[1:]:
- try:
- env.traps[sig] = action
- except Exception as e:
- stderr.write('trap: %s\n' % str(e))
- return 0
-
-#-------------------------------------------------------------------------------
-# unset special builtin
-#-------------------------------------------------------------------------------
-OPT_UNSET = NonExitingParser("unset - unset values and attributes of variables and functions")
-OPT_UNSET.add_option( '-f', action='store_true', dest='has_f', default=False)
-OPT_UNSET.add_option( '-v', action='store_true', dest='has_v', default=False)
-
-def builtin_unset(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_UNSET.parse_args(args)
-
- status = 0
- env = interp.get_env()
- for arg in args:
- try:
- if option.has_f:
- env.remove_function(arg)
- else:
- del env[arg]
- except KeyError:
- pass
- except VarAssignmentError:
- status = 1
-
- return status
-
-#-------------------------------------------------------------------------------
-# wait special builtin
-#-------------------------------------------------------------------------------
-def builtin_wait(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return interp.wait([int(arg) for arg in args])
-
-#-------------------------------------------------------------------------------
-# cat utility
-#-------------------------------------------------------------------------------
-def utility_cat(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- if not args:
- args = ['-']
-
- status = 0
- for arg in args:
- if arg == '-':
- data = stdin.read()
- else:
- path = os.path.join(env['PWD'], arg)
- try:
- f = file(path, 'rb')
- try:
- data = f.read()
- finally:
- f.close()
- except IOError as e:
- if e.errno != errno.ENOENT:
- raise
- status = 1
- continue
- stdout.write(data)
- stdout.flush()
- return status
-
-#-------------------------------------------------------------------------------
-# cd utility
-#-------------------------------------------------------------------------------
-OPT_CD = NonExitingParser("cd - change the working directory")
-
-def utility_cd(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_CD.parse_args(args)
- env = interp.get_env()
-
- directory = None
- printdir = False
- if not args:
- home = env.get('HOME')
- if home:
- # Unspecified, do nothing
- return 0
- else:
- directory = home
- elif len(args)==1:
- directory = args[0]
- if directory=='-':
- if 'OLDPWD' not in env:
- raise UtilityError("OLDPWD not set")
- printdir = True
- directory = env['OLDPWD']
- else:
- raise UtilityError("too many arguments")
-
- curpath = None
- # Absolute directories will be handled correctly by the os.path.join call.
- if not directory.startswith('.') and not directory.startswith('..'):
- cdpaths = env.get('CDPATH', '.').split(';')
- for cdpath in cdpaths:
- p = os.path.join(cdpath, directory)
- if os.path.isdir(p):
- curpath = p
- break
-
- if curpath is None:
- curpath = directory
- curpath = os.path.join(env['PWD'], directory)
-
- env['OLDPWD'] = env['PWD']
- env['PWD'] = curpath
- if printdir:
- stdout.write('%s\n' % curpath)
- return 0
-
-#-------------------------------------------------------------------------------
-# colon utility
-#-------------------------------------------------------------------------------
-def utility_colon(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- return 0
-
-#-------------------------------------------------------------------------------
-# echo utility
-#-------------------------------------------------------------------------------
-def utility_echo(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- # Echo only takes arguments, no options. Use printf if you need fancy stuff.
- output = ' '.join(args) + '\n'
- stdout.write(output)
- stdout.flush()
- return 0
-
-#-------------------------------------------------------------------------------
-# egrep utility
-#-------------------------------------------------------------------------------
-# egrep is usually a shell script.
-# Unfortunately, pysh does not support shell scripts *with arguments* right now,
-# so the redirection is implemented here, assuming grep is available.
-def utility_egrep(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return run_command('grep', ['-E'] + args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# env utility
-#-------------------------------------------------------------------------------
-def utility_env(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- if args and args[0]=='-i':
- raise NotImplementedError('env: -i option is not implemented')
-
- i = 0
- for arg in args:
- if '=' not in arg:
- break
- # Update the current environment
- name, value = arg.split('=', 1)
- env[name] = value
- i += 1
-
- if args[i:]:
- # Find then execute the specified interpreter
- utility = env.find_in_path(args[i])
- if not utility:
- return 127
- args[i:i+1] = utility
- name = args[i]
- args = args[i+1:]
- try:
- return run_command(name, args, interp, env, stdin, stdout, stderr,
- debugflags)
- except UtilityError:
- stderr.write('env: failed to execute %s' % ' '.join([name]+args))
- return 126
- else:
- for pair in env.get_variables().iteritems():
- stdout.write('%s=%s\n' % pair)
- return 0
-
-#-------------------------------------------------------------------------------
-# exit utility
-#-------------------------------------------------------------------------------
-def utility_exit(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- res = None
- if args:
- try:
- res = int(args[0])
- except ValueError:
- res = None
- if not 0<=res<=255:
- res = None
-
- if res is None:
- # BUG: should be last executed command exit code
- res = 0
-
- raise ExitSignal(res)
-
-#-------------------------------------------------------------------------------
-# fgrep utility
-#-------------------------------------------------------------------------------
-# see egrep
-def utility_fgrep(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return run_command('grep', ['-F'] + args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# gunzip utility
-#-------------------------------------------------------------------------------
-# see egrep
-def utility_gunzip(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return run_command('gzip', ['-d'] + args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# kill utility
-#-------------------------------------------------------------------------------
-def utility_kill(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- for arg in args:
- pid = int(arg)
- status = subprocess.call(['pskill', '/T', str(pid)],
- shell=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- # pskill is asynchronous, hence the stupid polling loop
- while 1:
- p = subprocess.Popen(['pslist', str(pid)],
- shell=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- output = p.communicate()[0]
- if ('process %d was not' % pid) in output:
- break
- time.sleep(1)
- return status
-
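
pskill returns before the target actually dies, hence the polling loop above. On POSIX the same wait-until-dead idiom is usually written with a signal-0 probe; a hedged sketch:

    import os
    import time

    def wait_until_dead(pid, interval=1.0, timeout=30.0):
        # Probe the pid with signal 0 until it disappears or we time out.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            try:
                os.kill(pid, 0)      # existence check only, no signal delivered
            except ProcessLookupError:
                return True          # process is gone
            time.sleep(interval)
        return False
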
-#-------------------------------------------------------------------------------
-# mkdir utility
-#-------------------------------------------------------------------------------
-OPT_MKDIR = NonExitingParser("mkdir - make directories.")
-OPT_MKDIR.add_option('-p', action='store_true', dest='has_p', default=False)
-
-def utility_mkdir(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- # TODO: implement umask
- # TODO: implement proper utility error report
- option, args = OPT_MKDIR.parse_args(args)
- for arg in args:
- path = os.path.join(env['PWD'], arg)
- if option.has_p:
- try:
- os.makedirs(path)
-            except OSError as e:
- if e.errno != errno.EEXIST:
- raise
- else:
- os.mkdir(path)
- return 0
-
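
On Python 2 the -p branch needs the explicit EEXIST check; Python 3.2+ expresses the same intent directly (a sketch, not part of the deleted file):

    import os

    def mkdir_p(path):
        # exist_ok ignores already-existing directories, like mkdir -p;
        # it still raises FileExistsError if path exists as a non-directory.
        os.makedirs(path, exist_ok=True)
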
-#-------------------------------------------------------------------------------
-# netstat utility
-#-------------------------------------------------------------------------------
-def utility_netstat(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    # Do you really expect me to implement netstat?
-    # This empty form is enough for the Mercurial tests, since a successful run
-    # is supposed to generate no output. Faking this test is not a big deal
-    # either.
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- return 0
-
-#-------------------------------------------------------------------------------
-# pwd utility
-#-------------------------------------------------------------------------------
-OPT_PWD = NonExitingParser("pwd - return working directory name")
-OPT_PWD.add_option('-L', action='store_true', dest='has_L', default=True,
- help="""If the PWD environment variable contains an absolute pathname of \
- the current directory that does not contain the filenames dot or dot-dot, \
- pwd shall write this pathname to standard output. Otherwise, the -L option \
- shall behave as the -P option.""")
-OPT_PWD.add_option('-P', action='store_false', dest='has_L',
- help="""The absolute pathname written shall not contain filenames that, in \
- the context of the pathname, refer to files of type symbolic link.""")
-
-def utility_pwd(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_PWD.parse_args(args)
- stdout.write('%s\n' % env['PWD'])
- return 0
-
-#-------------------------------------------------------------------------------
-# printf utility
-#-------------------------------------------------------------------------------
-RE_UNESCAPE = re.compile(r'(\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\.)')
-
-def utility_printf(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- def replace(m):
- assert m.group()
- g = m.group()[1:]
- if g.startswith('x'):
- return chr(int(g[1:], 16))
- if len(g) <= 3 and len([c for c in g if c in '01234567']) == len(g):
- # Yay, an octal number
- return chr(int(g, 8))
- return {
- 'a': '\a',
- 'b': '\b',
- 'f': '\f',
- 'n': '\n',
- 'r': '\r',
- 't': '\t',
- 'v': '\v',
- '\\': '\\',
- }.get(g)
-
- # Convert escape sequences
- format = re.sub(RE_UNESCAPE, replace, args[0])
- stdout.write(format % tuple(args[1:]))
- return 0
-
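
RE_UNESCAPE plus replace() implement a small subset of printf escape processing: \xHH, octal \NNN, and the single-character C escapes. A self-contained Python 3 version with the hex character class fixed, plus a round trip (unknown escapes fall back to the bare character here, where the deleted code returned None and would have crashed re.sub):

    import re

    RE_UNESCAPE = re.compile(r'(\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\.)')

    def unescape(fmt):
        # Expand \xHH, \NNN and the C-style single-character escapes.
        def replace(m):
            g = m.group()[1:]
            if g.startswith('x') and len(g) == 3:
                return chr(int(g[1:], 16))          # \xHH hexadecimal escape
            if all(c in '01234567' for c in g):
                return chr(int(g, 8))               # \NNN octal escape
            return {'a': '\a', 'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r',
                    't': '\t', 'v': '\v', '\\': '\\'}.get(g, g)
        return RE_UNESCAPE.sub(replace, fmt)

    assert unescape(r'\x41\102\n') == 'AB\n'
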
-#-------------------------------------------------------------------------------
-# true utility
-#-------------------------------------------------------------------------------
-def utility_true(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- return 0
-
-#-------------------------------------------------------------------------------
-# sed utility
-#-------------------------------------------------------------------------------
-RE_SED = re.compile(r'^s(.).*\1[a-zA-Z]*$')
-
-# cygwin sed fails with some expressions when they do not end with a single space.
-# See the unit tests for details. Interestingly, the same expressions work
-# perfectly in the cygwin shell.
-def utility_sed(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- # Scan pattern arguments and append a space if necessary
- for i in range(len(args)):
- if not RE_SED.search(args[i]):
- continue
- args[i] = args[i] + ' '
-
- return run_command(name, args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# sleep utility
-#-------------------------------------------------------------------------------
-def utility_sleep(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- time.sleep(int(args[0]))
- return 0
-
-#-------------------------------------------------------------------------------
-# sort utility
-#-------------------------------------------------------------------------------
-OPT_SORT = NonExitingParser("sort - sort, merge, or sequence check text files")
-
-def utility_sort(name, args, interp, env, stdin, stdout, stderr, debugflags):
-
- def sort(path):
- if path == '-':
- lines = stdin.readlines()
- else:
- try:
- f = file(path)
- try:
- lines = f.readlines()
- finally:
- f.close()
-            except IOError as e:
-                stderr.write(str(e) + '\n')
-                return []  # must be a list: the caller concatenates the results
-
- if lines and lines[-1][-1]!='\n':
- lines[-1] = lines[-1] + '\n'
- return lines
-
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_SORT.parse_args(args)
- alllines = []
-
- if len(args)<=0:
- args += ['-']
-
-    # Load the lines of all the files
- curdir = os.getcwd()
- try:
- os.chdir(env['PWD'])
- for path in args:
- alllines += sort(path)
- finally:
- os.chdir(curdir)
-
- alllines.sort()
- for line in alllines:
- stdout.write(line)
- return 0
-
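
The newline patch-up in sort() matters when concatenating inputs: a file whose last line lacks its terminator would otherwise sort oddly against terminated lines. For example:

    lines = ['b\n', 'a']        # last line is missing its terminator
    if lines and not lines[-1].endswith('\n'):
        lines[-1] += '\n'
    assert sorted(lines) == ['a\n', 'b\n']
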
-#-------------------------------------------------------------------------------
-# hg utility
-#-------------------------------------------------------------------------------
-
-hgcommands = [
- 'add',
- 'addremove',
- 'commit', 'ci',
- 'debugrename',
- 'debugwalk',
- 'falabala', # Dummy command used in a mercurial test
- 'incoming',
- 'locate',
- 'pull',
- 'push',
- 'qinit',
- 'remove', 'rm',
- 'rename', 'mv',
- 'revert',
- 'showconfig',
- 'status', 'st',
- 'strip',
- ]
-
-def rewriteslashes(name, args):
-    # Several hg commands output file paths; rewrite the separators
- if len(args) > 1 and name.lower().endswith('python') \
- and args[0].endswith('hg'):
- for cmd in hgcommands:
- if cmd in args[1:]:
- return True
-
-    # svn output contains many paths with OS-specific separators.
-    # Normalize these to unix paths.
- base = os.path.basename(name)
- if base.startswith('svn'):
- return True
-
- return False
-
-def rewritehg(output):
- if not output:
- return output
-    # Rewrite OS-specific messages
- output = output.replace(': The system cannot find the file specified',
- ': No such file or directory')
- output = re.sub(': Access is denied.*$', ': Permission denied', output)
- output = output.replace(': No connection could be made because the target machine actively refused it',
- ': Connection refused')
- return output
-
-
-def run_command(name, args, interp, env, stdin, stdout,
- stderr, debugflags):
- # Execute the command
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- hgbin = interp.options().hgbinary
- ishg = hgbin and ('hg' in name or args and 'hg' in args[0])
- unixoutput = 'cygwin' in name or ishg
-
- exec_env = env.get_variables()
- try:
-        # BUG: comparing file descriptors is clearly not a reliable way to tell
-        # whether they point to the same underlying object. But in pysh's limited
-        # scope this is usually right; we do not expect complicated redirections
-        # besides the usual 2>&1.
-        # Still, there is one case we cannot deal with: when stdout and stderr
-        # are redirected *by the pysh caller*. This is the reason for the
-        # --redirected pysh() option.
-        # Now, we want to know whether they are the same because we sometimes
-        # need to transform the command output, mostly removing CR-LF to ensure
-        # that command output is unix-like. Cygwin utilities are a special case
-        # because they explicitly set their output streams to binary mode, so we
-        # have nothing to do. For all other commands, we have to guess whether
-        # they are sending text data, in which case the transformation must be
-        # done. Again, the NUL character test is unreliable but should be enough
-        # for the hg tests.
- redirected = stdout.fileno()==stderr.fileno()
- if not redirected:
- p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env,
- stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- else:
- p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env,
- stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- out, err = p.communicate()
- except WindowsError as e:
- raise UtilityError(str(e))
-
- if not unixoutput:
- def encode(s):
- if '\0' in s:
- return s
- return s.replace('\r\n', '\n')
- else:
- encode = lambda s: s
-
- if rewriteslashes(name, args):
- encode1_ = encode
- def encode(s):
- s = encode1_(s)
- s = s.replace('\\\\', '\\')
- s = s.replace('\\', '/')
- return s
-
- if ishg:
- encode2_ = encode
- def encode(s):
- return rewritehg(encode2_(s))
-
- stdout.write(encode(out))
- if not redirected:
- stderr.write(encode(err))
- return p.returncode
-
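
run_command makes two guesses: identical stdout/stderr descriptors mean the caller asked for 2>&1, and output containing NUL is binary and must not have its CR-LF normalized. Both decisions, distilled into a Python 3 sketch (run_merged is a name invented here; stdout and stderr are assumed to be binary file objects with fileno()):

    import subprocess

    def run_merged(cmd, stdout, stderr, cwd, env):
        # Identical descriptors: treat as a 2>&1 merge, as run_command does.
        merged = stdout.fileno() == stderr.fileno()
        proc = subprocess.run(
            cmd, cwd=cwd, env=env, stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT if merged else subprocess.PIPE)

        def normalize(data):
            # A NUL byte suggests binary data: pass it through untouched.
            return data if b'\0' in data else data.replace(b'\r\n', b'\n')

        stdout.write(normalize(proc.stdout))
        if not merged:
            stderr.write(normalize(proc.stderr))
        return proc.returncode
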
diff --git a/poky/bitbake/lib/bb/pysh/interp.py b/poky/bitbake/lib/bb/pysh/interp.py
deleted file mode 100644
index d14ecf3c6..000000000
--- a/poky/bitbake/lib/bb/pysh/interp.py
+++ /dev/null
@@ -1,1367 +0,0 @@
-# interp.py - shell interpreter for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""Implement the shell interpreter.
-
-Most references are made to "The Open Group Base Specifications Issue 6".
-<http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html>
-"""
-# TODO: document the fact that input streams must implement fileno() so Popen
-#       will work correctly. It requires non-stdin streams to be implemented as
-#       files. Still to be tested...
-# DOC: pathsep is used in PATH instead of ':'. Clearly, there are path syntax issues here.
-# TODO: stop command execution upon error.
-# TODO: sort out the filename/io_number mess. It should be possible to use filenames only.
-# TODO: review subshell implementation
-# TODO: test environment cloning for non-special builtins
-# TODO: set -x should not rebuild commands from tokens, assignments/redirections are lost
-# TODO: unit test for variable assignment
-# TODO: test error management wrt error type/utility type
-# TODO: test for binary output everywhere
-# BUG: debug-parsing does not pass log file to PLY. Maybe a PLY upgrade is necessary.
-import base64
-import cPickle as pickle
-import errno
-import glob
-import os
-import re
-import subprocess
-import sys
-import tempfile
-
-try:
- s = set()
- del s
-except NameError:
- from Set import Set as set
-
-import builtin
-from sherrors import *
-import pyshlex
-import pyshyacc
-
-def mappend(func, *args, **kargs):
- """Like map but assume func returns a list. Returned lists are merged into
- a single one.
- """
- return reduce(lambda a,b: a+b, map(func, *args, **kargs), [])
-
-class FileWrapper:
- """File object wrapper to ease debugging.
-
- Allow mode checking and implement file duplication through a simple
- reference counting scheme. Not sure the latter is really useful since
- only real file descriptors can be used.
- """
- def __init__(self, mode, file, close=True):
- if mode not in ('r', 'w', 'a'):
- raise IOError('invalid mode: %s' % mode)
- self._mode = mode
- self._close = close
- if isinstance(file, FileWrapper):
- if file._refcount[0] <= 0:
- raise IOError(0, 'Error')
- self._refcount = file._refcount
- self._refcount[0] += 1
- self._file = file._file
- else:
- self._refcount = [1]
- self._file = file
-
- def dup(self):
- return FileWrapper(self._mode, self, self._close)
-
- def fileno(self):
-        """fileno() should only be necessary for input streams."""
- return self._file.fileno()
-
- def read(self, size=-1):
- if self._mode!='r':
- raise IOError(0, 'Error')
- return self._file.read(size)
-
- def readlines(self, *args, **kwargs):
- return self._file.readlines(*args, **kwargs)
-
- def write(self, s):
- if self._mode not in ('w', 'a'):
- raise IOError(0, 'Error')
- return self._file.write(s)
-
- def flush(self):
- self._file.flush()
-
- def close(self):
- if not self._refcount:
- return
- assert self._refcount[0] > 0
-
- self._refcount[0] -= 1
- if self._refcount[0] == 0:
- self._mode = 'c'
- if self._close:
- self._file.close()
- self._refcount = None
-
- def mode(self):
- return self._mode
-
- def __getattr__(self, name):
- if name == 'name':
- self.name = getattr(self._file, name)
- return self.name
- else:
- raise AttributeError(name)
-
- def __del__(self):
- self.close()
-
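
FileWrapper.dup() shares a single mutable reference count between duplicates, so the underlying file is closed only when the last wrapper goes away. The contract in miniature, using the class above with an io.StringIO standing in for a real file:

    import io

    w1 = FileWrapper('w', io.StringIO(), close=True)
    w2 = w1.dup()      # shares w1's reference count
    w1.close()         # refcount drops to 1; the StringIO stays open
    w2.write('ok')     # still usable through the surviving duplicate
    w2.close()         # refcount hits 0, the StringIO is closed
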
-
-def win32_open_devnull(mode):
- return open('NUL', mode)
-
-
-class Redirections:
- """Stores open files and their mapping to pseudo-sh file descriptor.
- """
-    # BUG: redirections are not handled correctly: 1>&3 2>&3 3>&4 does
-    # not make 1 redirect to 4
- def __init__(self, stdin=None, stdout=None, stderr=None):
- self._descriptors = {}
- if stdin is not None:
- self._add_descriptor(0, stdin)
- if stdout is not None:
- self._add_descriptor(1, stdout)
- if stderr is not None:
- self._add_descriptor(2, stderr)
-
- def add_here_document(self, interp, name, content, io_number=None):
- if io_number is None:
- io_number = 0
-
- if name==pyshlex.unquote_wordtree(name):
- content = interp.expand_here_document(('TOKEN', content))
-
- # Write document content in a temporary file
- tmp = tempfile.TemporaryFile()
- try:
- tmp.write(content)
- tmp.flush()
- tmp.seek(0)
- self._add_descriptor(io_number, FileWrapper('r', tmp))
- except:
- tmp.close()
- raise
-
- def add(self, interp, op, filename, io_number=None):
- if op not in ('<', '>', '>|', '>>', '>&'):
- # TODO: add descriptor duplication and here_documents
- raise RedirectionError('Unsupported redirection operator "%s"' % op)
-
- if io_number is not None:
- io_number = int(io_number)
-
- if (op == '>&' and filename.isdigit()) or filename=='-':
- # No expansion for file descriptors, quote them if you want a filename
- fullname = filename
- else:
- if filename.startswith('/'):
- # TODO: win32 kludge
- if filename=='/dev/null':
- fullname = 'NUL'
- else:
- # TODO: handle absolute pathnames, they are unlikely to exist on the
- # current platform (win32 for instance).
- raise NotImplementedError()
- else:
- fullname = interp.expand_redirection(('TOKEN', filename))
- if not fullname:
- raise RedirectionError('%s: ambiguous redirect' % filename)
- # Build absolute path based on PWD
- fullname = os.path.join(interp.get_env()['PWD'], fullname)
-
- if op=='<':
- return self._add_input_redirection(interp, fullname, io_number)
- elif op in ('>', '>|'):
- clobber = ('>|'==op)
- return self._add_output_redirection(interp, fullname, io_number, clobber)
- elif op=='>>':
- return self._add_output_appending(interp, fullname, io_number)
- elif op=='>&':
- return self._dup_output_descriptor(fullname, io_number)
-
- def close(self):
- if self._descriptors is not None:
- for desc in self._descriptors.itervalues():
- desc.flush()
- desc.close()
- self._descriptors = None
-
- def stdin(self):
- return self._descriptors[0]
-
- def stdout(self):
- return self._descriptors[1]
-
- def stderr(self):
- return self._descriptors[2]
-
- def clone(self):
- clone = Redirections()
- for desc, fileobj in self._descriptors.iteritems():
- clone._descriptors[desc] = fileobj.dup()
- return clone
-
- def _add_output_redirection(self, interp, filename, io_number, clobber):
- if io_number is None:
-            # io_number defaults to standard output
- io_number = 1
-
- if not clobber and interp.get_env().has_opt('-C') and os.path.isfile(filename):
-            # The file already exists in no-clobber mode; bail out
- raise RedirectionError('File "%s" already exists' % filename)
-
- # Open and register
- self._add_file_descriptor(io_number, filename, 'w')
-
- def _add_output_appending(self, interp, filename, io_number):
- if io_number is None:
- io_number = 1
- self._add_file_descriptor(io_number, filename, 'a')
-
- def _add_input_redirection(self, interp, filename, io_number):
- if io_number is None:
- io_number = 0
- self._add_file_descriptor(io_number, filename, 'r')
-
- def _add_file_descriptor(self, io_number, filename, mode):
- try:
- if filename.startswith('/'):
- if filename=='/dev/null':
- f = win32_open_devnull(mode+'b')
- else:
- # TODO: handle absolute pathnames, they are unlikely to exist on the
- # current platform (win32 for instance).
- raise NotImplementedError('cannot open absolute path %s' % repr(filename))
- else:
- f = file(filename, mode+'b')
- except IOError as e:
- raise RedirectionError(str(e))
-
- wrapper = None
- try:
- wrapper = FileWrapper(mode, f)
- f = None
- self._add_descriptor(io_number, wrapper)
- except:
- if f: f.close()
- if wrapper: wrapper.close()
- raise
-
- def _dup_output_descriptor(self, source_fd, dest_fd):
- if source_fd is None:
- source_fd = 1
- self._dup_file_descriptor(source_fd, dest_fd, 'w')
-
- def _dup_file_descriptor(self, source_fd, dest_fd, mode):
- source_fd = int(source_fd)
- if source_fd not in self._descriptors:
- raise RedirectionError('"%s" is not a valid file descriptor' % str(source_fd))
- source = self._descriptors[source_fd]
-
- if source.mode()!=mode:
- raise RedirectionError('Descriptor %s cannot be duplicated in mode "%s"' % (str(source), mode))
-
- if dest_fd=='-':
- # Close the source descriptor
- del self._descriptors[source_fd]
- source.close()
- else:
- dest_fd = int(dest_fd)
- if dest_fd not in self._descriptors:
- raise RedirectionError('Cannot replace file descriptor %s' % str(dest_fd))
-
- dest = self._descriptors[dest_fd]
- if dest.mode()!=mode:
-                raise RedirectionError('Descriptor %s cannot be redirected in mode "%s"' % (str(dest), mode))
-
- self._descriptors[dest_fd] = source.dup()
- dest.close()
-
- def _add_descriptor(self, io_number, file):
- io_number = int(io_number)
-
- if io_number in self._descriptors:
- # Close the current descriptor
- d = self._descriptors[io_number]
- del self._descriptors[io_number]
- d.close()
-
- self._descriptors[io_number] = file
-
- def __str__(self):
- names = [('%d=%r' % (k, getattr(v, 'name', None))) for k,v
- in self._descriptors.iteritems()]
- names = ','.join(names)
- return 'Redirections(%s)' % names
-
- def __del__(self):
- self.close()
-
-def cygwin_to_windows_path(path):
- """Turn /cygdrive/c/foo into c:/foo, or return path if it
- is not a cygwin path.
- """
- if not path.startswith('/cygdrive/'):
- return path
- path = path[len('/cygdrive/'):]
- path = path[:1] + ':' + path[1:]
- return path
-
-def win32_to_unix_path(path):
- if path is not None:
- path = path.replace('\\', '/')
- return path
-
-_RE_SHEBANG = re.compile(r'^\#!\s?([^\s]+)(?:\s([^\s]+))?')
-_SHEBANG_CMDS = {
- '/usr/bin/env': 'env',
- '/bin/sh': 'pysh',
- 'python': 'python',
-}
-
-def resolve_shebang(path, ignoreshell=False):
- """Return a list of arguments as shebang interpreter call or an empty list
- if path does not refer to an executable script.
- See <http://www.opengroup.org/austin/docs/austin_51r2.txt>.
-
- ignoreshell - set to True to ignore sh shebangs. Return an empty list instead.
- """
- try:
- f = file(path)
- try:
- # At most 80 characters in the first line
- header = f.read(80).splitlines()[0]
- finally:
- f.close()
-
- m = _RE_SHEBANG.search(header)
- if not m:
- return []
- cmd, arg = m.group(1,2)
- if os.path.isfile(cmd):
-            # Keep this one; the hg script, for instance, contains a weird
-            # windows shebang referencing the current python install.
- cmdfile = os.path.basename(cmd).lower()
-            if cmdfile == 'python.exe':
-                cmd = 'python'
- elif cmd not in _SHEBANG_CMDS:
- raise CommandNotFound('Unknown interpreter "%s" referenced in '\
- 'shebang' % header)
- cmd = _SHEBANG_CMDS.get(cmd)
- if cmd is None or (ignoreshell and cmd == 'pysh'):
- return []
- if arg is None:
- return [cmd, win32_to_unix_path(path)]
- return [cmd, arg, win32_to_unix_path(path)]
- except IOError as e:
- if e.errno!=errno.ENOENT and \
- (e.errno!=errno.EPERM and not os.path.isdir(path)): # Opening a directory raises EPERM
- raise
- return []
-
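
resolve_shebang turns a script's #! line into an interpreter argv via _SHEBANG_CMDS, so '#!/usr/bin/env python' becomes ['env', 'python', <script path>]. The header parsing itself is easy to probe (the regex is repeated here so the snippet runs standalone):

    import re

    _RE_SHEBANG = re.compile(r'^\#!\s?([^\s]+)(?:\s([^\s]+))?')
    m = _RE_SHEBANG.search('#!/usr/bin/env python')
    assert m.group(1, 2) == ('/usr/bin/env', 'python')
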
-def win32_find_in_path(name, path):
- if isinstance(path, str):
- path = path.split(os.pathsep)
-
- exts = os.environ.get('PATHEXT', '').lower().split(os.pathsep)
- for p in path:
- p_name = os.path.join(p, name)
-
- prefix = resolve_shebang(p_name)
- if prefix:
- return prefix
-
- for ext in exts:
- p_name_ext = p_name + ext
- if os.path.exists(p_name_ext):
- return [win32_to_unix_path(p_name_ext)]
- return []
-
-class Traps(dict):
- def __setitem__(self, key, value):
- if key not in ('EXIT',):
- raise NotImplementedError()
- super(Traps, self).__setitem__(key, value)
-
-# IFS whitespace characters
-_IFS_WHITESPACES = (' ', '\t', '\n')
-
-class Environment:
- """Environment holds environment variables, export table, function
- definitions and whatever is defined in 2.12 "Shell Execution Environment",
- redirection excepted.
- """
- def __init__(self, pwd):
- self._opt = set() #Shell options
-
- self._functions = {}
- self._env = {'?': '0', '#': '0'}
- self._exported = set([
- 'HOME', 'IFS', 'PATH'
- ])
-
- # Set environment vars with side-effects
- self._ifs_ws = None # Set of IFS whitespace characters
- self._ifs_re = None # Regular expression used to split between words using IFS classes
- self['IFS'] = ''.join(_IFS_WHITESPACES) #Default environment values
- self['PWD'] = pwd
- self.traps = Traps()
-
- def clone(self, subshell=False):
- env = Environment(self['PWD'])
- env._opt = set(self._opt)
- for k,v in self.get_variables().iteritems():
- if k in self._exported:
- env.export(k,v)
- elif subshell:
- env[k] = v
-
- if subshell:
- env._functions = dict(self._functions)
-
- return env
-
- def __getitem__(self, key):
- if key in ('@', '*', '-', '$'):
- raise NotImplementedError('%s is not implemented' % repr(key))
- return self._env[key]
-
- def get(self, key, defval=None):
- try:
- return self[key]
- except KeyError:
- return defval
-
- def __setitem__(self, key, value):
- if key=='IFS':
- # Update the whitespace/non-whitespace classes
- self._update_ifs(value)
- elif key=='PWD':
- pwd = os.path.abspath(value)
- if not os.path.isdir(pwd):
- raise VarAssignmentError('Invalid directory %s' % value)
- value = pwd
- elif key in ('?', '!'):
- value = str(int(value))
- self._env[key] = value
-
- def __delitem__(self, key):
- if key in ('IFS', 'PWD', '?'):
- raise VarAssignmentError('%s cannot be unset' % key)
- del self._env[key]
-
- def __contains__(self, item):
- return item in self._env
-
- def set_positional_args(self, args):
-        """Set the content of 'args' as positional arguments from 1 to len(args).
-        Return the previous arguments as a list of strings.
- """
- # Save and remove previous arguments
- prevargs = []
- for i in range(int(self._env['#'])):
- i = str(i+1)
- prevargs.append(self._env[i])
- del self._env[i]
- self._env['#'] = '0'
-
- #Set new ones
- for i,arg in enumerate(args):
- self._env[str(i+1)] = str(arg)
- self._env['#'] = str(len(args))
-
- return prevargs
-
- def get_positional_args(self):
- return [self._env[str(i+1)] for i in range(int(self._env['#']))]
-
- def get_variables(self):
- return dict(self._env)
-
- def export(self, key, value=None):
- if value is not None:
- self[key] = value
- self._exported.add(key)
-
- def get_exported(self):
- return [(k,self._env.get(k)) for k in self._exported]
-
- def split_fields(self, word):
- if not self._ifs_ws or not word:
- return [word]
- return re.split(self._ifs_re, word)
-
- def _update_ifs(self, value):
- """Update the split_fields related variables when IFS character set is
- changed.
- """
- # TODO: handle NULL IFS
-
- # Separate characters in whitespace and non-whitespace
- chars = set(value)
- ws = [c for c in chars if c in _IFS_WHITESPACES]
- nws = [c for c in chars if c not in _IFS_WHITESPACES]
-
- # Keep whitespaces in a string for left and right stripping
- self._ifs_ws = ''.join(ws)
-
- # Build a regexp to split fields
- trailing = '[' + ''.join([re.escape(c) for c in ws]) + ']'
- if nws:
-            # First, the single non-whitespace occurrence.
- nws = '[' + ''.join([re.escape(c) for c in nws]) + ']'
- nws = '(?:' + trailing + '*' + nws + trailing + '*' + '|' + trailing + '+)'
- else:
-            # Whitespace-only IFS: a run of IFS whitespace is the only separator
- nws = trailing + '+'
- self._ifs_re = re.compile(nws)
-
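
_update_ifs compiles IFS into a single splitting regex per POSIX 2.6.5: runs of IFS whitespace collapse into one separator, while each non-whitespace IFS character delimits a field on its own, optionally padded by IFS whitespace. For IFS = ' :' the construction above yields, in effect:

    import re

    # Equivalent of what _update_ifs builds for IFS = ' :'.
    ifs_re = re.compile(r'(?:[ ]*[:][ ]*|[ ]+)')
    assert re.split(ifs_re, 'a : b  c') == ['a', 'b', 'c']
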
- def has_opt(self, opt, val=None):
- return (opt, val) in self._opt
-
- def set_opt(self, opt, val=None):
- self._opt.add((opt, val))
-
- def find_in_path(self, name, pwd=False):
- path = self._env.get('PATH', '').split(os.pathsep)
- if pwd:
- path[:0] = [self['PWD']]
- if os.name == 'nt':
- return win32_find_in_path(name, self._env.get('PATH', ''))
- else:
- raise NotImplementedError()
-
- def define_function(self, name, body):
- if not is_name(name):
- raise ShellSyntaxError('%s is not a valid function name' % repr(name))
- self._functions[name] = body
-
- def remove_function(self, name):
- del self._functions[name]
-
- def is_function(self, name):
- return name in self._functions
-
- def get_function(self, name):
- return self._functions.get(name)
-
-
-name_charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
-name_charset = dict(zip(name_charset,name_charset))
-
-def match_name(s):
-    """Return the longest prefix of s made of name-allowed characters.
-    """
- for i,c in enumerate(s):
- if c not in name_charset:
- return s[:i]
- return s
-
-def is_name(s):
- return len([c for c in s if c not in name_charset])<=0
-
-def is_special_param(c):
- return len(c)==1 and c in ('@','*','#','?','-','$','!','0')
-
-def utility_not_implemented(name, *args, **kwargs):
- raise NotImplementedError('%s utility is not implemented' % name)
-
-
-class Utility:
-    """Define a utility's properties:
- func -- utility callable. See builtin module for utility samples.
- is_special -- see XCU 2.8.
- """
- def __init__(self, func, is_special=0):
- self.func = func
- self.is_special = bool(is_special)
-
-
-def encodeargs(args):
- def encodearg(s):
- lines = base64.encodestring(s)
- lines = [l.splitlines()[0] for l in lines]
- return ''.join(lines)
-
- s = pickle.dumps(args)
- return encodearg(s)
-
-def decodeargs(s):
- s = base64.decodestring(s)
- return pickle.loads(s)
-
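
encodeargs/decodeargs shuttle a pickled (variables, AST) pair to a child pysh process through a single argv-safe base64 string (see _asynclist below). The same round trip on Python 3, where encodestring/decodestring became encodebytes/decodebytes:

    import base64
    import pickle

    def encodeargs(args):
        # Pickle, base64-encode, and strip newlines so it fits in one argv slot.
        return base64.encodebytes(pickle.dumps(args)).decode('ascii').replace('\n', '')

    def decodeargs(s):
        return pickle.loads(base64.decodebytes(s.encode('ascii')))

    env, ast = decodeargs(encodeargs(({'PWD': '/tmp'}, None)))
    assert env == {'PWD': '/tmp'}
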
-
-class GlobError(Exception):
- pass
-
-class Options:
- def __init__(self):
- # True if Mercurial operates with binary streams
- self.hgbinary = True
-
-class Interpreter:
-    # Implementation is very basic: the execute() method just makes a DFS on the
-    # AST and executes nodes one by one. Nodes are tuples (name, obj) where name
-    # is a string identifier and obj is the AST element returned by the parser.
-    #
-    # Handlers are named after the node identifiers.
- # TODO: check node names and remove the switch in execute with some
- # dynamic getattr() call to find node handlers.
- """Shell interpreter.
-
- The following debugging flags can be passed:
- debug-parsing - enable PLY debugging.
- debug-tree - print the generated AST.
- debug-cmd - trace command execution before word expansion, plus exit status.
- debug-utility - trace utility execution.
- """
-
- # List supported commands.
- COMMANDS = {
- 'cat': Utility(builtin.utility_cat,),
- 'cd': Utility(builtin.utility_cd,),
- ':': Utility(builtin.utility_colon,),
- 'echo': Utility(builtin.utility_echo),
- 'env': Utility(builtin.utility_env),
- 'exit': Utility(builtin.utility_exit),
- 'export': Utility(builtin.builtin_export, is_special=1),
- 'egrep': Utility(builtin.utility_egrep),
- 'fgrep': Utility(builtin.utility_fgrep),
- 'gunzip': Utility(builtin.utility_gunzip),
- 'kill': Utility(builtin.utility_kill),
- 'mkdir': Utility(builtin.utility_mkdir),
- 'netstat': Utility(builtin.utility_netstat),
- 'printf': Utility(builtin.utility_printf),
- 'pwd': Utility(builtin.utility_pwd),
- 'return': Utility(builtin.builtin_return, is_special=1),
- 'sed': Utility(builtin.utility_sed,),
- 'set': Utility(builtin.builtin_set,),
- 'shift': Utility(builtin.builtin_shift,),
- 'sleep': Utility(builtin.utility_sleep,),
- 'sort': Utility(builtin.utility_sort,),
- 'trap': Utility(builtin.builtin_trap, is_special=1),
- 'true': Utility(builtin.utility_true),
- 'unset': Utility(builtin.builtin_unset, is_special=1),
- 'wait': Utility(builtin.builtin_wait, is_special=1),
- }
-
- def __init__(self, pwd, debugflags = [], env=None, redirs=None, stdin=None,
- stdout=None, stderr=None, opts=Options()):
- self._env = env
- if self._env is None:
- self._env = Environment(pwd)
- self._children = {}
-
- self._redirs = redirs
- self._close_redirs = False
-
- if self._redirs is None:
- if stdin is None:
- stdin = sys.stdin
- if stdout is None:
- stdout = sys.stdout
- if stderr is None:
- stderr = sys.stderr
- stdin = FileWrapper('r', stdin, False)
- stdout = FileWrapper('w', stdout, False)
- stderr = FileWrapper('w', stderr, False)
- self._redirs = Redirections(stdin, stdout, stderr)
- self._close_redirs = True
-
- self._debugflags = list(debugflags)
- self._logfile = sys.stderr
- self._options = opts
-
- def close(self):
- """Must be called when the interpreter is no longer used."""
- script = self._env.traps.get('EXIT')
- if script:
- try:
- self.execute_script(script=script)
- except:
- pass
-
- if self._redirs is not None and self._close_redirs:
- self._redirs.close()
- self._redirs = None
-
- def log(self, s):
- self._logfile.write(s)
- self._logfile.flush()
-
- def __getitem__(self, key):
- return self._env[key]
-
- def __setitem__(self, key, value):
- self._env[key] = value
-
- def options(self):
- return self._options
-
- def redirect(self, redirs, ios):
- def add_redir(io):
- if isinstance(io, pyshyacc.IORedirect):
- redirs.add(self, io.op, io.filename, io.io_number)
- else:
- redirs.add_here_document(self, io.name, io.content, io.io_number)
-
- map(add_redir, ios)
- return redirs
-
- def execute_script(self, script=None, ast=None, sourced=False,
- scriptpath=None):
-        """If script is not None, parse the input. Otherwise take the supplied
-        AST. Then execute the AST.
- Return the script exit status.
- """
- try:
- if scriptpath is not None:
- self._env['0'] = os.path.abspath(scriptpath)
-
- if script is not None:
- debug_parsing = ('debug-parsing' in self._debugflags)
- cmds, script = pyshyacc.parse(script, True, debug_parsing)
- if 'debug-tree' in self._debugflags:
- pyshyacc.print_commands(cmds, self._logfile)
- self._logfile.flush()
- else:
- cmds, script = ast, ''
-
- status = 0
- for cmd in cmds:
- try:
- status = self.execute(cmd)
- except ExitSignal as e:
- if sourced:
- raise
- status = int(e.args[0])
- return status
- except ShellError:
- self._env['?'] = 1
- raise
- if 'debug-utility' in self._debugflags or 'debug-cmd' in self._debugflags:
- self.log('returncode ' + str(status)+ '\n')
- return status
- except CommandNotFound as e:
- print >>self._redirs.stderr, str(e)
- self._redirs.stderr.flush()
- # Command not found by non-interactive shell
- # return 127
- raise
- except RedirectionError as e:
- # TODO: should be handled depending on the utility status
- print >>self._redirs.stderr, str(e)
- self._redirs.stderr.flush()
- # Command not found by non-interactive shell
- # return 127
- raise
-
- def dotcommand(self, env, args):
- if len(args) < 1:
- raise ShellError('. expects at least one argument')
- path = args[0]
- if '/' not in path:
- found = env.find_in_path(args[0], True)
- if found:
- path = found[0]
- script = file(path).read()
- return self.execute_script(script=script, sourced=True)
-
- def execute(self, token, redirs=None):
-        """Execute an AST subtree with supplied redirections overriding the
-        default interpreter ones.
- Return the exit status.
- """
- if not token:
- return 0
-
- if redirs is None:
- redirs = self._redirs
-
- if isinstance(token, list):
- # Commands sequence
- res = 0
- for t in token:
- res = self.execute(t, redirs)
- return res
-
- type, value = token
- status = 0
- if type=='simple_command':
- redirs_copy = redirs.clone()
- try:
- # TODO: define and handle command return values
- # TODO: implement set -e
- status = self._execute_simple_command(value, redirs_copy)
- finally:
- redirs_copy.close()
- elif type=='pipeline':
- status = self._execute_pipeline(value, redirs)
- elif type=='and_or':
- status = self._execute_and_or(value, redirs)
- elif type=='for_clause':
- status = self._execute_for_clause(value, redirs)
- elif type=='while_clause':
- status = self._execute_while_clause(value, redirs)
- elif type=='function_definition':
- status = self._execute_function_definition(value, redirs)
- elif type=='brace_group':
- status = self._execute_brace_group(value, redirs)
- elif type=='if_clause':
- status = self._execute_if_clause(value, redirs)
- elif type=='subshell':
- status = self.subshell(ast=value.cmds, redirs=redirs)
- elif type=='async':
- status = self._asynclist(value)
- elif type=='redirect_list':
- redirs_copy = self.redirect(redirs.clone(), value.redirs)
- try:
- status = self.execute(value.cmd, redirs_copy)
- finally:
- redirs_copy.close()
- else:
- raise NotImplementedError('Unsupported token type ' + type)
-
- if status < 0:
- status = 255
- return status
-
- def _execute_if_clause(self, if_clause, redirs):
- cond_status = self.execute(if_clause.cond, redirs)
- if cond_status==0:
- return self.execute(if_clause.if_cmds, redirs)
- else:
- return self.execute(if_clause.else_cmds, redirs)
-
- def _execute_brace_group(self, group, redirs):
- status = 0
- for cmd in group.cmds:
- status = self.execute(cmd, redirs)
- return status
-
- def _execute_function_definition(self, fundef, redirs):
- self._env.define_function(fundef.name, fundef.body)
- return 0
-
- def _execute_while_clause(self, while_clause, redirs):
- status = 0
- while 1:
- cond_status = 0
- for cond in while_clause.condition:
- cond_status = self.execute(cond, redirs)
-
- if cond_status:
- break
-
- for cmd in while_clause.cmds:
- status = self.execute(cmd, redirs)
-
- return status
-
- def _execute_for_clause(self, for_clause, redirs):
- if not is_name(for_clause.name):
- raise ShellSyntaxError('%s is not a valid name' % repr(for_clause.name))
- items = mappend(self.expand_token, for_clause.items)
-
- status = 0
- for item in items:
- self._env[for_clause.name] = item
- for cmd in for_clause.cmds:
- status = self.execute(cmd, redirs)
- return status
-
- def _execute_and_or(self, or_and, redirs):
- res = self.execute(or_and.left, redirs)
- if (or_and.op=='&&' and res==0) or (or_and.op!='&&' and res!=0):
- res = self.execute(or_and.right, redirs)
- return res
-
- def _execute_pipeline(self, pipeline, redirs):
- if len(pipeline.commands)==1:
- status = self.execute(pipeline.commands[0], redirs)
- else:
- # Execute all commands one after the other
- status = 0
- inpath, outpath = None, None
- try:
-                # Command inputs and outputs cannot really be plugged together as
-                # done by a real shell. Run the commands sequentially and chain
-                # their input/output through temporary files.
- tmpfd, inpath = tempfile.mkstemp()
- os.close(tmpfd)
- tmpfd, outpath = tempfile.mkstemp()
- os.close(tmpfd)
-
- inpath = win32_to_unix_path(inpath)
- outpath = win32_to_unix_path(outpath)
-
- for i, cmd in enumerate(pipeline.commands):
- call_redirs = redirs.clone()
- try:
- if i!=0:
- call_redirs.add(self, '<', inpath)
- if i!=len(pipeline.commands)-1:
- call_redirs.add(self, '>', outpath)
-
- status = self.execute(cmd, call_redirs)
-
- # Chain inputs/outputs
- inpath, outpath = outpath, inpath
- finally:
- call_redirs.close()
- finally:
- if inpath: os.remove(inpath)
- if outpath: os.remove(outpath)
-
- if pipeline.reverse_status:
- status = int(not status)
- self._env['?'] = status
- return status
-
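
_execute_pipeline cannot run stages concurrently, so it fakes the pipe: each stage reads the previous stage's temp file and writes a second one, and the two paths swap roles between iterations. The file-juggling pattern in isolation (Python 3; run_stage stands in for self.execute):

    import os
    import tempfile

    def run_pipeline(stages, run_stage):
        # Two scratch files, swapped after every stage, emulate the pipe.
        fd, inpath = tempfile.mkstemp()
        os.close(fd)
        fd, outpath = tempfile.mkstemp()
        os.close(fd)
        try:
            status = 0
            for i, stage in enumerate(stages):
                stdin = inpath if i != 0 else None
                stdout = outpath if i != len(stages) - 1 else None
                status = run_stage(stage, stdin, stdout)
                inpath, outpath = outpath, inpath  # next stage reads our output
            return status
        finally:
            os.remove(inpath)
            os.remove(outpath)
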
- def _execute_function(self, name, args, interp, env, stdin, stdout, stderr, *others):
- assert interp is self
-
- func = env.get_function(name)
- #Set positional parameters
- prevargs = None
- try:
- prevargs = env.set_positional_args(args)
- try:
- redirs = Redirections(stdin.dup(), stdout.dup(), stderr.dup())
- try:
- status = self.execute(func, redirs)
- finally:
- redirs.close()
- except ReturnSignal as e:
- status = int(e.args[0])
- env['?'] = status
- return status
- finally:
- #Reset positional parameters
- if prevargs is not None:
- env.set_positional_args(prevargs)
-
- def _execute_simple_command(self, token, redirs):
- """Can raise ReturnSignal when return builtin is called, ExitSignal when
- exit is called, and other shell exceptions upon builtin failures.
- """
- debug_command = 'debug-cmd' in self._debugflags
- if debug_command:
- self.log('word' + repr(token.words) + '\n')
- self.log('assigns' + repr(token.assigns) + '\n')
- self.log('redirs' + repr(token.redirs) + '\n')
-
- is_special = None
- env = self._env
-
- try:
- # Word expansion
- args = []
- for word in token.words:
- args += self.expand_token(word)
- if is_special is None and args:
- is_special = env.is_function(args[0]) or \
- (args[0] in self.COMMANDS and self.COMMANDS[args[0]].is_special)
-
- if debug_command:
- self.log('_execute_simple_command' + str(args) + '\n')
-
- if not args:
-                # Redirections happen in a subshell
- redirs = redirs.clone()
- elif not is_special:
- env = self._env.clone()
-
- # Redirections
- self.redirect(redirs, token.redirs)
-
- # Variables assignments
- res = 0
- for type,(k,v) in token.assigns:
- status, expanded = self.expand_variable((k,v))
- if status is not None:
- res = status
- if args:
- env.export(k, expanded)
- else:
- env[k] = expanded
-
- if args and args[0] in ('.', 'source'):
- res = self.dotcommand(env, args[1:])
- elif args:
- if args[0] in self.COMMANDS:
- command = self.COMMANDS[args[0]]
- elif env.is_function(args[0]):
- command = Utility(self._execute_function, is_special=True)
- else:
-                    if '/' not in args[0].replace('\\', '/'):
- cmd = env.find_in_path(args[0])
- if not cmd:
- # TODO: test error code on unknown command => 127
- raise CommandNotFound('Unknown command: "%s"' % args[0])
- else:
- # Handle commands like '/cygdrive/c/foo.bat'
- cmd = cygwin_to_windows_path(args[0])
- if not os.path.exists(cmd):
- raise CommandNotFound('%s: No such file or directory' % args[0])
- shebang = resolve_shebang(cmd)
- if shebang:
- cmd = shebang
- else:
- cmd = [cmd]
- args[0:1] = cmd
- command = Utility(builtin.run_command)
-
- # Command execution
- if 'debug-cmd' in self._debugflags:
- self.log('redirections ' + str(redirs) + '\n')
-
- res = command.func(args[0], args[1:], self, env,
- redirs.stdin(), redirs.stdout(),
- redirs.stderr(), self._debugflags)
-
- if self._env.has_opt('-x'):
- # Trace command execution in shell environment
-                # BUG: it would be hard to reproduce real shell behaviour since
-                # the AST is not annotated with source lines/tokens.
- self._redirs.stdout().write(' '.join(args))
-
- except ReturnSignal:
- raise
- except ShellError as e:
- if is_special or isinstance(e, (ExitSignal,
- ShellSyntaxError, ExpansionError)):
- raise e
- self._redirs.stderr().write(str(e)+'\n')
- return 1
-
- return res
-
- def expand_token(self, word):
- """Expand a word as specified in [2.6 Word Expansions]. Return the list
- of expanded words.
- """
- status, wtrees = self._expand_word(word)
- return map(pyshlex.wordtree_as_string, wtrees)
-
- def expand_variable(self, word):
- """Return a status code (or None if no command expansion occurred)
- and a single word.
- """
- status, wtrees = self._expand_word(word, pathname=False, split=False)
- words = map(pyshlex.wordtree_as_string, wtrees)
- assert len(words)==1
- return status, words[0]
-
- def expand_here_document(self, word):
- """Return the expanded document as a single word. The here document is
- assumed to be unquoted.
- """
- status, wtrees = self._expand_word(word, pathname=False,
- split=False, here_document=True)
- words = map(pyshlex.wordtree_as_string, wtrees)
- assert len(words)==1
- return words[0]
-
- def expand_redirection(self, word):
- """Return a single word."""
- return self.expand_variable(word)[1]
-
- def get_env(self):
- return self._env
-
- def _expand_word(self, token, pathname=True, split=True, here_document=False):
- wtree = pyshlex.make_wordtree(token[1], here_document=here_document)
-
- # TODO: implement tilde expansion
- def expand(wtree):
-            """Return a pseudo wordtree: the tree or its subelements can be empty
-            lists when no value results from the expansion.
- """
- status = None
- for part in wtree:
- if not isinstance(part, list):
- continue
-                if part[0] in ("'", '\\'):
- continue
- elif part[0] in ('`', '$('):
- status, result = self._expand_command(part)
- part[:] = result
- elif part[0] in ('$', '${'):
- part[:] = self._expand_parameter(part, wtree[0]=='"', split)
- elif part[0] in ('', '"'):
- status, result = expand(part)
- part[:] = result
- else:
- raise NotImplementedError('%s expansion is not implemented'
- % part[0])
-            # [] is returned when an expansion results in no field,
-            # like an empty $@
- wtree = [p for p in wtree if p != []]
- if len(wtree) < 3:
- return status, []
- return status, wtree
-
- status, wtree = expand(wtree)
- if len(wtree) == 0:
- return status, wtree
- wtree = pyshlex.normalize_wordtree(wtree)
-
- if split:
- wtrees = self._split_fields(wtree)
- else:
- wtrees = [wtree]
-
- if pathname:
- wtrees = mappend(self._expand_pathname, wtrees)
-
- wtrees = map(self._remove_quotes, wtrees)
- return status, wtrees
-
- def _expand_command(self, wtree):
- # BUG: there is something to do with backslashes and quoted
- # characters here
- command = pyshlex.wordtree_as_string(wtree[1:-1])
- status, output = self.subshell_output(command)
- return status, ['', output, '']
-
- def _expand_parameter(self, wtree, quoted=False, split=False):
- """Return a valid wtree or an empty list when no parameter results."""
- # Get the parameter name
- # TODO: implement weird expansion rules with ':'
- name = pyshlex.wordtree_as_string(wtree[1:-1])
- if not is_name(name) and not is_special_param(name):
- raise ExpansionError('Bad substitution "%s"' % name)
- # TODO: implement special parameters
- if name in ('@', '*'):
- args = self._env.get_positional_args()
- if len(args) == 0:
- return []
- if len(args)<2:
- return ['', ''.join(args), '']
-
- sep = self._env.get('IFS', '')[:1]
- if split and quoted and name=='@':
- # Introduce a new token to tell the caller that these parameters
- # cause a split as specified in 2.5.2
- return ['@'] + args + ['']
- else:
- return ['', sep.join(args), '']
-
- return ['', self._env.get(name, ''), '']
-
- def _split_fields(self, wtree):
- def is_empty(split):
- return split==['', '', '']
-
- def split_positional(quoted):
-            # Return a list of wtrees split according to positional parameter rules.
-            # All remaining '@' groups are removed.
- assert quoted[0]=='"'
-
- splits = [[]]
- for part in quoted:
- if not isinstance(part, list) or part[0]!='@':
- splits[-1].append(part)
- else:
-                    # Empty or single-argument lists were dealt with already
- assert len(part)>3
- # First argument must join with the beginning part of the original word
- splits[-1].append(part[1])
- # Create double-quotes expressions for every argument after the first
- for arg in part[2:-1]:
- splits[-1].append('"')
- splits.append(['"', arg])
- return splits
-
-        # At this point, all expansions but pathnames have occurred. Only quoted
-        # and positional sequences remain. Thus, all candidates for field splitting
-        # are in the tree root, or are positional splits ('@') and lie in the
-        # root's children.
- if not wtree or wtree[0] not in ('', '"'):
- # The whole token is quoted or empty, nothing to split
- return [wtree]
-
- if wtree[0]=='"':
- wtree = ['', wtree, '']
-
- result = [['', '']]
- for part in wtree[1:-1]:
- if isinstance(part, list):
- if part[0]=='"':
- splits = split_positional(part)
- if len(splits)<=1:
- result[-1] += [part, '']
- else:
- # Terminate the current split
- result[-1] += [splits[0], '']
- result += splits[1:-1]
- # Create a new split
- result += [['', splits[-1], '']]
- else:
- result[-1] += [part, '']
- else:
- splits = self._env.split_fields(part)
- if len(splits)<=1:
- # No split
- result[-1][-1] += part
- else:
- # Terminate the current resulting part and create a new one
- result[-1][-1] += splits[0]
- result[-1].append('')
- result += [['', r, ''] for r in splits[1:-1]]
- result += [['', splits[-1]]]
- result[-1].append('')
-
- # Leading and trailing empty groups come from leading/trailing blanks
- if result and is_empty(result[-1]):
- result[-1:] = []
- if result and is_empty(result[0]):
- result[:1] = []
- return result
-
- def _expand_pathname(self, wtree):
- """See [2.6.6 Pathname Expansion]."""
- if self._env.has_opt('-f'):
- return [wtree]
-
-        # All expansions have been performed; only quoted sequences should remain
-        # in the tree. Generate the pattern by folding the tree, escaping special
-        # characters when they appear quoted.
- special_chars = '*?[]'
-
- def make_pattern(wtree):
- subpattern = []
- for part in wtree[1:-1]:
- if isinstance(part, list):
- part = make_pattern(part)
- elif wtree[0]!='':
- for c in part:
- # Meta-characters cannot be quoted
- if c in special_chars:
- raise GlobError()
- subpattern.append(part)
- return ''.join(subpattern)
-
- def pwd_glob(pattern):
- cwd = os.getcwd()
- os.chdir(self._env['PWD'])
- try:
- return glob.glob(pattern)
- finally:
- os.chdir(cwd)
-
- #TODO: check working directory issues here wrt relative patterns
- try:
- pattern = make_pattern(wtree)
- paths = pwd_glob(pattern)
- except GlobError:
-            # BUG: Meta-characters were found in quoted sequences. They should
-            # have been used literally, but this is unsupported by the current
-            # glob module. Instead we consider that the whole tree must be used
-            # literally, and therefore there is no point in globbing. This is
-            # wrong when meta-characters are mixed with quoted ones in the same
-            # pattern, like: < foo*"py*" >
- paths = []
-
- if not paths:
- return [wtree]
- return [['', path, ''] for path in paths]
-
- def _remove_quotes(self, wtree):
- """See [2.6.7 Quote Removal]."""
-
- def unquote(wtree):
- unquoted = []
- for part in wtree[1:-1]:
- if isinstance(part, list):
- part = unquote(part)
- unquoted.append(part)
- return ''.join(unquoted)
-
- return ['', unquote(wtree), '']
-
- def subshell(self, script=None, ast=None, redirs=None):
- """Execute the script or AST in a subshell, with inherited redirections
- if redirs is not None.
- """
-        if redirs:
-            sub_redirs = redirs
-        else:
-            sub_redirs = self._redirs.clone()
-
- subshell = None
- try:
- subshell = Interpreter(None, self._debugflags, self._env.clone(True),
- sub_redirs, opts=self._options)
- return subshell.execute_script(script, ast)
- finally:
- if not redirs: sub_redirs.close()
- if subshell: subshell.close()
-
- def subshell_output(self, script):
- """Execute the script in a subshell and return the captured output."""
- # Create temporary file to capture subshell output
- tmpfd, tmppath = tempfile.mkstemp()
- try:
- tmpfile = os.fdopen(tmpfd, 'wb')
- stdout = FileWrapper('w', tmpfile)
-
- redirs = Redirections(self._redirs.stdin().dup(),
- stdout,
- self._redirs.stderr().dup())
- try:
- status = self.subshell(script=script, redirs=redirs)
- finally:
- redirs.close()
- redirs = None
-
- # Extract subshell standard output
- tmpfile = open(tmppath, 'rb')
- try:
- output = tmpfile.read()
- return status, output.rstrip('\n')
- finally:
- tmpfile.close()
- finally:
- os.remove(tmppath)
-
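
subshell_output is the command-substitution path: point the subshell's stdout at a temp file, run it, read the file back, and strip trailing newlines as $(...) requires. Compressed to its essence (Python 3; run is a stand-in for the subshell invocation):

    import os
    import tempfile

    def capture(run):
        # Redirect the callee's output to a temp file and read it back.
        fd, path = tempfile.mkstemp()
        try:
            with os.fdopen(fd, 'w') as out:
                status = run(out)
            with open(path) as f:
                return status, f.read().rstrip('\n')  # $(...) strips trailing newlines
        finally:
            os.remove(path)

    status, text = capture(lambda out: (out.write('hello\n\n'), 0)[1])
    assert (status, text) == (0, 'hello')
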
- def _asynclist(self, cmd):
- args = (self._env.get_variables(), cmd)
- arg = encodeargs(args)
-        assert len(arg) < 30*1024
- cmd = ['pysh.bat', '--ast', '-c', arg]
- p = subprocess.Popen(cmd, cwd=self._env['PWD'])
- self._children[p.pid] = p
- self._env['!'] = p.pid
- return 0
-
- def wait(self, pids=None):
- if not pids:
- pids = self._children.keys()
-
- status = 127
- for pid in pids:
- if pid not in self._children:
- continue
- p = self._children.pop(pid)
- status = p.wait()
-
- return status
-
diff --git a/poky/bitbake/lib/bb/pysh/lsprof.py b/poky/bitbake/lib/bb/pysh/lsprof.py
deleted file mode 100644
index b1831c22a..000000000
--- a/poky/bitbake/lib/bb/pysh/lsprof.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#! /usr/bin/env python
-
-import sys
-from _lsprof import Profiler, profiler_entry
-
-__all__ = ['profile', 'Stats']
-
-def profile(f, *args, **kwds):
- """XXX docstring"""
- p = Profiler()
- p.enable(subcalls=True, builtins=True)
- try:
- f(*args, **kwds)
- finally:
- p.disable()
- return Stats(p.getstats())
-
-
-class Stats(object):
- """XXX docstring"""
-
- def __init__(self, data):
- self.data = data
-
- def sort(self, crit="inlinetime"):
- """XXX docstring"""
- if crit not in profiler_entry.__dict__:
- raise ValueError("Can't sort by %s" % crit)
- self.data.sort(lambda b, a: cmp(getattr(a, crit),
- getattr(b, crit)))
- for e in self.data:
- if e.calls:
- e.calls.sort(lambda b, a: cmp(getattr(a, crit),
- getattr(b, crit)))
-
- def pprint(self, top=None, file=None, limit=None, climit=None):
- """XXX docstring"""
- if file is None:
- file = sys.stdout
- d = self.data
- if top is not None:
- d = d[:top]
- cols = "% 12s %12s %11.4f %11.4f %s\n"
- hcols = "% 12s %12s %12s %12s %s\n"
- cols2 = "+%12s %12s %11.4f %11.4f + %s\n"
- file.write(hcols % ("CallCount", "Recursive", "Total(ms)",
- "Inline(ms)", "module:lineno(function)"))
- count = 0
- for e in d:
- file.write(cols % (e.callcount, e.reccallcount, e.totaltime,
- e.inlinetime, label(e.code)))
- count += 1
- if limit is not None and count == limit:
- return
- ccount = 0
- if e.calls:
- for se in e.calls:
- file.write(cols % ("+%s" % se.callcount, se.reccallcount,
- se.totaltime, se.inlinetime,
- "+%s" % label(se.code)))
- count += 1
- ccount += 1
- if limit is not None and count == limit:
- return
- if climit is not None and ccount == climit:
- break
-
- def freeze(self):
- """Replace all references to code objects with string
- descriptions; this makes it possible to pickle the instance."""
-
- # this code is probably rather ickier than it needs to be!
- for i in range(len(self.data)):
- e = self.data[i]
- if not isinstance(e.code, str):
- self.data[i] = type(e)((label(e.code),) + e[1:])
- if e.calls:
- for j in range(len(e.calls)):
- se = e.calls[j]
- if not isinstance(se.code, str):
- e.calls[j] = type(se)((label(se.code),) + se[1:])
-
-_fn2mod = {}
-
-def label(code):
- if isinstance(code, str):
- return code
- try:
- mname = _fn2mod[code.co_filename]
- except KeyError:
- for k, v in sys.modules.items():
- if v is None:
- continue
- if not hasattr(v, '__file__'):
- continue
- if not isinstance(v.__file__, str):
- continue
- if v.__file__.startswith(code.co_filename):
- mname = _fn2mod[code.co_filename] = k
- break
- else:
- mname = _fn2mod[code.co_filename] = '<%s>'%code.co_filename
-
- return '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name)
-
-
-if __name__ == '__main__':
- import os
- sys.argv = sys.argv[1:]
- if not sys.argv:
- print >> sys.stderr, "usage: lsprof.py <script> <arguments...>"
- sys.exit(2)
- sys.path.insert(0, os.path.abspath(os.path.dirname(sys.argv[0])))
- stats = profile(execfile, sys.argv[0], globals(), locals())
- stats.sort()
- stats.pprint()
diff --git a/poky/bitbake/lib/bb/pysh/pysh.py b/poky/bitbake/lib/bb/pysh/pysh.py
deleted file mode 100644
index b4e6145b5..000000000
--- a/poky/bitbake/lib/bb/pysh/pysh.py
+++ /dev/null
@@ -1,167 +0,0 @@
-# pysh.py - command processing for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-import optparse
-import os
-import sys
-
-import interp
-
-SH_OPT = optparse.OptionParser(prog='pysh', usage="%prog [OPTIONS]", version='0.1')
-SH_OPT.add_option('-c', action='store_true', dest='command_string', default=None,
- help='A string that shall be interpreted by the shell as one or more commands')
-SH_OPT.add_option('--redirect-to', dest='redirect_to', default=None,
- help='Redirect script commands stdout and stderr to the specified file')
-# See utility_command in builtin.py about the reason for this flag.
-SH_OPT.add_option('--redirected', dest='redirected', action='store_true', default=False,
- help='Tell the interpreter that stdout and stderr are actually the same objects, which is really stdout')
-SH_OPT.add_option('--debug-parsing', action='store_true', dest='debug_parsing', default=False,
- help='Trace PLY execution')
-SH_OPT.add_option('--debug-tree', action='store_true', dest='debug_tree', default=False,
- help='Display the generated syntax tree.')
-SH_OPT.add_option('--debug-cmd', action='store_true', dest='debug_cmd', default=False,
-                  help='Trace command execution before parameter expansion, plus the exit status.')
-SH_OPT.add_option('--debug-utility', action='store_true', dest='debug_utility', default=False,
- help='Trace utility calls, after parameters expansions')
-SH_OPT.add_option('--ast', action='store_true', dest='ast', default=False,
- help='Encoded commands to execute in a subprocess')
-SH_OPT.add_option('--profile', action='store_true', default=False,
- help='Profile pysh run')
-
-
-def split_args(args):
-    # Separate shell arguments from command ones.
-    # Just stop at the first argument not starting with a dash. I know, this is
-    # completely broken: it ignores files starting with a dash and may mistake
-    # option values for the command file. This is not supposed to happen for now.
- command_index = len(args)
- for i,arg in enumerate(args):
- if not arg.startswith('-'):
- command_index = i
- break
-
- return args[:command_index], args[command_index:]
-
-
-def fixenv(env):
- path = env.get('PATH')
- if path is not None:
- parts = path.split(os.pathsep)
-        # Remove Windows utilities from PATH; they are useless at best, and
-        # some of them (find) may be confused with other utilities.
- env['PATH'] = os.pathsep.join(parts)
- if env.get('HOME') is None:
- # Several utilities, including cvsps, cannot work without
- # a defined HOME directory.
- env['HOME'] = os.path.expanduser('~')
- return env
-
-def _sh(cwd, shargs, cmdargs, options, debugflags=None, env=None):
- if os.environ.get('PYSH_TEXT') != '1':
- import msvcrt
- for fp in (sys.stdin, sys.stdout, sys.stderr):
- msvcrt.setmode(fp.fileno(), os.O_BINARY)
-
- hgbin = os.environ.get('PYSH_HGTEXT') != '1'
-
- if debugflags is None:
- debugflags = []
- if options.debug_parsing: debugflags.append('debug-parsing')
- if options.debug_utility: debugflags.append('debug-utility')
- if options.debug_cmd: debugflags.append('debug-cmd')
- if options.debug_tree: debugflags.append('debug-tree')
-
- if env is None:
- env = fixenv(dict(os.environ))
- if cwd is None:
- cwd = os.getcwd()
-
- if not cmdargs:
- # Nothing to do
- return 0
-
- ast = None
- command_file = None
- if options.command_string:
- input = cmdargs[0]
- if not options.ast:
- input += '\n'
- else:
- args, input = interp.decodeargs(input), None
- env, ast = args
- cwd = env.get('PWD', cwd)
- else:
- command_file = cmdargs[0]
- arguments = cmdargs[1:]
-
- prefix = interp.resolve_shebang(command_file, ignoreshell=True)
- if prefix:
- input = ' '.join(prefix + [command_file] + arguments)
- else:
- # Read commands from file
- f = file(command_file)
- try:
- # Trailing newline to help the parser
- input = f.read() + '\n'
- finally:
- f.close()
-
- redirect = None
- try:
- if options.redirected:
- stdout = sys.stdout
- stderr = stdout
- elif options.redirect_to:
- redirect = open(options.redirect_to, 'wb')
- stdout = redirect
- stderr = redirect
- else:
- stdout = sys.stdout
- stderr = sys.stderr
-
- # TODO: set arguments to environment variables
- opts = interp.Options()
- opts.hgbinary = hgbin
- ip = interp.Interpreter(cwd, debugflags, stdout=stdout, stderr=stderr,
- opts=opts)
- try:
- # Export given environment in shell object
- for k,v in env.iteritems():
- ip.get_env().export(k,v)
- return ip.execute_script(input, ast, scriptpath=command_file)
- finally:
- ip.close()
- finally:
- if redirect is not None:
- redirect.close()
-
-def sh(cwd=None, args=None, debugflags=None, env=None):
- if args is None:
- args = sys.argv[1:]
- shargs, cmdargs = split_args(args)
- options, shargs = SH_OPT.parse_args(shargs)
-
- if options.profile:
- import lsprof
- p = lsprof.Profiler()
- p.enable(subcalls=True)
- try:
- return _sh(cwd, shargs, cmdargs, options, debugflags, env)
- finally:
- p.disable()
- stats = lsprof.Stats(p.getstats())
- stats.sort()
- stats.pprint(top=10, file=sys.stderr, climit=5)
- else:
- return _sh(cwd, shargs, cmdargs, options, debugflags, env)
-
-def main():
- sys.exit(sh())
-
-if __name__=='__main__':
- main()
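
The deleted driver above is Python-2-only throughout: it uses the removed file() builtin, dict.iteritems(), and the print >> stream syntax, which is why it goes away rather than being ported. For reference, a minimal sketch of the Python 3 spellings of the idioms used in the removed _sh() (command_file and env stand in for the deleted locals):

    # Python 3 equivalents of the Python 2 idioms in the removed _sh():
    command_file = 'script.sh'
    env = {'HOME': '/home/user'}

    with open(command_file) as f:        # file() no longer exists
        input_text = f.read() + '\n'     # trailing newline to help the parser

    for k, v in env.items():             # dict.iteritems() no longer exists
        print(k, v)                      # print >> stream is print(..., file=stream)
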
diff --git a/poky/bitbake/lib/bb/pysh/pyshlex.py b/poky/bitbake/lib/bb/pysh/pyshlex.py
index fbf094b7a..a42c29446 100644
--- a/poky/bitbake/lib/bb/pysh/pyshlex.py
+++ b/poky/bitbake/lib/bb/pysh/pyshlex.py
@@ -13,11 +13,6 @@
# PLY in pull mode. It was designed to work incrementally and it would not be
# that hard to enable pull mode.
import re
-try:
- s = set()
- del s
-except NameError:
- from Set import Set as set
from ply import lex
from bb.pysh.sherrors import *
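
The block removed here was a Python 2.3 compatibility probe: it tested for the set builtin (added in Python 2.4) and fell back to an import on NameError. On any interpreter BitBake now supports the probe always succeeds, so it was dead code; note too that the Python 2 standard library module was named "sets", so the fallback as written would most likely have failed anyway:

    try:
        s = set()                        # always succeeds on Python >= 2.4
        del s
    except NameError:
        from sets import Set as set      # the stdlib module was "sets", not "Set"
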
diff --git a/poky/bitbake/lib/bb/pysh/pyshyacc.py b/poky/bitbake/lib/bb/pysh/pyshyacc.py
index ba4cefdcb..de565dc9a 100644
--- a/poky/bitbake/lib/bb/pysh/pyshyacc.py
+++ b/poky/bitbake/lib/bb/pysh/pyshyacc.py
@@ -636,13 +636,16 @@ def p_empty(p):
def p_error(p):
msg = []
w = msg.append
- w('%r\n' % p)
- w('followed by:\n')
- for i in range(5):
- n = yacc.token()
- if not n:
- break
- w(' %r\n' % n)
+ if p:
+ w('%r\n' % p)
+ w('followed by:\n')
+ for i in range(5):
+ n = yacc.token()
+ if not n:
+ break
+ w(' %r\n' % n)
+ else:
+ w('Unexpected EOF')
raise sherrors.ShellSyntaxError(''.join(msg))
# Build the parser
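
The guard added above matters because PLY invokes p_error(None) when the input ends before the grammar is satisfied, so the token must be tested before its attributes are used. A minimal sketch of the same guard in a standalone PLY grammar:

    # yacc calls p_error with None at unexpected EOF; check before use.
    def p_error(p):
        if p is None:
            raise SyntaxError("unexpected end of input")
        raise SyntaxError("syntax error at %r" % (p.value,))
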
diff --git a/poky/bitbake/lib/bb/pysh/sherrors.py b/poky/bitbake/lib/bb/pysh/sherrors.py
index 49d0533de..3fe8e47b2 100644
--- a/poky/bitbake/lib/bb/pysh/sherrors.py
+++ b/poky/bitbake/lib/bb/pysh/sherrors.py
@@ -13,29 +13,3 @@ class ShellError(Exception):
class ShellSyntaxError(ShellError):
pass
-
-class UtilityError(ShellError):
- """Raised upon utility syntax error (option or operand error)."""
- pass
-
-class ExpansionError(ShellError):
- pass
-
-class CommandNotFound(ShellError):
- """Specified command was not found."""
- pass
-
-class RedirectionError(ShellError):
- pass
-
-class VarAssignmentError(ShellError):
- """Variable assignment error."""
- pass
-
-class ExitSignal(ShellError):
- """Exit signal."""
- pass
-
-class ReturnSignal(ShellError):
- """Exit signal."""
- pass
diff --git a/poky/bitbake/lib/bb/pysh/subprocess_fix.py b/poky/bitbake/lib/bb/pysh/subprocess_fix.py
deleted file mode 100644
index 46eca2280..000000000
--- a/poky/bitbake/lib/bb/pysh/subprocess_fix.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# subprocess - Subprocesses with accessible I/O streams
-#
-# For more information about this module, see PEP 324.
-#
-# This module should remain compatible with Python 2.2, see PEP 291.
-#
-# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
-#
-# Licensed to PSF under a Contributor Agreement.
-# See http://www.python.org/2.4/license for licensing details.
-
-def list2cmdline(seq):
- """
- Translate a sequence of arguments into a command line
- string, using the same rules as the MS C runtime:
-
- 1) Arguments are delimited by white space, which is either a
- space or a tab.
-
- 2) A string surrounded by double quotation marks is
- interpreted as a single argument, regardless of white space
- contained within. A quoted string can be embedded in an
- argument.
-
- 3) A double quotation mark preceded by a backslash is
- interpreted as a literal double quotation mark.
-
- 4) Backslashes are interpreted literally, unless they
- immediately precede a double quotation mark.
-
- 5) If backslashes immediately precede a double quotation mark,
- every pair of backslashes is interpreted as a literal
- backslash. If the number of backslashes is odd, the last
- backslash escapes the next double quotation mark as
- described in rule 3.
- """
-
- # See
- # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp
- result = []
- needquote = False
- for arg in seq:
- bs_buf = []
-
- # Add a space to separate this argument from the others
- if result:
- result.append(' ')
-
- needquote = (" " in arg) or ("\t" in arg) or ("|" in arg) or arg == ""
- if needquote:
- result.append('"')
-
- for c in arg:
- if c == '\\':
- # Don't know if we need to double yet.
- bs_buf.append(c)
- elif c == '"':
- # Double backslashes.
- result.append('\\' * len(bs_buf)*2)
- bs_buf = []
- result.append('\\"')
- else:
- # Normal char
- if bs_buf:
- result.extend(bs_buf)
- bs_buf = []
- result.append(c)
-
- # Add remaining backslashes, if any.
- if bs_buf:
- result.extend(bs_buf)
-
- if needquote:
- result.extend(bs_buf)
- result.append('"')
-
- return ''.join(result)
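
The removed module duplicated the MS C runtime quoting rules that the standard library already implements as subprocess.list2cmdline(), which any code still needing this behaviour can call directly:

    >>> import subprocess
    >>> subprocess.list2cmdline(['copy', 'my file.txt', 'dest dir'])
    'copy "my file.txt" "dest dir"'
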
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index 383c18323..329cda33a 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -37,11 +37,12 @@ from bb import monitordisk
import subprocess
import pickle
from multiprocessing import Process
+import shlex
bblogger = logging.getLogger("BitBake")
logger = logging.getLogger("BitBake.RunQueue")
-__find_md5__ = re.compile( r'(?i)(?<![a-z0-9])[a-f0-9]{32}(?![a-z0-9])' )
+__find_sha256__ = re.compile( r'(?i)(?<![a-z0-9])[a-f0-9]{64}(?![a-z0-9])' )
def fn_from_tid(tid):
return tid.rsplit(":", 1)[0]
@@ -351,6 +352,7 @@ class RunTaskEntry(object):
self.depends = set()
self.revdeps = set()
self.hash = None
+ self.unihash = None
self.task = None
self.weight = 1
@@ -390,6 +392,9 @@ class RunQueueData:
def get_task_hash(self, tid):
return self.runtaskentries[tid].hash
+ def get_task_unihash(self, tid):
+ return self.runtaskentries[tid].unihash
+
def get_user_idstring(self, tid, task_name_suffix = ""):
return tid + task_name_suffix
@@ -1161,18 +1166,21 @@ class RunQueueData:
if len(self.runtaskentries[tid].depends - dealtwith) == 0:
dealtwith.add(tid)
todeal.remove(tid)
- procdep = []
- for dep in self.runtaskentries[tid].depends:
- procdep.append(fn_from_tid(dep) + "." + taskname_from_tid(dep))
- (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
- self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(taskfn, taskname, procdep, self.dataCaches[mc])
- task = self.runtaskentries[tid].task
+ self.prepare_task_hash(tid)
bb.parse.siggen.writeout_file_checksum_cache()
#self.dump_data()
return len(self.runtaskentries)
+ def prepare_task_hash(self, tid):
+ procdep = []
+ for dep in self.runtaskentries[tid].depends:
+ procdep.append(fn_from_tid(dep) + "." + taskname_from_tid(dep))
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+ self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(taskfn, taskname, procdep, self.dataCaches[mc])
+ self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(taskfn + "." + taskname)
+
def dump_data(self):
"""
Dump some debug information on the internal data structures
@@ -1224,28 +1232,23 @@ class RunQueue:
if fakeroot:
magic = magic + "beef"
mcdata = self.cooker.databuilder.mcdata[mc]
- fakerootcmd = mcdata.getVar("FAKEROOTCMD")
+ fakerootcmd = shlex.split(mcdata.getVar("FAKEROOTCMD"))
fakerootenv = (mcdata.getVar("FAKEROOTBASEENV") or "").split()
env = os.environ.copy()
for key, value in (var.split('=') for var in fakerootenv):
env[key] = value
- worker = subprocess.Popen([fakerootcmd, "bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
+ worker = subprocess.Popen(fakerootcmd + ["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
else:
worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
bb.utils.nonblockingfd(worker.stdout)
workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec)
- runqhash = {}
- for tid in self.rqdata.runtaskentries:
- runqhash[tid] = self.rqdata.runtaskentries[tid].hash
-
workerdata = {
"taskdeps" : self.rqdata.dataCaches[mc].task_deps,
"fakerootenv" : self.rqdata.dataCaches[mc].fakerootenv,
"fakerootdirs" : self.rqdata.dataCaches[mc].fakerootdirs,
"fakerootnoenv" : self.rqdata.dataCaches[mc].fakerootnoenv,
"sigdata" : bb.parse.siggen.get_taskdata(),
- "runq_hash" : runqhash,
"logdefaultdebug" : bb.msg.loggerDefaultDebugLevel,
"logdefaultverbose" : bb.msg.loggerDefaultVerbose,
"logdefaultverboselogs" : bb.msg.loggerVerboseLogs,
@@ -1387,6 +1390,26 @@ class RunQueue:
cache[tid] = iscurrent
return iscurrent
+ def validate_hash(self, *, sq_fn, sq_task, sq_hash, sq_hashfn, siginfo, sq_unihash, d):
+ locs = {"sq_fn" : sq_fn, "sq_task" : sq_task, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn,
+ "sq_unihash" : sq_unihash, "siginfo" : siginfo, "d" : d}
+
+ hashvalidate_args = ("(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=siginfo, sq_unihash=sq_unihash)",
+ "(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=siginfo)",
+ "(sq_fn, sq_task, sq_hash, sq_hashfn, d)")
+
+ for args in hashvalidate_args[:-1]:
+ try:
+ call = self.hashvalidate + args
+ return bb.utils.better_eval(call, locs)
+ except TypeError:
+ continue
+
+ # Call the last entry without a try...catch to propagate any thrown
+ # TypeError
+ call = self.hashvalidate + hashvalidate_args[-1]
+ return bb.utils.better_eval(call, locs)
+
def _execute_runqueue(self):
"""
Run the tasks in a queue prepared by rqdata.prepare()
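
The new validate_hash() helper centralises a signature-negotiation pattern: the configured BB_HASHCHECK_FUNCTION is tried with the newest calling convention first, and a TypeError triggers a retry with an older one, so existing hook implementations keep working. A condensed sketch of the idea, where hook stands in for the evaluated hashvalidate callable:

    def call_hashcheck(hook, sq_fn, sq_task, sq_hash, sq_hashfn, d,
                       siginfo, sq_unihash):
        try:
            return hook(sq_fn, sq_task, sq_hash, sq_hashfn, d,
                        siginfo=siginfo, sq_unihash=sq_unihash)
        except TypeError:
            pass
        try:
            return hook(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=siginfo)
        except TypeError:
            pass
        # Oldest signature: let any genuine TypeError propagate.
        return hook(sq_fn, sq_task, sq_hash, sq_hashfn, d)

One caveat this shares with the code above: a TypeError raised inside a hook that does accept the newer keywords is indistinguishable from a signature mismatch and is masked by the retry.
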
@@ -1558,6 +1581,7 @@ class RunQueue:
valid = []
sq_hash = []
sq_hashfn = []
+ sq_unihash = []
sq_fn = []
sq_taskname = []
sq_task = []
@@ -1576,16 +1600,13 @@ class RunQueue:
sq_fn.append(fn)
sq_hashfn.append(self.rqdata.dataCaches[mc].hashfn[taskfn])
sq_hash.append(self.rqdata.runtaskentries[tid].hash)
+ sq_unihash.append(self.rqdata.runtaskentries[tid].unihash)
sq_taskname.append(taskname)
sq_task.append(tid)
- locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
- try:
- call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=True)"
- valid = bb.utils.better_eval(call, locs)
- # Handle version with no siginfo parameter
- except TypeError:
- call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
- valid = bb.utils.better_eval(call, locs)
+
+ valid = self.validate_hash(sq_fn=sq_fn, sq_task=sq_taskname, sq_hash=sq_hash, sq_hashfn=sq_hashfn,
+ siginfo=True, sq_unihash=sq_unihash, d=self.cooker.data)
+
for v in valid:
valid_new.add(sq_task[v])
@@ -1667,7 +1688,7 @@ class RunQueue:
matches = {k : v for k, v in iter(matches.items()) if h not in k}
if matches:
latestmatch = sorted(matches.keys(), key=lambda f: matches[f])[-1]
- prevh = __find_md5__.search(latestmatch).group(0)
+ prevh = __find_sha256__.search(latestmatch).group(0)
output = bb.siggen.compare_sigfiles(latestmatch, match, recursecb)
bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, closest matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output))
@@ -2042,6 +2063,8 @@ class RunQueueExecuteTasks(RunQueueExecute):
taskdepdata = self.build_taskdepdata(task)
taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
+ taskhash = self.rqdata.get_task_hash(task)
+ unihash = self.rqdata.get_task_unihash(task)
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
if not mc in self.rq.fakeworker:
try:
@@ -2051,10 +2074,10 @@ class RunQueueExecuteTasks(RunQueueExecute):
self.rq.state = runQueueFailed
self.stats.taskFailed()
return True
- self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
+ self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
self.rq.fakeworker[mc].process.stdin.flush()
else:
- self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
+ self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
self.rq.worker[mc].process.stdin.flush()
self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
@@ -2109,8 +2132,9 @@ class RunQueueExecuteTasks(RunQueueExecute):
deps = self.rqdata.runtaskentries[revdep].depends
provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
taskhash = self.rqdata.runtaskentries[revdep].hash
- taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash]
+ unihash = self.rqdata.runtaskentries[revdep].unihash
deps = self.filtermcdeps(task, deps)
+ taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash]
for revdep2 in deps:
if revdep2 not in taskdepdata:
additional.append(revdep2)
@@ -2313,6 +2337,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
if self.rq.hashvalidate:
sq_hash = []
sq_hashfn = []
+ sq_unihash = []
sq_fn = []
sq_taskname = []
sq_task = []
@@ -2344,14 +2369,14 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
sq_fn.append(fn)
sq_hashfn.append(self.rqdata.dataCaches[mc].hashfn[taskfn])
sq_hash.append(self.rqdata.runtaskentries[tid].hash)
+ sq_unihash.append(self.rqdata.runtaskentries[tid].unihash)
sq_taskname.append(taskname)
sq_task.append(tid)
self.cooker.data.setVar("BB_SETSCENE_STAMPCURRENT_COUNT", len(stamppresent))
- call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
- locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
- valid = bb.utils.better_eval(call, locs)
+ valid = self.rq.validate_hash(sq_fn=sq_fn, sq_task=sq_taskname, sq_hash=sq_hash, sq_hashfn=sq_hashfn,
+ siginfo=False, sq_unihash=sq_unihash, d=self.cooker.data)
self.cooker.data.delVar("BB_SETSCENE_STAMPCURRENT_COUNT")
@@ -2482,13 +2507,15 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
taskdepdata = self.build_taskdepdata(task)
taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
+ taskhash = self.rqdata.get_task_hash(task)
+ unihash = self.rqdata.get_task_unihash(task)
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
if not mc in self.rq.fakeworker:
self.rq.start_fakeworker(self, mc)
- self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+ self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
self.rq.fakeworker[mc].process.stdin.flush()
else:
- self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+ self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
self.rq.worker[mc].process.stdin.flush()
self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
@@ -2552,7 +2579,8 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
deps = getsetscenedeps(revdep)
provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
taskhash = self.rqdata.runtaskentries[revdep].hash
- taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash]
+ unihash = self.rqdata.runtaskentries[revdep].unihash
+ taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash]
for revdep2 in deps:
if revdep2 not in taskdepdata:
additional.append(revdep2)
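
The widened stamp-matching regex at the top of this file tracks the md5-to-sha256 move made elsewhere in this series: hashes embedded in stamp and sigdata file names are now 64 hex digits instead of 32, with the same delimiters so that a longer hex run does not match:

    >>> import re
    >>> find_sha256 = re.compile(r'(?i)(?<![a-z0-9])[a-f0-9]{64}(?![a-z0-9])')
    >>> bool(find_sha256.search('do_compile.' + 'ab' * 32 + '.sigdata'))
    True
    >>> bool(find_sha256.search('do_compile.' + 'ab' * 16 + '.sigdata'))
    False
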
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index 352dcab85..09c9c8a25 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -41,6 +41,9 @@ class SignatureGenerator(object):
def finalise(self, fn, d, varient):
return
+ def get_unihash(self, task):
+ return self.taskhash[task]
+
def get_taskhash(self, fn, task, deps, dataCache):
return "0"
@@ -87,7 +90,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.taints = {}
self.gendeps = {}
self.lookupcache = {}
- self.pkgnameextract = re.compile("(?P<fn>.*)\..*")
+ self.pkgnameextract = re.compile(r"(?P<fn>.*)\..*")
self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split())
self.taskwhitelist = None
self.init_rundepcheck(data)
@@ -188,7 +191,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
continue
if dep not in self.taskhash:
bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
- data = data + self.taskhash[dep]
+ data = data + self.get_unihash(dep)
self.runtaskdeps[k].append(dep)
if task in dataCache.file_checksums[fn]:
@@ -215,7 +218,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.taints[k] = taint
logger.warning("%s is tainted from a forced run" % k)
- h = hashlib.md5(data.encode("utf-8")).hexdigest()
+ h = hashlib.sha256(data.encode("utf-8")).hexdigest()
self.taskhash[k] = h
#d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
return h
@@ -263,7 +266,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[k]]
data['runtaskhashes'] = {}
for dep in data['runtaskdeps']:
- data['runtaskhashes'][dep] = self.taskhash[dep]
+ data['runtaskhashes'][dep] = self.get_unihash(dep)
data['taskhash'] = self.taskhash[k]
taint = self.read_taint(fn, task, referencestamp)
@@ -313,6 +316,13 @@ class SignatureGeneratorBasic(SignatureGenerator):
class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
name = "basichash"
+ def get_stampfile_hash(self, task):
+ if task in self.taskhash:
+ return self.taskhash[task]
+
+ # If task is not in basehash, then error
+ return self.basehash[task]
+
def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False):
if taskname != "do_setscene" and taskname.endswith("_setscene"):
k = fn + "." + taskname[:-9]
@@ -320,11 +330,9 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
k = fn + "." + taskname
if clean:
h = "*"
- elif k in self.taskhash:
- h = self.taskhash[k]
else:
- # If k is not in basehash, then error
- h = self.basehash[k]
+ h = self.get_stampfile_hash(k)
+
return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
def stampcleanmask(self, stampbase, fn, taskname, extrainfo):
@@ -644,7 +652,7 @@ def calc_basehash(sigdata):
if val is not None:
basedata = basedata + str(val)
- return hashlib.md5(basedata.encode("utf-8")).hexdigest()
+ return hashlib.sha256(basedata.encode("utf-8")).hexdigest()
def calc_taskhash(sigdata):
data = sigdata['basehash']
@@ -662,7 +670,7 @@ def calc_taskhash(sigdata):
else:
data = data + sigdata['taint']
- return hashlib.md5(data.encode("utf-8")).hexdigest()
+ return hashlib.sha256(data.encode("utf-8")).hexdigest()
def dump_sigfile(a):
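
get_unihash() introduces a level of indirection between a task's computed hash and the hash used to seed dependent hashes and signature data: the base implementation simply returns the taskhash, but a derived signature generator can map many equivalent taskhashes onto one unified hash. A hypothetical sketch of such a subclass (the class name and equivalence lookup are illustrative, not part of this patch):

    class SignatureGeneratorEquiv(SignatureGeneratorBasicHash):
        name = "equivhash"

        def __init__(self, data):
            super().__init__(data)
            self.unihashes = {}              # taskhash -> unified hash

        def get_unihash(self, task):
            taskhash = self.taskhash[task]
            # Fall back to the plain taskhash when no equivalence is known.
            return self.unihashes.get(taskhash, taskhash)
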
diff --git a/poky/bitbake/lib/bb/tests/codeparser.py b/poky/bitbake/lib/bb/tests/codeparser.py
index e30e78c15..3fd76a8f9 100644
--- a/poky/bitbake/lib/bb/tests/codeparser.py
+++ b/poky/bitbake/lib/bb/tests/codeparser.py
@@ -123,6 +123,13 @@ ${D}${libdir}/pkgconfig/*.pc
self.parseExpression("sed -i -e 's:IP{:I${:g' $pc")
self.assertExecs(set(["sed"]))
+ def test_parameter_expansion_modifiers(self):
+ # - and + are also valid modifiers for parameter expansion, but are
+ # valid characters in bitbake variable names, so are not included here
+ for i in ('=', ':-', ':=', '?', ':?', ':+', '#', '%', '##', '%%'):
+ name = "foo%sbar" % i
+ self.parseExpression("${%s}" % name)
+ self.assertNotIn(name, self.references)
def test_until(self):
self.parseExpression("until false; do echo true; done")
diff --git a/poky/bitbake/lib/bb/tests/data.py b/poky/bitbake/lib/bb/tests/data.py
index db3e2010a..3c511f214 100644
--- a/poky/bitbake/lib/bb/tests/data.py
+++ b/poky/bitbake/lib/bb/tests/data.py
@@ -394,6 +394,15 @@ class TestOverrides(unittest.TestCase):
self.d.setVar("OVERRIDES", "foo:bar:some_val")
self.assertEqual(self.d.getVar("TEST"), " testvalue5")
+ # Test an override with _<numeric> in it based on a real world OE issue
+ def test_underscore_override(self):
+ self.d.setVar("TARGET_ARCH", "x86_64")
+ self.d.setVar("PN", "test-${TARGET_ARCH}")
+ self.d.setVar("VERSION", "1")
+ self.d.setVar("VERSION_pn-test-${TARGET_ARCH}", "2")
+ self.d.setVar("OVERRIDES", "pn-${PN}")
+ bb.data.expandKeys(self.d)
+ self.assertEqual(self.d.getVar("VERSION"), "2")
class TestKeyExpansion(unittest.TestCase):
def setUp(self):
diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py
index 522d2024f..429998b34 100644
--- a/poky/bitbake/lib/bb/tests/fetch.py
+++ b/poky/bitbake/lib/bb/tests/fetch.py
@@ -942,6 +942,25 @@ class FetcherNetworkTest(FetcherTest):
self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/extern/json/config')), msg='Missing submodule config "extern/json"')
self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/extern/sanitizers/config')), msg='Missing submodule config "extern/sanitizers"')
+ def test_git_submodule_update_CLI11(self):
+ """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """
+ url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714"
+ fetcher = bb.fetch.Fetch([url], self.d)
+ fetcher.download()
+
+ # CLI11 that pulls in a newer nlohmann-json
+ url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca"
+ fetcher = bb.fetch.Fetch([url], self.d)
+ fetcher.download()
+ # Previous cwd has been deleted
+ os.chdir(os.path.dirname(self.unpackdir))
+ fetcher.unpack(self.unpackdir)
+
+ repo_path = os.path.join(self.tempdir, 'unpacked', 'git')
+ self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/extern/googletest/config')), msg='Missing submodule config "extern/googletest"')
+ self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/extern/json/config')), msg='Missing submodule config "extern/json"')
+ self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/extern/sanitizers/config')), msg='Missing submodule config "extern/sanitizers"')
+
def test_git_submodule_aktualizr(self):
url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=git;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
fetcher = bb.fetch.Fetch([url], self.d)
@@ -1338,7 +1357,7 @@ class GitShallowTest(FetcherTest):
def fetch(self, uri=None):
if uri is None:
- uris = self.d.getVar('SRC_URI', True).split()
+ uris = self.d.getVar('SRC_URI').split()
uri = uris[0]
d = self.d
else:
@@ -1397,7 +1416,7 @@ class GitShallowTest(FetcherTest):
srcrev = self.git('rev-parse HEAD', cwd=self.srcdir).strip()
self.d.setVar('SRCREV', srcrev)
- uri = self.d.getVar('SRC_URI', True).split()[0]
+ uri = self.d.getVar('SRC_URI').split()[0]
uri = '%s;nobranch=1;bare=1' % uri
self.fetch_shallow(uri)
@@ -1576,7 +1595,7 @@ class GitShallowTest(FetcherTest):
self.add_empty_file('f')
self.assertRevCount(7, cwd=self.srcdir)
- uri = self.d.getVar('SRC_URI', True).split()[0]
+ uri = self.d.getVar('SRC_URI').split()[0]
uri = '%s;branch=master,a_branch;name=master,a_branch' % uri
self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
@@ -1602,7 +1621,7 @@ class GitShallowTest(FetcherTest):
self.add_empty_file('f')
self.assertRevCount(7, cwd=self.srcdir)
- uri = self.d.getVar('SRC_URI', True).split()[0]
+ uri = self.d.getVar('SRC_URI').split()[0]
uri = '%s;branch=master,a_branch;name=master,a_branch' % uri
self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
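
The getVar() changes in these tests drop a redundant positional argument: the datastore's expand parameter defaults to True, so the two spellings below behave identically and the shorter one is preferred:

    uri = d.getVar('SRC_URI').split()[0]        # expansion is the default
    uri = d.getVar('SRC_URI', True).split()[0]  # older, redundant form
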
diff --git a/poky/bitbake/lib/bb/tests/persist_data.py b/poky/bitbake/lib/bb/tests/persist_data.py
new file mode 100644
index 000000000..812bcbd7b
--- /dev/null
+++ b/poky/bitbake/lib/bb/tests/persist_data.py
@@ -0,0 +1,142 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# BitBake Test for lib/bb/persist_data/
+#
+# Copyright (C) 2018 Garmin Ltd.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+
+import unittest
+import bb.data
+import bb.persist_data
+import tempfile
+import threading
+
+class PersistDataTest(unittest.TestCase):
+ def _create_data(self):
+ return bb.persist_data.persist('TEST_PERSIST_DATA', self.d)
+
+ def setUp(self):
+ self.d = bb.data.init()
+ self.tempdir = tempfile.TemporaryDirectory()
+ self.d['PERSISTENT_DIR'] = self.tempdir.name
+ self.data = self._create_data()
+ self.items = {
+ 'A1': '1',
+ 'B1': '2',
+ 'C2': '3'
+ }
+ self.stress_count = 10000
+ self.thread_count = 5
+
+ for k,v in self.items.items():
+ self.data[k] = v
+
+ def tearDown(self):
+ self.tempdir.cleanup()
+
+ def _iter_helper(self, seen, iterator):
+ with iter(iterator):
+ for v in iterator:
+ self.assertTrue(v in seen)
+ seen.remove(v)
+ self.assertEqual(len(seen), 0, '%s not seen' % seen)
+
+ def test_get(self):
+ for k, v in self.items.items():
+ self.assertEqual(self.data[k], v)
+
+ self.assertIsNone(self.data.get('D'))
+ with self.assertRaises(KeyError):
+ self.data['D']
+
+ def test_set(self):
+ for k, v in self.items.items():
+ self.data[k] += '-foo'
+
+ for k, v in self.items.items():
+ self.assertEqual(self.data[k], v + '-foo')
+
+ def test_delete(self):
+ self.data['D'] = '4'
+ self.assertEqual(self.data['D'], '4')
+ del self.data['D']
+ self.assertIsNone(self.data.get('D'))
+ with self.assertRaises(KeyError):
+ self.data['D']
+
+ def test_contains(self):
+ for k in self.items:
+ self.assertTrue(k in self.data)
+ self.assertTrue(self.data.has_key(k))
+ self.assertFalse('NotFound' in self.data)
+ self.assertFalse(self.data.has_key('NotFound'))
+
+ def test_len(self):
+ self.assertEqual(len(self.data), len(self.items))
+
+ def test_iter(self):
+ self._iter_helper(set(self.items.keys()), self.data)
+
+ def test_itervalues(self):
+ self._iter_helper(set(self.items.values()), self.data.itervalues())
+
+ def test_iteritems(self):
+ self._iter_helper(set(self.items.items()), self.data.iteritems())
+
+ def test_get_by_pattern(self):
+ self._iter_helper({'1', '2'}, self.data.get_by_pattern('_1'))
+
+ def _stress_read(self, data):
+ for i in range(self.stress_count):
+ for k in self.items:
+ data[k]
+
+ def _stress_write(self, data):
+ for i in range(self.stress_count):
+ for k, v in self.items.items():
+ data[k] = v + str(i)
+
+ def _validate_stress(self):
+ for k, v in self.items.items():
+ self.assertEqual(self.data[k], v + str(self.stress_count - 1))
+
+ def test_stress(self):
+ self._stress_read(self.data)
+ self._stress_write(self.data)
+ self._validate_stress()
+
+ def test_stress_threads(self):
+ def read_thread():
+ data = self._create_data()
+ self._stress_read(data)
+
+ def write_thread():
+ data = self._create_data()
+ self._stress_write(data)
+
+ threads = []
+ for i in range(self.thread_count):
+ threads.append(threading.Thread(target=read_thread))
+ threads.append(threading.Thread(target=write_thread))
+
+ for t in threads:
+ t.start()
+ self._stress_read(self.data)
+ for t in threads:
+ t.join()
+ self._validate_stress()
+
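
The _iter_helper() pattern above relies on a detail of the rewritten persist_data API: its iterators are also context managers, so "with iter(iterator):" holds the underlying database cursor open for the duration of the loop and releases it deterministically afterwards. A sketch of the shape such an iterator takes (illustrative, not the exact implementation):

    class _CursorIter:
        def __init__(self, cursor):
            self.cursor = cursor
        def __enter__(self):
            return self
        def __exit__(self, *exc):
            self.cursor.close()              # deterministic cleanup
        def __iter__(self):
            return self
        def __next__(self):
            row = self.cursor.fetchone()
            if row is None:
                raise StopIteration
            return row[0]
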
diff --git a/poky/bitbake/lib/bb/tests/utils.py b/poky/bitbake/lib/bb/tests/utils.py
index 2f4ccf3c6..f1cd83a41 100644
--- a/poky/bitbake/lib/bb/tests/utils.py
+++ b/poky/bitbake/lib/bb/tests/utils.py
@@ -42,6 +42,10 @@ class VerCmpString(unittest.TestCase):
self.assertTrue(result < 0)
result = bb.utils.vercmp_string('1.1', '1.0+1.1-beta1')
self.assertTrue(result > 0)
+ result = bb.utils.vercmp_string('1.', '1.1')
+ self.assertTrue(result < 0)
+ result = bb.utils.vercmp_string('1.1', '1.')
+ self.assertTrue(result > 0)
def test_explode_dep_versions(self):
correctresult = {"foo" : ["= 1.10"]}
diff --git a/poky/bitbake/lib/bb/utils.py b/poky/bitbake/lib/bb/utils.py
index 73b6cb423..b652a6838 100644
--- a/poky/bitbake/lib/bb/utils.py
+++ b/poky/bitbake/lib/bb/utils.py
@@ -27,7 +27,8 @@ import bb
import bb.msg
import multiprocessing
import fcntl
-import imp
+import importlib
+from importlib import machinery
import itertools
import subprocess
import glob
@@ -43,7 +44,7 @@ from contextlib import contextmanager
from ctypes import cdll
logger = logging.getLogger("BitBake.Util")
-python_extensions = [e for e, _, _ in imp.get_suffixes()]
+python_extensions = importlib.machinery.all_suffixes()
def clean_context():
@@ -68,8 +69,8 @@ class VersionStringException(Exception):
def explode_version(s):
r = []
- alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$')
- numeric_regexp = re.compile('^(\d+)(.*)$')
+ alpha_regexp = re.compile(r'^([a-zA-Z]+)(.*)$')
+ numeric_regexp = re.compile(r'^(\d+)(.*)$')
while (s != ''):
if s[0] in string.digits:
m = numeric_regexp.match(s)
@@ -120,6 +121,10 @@ def vercmp_part(a, b):
return -1
elif oa > ob:
return 1
+ elif ca is None:
+ return -1
+ elif cb is None:
+ return 1
elif ca < cb:
return -1
elif ca > cb:
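
The new branches exist because vercmp_part() substitutes (0, None) when one version string runs out of components, as '1.' does against '1.1' after the shared '1.' prefix, and Python 3, unlike Python 2, refuses to order None against other types:

    >>> None < 1
    Traceback (most recent call last):
      ...
    TypeError: '<' not supported between instances of 'NoneType' and 'int'

Ordering a missing component below any present one restores the Python 2 behaviour and makes '1.' compare lower than '1.1', as the new vercmp tests earlier in this series verify.
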
@@ -317,10 +322,13 @@ def better_compile(text, file, realfile, mode = "exec", lineno = 0):
error = []
# split the text into lines again
body = text.split('\n')
- error.append("Error in compiling python function in %s, line %s:\n" % (realfile, lineno))
+ error.append("Error in compiling python function in %s, line %s:\n" % (realfile, e.lineno))
if hasattr(e, "lineno"):
error.append("The code lines resulting in this error were:")
- error.extend(_print_trace(body, e.lineno))
+ # e.lineno: the line's position in realfile
+ # lineno: the function definition's position - 1 in realfile
+ # e.lineno - lineno: the line's position relative to the function
+ error.extend(_print_trace(body, e.lineno - lineno))
else:
error.append("The function causing this error was:")
for line in body:
@@ -704,15 +712,7 @@ def prunedir(topdir):
# CAUTION: This is dangerous!
if _check_unsafe_delete_path(topdir):
raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
- for root, dirs, files in os.walk(topdir, topdown = False):
- for name in files:
- os.remove(os.path.join(root, name))
- for name in dirs:
- if os.path.islink(os.path.join(root, name)):
- os.remove(os.path.join(root, name))
- else:
- os.rmdir(os.path.join(root, name))
- os.rmdir(topdir)
+ remove(topdir, recurse=True)
#
# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
@@ -1157,14 +1157,14 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
var_res = {}
if match_overrides:
- override_re = '(_[a-zA-Z0-9-_$(){}]+)?'
+ override_re = r'(_[a-zA-Z0-9-_$(){}]+)?'
else:
override_re = ''
for var in variables:
if var.endswith('()'):
- var_res[var] = re.compile('^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
+ var_res[var] = re.compile(r'^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
else:
- var_res[var] = re.compile('^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))
+ var_res[var] = re.compile(r'^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))
updated = False
varset_start = ''
@@ -1501,6 +1501,8 @@ def ioprio_set(who, cls, value):
NR_ioprio_set = 251
elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
NR_ioprio_set = 289
+ elif _unamearch == "aarch64":
+ NR_ioprio_set = 30
if NR_ioprio_set:
ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
@@ -1544,12 +1546,9 @@ def export_proxies(d):
def load_plugins(logger, plugins, pluginpath):
def load_plugin(name):
logger.debug(1, 'Loading plugin %s' % name)
- fp, pathname, description = imp.find_module(name, [pluginpath])
- try:
- return imp.load_module(name, fp, pathname, description)
- finally:
- if fp:
- fp.close()
+ spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
+ if spec:
+ return spec.loader.load_module()
logger.debug(1, 'Loading plugins from %s...' % pluginpath)
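
The replacement plugin loader uses the importlib machinery directly: PathFinder.find_spec() looks the module up on the given path only, and the returned spec's loader imports it. On current Pythons load_module() is deprecated in favour of the module_from_spec()/exec_module() pair, so an equivalent forward-looking sketch (the helper name is illustrative) would be:

    import importlib.util
    from importlib.machinery import PathFinder

    def load_plugin(name, pluginpath):
        # Locate the module on the plugin path only, not on sys.path.
        spec = PathFinder.find_spec(name, path=[pluginpath])
        if spec is None:
            return None
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module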