Diffstat (limited to 'import-layers/yocto-poky/bitbake/lib/bb/fetch2')
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py  | 152
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py       |  13
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py |   6
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py       |  17
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py       | 118
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py  |  25
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py     |  23
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py        |  28
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py     |   4
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py       |  71
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py       |  10
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py  | 269
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py      |  15
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py      |   8
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py       |   6
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py       |  15
-rw-r--r--  import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py      |  99
17 files changed, 497 insertions(+), 382 deletions(-)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
index 1fa67020c..cd7362c44 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
@@ -25,31 +25,26 @@ BitBake build tools.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-from __future__ import absolute_import
-from __future__ import print_function
import os, re
import signal
import logging
-import urllib
-import urlparse
+import urllib.request, urllib.parse, urllib.error
+if 'git' not in urllib.parse.uses_netloc:
+ urllib.parse.uses_netloc.append('git')
+import operator
+import collections
+import subprocess
+import pickle
import bb.persist_data, bb.utils
import bb.checksum
from bb import data
import bb.process
-import subprocess
__version__ = "2"
_checksum_cache = bb.checksum.FileChecksumCache()
logger = logging.getLogger("BitBake.Fetcher")
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info("Importing cPickle failed. "
- "Falling back to a very slow implementation.")
-
class BBFetchException(Exception):
"""Class all fetch exceptions inherit from"""
def __init__(self, message):
@@ -231,14 +226,14 @@ class URI(object):
# them are not quite RFC compliant.
uri, param_str = (uri.split(";", 1) + [None])[:2]
- urlp = urlparse.urlparse(uri)
+ urlp = urllib.parse.urlparse(uri)
self.scheme = urlp.scheme
reparse = 0
# Coerce urlparse to make URI scheme use netloc
- if not self.scheme in urlparse.uses_netloc:
- urlparse.uses_params.append(self.scheme)
+ if not self.scheme in urllib.parse.uses_netloc:
+ urllib.parse.uses_params.append(self.scheme)
reparse = 1
# Make urlparse happy(/ier) by converting local resources
@@ -249,7 +244,7 @@ class URI(object):
reparse = 1
if reparse:
- urlp = urlparse.urlparse(uri)
+ urlp = urllib.parse.urlparse(uri)
# Identify if the URI is relative or not
if urlp.scheme in self._relative_schemes and \
@@ -265,7 +260,7 @@ class URI(object):
if urlp.password:
self.userinfo += ':%s' % urlp.password
- self.path = urllib.unquote(urlp.path)
+ self.path = urllib.parse.unquote(urlp.path)
if param_str:
self.params = self._param_str_split(param_str, ";")
@@ -297,7 +292,7 @@ class URI(object):
if self.query else '')
def _param_str_split(self, string, elmdelim, kvdelim="="):
- ret = {}
+ ret = collections.OrderedDict()
for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
ret[k] = v
return ret
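A minimal standalone sketch of the ordered parameter split introduced above: collections.OrderedDict keeps URI parameters in their written order so re-serialized URIs stay stable (the example input is hypothetical):

import collections

def param_str_split(string, elmdelim, kvdelim="="):
    # Insertion order is preserved, unlike a plain dict on pre-3.7 Python
    ret = collections.OrderedDict()
    for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
        ret[k] = v
    return ret

print(param_str_split("protocol=https;branch=master", ";"))
# OrderedDict([('protocol', 'https'), ('branch', 'master')])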
@@ -313,11 +308,11 @@ class URI(object):
@property
def path_quoted(self):
- return urllib.quote(self.path)
+ return urllib.parse.quote(self.path)
@path_quoted.setter
def path_quoted(self, path):
- self.path = urllib.unquote(path)
+ self.path = urllib.parse.unquote(path)
@property
def path(self):
@@ -390,7 +385,7 @@ def decodeurl(url):
user = ''
pswd = ''
- p = {}
+ p = collections.OrderedDict()
if parm:
for s in parm.split(';'):
if s:
@@ -399,7 +394,7 @@ def decodeurl(url):
s1, s2 = s.split('=')
p[s1] = s2
- return type, host, urllib.unquote(path), user, pswd, p
+ return type, host, urllib.parse.unquote(path), user, pswd, p
def encodeurl(decoded):
"""Encodes a URL from tokens (scheme, network location, path,
@@ -423,7 +418,7 @@ def encodeurl(decoded):
# Standardise path to ensure comparisons work
while '//' in path:
path = path.replace("//", "/")
- url += "%s" % urllib.quote(path)
+ url += "%s" % urllib.parse.quote(path)
if p:
for parm in p:
url += ";%s=%s" % (parm, p[parm])
@@ -586,12 +581,12 @@ def verify_checksum(ud, d, precomputed={}):
raise NoChecksumError('Missing SRC_URI checksum', ud.url)
# Log missing sums so user can more easily add them
- logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
- 'SRC_URI[%s] = "%s"',
- ud.localpath, ud.md5_name, md5data)
- logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
- 'SRC_URI[%s] = "%s"',
- ud.localpath, ud.sha256_name, sha256data)
+ logger.warning('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
+ 'SRC_URI[%s] = "%s"',
+ ud.localpath, ud.md5_name, md5data)
+ logger.warning('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
+ 'SRC_URI[%s] = "%s"',
+ ud.localpath, ud.sha256_name, sha256data)
# We want to alert the user if a checksum is defined in the recipe but
# it does not match.
@@ -659,9 +654,9 @@ def verify_donestamp(ud, d, origud=None):
# files to those containing the checksums.
if not isinstance(e, EOFError):
# Ignore errors, they aren't fatal
- logger.warn("Couldn't load checksums from donestamp %s: %s "
- "(msg: %s)" % (ud.donestamp, type(e).__name__,
- str(e)))
+ logger.warning("Couldn't load checksums from donestamp %s: %s "
+ "(msg: %s)" % (ud.donestamp, type(e).__name__,
+ str(e)))
try:
checksums = verify_checksum(ud, d, precomputed_checksums)
@@ -669,14 +664,14 @@ def verify_donestamp(ud, d, origud=None):
# as an upgrade path from the previous done stamp file format.
if checksums != precomputed_checksums:
with open(ud.donestamp, "wb") as cachefile:
- p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
+ p = pickle.Pickler(cachefile, 2)
p.dump(checksums)
return True
except ChecksumError as e:
# Checksums failed to verify, trigger re-download and remove the
# incorrect stamp file.
- logger.warn("Checksum mismatch for local file %s\n"
- "Cleaning and trying again." % ud.localpath)
+ logger.warning("Checksum mismatch for local file %s\n"
+ "Cleaning and trying again." % ud.localpath)
if os.path.exists(ud.localpath):
rename_bad_checksum(ud, e.checksum)
bb.utils.remove(ud.donestamp)
@@ -703,13 +698,13 @@ def update_stamp(ud, d):
checksums = verify_checksum(ud, d)
# Store the checksums for later re-verification against the recipe
with open(ud.donestamp, "wb") as cachefile:
- p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
+ p = pickle.Pickler(cachefile, 2)
p.dump(checksums)
except ChecksumError as e:
# Checksums failed to verify, trigger re-download and remove the
# incorrect stamp file.
- logger.warn("Checksum mismatch for local file %s\n"
- "Cleaning and trying again." % ud.localpath)
+ logger.warning("Checksum mismatch for local file %s\n"
+ "Cleaning and trying again." % ud.localpath)
if os.path.exists(ud.localpath):
rename_bad_checksum(ud, e.checksum)
bb.utils.remove(ud.donestamp)
@@ -766,6 +761,7 @@ def get_srcrev(d, method_name='sortable_revision'):
if not format:
raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
+ name_to_rev = {}
seenautoinc = False
for scm in scms:
ud = urldata[scm]
@@ -774,7 +770,16 @@ def get_srcrev(d, method_name='sortable_revision'):
seenautoinc = seenautoinc or autoinc
if len(rev) > 10:
rev = rev[:10]
- format = format.replace(name, rev)
+ name_to_rev[name] = rev
+ # Replace names by revisions in the SRCREV_FORMAT string. The approach used
+ # here can handle names being prefixes of other names and names appearing
+ # as substrings in revisions (in which case the name should not be
+ # expanded). The '|' regular expression operator tries matches from left to
+ # right, so we need to sort the names with the longest ones first.
+ names_descending_len = sorted(name_to_rev, key=len, reverse=True)
+ name_to_rev_re = "|".join(re.escape(name) for name in names_descending_len)
+ format = re.sub(name_to_rev_re, lambda match: name_to_rev[match.group(0)], format)
+
if seenautoinc:
format = "AUTOINC+" + format
@@ -784,7 +789,7 @@ def localpath(url, d):
fetcher = bb.fetch2.Fetch([url], d)
return fetcher.localpath(url)
-def runfetchcmd(cmd, d, quiet=False, cleanup=None):
+def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
"""
Run cmd returning the command output
Raise an error if interrupted or cmd fails
@@ -807,13 +812,16 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None):
'GIT_SSL_CAINFO',
'GIT_SMART_HTTP',
'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
- 'SOCKS5_USER', 'SOCKS5_PASSWD']
+ 'SOCKS5_USER', 'SOCKS5_PASSWD',
+ 'DBUS_SESSION_BUS_ADDRESS',
+ 'P4CONFIG']
if not cleanup:
cleanup = []
+ origenv = d.getVar("BB_ORIGENV", False)
for var in exportvars:
- val = d.getVar(var, True)
+ val = d.getVar(var, True) or (origenv and origenv.getVar(var, True))
if val:
cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
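A sketch of the fallback logic above, with plain dicts standing in for the datastore and the captured BB_ORIGENV: a value set in the datastore wins, otherwise the saved original environment is consulted, and each hit is prepended to the shell command as an export.

def build_cmd(cmd, exportvars, datastore, origenv):
    for var in exportvars:
        # Datastore value first, then the saved original environment
        val = datastore.get(var) or (origenv or {}).get(var)
        if val:
            cmd = 'export %s="%s"; %s' % (var, val, cmd)
    return cmd

print(build_cmd("git fetch", ["http_proxy"], {}, {"http_proxy": "http://proxy:8080"}))
# export http_proxy="http://proxy:8080"; git fetch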
@@ -823,7 +831,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None):
error_message = ""
try:
- (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE)
+ (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
success = True
except bb.process.NotFoundError as e:
error_message = "Fetch command %s" % (e.command)
@@ -834,7 +842,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None):
output = "output:\n%s" % e.stderr
else:
output = "no output"
- error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output)
+ error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
except bb.process.CmdError as e:
error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
if not success:
@@ -937,8 +945,6 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
return found
return False
- os.chdir(ld.getVar("DL_DIR", True))
-
if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
ud.method.download(ud, ld)
if hasattr(ud.method,"build_mirror_data"):
@@ -982,8 +988,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
except bb.fetch2.BBFetchException as e:
if isinstance(e, ChecksumError):
- logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
- logger.warn(str(e))
+ logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
+ logger.warning(str(e))
if os.path.exists(ud.localpath):
rename_bad_checksum(ud, e.checksum)
elif isinstance(e, NoChecksumError):
@@ -1198,7 +1204,7 @@ class FetchData(object):
raise NonLocalMethod()
if self.parm.get("proto", None) and "protocol" not in self.parm:
- logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
+ logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
self.parm["protocol"] = self.parm.get("proto", None)
if hasattr(self.method, "urldata_init"):
@@ -1395,7 +1401,18 @@ class FetchMethod(object):
else:
cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
elif file.endswith('.deb') or file.endswith('.ipk'):
- cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file
+ output = subprocess.check_output('ar -t %s' % file, preexec_fn=subprocess_setup, shell=True)
+ datafile = None
+ if output:
+ for line in output.decode().splitlines():
+ if line.startswith('data.tar.'):
+ datafile = line
+ break
+ else:
+ raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
+ else:
+ raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
+ cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile)
elif file.endswith('.tar.7z'):
cmd = '7z x -so %s | tar xf - ' % file
elif file.endswith('.7z'):
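A standalone sketch of the deb/ipk member discovery above: list the archive with 'ar -t', pick the data.tar.* member (whose compression suffix varies), then extract it ('pkg.deb' is a hypothetical input file):

import subprocess

output = subprocess.check_output('ar -t pkg.deb', shell=True)
datafile = None
for line in output.decode().splitlines():
    if line.startswith('data.tar.'):  # data.tar.gz, data.tar.xz, ...
        datafile = line
        break
if datafile is None:
    raise RuntimeError('no data.tar.* member found')
cmd = 'ar x pkg.deb %s && tar --no-same-owner -xpf %s && rm %s' % (
    datafile, datafile, datafile)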
@@ -1403,7 +1420,13 @@ class FetchMethod(object):
# If 'subdir' param exists, create a dir and use it as destination for unpack cmd
if 'subdir' in urldata.parm:
- unpackdir = '%s/%s' % (rootdir, urldata.parm.get('subdir'))
+ subdir = urldata.parm.get('subdir')
+ if os.path.isabs(subdir):
+ if not os.path.realpath(subdir).startswith(os.path.realpath(rootdir)):
+ raise UnpackError("subdir argument isn't a subdirectory of unpack root %s" % rootdir, urldata.url)
+ unpackdir = subdir
+ else:
+ unpackdir = os.path.join(rootdir, subdir)
bb.utils.mkdirhier(unpackdir)
else:
unpackdir = rootdir
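A sketch of the containment check above: both paths are resolved with realpath so symlinks and '..' segments cannot point an absolute subdir outside the unpack root.

import os

def resolve_unpackdir(subdir, rootdir):
    if os.path.isabs(subdir):
        # realpath canonicalizes symlinks and '..' before comparing
        if not os.path.realpath(subdir).startswith(os.path.realpath(rootdir)):
            raise ValueError("subdir %s escapes unpack root %s" % (subdir, rootdir))
        return subdir
    return os.path.join(rootdir, subdir)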
@@ -1422,22 +1445,16 @@ class FetchMethod(object):
if urlpath.find("/") != -1:
destdir = urlpath.rsplit("/", 1)[0] + '/'
bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
- cmd = 'cp -fpPR %s %s' % (file, destdir)
+ cmd = 'cp -fpPRH %s %s' % (file, destdir)
if not cmd:
return
- # Change to unpackdir before executing command
- save_cwd = os.getcwd();
- os.chdir(unpackdir)
-
path = data.getVar('PATH', True)
if path:
cmd = "PATH=\"%s\" %s" % (path, cmd)
- bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
- ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
-
- os.chdir(save_cwd)
+ bb.note("Unpacking %s to %s/" % (file, unpackdir))
+ ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=unpackdir)
if ret != 0:
raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
@@ -1505,8 +1522,9 @@ class Fetch(object):
self.connection_cache = connection_cache
fn = d.getVar('FILE', True)
- if cache and fn and fn in urldata_cache:
- self.ud = urldata_cache[fn]
+ mc = d.getVar('__BBMULTICONFIG', True) or ""
+ if cache and fn and mc + fn in urldata_cache:
+ self.ud = urldata_cache[mc + fn]
for url in urls:
if url not in self.ud:
@@ -1518,7 +1536,7 @@ class Fetch(object):
pass
if fn and cache:
- urldata_cache[fn] = self.ud
+ urldata_cache[mc + fn] = self.ud
def localpath(self, url):
if url not in self.urls:
@@ -1572,8 +1590,6 @@ class Fetch(object):
if premirroronly:
self.d.setVar("BB_NO_NETWORK", "1")
- os.chdir(self.d.getVar("DL_DIR", True))
-
firsterr = None
verified_stamp = verify_donestamp(ud, self.d)
if not localpath and (not verified_stamp or m.need_update(ud, self.d)):
@@ -1594,14 +1610,14 @@ class Fetch(object):
except BBFetchException as e:
if isinstance(e, ChecksumError):
- logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
+ logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
logger.debug(1, str(e))
if os.path.exists(ud.localpath):
rename_bad_checksum(ud, e.checksum)
elif isinstance(e, NoChecksumError):
raise
else:
- logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
+ logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
logger.debug(1, str(e))
firsterr = e
# Remove any incomplete fetch
@@ -1734,7 +1750,7 @@ class FetchConnectionCache(object):
del self.cache[cn]
def close_connections(self):
- for cn in self.cache.keys():
+ for cn in list(self.cache.keys()):
self.cache[cn].close()
del self.cache[cn]
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py
index 03e9ac461..72264afb5 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py
@@ -88,28 +88,25 @@ class Bzr(FetchMethod):
bzrcmd = self._buildbzrcommand(ud, d, "update")
logger.debug(1, "BZR Update %s", ud.url)
bb.fetch2.check_network_access(d, bzrcmd, ud.url)
- os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
- runfetchcmd(bzrcmd, d)
+ runfetchcmd(bzrcmd, d, workdir=os.path.join(ud.pkgdir, os.path.basename(ud.path)))
else:
bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
bzrcmd = self._buildbzrcommand(ud, d, "fetch")
bb.fetch2.check_network_access(d, bzrcmd, ud.url)
logger.debug(1, "BZR Checkout %s", ud.url)
bb.utils.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
logger.debug(1, "Running %s", bzrcmd)
- runfetchcmd(bzrcmd, d)
-
- os.chdir(ud.pkgdir)
+ runfetchcmd(bzrcmd, d, workdir=ud.pkgdir)
scmdata = ud.parm.get("scmdata", "")
if scmdata == "keep":
tar_flags = ""
else:
- tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"
+ tar_flags = "--exclude='.bzr' --exclude='.bzrtags'"
# tar them up to a defined filename
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)),
+ d, cleanup=[ud.localpath], workdir=ud.pkgdir)
def supports_srcrev(self):
return True
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py
index ba83e7cb6..70e280a8d 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py
@@ -202,11 +202,10 @@ class ClearCase(FetchMethod):
def _remove_view(self, ud, d):
if os.path.exists(ud.viewdir):
- os.chdir(ud.ccasedir)
cmd = self._build_ccase_command(ud, 'rmview');
logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
bb.fetch2.check_network_access(d, cmd, ud.url)
- output = runfetchcmd(cmd, d)
+ output = runfetchcmd(cmd, d, workdir=ud.ccasedir)
logger.info("rmview output: %s", output)
def need_update(self, ud, d):
@@ -241,11 +240,10 @@ class ClearCase(FetchMethod):
raise e
# Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
- os.chdir(ud.viewdir)
cmd = self._build_ccase_command(ud, 'setcs');
logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
bb.fetch2.check_network_access(d, cmd, ud.url)
- output = runfetchcmd(cmd, d)
+ output = runfetchcmd(cmd, d, workdir=ud.viewdir)
logger.info("%s", output)
# Copy the configspec to the viewdir so we have it in our source tarball later
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py
index d27d96f68..5ff70ba92 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py
@@ -123,22 +123,23 @@ class Cvs(FetchMethod):
pkg = d.getVar('PN', True)
pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
moddir = os.path.join(pkgdir, localdir)
+ workdir = None
if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
logger.info("Update " + ud.url)
bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
# update sources there
- os.chdir(moddir)
+ workdir = moddir
cmd = cvsupdatecmd
else:
logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(pkgdir)
- os.chdir(pkgdir)
+ workdir = pkgdir
logger.debug(1, "Running %s", cvscmd)
bb.fetch2.check_network_access(d, cvscmd, ud.url)
cmd = cvscmd
- runfetchcmd(cmd, d, cleanup = [moddir])
+ runfetchcmd(cmd, d, cleanup=[moddir], workdir=workdir)
if not os.access(moddir, os.R_OK):
raise FetchError("Directory %s was not readable despite sucessful fetch?!" % moddir, ud.url)
@@ -147,18 +148,18 @@ class Cvs(FetchMethod):
if scmdata == "keep":
tar_flags = ""
else:
- tar_flags = "--exclude 'CVS'"
+ tar_flags = "--exclude='CVS'"
# tar them up to a defined filename
+ workdir = None
if 'fullpath' in ud.parm:
- os.chdir(pkgdir)
+ workdir = pkgdir
cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
else:
- os.chdir(moddir)
- os.chdir('..')
+ workdir = os.path.dirname(os.path.realpath(moddir))
cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))
- runfetchcmd(cmd, d, cleanup = [ud.localpath])
+ runfetchcmd(cmd, d, cleanup=[ud.localpath], workdir=workdir)
def clean(self, ud, d):
""" Clean CVS Files and tarballs """
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py
index 526668bc2..1bec60ab7 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py
@@ -49,6 +49,10 @@ Supported SRC_URI options are:
referring to commit which is valid in tag instead of branch.
The default is "0", set nobranch=1 if needed.
+- usehead
+ For local git:// urls to use the current branch HEAD as the revision for use with
+ AUTOREV. Implies nobranch.
+
"""
#Copyright (C) 2005 Richard Purdie
@@ -71,11 +75,53 @@ import os
import re
import bb
import errno
+import bb.progress
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
+
+class GitProgressHandler(bb.progress.LineFilterProgressHandler):
+ """Extract progress information from git output"""
+ def __init__(self, d):
+ self._buffer = ''
+ self._count = 0
+ super(GitProgressHandler, self).__init__(d)
+ # Send an initial progress event so the bar gets shown
+ self._fire_progress(-1)
+
+ def write(self, string):
+ self._buffer += string
+ stages = ['Counting objects', 'Compressing objects', 'Receiving objects', 'Resolving deltas']
+ stage_weights = [0.2, 0.05, 0.5, 0.25]
+ stagenum = 0
+ for i, stage in reversed(list(enumerate(stages))):
+ if stage in self._buffer:
+ stagenum = i
+ self._buffer = ''
+ break
+ self._status = stages[stagenum]
+ percs = re.findall(r'(\d+)%', string)
+ if percs:
+ progress = int(round((int(percs[-1]) * stage_weights[stagenum]) + (sum(stage_weights[:stagenum]) * 100)))
+ rates = re.findall(r'([\d.]+ [a-zA-Z]*/s+)', string)
+ if rates:
+ rate = rates[-1]
+ else:
+ rate = None
+ self.update(progress, rate)
+ else:
+ if stagenum == 0:
+ percs = re.findall(r': (\d+)', string)
+ if percs:
+ count = int(percs[-1])
+ if count > self._count:
+ self._count = count
+ self._fire_progress(-count)
+ super(GitProgressHandler, self).write(string)
+
+
class Git(FetchMethod):
"""Class to fetch a module or modules from git repositories"""
def init(self, d):
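A standalone sketch of the weighted progress mapping GitProgressHandler applies above: each git stage owns a fixed slice of the overall bar, and a per-stage percentage is scaled into that slice.

stages = ['Counting objects', 'Compressing objects',
          'Receiving objects', 'Resolving deltas']
stage_weights = [0.2, 0.05, 0.5, 0.25]

def overall_progress(stagenum, stage_percent):
    # Completed stages contribute their full weight; the current stage
    # contributes proportionally to its own percentage.
    return int(round(stage_percent * stage_weights[stagenum]
                     + sum(stage_weights[:stagenum]) * 100))

print(overall_progress(2, 50))  # halfway through 'Receiving objects' -> 50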
@@ -111,6 +157,13 @@ class Git(FetchMethod):
ud.nobranch = ud.parm.get("nobranch","0") == "1"
+ # usehead implies nobranch
+ ud.usehead = ud.parm.get("usehead","0") == "1"
+ if ud.usehead:
+ if ud.proto != "file":
+ raise bb.fetch2.ParameterError("The usehead option is only for use with local ('protocol=file') git repositories", ud.url)
+ ud.nobranch = 1
+
# bareclone implies nocheckout
ud.bareclone = ud.parm.get("bareclone","0") == "1"
if ud.bareclone:
@@ -126,6 +179,9 @@ class Git(FetchMethod):
ud.branches[name] = branch
ud.unresolvedrev[name] = branch
+ if ud.usehead:
+ ud.unresolvedrev['default'] = 'HEAD'
+
ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git -c core.fsyncobjectfiles=0"
ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable
@@ -163,9 +219,8 @@ class Git(FetchMethod):
def need_update(self, ud, d):
if not os.path.exists(ud.clonedir):
return True
- os.chdir(ud.clonedir)
for name in ud.names:
- if not self._contains_ref(ud, d, name):
+ if not self._contains_ref(ud, d, name, ud.clonedir):
return True
if ud.write_tarballs and not os.path.exists(ud.fullmirror):
return True
@@ -186,8 +241,7 @@ class Git(FetchMethod):
# If the checkout doesn't exist and the mirror tarball does, extract it
if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
bb.utils.mkdirhier(ud.clonedir)
- os.chdir(ud.clonedir)
- runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
+ runfetchcmd("tar -xzf %s" % (ud.fullmirror), d, workdir=ud.clonedir)
repourl = self._get_repo_url(ud)
@@ -196,38 +250,38 @@ class Git(FetchMethod):
# We do this since git will use a "-l" option automatically for local urls where possible
if repourl.startswith("file://"):
repourl = repourl[7:]
- clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
+ clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, repourl, ud.clonedir)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, clone_cmd)
- runfetchcmd(clone_cmd, d)
+ progresshandler = GitProgressHandler(d)
+ runfetchcmd(clone_cmd, d, log=progresshandler)
- os.chdir(ud.clonedir)
# Update the checkout if needed
needupdate = False
for name in ud.names:
- if not self._contains_ref(ud, d, name):
+ if not self._contains_ref(ud, d, name, ud.clonedir):
needupdate = True
if needupdate:
try:
- runfetchcmd("%s remote rm origin" % ud.basecmd, d)
+ runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
except bb.fetch2.FetchError:
logger.debug(1, "No Origin")
- runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
- fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
+ runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
+ fetch_cmd = "LANG=C %s fetch -f --prune --progress %s refs/*:refs/*" % (ud.basecmd, repourl)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
- runfetchcmd(fetch_cmd, d)
- runfetchcmd("%s prune-packed" % ud.basecmd, d)
- runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
+ progresshandler = GitProgressHandler(d)
+ runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
+ runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
+ runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d, workdir=ud.clonedir)
try:
os.unlink(ud.fullmirror)
except OSError as exc:
if exc.errno != errno.ENOENT:
raise
- os.chdir(ud.clonedir)
for name in ud.names:
- if not self._contains_ref(ud, d, name):
+ if not self._contains_ref(ud, d, name, ud.clonedir):
raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
def build_mirror_data(self, ud, d):
@@ -237,10 +291,9 @@ class Git(FetchMethod):
if os.path.islink(ud.fullmirror):
os.unlink(ud.fullmirror)
- os.chdir(ud.clonedir)
logger.info("Creating tarball of git repository")
- runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
- runfetchcmd("touch %s.done" % (ud.fullmirror), d)
+ runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d, workdir=ud.clonedir)
+ runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=ud.clonedir)
def unpack(self, ud, destdir, d):
""" unpack the downloaded src to destdir"""
@@ -263,21 +316,21 @@ class Git(FetchMethod):
cloneflags += " --mirror"
runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, ud.clonedir, destdir), d)
- os.chdir(destdir)
repourl = self._get_repo_url(ud)
- runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d)
+ runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)
if not ud.nocheckout:
if subdir != "":
- runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
- runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
+ runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d,
+ workdir=destdir)
+ runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
elif not ud.nobranch:
branchname = ud.branches[ud.names[0]]
runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
- ud.revisions[ud.names[0]]), d)
+ ud.revisions[ud.names[0]]), d, workdir=destdir)
runfetchcmd("%s branch --set-upstream %s origin/%s" % (ud.basecmd, branchname, \
- branchname), d)
+ branchname), d, workdir=destdir)
else:
- runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)
+ runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir)
return True
@@ -291,7 +344,7 @@ class Git(FetchMethod):
def supports_srcrev(self):
return True
- def _contains_ref(self, ud, d, name):
+ def _contains_ref(self, ud, d, name, wd):
cmd = ""
if ud.nobranch:
cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
@@ -300,7 +353,7 @@ class Git(FetchMethod):
cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
ud.basecmd, ud.revisions[name], ud.branches[name])
try:
- output = runfetchcmd(cmd, d, quiet=True)
+ output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
except bb.fetch2.FetchError:
return False
if len(output.split()) > 1:
@@ -343,16 +396,17 @@ class Git(FetchMethod):
"""
output = self._lsremote(ud, d, "")
# Tags of the form ^{} may not work, need to fallback to other form
- if ud.unresolvedrev[name][:5] == "refs/":
+ if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead:
head = ud.unresolvedrev[name]
tag = ud.unresolvedrev[name]
else:
head = "refs/heads/%s" % ud.unresolvedrev[name]
tag = "refs/tags/%s" % ud.unresolvedrev[name]
for s in [head, tag + "^{}", tag]:
- for l in output.split('\n'):
- if s in l:
- return l.split()[0]
+ for l in output.strip().split('\n'):
+ sha1, ref = l.split()
+ if s == ref:
+ return sha1
raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
(ud.unresolvedrev[name], ud.host+ud.path))
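A sketch of the exact-ref matching above: each ls-remote line is '<sha1><tab><ref>', and comparing the ref field exactly avoids the old substring test, which could match part of a sha1 or an unrelated ref (the output here is invented):

output = "deadbeef00\trefs/heads/master\ncafebabe11\trefs/tags/v1.0\n"
wanted = "refs/heads/master"
for l in output.strip().split('\n'):
    sha1, ref = l.split()
    if ref == wanted:   # exact comparison, not 'wanted in l'
        print(sha1)     # deadbeef00
        break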
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py
index 0f3789745..4937a1089 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py
@@ -34,43 +34,42 @@ class GitANNEX(Git):
"""
return ud.type in ['gitannex']
- def uses_annex(self, ud, d):
+ def uses_annex(self, ud, d, wd):
for name in ud.names:
try:
- runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True)
+ runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True, workdir=wd)
return True
except bb.fetch.FetchError:
pass
return False
- def update_annex(self, ud, d):
+ def update_annex(self, ud, d, wd):
try:
- runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True)
+ runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True, workdir=wd)
except bb.fetch.FetchError:
return False
- runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True)
+ runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True, workdir=wd)
return True
def download(self, ud, d):
Git.download(self, ud, d)
- os.chdir(ud.clonedir)
- annex = self.uses_annex(ud, d)
+ annex = self.uses_annex(ud, d, ud.clonedir)
if annex:
- self.update_annex(ud, d)
+ self.update_annex(ud, d, ud.clonedir)
def unpack(self, ud, destdir, d):
Git.unpack(self, ud, destdir, d)
- os.chdir(ud.destdir)
try:
- runfetchcmd("%s annex sync" % (ud.basecmd), d)
+ runfetchcmd("%s annex init" % (ud.basecmd), d, workdir=ud.destdir)
except bb.fetch.FetchError:
pass
- annex = self.uses_annex(ud, d)
+ annex = self.uses_annex(ud, d, ud.destdir)
if annex:
- runfetchcmd("%s annex get" % (ud.basecmd), d)
- runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True)
+ runfetchcmd("%s annex get" % (ud.basecmd), d, workdir=ud.destdir)
+ runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True, workdir=ud.destdir)
+
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py
index 752f1d3c1..661376204 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -43,10 +43,10 @@ class GitSM(Git):
"""
return ud.type in ['gitsm']
- def uses_submodules(self, ud, d):
+ def uses_submodules(self, ud, d, wd):
for name in ud.names:
try:
- runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True)
+ runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=wd)
return True
except bb.fetch.FetchError:
pass
@@ -107,28 +107,25 @@ class GitSM(Git):
os.mkdir(tmpclonedir)
os.rename(ud.clonedir, gitdir)
runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
- os.chdir(tmpclonedir)
- runfetchcmd(ud.basecmd + " reset --hard", d)
- runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
- runfetchcmd(ud.basecmd + " submodule update --init --recursive", d)
+ runfetchcmd(ud.basecmd + " reset --hard", d, workdir=tmpclonedir)
+ runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=tmpclonedir)
+ runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=tmpclonedir)
self._set_relative_paths(tmpclonedir)
- runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d)
+ runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d, workdir=tmpclonedir)
os.rename(gitdir, ud.clonedir,)
bb.utils.remove(tmpclonedir, True)
def download(self, ud, d):
Git.download(self, ud, d)
- os.chdir(ud.clonedir)
- submodules = self.uses_submodules(ud, d)
+ submodules = self.uses_submodules(ud, d, ud.clonedir)
if submodules:
self.update_submodules(ud, d)
def unpack(self, ud, destdir, d):
Git.unpack(self, ud, destdir, d)
- os.chdir(ud.destdir)
- submodules = self.uses_submodules(ud, d)
+ submodules = self.uses_submodules(ud, d, ud.destdir)
if submodules:
- runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
- runfetchcmd(ud.basecmd + " submodule update --init --recursive", d)
+ runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=ud.destdir)
+ runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=ud.destdir)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py
index 3b743ff51..20df8016d 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py
@@ -169,25 +169,22 @@ class Hg(FetchMethod):
# If the checkout doesn't exist and the mirror tarball does, extract it
if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
bb.utils.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
+ runfetchcmd("tar -xzf %s" % (ud.fullmirror), d, workdir=ud.pkgdir)
if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
# Found the source, check whether need pull
updatecmd = self._buildhgcommand(ud, d, "update")
- os.chdir(ud.moddir)
logger.debug(1, "Running %s", updatecmd)
try:
- runfetchcmd(updatecmd, d)
+ runfetchcmd(updatecmd, d, workdir=ud.moddir)
except bb.fetch2.FetchError:
# Running pull in the repo
pullcmd = self._buildhgcommand(ud, d, "pull")
logger.info("Pulling " + ud.url)
# update sources there
- os.chdir(ud.moddir)
logger.debug(1, "Running %s", pullcmd)
bb.fetch2.check_network_access(d, pullcmd, ud.url)
- runfetchcmd(pullcmd, d)
+ runfetchcmd(pullcmd, d, workdir=ud.moddir)
try:
os.unlink(ud.fullmirror)
except OSError as exc:
@@ -200,17 +197,15 @@ class Hg(FetchMethod):
logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
logger.debug(1, "Running %s", fetchcmd)
bb.fetch2.check_network_access(d, fetchcmd, ud.url)
- runfetchcmd(fetchcmd, d)
+ runfetchcmd(fetchcmd, d, workdir=ud.pkgdir)
# Even when we clone (fetch), we still need to update as hg's clone
# won't check out the specified revision if it's on a branch
updatecmd = self._buildhgcommand(ud, d, "update")
- os.chdir(ud.moddir)
logger.debug(1, "Running %s", updatecmd)
- runfetchcmd(updatecmd, d)
+ runfetchcmd(updatecmd, d, workdir=ud.moddir)
def clean(self, ud, d):
""" Clean the hg dir """
@@ -246,10 +241,9 @@ class Hg(FetchMethod):
if os.path.islink(ud.fullmirror):
os.unlink(ud.fullmirror)
- os.chdir(ud.pkgdir)
logger.info("Creating tarball of hg repository")
- runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d)
- runfetchcmd("touch %s.done" % (ud.fullmirror), d)
+ runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d, workdir=ud.pkgdir)
+ runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=ud.pkgdir)
def localpath(self, ud, d):
return ud.pkgdir
@@ -269,10 +263,8 @@ class Hg(FetchMethod):
logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
logger.debug(2, "Unpack: updating source in '" + codir + "'")
- os.chdir(codir)
- runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d)
- runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d)
+ runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d, workdir=codir)
+ runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d, workdir=codir)
else:
logger.debug(2, "Unpack: extracting source to '" + codir + "'")
- os.chdir(ud.moddir)
- runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d)
+ runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d, workdir=ud.moddir)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py
index 303a52b63..51ca78d12 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py
@@ -26,7 +26,7 @@ BitBake build tools.
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
import os
-import urllib
+import urllib.request, urllib.parse, urllib.error
import bb
import bb.utils
from bb import data
@@ -42,7 +42,7 @@ class Local(FetchMethod):
def urldata_init(self, ud, d):
# We don't set localfile as for this fetcher the file is already local!
- ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0])
+ ud.decodedurl = urllib.parse.unquote(ud.url.split("://")[1].split(";")[0])
ud.basename = os.path.basename(ud.decodedurl)
ud.basepath = ud.decodedurl
ud.needdonestamp = False
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py
index e8d9b1109..699ae72e0 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py
@@ -13,14 +13,14 @@ Usage in the recipe:
- name
- version
- npm://registry.npmjs.org/${PN}/-/${PN}-${PV}.tgz would become npm://registry.npmjs.org;name=${PN};ver=${PV}
+ npm://registry.npmjs.org/${PN}/-/${PN}-${PV}.tgz would become npm://registry.npmjs.org;name=${PN};version=${PV}
The fetcher triggers off the existence of ud.localpath. If that exists and has the ".done" stamp, it's assumed the fetch is good/done
"""
import os
import sys
-import urllib
+import urllib.request, urllib.parse, urllib.error
import json
import subprocess
import signal
@@ -88,7 +88,7 @@ class Npm(FetchMethod):
ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest)
self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
- self.basecmd += " --directory-prefix=%s " % prefixdir
+ ud.prefixdir = prefixdir
ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0")
ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version)
@@ -102,7 +102,8 @@ class Npm(FetchMethod):
def _runwget(self, ud, d, command, quiet):
logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
bb.fetch2.check_network_access(d, command)
- runfetchcmd(command, d, quiet)
+ dldir = d.getVar("DL_DIR", True)
+ runfetchcmd(command, d, quiet, workdir=dldir)
def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
file = data[pkg]['tgz']
@@ -113,16 +114,13 @@ class Npm(FetchMethod):
bb.fatal("NPM package %s downloaded not a tarball!" % file)
# Change to subdir before executing command
- save_cwd = os.getcwd()
if not os.path.exists(destdir):
os.makedirs(destdir)
- os.chdir(destdir)
path = d.getVar('PATH', True)
if path:
cmd = "PATH=\"%s\" %s" % (path, cmd)
- bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
- ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
- os.chdir(save_cwd)
+ bb.note("Unpacking %s to %s/" % (file, destdir))
+ ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=destdir)
if ret != 0:
raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)
@@ -140,7 +138,12 @@ class Npm(FetchMethod):
workobj = json.load(datafile)
dldir = "%s/%s" % (os.path.dirname(ud.localpath), ud.pkgname)
- self._unpackdep(ud, ud.pkgname, workobj, "%s/npmpkg" % destdir, dldir, d)
+ if 'subdir' in ud.parm:
+ unpackdir = '%s/%s' % (destdir, ud.parm.get('subdir'))
+ else:
+ unpackdir = '%s/npmpkg' % destdir
+
+ self._unpackdep(ud, ud.pkgname, workobj, unpackdir, dldir, d)
def _parse_view(self, output):
'''
@@ -162,7 +165,9 @@ class Npm(FetchMethod):
pdata = json.loads('\n'.join(datalines))
return pdata
- def _getdependencies(self, pkg, data, version, d, ud, optional=False):
+ def _getdependencies(self, pkg, data, version, d, ud, optional=False, fetchedlist=None):
+ if fetchedlist is None:
+ fetchedlist = []
pkgfullname = pkg
if version != '*' and not '/' in version:
pkgfullname += "@'%s'" % version
@@ -184,7 +189,9 @@ class Npm(FetchMethod):
outputurl = pdata['dist']['tarball']
data[pkg] = {}
data[pkg]['tgz'] = os.path.basename(outputurl)
- self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False)
+ if not outputurl in fetchedlist:
+ self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
+ fetchedlist.append(outputurl)
dependencies = pdata.get('dependencies', {})
optionalDependencies = pdata.get('optionalDependencies', {})
@@ -196,13 +203,20 @@ class Npm(FetchMethod):
optdepsfound[dep] = dependencies[dep]
else:
depsfound[dep] = dependencies[dep]
- for dep, version in optdepsfound.iteritems():
- self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True)
- for dep, version in depsfound.iteritems():
- self._getdependencies(dep, data[pkg]['deps'], version, d, ud)
+ for dep, version in optdepsfound.items():
+ self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True, fetchedlist=fetchedlist)
+ for dep, version in depsfound.items():
+ self._getdependencies(dep, data[pkg]['deps'], version, d, ud, fetchedlist=fetchedlist)
- def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest):
+ def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest, toplevel=True):
logger.debug(2, "NPM shrinkwrap file is %s" % data)
+ if toplevel:
+ name = data.get('name', None)
+ if name and name != pkg:
+ for obj in data.get('dependencies', []):
+ if obj == pkg:
+ self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest, False)
+ return
outputurl = "invalid"
if ('resolved' not in data) or (not data['resolved'].startswith('http')):
# will be the case for ${PN}
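A sketch of the shared-download bookkeeping above: fetchedlist defaults to None and is created once in the top-level call, so all recursive calls append to the same list and each tarball URL is fetched at most once (the tree layout is a simplified stand-in):

def fetch_deps(pkg, tree, fetchedlist=None):
    if fetchedlist is None:   # avoid a shared mutable default argument
        fetchedlist = []
    url = tree[pkg]["tgz"]
    if url not in fetchedlist:
        print("fetching", url)          # stand-in for the real wget call
        fetchedlist.append(url)
    for dep in tree[pkg].get("deps", []):
        fetch_deps(dep, tree, fetchedlist)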
@@ -211,7 +225,7 @@ class Npm(FetchMethod):
outputurl = runfetchcmd(fetchcmd, d, True)
else:
outputurl = data['resolved']
- self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False)
+ self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
manifest[pkg] = {}
manifest[pkg]['tgz'] = os.path.basename(outputurl).rstrip()
manifest[pkg]['deps'] = {}
@@ -228,7 +242,7 @@ class Npm(FetchMethod):
if 'dependencies' in data:
for obj in data['dependencies']:
logger.debug(2, "Found dep is %s" % str(obj))
- self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps'])
+ self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps'], False)
def download(self, ud, d):
"""Fetch url"""
@@ -239,10 +253,7 @@ class Npm(FetchMethod):
if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
dest = d.getVar("DL_DIR", True)
bb.utils.mkdirhier(dest)
- save_cwd = os.getcwd()
- os.chdir(dest)
- runfetchcmd("tar -xJf %s" % (ud.fullmirror), d)
- os.chdir(save_cwd)
+ runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
return
shwrf = d.getVar('NPM_SHRINKWRAP', True)
@@ -251,14 +262,14 @@ class Npm(FetchMethod):
with open(shwrf) as datafile:
shrinkobj = json.load(datafile)
except:
- logger.warn('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
+ logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
lckdf = d.getVar('NPM_LOCKDOWN', True)
logger.debug(2, "NPM lockdown file is %s" % lckdf)
try:
with open(lckdf) as datafile:
lockdown = json.load(datafile)
except:
- logger.warn('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)
+ logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)
if ('name' not in shrinkobj):
self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
@@ -275,10 +286,8 @@ class Npm(FetchMethod):
if os.path.islink(ud.fullmirror):
os.unlink(ud.fullmirror)
- save_cwd = os.getcwd()
- os.chdir(d.getVar("DL_DIR", True))
+ dldir = d.getVar("DL_DIR", True)
logger.info("Creating tarball of npm data")
- runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d)
- runfetchcmd("touch %s.done" % (ud.fullmirror), d)
- os.chdir(save_cwd)
-
+ runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d,
+ workdir=dldir)
+ runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=dldir)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py
index d051dfdaf..295abf953 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py
@@ -88,23 +88,21 @@ class Osc(FetchMethod):
oscupdatecmd = self._buildosccommand(ud, d, "update")
logger.info("Update "+ ud.url)
# update sources there
- os.chdir(ud.moddir)
logger.debug(1, "Running %s", oscupdatecmd)
bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
- runfetchcmd(oscupdatecmd, d)
+ runfetchcmd(oscupdatecmd, d, workdir=ud.moddir)
else:
oscfetchcmd = self._buildosccommand(ud, d, "fetch")
logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
logger.debug(1, "Running %s", oscfetchcmd)
bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
- runfetchcmd(oscfetchcmd, d)
+ runfetchcmd(oscfetchcmd, d, workdir=ud.pkgdir)
- os.chdir(os.path.join(ud.pkgdir + ud.path))
# tar them up to a defined filename
- runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath])
+ runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d,
+ cleanup=[ud.localpath], workdir=os.path.join(ud.pkgdir + ud.path))
def supports_srcrev(self):
return False
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py
index 3a10c7ca3..50cb47909 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py
@@ -1,14 +1,12 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
+BitBake 'Fetch' implementation for perforce
"""
# Copyright (C) 2003, 2004 Chris Larson
+# Copyright (C) 2016 Kodak Alaris, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -25,9 +23,7 @@ BitBake build tools.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-from future_builtins import zip
import os
-import subprocess
import logging
import bb
from bb import data
@@ -37,151 +33,178 @@ from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
class Perforce(FetchMethod):
+ """ Class to fetch from perforce repositories """
def supports(self, ud, d):
+ """ Check to see if a given url can be fetched with perforce. """
return ud.type in ['p4']
- def doparse(url, d):
- parm = {}
- path = url.split("://")[1]
- delim = path.find("@");
+ def urldata_init(self, ud, d):
+ """
+ Initialize perforce specific variables within url data. If P4CONFIG is
+ provided by the env, use it. If P4PORT is specified by the recipe, use
+ its values, which may override the settings in P4CONFIG.
+ """
+ ud.basecmd = d.getVar('FETCHCMD_p4', True)
+ if not ud.basecmd:
+ ud.basecmd = "/usr/bin/env p4"
+
+ ud.dldir = d.getVar('P4DIR', True)
+ if not ud.dldir:
+ ud.dldir = '%s/%s' % (d.getVar('DL_DIR', True), 'p4')
+
+ path = ud.url.split('://')[1]
+ path = path.split(';')[0]
+ delim = path.find('@');
if delim != -1:
- (user, pswd, host, port) = path.split('@')[0].split(":")
- path = path.split('@')[1]
+ (ud.user, ud.pswd) = path.split('@')[0].split(':')
+ ud.path = path.split('@')[1]
else:
- (host, port) = d.getVar('P4PORT', False).split(':')
- user = ""
- pswd = ""
-
- if path.find(";") != -1:
- keys=[]
- values=[]
- plist = path.split(';')
- for item in plist:
- if item.count('='):
- (key, value) = item.split('=')
- keys.append(key)
- values.append(value)
-
- parm = dict(zip(keys, values))
- path = "//" + path.split(';')[0]
- host += ":%s" % (port)
- parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
-
- return host, path, user, pswd, parm
- doparse = staticmethod(doparse)
-
- def getcset(d, depot, host, user, pswd, parm):
- p4opt = ""
- if "cset" in parm:
- return parm["cset"];
- if user:
- p4opt += " -u %s" % (user)
- if pswd:
- p4opt += " -P %s" % (pswd)
- if host:
- p4opt += " -p %s" % (host)
-
- p4date = d.getVar("P4DATE", True)
- if "revision" in parm:
- depot += "#%s" % (parm["revision"])
- elif "label" in parm:
- depot += "@%s" % (parm["label"])
- elif p4date:
- depot += "@%s" % (p4date)
-
- p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
- logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
- p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
- cset = p4file.strip()
- logger.debug(1, "READ %s", cset)
- if not cset:
- return -1
-
- return cset.split(' ')[1]
- getcset = staticmethod(getcset)
+ ud.path = path
- def urldata_init(self, ud, d):
- (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)
+ ud.usingp4config = False
+ p4port = d.getVar('P4PORT', True)
- base_path = path.replace('/...', '')
- base_path = self._strip_leading_slashes(base_path)
-
- if "label" in parm:
- version = parm["label"]
+ if p4port:
+ logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port)
+ ud.host = p4port
+ else:
+ logger.debug(1, 'Trying to use P4CONFIG to automatically set P4PORT...')
+ ud.usingp4config = True
+ p4cmd = '%s info | grep "Server address"' % ud.basecmd
+ bb.fetch2.check_network_access(d, p4cmd)
+ ud.host = runfetchcmd(p4cmd, d, True)
+ ud.host = ud.host.split(': ')[1].strip()
+ logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
+ if not ud.host:
+ raise FetchError('Could not determine P4PORT from P4CONFIG')
+
+ if ud.path.find('/...') >= 0:
+ ud.pathisdir = True
else:
- version = Perforce.getcset(d, path, host, user, pswd, parm)
+ ud.pathisdir = False
- ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base_path.replace('/', '.'), version), d)
+ cleanedpath = ud.path.replace('/...', '').replace('/', '.')
+ cleanedhost = ud.host.replace(':', '.')
+ ud.pkgdir = os.path.join(ud.dldir, cleanedhost, cleanedpath)
- def download(self, ud, d):
+ ud.setup_revisons(d)
+
+ ud.localfile = data.expand('%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, ud.revision), d)
+
+ def _buildp4command(self, ud, d, command, depot_filename=None):
"""
- Fetch urls
+ Build a p4 commandline. Valid commands are "changes", "print", and
+ "files". depot_filename is the full path to the file in the depot
+ including the trailing '#rev' value.
"""
+ p4opt = ""
+
+ if ud.user:
+ p4opt += ' -u "%s"' % (ud.user)
- (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)
+ if ud.pswd:
+ p4opt += ' -P "%s"' % (ud.pswd)
- if depot.find('/...') != -1:
- path = depot[:depot.find('/...')]
+ if ud.host and not ud.usingp4config:
+ p4opt += ' -p %s' % (ud.host)
+
+ if hasattr(ud, 'revision') and ud.revision:
+ pathnrev = '%s@%s' % (ud.path, ud.revision)
+ else:
+ pathnrev = '%s' % (ud.path)
+
+ if depot_filename:
+ if ud.pathisdir: # Remove leading path to obtain filename
+ filename = depot_filename[len(ud.path)-1:]
+ else:
+ filename = depot_filename[depot_filename.rfind('/'):]
+ filename = filename[:filename.find('#')] # Remove trailing '#rev'
+
+ if command == 'changes':
+ p4cmd = '%s%s changes -m 1 //%s' % (ud.basecmd, p4opt, pathnrev)
+ elif command == 'print':
+ if depot_filename != None:
+ p4cmd = '%s%s print -o "p4/%s" "%s"' % (ud.basecmd, p4opt, filename, depot_filename)
+ else:
+ raise FetchError('No depot file name provided to p4 %s' % command, ud.url)
+ elif command == 'files':
+ p4cmd = '%s%s files //%s' % (ud.basecmd, p4opt, pathnrev)
else:
- path = depot[:depot.rfind('/')]
+ raise FetchError('Invalid p4 command %s' % command, ud.url)
- module = parm.get('module', os.path.basename(path))
+ return p4cmd
- # Get the p4 command
- p4opt = ""
- if user:
- p4opt += " -u %s" % (user)
+ def _p4listfiles(self, ud, d):
+ """
+ Return a list of the file names which are present in the depot using the
+ 'p4 files' command, including trailing '#rev' file revision indicator
+ """
+ p4cmd = self._buildp4command(ud, d, 'files')
+ bb.fetch2.check_network_access(d, p4cmd)
+ p4fileslist = runfetchcmd(p4cmd, d, True)
+ p4fileslist = [f.rstrip() for f in p4fileslist.splitlines()]
+
+ if not p4fileslist:
+ raise FetchError('Unable to fetch listing of p4 files from %s@%s' % (ud.host, ud.path))
+
+ count = 0
+ filelist = []
- if pswd:
- p4opt += " -P %s" % (pswd)
+ for filename in p4fileslist:
+ item = filename.split(' - ')
+ lastaction = item[1].split()
+ logger.debug(1, 'File: %s Last Action: %s' % (item[0], lastaction[0]))
+ if lastaction[0] == 'delete':
+ continue
+ filelist.append(item[0])
- if host:
- p4opt += " -p %s" % (host)
+ return filelist
- p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
+ def download(self, ud, d):
+ """ Get the list of files, fetch each one """
+ filelist = self._p4listfiles(ud, d)
+ if not filelist:
+ raise FetchError('No files found in depot %s@%s' % (ud.host, ud.path))
- # create temp directory
- logger.debug(2, "Fetch: creating temporary directory")
- bb.utils.mkdirhier(d.expand('${WORKDIR}'))
- mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
- tmpfile, errors = bb.process.run(mktemp)
- tmpfile = tmpfile.strip()
- if not tmpfile:
- raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)
+ bb.utils.remove(ud.pkgdir, True)
+ bb.utils.mkdirhier(ud.pkgdir)
- if "label" in parm:
- depot = "%s@%s" % (depot, parm["label"])
- else:
- cset = Perforce.getcset(d, depot, host, user, pswd, parm)
- depot = "%s@%s" % (depot, cset)
+ for afile in filelist:
+ p4fetchcmd = self._buildp4command(ud, d, 'print', afile)
+ bb.fetch2.check_network_access(d, p4fetchcmd)
+ runfetchcmd(p4fetchcmd, d, workdir=ud.pkgdir)
- os.chdir(tmpfile)
- logger.info("Fetch " + ud.url)
- logger.info("%s%s files %s", p4cmd, p4opt, depot)
- p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
- p4file = [f.rstrip() for f in p4file.splitlines()]
+ runfetchcmd('tar -czf %s p4' % (ud.localpath), d, cleanup=[ud.localpath], workdir=ud.pkgdir)
- if not p4file:
- raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)
+ def clean(self, ud, d):
+ """ Cleanup p4 specific files and dirs"""
+ bb.utils.remove(ud.localpath)
+ bb.utils.remove(ud.pkgdir, True)
- count = 0
+ def supports_srcrev(self):
+ return True
- for file in p4file:
- list = file.split()
+ def _revision_key(self, ud, d, name):
+ """ Return a unique key for the url """
+ return 'p4:%s' % ud.pkgdir
- if list[2] == "delete":
- continue
+ def _latest_revision(self, ud, d, name):
+ """ Return the latest upstream scm revision number """
+ p4cmd = self._buildp4command(ud, d, "changes")
+ bb.fetch2.check_network_access(d, p4cmd)
+ tip = runfetchcmd(p4cmd, d, True)
+
+ if not tip:
+ raise FetchError('Could not determine the latest perforce changelist')
- dest = list[0][len(path)+1:]
- where = dest.find("#")
+ tipcset = tip.split(' ')[1]
+ logger.debug(1, 'p4 tip found to be changelist %s' % tipcset)
+ return tipcset
- subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True)
- count = count + 1
+ def sortable_revision(self, ud, d, name):
+ """ Return a sortable revision number """
+ return False, self._build_revision(ud, d)
- if count == 0:
- logger.error()
- raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)
+ def _build_revision(self, ud, d):
+ return ud.revision
- runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
- # cleanup
- bb.utils.prunedir(tmpfile)
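The rewritten fetcher above derives everything from two pieces of p4 output: the per-file lines of 'p4 files' (parsed in _p4listfiles) and the first line of 'p4 changes' (parsed in _latest_revision). A minimal sketch of that parsing follows; the sample lines are made up for illustration, real 'p4' output can differ in detail:

    # Sketch of the parsing done by _p4listfiles() and _latest_revision().
    # The sample output lines below are illustrative, not captured p4 output.

    def parse_p4_files(listing):
        """Keep '//depot/path#rev' names, skipping files deleted at tip."""
        keep = []
        for line in listing.splitlines():
            item = line.rstrip().split(' - ')   # '//depot/a/b.c#3 - edit change 1234 (text)'
            lastaction = item[1].split()
            if lastaction[0] == 'delete':       # nothing to 'p4 print' for a deleted file
                continue
            keep.append(item[0])
        return keep

    def parse_p4_tip(changes_line):
        """'Change 12345 on 2016/08/01 by user@client ...' -> '12345'"""
        return changes_line.split(' ')[1]

    print(parse_p4_files("//depot/a/b.c#3 - edit change 1234 (text)\n"
                         "//depot/a/old.c#9 - delete change 1200 (text)"))
    print(parse_p4_tip("Change 12345 on 2016/08/01 by user@client 'fix'"))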
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py
index 21678eb7d..ecc6e68e9 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py
@@ -69,24 +69,23 @@ class Repo(FetchMethod):
else:
username = ""
- bb.utils.mkdirhier(os.path.join(codir, "repo"))
- os.chdir(os.path.join(codir, "repo"))
- if not os.path.exists(os.path.join(codir, "repo", ".repo")):
+ repodir = os.path.join(codir, "repo")
+ bb.utils.mkdirhier(repodir)
+ if not os.path.exists(os.path.join(repodir, ".repo")):
bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
- runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
+ runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d, workdir=repodir)
bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
- runfetchcmd("repo sync", d)
- os.chdir(codir)
+ runfetchcmd("repo sync", d, workdir=repodir)
scmdata = ud.parm.get("scmdata", "")
if scmdata == "keep":
tar_flags = ""
else:
- tar_flags = "--exclude '.repo' --exclude '.git'"
+ tar_flags = "--exclude='.repo' --exclude='.git'"
# Create a cache
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d, workdir=codir)
def supports_srcrev(self):
return False
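The change repeated throughout this commit, here and in the svn and perforce hunks, is dropping os.chdir() in favour of a workdir= argument to runfetchcmd(). A hedged sketch of why that matters: subprocess can give the child its own working directory, so the parent's process-wide cwd is never mutated and an exception cannot strand the build in a half-changed directory. This illustrates the idea only; it is not bitbake's actual runfetchcmd implementation:

    import subprocess

    def run_in_dir(cmd, workdir):
        # The child gets its own cwd via the cwd= argument; the calling
        # process never chdir()s, so concurrent fetch tasks cannot race
        # on the global working directory.
        return subprocess.check_output(cmd, shell=True, cwd=workdir)

    # run_in_dir("repo sync", repodir)   # hypothetical call mirroring the hunk above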
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py
index cb2f753a8..7989fccc7 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py
@@ -61,8 +61,7 @@ SRC_URI = "sftp://user@host.example.com/dir/path.file.txt"
import os
import bb
-import urllib
-import commands
+import urllib.request, urllib.parse, urllib.error
from bb import data
from bb.fetch2 import URI
from bb.fetch2 import FetchMethod
@@ -93,7 +92,7 @@ class SFTP(FetchMethod):
else:
ud.basename = os.path.basename(ud.path)
- ud.localfile = data.expand(urllib.unquote(ud.basename), d)
+ ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d)
def download(self, ud, d):
"""Fetch urls"""
@@ -121,8 +120,7 @@ class SFTP(FetchMethod):
remote = '%s%s:%s' % (user, urlo.hostname, path)
- cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote),
- commands.mkarg(lpath))
+ cmd = '%s %s %s %s' % (basecmd, port, remote, lpath)
bb.fetch2.check_network_access(d, cmd, ud.url)
runfetchcmd(cmd, d)
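Python 3 removed the commands module, so commands.mkarg() is gone; the sftp and ssh hunks simply interpolate the remote and local paths unquoted. Where quoting matters (paths containing spaces or shell metacharacters), shlex.quote() is the standard-library replacement. A sketch of that substitution, with a hypothetical path; this is not something the patch itself does:

    import shlex

    remote = "user@host.example.com:/dir/with space/file.txt"   # hypothetical path
    lpath = "/downloads/file.txt"
    # Python 2's commands.mkarg(x) returned " 'x'"; shlex.quote(x) is the
    # Python 3 equivalent (without the leading space).
    cmd = "scp -B %s %s" % (shlex.quote(remote), shlex.quote(lpath))
    print(cmd)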
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py
index 635578a71..56f9b7eb3 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py
@@ -114,12 +114,10 @@ class SSH(FetchMethod):
fr = host
fr += ':%s' % path
-
- import commands
cmd = 'scp -B -r %s %s %s/' % (
portarg,
- commands.mkarg(fr),
- commands.mkarg(dldir)
+ fr,
+ dldir
)
bb.fetch2.check_network_access(d, cmd, urldata.url)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py
index 8a291935c..6ca79d35d 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py
@@ -126,35 +126,32 @@ class Svn(FetchMethod):
if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
svnupdatecmd = self._buildsvncommand(ud, d, "update")
logger.info("Update " + ud.url)
- # update sources there
- os.chdir(ud.moddir)
# We need to attempt to run svn upgrade first in case its an older working format
try:
- runfetchcmd(ud.basecmd + " upgrade", d)
+ runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
except FetchError:
pass
logger.debug(1, "Running %s", svnupdatecmd)
bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
- runfetchcmd(svnupdatecmd, d)
+ runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
else:
svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
logger.debug(1, "Running %s", svnfetchcmd)
bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
- runfetchcmd(svnfetchcmd, d)
+ runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)
scmdata = ud.parm.get("scmdata", "")
if scmdata == "keep":
tar_flags = ""
else:
- tar_flags = "--exclude '.svn'"
+ tar_flags = "--exclude='.svn'"
- os.chdir(ud.pkgdir)
# tar them up to a defined filename
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d, cleanup = [ud.localpath])
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
+ cleanup=[ud.localpath], workdir=ud.pkgdir)
def clean(self, ud, d):
""" Clean SVN specific files and dirs """
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py
index 8bc9e93ca..ecb946aa8 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py
@@ -31,7 +31,8 @@ import subprocess
import os
import logging
import bb
-import urllib
+import bb.progress
+import urllib.request, urllib.parse, urllib.error
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
@@ -41,6 +42,27 @@ from bb.utils import export_proxies
from bs4 import BeautifulSoup
from bs4 import SoupStrainer
+class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
+ """
+ Extract progress information from wget output.
+ Note: relies on --progress=dot (with -v or without -q/-nv) being
+ specified on the wget command line.
+ """
+ def __init__(self, d):
+ super(WgetProgressHandler, self).__init__(d)
+ # Send an initial progress event so the bar gets shown
+ self._fire_progress(0)
+
+ def writeline(self, line):
+ percs = re.findall(r'(\d+)%\s+([\d.]+[A-Z])', line)
+ if percs:
+ progress = int(percs[-1][0])
+ rate = percs[-1][1] + '/s'
+ self.update(progress, rate)
+ return False
+ return True
+
+
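writeline() keys off wget's dot-style progress lines, which look roughly like '   750K .......... ..........  62% 1.21M 2s'; the regex captures every 'NN%' followed by a size token and uses the last match as the current percentage and rate. A standalone sketch, with an illustrative sample line:

    import re

    line = "   750K .......... .......... ..........  62% 1.21M 2s"
    percs = re.findall(r'(\d+)%\s+([\d.]+[A-Z])', line)
    if percs:
        progress = int(percs[-1][0])    # 62
        rate = percs[-1][1] + '/s'      # '1.21M/s'
        print(progress, rate)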
class Wget(FetchMethod):
"""Class to fetch urls via 'wget'"""
def supports(self, ud, d):
@@ -62,17 +84,19 @@ class Wget(FetchMethod):
else:
ud.basename = os.path.basename(ud.path)
- ud.localfile = data.expand(urllib.unquote(ud.basename), d)
+ ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d)
if not ud.localfile:
- ud.localfile = data.expand(urllib.unquote(ud.host + ud.path).replace("/", "."), d)
+ ud.localfile = data.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."), d)
- self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"
+ self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate"
def _runwget(self, ud, d, command, quiet):
+ progresshandler = WgetProgressHandler(d)
+
logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
bb.fetch2.check_network_access(d, command)
- runfetchcmd(command, d, quiet)
+ runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler)
def download(self, ud, d):
"""Fetch urls"""
@@ -84,6 +108,10 @@ class Wget(FetchMethod):
bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
fetchcmd += " -O " + dldir + os.sep + ud.localfile
+ if ud.user:
+ up = ud.user.split(":")
+ fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (up[0],up[1])
+
uri = ud.url.split(";")[0]
if os.path.exists(ud.localpath):
            # file exists, but we didn't complete it, so try again
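The added --user/--password lines split the 'user:password' pair parsed from the URL and hand it to wget; --auth-no-challenge makes wget send Basic credentials pre-emptively rather than waiting for a 401. A sketch of the resulting command line, with hypothetical credentials:

    user = "alice:secret"              # ud.user as parsed from the SRC_URI (hypothetical)
    up = user.split(":")
    fetchcmd = "wget -t 2 -T 30"
    fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (up[0], up[1])
    print(fetchcmd)
    # wget -t 2 -T 30 --user=alice --password=secret --auth-no-challenge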
@@ -104,12 +132,12 @@ class Wget(FetchMethod):
return True
- def checkstatus(self, fetch, ud, d):
- import urllib2, socket, httplib
- from urllib import addinfourl
+ def checkstatus(self, fetch, ud, d, try_again=True):
+ import urllib.request, urllib.error, urllib.parse, socket, http.client
+ from urllib.response import addinfourl
from bb.fetch2 import FetchConnectionCache
- class HTTPConnectionCache(httplib.HTTPConnection):
+ class HTTPConnectionCache(http.client.HTTPConnection):
if fetch.connection_cache:
def connect(self):
"""Connect to the host and port specified in __init__."""
@@ -125,7 +153,7 @@ class Wget(FetchMethod):
if self._tunnel_host:
self._tunnel()
- class CacheHTTPHandler(urllib2.HTTPHandler):
+ class CacheHTTPHandler(urllib.request.HTTPHandler):
def http_open(self, req):
return self.do_open(HTTPConnectionCache, req)
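The connect() override above is the heart of the connection cache: ask fetch.connection_cache for an existing socket to (host, port) and only dial out on a miss. A minimal sketch of that pattern against http.client; the dict below stands in for bitbake's FetchConnectionCache, whose interface is only loosely mirrored here:

    import http.client
    import socket

    class CachingHTTPConnection(http.client.HTTPConnection):
        cache = {}  # (host, port) -> socket; stand-in for FetchConnectionCache

        def connect(self):
            key = (self.host, self.port)
            sock = self.cache.get(key)
            if sock is None:
                # cache miss: open one connection and remember it
                sock = socket.create_connection(key, self.timeout)
                self.cache[key] = sock
            self.sock = sock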
@@ -139,7 +167,7 @@ class Wget(FetchMethod):
- geturl(): return the original request URL
- code: HTTP status code
"""
- host = req.get_host()
+ host = req.host
if not host:
-                    raise urlllib2.URLError('no host given')
+                    raise urllib.error.URLError('no host given')
@@ -147,7 +175,7 @@ class Wget(FetchMethod):
h.set_debuglevel(self._debuglevel)
headers = dict(req.unredirected_hdrs)
- headers.update(dict((k, v) for k, v in req.headers.items()
+ headers.update(dict((k, v) for k, v in list(req.headers.items())
if k not in headers))
# We want to make an HTTP/1.1 request, but the addinfourl
@@ -164,7 +192,7 @@ class Wget(FetchMethod):
headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0
headers = dict(
- (name.title(), val) for name, val in headers.items())
+ (name.title(), val) for name, val in list(headers.items()))
if req._tunnel_host:
tunnel_headers = {}
@@ -177,12 +205,12 @@ class Wget(FetchMethod):
h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
try:
- h.request(req.get_method(), req.get_selector(), req.data, headers)
- except socket.error, err: # XXX what error?
+ h.request(req.get_method(), req.selector, req.data, headers)
+ except socket.error as err: # XXX what error?
# Don't close connection when cache is enabled.
if fetch.connection_cache is None:
h.close()
- raise urllib2.URLError(err)
+ raise urllib.error.URLError(err)
else:
try:
r = h.getresponse(buffering=True)
@@ -222,7 +250,7 @@ class Wget(FetchMethod):
return resp
- class HTTPMethodFallback(urllib2.BaseHandler):
+ class HTTPMethodFallback(urllib.request.BaseHandler):
"""
Fallback to GET if HEAD is not allowed (405 HTTP error)
"""
@@ -230,11 +258,11 @@ class Wget(FetchMethod):
fp.read()
fp.close()
- newheaders = dict((k,v) for k,v in req.headers.items()
+ newheaders = dict((k,v) for k,v in list(req.headers.items())
if k.lower() not in ("content-length", "content-type"))
- return self.parent.open(urllib2.Request(req.get_full_url(),
+ return self.parent.open(urllib.request.Request(req.get_full_url(),
headers=newheaders,
- origin_req_host=req.get_origin_req_host(),
+ origin_req_host=req.origin_req_host,
unverifiable=True))
"""
@@ -249,38 +277,49 @@ class Wget(FetchMethod):
"""
http_error_406 = http_error_405
- class FixedHTTPRedirectHandler(urllib2.HTTPRedirectHandler):
+ class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
"""
            urllib.request.HTTPRedirectHandler resets the method to GET on redirect,
when we want to follow redirects using the original method.
"""
def redirect_request(self, req, fp, code, msg, headers, newurl):
- newreq = urllib2.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
+ newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
newreq.get_method = lambda: req.get_method()
return newreq
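Stock urllib.request.HTTPRedirectHandler rewrites every redirected request as a GET; the override above pins get_method so the HEAD issued by checkstatus() stays a HEAD across 301/302 responses. A usage sketch, where the class name and URL are illustrative:

    import urllib.request

    class KeepMethodRedirectHandler(urllib.request.HTTPRedirectHandler):
        def redirect_request(self, req, fp, code, msg, headers, newurl):
            newreq = urllib.request.HTTPRedirectHandler.redirect_request(
                self, req, fp, code, msg, headers, newurl)
            newreq.get_method = lambda: req.get_method()  # keep HEAD a HEAD
            return newreq

    opener = urllib.request.build_opener(KeepMethodRedirectHandler)
    req = urllib.request.Request("http://example.com/file")  # hypothetical URL
    req.get_method = lambda: "HEAD"
    # opener.open(req)  # would follow redirects without downgrading to GET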
exported_proxies = export_proxies(d)
handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
-        if export_proxies:
+        if exported_proxies:
- handlers.append(urllib2.ProxyHandler())
+ handlers.append(urllib.request.ProxyHandler())
handlers.append(CacheHTTPHandler())
# XXX: Since Python 2.7.9 ssl cert validation is enabled by default
# see PEP-0476, this causes verification errors on some https servers
# so disable by default.
import ssl
if hasattr(ssl, '_create_unverified_context'):
- handlers.append(urllib2.HTTPSHandler(context=ssl._create_unverified_context()))
- opener = urllib2.build_opener(*handlers)
+ handlers.append(urllib.request.HTTPSHandler(context=ssl._create_unverified_context()))
+ opener = urllib.request.build_opener(*handlers)
try:
uri = ud.url.split(";")[0]
- r = urllib2.Request(uri)
+ r = urllib.request.Request(uri)
r.get_method = lambda: "HEAD"
+
+ if ud.user:
+ import base64
+ encodeuser = base64.b64encode(ud.user.encode('utf-8')).decode("utf-8")
+ authheader = "Basic %s" % encodeuser
+ r.add_header("Authorization", authheader)
+
opener.open(r)
- except urllib2.URLError as e:
- # debug for now to avoid spamming the logs in e.g. remote sstate searches
- logger.debug(2, "checkstatus() urlopen failed: %s" % e)
- return False
+ except urllib.error.URLError as e:
+ if try_again:
+ logger.debug(2, "checkstatus: trying again")
+ return self.checkstatus(fetch, ud, d, False)
+ else:
+ # debug for now to avoid spamming the logs in e.g. remote sstate searches
+ logger.debug(2, "checkstatus() urlopen failed: %s" % e)
+ return False
return True
def _parse_path(self, regex, s):
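When the URL carries credentials, checkstatus() attaches them pre-emptively as an HTTP Basic Authorization header: base64 of the raw 'user:password' string, per RFC 7617, matching the --auth-no-challenge behaviour on the wget path. The encoding step in isolation, with hypothetical credentials:

    import base64

    user = "alice:secret"   # ud.user (hypothetical)
    encodeuser = base64.b64encode(user.encode('utf-8')).decode('utf-8')
    print("Authorization: Basic %s" % encodeuser)
    # Authorization: Basic YWxpY2U6c2VjcmV0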