Squashed 'yocto-poky/' content from commit ea562de

git-subtree-dir: yocto-poky
git-subtree-split: ea562de57590c966cd5a75fda8defecd397e6436
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
new file mode 100644
index 0000000..3d53b63
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -0,0 +1,1787 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+# Copyright (C) 2012  Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+from __future__ import absolute_import
+from __future__ import print_function
+import os, re
+import signal
+import glob
+import logging
+import urllib
+import urlparse
+import operator
+import bb.persist_data, bb.utils
+import bb.checksum
+from bb import data
+import bb.process
+import subprocess
+
+__version__ = "2"
+_checksum_cache = bb.checksum.FileChecksumCache()
+
+logger = logging.getLogger("BitBake.Fetcher")
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+    logger.info("Importing cPickle failed. "
+                "Falling back to a very slow implementation.")
+
+class BBFetchException(Exception):
+    """Class all fetch exceptions inherit from"""
+    def __init__(self, message):
+         self.msg = message
+         Exception.__init__(self, message)
+
+    def __str__(self):
+         return self.msg
+
+class UntrustedUrl(BBFetchException):
+    """Exception raised when encountering a host not listed in BB_ALLOWED_NETWORKS"""
+    def __init__(self, url, message=''):
+        if message:
+            msg = message
+        else:
+            msg = "The URL: '%s' is not trusted and cannot be used" % url
+        self.url = url
+        BBFetchException.__init__(self, msg)
+        self.args = (url,)
+
+class MalformedUrl(BBFetchException):
+    """Exception raised when encountering an invalid url"""
+    def __init__(self, url, message=''):
+         if message:
+             msg = message
+         else:
+             msg = "The URL: '%s' is invalid and cannot be interpreted" % url
+         self.url = url
+         BBFetchException.__init__(self, msg)
+         self.args = (url,)
+
+class FetchError(BBFetchException):
+    """General fetcher exception when something happens incorrectly"""
+    def __init__(self, message, url = None):
+         if url:
+            msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
+         else:
+            msg = "Fetcher failure: %s" % message
+         self.url = url
+         BBFetchException.__init__(self, msg)
+         self.args = (message, url)
+
+class ChecksumError(FetchError):
+    """Exception when mismatched checksum encountered"""
+    def __init__(self, message, url = None, checksum = None):
+        self.checksum = checksum
+        FetchError.__init__(self, message, url)
+
+class NoChecksumError(FetchError):
+    """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""
+
+class UnpackError(BBFetchException):
+    """General fetcher exception when something happens incorrectly when unpacking"""
+    def __init__(self, message, url):
+         msg = "Unpack failure for URL: '%s'. %s" % (url, message)
+         self.url = url
+         BBFetchException.__init__(self, msg)
+         self.args = (message, url)
+
+class NoMethodError(BBFetchException):
+    """Exception raised when there is no method to obtain a supplied url or set of urls"""
+    def __init__(self, url):
+         msg = "Could not find a fetcher which supports the URL: '%s'" % url
+         self.url = url
+         BBFetchException.__init__(self, msg)
+         self.args = (url,)
+
+class MissingParameterError(BBFetchException):
+    """Exception raised when a fetch method is missing a critical parameter in the url"""
+    def __init__(self, missing, url):
+         msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
+         self.url = url
+         self.missing = missing
+         BBFetchException.__init__(self, msg)
+         self.args = (missing, url)
+
+class ParameterError(BBFetchException):
+    """Exception raised when a url cannot be proccessed due to invalid parameters."""
+    def __init__(self, message, url):
+         msg = "URL: '%s' has invalid parameters. %s" % (url, message)
+         self.url = url
+         BBFetchException.__init__(self, msg)
+         self.args = (message, url)
+
+class NetworkAccess(BBFetchException):
+    """Exception raised when network access is disabled but it is required."""
+    def __init__(self, url, cmd):
+         msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url)
+         self.url = url
+         self.cmd = cmd
+         BBFetchException.__init__(self, msg)
+         self.args = (url, cmd)
+
+class NonLocalMethod(Exception):
+    def __init__(self):
+        Exception.__init__(self)
+
+
+class URI(object):
+    """
+    A class representing a generic URI, with methods for
+    accessing the URI components, and stringifies to the
+    URI.
+
+    It is constructed by calling it with a URI, or setting
+    the attributes manually:
+
+     uri = URI("http://example.com/")
+
+     uri = URI()
+     uri.scheme = 'http'
+     uri.hostname = 'example.com'
+     uri.path = '/'
+
+    It has the following attributes:
+
+      * scheme (read/write)
+      * userinfo (authentication information) (read/write)
+        * username (read/write)
+        * password (read/write)
+
+        Note, password is deprecated as of RFC 3986.
+
+      * hostname (read/write)
+      * port (read/write)
+      * hostport (read only)
+        "hostname:port", if both are set, otherwise just "hostname"
+      * path (read/write)
+      * path_quoted (read/write)
+        A URI quoted version of path
+      * params (dict) (read/write)
+      * query (dict) (read/write)
+      * relative (bool) (read only)
+        True if this is a "relative URI", (e.g. file:foo.diff)
+
+    It stringifies to the URI itself.
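+
+    For example (illustrative, values assumed):
+
+     uri = URI("http://example.com/file.tar.gz?format=raw;name=tarball")
+     uri.query      # {'format': 'raw'}
+     uri.params     # {'name': 'tarball'}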
+
+    Some notes about relative URIs: while it's specified that
+    a URI beginning with <scheme>:// should either be directly
+    followed by a hostname or a /, the old URI handling of the
+    fetch2 library did not conform to this. Therefore, this URI
+    class has some kludges to make sure that URIs are parsed in
+    a way conforming to bitbake's current usage. This URI class
+    supports the following:
+
+     file:relative/path.diff (IETF compliant)
+     git:relative/path.git (IETF compliant)
+     git:///absolute/path.git (IETF compliant)
+     file:///absolute/path.diff (IETF compliant)
+
+     file://relative/path.diff (not IETF compliant)
+
+    But it does not support the following:
+
+     file://hostname/absolute/path.diff (would be IETF compliant)
+
+    Note that the last case only applies to a list of
+    "whitelisted" schemes (currently only file://), that requires
+    its URIs to not have a network location.
+    """
+
+    _relative_schemes = ['file', 'git']
+    _netloc_forbidden = ['file']
+
+    def __init__(self, uri=None):
+        self.scheme = ''
+        self.userinfo = ''
+        self.hostname = ''
+        self.port = None
+        self._path = ''
+        self.params = {}
+        self.query = {}
+        self.relative = False
+
+        if not uri:
+            return
+
+        # We hijack the URL parameters, since the way bitbake uses
+        # them are not quite RFC compliant.
+        uri, param_str = (uri.split(";", 1) + [None])[:2]
+
+        urlp = urlparse.urlparse(uri)
+        self.scheme = urlp.scheme
+
+        reparse = 0
+
+        # Coerce urlparse to make URI scheme use netloc
+        if not self.scheme in urlparse.uses_netloc:
+            urlparse.uses_netloc.append(self.scheme)
+            reparse = 1
+
+        # Make urlparse happy(/ier) by converting local resources
+        # to RFC compliant URL format. E.g.:
+        #   file://foo.diff -> file:foo.diff
+        if urlp.scheme in self._netloc_forbidden:
+            uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
+            reparse = 1
+
+        if reparse:
+            urlp = urlparse.urlparse(uri)
+
+        # Identify if the URI is relative or not
+        if urlp.scheme in self._relative_schemes and \
+           re.compile("^\w+:(?!//)").match(uri):
+            self.relative = True
+
+        if not self.relative:
+            self.hostname = urlp.hostname or ''
+            self.port = urlp.port
+
+            self.userinfo += urlp.username or ''
+
+            if urlp.password:
+                self.userinfo += ':%s' % urlp.password
+
+        self.path = urllib.unquote(urlp.path)
+
+        if param_str:
+            self.params = self._param_str_split(param_str, ";")
+        if urlp.query:
+            self.query = self._param_str_split(urlp.query, "&")
+
+    def __str__(self):
+        userinfo = self.userinfo
+        if userinfo:
+            userinfo += '@'
+
+        return "%s:%s%s%s%s%s%s" % (
+            self.scheme,
+            '' if self.relative else '//',
+            userinfo,
+            self.hostport,
+            self.path_quoted,
+            self._query_str(),
+            self._param_str())
+
+    def _param_str(self):
+        return (
+            ''.join([';', self._param_str_join(self.params, ";")])
+            if self.params else '')
+
+    def _query_str(self):
+        return (
+            ''.join(['?', self._param_str_join(self.query, "&")])
+            if self.query else '')
+
+    def _param_str_split(self, string, elmdelim, kvdelim="="):
+        ret = {}
+        for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
+            ret[k] = v
+        return ret
+
+    def _param_str_join(self, dict_, elmdelim, kvdelim="="):
+        return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])
+
+    @property
+    def hostport(self):
+        if not self.port:
+            return self.hostname
+        return "%s:%d" % (self.hostname, self.port)
+
+    @property
+    def path_quoted(self):
+        return urllib.quote(self.path)
+
+    @path_quoted.setter
+    def path_quoted(self, path):
+        self.path = urllib.unquote(path)
+
+    @property
+    def path(self):
+        return self._path
+
+    @path.setter
+    def path(self, path):
+        self._path = path
+
+        if re.compile("^/").match(path):
+            self.relative = False
+        else:
+            self.relative = True
+
+    @property
+    def username(self):
+        if self.userinfo:
+            return (self.userinfo.split(":", 1))[0]
+        return ''
+
+    @username.setter
+    def username(self, username):
+        password = self.password
+        self.userinfo = username
+        if password:
+            self.userinfo += ":%s" % password
+
+    @property
+    def password(self):
+        if self.userinfo and ":" in self.userinfo:
+            return (self.userinfo.split(":", 1))[1]
+        return ''
+
+    @password.setter
+    def password(self, password):
+        self.userinfo = "%s:%s" % (self.username, password)
+
+def decodeurl(url):
+    """Decodes an URL into the tokens (scheme, network location, path,
+    user, password, parameters).
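+
+    Illustrative example (values assumed):
+        decodeurl("http://user:pass@example.com/path;a=1")
+        -> ('http', 'example.com', '/path', 'user', 'pass', {'a': '1'})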
+    """
+
+    m = re.compile('(?P<type>[^:]*)://((?P<user>[^/]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
+    if not m:
+        raise MalformedUrl(url)
+
+    type = m.group('type')
+    location = m.group('location')
+    if not location:
+        raise MalformedUrl(url)
+    user = m.group('user')
+    parm = m.group('parm')
+
+    locidx = location.find('/')
+    if locidx != -1 and type.lower() != 'file':
+        host = location[:locidx]
+        path = location[locidx:]
+    else:
+        host = ""
+        path = location
+    if user:
+        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
+        if m:
+            user = m.group('user')
+            pswd = m.group('pswd')
+    else:
+        user = ''
+        pswd = ''
+
+    p = {}
+    if parm:
+        for s in parm.split(';'):
+            if s:
+                if '=' not in s:
+                    raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
+                s1, s2 = s.split('=', 1)
+                p[s1] = s2
+
+    return type, host, urllib.unquote(path), user, pswd, p
+
+def encodeurl(decoded):
+    """Encodes a URL from tokens (scheme, network location, path,
+    user, password, parameters).
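+
+    Illustrative round trip with decodeurl (values assumed):
+        encodeurl(('http', 'example.com', '/path', 'user', 'pass', {'a': '1'}))
+        -> "http://user:pass@example.com/path;a=1"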
+    """
+
+    type, host, path, user, pswd, p = decoded
+
+    if not path:
+        raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
+    if not type:
+        raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
+    url = '%s://' % type
+    if user and type != "file":
+        url += "%s" % user
+        if pswd:
+            url += ":%s" % pswd
+        url += "@"
+    if host and type != "file":
+        url += "%s" % host
+    # Standardise path to ensure comparisons work
+    while '//' in path:
+        path = path.replace("//", "/")
+    url += "%s" % urllib.quote(path)
+    if p:
+        for parm in p:
+            url += ";%s=%s" % (parm, p[parm])
+
+    return url
+
+def uri_replace(ud, uri_find, uri_replace, replacements, d):
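+    """
+    Build a replacement URI for ud.url: uri_find is matched (as regexps over
+    the decoded URL fields) against the URL, and the corresponding fields of
+    uri_replace are substituted in. Used for PREMIRRORS/MIRRORS mappings.
+    Returns the new URI, or None if the URL does not match.
+
+    Illustrative mirror mapping (values assumed):
+        "git://.*/.*" -> "http://mirror.example.com/sources/"
+    """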
+    if not ud.url or not uri_find or not uri_replace:
+        logger.error("uri_replace: passed an undefined value, not replacing")
+        return None
+    uri_decoded = list(decodeurl(ud.url))
+    uri_find_decoded = list(decodeurl(uri_find))
+    uri_replace_decoded = list(decodeurl(uri_replace))
+    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
+    result_decoded = ['', '', '', '', '', {}]
+    for loc, i in enumerate(uri_find_decoded):
+        result_decoded[loc] = uri_decoded[loc]
+        regexp = i
+        if loc == 0 and regexp and not regexp.endswith("$"):
+            # Anchor the type: an unanchored "http" would also match "https",
+            # so replacing it with "file" would yield "files", which is
+            # clearly undesirable.
+            regexp += "$"
+        if loc == 5:
+            # Handle URL parameters
+            if i:
+                # Any specified URL parameters must match
+                for k in uri_replace_decoded[loc]:
+                    if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
+                        return None
+            # Overwrite any specified replacement parameters
+            for k in uri_replace_decoded[loc]:
+                for l in replacements:
+                    uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
+                result_decoded[loc][k] = uri_replace_decoded[loc][k]
+        elif (re.match(regexp, uri_decoded[loc])):
+            if not uri_replace_decoded[loc]:
+                result_decoded[loc] = ""    
+            else:
+                for k in replacements:
+                    uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
+                #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
+                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc])
+            if loc == 2:
+                # Handle path manipulations
+                basename = None
+                if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball:
+                    # If the source and destination url types differ, must be a mirrortarball mapping
+                    basename = os.path.basename(ud.mirrortarball)
+                    # Kill parameters, they make no sense for mirror tarballs
+                    uri_decoded[5] = {}
+                elif ud.localpath and ud.method.supports_checksum(ud):
+                    basename = os.path.basename(ud.localpath)
+                if basename and not result_decoded[loc].endswith(basename):
+                    result_decoded[loc] = os.path.join(result_decoded[loc], basename)
+        else:
+            return None
+    result = encodeurl(result_decoded)
+    if result == ud.url:
+        return None
+    logger.debug(2, "For url %s returning %s" % (ud.url, result))
+    return result
+
+methods = []
+urldata_cache = {}
+saved_headrevs = {}
+
+def fetcher_init(d):
+    """
+    Called to initialize the fetchers once the configuration data is known.
+    Calls before this must not hit the cache.
+    """
+    # When to drop SCM head revisions controlled by user policy
+    srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
+    if srcrev_policy == "cache":
+        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
+    elif srcrev_policy == "clear":
+        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
+        try:
+            bb.fetch2.saved_headrevs = revs.items()
+        except Exception:
+            pass
+        revs.clear()
+    else:
+        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
+
+    _checksum_cache.init_cache(d)
+
+    for m in methods:
+        if hasattr(m, "init"):
+            m.init(d)
+
+def fetcher_parse_save(d):
+    _checksum_cache.save_extras(d)
+
+def fetcher_parse_done(d):
+    _checksum_cache.save_merge(d)
+
+def fetcher_compare_revisions(d):
+    """
+    Compare the revisions in the persistent cache with the current values and
+    return true/false depending on whether they've changed.
+    """
+
+    data = dict(bb.persist_data.persist('BB_URI_HEADREVS', d).items())
+    data2 = dict(bb.fetch2.saved_headrevs)
+
+    for key in data:
+        if key not in data2 or data2[key] != data[key]:
+            logger.debug(1, "%s changed", key)
+            return True
+        logger.debug(2, "%s did not change", key)
+    return False
+
+def mirror_from_string(data):
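+    # Illustrative input/output (mirror spec assumed); the literal two-character
+    # sequence "\n" separates entries and whitespace separates fields:
+    #   mirror_from_string("git://.*/.* http://mirror.example.com/ \\n https://.*/.* file:///local/mirror/")
+    #   -> [['git://.*/.*', 'http://mirror.example.com/'], ['https://.*/.*', 'file:///local/mirror/']]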
+    return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
+
+def verify_checksum(ud, d, precomputed={}):
+    """
+    Verify the MD5 and SHA256 checksums for the downloaded source.
+
+    Raises a FetchError if one or both of the SRC_URI checksums do not match
+    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
+    checksums specified.
+
+    Returns a dict of checksums that can be stored in a done stamp file and
+    passed in as precomputed parameter in a later call to avoid re-computing
+    the checksums from the file. This allows verifying the checksums of the
+    file against those in the recipe each time, rather than only after
+    downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
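+
+    The returned dict has the form (illustrative):
+        {"md5": "<md5 hex digest>", "sha256": "<sha256 hex digest>"}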
+    """
+
+    _MD5_KEY = "md5"
+    _SHA256_KEY = "sha256"
+
+    if ud.ignore_checksums or not ud.method.supports_checksum(ud):
+        return {}
+
+    if _MD5_KEY in precomputed:
+        md5data = precomputed[_MD5_KEY]
+    else:
+        md5data = bb.utils.md5_file(ud.localpath)
+
+    if _SHA256_KEY in precomputed:
+        sha256data = precomputed[_SHA256_KEY]
+    else:
+        sha256data = bb.utils.sha256_file(ud.localpath)
+
+    if ud.method.recommends_checksum(ud):
+        # If strict checking enabled and neither sum defined, raise error
+        strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
+        if (strict == "1") and not (ud.md5_expected or ud.sha256_expected):
+            logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
+                             'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
+                             (ud.localpath, ud.md5_name, md5data,
+                              ud.sha256_name, sha256data))
+            raise NoChecksumError('Missing SRC_URI checksum', ud.url)
+
+        # Log missing sums so user can more easily add them
+        if not ud.md5_expected:
+            logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
+                        'SRC_URI[%s] = "%s"',
+                        ud.localpath, ud.md5_name, md5data)
+
+        if not ud.sha256_expected:
+            logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
+                        'SRC_URI[%s] = "%s"',
+                        ud.localpath, ud.sha256_name, sha256data)
+
+    md5mismatch = False
+    sha256mismatch = False
+
+    if ud.md5_expected != md5data:
+        md5mismatch = True
+
+    if ud.sha256_expected != sha256data:
+        sha256mismatch = True
+
+    # We want to alert the user if a checksum is defined in the recipe but
+    # it does not match.
+    msg = ""
+    mismatch = False
+    if md5mismatch and ud.md5_expected:
+        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
+        mismatch = True
+
+    if sha256mismatch and ud.sha256_expected:
+        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
+        mismatch = True
+
+    if mismatch:
+        msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)
+
+    if len(msg):
+        raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)
+
+    return {
+        _MD5_KEY: md5data,
+        _SHA256_KEY: sha256data
+    }
+
+
+def verify_donestamp(ud, d, origud=None):
+    """
+    Check whether the done stamp file has the right checksums (if the fetch
+    method supports them). If it doesn't, delete the done stamp and force
+    a re-download.
+
+    Returns True, if the donestamp exists and is valid, False otherwise. When
+    returning False, any existing done stamps are removed.
+    """
+    if not os.path.exists(ud.donestamp):
+        return False
+
+    if (not ud.method.supports_checksum(ud) or
+        (origud and not origud.method.supports_checksum(origud))):
+        # done stamp exists, checksums not supported; assume the local file is
+        # current
+        return True
+
+    if not os.path.exists(ud.localpath):
+        # done stamp exists, but the downloaded file does not; the done stamp
+        # must be incorrect, re-trigger the download
+        bb.utils.remove(ud.donestamp)
+        return False
+
+    precomputed_checksums = {}
+    # Only re-use the precomputed checksums if the donestamp is newer than the
+    # file. Do not rely on the mtime of directories, though. If ud.localpath is
+    # a directory, there will probably not be any checksums anyway.
+    if (os.path.isdir(ud.localpath) or
+            os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
+        try:
+            with open(ud.donestamp, "rb") as cachefile:
+                pickled = pickle.Unpickler(cachefile)
+                precomputed_checksums.update(pickled.load())
+        except Exception as e:
+            # Avoid the warnings on the upgrade path from empty done stamp
+            # files to those containing the checksums.
+            if not isinstance(e, EOFError):
+                # Ignore errors, they aren't fatal
+                logger.warn("Couldn't load checksums from donestamp %s: %s "
+                            "(msg: %s)" % (ud.donestamp, type(e).__name__,
+                                           str(e)))
+
+    try:
+        checksums = verify_checksum(ud, d, precomputed_checksums)
+        # If the cache file did not have the checksums, compute and store them
+        # as an upgrade path from the previous done stamp file format.
+        if checksums != precomputed_checksums:
+            with open(ud.donestamp, "wb") as cachefile:
+                p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
+                p.dump(checksums)
+        return True
+    except ChecksumError as e:
+        # Checksums failed to verify, trigger re-download and remove the
+        # incorrect stamp file.
+        logger.warn("Checksum mismatch for local file %s\n"
+                    "Cleaning and trying again." % ud.localpath)
+        rename_bad_checksum(ud, e.checksum)
+        bb.utils.remove(ud.donestamp)
+    return False
+
+
+def update_stamp(ud, d):
+    """
+        donestamp is file stamp indicating the whole fetching is done
+        this function update the stamp after verifying the checksum
+    """
+    if os.path.exists(ud.donestamp):
+        # Touch the done stamp file to show active use of the download
+        try:
+            os.utime(ud.donestamp, None)
+        except:
+            # Errors aren't fatal here
+            pass
+    else:
+        checksums = verify_checksum(ud, d)
+        # Store the checksums for later re-verification against the recipe
+        with open(ud.donestamp, "wb") as cachefile:
+            p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
+            p.dump(checksums)
+
+def subprocess_setup():
+    # Python installs a SIGPIPE handler by default. This is usually not what
+    # non-Python subprocesses expect.
+    # SIGPIPE errors are known issues with gzip/bash
+    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+
+def get_autorev(d):
+    # Do not cache the parsed recipe in the autorev case unless BB_SRCREV_POLICY is "cache"
+    if d.getVar('BB_SRCREV_POLICY', True) != "cache":
+        d.setVar('__BB_DONT_CACHE', '1')
+    return "AUTOINC"
+
+def get_srcrev(d, method_name='sortable_revision'):
+    """
+    Return the revision string, usually for use in the version string (PV) of the current package
+    Most packages usually only have one SCM so we just pass on the call.
+    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
+    have been set.
+
+    The idea here is that we put the string "AUTOINC+" into the return value if the revisions are not
+    incremental; other code is then responsible for turning that into an increasing value (if needed)
+
+    A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
+    that fetcher provides a method with the given name and the same signature as sortable_revision.
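+
+    Illustrative multi-SCM case (names assumed): with SRCREV_FORMAT = "machine_meta"
+    and SCM names "machine" and "meta", each name in the format string is replaced
+    with that SCM's (truncated) revision; the result is prefixed with "AUTOINC+"
+    if any revision was automatically incremented.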
+    """
+
+    scms = []
+    fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
+    urldata = fetcher.ud
+    for u in urldata:
+        if urldata[u].method.supports_srcrev():
+            scms.append(u)
+
+    if len(scms) == 0:
+        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
+
+    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
+        autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
+        if len(rev) > 10:
+            rev = rev[:10]
+        if autoinc:
+            return "AUTOINC+" + rev
+        return rev
+
+    #
+    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
+    #
+    format = d.getVar('SRCREV_FORMAT', True)
+    if not format:
+        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
+
+    seenautoinc = False
+    for scm in scms:
+        ud = urldata[scm]
+        for name in ud.names:
+            autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
+            seenautoinc = seenautoinc or autoinc
+            if len(rev) > 10:
+                rev = rev[:10]
+            format = format.replace(name, rev)
+    if seenautoinc:
+       format = "AUTOINC+" + format
+
+    return format
+
+def localpath(url, d):
+    fetcher = bb.fetch2.Fetch([url], d)
+    return fetcher.localpath(url)
+
+def runfetchcmd(cmd, d, quiet=False, cleanup=None):
+    """
+    Run cmd returning the command output
+    Raise an error if interrupted or cmd fails
+    Optionally echo command output to stdout
+    Optionally remove the files/directories listed in cleanup upon failure
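+
+    Illustrative call (command assumed):
+        output = runfetchcmd("git ls-remote %s" % repourl, d, quiet=True)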
+    """
+
+    # Need to export PATH as binary could be in metadata paths
+    # rather than host provided
+    # Also include some other variables.
+    # FIXME: Should really include all export variables?
+    exportvars = ['HOME', 'PATH',
+                  'HTTP_PROXY', 'http_proxy',
+                  'HTTPS_PROXY', 'https_proxy',
+                  'FTP_PROXY', 'ftp_proxy',
+                  'FTPS_PROXY', 'ftps_proxy',
+                  'NO_PROXY', 'no_proxy',
+                  'ALL_PROXY', 'all_proxy',
+                  'GIT_PROXY_COMMAND',
+                  'GIT_SSL_CAINFO',
+                  'GIT_SMART_HTTP',
+                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
+                  'SOCKS5_USER', 'SOCKS5_PASSWD']
+
+    if not cleanup:
+        cleanup = []
+
+    for var in exportvars:
+        val = d.getVar(var, True)
+        if val:
+            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
+
+    logger.debug(1, "Running %s", cmd)
+
+    success = False
+    error_message = ""
+
+    try:
+        (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE)
+        success = True
+    except bb.process.NotFoundError as e:
+        error_message = "Fetch command %s" % (e.command)
+    except bb.process.ExecutionError as e:
+        if e.stdout:
+            output = "output:\n%s\n%s" % (e.stdout, e.stderr)
+        elif e.stderr:
+            output = "output:\n%s" % e.stderr
+        else:
+            output = "no output"
+        error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output)
+    except bb.process.CmdError as e:
+        error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
+    if not success:
+        for f in cleanup:
+            try:
+                bb.utils.remove(f, True)
+            except OSError:
+                pass
+
+        raise FetchError(error_message)
+
+    return output
+
+def check_network_access(d, info = "", url = None):
+    """
+    log remote network access, and error if BB_NO_NETWORK is set
+    """
+    if d.getVar("BB_NO_NETWORK", True) == "1":
+        raise NetworkAccess(url, info)
+    else:
+        logger.debug(1, "Fetcher accessed the network with the command %s" % info)
+
+def build_mirroruris(origud, mirrors, ld):
+    uris = []
+    uds = []
+
+    replacements = {}
+    replacements["TYPE"] = origud.type
+    replacements["HOST"] = origud.host
+    replacements["PATH"] = origud.path
+    replacements["BASENAME"] = origud.path.split("/")[-1]
+    replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')
+
+    def adduri(ud, uris, uds):
+        for line in mirrors:
+            try:
+                (find, replace) = line
+            except ValueError:
+                continue
+            newuri = uri_replace(ud, find, replace, replacements, ld)
+            if not newuri or newuri in uris or newuri == origud.url:
+                continue
+
+            if not trusted_network(ld, newuri):
+                logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" %  (newuri))
+                continue
+
+            try:
+                newud = FetchData(newuri, ld)
+                newud.setup_localpath(ld)
+            except bb.fetch2.BBFetchException as e:
+                logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
+                logger.debug(1, str(e))
+                try:
+                    # setup_localpath of file:// urls may fail, we should still see
+                    # if mirrors of the url exist
+                    adduri(newud, uris, uds)
+                except UnboundLocalError:
+                    pass
+                continue
+            uris.append(newuri)
+            uds.append(newud)
+
+            adduri(newud, uris, uds)
+
+    adduri(origud, uris, uds)
+
+    return uris, uds
+
+def rename_bad_checksum(ud, suffix):
+    """
+    Renames files to have suffix from parameter
+    """
+
+    if ud.localpath is None:
+        return
+
+    new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
+    bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
+    bb.utils.movefile(ud.localpath, new_localpath)
+
+
+def try_mirror_url(fetch, origud, ud, ld, check = False):
+    # Return of None or a value means we're finished
+    # False means try another url
+    try:
+        if check:
+            found = ud.method.checkstatus(fetch, ud, ld)
+            if found:
+                return found
+            return False
+
+        os.chdir(ld.getVar("DL_DIR", True))
+
+        if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
+            ud.method.download(ud, ld)
+            if hasattr(ud.method,"build_mirror_data"):
+                ud.method.build_mirror_data(ud, ld)
+
+        if not ud.localpath or not os.path.exists(ud.localpath):
+            return False
+
+        if ud.localpath == origud.localpath:
+            return ud.localpath
+
+        # We may be obtaining a mirror tarball which needs further processing by the real fetcher
+        # If that tarball is a local file:// we need to provide a symlink to it
+        dldir = ld.getVar("DL_DIR", True)
+        if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
+                and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
+            # Create donestamp in old format to avoid triggering a re-download
+            bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
+            open(ud.donestamp, 'w').close()
+            dest = os.path.join(dldir, os.path.basename(ud.localpath))
+            if not os.path.exists(dest):
+                os.symlink(ud.localpath, dest)
+            if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
+                origud.method.download(origud, ld)
+                if hasattr(origud.method,"build_mirror_data"):
+                    origud.method.build_mirror_data(origud, ld)
+            return ud.localpath
+        # Otherwise the result is a local file:// and we symlink to it
+        if not os.path.exists(origud.localpath):
+            if os.path.islink(origud.localpath):
+                # Broken symbolic link
+                os.unlink(origud.localpath)
+
+            os.symlink(ud.localpath, origud.localpath)
+        update_stamp(origud, ld)
+        return ud.localpath
+
+    except bb.fetch2.NetworkAccess:
+        raise
+
+    except bb.fetch2.BBFetchException as e:
+        if isinstance(e, ChecksumError):
+            logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
+            logger.warn(str(e))
+            rename_bad_checksum(ud, e.checksum)
+        elif isinstance(e, NoChecksumError):
+            raise
+        else:
+            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
+            logger.debug(1, str(e))
+        try:
+            ud.method.clean(ud, ld)
+        except UnboundLocalError:
+            pass
+        return False
+
+def try_mirrors(fetch, d, origud, mirrors, check = False):
+    """
+    Try to use a mirrored version of the sources.
+    This method will be automatically called before the fetchers go.
+
+    d is a bb.data instance
+    origud is the FetchData of the original url we're trying to download
+    mirrors is the list of mirrors we're going to try
+    """
+    ld = d.createCopy()
+
+    uris, uds = build_mirroruris(origud, mirrors, ld)
+
+    for index, uri in enumerate(uris):
+        ret = try_mirror_url(fetch, origud, uds[index], ld, check)
+        if ret != False:
+            return ret
+    return None
+
+def trusted_network(d, url):
+    """
+    Use a trusted url during download if networking is enabled and
+    BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
+    Note: modifies SRC_URI & mirrors.
+    """
+    if d.getVar('BB_NO_NETWORK', True) == "1":
+        return True
+
+    pkgname = d.expand(d.getVar('PN', False))
+    trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname)
+
+    if not trusted_hosts:
+        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS', True)
+
+    # Not enabled.
+    if not trusted_hosts:
+        return True
+
+    scheme, network, path, user, passwd, param = decodeurl(url)
+
+    if not network:
+        return True
+
+    network = network.lower()
+
+    for host in trusted_hosts.split(" "):
+        host = host.lower()
+        if host.startswith("*.") and ("." + network).endswith(host[1:]):
+            return True
+        if host == network:
+            return True
+
+    return False
+
+def srcrev_internal_helper(ud, d, name):
+    """
+    Return:
+        a) a source revision if specified
+        b) latest revision if SRCREV="AUTOINC"
+        c) None if not specified
+    """
+
+    srcrev = None
+    pn = d.getVar("PN", True)
+    attempts = []
+    if name != '' and pn:
+        attempts.append("SRCREV_%s_pn-%s" % (name, pn))
+    if name != '':
+        attempts.append("SRCREV_%s" % name)
+    if pn:
+        attempts.append("SRCREV_pn-%s" % pn)
+    attempts.append("SRCREV")
+
+    for a in attempts:
+        srcrev = d.getVar(a, True)
+        if srcrev and srcrev != "INVALID":
+            break
+
+    if 'rev' in ud.parm and 'tag' in ud.parm:
+        raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))
+
+    if 'rev' in ud.parm or 'tag' in ud.parm:
+        if 'rev' in ud.parm:
+            parmrev = ud.parm['rev']
+        else:
+            parmrev = ud.parm['tag']
+        if srcrev == "INVALID" or not srcrev:
+            return parmrev
+        if srcrev != parmrev:
+            raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please spcify one valid value" % (srcrev, parmrev))
+        return parmrev
+
+    if srcrev == "INVALID" or not srcrev:
+        raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
+    if srcrev == "AUTOINC":
+        srcrev = ud.method.latest_revision(ud, d, name)
+
+    return srcrev
+
+def get_checksum_file_list(d):
+    """ Get a list of files checksum in SRC_URI
+
+    Returns the resolved local paths of all local file entries in
+    SRC_URI as a space-separated string
+    """
+    fetch = Fetch([], d, cache = False, localonly = True)
+
+    dl_dir = d.getVar('DL_DIR', True)
+    filelist = []
+    for u in fetch.urls:
+        ud = fetch.ud[u]
+
+        if ud and isinstance(ud.method, local.Local):
+            paths = ud.method.localpaths(ud, d)
+            for f in paths:
+                pth = ud.decodedurl
+                if '*' in pth:
+                    f = os.path.join(os.path.abspath(f), pth)
+                if f.startswith(dl_dir):
+                    # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
+                    if os.path.exists(f):
+                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
+                    else:
+                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
+                filelist.append(f + ":" + str(os.path.exists(f)))
+
+    return " ".join(filelist)
+
+def get_file_checksums(filelist, pn):
+    """Get a list of the checksums for a list of local files
+
+    Returns the checksums for a list of local files, caching the results as
+    it proceeds
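+
+    The filelist parameter is the space-separated "path:exists" string produced
+    by get_checksum_file_list(), e.g. (illustrative):
+        "/path/to/file.patch:True /path/to/missing.patch:False"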
+
+    """
+
+    def checksum_file(f):
+        try:
+            checksum = _checksum_cache.get_checksum(f)
+        except OSError as e:
+            bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
+            return None
+        return checksum
+
+    def checksum_dir(pth):
+        # Handle directories recursively
+        dirchecksums = []
+        for root, dirs, files in os.walk(pth):
+            for name in files:
+                fullpth = os.path.join(root, name)
+                checksum = checksum_file(fullpth)
+                if checksum:
+                    dirchecksums.append((fullpth, checksum))
+        return dirchecksums
+
+    checksums = []
+    for pth in filelist.split():
+        exist = pth.split(":")[1]
+        if exist == "False":
+            continue
+        pth = pth.split(":")[0]
+        if '*' in pth:
+            # Handle globs
+            for f in glob.glob(pth):
+                if os.path.isdir(f):
+                    checksums.extend(checksum_dir(f))
+                else:
+                    checksum = checksum_file(f)
+                    checksums.append((f, checksum))
+        elif os.path.isdir(pth):
+            checksums.extend(checksum_dir(pth))
+        else:
+            checksum = checksum_file(pth)
+            checksums.append((pth, checksum))
+
+    checksums.sort(key=operator.itemgetter(1))
+    return checksums
+
+
+class FetchData(object):
+    """
+    A class which represents the fetcher state for a given URI.
+    """
+    def __init__(self, url, d, localonly = False):
+        # localpath is the location of a downloaded result. If not set, the file is local.
+        self.donestamp = None
+        self.localfile = ""
+        self.localpath = None
+        self.lockfile = None
+        self.mirrortarball = None
+        self.basename = None
+        self.basepath = None
+        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
+        self.date = self.getSRCDate(d)
+        self.url = url
+        if not self.user and "user" in self.parm:
+            self.user = self.parm["user"]
+        if not self.pswd and "pswd" in self.parm:
+            self.pswd = self.parm["pswd"]
+        self.setup = False
+
+        if "name" in self.parm:
+            self.md5_name = "%s.md5sum" % self.parm["name"]
+            self.sha256_name = "%s.sha256sum" % self.parm["name"]
+        else:
+            self.md5_name = "md5sum"
+            self.sha256_name = "sha256sum"
+        if self.md5_name in self.parm:
+            self.md5_expected = self.parm[self.md5_name]
+        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
+            self.md5_expected = None
+        else:
+            self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
+        if self.sha256_name in self.parm:
+            self.sha256_expected = self.parm[self.sha256_name]
+        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
+            self.sha256_expected = None
+        else:
+            self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
+        self.ignore_checksums = False
+
+        self.names = self.parm.get("name",'default').split(',')
+
+        self.method = None
+        for m in methods:
+            if m.supports(self, d):
+                self.method = m
+                break
+
+        if not self.method:
+            raise NoMethodError(url)
+
+        if localonly and not isinstance(self.method, local.Local):
+            raise NonLocalMethod()
+
+        if self.parm.get("proto", None) and "protocol" not in self.parm:
+            logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
+            self.parm["protocol"] = self.parm.get("proto", None)
+
+        if hasattr(self.method, "urldata_init"):
+            self.method.urldata_init(self, d)
+
+        if "localpath" in self.parm:
+            # if user sets localpath for file, use it instead.
+            self.localpath = self.parm["localpath"]
+            self.basename = os.path.basename(self.localpath)
+        elif self.localfile:
+            self.localpath = self.method.localpath(self, d)
+
+        dldir = d.getVar("DL_DIR", True)
+        # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
+        if self.localpath and self.localpath.startswith(dldir):
+            basepath = self.localpath
+        elif self.localpath:
+            basepath = dldir + os.sep + os.path.basename(self.localpath)
+        else:
+            basepath = dldir + os.sep + (self.basepath or self.basename)
+        self.donestamp = basepath + '.done'
+        self.lockfile = basepath + '.lock'
+
+    def setup_revisons(self, d):
+        self.revisions = {}
+        for name in self.names:
+            self.revisions[name] = srcrev_internal_helper(self, d, name)
+
+        # add compatibility code for non name specified case
+        if len(self.names) == 1:
+            self.revision = self.revisions[self.names[0]]
+
+    def setup_localpath(self, d):
+        if not self.localpath:
+            self.localpath = self.method.localpath(self, d)
+
+    def getSRCDate(self, d):
+        """
+        Return the SRC Date for the component
+
+        d the bb.data module
+        """
+        if "srcdate" in self.parm:
+            return self.parm['srcdate']
+
+        pn = d.getVar("PN", True)
+
+        if pn:
+            return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)
+
+        return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
+
+class FetchMethod(object):
+    """Base class for 'fetch'ing data"""
+
+    def __init__(self, urls=None):
+        self.urls = urls or []
+
+    def supports(self, urldata, d):
+        """
+        Check to see if this fetch class supports a given url.
+        """
+        return 0
+
+    def localpath(self, urldata, d):
+        """
+        Return the local filename of a given url assuming a successful fetch.
+        Can also setup variables in urldata for use in go (saving code duplication
+        and duplicate code execution)
+        """
+        return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)
+
+    def supports_checksum(self, urldata):
+        """
+        Is localpath something that can be represented by a checksum?
+        """
+
+        # We cannot compute checksums for directories
+        if os.path.isdir(urldata.localpath):
+            return False
+        if urldata.localpath.find("*") != -1:
+            return False
+
+        return True
+
+    def recommends_checksum(self, urldata):
+        """
+        Is this a backend for which checksumming is recommended (i.e. should
+        warnings be displayed if there is no checksum)?
+        """
+        return False
+
+    def _strip_leading_slashes(self, relpath):
+        """
+        Remove leading slash as os.path.join can't cope
+        """
+        while os.path.isabs(relpath):
+            relpath = relpath[1:]
+        return relpath
+
+    def setUrls(self, urls):
+        self.__urls = urls
+
+    def getUrls(self):
+        return self.__urls
+
+    urls = property(getUrls, setUrls, None, "Urls property")
+
+    def need_update(self, ud, d):
+        """
+        Force a fetch, even if localpath exists?
+        """
+        if os.path.exists(ud.localpath):
+            return False
+        return True
+
+    def supports_srcrev(self):
+        """
+        The fetcher supports auto source revisions (SRCREV)
+        """
+        return False
+
+    def download(self, urldata, d):
+        """
+        Fetch urls
+        Assumes localpath was called first
+        """
+        raise NoMethodError(urldata.url)
+
+    def unpack(self, urldata, rootdir, data):
+        iterate = False
+        file = urldata.localpath
+
+        try:
+            unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
+        except ValueError as exc:
+            bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
+                     (file, urldata.parm.get('unpack')))
+
+        base, ext = os.path.splitext(file)
+        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
+            efile = os.path.join(rootdir, os.path.basename(base))
+        else:
+            efile = file
+        cmd = None
+
+        if unpack:
+            if file.endswith('.tar'):
+                cmd = 'tar x --no-same-owner -f %s' % file
+            elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
+                cmd = 'tar xz --no-same-owner -f %s' % file
+            elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
+                cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
+            elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
+                cmd = 'gzip -dc %s > %s' % (file, efile)
+            elif file.endswith('.bz2'):
+                cmd = 'bzip2 -dc %s > %s' % (file, efile)
+            elif file.endswith('.tar.xz'):
+                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
+            elif file.endswith('.xz'):
+                cmd = 'xz -dc %s > %s' % (file, efile)
+            elif file.endswith('.tar.lz'):
+                cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
+            elif file.endswith('.lz'):
+                cmd = 'lzip -dc %s > %s' % (file, efile)
+            elif file.endswith('.zip') or file.endswith('.jar'):
+                try:
+                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
+                except ValueError as exc:
+                    bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
+                             (file, urldata.parm.get('dos')))
+                cmd = 'unzip -q -o'
+                if dos:
+                    cmd = '%s -a' % cmd
+                cmd = "%s '%s'" % (cmd, file)
+            elif file.endswith('.rpm') or file.endswith('.srpm'):
+                if 'extract' in urldata.parm:
+                    unpack_file = urldata.parm.get('extract')
+                    cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
+                    iterate = True
+                    iterate_file = unpack_file
+                else:
+                    cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
+            elif file.endswith('.deb') or file.endswith('.ipk'):
+                cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file
+
+        if not unpack or not cmd:
+            # If file == dest, then avoid any copies, as we already put the file into dest!
+            dest = os.path.join(rootdir, os.path.basename(file))
+            if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
+                if os.path.isdir(file):
+                    # If for example we're asked to copy file://foo/bar, we need to unpack the result into foo/bar
+                    basepath = getattr(urldata, "basepath", None)
+                    destdir = "."
+                    if basepath and basepath.endswith("/"):
+                        basepath = basepath.rstrip("/")
+                    elif basepath:
+                        basepath = os.path.dirname(basepath)
+                    if basepath and basepath.find("/") != -1:
+                        destdir = basepath[:basepath.rfind('/')]
+                        destdir = destdir.strip('/')
+                    if destdir != "." and not os.access("%s/%s" % (rootdir, destdir), os.F_OK):
+                        os.makedirs("%s/%s" % (rootdir, destdir))
+                    cmd = 'cp -fpPR %s %s/%s/' % (file, rootdir, destdir)
+                    #cmd = 'tar -cf - -C "%d" -ps . | tar -xf - -C "%s/%s/"' % (file, rootdir, destdir)
+                else:
+                    # The "destdir" handling was specifically done for FILESPATH
+                    # items.  So, only do so for file:// entries.
+                    if urldata.type == "file" and urldata.path.find("/") != -1:
+                       destdir = urldata.path.rsplit("/", 1)[0]
+                       if urldata.parm.get('subdir') != None:
+                          destdir = urldata.parm.get('subdir') + "/" + destdir
+                    else:
+                       if urldata.parm.get('subdir') != None:
+                          destdir = urldata.parm.get('subdir')
+                       else:
+                          destdir = "."
+                    bb.utils.mkdirhier("%s/%s" % (rootdir, destdir))
+                    cmd = 'cp -f %s %s/%s/' % (file, rootdir, destdir)
+
+        if not cmd:
+            return
+
+        # Change to subdir before executing command
+        save_cwd = os.getcwd()
+        os.chdir(rootdir)
+        if 'subdir' in urldata.parm:
+            newdir = ("%s/%s" % (rootdir, urldata.parm.get('subdir')))
+            bb.utils.mkdirhier(newdir)
+            os.chdir(newdir)
+
+        path = data.getVar('PATH', True)
+        if path:
+            cmd = "PATH=\"%s\" %s" % (path, cmd)
+        bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
+        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
+
+        os.chdir(save_cwd)
+
+        if ret != 0:
+            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
+
+        if iterate is True:
+            iterate_urldata = urldata
+            iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
+            self.unpack(urldata, rootdir, data)
+
+        return
+
+    def clean(self, urldata, d):
+        """
+        Clean any existing full or partial download
+        """
+        bb.utils.remove(urldata.localpath)
+
+    def try_premirror(self, urldata, d):
+        """
+        Should premirrors be used?
+        """
+        return True
+
+    def checkstatus(self, fetch, urldata, d):
+        """
+        Check the status of a URL
+        Assumes localpath was called first
+        """
+        logger.info("URL %s could not be checked for status since no method exists.", url)
+        return True
+
+    def latest_revision(self, ud, d, name):
+        """
+        Look in the cache for the latest revision, if not present ask the SCM.
+        """
+        if not hasattr(self, "_latest_revision"):
+            raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
+
+        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
+        key = self.generate_revision_key(ud, d, name)
+        try:
+            return revs[key]
+        except KeyError:
+            revs[key] = rev = self._latest_revision(ud, d, name)
+            return rev
+
+    def sortable_revision(self, ud, d, name):
+        latest_rev = self._build_revision(ud, d, name)
+        return True, str(latest_rev)
+
+    def generate_revision_key(self, ud, d, name):
+        key = self._revision_key(ud, d, name)
+        return "%s-%s" % (key, d.getVar("PN", True) or "")
+
+class Fetch(object):
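+    """
+    The fetcher frontend: maps each url in the given list to a FetchData
+    object and drives operations such as download() across all of them.
+
+    Typical use (illustrative):
+        fetcher = bb.fetch2.Fetch(d.getVar("SRC_URI", True).split(), d)
+        fetcher.download()
+    """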
+    def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
+        if localonly and cache:
+            raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
+
+        if len(urls) == 0:
+            urls = d.getVar("SRC_URI", True).split()
+        self.urls = urls
+        self.d = d
+        self.ud = {}
+        self.connection_cache = connection_cache
+
+        fn = d.getVar('FILE', True)
+        if cache and fn and fn in urldata_cache:
+            self.ud = urldata_cache[fn]
+
+        for url in urls:
+            if url not in self.ud:
+                try:
+                    self.ud[url] = FetchData(url, d, localonly)
+                except NonLocalMethod:
+                    if localonly:
+                        self.ud[url] = None
+
+        if fn and cache:
+            urldata_cache[fn] = self.ud
+
+    def localpath(self, url):
+        if url not in self.urls:
+            self.ud[url] = FetchData(url, self.d)
+
+        self.ud[url].setup_localpath(self.d)
+        return self.d.expand(self.ud[url].localpath)
+
+    def localpaths(self):
+        """
+        Return a list of the local filenames, assuming successful fetch
+        """
+        local = []
+
+        for u in self.urls:
+            ud = self.ud[u]
+            ud.setup_localpath(self.d)
+            local.append(ud.localpath)
+
+        return local
+
+    def download(self, urls=None):
+        """
+        Fetch all urls
+        """
+        if not urls:
+            urls = self.urls
+
+        network = self.d.getVar("BB_NO_NETWORK", True)
+        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
+
+        for u in urls:
+            ud = self.ud[u]
+            ud.setup_localpath(self.d)
+            m = ud.method
+            localpath = ""
+
+            lf = bb.utils.lockfile(ud.lockfile)
+
+            try:
+                self.d.setVar("BB_NO_NETWORK", network)
+
+                if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
+                    localpath = ud.localpath
+                elif m.try_premirror(ud, self.d):
+                    logger.debug(1, "Trying PREMIRRORS")
+                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
+                    localpath = try_mirrors(self, self.d, ud, mirrors, False)
+
+                if premirroronly:
+                    self.d.setVar("BB_NO_NETWORK", "1")
+
+                os.chdir(self.d.getVar("DL_DIR", True))
+
+                firsterr = None
+                verified_stamp = verify_donestamp(ud, self.d)
+                if not localpath and (not verified_stamp or m.need_update(ud, self.d)):
+                    try:
+                        if not trusted_network(self.d, ud.url):
+                            raise UntrustedUrl(ud.url)
+                        logger.debug(1, "Trying Upstream")
+                        m.download(ud, self.d)
+                        if hasattr(m, "build_mirror_data"):
+                            m.build_mirror_data(ud, self.d)
+                        localpath = ud.localpath
+                        # Verify the checksum early so that, on a mismatch,
+                        # the fetcher still has a chance to fetch from a mirror
+                        update_stamp(ud, self.d)
+
+                    except bb.fetch2.NetworkAccess:
+                        raise
+
+                    except BBFetchException as e:
+                        if isinstance(e, ChecksumError):
+                            logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
+                            logger.debug(1, str(e))
+                            rename_bad_checksum(ud, e.checksum)
+                        elif isinstance(e, NoChecksumError):
+                            raise
+                        else:
+                            logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
+                            logger.debug(1, str(e))
+                        firsterr = e
+                        # Remove any incomplete fetch
+                        if not verified_stamp:
+                            m.clean(ud, self.d)
+                        logger.debug(1, "Trying MIRRORS")
+                        mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
+                        localpath = try_mirrors(self, self.d, ud, mirrors)
+
+                if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
+                    if firsterr:
+                        logger.error(str(firsterr))
+                    raise FetchError("Unable to fetch URL from any source.", u)
+
+                update_stamp(ud, self.d)
+
+            except BBFetchException as e:
+                if isinstance(e, ChecksumError):
+                    logger.error("Checksum failure fetching %s" % u)
+                raise
+
+            finally:
+                bb.utils.unlockfile(lf)
+
+    def checkstatus(self, urls=None):
+        """
+        Check all urls exist upstream
+        """
+
+        if not urls:
+            urls = self.urls
+
+        for u in urls:
+            ud = self.ud[u]
+            ud.setup_localpath(self.d)
+            m = ud.method
+            logger.debug(1, "Testing URL %s", u)
+            # First try checking uri, u, from PREMIRRORS
+            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
+            ret = try_mirrors(self, self.d, ud, mirrors, True)
+            if not ret:
+                # Next try checking from the original uri, u
+                try:
+                    ret = m.checkstatus(self, ud, self.d)
+                except:
+                    # Finally, try checking uri, u, from MIRRORS
+                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
+                    ret = try_mirrors(self, self.d, ud, mirrors, True)
+
+            if not ret:
+                raise FetchError("URL %s doesn't work" % u, u)
+
+    def unpack(self, root, urls=None):
+        """
+        Check all urls exist upstream
+        """
+
+        if not urls:
+            urls = self.urls
+
+        for u in urls:
+            ud = self.ud[u]
+            ud.setup_localpath(self.d)
+
+            if self.d.expand(ud.localpath) is None:
+                continue
+
+            if ud.lockfile:
+                lf = bb.utils.lockfile(ud.lockfile)
+
+            ud.method.unpack(ud, root, self.d)
+
+            if ud.lockfile:
+                bb.utils.unlockfile(lf)
+
+    def clean(self, urls=None):
+        """
+        Clean files that the fetcher gets or places
+        """
+
+        if not urls:
+            urls = self.urls
+
+        for url in urls:
+            if url not in self.ud:
+                self.ud[url] = FetchData(url, self.d)
+            ud = self.ud[url]
+            ud.setup_localpath(self.d)
+
+            if not ud.localfile and ud.localpath is None:
+                continue
+
+            if ud.lockfile:
+                lf = bb.utils.lockfile(ud.lockfile)
+
+            ud.method.clean(ud, self.d)
+            if ud.donestamp:
+                bb.utils.remove(ud.donestamp)
+
+            if ud.lockfile:
+                bb.utils.unlockfile(lf)
+
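+# A minimal usage sketch (variable names assumed): given a datastore 'd'
+# with SRC_URI set, a typical fetch-and-unpack sequence is
+#
+#   fetcher = Fetch([], d)
+#   fetcher.download()
+#   fetcher.unpack(d.getVar("WORKDIR", True))
+#
+# An empty url list makes __init__ fall back to SRC_URI (see above).
+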
+class FetchConnectionCache(object):
+    """
+        A class which represents an container for socket connections.
+    """
+    def __init__(self):
+        self.cache = {}
+
+    def get_connection_name(self, host, port):
+        return host + ':' + str(port)
+
+    def add_connection(self, host, port, connection):
+        cn = self.get_connection_name(host, port)
+
+        if cn not in self.cache:
+            self.cache[cn] = connection
+
+    def get_connection(self, host, port):
+        connection = None
+
+        cn = self.get_connection_name(host, port)
+        if cn in self.cache:
+            connection = self.cache[cn]
+
+        return connection
+
+    def remove_connection(self, host, port):
+        cn = self.get_connection_name(host, port)
+        if cn in self.cache:
+            self.cache[cn].close()
+            del self.cache[cn]
+
+    def close_connections(self):
+        for cn in self.cache.keys():
+            self.cache[cn].close()
+            del self.cache[cn]
+
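+# Example sketch of how the cache is intended to be shared across fetchers
+# (the host, port and open_connection() helper are hypothetical):
+#
+#   cache = FetchConnectionCache()
+#   conn = cache.get_connection('example.org', 80)
+#   if conn is None:
+#       cache.add_connection('example.org', 80, open_connection('example.org', 80))
+#   ...
+#   cache.close_connections()
+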
+from . import cvs
+from . import git
+from . import gitsm
+from . import gitannex
+from . import local
+from . import svn
+from . import wget
+from . import ssh
+from . import sftp
+from . import perforce
+from . import bzr
+from . import hg
+from . import osc
+from . import repo
+from . import clearcase
+
+methods.append(local.Local())
+methods.append(wget.Wget())
+methods.append(svn.Svn())
+methods.append(git.Git())
+methods.append(gitsm.GitSM())
+methods.append(gitannex.GitANNEX())
+methods.append(cvs.Cvs())
+methods.append(ssh.SSH())
+methods.append(sftp.SFTP())
+methods.append(perforce.Perforce())
+methods.append(bzr.Bzr())
+methods.append(hg.Hg())
+methods.append(osc.Osc())
+methods.append(repo.Repo())
+methods.append(clearcase.ClearCase())
diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py
new file mode 100644
index 0000000..03e9ac4
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/bzr.py
@@ -0,0 +1,143 @@
+"""
+BitBake 'Fetch' implementation for bzr.
+
+"""
+
+# Copyright (C) 2007 Ross Burton
+# Copyright (C) 2007 Richard Purdie
+#
+#   Classes for obtaining upstream sources for the
+#   BitBake build tools.
+#   Copyright (C) 2003, 2004  Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import sys
+import logging
+import bb
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Bzr(FetchMethod):
+    def supports(self, ud, d):
+        return ud.type in ['bzr']
+
+    def urldata_init(self, ud, d):
+        """
+        init bzr specific variable within url data
+        """
+        # Create paths to bzr checkouts
+        relpath = self._strip_leading_slashes(ud.path)
+        ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)
+
+        ud.setup_revisons(d)
+
+        if not ud.revision:
+            ud.revision = self.latest_revision(ud, d)
+
+        ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
+
+    def _buildbzrcommand(self, ud, d, command):
+        """
+        Build up a bzr command line based on ud;
+        command is "fetch", "update" or "revno"
+        """
+
+        basecmd = data.expand('${FETCHCMD_bzr}', d)
+
+        proto =  ud.parm.get('protocol', 'http')
+
+        bzrroot = ud.host + ud.path
+
+        options = []
+
+        if command == "revno":
+            bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
+        else:
+            if ud.revision:
+                options.append("-r %s" % ud.revision)
+
+            if command == "fetch":
+                bzrcmd = "%s branch %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
+            elif command == "update":
+                bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
+            else:
+                raise FetchError("Invalid bzr command %s" % command, ud.url)
+
+        return bzrcmd
+
+    def download(self, ud, d):
+        """Fetch url"""
+
+        if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
+            bzrcmd = self._buildbzrcommand(ud, d, "update")
+            logger.debug(1, "BZR Update %s", ud.url)
+            bb.fetch2.check_network_access(d, bzrcmd, ud.url)
+            os.chdir(os.path.join(ud.pkgdir, os.path.basename(ud.path)))
+            runfetchcmd(bzrcmd, d)
+        else:
+            bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
+            bzrcmd = self._buildbzrcommand(ud, d, "fetch")
+            bb.fetch2.check_network_access(d, bzrcmd, ud.url)
+            logger.debug(1, "BZR Checkout %s", ud.url)
+            bb.utils.mkdirhier(ud.pkgdir)
+            os.chdir(ud.pkgdir)
+            logger.debug(1, "Running %s", bzrcmd)
+            runfetchcmd(bzrcmd, d)
+
+        os.chdir(ud.pkgdir)
+
+        scmdata = ud.parm.get("scmdata", "")
+        if scmdata == "keep":
+            tar_flags = ""
+        else:
+            tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"
+
+        # tar them up to a defined filename
+        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])
+
+    def supports_srcrev(self):
+        return True
+
+    def _revision_key(self, ud, d, name):
+        """
+        Return a unique key for the url
+        """
+        return "bzr:" + ud.pkgdir
+
+    def _latest_revision(self, ud, d, name):
+        """
+        Return the latest upstream revision number
+        """
+        logger.debug(2, "BZR fetcher hitting network for %s", ud.url)
+
+        bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)
+
+        output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
+
+        return output.strip()
+
+    def sortable_revision(self, ud, d, name):
+        """
+        Return a sortable revision number, which for bzr is simply the
+        revision number itself
+
+        return False, self._build_revision(ud, d)
+
+    def _build_revision(self, ud, d):
+        return ud.revision
diff --git a/bitbake/lib/bb/fetch2/clearcase.py b/bitbake/lib/bb/fetch2/clearcase.py
new file mode 100644
index 0000000..ba83e7c
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/clearcase.py
@@ -0,0 +1,263 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' clearcase implementation
+
+The clearcase fetcher is used to retrieve files from a ClearCase repository.
+
+Usage in the recipe:
+
+    SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
+    SRCREV = "EXAMPLE_CLEARCASE_TAG"
+    PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"
+
+The fetcher uses the rcleartool or cleartool remote client, depending on which one is available.
+
+Supported SRC_URI options are:
+
+- vob
+    (required) The name of the clearcase VOB (with prepending "/")
+
+- module
+    The module in the selected VOB (with prepending "/")
+
+    The module and vob parameters are combined to create
+    the following load rule in the view config spec:
+                load <vob><module>
+
+- proto
+    http or https
+
+Related variables:
+
+    CCASE_CUSTOM_CONFIG_SPEC
+            Write a config spec to this variable in your recipe to use it instead
+            of the default config spec generated by this fetcher.
+            Please note that the SRCREV loses its functionality if you specify
+            this variable. SRCREV is still used to label the archive after a fetch,
+            but it doesn't define what's fetched.
+
+User credentials:
+    cleartool:
+            The login of cleartool is handled by the system. No special steps needed.
+
+    rcleartool:
+            In order to use rcleartool with authenticated users an `rcleartool login` is
+            necessary before using the fetcher.
+"""
+# Copyright (C) 2014 Siemens AG
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+
+import os
+import sys
+import shutil
+import bb
+from   bb import data
+from   bb.fetch2 import FetchMethod
+from   bb.fetch2 import FetchError
+from   bb.fetch2 import MissingParameterError
+from   bb.fetch2 import ParameterError
+from   bb.fetch2 import runfetchcmd
+from   bb.fetch2 import logger
+from   distutils import spawn
+
+class ClearCase(FetchMethod):
+    """Class to fetch urls via 'clearcase'"""
+    def init(self, d):
+        pass
+
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with Clearcase.
+        """
+        return ud.type in ['ccrc']
+
+    def debug(self, msg):
+        logger.debug(1, "ClearCase: %s", msg)
+
+    def urldata_init(self, ud, d):
+        """
+        Init ClearCase-specific variables within url data
+        """
+        ud.proto = "https"
+        if 'protocol' in ud.parm:
+            ud.proto = ud.parm['protocol']
+        if not ud.proto in ('http', 'https'):
+            raise fetch2.ParameterError("Invalid protocol type", ud.url)
+
+        ud.vob = ''
+        if 'vob' in ud.parm:
+            ud.vob = ud.parm['vob']
+        else:
+            msg = ud.url+": vob must be defined so the fetcher knows what to get."
+            raise MissingParameterError('vob', msg)
+
+        if 'module' in ud.parm:
+            ud.module = ud.parm['module']
+        else:
+            ud.module = ""
+
+        ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")
+
+        if data.getVar("SRCREV", d, True) == "INVALID":
+          raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")
+
+        ud.label = d.getVar("SRCREV", False)
+        ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)
+
+        ud.server     = "%s://%s%s" % (ud.proto, ud.host, ud.path)
+
+        ud.identifier = "clearcase-%s%s-%s" % ( ud.vob.replace("/", ""),
+                                                ud.module.replace("/", "."),
+                                                ud.label.replace("/", "."))
+
+        ud.viewname         = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True))
+        ud.csname           = "%s-config-spec" % (ud.identifier)
+        ud.ccasedir         = os.path.join(data.getVar("DL_DIR", d, True), ud.type)
+        ud.viewdir          = os.path.join(ud.ccasedir, ud.viewname)
+        ud.configspecfile   = os.path.join(ud.ccasedir, ud.csname)
+        ud.localfile        = "%s.tar.gz" % (ud.identifier)
+
+        self.debug("host            = %s" % ud.host)
+        self.debug("path            = %s" % ud.path)
+        self.debug("server          = %s" % ud.server)
+        self.debug("proto           = %s" % ud.proto)
+        self.debug("type            = %s" % ud.type)
+        self.debug("vob             = %s" % ud.vob)
+        self.debug("module          = %s" % ud.module)
+        self.debug("basecmd         = %s" % ud.basecmd)
+        self.debug("label           = %s" % ud.label)
+        self.debug("ccasedir        = %s" % ud.ccasedir)
+        self.debug("viewdir         = %s" % ud.viewdir)
+        self.debug("viewname        = %s" % ud.viewname)
+        self.debug("configspecfile  = %s" % ud.configspecfile)
+        self.debug("localfile       = %s" % ud.localfile)
+
+        ud.localfile = os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
+
+    def _build_ccase_command(self, ud, command):
+        """
+        Build up a commandline based on ud
+        command is: mkview, setcs, rmview
+        """
+        options = []
+
+        if "rcleartool" in ud.basecmd:
+            options.append("-server %s" % ud.server)
+
+        basecmd = "%s %s" % (ud.basecmd, command)
+
+        if command == 'mkview':
+            if not "rcleartool" in ud.basecmd:
+                # Cleartool needs a -snapshot view
+                options.append("-snapshot")
+            options.append("-tag %s" % ud.viewname)
+            options.append(ud.viewdir)
+
+        elif command == 'rmview':
+            options.append("-force")
+            options.append("%s" % ud.viewdir)
+
+        elif command == 'setcs':
+            options.append("-overwrite")
+            options.append(ud.configspecfile)
+
+        else:
+            raise FetchError("Invalid ccase command %s" % command)
+
+        ccasecmd = "%s %s" % (basecmd, " ".join(options))
+        self.debug("ccasecmd = %s" % ccasecmd)
+        return ccasecmd
+
+    def _write_configspec(self, ud, d):
+        """
+        Create config spec file (ud.configspecfile) for ccase view
+        """
+        config_spec = ""
+        custom_config_spec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", d)
+        if custom_config_spec is not None:
+            for line in custom_config_spec.split("\\n"):
+                config_spec += line+"\n"
+            bb.warn("A custom config spec has been set, SRCREV is only relevant for the tarball name.")
+        else:
+            config_spec += "element * CHECKEDOUT\n"
+            config_spec += "element * %s\n" % ud.label
+            config_spec += "load %s%s\n" % (ud.vob, ud.module)
+
+        logger.info("Using config spec: \n%s" % config_spec)
+
+        with open(ud.configspecfile, 'w') as f:
+            f.write(config_spec)
+
+    def _remove_view(self, ud, d):
+        if os.path.exists(ud.viewdir):
+            os.chdir(ud.ccasedir)
+            cmd = self._build_ccase_command(ud, 'rmview')
+            logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
+            bb.fetch2.check_network_access(d, cmd, ud.url)
+            output = runfetchcmd(cmd, d)
+            logger.info("rmview output: %s", output)
+
+    def need_update(self, ud, d):
+        if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
+            ud.identifier += "-%s" % d.getVar("DATETIME",d, True)
+            return True
+        if os.path.exists(ud.localpath):
+            return False
+        return True
+
+    def supports_srcrev(self):
+        return True
+
+    def sortable_revision(self, ud, d, name):
+        return False, ud.identifier
+
+    def download(self, ud, d):
+        """Fetch url"""
+
+        # Make a fresh view
+        bb.utils.mkdirhier(ud.ccasedir)
+        self._write_configspec(ud, d)
+        cmd = self._build_ccase_command(ud, 'mkview')
+        logger.info("creating view [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
+        bb.fetch2.check_network_access(d, cmd, ud.url)
+        try:
+            runfetchcmd(cmd, d)
+        except FetchError as e:
+            if "CRCLI2008E" in e.msg:
+                raise FetchError("%s\n%s\n" % (e.msg, "Call `rcleartool login` in your console to authenticate to the clearcase server before running bitbake."))
+            else:
+                raise e
+
+        # Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
+        os.chdir(ud.viewdir)
+        cmd = self._build_ccase_command(ud, 'setcs')
+        logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
+        bb.fetch2.check_network_access(d, cmd, ud.url)
+        output = runfetchcmd(cmd, d)
+        logger.info("%s", output)
+
+        # Copy the configspec to the viewdir so we have it in our source tarball later
+        shutil.copyfile(ud.configspecfile, os.path.join(ud.viewdir, ud.csname))
+
+        # Clean clearcase meta-data before tar
+
+        runfetchcmd('tar -czf "%s" .' % (ud.localpath), d, cleanup = [ud.localpath])
+
+        # Clean up so we can create a new view next time
+        self.clean(ud, d)
+
+    def clean(self, ud, d):
+        self._remove_view(ud, d)
+        bb.utils.remove(ud.configspecfile)
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py
new file mode 100644
index 0000000..d27d96f
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/cvs.py
@@ -0,0 +1,171 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+#Based on functions from the base bb module, Copyright 2003 Holger Schurig
+#
+
+import os
+import logging
+import bb
+from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger
+from bb.fetch2 import runfetchcmd
+
+class Cvs(FetchMethod):
+    """
+    Class to fetch a module or modules from cvs repositories
+    """
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with cvs.
+        """
+        return ud.type in ['cvs']
+
+    def urldata_init(self, ud, d):
+        if not "module" in ud.parm:
+            raise MissingParameterError("module", ud.url)
+        ud.module = ud.parm["module"]
+
+        ud.tag = ud.parm.get('tag', "")
+
+        # Override the default date in certain cases
+        if 'date' in ud.parm:
+            ud.date = ud.parm['date']
+        elif ud.tag:
+            ud.date = ""
+
+        norecurse = ''
+        if 'norecurse' in ud.parm:
+            norecurse = '_norecurse'
+
+        fullpath = ''
+        if 'fullpath' in ud.parm:
+            fullpath = '_fullpath'
+
+        ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
+
+    def need_update(self, ud, d):
+        if (ud.date == "now"):
+            return True
+        if not os.path.exists(ud.localpath):
+            return True
+        return False
+
+    def download(self, ud, d):
+
+        method = ud.parm.get('method', 'pserver')
+        localdir = ud.parm.get('localdir', ud.module)
+        cvs_port = ud.parm.get('port', '')
+
+        cvs_rsh = None
+        if method == "ext":
+            if "rsh" in ud.parm:
+                cvs_rsh = ud.parm["rsh"]
+
+        if method == "dir":
+            cvsroot = ud.path
+        else:
+            cvsroot = ":" + method
+            cvsproxyhost = d.getVar('CVS_PROXY_HOST', True)
+            if cvsproxyhost:
+                cvsroot += ";proxy=" + cvsproxyhost
+            cvsproxyport = d.getVar('CVS_PROXY_PORT', True)
+            if cvsproxyport:
+                cvsroot += ";proxyport=" + cvsproxyport
+            cvsroot += ":" + ud.user
+            if ud.pswd:
+                cvsroot += ":" + ud.pswd
+            cvsroot += "@" + ud.host + ":" + cvs_port + ud.path
+
+        options = []
+        if 'norecurse' in ud.parm:
+            options.append("-l")
+        if ud.date:
+            # treat YYYYMMDDHHMM specially for CVS
+            if len(ud.date) == 12:
+                options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
+            else:
+                options.append("-D \"%s UTC\"" % ud.date)
+        if ud.tag:
+            options.append("-r %s" % ud.tag)
+
+        cvsbasecmd = d.getVar("FETCHCMD_cvs", True)
+        cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
+        cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)
+
+        if cvs_rsh:
+            cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
+            cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
+
+        # create module directory
+        logger.debug(2, "Fetch: checking for module directory")
+        pkg = d.getVar('PN', True)
+        pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
+        moddir = os.path.join(pkgdir, localdir)
+        if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
+            logger.info("Update " + ud.url)
+            bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
+            # update sources there
+            os.chdir(moddir)
+            cmd = cvsupdatecmd
+        else:
+            logger.info("Fetch " + ud.url)
+            # check out sources there
+            bb.utils.mkdirhier(pkgdir)
+            os.chdir(pkgdir)
+            logger.debug(1, "Running %s", cvscmd)
+            bb.fetch2.check_network_access(d, cvscmd, ud.url)
+            cmd = cvscmd
+
+        runfetchcmd(cmd, d, cleanup = [moddir])
+
+        if not os.access(moddir, os.R_OK):
+            raise FetchError("Directory %s was not readable despite sucessful fetch?!" % moddir, ud.url)
+
+        scmdata = ud.parm.get("scmdata", "")
+        if scmdata == "keep":
+            tar_flags = ""
+        else:
+            tar_flags = "--exclude 'CVS'"
+
+        # tar them up to a defined filename
+        if 'fullpath' in ud.parm:
+            os.chdir(pkgdir)
+            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
+        else:
+            os.chdir(moddir)
+            os.chdir('..')
+            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))
+
+        runfetchcmd(cmd, d, cleanup = [ud.localpath])
+
+    def clean(self, ud, d):
+        """ Clean CVS Files and tarballs """
+
+        pkg = d.getVar('PN', True)
+        pkgdir = os.path.join(d.getVar("CVSDIR", True), pkg)
+
+        bb.utils.remove(pkgdir, True)
+        bb.utils.remove(ud.localpath)
+
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
new file mode 100644
index 0000000..40658ff
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -0,0 +1,447 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' git implementation
+
+The git fetcher supports SRC_URIs of the form:
+SRC_URI = "git://some.host/somepath;OptionA=xxx;OptionB=xxx;..."
+
+Supported SRC_URI options are:
+
+- branch
+   The git branch to retrieve from. The default is "master"
+
+   This option also supports fetching multiple branches, separated by
+   commas.  In that case, the name option must carry the same number of
+   names, one per branch; each name is used to specify the SRCREV for
+   its branch, e.g.:
+   SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
+   SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
+   SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
+
+- tag
+    The git tag to retrieve. The default is "master"
+
+- protocol
+   The method to use to access the repository. Common options are "git",
+   "http", "https", "file", "ssh" and "rsync". The default is "git".
+
+- rebaseable
+   rebaseable indicates that the upstream git repo may rebase in the
+   future, so the current revision may disappear from the upstream repo.
+   This option tells the fetcher to preserve the local cache carefully
+   for future use. The default value is "0"; set rebaseable=1 for a
+   rebaseable git repo.
+
+- nocheckout
+   Don't checkout source code when unpacking. Set this option for recipes
+   that have their own routine to check out code.
+   The default is "0"; set nocheckout=1 if needed.
+
+- bareclone
+   Create a bare clone of the source code and don't checkout the source
+   code when unpacking. Set this option for recipes that have their own
+   routine to check out code and tracking branch requirements.
+   The default is "0"; set bareclone=1 if needed.
+
+- nobranch
+   Don't perform the SHA validation against branches. Set this option for
+   recipes that refer to a commit which is valid in a tag rather than in
+   a branch. The default is "0"; set nobranch=1 if needed.
+
+"""
+
+#Copyright (C) 2005 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import re
+import bb
+from   bb    import data
+from   bb.fetch2 import FetchMethod
+from   bb.fetch2 import runfetchcmd
+from   bb.fetch2 import logger
+
+class Git(FetchMethod):
+    """Class to fetch a module or modules from git repositories"""
+    def init(self, d):
+        pass
+
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with git.
+        """
+        return ud.type in ['git']
+
+    def supports_checksum(self, urldata):
+        return False
+
+    def urldata_init(self, ud, d):
+        """
+        Init git-specific variables within url data
+        so that git methods like latest_revision() can work
+        """
+        if 'protocol' in ud.parm:
+            ud.proto = ud.parm['protocol']
+        elif not ud.host:
+            ud.proto = 'file'
+        else:
+            ud.proto = "git"
+
+        if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
+            raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
+
+        ud.nocheckout = ud.parm.get("nocheckout","0") == "1"
+
+        ud.rebaseable = ud.parm.get("rebaseable","0") == "1"
+
+        ud.nobranch = ud.parm.get("nobranch","0") == "1"
+
+        # bareclone implies nocheckout
+        ud.bareclone = ud.parm.get("bareclone","0") == "1"
+        if ud.bareclone:
+            ud.nocheckout = 1
+  
+        ud.unresolvedrev = {}
+        branches = ud.parm.get("branch", "master").split(',')
+        if len(branches) != len(ud.names):
+            raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
+        ud.branches = {}
+        for name in ud.names:
+            branch = branches[ud.names.index(name)]
+            ud.branches[name] = branch
+            ud.unresolvedrev[name] = branch
+
+        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git -c core.fsyncobjectfiles=0"
+
+        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable
+
+        ud.setup_revisons(d)
+
+        for name in ud.names:
+            # Ensure anything that doesn't look like a SHA-1 revision (40 hex digits) is resolved into one
+            if not ud.revisions[name] or len(ud.revisions[name]) != 40  or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
+                if ud.revisions[name]:
+                    ud.unresolvedrev[name] = ud.revisions[name]
+                ud.revisions[name] = self.latest_revision(ud, d, name)
+
+        gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.'))
+        if gitsrcname.startswith('.'):
+            gitsrcname = gitsrcname[1:]
+
+        # For a rebaseable git repo it is necessary to keep a mirror tarball
+        # per revision, so that even if the revision disappears from the
+        # upstream repo in the future, the mirror will remain intact and
+        # still contain the revision
+        if ud.rebaseable:
+            for name in ud.names:
+                gitsrcname = gitsrcname + '_' + ud.revisions[name]
+        ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
+        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
+        gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/")
+        ud.clonedir = os.path.join(gitdir, gitsrcname)
+
+        ud.localfile = ud.clonedir
+
+    def localpath(self, ud, d):
+        return ud.clonedir
+
+    def need_update(self, ud, d):
+        if not os.path.exists(ud.clonedir):
+            return True
+        os.chdir(ud.clonedir)
+        for name in ud.names:
+            if not self._contains_ref(ud, d, name):
+                return True
+        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
+            return True
+        return False
+
+    def try_premirror(self, ud, d):
+        # If we don't do this, updating an existing checkout with only premirrors
+        # is not possible
+        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
+            return True
+        if os.path.exists(ud.clonedir):
+            return False
+        return True
+
+    def download(self, ud, d):
+        """Fetch url"""
+
+        ud.repochanged = not os.path.exists(ud.fullmirror)
+
+        # If the checkout doesn't exist and the mirror tarball does, extract it
+        if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
+            bb.utils.mkdirhier(ud.clonedir)
+            os.chdir(ud.clonedir)
+            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
+
+        repourl = self._get_repo_url(ud)
+
+        # If the repo still doesn't exist, fallback to cloning it
+        if not os.path.exists(ud.clonedir):
+            # We do this since git will use a "-l" option automatically for local urls where possible
+            if repourl.startswith("file://"):
+                repourl = repourl[7:]
+            clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
+            if ud.proto.lower() != 'file':
+                bb.fetch2.check_network_access(d, clone_cmd)
+            runfetchcmd(clone_cmd, d)
+
+        os.chdir(ud.clonedir)
+        # Update the checkout if needed
+        needupdate = False
+        for name in ud.names:
+            if not self._contains_ref(ud, d, name):
+                needupdate = True
+        if needupdate:
+            try: 
+                runfetchcmd("%s remote rm origin" % ud.basecmd, d) 
+            except bb.fetch2.FetchError:
+                logger.debug(1, "No Origin")
+
+            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
+            fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
+            if ud.proto.lower() != 'file':
+                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
+            runfetchcmd(fetch_cmd, d)
+            runfetchcmd("%s prune-packed" % ud.basecmd, d)
+            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
+            ud.repochanged = True
+        os.chdir(ud.clonedir)
+        for name in ud.names:
+            if not self._contains_ref(ud, d, name):
+                raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
+
+    def build_mirror_data(self, ud, d):
+        # Generate a mirror tarball if needed
+        if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
+            # It's possible that this symlink points to a read-only filesystem with PREMIRROR
+            if os.path.islink(ud.fullmirror):
+                os.unlink(ud.fullmirror)
+
+            os.chdir(ud.clonedir)
+            logger.info("Creating tarball of git repository")
+            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
+            runfetchcmd("touch %s.done" % (ud.fullmirror), d)
+
+    def unpack(self, ud, destdir, d):
+        """ unpack the downloaded src to destdir"""
+
+        subdir = ud.parm.get("subpath", "")
+        if subdir != "":
+            readpathspec = ":%s" % (subdir)
+            def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
+        else:
+            readpathspec = ""
+            def_destsuffix = "git/"
+
+        destsuffix = ud.parm.get("destsuffix", def_destsuffix)
+        destdir = ud.destdir = os.path.join(destdir, destsuffix)
+        if os.path.exists(destdir):
+            bb.utils.prunedir(destdir)
+
+        cloneflags = "-s -n"
+        if ud.bareclone:
+            cloneflags += " --mirror"
+
+        # Versions of git prior to 1.7.9.2 have issues where foo.git and foo get confused
+        # and you end up with some horrible union of the two when you attempt to clone it
+        # The least invasive workaround seems to be a symlink to the real directory to
+        # fool git into ignoring any .git version that may also be present.
+        #
+        # The issue is fixed in more recent versions of git so we can drop this hack in future
+        # when that version becomes common enough.
+        clonedir = ud.clonedir
+        if not ud.path.endswith(".git"):
+            indirectiondir = destdir[:-1] + ".indirectionsymlink"
+            if os.path.exists(indirectiondir):
+                os.remove(indirectiondir)
+            bb.utils.mkdirhier(os.path.dirname(indirectiondir))
+            os.symlink(ud.clonedir, indirectiondir)
+            clonedir = indirectiondir
+
+        runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, clonedir, destdir), d)
+        os.chdir(destdir)
+        repourl = self._get_repo_url(ud)
+        runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d)
+        if not ud.nocheckout:
+            if subdir != "":
+                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
+                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
+            elif not ud.nobranch:
+                branchname =  ud.branches[ud.names[0]]
+                runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
+                            ud.revisions[ud.names[0]]), d)
+                runfetchcmd("%s branch --set-upstream %s origin/%s" % (ud.basecmd, branchname, \
+                            branchname), d)
+            else:
+                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)
+
+        return True
+
+    def clean(self, ud, d):
+        """ clean the git directory """
+
+        bb.utils.remove(ud.localpath, True)
+        bb.utils.remove(ud.fullmirror)
+        bb.utils.remove(ud.fullmirror + ".done")
+
+    def supports_srcrev(self):
+        return True
+
+    def _contains_ref(self, ud, d, name):
+        cmd = ""
+        if ud.nobranch:
+            cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
+                ud.basecmd, ud.revisions[name])
+        else:
+            cmd =  "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
+                ud.basecmd, ud.revisions[name], ud.branches[name])
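+        # Both variants pipe through 'wc -l', so the command prints a single
+        # count; "0" means the revision (or revision/branch pair) is absent.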
+        try:
+            output = runfetchcmd(cmd, d, quiet=True)
+        except bb.fetch2.FetchError:
+            return False
+        if len(output.split()) > 1:
+            raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
+        return output.split()[0] != "0"
+
+    def _get_repo_url(self, ud):
+        """
+        Return the repository URL
+        """
+        if ud.user:
+            username = ud.user + '@'
+        else:
+            username = ""
+        return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)
+
+    def _revision_key(self, ud, d, name):
+        """
+        Return a unique key for the url
+        """
+        return "git:" + ud.host + ud.path.replace('/', '.') + ud.unresolvedrev[name]
+
+    def _lsremote(self, ud, d, search):
+        """
+        Run git ls-remote with the specified search string
+        """
+        repourl = self._get_repo_url(ud)
+        cmd = "%s ls-remote %s %s" % \
+              (ud.basecmd, repourl, search)
+        if ud.proto.lower() != 'file':
+            bb.fetch2.check_network_access(d, cmd)
+        output = runfetchcmd(cmd, d, True)
+        if not output:
+            raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
+        return output
+
+    def _latest_revision(self, ud, d, name):
+        """
+        Compute the HEAD revision for the url
+        """
+        output = self._lsremote(ud, d, "")
+        # Tags of the form ^{} may not work, so we need to fall back to the other form
+        if ud.unresolvedrev[name][:5] == "refs/":
+            head = ud.unresolvedrev[name]
+            tag = ud.unresolvedrev[name]
+        else:
+            head = "refs/heads/%s" % ud.unresolvedrev[name]
+            tag = "refs/tags/%s" % ud.unresolvedrev[name]
+        for s in [head, tag + "^{}", tag]:
+            for l in output.split('\n'):
+                if s in l:
+                    return l.split()[0]
+        raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
+            (ud.unresolvedrev[name], ud.host+ud.path))
+
+    def latest_versionstring(self, ud, d):
+        """
+        Compute the latest release name like "x.y.x" in "x.y.x+gitHASH"
+        by searching through the tags output of ls-remote, comparing
+        versions and returning the highest match.
+        """
+        pupver = ('', '')
+
+        tagregex = re.compile(d.getVar('GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)")
+        try:
+            output = self._lsremote(ud, d, "refs/tags/*")
+        except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess):
+            return pupver
+
+        verstring = ""
+        revision = ""
+        for line in output.split("\n"):
+            if not line:
+                break
+
+            tag_head = line.split("/")[-1]
+            # Ignore non-release tags
+            m = re.search("(alpha|beta|rc|final)+", tag_head)
+            if m:
+                continue
+
+            # search for version in the line
+            tag = tagregex.search(tag_head)
+            if tag is None:
+                continue
+
+            tag = tag.group('pver')
+            tag = tag.replace("_", ".")
+
+            if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
+                continue
+
+            verstring = tag
+            revision = line.split()[0]
+            pupver = (verstring, revision)
+
+        return pupver
+
+    def _build_revision(self, ud, d, name):
+        return ud.revisions[name]
+
+    def gitpkgv_revision(self, ud, d, name):
+        """
+        Return a sortable revision number by counting commits in the history
+        Based on gitpkgv.bbclass in meta-openembedded
+        """
+        rev = self._build_revision(ud, d, name)
+        localpath = ud.localpath
+        rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
+        if not os.path.exists(localpath):
+            commits = None
+        else:
+            if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
+                from pipes import quote
+                commits = bb.fetch2.runfetchcmd(
+                        "git rev-list %s -- | wc -l" % (quote(rev)),
+                        d, quiet=True).strip().lstrip('0')
+                if commits:
+                    open(rev_file, "w").write("%d\n" % int(commits))
+            else:
+                commits = open(rev_file, "r").readline(128).strip()
+        if commits:
+            return False, "%s+%s" % (commits, rev[:7])
+        else:
+            return True, str(rev)
+
+    def checkstatus(self, fetch, ud, d):
+        try:
+            self._lsremote(ud, d, "")
+            return True
+        except bb.fetch2.FetchError:
+            return False
diff --git a/bitbake/lib/bb/fetch2/gitannex.py b/bitbake/lib/bb/fetch2/gitannex.py
new file mode 100644
index 0000000..0f37897
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gitannex.py
@@ -0,0 +1,76 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' git annex implementation
+"""
+
+# Copyright (C) 2014 Otavio Salvador
+# Copyright (C) 2014 O.S. Systems Software LTDA.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import bb
+from   bb import data
+from   bb.fetch2.git import Git
+from   bb.fetch2 import runfetchcmd
+from   bb.fetch2 import logger
+
+class GitANNEX(Git):
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with git-annex.
+        """
+        return ud.type in ['gitannex']
+
+    def uses_annex(self, ud, d):
+        for name in ud.names:
+            try:
+                runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True)
+                return True
+            except bb.fetch2.FetchError:
+                pass
+
+        return False
+
+    def update_annex(self, ud, d):
+        try:
+            runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True)
+        except bb.fetch2.FetchError:
+            return False
+        runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True)
+
+        return True
+
+    def download(self, ud, d):
+        Git.download(self, ud, d)
+
+        os.chdir(ud.clonedir)
+        annex = self.uses_annex(ud, d)
+        if annex:
+            self.update_annex(ud, d)
+
+    def unpack(self, ud, destdir, d):
+        Git.unpack(self, ud, destdir, d)
+
+        os.chdir(ud.destdir)
+        try:
+            runfetchcmd("%s annex sync" % (ud.basecmd), d)
+        except bb.fetch2.FetchError:
+            pass
+
+        annex = self.uses_annex(ud, d)
+        if annex:
+            runfetchcmd("%s annex get" % (ud.basecmd), d)
+            runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True)
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
new file mode 100644
index 0000000..0392e48
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -0,0 +1,137 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' git submodules implementation
+
+Inherits from and extends the Git fetcher to retrieve submodules of a git repository
+after cloning.
+
+SRC_URI = "gitsm://<see Git fetcher for syntax>"
+
+See the Git fetcher, git://, for usage documentation.
+
+NOTE: Switching a SRC_URI from "git://" to "gitsm://" requires a clean of your recipe.
+
+"""
+
+# Copyright (C) 2013 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import bb
+from   bb    import data
+from   bb.fetch2.git import Git
+from   bb.fetch2 import runfetchcmd
+from   bb.fetch2 import logger
+
+class GitSM(Git):
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with the gitsm (git submodules) fetcher.
+        """
+        return ud.type in ['gitsm']
+
+    def uses_submodules(self, ud, d):
+        for name in ud.names:
+            try:
+                runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True)
+                return True
+            except bb.fetch2.FetchError:
+                pass
+        return False
+
+    def _set_relative_paths(self, repopath):
+        """
+        Fix submodule paths to be relative instead of absolute,
+        so that when we move the repo it doesn't break
+        (In Git 1.7.10+ this is done automatically)
+        """
+        submodules = []
+        with open(os.path.join(repopath, '.gitmodules'), 'r') as f:
+            for line in f.readlines():
+                if line.startswith('[submodule'):
+                    submodules.append(line.split('"')[1])
+
+        for module in submodules:
+            repo_conf = os.path.join(repopath, module, '.git')
+            if os.path.exists(repo_conf):
+                with open(repo_conf, 'r') as f:
+                    lines = f.readlines()
+                newpath = ''
+                for i, line in enumerate(lines):
+                    if line.startswith('gitdir:'):
+                        oldpath = line.split(': ')[-1].rstrip()
+                        if oldpath.startswith('/'):
+                            newpath = '../' * (module.count('/') + 1) + '.git/modules/' + module
+                            lines[i] = 'gitdir: %s\n' % newpath
+                            break
+                if newpath:
+                    with open(repo_conf, 'w') as f:
+                        for line in lines:
+                            f.write(line)
+
+            repo_conf2 = os.path.join(repopath, '.git', 'modules', module, 'config')
+            if os.path.exists(repo_conf2):
+                with open(repo_conf2, 'r') as f:
+                    lines = f.readlines()
+                newpath = ''
+                for i, line in enumerate(lines):
+                    if line.lstrip().startswith('worktree = '):
+                        oldpath = line.split(' = ')[-1].rstrip()
+                        if oldpath.startswith('/'):
+                            newpath = '../' * (module.count('/') + 3) + module
+                            lines[i] = '\tworktree = %s\n' % newpath
+                            break
+                if newpath:
+                    with open(repo_conf2, 'w') as f:
+                        for line in lines:
+                            f.write(line)
+
+    def update_submodules(self, ud, d):
+        # We have to convert bare -> full repo, do the submodule bit, then convert back
+        tmpclonedir = ud.clonedir + ".tmp"
+        gitdir = tmpclonedir + os.sep + ".git"
+        bb.utils.remove(tmpclonedir, True)
+        os.mkdir(tmpclonedir)
+        os.rename(ud.clonedir, gitdir)
+        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
+        os.chdir(tmpclonedir)
+        runfetchcmd(ud.basecmd + " reset --hard", d)
+        runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
+        runfetchcmd(ud.basecmd + " submodule init", d)
+        runfetchcmd(ud.basecmd + " submodule update", d)
+        self._set_relative_paths(tmpclonedir)
+        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d)
+        os.rename(gitdir, ud.clonedir)
+        bb.utils.remove(tmpclonedir, True)
+
+    def download(self, ud, d):
+        Git.download(self, ud, d)
+
+        os.chdir(ud.clonedir)
+        submodules = self.uses_submodules(ud, d)
+        if submodules:
+            self.update_submodules(ud, d)
+
+    def unpack(self, ud, destdir, d):
+        Git.unpack(self, ud, destdir, d)
+
+        os.chdir(ud.destdir)
+        submodules = self.uses_submodules(ud, d)
+        if submodules:
+            runfetchcmd("cp -r " + ud.clonedir + "/modules " + ud.destdir + "/.git/", d)
+            runfetchcmd(ud.basecmd + " submodule init", d)
+            runfetchcmd(ud.basecmd + " submodule update", d)
+
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
new file mode 100644
index 0000000..d978630
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -0,0 +1,275 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementation for mercurial DRCS (hg).
+
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+# Copyright (C) 2004        Marcin Juszkiewicz
+# Copyright (C) 2007        Robert Schuster
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import sys
+import logging
+import bb
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import MissingParameterError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Hg(FetchMethod):
+    """Class to fetch from mercurial repositories"""
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with mercurial.
+        """
+        return ud.type in ['hg']
+
+    def supports_checksum(self, urldata):
+        """
+        Don't require checksums for local archives created from
+        repository checkouts.
+        """ 
+        return False
+
+    def urldata_init(self, ud, d):
+        """
+        Initialise hg-specific variables within the url data.
+        """
+        if "module" not in ud.parm:
+            raise MissingParameterError('module', ud.url)
+
+        ud.module = ud.parm["module"]
+
+        if 'protocol' in ud.parm:
+            ud.proto = ud.parm['protocol']
+        elif not ud.host:
+            ud.proto = 'file'
+        else:
+            ud.proto = "hg"
+
+        ud.setup_revisons(d)
+
+        if 'rev' in ud.parm:
+            ud.revision = ud.parm['rev']
+        elif not ud.revision:
+            ud.revision = self.latest_revision(ud, d)
+
+        # Create paths to mercurial checkouts
+        hgsrcname = '%s_%s_%s' % (ud.module.replace('/', '.'), \
+                            ud.host, ud.path.replace('/', '.'))
+        ud.mirrortarball = 'hg_%s.tar.gz' % hgsrcname
+        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
+
+        hgdir = d.getVar("HGDIR", True) or (d.getVar("DL_DIR", True) + "/hg/")
+        ud.pkgdir = os.path.join(hgdir, hgsrcname)
+        ud.moddir = os.path.join(ud.pkgdir, ud.module)
+        ud.localfile = ud.moddir
+        ud.basecmd = data.getVar("FETCHCMD_hg", d, True) or "/usr/bin/env hg"
+
+        ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS", True)
+
+    def need_update(self, ud, d):
+        revTag = ud.parm.get('rev', 'tip')
+        if revTag == "tip":
+            return True
+        if not os.path.exists(ud.localpath):
+            return True
+        return False
+
+    def try_premirror(self, ud, d):
+        # If we don't do this, updating an existing checkout with only premirrors
+        # is not possible
+        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
+            return True
+        if os.path.exists(ud.moddir):
+            return False
+        return True
+
+    def _buildhgcommand(self, ud, d, command):
+        """
+        Build up an hg commandline based on ud
+        command is "fetch", "update", "pull", "info"
+        """
+
+        proto = ud.parm.get('protocol', 'http')
+
+        host = ud.host
+        if proto == "file":
+            host = "/"
+            ud.host = "localhost"
+
+        if not ud.user:
+            hgroot = host + ud.path
+        else:
+            if ud.pswd:
+                hgroot = ud.user + ":" + ud.pswd + "@" + host + ud.path
+            else:
+                hgroot = ud.user + "@" + host + ud.path
+
+        if command == "info":
+            return "%s identify -i %s://%s/%s" % (ud.basecmd, proto, hgroot, ud.module)
+
+        options = []
+
+        # Don't specify revision for the fetch; clone the entire repo.
+        # This avoids an issue if the specified revision is a tag, because
+        # the tag actually exists in the specified revision + 1, so it won't
+        # be available when used in any successive commands.
+        if ud.revision and command != "fetch":
+            options.append("-r %s" % ud.revision)
+
+        if command == "fetch":
+            if ud.user and ud.pswd:
+                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
+            else:
+                cmd = "%s clone %s %s://%s/%s %s" % (ud.basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
+        elif command == "pull":
+            # do not pass options list; limiting pull to rev causes the local
+            # repo not to contain it and immediately following "update" command
+            # will crash
+            if ud.user and ud.pswd:
+                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (ud.basecmd, ud.user, ud.pswd, proto)
+            else:
+                cmd = "%s pull" % (ud.basecmd)
+        elif command == "update":
+            if ud.user and ud.pswd:
+                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options))
+            else:
+                cmd = "%s update -C %s" % (ud.basecmd, " ".join(options))
+        else:
+            raise FetchError("Invalid hg command %s" % command, ud.url)
+
+        return cmd
+
+    def download(self, ud, d):
+        """Fetch url"""
+
+        ud.repochanged = not os.path.exists(ud.fullmirror)
+
+        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+
+        # If the checkout doesn't exist and the mirror tarball does, extract it
+        if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
+            bb.utils.mkdirhier(ud.pkgdir)
+            os.chdir(ud.pkgdir)
+            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
+
+        if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
+            # Found the source; check whether we need to pull
+            updatecmd = self._buildhgcommand(ud, d, "update")
+            os.chdir(ud.moddir)
+            logger.debug(1, "Running %s", updatecmd)
+            try:
+                runfetchcmd(updatecmd, d)
+            except bb.fetch2.FetchError:
+                # Running pull in the repo
+                pullcmd = self._buildhgcommand(ud, d, "pull")
+                logger.info("Pulling " + ud.url)
+                # update sources there
+                os.chdir(ud.moddir)
+                logger.debug(1, "Running %s", pullcmd)
+                bb.fetch2.check_network_access(d, pullcmd, ud.url)
+                runfetchcmd(pullcmd, d)
+                ud.repochanged = True
+
+        # No source found, clone it.
+        if not os.path.exists(ud.moddir):
+            fetchcmd = self._buildhgcommand(ud, d, "fetch")
+            logger.info("Fetch " + ud.url)
+            # check out sources there
+            bb.utils.mkdirhier(ud.pkgdir)
+            os.chdir(ud.pkgdir)
+            logger.debug(1, "Running %s", fetchcmd)
+            bb.fetch2.check_network_access(d, fetchcmd, ud.url)
+            runfetchcmd(fetchcmd, d)
+
+        # Even when we clone (fetch), we still need to update as hg's clone
+        # won't check out the specified revision if it's on a branch
+        updatecmd = self._buildhgcommand(ud, d, "update")
+        os.chdir(ud.moddir)
+        logger.debug(1, "Running %s", updatecmd)
+        runfetchcmd(updatecmd, d)
+
+    def clean(self, ud, d):
+        """ Clean the hg dir """
+
+        bb.utils.remove(ud.localpath, True)
+        bb.utils.remove(ud.fullmirror)
+        bb.utils.remove(ud.fullmirror + ".done")
+
+    def supports_srcrev(self):
+        return True
+
+    def _latest_revision(self, ud, d, name):
+        """
+        Compute tip revision for the url
+        """
+        bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"))
+        output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
+        return output.strip()
+
+    def _build_revision(self, ud, d, name):
+        return ud.revision
+
+    def _revision_key(self, ud, d, name):
+        """
+        Return a unique key for the url
+        """
+        return "hg:" + ud.moddir
+
+    def build_mirror_data(self, ud, d):
+        # Generate a mirror tarball if needed
+        if ud.write_tarballs == "1" and (ud.repochanged or not os.path.exists(ud.fullmirror)):
+            # it's possible that this symlink points to a read-only filesystem with PREMIRROR
+            if os.path.islink(ud.fullmirror):
+                os.unlink(ud.fullmirror)
+
+            os.chdir(ud.pkgdir)
+            logger.info("Creating tarball of hg repository")
+            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d)
+            runfetchcmd("touch %s.done" % (ud.fullmirror), d)
+
+    def localpath(self, ud, d):
+        return ud.pkgdir
+
+    def unpack(self, ud, destdir, d):
+        """
+        Make a local clone or export for the url
+        """
+
+        revflag = "-r %s" % ud.revision
+        subdir = ud.parm.get("destsuffix", ud.module)
+        codir = "%s/%s" % (destdir, subdir)
+
+        scmdata = ud.parm.get("scmdata", "")
+        if scmdata != "nokeep":
+            if not os.access(os.path.join(codir, '.hg'), os.R_OK):
+                logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
+                runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
+            logger.debug(2, "Unpack: updating source in '" + codir + "'")
+            os.chdir(codir)
+            runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d)
+            runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d)
+        else:
+            logger.debug(2, "Unpack: extracting source to '" + codir + "'")
+            os.chdir(ud.moddir)
+            runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d)
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
new file mode 100644
index 0000000..2d921f7
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -0,0 +1,128 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import urllib
+import bb
+import bb.utils
+from   bb import data
+from   bb.fetch2 import FetchMethod, FetchError
+from   bb.fetch2 import logger
+
+class Local(FetchMethod):
+    def supports(self, urldata, d):
+        """
+        Check to see if a given url represents a local fetch.
+        """
+        return urldata.type in ['file']
+
+    def urldata_init(self, ud, d):
+        # We don't set localfile as for this fetcher the file is already local!
+        ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0])
+        ud.basename = os.path.basename(ud.decodedurl)
+        ud.basepath = ud.decodedurl
+        return
+
+    def localpath(self, urldata, d):
+        """
+        Return the local filename of a given url assuming a successful fetch.
+        """
+        return self.localpaths(urldata, d)[-1]
+
+    def localpaths(self, urldata, d):
+        """
+        Return the locations searched for a given url, the last entry being
+        the preferred local path.
+        """
+        searched = []
+        path = urldata.decodedurl
+        newpath = path
+        if path[0] == "/":
+            return [path]
+        filespath = data.getVar('FILESPATH', d, True)
+        if filespath:
+            logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
+            newpath, hist = bb.utils.which(filespath, path, history=True)
+            searched.extend(hist)
+        if not newpath:
+            filesdir = data.getVar('FILESDIR', d, True)
+            if filesdir:
+                logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
+                newpath = os.path.join(filesdir, path)
+                searched.append(newpath)
+        if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
+            # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
+            newpath, hist = bb.utils.which(filespath, ".", history=True)
+            searched.extend(hist)
+            logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
+            return searched
+        if not os.path.exists(newpath):
+            dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
+            logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
+            bb.utils.mkdirhier(os.path.dirname(dldirfile))
+            searched.append(dldirfile)
+            return searched
+        return searched
+
+    def need_update(self, ud, d):
+        if ud.url.find("*") != -1:
+            return False
+        if os.path.exists(ud.localpath):
+            return False
+        return True
+
+    def download(self, urldata, d):
+        """Fetch urls (no-op for Local method)"""
+        # no need to fetch local files, we'll deal with them in place.
+        if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
+            locations = []
+            filespath = data.getVar('FILESPATH', d, True)
+            if filespath:
+                locations = filespath.split(":")
+            filesdir = data.getVar('FILESDIR', d, True)
+            if filesdir:
+                locations.append(filesdir)
+            locations.append(d.getVar("DL_DIR", True))
+
+            msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n    " + "\n    ".join(locations)
+            raise FetchError(msg)
+
+        return True
+
+    def checkstatus(self, fetch, urldata, d):
+        """
+        Check the status of the url
+        """
+        if urldata.localpath.find("*") != -1:
+            logger.info("URL %s looks like a glob and was therefore not checked.", urldata.url)
+            return True
+        if os.path.exists(urldata.localpath):
+            return True
+        return False
+
+    def clean(self, urldata, d):
+        return
+
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
new file mode 100644
index 0000000..3d87796
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -0,0 +1,135 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+Bitbake "Fetch" implementation for osc (Opensuse build service client).
+Based on the svn "Fetch" implementation.
+
+"""
+
+import  os
+import  sys
+import logging
+import  bb
+from    bb       import data
+from    bb.fetch2 import FetchMethod
+from    bb.fetch2 import FetchError
+from    bb.fetch2 import MissingParameterError
+from    bb.fetch2 import runfetchcmd
+from    bb.fetch2 import logger
+
+class Osc(FetchMethod):
+    """Class to fetch a module or modules from Opensuse build server
+       repositories."""
+
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with osc.
+        """
+        return ud.type in ['osc']
+
+    def urldata_init(self, ud, d):
+        if not "module" in ud.parm:
+            raise MissingParameterError('module', ud.url)
+
+        ud.module = ud.parm["module"]
+
+        # Create paths to osc checkouts
+        relpath = self._strip_leading_slashes(ud.path)
+        ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
+        ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
+
+        if 'rev' in ud.parm:
+            ud.revision = ud.parm['rev']
+        else:
+            pv = data.getVar("PV", d, 0)
+            rev = bb.fetch2.srcrev_internal_helper(ud, d)
+            if rev and rev != True:
+                ud.revision = rev
+            else:
+                ud.revision = ""
+
+        ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
+
+    def _buildosccommand(self, ud, d, command):
+        """
+        Build up an osc commandline based on ud
+        command is "fetch" or "update"
+        """
+
+        basecmd = data.expand('${FETCHCMD_osc}', d)
+
+        proto = ud.parm.get('protocol', 'osc')
+
+        options = []
+
+        config = "-c %s" % self.generate_config(ud, d)
+
+        if ud.revision:
+            options.append("-r %s" % ud.revision)
+
+        coroot = self._strip_leading_slashes(ud.path)
+
+        if command == "fetch":
+            osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
+        elif command == "update":
+            osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
+        else:
+            raise FetchError("Invalid osc command %s" % command, ud.url)
+
+        return osccmd
+
+    def download(self, ud, d):
+        """
+        Fetch url
+        """
+
+        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+
+        if os.access(ud.moddir, os.R_OK):
+            oscupdatecmd = self._buildosccommand(ud, d, "update")
+            logger.info("Update "+ ud.url)
+            # update sources there
+            os.chdir(ud.moddir)
+            logger.debug(1, "Running %s", oscupdatecmd)
+            bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
+            runfetchcmd(oscupdatecmd, d)
+        else:
+            oscfetchcmd = self._buildosccommand(ud, d, "fetch")
+            logger.info("Fetch " + ud.url)
+            # check out sources there
+            bb.utils.mkdirhier(ud.pkgdir)
+            os.chdir(ud.pkgdir)
+            logger.debug(1, "Running %s", oscfetchcmd)
+            bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
+            runfetchcmd(oscfetchcmd, d)
+
+        os.chdir(ud.pkgdir + ud.path)
+        # tar them up to a defined filename
+        runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath])
+
+    def supports_srcrev(self):
+        return False
+
+    def generate_config(self, ud, d):
+        """
+        Generate a .oscrc to be used for this run.
+        """
+
+        config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
+        if os.path.exists(config_path):
+            os.remove(config_path)
+
+        with open(config_path, 'w') as f:
+            f.write("[general]\n")
+            f.write("apisrv = %s\n" % ud.host)
+            f.write("scheme = http\n")
+            f.write("su-wrapper = su -c\n")
+            f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
+            f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
+            f.write("extra-pkgs = gzip\n")
+            f.write("\n")
+            f.write("[%s]\n" % ud.host)
+            f.write("user = %s\n" % ud.parm["user"])
+            f.write("pass = %s\n" % ud.parm["pswd"])
+
+        return config_path
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
new file mode 100644
index 0000000..3a10c7c
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -0,0 +1,187 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+from future_builtins import zip
+import os
+import subprocess
+import logging
+import bb
+from   bb import data
+from   bb.fetch2 import FetchMethod
+from   bb.fetch2 import FetchError
+from   bb.fetch2 import logger
+from   bb.fetch2 import runfetchcmd
+
+class Perforce(FetchMethod):
+    def supports(self, ud, d):
+        return ud.type in ['p4']
+
+    def doparse(url, d):
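+        # Expected URL forms (illustrative):
+        #   p4://user:pswd:host:port@depot/path;revision=NNN
+        #   p4://depot/path   (host and port are taken from P4PORT)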
+        parm = {}
+        path = url.split("://")[1]
+        delim = path.find("@");
+        if delim != -1:
+            (user, pswd, host, port) = path.split('@')[0].split(":")
+            path = path.split('@')[1]
+        else:
+            (host, port) = d.getVar('P4PORT', False).split(':')
+            user = ""
+            pswd = ""
+
+        if path.find(";") != -1:
+            keys=[]
+            values=[]
+            plist = path.split(';')
+            for item in plist:
+                if item.count('='):
+                    (key, value) = item.split('=')
+                    keys.append(key)
+                    values.append(value)
+
+            parm = dict(zip(keys, values))
+        path = "//" + path.split(';')[0]
+        host += ":%s" % (port)
+        parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
+
+        return host, path, user, pswd, parm
+    doparse = staticmethod(doparse)
+
+    def getcset(d, depot, host, user, pswd, parm):
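+        # Ask the server for the newest changeset affecting the depot path,
+        # e.g. (illustrative) "p4 -u user -P pswd -p host:1666 changes -m 1
+        # //depot/path@2014/01/01"; the "Change NNN on ..." reply yields NNN.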
+        p4opt = ""
+        if "cset" in parm:
+            return parm["cset"];
+        if user:
+            p4opt += " -u %s" % (user)
+        if pswd:
+            p4opt += " -P %s" % (pswd)
+        if host:
+            p4opt += " -p %s" % (host)
+
+        p4date = d.getVar("P4DATE", True)
+        if "revision" in parm:
+            depot += "#%s" % (parm["revision"])
+        elif "label" in parm:
+            depot += "@%s" % (parm["label"])
+        elif p4date:
+            depot += "@%s" % (p4date)
+
+        p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
+        logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
+        p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
+        cset = p4file.strip()
+        logger.debug(1, "READ %s", cset)
+        if not cset:
+            return -1
+
+        return cset.split(' ')[1]
+    getcset = staticmethod(getcset)
+
+    def urldata_init(self, ud, d):
+        (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)
+
+        base_path = path.replace('/...', '')
+        base_path = self._strip_leading_slashes(base_path)
+
+        if "label" in parm:
+            version = parm["label"]
+        else:
+            version = Perforce.getcset(d, path, host, user, pswd, parm)
+
+        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base_path.replace('/', '.'), version), d)
+
+    def download(self, ud, d):
+        """
+        Fetch urls
+        """
+
+        (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)
+
+        if depot.find('/...') != -1:
+            path = depot[:depot.find('/...')]
+        else:
+            path = depot[:depot.rfind('/')]
+
+        module = parm.get('module', os.path.basename(path))
+
+        # Get the p4 command
+        p4opt = ""
+        if user:
+            p4opt += " -u %s" % (user)
+
+        if pswd:
+            p4opt += " -P %s" % (pswd)
+
+        if host:
+            p4opt += " -p %s" % (host)
+
+        p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
+
+        # create temp directory
+        logger.debug(2, "Fetch: creating temporary directory")
+        bb.utils.mkdirhier(d.expand('${WORKDIR}'))
+        mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
+        tmpfile, errors = bb.process.run(mktemp)
+        tmpfile = tmpfile.strip()
+        if not tmpfile:
+            raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)
+
+        if "label" in parm:
+            depot = "%s@%s" % (depot, parm["label"])
+        else:
+            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
+            depot = "%s@%s" % (depot, cset)
+
+        os.chdir(tmpfile)
+        logger.info("Fetch " + ud.url)
+        logger.info("%s%s files %s", p4cmd, p4opt, depot)
+        p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
+        p4file = [f.rstrip() for f in p4file.splitlines()]
+
+        if not p4file:
+            raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)
+
+        count = 0
+
+        for fileline in p4file:
+            fields = fileline.split()
+
+            if fields[2] == "delete":
+                continue
+
+            dest = fields[0][len(path)+1:]
+            where = dest.find("#")
+
+            subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], fields[0]), shell=True)
+            count = count + 1
+
+        if count == 0:
+            logger.error("Fetch: no files gathered from the P4 fetch")
+            raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)
+
+        runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
+        # cleanup
+        bb.utils.prunedir(tmpfile)
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py
new file mode 100644
index 0000000..21678eb
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/repo.py
@@ -0,0 +1,98 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake "Fetch" repo (git) implementation
+
+"""
+
+# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
+#
+# Based on git.py which is:
+#Copyright (C) 2005 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import bb
+from   bb    import data
+from   bb.fetch2 import FetchMethod
+from   bb.fetch2 import runfetchcmd
+from   bb.fetch2 import logger
+
+class Repo(FetchMethod):
+    """Class to fetch a module or modules from repo (git) repositories"""
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with repo.
+        """
+        return ud.type in ["repo"]
+
+    def urldata_init(self, ud, d):
+        """
+        We don"t care about the git rev of the manifests repository, but
+        we do care about the manifest to use.  The default is "default".
+        We also care about the branch or tag to be used.  The default is
+        "master".
+        """
+
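+        # e.g. (illustrative):
+        #   SRC_URI = "repo://android.googlesource.com/platform/manifest;protocol=https;branch=master"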
+        ud.proto = ud.parm.get('protocol', 'git')
+        ud.branch = ud.parm.get('branch', 'master')
+        ud.manifest = ud.parm.get('manifest', 'default.xml')
+        if not ud.manifest.endswith('.xml'):
+            ud.manifest += '.xml'
+
+        ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
+
+    def download(self, ud, d):
+        """Fetch url"""
+
+        if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
+            logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
+            return
+
+        gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
+        repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
+        codir = os.path.join(repodir, gitsrcname, ud.manifest)
+
+        if ud.user:
+            username = ud.user + "@"
+        else:
+            username = ""
+
+        bb.utils.mkdirhier(os.path.join(codir, "repo"))
+        os.chdir(os.path.join(codir, "repo"))
+        if not os.path.exists(os.path.join(codir, "repo", ".repo")):
+            bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
+            runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
+
+        bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
+        runfetchcmd("repo sync", d)
+        os.chdir(codir)
+
+        scmdata = ud.parm.get("scmdata", "")
+        if scmdata == "keep":
+            tar_flags = ""
+        else:
+            tar_flags = "--exclude '.repo' --exclude '.git'"
+
+        # Create a cache
+        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
+
+    def supports_srcrev(self):
+        return False
+
+    def _build_revision(self, ud, d):
+        return ud.manifest
+
+    def _want_sortable_revision(self, ud, d):
+        return False
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py
new file mode 100644
index 0000000..cb2f753
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/sftp.py
@@ -0,0 +1,129 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake SFTP Fetch implementation
+
+Class for fetching files via SFTP. It tries to adhere to the (now
+expired) IETF Internet Draft for "Uniform Resource Identifier (URI)
+Scheme for Secure File Transfer Protocol (SFTP) and Secure Shell
+(SSH)" (SECSH URI).
+
+It uses SFTP (so as to adhere to the SECSH URI specification). It only
+supports key-based authentication, not passwords. This class, unlike
+the SSH fetcher, does not support fetching a directory tree from the
+remote.
+
+  http://tools.ietf.org/html/draft-ietf-secsh-scp-sftp-ssh-uri-04
+  https://www.iana.org/assignments/uri-schemes/prov/sftp
+  https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13
+
+Please note that '/' is used as the host path separator, and not ":"
+as you may be used to from the scp/sftp commands. You can use a
+~ (tilde) to specify a path relative to your home directory.
+(The /~user/ syntax, for specifying a path relative to another
+user's home directory, is not supported.) Note that the tilde must
+still follow the host path separator ("/"). See the examples below.
+
+Example SRC_URIs:
+
+SRC_URI = "sftp://host.example.com/dir/path.file.txt"
+
+A path relative to your home directory.
+
+SRC_URI = "sftp://host.example.com/~/dir/path.file.txt"
+
+You can also specify a username (specifying a password in the
+URI is not supported; use SSH keys to authenticate):
+
+SRC_URI = "sftp://user@host.example.com/dir/path.file.txt"
+
+"""
+
+# Copyright (C) 2013, Olof Johansson <olof.johansson@axis.com>
+#
+# Based in part on bb.fetch2.wget:
+#    Copyright (C) 2003, 2004  Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import bb
+import urllib
+import commands
+from bb import data
+from bb.fetch2 import URI
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import runfetchcmd
+
+
+class SFTP(FetchMethod):
+    """Class to fetch urls via 'sftp'"""
+
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with sftp.
+        """
+        return ud.type in ['sftp']
+
+    def recommends_checksum(self, urldata):
+        return True
+
+    def urldata_init(self, ud, d):
+        if 'protocol' in ud.parm and ud.parm['protocol'] == 'git':
+            raise bb.fetch2.ParameterError(
+                "Invalid protocol - if you wish to fetch from a " +
+                "git repository using ssh, you need to use the " +
+                "git:// prefix with protocol=ssh", ud.url)
+
+        if 'downloadfilename' in ud.parm:
+            ud.basename = ud.parm['downloadfilename']
+        else:
+            ud.basename = os.path.basename(ud.path)
+
+        ud.localfile = data.expand(urllib.unquote(ud.basename), d)
+
+    def download(self, ud, d):
+        """Fetch urls"""
+
+        urlo = URI(ud.url)
+        basecmd = 'sftp -oBatchMode=yes'
+        port = ''
+        if urlo.port:
+            port = '-P %d' % urlo.port
+            urlo.port = None
+
+        dldir = data.getVar('DL_DIR', d, True)
+        lpath = os.path.join(dldir, ud.localfile)
+
+        user = ''
+        if urlo.userinfo:
+            user = urlo.userinfo + '@'
+
+        path = urlo.path
+
+        # Support URIs relative to the user's home directory, with
+        # the tilde syntax. (E.g. <sftp://example.com/~/foo.diff>).
+        if path[:3] == '/~/':
+            path = path[3:]
+
+        remote = '%s%s:%s' % (user, urlo.hostname, path)
+
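+        # The assembled command looks like (illustrative):
+        #   sftp -oBatchMode=yes -P 2222 'user@host.example.com:dir/file' '/dl-dir/file'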
+        cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote),
+                               commands.mkarg(lpath))
+
+        bb.fetch2.check_network_access(d, cmd, ud.url)
+        runfetchcmd(cmd, d)
+        return True
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
new file mode 100644
index 0000000..635578a
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -0,0 +1,128 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+'''
+BitBake 'Fetch' implementations
+
+This implementation is for Secure Shell (SSH), and attempts to comply with the
+IETF secsh internet draft:
+    http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
+
+    Currently does not support the sftp parameters, as this uses scp.
+    Also does not support the 'fingerprint' connection parameter.
+
+    Please note that '/' is used as the host/path separator, not ':' as you
+    may be used to; '~' can be used to specify the user's HOME, but again
+    only after the '/'.
+
+    Example SRC_URI:
+    SRC_URI = "ssh://user@host.example.com/dir/path/file.txt"
+    SRC_URI = "ssh://user@host.example.com/~/file.txt"
+'''
+
+# Copyright (C) 2006  OpenedHand Ltd.
+#
+#
+# Based in part on svk.py:
+#    Copyright (C) 2006 Holger Hans Peter Freyther
+#    Based on svn.py:
+#        Copyright (C) 2003, 2004  Chris Larson
+#        Based on functions from the base bb module:
+#            Copyright 2003 Holger Schurig
+#
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import re, os
+import bb
+from   bb import data
+from   bb.fetch2 import FetchMethod
+from   bb.fetch2 import FetchError
+from   bb.fetch2 import logger
+from   bb.fetch2 import runfetchcmd
+
+
+__pattern__ = re.compile(r'''
+ \s*                 # Skip leading whitespace
+ ssh://              # scheme
+ (                   # Optional username/password block
+  (?P<user>\S+)      # username
+  (:(?P<pass>\S+))?  # colon followed by the password (optional)
+ )?
+ (?P<cparam>(;[^;]+)*)?  # connection parameters block (optional)
+ @
+ (?P<host>\S+?)          # non-greedy match of the host
+ (:(?P<port>[0-9]+))?    # colon followed by the port (optional)
+ /
+ (?P<path>[^;]+)         # path on the remote system, may be absolute or relative,
+                         # and may include the use of '~' to reference the remote home
+                         # directory
+ (?P<sparam>(;[^;]+)*)?  # parameters block (optional)
+ $
+''', re.VERBOSE)
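+
+# For example (illustrative), "ssh://user@host.example.com:2222/~/file.txt"
+# matches with user='user', host='host.example.com', port='2222' and
+# path='~/file.txt'.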
+
+class SSH(FetchMethod):
+    '''Class to fetch a module or modules via Secure Shell'''
+
+    def supports(self, urldata, d):
+        return __pattern__.match(urldata.url) is not None
+
+    def supports_checksum(self, urldata):
+        return False
+
+    def urldata_init(self, urldata, d):
+        if 'protocol' in urldata.parm and urldata.parm['protocol'] == 'git':
+            raise bb.fetch2.ParameterError(
+                "Invalid protocol - if you wish to fetch from a git " +
+                "repository using ssh, you need to use " +
+                "git:// prefix with protocol=ssh", urldata.url)
+        m = __pattern__.match(urldata.url)
+        path = m.group('path')
+        host = m.group('host')
+        urldata.localpath = os.path.join(d.getVar('DL_DIR', True),
+                os.path.basename(os.path.normpath(path)))
+
+    def download(self, urldata, d):
+        dldir = d.getVar('DL_DIR', True)
+
+        m = __pattern__.match(urldata.url)
+        path = m.group('path')
+        host = m.group('host')
+        port = m.group('port')
+        user = m.group('user')
+        password = m.group('pass')
+
+        if port:
+            portarg = '-P %s' % port
+        else:
+            portarg = ''
+
+        if user:
+            fr = user
+            if password:
+                fr += ':%s' % password
+            fr += '@%s' % host
+        else:
+            fr = host
+        fr += ':%s' % path
+
+        import commands
+        cmd = 'scp -B -r %s %s %s/' % (
+            portarg,
+            commands.mkarg(fr),
+            commands.mkarg(dldir)
+        )
+
+        bb.fetch2.check_network_access(d, cmd, urldata.url)
+
+        runfetchcmd(cmd, d)
+
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
new file mode 100644
index 0000000..1733c2b
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -0,0 +1,192 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementation for svn.
+
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+# Copyright (C) 2004        Marcin Juszkiewicz
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import sys
+import logging
+import bb
+import re
+from   bb import data
+from   bb.fetch2 import FetchMethod
+from   bb.fetch2 import FetchError
+from   bb.fetch2 import MissingParameterError
+from   bb.fetch2 import runfetchcmd
+from   bb.fetch2 import logger
+
+class Svn(FetchMethod):
+    """Class to fetch a module or modules from svn repositories"""
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with svn.
+        """
+        return ud.type in ['svn']
+
+    def urldata_init(self, ud, d):
+        """
+        init svn specific variable within url data
+        """
+        if not "module" in ud.parm:
+            raise MissingParameterError('module', ud.url)
+
+        ud.basecmd = d.getVar('FETCHCMD_svn', True)
+
+        ud.module = ud.parm["module"]
+
+        # Create paths to svn checkouts
+        relpath = self._strip_leading_slashes(ud.path)
+        ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
+        ud.moddir = os.path.join(ud.pkgdir, ud.module)
+
+        ud.setup_revisons(d)
+
+        if 'rev' in ud.parm:
+            ud.revision = ud.parm['rev']
+
+        ud.localfile = data.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
+
+    def _buildsvncommand(self, ud, d, command):
+        """
+        Build up an svn commandline based on ud
+        command is "fetch", "update", "info", "log1"
+        """
+
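+        # e.g. (illustrative) a "fetch" at rev 1234 builds something like:
+        #   svn co --no-auth-cache -r 1234 svn://host/path/module@1234 module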
+        proto = ud.parm.get('protocol', 'svn')
+
+        svn_rsh = None
+        if proto == "svn+ssh" and "rsh" in ud.parm:
+            svn_rsh = ud.parm["rsh"]
+
+        svnroot = ud.host + ud.path
+
+        options = []
+
+        options.append("--no-auth-cache")
+
+        if ud.user:
+            options.append("--username %s" % ud.user)
+
+        if ud.pswd:
+            options.append("--password %s" % ud.pswd)
+
+        if command == "info":
+            svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
+        elif command == "log1":
+            svncmd = "%s log --limit 1 %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
+        else:
+            suffix = ""
+            if ud.revision:
+                options.append("-r %s" % ud.revision)
+                suffix = "@%s" % (ud.revision)
+
+            if command == "fetch":
+                transportuser = ud.parm.get("transportuser", "")
+                svncmd = "%s co %s %s://%s%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, transportuser, svnroot, ud.module, suffix, ud.module)
+            elif command == "update":
+                svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
+            else:
+                raise FetchError("Invalid svn command %s" % command, ud.url)
+
+        if svn_rsh:
+            svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
+
+        return svncmd
+
+    def download(self, ud, d):
+        """Fetch url"""
+
+        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+
+        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
+            svnupdatecmd = self._buildsvncommand(ud, d, "update")
+            logger.info("Update " + ud.url)
+            # update sources there
+            os.chdir(ud.moddir)
+            # We need to attempt to run svn upgrade first in case it's an older working copy format
+            try:
+                runfetchcmd(ud.basecmd + " upgrade", d)
+            except FetchError:
+                pass
+            logger.debug(1, "Running %s", svnupdatecmd)
+            bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
+            runfetchcmd(svnupdatecmd, d)
+        else:
+            svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
+            logger.info("Fetch " + ud.url)
+            # check out sources there
+            bb.utils.mkdirhier(ud.pkgdir)
+            os.chdir(ud.pkgdir)
+            logger.debug(1, "Running %s", svnfetchcmd)
+            bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
+            runfetchcmd(svnfetchcmd, d)
+
+        scmdata = ud.parm.get("scmdata", "")
+        if scmdata == "keep":
+            tar_flags = ""
+        else:
+            tar_flags = "--exclude '.svn'"
+
+        os.chdir(ud.pkgdir)
+        # tar them up to a defined filename
+        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
+
+    def clean(self, ud, d):
+        """ Clean SVN specific files and dirs """
+
+        bb.utils.remove(ud.localpath)
+        bb.utils.remove(ud.moddir, True)
+
+    def supports_srcrev(self):
+        return True
+
+    def _revision_key(self, ud, d, name):
+        """
+        Return a unique key for the url
+        """
+        return "svn:" + ud.moddir
+
+    def _latest_revision(self, ud, d, name):
+        """
+        Return the latest upstream revision number
+        """
+        bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "log1"))
+
+        output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "log1"), d, True)
+
+        # skip the first line, as per output of svn log
+        # then we expect the revision on the 2nd line
+        revision = re.search('^r([0-9]*)', output.splitlines()[1]).group(1)
+
+        return revision
+
+    def sortable_revision(self, ud, d, name):
+        """
+        Return a sortable revision number which in our case is the revision number
+        """
+
+        return False, self._build_revision(ud, d)
+
+    def _build_revision(self, ud, d):
+        return ud.revision
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
new file mode 100644
index 0000000..bd2a897
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -0,0 +1,541 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import re
+import tempfile
+import subprocess
+import os
+import logging
+import bb
+import urllib
+from   bb import data
+from   bb.fetch2 import FetchMethod
+from   bb.fetch2 import FetchError
+from   bb.fetch2 import logger
+from   bb.fetch2 import runfetchcmd
+from   bs4 import BeautifulSoup
+
+class Wget(FetchMethod):
+    """Class to fetch urls via 'wget'"""
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with wget.
+        """
+        return ud.type in ['http', 'https', 'ftp']
+
+    def recommends_checksum(self, urldata):
+        return True
+
+    def urldata_init(self, ud, d):
+        if 'protocol' in ud.parm:
+            if ud.parm['protocol'] == 'git':
+                raise bb.fetch2.ParameterError("Invalid protocol - if you wish to fetch from a git repository using http, you need to instead use the git:// prefix with protocol=http", ud.url)
+
+        if 'downloadfilename' in ud.parm:
+            ud.basename = ud.parm['downloadfilename']
+        else:
+            ud.basename = os.path.basename(ud.path)
+
+        ud.localfile = data.expand(urllib.unquote(ud.basename), d)
+
+        self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"
+
+    def _runwget(self, ud, d, command, quiet):
+
+        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
+        bb.fetch2.check_network_access(d, command)
+        runfetchcmd(command, d, quiet)
+
+    def download(self, ud, d):
+        """Fetch urls"""
+
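+        # The assembled command looks like (illustrative):
+        #   wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate \
+        #       -P ${DL_DIR} 'http://example.com/file.tar.gz'
+        # with "-c" added to resume a partially downloaded file.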
+        fetchcmd = self.basecmd
+
+        if 'downloadfilename' in ud.parm:
+            dldir = d.getVar("DL_DIR", True)
+            bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
+            fetchcmd += " -O " + dldir + os.sep + ud.localfile
+
+        uri = ud.url.split(";")[0]
+        if os.path.exists(ud.localpath):
+            # file exists, but we didn't complete it; try again
+            fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
+        else:
+            fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)
+
+        self._runwget(ud, d, fetchcmd, False)
+
+        # Sanity check since wget can pretend it succeeded when it didn't
+        # Also, this used to happen if sourceforge sent us to the mirror page
+        if not os.path.exists(ud.localpath):
+            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
+
+        if os.path.getsize(ud.localpath) == 0:
+            os.remove(ud.localpath)
+            raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
+
+        return True
+
+    def checkstatus(self, fetch, ud, d):
+        import urllib2, socket, httplib
+        from urllib import addinfourl
+        from bb.fetch2 import FetchConnectionCache
+
+        class HTTPConnectionCache(httplib.HTTPConnection):
+            if fetch.connection_cache:
+                def connect(self):
+                    """Connect to the host and port specified in __init__."""
+
+                    sock = fetch.connection_cache.get_connection(self.host, self.port)
+                    if sock:
+                        self.sock = sock
+                    else:
+                        self.sock = socket.create_connection((self.host, self.port),
+                                    self.timeout, self.source_address)
+                        fetch.connection_cache.add_connection(self.host, self.port, self.sock)
+
+                    if self._tunnel_host:
+                        self._tunnel()
+
+        class CacheHTTPHandler(urllib2.HTTPHandler):
+            def http_open(self, req):
+                return self.do_open(HTTPConnectionCache, req)
+
+            def do_open(self, http_class, req):
+                """Return an addinfourl object for the request, using http_class.
+
+                http_class must implement the HTTPConnection API from httplib.
+                The addinfourl return value is a file-like object.  It also
+                has methods and attributes including:
+                    - info(): return a mimetools.Message object for the headers
+                    - geturl(): return the original request URL
+                    - code: HTTP status code
+                """
+                host = req.get_host()
+                if not host:
+                    raise urllib2.URLError('no host given')
+
+                h = http_class(host, timeout=req.timeout) # will parse host:port
+                h.set_debuglevel(self._debuglevel)
+
+                headers = dict(req.unredirected_hdrs)
+                headers.update(dict((k, v) for k, v in req.headers.items()
+                            if k not in headers))
+
+                # We want to make an HTTP/1.1 request, but the addinfourl
+                # class isn't prepared to deal with a persistent connection.
+                # It will try to read all remaining data from the socket,
+                # which will block while the server waits for the next request.
+                # So make sure the connection gets closed after the (only)
+                # request.
+
+                # Don't close the connection when connection_cache is enabled.
+                if fetch.connection_cache is None:
+                    headers["Connection"] = "close"
+                else:
+                    headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0
+
+                headers = dict(
+                    (name.title(), val) for name, val in headers.items())
+
+                if req._tunnel_host:
+                    tunnel_headers = {}
+                    proxy_auth_hdr = "Proxy-Authorization"
+                    if proxy_auth_hdr in headers:
+                        tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
+                        # Proxy-Authorization should not be sent to origin
+                        # server.
+                        del headers[proxy_auth_hdr]
+                    h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
+
+                try:
+                    h.request(req.get_method(), req.get_selector(), req.data, headers)
+                except socket.error, err: # XXX what error?
+                    # Don't close connection when cache is enabled.
+                    if fetch.connection_cache is None:
+                        h.close()
+                    raise urllib2.URLError(err)
+                else:
+                    try:
+                        r = h.getresponse(buffering=True)
+                    except TypeError: # buffering kw not supported
+                        r = h.getresponse()
+
+                # Pick apart the HTTPResponse object to get the addinfourl
+                # object initialized properly.
+
+                # Wrap the HTTPResponse object in socket's file object adapter
+                # for Windows.  That adapter calls recv(), so delegate recv()
+                # to read().  This weird wrapping allows the returned object to
+                # have readline() and readlines() methods.
+
+                # XXX It might be better to extract the read buffering code
+                # out of socket._fileobject() and into a base class.
+                r.recv = r.read
+
+                # no data, just have to read
+                r.read()
+                class fp_dummy(object):
+                    def read(self):
+                        return ""
+                    def readline(self):
+                        return ""
+                    def close(self):
+                        pass
+
+                resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
+                resp.code = r.status
+                resp.msg = r.reason
+
+                # Close the connection when the server requests it.
+                if fetch.connection_cache is not None:
+                    if 'Connection' in r.msg and r.msg['Connection'] == 'close':
+                        fetch.connection_cache.remove_connection(h.host, h.port)
+
+                return resp
+
+        def export_proxies(d):
+            variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
+                            'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY']
+            exported = False
+
+            for v in variables:
+                if v in os.environ:
+                    exported = True
+                else:
+                    v_proxy = d.getVar(v, True)
+                    if v_proxy is not None:
+                        os.environ[v] = v_proxy
+                        exported = True
+
+            return exported
+
+        def head_method(self):
+            return "HEAD"
+
+        exported_proxies = export_proxies(d)
+
+        # XXX: Since Python 2.7.9 ssl cert validation is enabled by default
+        # (see PEP-0476); this causes verification errors on some https
+        # servers, so disable it by default.
+        import ssl
+        ssl_context = None
+        if hasattr(ssl, '_create_unverified_context'):
+            ssl_context = ssl._create_unverified_context()
+
+        handlers = []
+        if exported_proxies:
+            handlers.append(urllib2.ProxyHandler)
+        handlers.append(CacheHTTPHandler)
+        if ssl_context is not None:
+            handlers.append(urllib2.HTTPSHandler(context=ssl_context))
+        opener = urllib2.build_opener(*handlers)
+
+        urllib2.Request.get_method = head_method
+        urllib2.install_opener(opener)
+
+        uri = ud.url.split(";")[0]
+
+        try:
+            urllib2.urlopen(uri)
+        except Exception:
+            # Any failure (URL error, HTTP error, bad URI) means the
+            # status check failed.
+            return False
+        return True
+
+    def _parse_path(self, regex, s):
+        """
+        Find and group name, version and archive type in the given string s
+        """
+
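+        # Not every regex defines all three groups; e.g. a recipe-provided
+        # REGEX may define only 'pver', so each group is checked before use.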
+        m = regex.search(s)
+        if m:
+            pname = ''
+            pver = ''
+            ptype = ''
+
+            mdict = m.groupdict()
+            if 'name' in mdict:
+                pname = mdict['name']
+            if 'pver' in mdict:
+                pver = mdict['pver']
+            if 'type' in mdict:
+                ptype = mdict['type']
+
+            bb.debug(3, "_parse_path: %s, %s, %s" % (pname, pver, ptype))
+
+            return (pname, pver, ptype)
+
+        return None
+
+    def _modelate_version(self, version):
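+        # Normalize a version string so bb.utils.vercmp() can compare it.
+        # Illustrative mappings implied by the substitutions below:
+        #   "1.2rc3" -> "1.2.1000.3" (rc sorts above beta and alpha)
+        #   "1_0"    -> "1.0"
+        #   "v1.0"   -> "1.0" (leading 'v' is stripped)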
+        if version[0] in ['.', '-']:
+            if version[1].isdigit():
+                version = version[1] + version[0] + version[2:]
+            else:
+                version = version[1:]
+
+        version = re.sub('-', '.', version)
+        version = re.sub('_', '.', version)
+        version = re.sub('(rc)+', '.1000.', version)
+        version = re.sub('(beta)+', '.100.', version)
+        version = re.sub('(alpha)+', '.10.', version)
+        if version[0] == 'v':
+            version = version[1:]
+        return version
+
+    def _vercmp(self, old, new):
+        """
+        Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
+        purpose. PE is cleared in comparison as it's not for build, and PR is cleared too
+        for simplicity as it's somehow difficult to get from various upstream format
+        """
+
+        (oldpn, oldpv, oldsuffix) = old
+        (newpn, newpv, newsuffix) = new
+
+        """
+        Check for a new suffix type that we have never heard of before
+        """
+        if newsuffix:
+            m = self.suffix_regex_comp.search(newsuffix)
+            if not m:
+                bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
+                return False
+
+        """
+        Not our package so ignore it
+        """
+        if oldpn != newpn:
+            return False
+
+        oldpv = self._modelate_version(oldpv)
+        newpv = self._modelate_version(newpv)
+
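+        # bb.utils.vercmp() returns a value < 0, == 0 or > 0 as oldpv is
+        # older than, equal to, or newer than newpv; callers treat a
+        # negative result as "new is a newer version".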
+        return bb.utils.vercmp(("0", oldpv, ""), ("0", newpv, ""))
+
+    def _fetch_index(self, uri, ud, d):
+        """
+        Run fetch checkstatus to get directory information
+        """
+        f = tempfile.NamedTemporaryFile()
+
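+        # Present a browser-like User-Agent; some servers are assumed to
+        # refuse or alter responses for wget's default one.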
+        agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
+        fetchcmd = self.basecmd
+        fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
+        try:
+            self._runwget(ud, d, fetchcmd, True)
+            fetchresult = f.read()
+        except bb.fetch2.BBFetchException:
+            fetchresult = ""
+
+        f.close()
+        return fetchresult
+
+    def _check_latest_version(self, url, package, package_regex, current_version, ud, d):
+        """
+        Return the latest version of a package inside a given directory path
+        If error or no version, return ""
+        """
+        valid = 0
+        version = ['', '', '']
+
+        bb.debug(3, "VersionURL: %s" % (url))
+        soup = BeautifulSoup(self._fetch_index(url, ud, d))
+        if not soup:
+            bb.debug(3, "*** %s NO SOUP" % (url))
+            return ""
+
+        for line in soup.find_all('a', href=True):
+            bb.debug(3, "line['href'] = '%s'" % (line['href']))
+            bb.debug(3, "line = '%s'" % (str(line)))
+
+            newver = self._parse_path(package_regex, line['href'])
+            if not newver:
+                newver = self._parse_path(package_regex, str(line))
+
+            if newver:
+                bb.debug(3, "Upstream version found: %s" % newver[1])
+                if valid == 0:
+                    version = newver
+                    valid = 1
+                elif self._vercmp(version, newver) < 0:
+                    version = newver
+
+        pupver = re.sub('_', '.', version[1])
+
+        bb.debug(3, "*** %s -> UpstreamVersion = %s (CurrentVersion = %s)" %
+                (package, pupver or "N/A", current_version[1]))
+
+        if valid:
+            return pupver
+
+        return ""
+
+    def _check_latest_version_by_dir(self, dirver, package, package_regex,
+            current_version, ud, d):
+        """
+            Scan every directory in order to get upstream version.
+        """
+        version_dir = ['', '', '']
+        version = ['', '', '']
+
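+        # Split a directory name into a non-digit prefix and a version,
+        # e.g. "v5.7" -> prefix "v", version "5.7".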
+        dirver_regex = re.compile("(\D*)((\d+[\.\-_])+(\d+))")
+        s = dirver_regex.search(dirver)
+        if s:
+            version_dir[1] = s.group(2)
+        else:
+            version_dir[1] = dirver
+
+        dirs_uri = bb.fetch.encodeurl([ud.type, ud.host,
+                ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
+        bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))
+
+        soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d))
+        if not soup:
+            return version[1]
+
+        for line in soup.find_all('a', href=True):
+            s = dirver_regex.search(line['href'].strip("/"))
+            if s:
+                version_dir_new = ['', s.group(2), '']
+                if self._vercmp(version_dir, version_dir_new) <= 0:
+                    dirver_new = s.group(1) + s.group(2)
+                    # Replace only the first occurrence of dirver in the path
+                    path = ud.path.replace(dirver, dirver_new, 1) \
+                        .split(package)[0]
+                    uri = bb.fetch.encodeurl([ud.type, ud.host, path,
+                        ud.user, ud.pswd, {}])
+
+                    pupver = self._check_latest_version(uri,
+                            package, package_regex, current_version, ud, d)
+                    if pupver:
+                        version[1] = pupver
+
+                    version_dir = version_dir_new
+
+        return version[1]
+
+    def _init_regexes(self, package, ud, d):
+        """
+        Match as many patterns as possible such as:
+                gnome-common-2.20.0.tar.gz (most common format)
+                gtk+-2.90.1.tar.gz
+                xf86-input-synaptics-12.6.9.tar.gz
+                dri2proto-2.3.tar.gz
+                blktool_4.orig.tar.gz
+                libid3tag-0.15.1b.tar.gz
+                unzip552.tar.gz
+                icu4c-3_6-src.tgz
+                genext2fs_1.3.orig.tar.gz
+                gst-fluendo-mp3
+        """
+        # match most patterns, which use "-" as the separator before the
+        # version digits
+        pn_prefix1 = "[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
+        # a loose pattern such as for unzip552.tar.gz
+        pn_prefix2 = "[a-zA-Z]+"
+        # a loose pattern such as for 80325-quicky-0.4.tar.gz
+        pn_prefix3 = "[0-9]+[-]?[a-zA-Z]+"
+        # Save the Package Name (pn) Regex for use later
+        pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)
+
+        # match version
+        pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
+
+        # match arch
+        parch_regex = "-source|_all_"
+
+        # The src.rpm extension was added only for rpm packages. It can be
+        # removed if rpm packages will always be considered as needing a
+        # manual upgrade.
+        psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
+
+        # match name, version and archive type of a package
+        package_regex_comp = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
+                                                    % (pn_regex, pver_regex, parch_regex, psuffix_regex))
+        self.suffix_regex_comp = re.compile(psuffix_regex)
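+        # Illustrative match (a typical tarball name): for
+        # "gnome-common-2.20.0.tar.gz" the named groups come out roughly as
+        # name="gnome-common-", pver="2.20.0", type="tar.gz".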
+
+        # compile the regex; it can be a package-specific REGEX from the
+        # recipe or the generic one built above
+        pn_regex = d.getVar('REGEX', True)
+        if pn_regex:
+            package_custom_regex_comp = re.compile(pn_regex)
+        else:
+            version = self._parse_path(package_regex_comp, package)
+            if version:
+                package_custom_regex_comp = re.compile(
+                    "(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
+                    (re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
+            else:
+                package_custom_regex_comp = None
+
+        return package_custom_regex_comp
+
+    def latest_versionstring(self, ud, d):
+        """
+        Manipulate the URL and try to obtain the latest package version
+
+        sanity check to ensure same name and type.
+        """
+        package = ud.path.split("/")[-1]
+        current_version = ['', d.getVar('PV', True), '']
+
+        """possible to have no version in pkg name, such as spectrum-fw"""
+        if not re.search("\d+", package):
+            current_version[1] = re.sub('_', '.', current_version[1])
+            current_version[1] = re.sub('-', '.', current_version[1])
+            return (current_version[1], '')
+
+        package_regex = self._init_regexes(package, ud, d)
+        if package_regex is None:
+            bb.warn("latest_versionstring: package %s don't match pattern" % (package))
+            return ('', '')
+        bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))
+
+        uri = ""
+        regex_uri = d.getVar("REGEX_URI", True)
+        if not regex_uri:
+            path = ud.path.split(package)[0]
+
+            # search for version matches on folders inside the path, like:
+            # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
+            dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
+            m = dirver_regex.search(path)
+            if m:
+                pn = d.getVar('PN', True)
+                dirver = m.group('dirver')
+
+                dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
+                if not dirver_pn_regex.search(dirver):
+                    return (self._check_latest_version_by_dir(dirver,
+                        package, package_regex, current_version, ud, d), '')
+
+            uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
+        else:
+            uri = regex_uri
+
+        return (self._check_latest_version(uri, package, package_regex,
+                current_version, ud, d), '')