reset upstream subtrees to yocto 2.6
Reset the following subtrees on thud HEAD:

  poky: 87e3a9739d
  meta-openembedded: 6094ae18c8
  meta-security: 31dc4e7532
  meta-raspberrypi: a48743dc36
  meta-xilinx: c42016e2e6

Also re-apply backports that didn't make it into thud:
  poky:
    17726d0 systemd-systemctl-native: handle Install wildcards

  meta-openembedded:
    4321a5d libtinyxml2: update to 7.0.1
    042f0a3 libcereal: Add native and nativesdk classes
    e23284f libcereal: Allow empty package
    030e8d4 rsyslog: curl-less build with fmhttp PACKAGECONFIG
    179a1b9 gtest: update to 1.8.1

Squashed OpenBMC subtree compatibility updates:
  meta-aspeed:
    Brad Bishop (1):
          aspeed: add yocto 2.6 compatibility

  meta-ibm:
    Brad Bishop (1):
          ibm: prepare for yocto 2.6

  meta-ingrasys:
    Brad Bishop (1):
          ingrasys: set layer compatibility to yocto 2.6

  meta-openpower:
    Brad Bishop (1):
          openpower: set layer compatibility to yocto 2.6

  meta-phosphor:
    Brad Bishop (3):
          phosphor: set layer compatibility to thud
          phosphor: libgpg-error: drop patches
          phosphor: react to fitimage artifact rename

    Ed Tanous (4):
          Dropbear: upgrade options for latest upgrade
          yocto2.6: update openssl options
          busybox: remove upstream watchdog patch
          systemd: Rebase CONFIG_CGROUP_BPF patch

Change-Id: I7b1fe71cca880d0372a82d94b5fd785323e3a9e7
Signed-off-by: Brad Bishop <bradleyb@fuzziesquirrel.com>
diff --git a/poky/bitbake/lib/bb/COW.py b/poky/bitbake/lib/bb/COW.py
index bec6208..7817473 100644
--- a/poky/bitbake/lib/bb/COW.py
+++ b/poky/bitbake/lib/bb/COW.py
@@ -150,7 +150,7 @@
yield value
if type == "items":
yield (key, value)
- raise StopIteration()
+ return
def iterkeys(cls):
return cls.iter("keys")
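
Context for the COW.py hunk: under PEP 479 (enforced from Python 3.7), a
StopIteration raised inside a generator is converted into a RuntimeError, so
ending the generator with a bare return is the correct fix. A minimal sketch
of the difference, using nothing beyond the standard library:

    def gen_old():
        yield 1
        raise StopIteration()   # RuntimeError when consumed on Python 3.7+

    def gen_new():
        yield 1
        return                  # the supported way to end a generator

    print(list(gen_new()))      # [1]
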
diff --git a/poky/bitbake/lib/bb/__init__.py b/poky/bitbake/lib/bb/__init__.py
index d24adb8..4bc47c8 100644
--- a/poky/bitbake/lib/bb/__init__.py
+++ b/poky/bitbake/lib/bb/__init__.py
@@ -21,7 +21,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-__version__ = "1.38.0"
+__version__ = "1.40.0"
import sys
if sys.version_info < (3, 4, 0):
@@ -63,6 +63,10 @@
def verbose(self, msg, *args, **kwargs):
return self.log(logging.INFO - 1, msg, *args, **kwargs)
+ def verbnote(self, msg, *args, **kwargs):
+ return self.log(logging.INFO + 2, msg, *args, **kwargs)
+
+
logging.raiseExceptions = False
logging.setLoggerClass(BBLogger)
@@ -93,6 +97,18 @@
def note(*args):
mainlogger.info(''.join(args))
+#
+# A higher priority note which will show on the console but isn't a warning
+#
+# Something is happening that the user should be aware of, but they probably
+# did something to make it happen
+#
+def verbnote(*args):
+ mainlogger.verbnote(''.join(args))
+
+#
+# Warnings - things the user likely needs to pay attention to and fix
+#
def warn(*args):
mainlogger.warning(''.join(args))
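
The verbnote additions introduce a log level between NOTE (INFO) and WARNING,
so a message reaches the console without being styled or counted as a warning.
A hedged sketch of the same idea using only the stock logging module:

    import logging

    VERBNOTE = logging.INFO + 2   # mirrors the value added in bb/msg.py below
    logging.addLevelName(VERBNOTE, 'NOTE')
    logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')

    # Prints 'NOTE: ...' on the console, but is not a WARNING
    logging.getLogger('demo').log(VERBNOTE, 'universe target is only for testing')
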
diff --git a/poky/bitbake/lib/bb/build.py b/poky/bitbake/lib/bb/build.py
index 4631abd..3e2a94e 100644
--- a/poky/bitbake/lib/bb/build.py
+++ b/poky/bitbake/lib/bb/build.py
@@ -41,8 +41,6 @@
bblogger = logging.getLogger('BitBake')
logger = logging.getLogger('BitBake.Build')
-NULL = open(os.devnull, 'r+')
-
__mtime_cache = {}
def cached_mtime_noerror(f):
@@ -533,7 +531,6 @@
self.triggered = True
# Handle logfiles
- si = open('/dev/null', 'r')
try:
bb.utils.mkdirhier(os.path.dirname(logfn))
logfile = open(logfn, 'w')
@@ -547,7 +544,8 @@
ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]
# Replace those fds with our own
- os.dup2(si.fileno(), osi[1])
+ with open('/dev/null', 'r') as si:
+ os.dup2(si.fileno(), osi[1])
os.dup2(logfile.fileno(), oso[1])
os.dup2(logfile.fileno(), ose[1])
@@ -608,7 +606,6 @@
os.close(osi[0])
os.close(oso[0])
os.close(ose[0])
- si.close()
logfile.close()
if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
@@ -803,6 +800,7 @@
if name in flags:
deptask = d.expand(flags[name])
task_deps[name][task] = deptask
+ getTask('mcdepends')
getTask('depends')
getTask('rdepends')
getTask('deptask')
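
The build.py change drops the long-lived /dev/null handles: os.dup2() copies
the descriptor into place, so the source file object can be closed right away,
and a with block makes that explicit. The pattern in isolation (an illustrative
sketch, not the bitbake code; 'task.log' is a made-up target):

    import os, sys

    logfile = open('task.log', 'w')
    saved_stdin = os.dup(sys.stdin.fileno())     # keep a copy to restore later

    # Redirect stdin from /dev/null; the 'si' handle may close immediately
    # because dup2() gives stdin its own copy of the descriptor.
    with open('/dev/null', 'r') as si:
        os.dup2(si.fileno(), sys.stdin.fileno())
    os.dup2(logfile.fileno(), sys.stdout.fileno())
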
diff --git a/poky/bitbake/lib/bb/cache.py b/poky/bitbake/lib/bb/cache.py
index 168a77a..258d679 100644
--- a/poky/bitbake/lib/bb/cache.py
+++ b/poky/bitbake/lib/bb/cache.py
@@ -37,7 +37,7 @@
logger = logging.getLogger("BitBake.Cache")
-__cache_version__ = "151"
+__cache_version__ = "152"
def getCacheFile(path, filename, data_hash):
return os.path.join(path, filename + "." + data_hash)
diff --git a/poky/bitbake/lib/bb/codeparser.py b/poky/bitbake/lib/bb/codeparser.py
index 530f44e..ddd1b97 100644
--- a/poky/bitbake/lib/bb/codeparser.py
+++ b/poky/bitbake/lib/bb/codeparser.py
@@ -140,7 +140,7 @@
# so that an existing cache gets invalidated. Additionally you'll need
# to increment __cache_version__ in cache.py in order to ensure that old
# recipe caches don't trigger "Taskhash mismatch" errors.
- CACHE_VERSION = 9
+ CACHE_VERSION = 10
def __init__(self):
MultiProcessCache.__init__(self)
@@ -214,7 +214,7 @@
self.buffer = []
class PythonParser():
- getvars = (".getVar", ".appendVar", ".prependVar")
+ getvars = (".getVar", ".appendVar", ".prependVar", "oe.utils.conditional")
getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag")
containsfuncs = ("bb.utils.contains", "base_contains")
containsanyfuncs = ("bb.utils.contains_any", "bb.utils.filter")
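
Adding "oe.utils.conditional" to getvars means the parser now records the first
argument of such calls as a variable reference when it scans python functions.
Roughly, the scan walks the AST for calls whose dotted name ends with one of
the getvars entries; a simplified sketch (assumes Python 3.9+ for ast.unparse,
and is not the actual codeparser visitor):

    import ast

    getvars = ('.getVar', 'oe.utils.conditional')

    class VarRefVisitor(ast.NodeVisitor):
        def __init__(self):
            self.references = set()
        def visit_Call(self, node):
            name = ast.unparse(node.func)
            if name.endswith(getvars) and node.args:
                arg = node.args[0]
                if isinstance(arg, ast.Constant) and isinstance(arg.value, str):
                    self.references.add(arg.value)
            self.generic_visit(node)

    v = VarRefVisitor()
    v.visit(ast.parse("x = oe.utils.conditional('FOO', '1', 'a', 'b', d)"))
    print(v.references)   # {'FOO'}
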
diff --git a/poky/bitbake/lib/bb/cooker.py b/poky/bitbake/lib/bb/cooker.py
index 1fda40d..16681ba 100644
--- a/poky/bitbake/lib/bb/cooker.py
+++ b/poky/bitbake/lib/bb/cooker.py
@@ -620,6 +620,27 @@
runlist.append([mc, k, ktask, fn])
bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
+ mcdeps = taskdata[mc].get_mcdepends()
+ # No need to check providers if there are no mcdeps or this is not an mc build
+ if mcdeps and mc:
+ # Make sure we can provide the multiconfig dependency
+ seen = set()
+ new = True
+ while new:
+ new = False
+ for mc in self.multiconfigs:
+ for k in mcdeps:
+ if k in seen:
+ continue
+ l = k.split(':')
+ depmc = l[2]
+ if depmc not in self.multiconfigs:
+ bb.fatal("Multiconfig dependency %s depends on nonexistent mc configuration %s" % (k,depmc))
+ else:
+ logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
+ taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
+ seen.add(k)
+ new = True
for mc in self.multiconfigs:
taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
@@ -706,8 +727,8 @@
if not dotname in depend_tree["tdepends"]:
depend_tree["tdepends"][dotname] = []
for dep in rq.rqdata.runtaskentries[tid].depends:
- (depmc, depfn, deptaskname, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
- deppn = self.recipecaches[mc].pkg_fn[deptaskfn]
+ (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
+ deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
if taskfn not in seen_fns:
seen_fns.append(taskfn)
@@ -1566,7 +1587,7 @@
pkgs_to_build.append(t)
if 'universe' in pkgs_to_build:
- parselog.warning("The \"universe\" target is only intended for testing and may produce errors.")
+ parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
parselog.debug(1, "collating packages for \"universe\"")
pkgs_to_build.remove('universe')
for mc in self.multiconfigs:
@@ -1633,7 +1654,10 @@
class CookerCollectFiles(object):
def __init__(self, priorities):
self.bbappends = []
- self.bbfile_config_priorities = priorities
+ # Priorities is a list of tuples, with the second element as the pattern.
+ # We need to sort the list with the longest pattern first, and so on to
+ # the shortest. This allows nested layers to be properly evaluated.
+ self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
def calc_bbfile_priority( self, filename, matched = None ):
for _, _, regex, pri in self.bbfile_config_priorities:
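
On the mcdepends hunk earlier in this file: multiconfig dependencies are
colon-separated strings of the form mc:<from-mc>:<dep-mc>:<recipe>:<task>,
which is why l[2] is the target configuration and l[3] the recipe whose
providers get added. A small parse sketch with a made-up entry:

    dep = 'mc:host:fw:fw-image:do_deploy'    # hypothetical mcdepends value
    l = dep.split(':')
    assert l[0] == 'mc'
    frommc, depmc, pn, task = l[1], l[2], l[3], l[4]
    print(depmc, pn)                         # fw fw-image
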
diff --git a/poky/bitbake/lib/bb/daemonize.py b/poky/bitbake/lib/bb/daemonize.py
index 8300d1d..c937675 100644
--- a/poky/bitbake/lib/bb/daemonize.py
+++ b/poky/bitbake/lib/bb/daemonize.py
@@ -16,6 +16,10 @@
background as a daemon, returning control to the caller.
"""
+ # Ensure stdout/stderr are flushed before forking to avoid duplicate output
+ sys.stdout.flush()
+ sys.stderr.flush()
+
try:
# Fork a child process so the parent can exit. This returns control to
# the command-line or shell. It also guarantees that the child will not
@@ -49,8 +53,8 @@
# exit() or _exit()?
# _exit is like exit(), but it doesn't call any functions registered
# with atexit (and on_exit) or any registered signal handlers. It also
- # closes any open file descriptors. Using exit() may cause all stdio
- # streams to be flushed twice and any temporary files may be unexpectedly
+ # closes any open file descriptors, but doesn't flush any buffered output.
+ # Using exit() may cause any temporary files to be unexpectedly
# removed. It's therefore recommended that child branches of a fork()
# and the parent branch(es) of a daemon use _exit().
os._exit(0)
@@ -61,17 +65,19 @@
# The second child.
# Replace standard fds with our own
- si = open('/dev/null', 'r')
- os.dup2(si.fileno(), sys.stdin.fileno())
+ with open('/dev/null', 'r') as si:
+ os.dup2(si.fileno(), sys.stdin.fileno())
try:
so = open(logfile, 'a+')
- se = so
os.dup2(so.fileno(), sys.stdout.fileno())
- os.dup2(se.fileno(), sys.stderr.fileno())
+ os.dup2(so.fileno(), sys.stderr.fileno())
except io.UnsupportedOperation:
sys.stdout = open(logfile, 'a+')
- sys.stderr = sys.stdout
+
+ # Have stdout and stderr be the same so log output matches chronologically
+ # and there aren't two separate buffers
+ sys.stderr = sys.stdout
try:
function()
@@ -79,4 +85,9 @@
traceback.print_exc()
finally:
bb.event.print_ui_queue()
+ # os._exit() doesn't flush open files like exit() does. Manually flush
+ # stdout and stderr so that any logging output will be seen, particularly
+ # exception tracebacks.
+ sys.stdout.flush()
+ sys.stderr.flush()
os._exit(0)
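
Why the explicit flushes around fork(): buffered stdio is duplicated into the
child, so anything still sitting in the buffer can be written by both
processes, and os._exit() never flushes at all. A minimal reproduction sketch
(POSIX only):

    import os, sys

    print('hello', end='')      # sits in the stdio buffer
    sys.stdout.flush()          # omit this and both processes emit 'hello'

    pid = os.fork()
    if pid == 0:
        os._exit(0)             # skips atexit handlers and stdio flushing
    os.waitpid(pid, 0)
    print(' world')
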
diff --git a/poky/bitbake/lib/bb/data.py b/poky/bitbake/lib/bb/data.py
index 80a7879..d66d98c 100644
--- a/poky/bitbake/lib/bb/data.py
+++ b/poky/bitbake/lib/bb/data.py
@@ -38,6 +38,7 @@
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
import sys, os, re
+import hashlib
if sys.argv[0][-5:] == "pydoc":
path = os.path.dirname(os.path.dirname(sys.argv[1]))
else:
@@ -283,14 +284,12 @@
try:
if key[-1] == ']':
vf = key[:-1].split('[')
- value = d.getVarFlag(vf[0], vf[1], False)
- parser = d.expandWithRefs(value, key)
+ value, parser = d.getVarFlag(vf[0], vf[1], False, retparser=True)
deps |= parser.references
deps = deps | (keys & parser.execs)
return deps, value
varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "exports", "postfuncs", "prefuncs", "lineno", "filename"]) or {}
vardeps = varflags.get("vardeps")
- value = d.getVarFlag(key, "_content", False)
def handle_contains(value, contains, d):
newvalue = ""
@@ -309,10 +308,19 @@
return newvalue
return value + newvalue
+ def handle_remove(value, deps, removes, d):
+ for r in sorted(removes):
+ r2 = d.expandWithRefs(r, None)
+ value += "\n_remove of %s" % r
+ deps |= r2.references
+ deps = deps | (keys & r2.execs)
+ return value
+
if "vardepvalue" in varflags:
- value = varflags.get("vardepvalue")
+ value = varflags.get("vardepvalue")
elif varflags.get("func"):
if varflags.get("python"):
+ value = d.getVarFlag(key, "_content", False)
parser = bb.codeparser.PythonParser(key, logger)
if value and "\t" in value:
logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE")))
@@ -321,13 +329,15 @@
deps = deps | (keys & parser.execs)
value = handle_contains(value, parser.contains, d)
else:
- parsedvar = d.expandWithRefs(value, key)
+ value, parsedvar = d.getVarFlag(key, "_content", False, retparser=True)
parser = bb.codeparser.ShellParser(key, logger)
parser.parse_shell(parsedvar.value)
deps = deps | shelldeps
deps = deps | parsedvar.references
deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
value = handle_contains(value, parsedvar.contains, d)
+ if hasattr(parsedvar, "removes"):
+ value = handle_remove(value, deps, parsedvar.removes, d)
if vardeps is None:
parser.log.flush()
if "prefuncs" in varflags:
@@ -337,10 +347,12 @@
if "exports" in varflags:
deps = deps | set(varflags["exports"].split())
else:
- parser = d.expandWithRefs(value, key)
+ value, parser = d.getVarFlag(key, "_content", False, retparser=True)
deps |= parser.references
deps = deps | (keys & parser.execs)
value = handle_contains(value, parser.contains, d)
+ if hasattr(parser, "removes"):
+ value = handle_remove(value, deps, parser.removes, d)
if "vardepvalueexclude" in varflags:
exclude = varflags.get("vardepvalueexclude")
@@ -394,6 +406,43 @@
#print "For %s: %s" % (task, str(deps[task]))
return tasklist, deps, values
+def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
+ taskdeps = {}
+ basehash = {}
+
+ for task in tasklist:
+ data = lookupcache[task]
+
+ if data is None:
+ bb.error("Task %s from %s seems to be empty?!" % (task, fn))
+ data = ''
+
+ gendeps[task] -= whitelist
+ newdeps = gendeps[task]
+ seen = set()
+ while newdeps:
+ nextdeps = newdeps
+ seen |= nextdeps
+ newdeps = set()
+ for dep in nextdeps:
+ if dep in whitelist:
+ continue
+ gendeps[dep] -= whitelist
+ newdeps |= gendeps[dep]
+ newdeps -= seen
+
+ alldeps = sorted(seen)
+ for dep in alldeps:
+ data = data + dep
+ var = lookupcache[dep]
+ if var is not None:
+ data = data + str(var)
+ k = fn + "." + task
+ basehash[k] = hashlib.md5(data.encode("utf-8")).hexdigest()
+ taskdeps[task] = alldeps
+
+ return taskdeps, basehash
+
def inherits_class(klass, d):
val = d.getVar('__inherit_cache', False) or []
needle = os.path.join('classes', '%s.bbclass' % klass)
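
The new generate_dependency_hash helper computes each task's basehash as the
md5 of the task's own content concatenated with every transitive dependency
name and value in sorted order, after subtracting the whitelist. A
stripped-down sketch of the closure-then-hash idea with toy data:

    import hashlib

    gendeps = {'do_compile': {'CC'}, 'CC': {'TARGET_SYS'}, 'TARGET_SYS': set()}
    values = {'do_compile': '${CC} main.c', 'CC': 'gcc', 'TARGET_SYS': 'arm'}

    seen, new = set(), set(gendeps['do_compile'])
    while new:                       # expand dependencies to a fixed point
        seen |= new
        new = set().union(*(gendeps[d] for d in new)) - seen

    data = values['do_compile'] + ''.join(d + values[d] for d in sorted(seen))
    print(hashlib.md5(data.encode('utf-8')).hexdigest())
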
diff --git a/poky/bitbake/lib/bb/data_smart.py b/poky/bitbake/lib/bb/data_smart.py
index 7b09af5..6b94fc4 100644
--- a/poky/bitbake/lib/bb/data_smart.py
+++ b/poky/bitbake/lib/bb/data_smart.py
@@ -42,6 +42,7 @@
__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>[^A-Z]*))?$')
__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t :]+}")
__expand_python_regexp__ = re.compile(r"\${@.+?}")
+__whitespace_split__ = re.compile('(\s)')
def infer_caller_details(loginfo, parent = False, varval = True):
"""Save the caller the trouble of specifying everything."""
@@ -104,11 +105,7 @@
if self.varname and key:
if self.varname == key:
raise Exception("variable %s references itself!" % self.varname)
- if key in self.d.expand_cache:
- varparse = self.d.expand_cache[key]
- var = varparse.value
- else:
- var = self.d.getVarFlag(key, "_content")
+ var = self.d.getVarFlag(key, "_content")
self.references.add(key)
if var is not None:
return var
@@ -267,6 +264,16 @@
return
self.variables[var].append(loginfo.copy())
+ def rename_variable_hist(self, oldvar, newvar):
+ if not self.dataroot._tracking:
+ return
+ if oldvar not in self.variables:
+ return
+ if newvar not in self.variables:
+ self.variables[newvar] = []
+ for i in self.variables[oldvar]:
+ self.variables[newvar].append(i.copy())
+
def variable(self, var):
remote_connector = self.dataroot.getVar('_remote_data', False)
if remote_connector:
@@ -401,9 +408,6 @@
if not isinstance(s, str): # sanity check
return VariableParse(varname, self, s)
- if varname and varname in self.expand_cache:
- return self.expand_cache[varname]
-
varparse = VariableParse(varname, self)
while s.find('${') != -1:
@@ -427,9 +431,6 @@
varparse.value = s
- if varname:
- self.expand_cache[varname] = varparse
-
return varparse
def expand(self, s, varname = None):
@@ -498,6 +499,7 @@
def setVar(self, var, value, **loginfo):
#print("var=" + str(var) + " val=" + str(value))
+ self.expand_cache = {}
parsing=False
if 'parsing' in loginfo:
parsing=True
@@ -510,7 +512,7 @@
if 'op' not in loginfo:
loginfo['op'] = "set"
- self.expand_cache = {}
+
match = __setvar_regexp__.match(var)
if match and match.group("keyword") in __setvar_keyword__:
base = match.group('base')
@@ -619,6 +621,7 @@
val = self.getVar(key, 0, parsing=True)
if val is not None:
+ self.varhistory.rename_variable_hist(key, newkey)
loginfo['variable'] = newkey
loginfo['op'] = 'rename from %s' % key
loginfo['detail'] = val
@@ -660,6 +663,7 @@
self.setVar(var + "_prepend", value, ignore=True, parsing=True)
def delVar(self, var, **loginfo):
+ self.expand_cache = {}
if '_remote_data' in self.dict:
connector = self.dict["_remote_data"]["_content"]
res = connector.delVar(var)
@@ -669,7 +673,6 @@
loginfo['detail'] = ""
loginfo['op'] = 'del'
self.varhistory.record(**loginfo)
- self.expand_cache = {}
self.dict[var] = {}
if var in self.overridedata:
del self.overridedata[var]
@@ -692,13 +695,13 @@
override = None
def setVarFlag(self, var, flag, value, **loginfo):
+ self.expand_cache = {}
if '_remote_data' in self.dict:
connector = self.dict["_remote_data"]["_content"]
res = connector.setVarFlag(var, flag, value)
if not res:
return
- self.expand_cache = {}
if 'op' not in loginfo:
loginfo['op'] = "set"
loginfo['flag'] = flag
@@ -719,9 +722,21 @@
self.dict["__exportlist"]["_content"] = set()
self.dict["__exportlist"]["_content"].add(var)
- def getVarFlag(self, var, flag, expand=True, noweakdefault=False, parsing=False):
+ def getVarFlag(self, var, flag, expand=True, noweakdefault=False, parsing=False, retparser=False):
+ if flag == "_content":
+ cachename = var
+ else:
+ if not flag:
+ bb.warn("Calling getVarFlag with flag unset is invalid")
+ return None
+ cachename = var + "[" + flag + "]"
+
+ if expand and cachename in self.expand_cache:
+ return self.expand_cache[cachename].value
+
local_var, overridedata = self._findVar(var)
value = None
+ removes = set()
if flag == "_content" and overridedata is not None and not parsing:
match = False
active = {}
@@ -748,7 +763,11 @@
match = active[a]
del active[a]
if match:
- value = self.getVar(match, False)
+ value, subparser = self.getVarFlag(match, "_content", False, retparser=True)
+ if hasattr(subparser, "removes"):
+ # We have to carry the removes from the overridden variable to apply at the
+ # end of processing
+ removes = subparser.removes
if local_var is not None and value is None:
if flag in local_var:
@@ -784,17 +803,13 @@
if match:
value = r + value
- if expand and value:
- # Only getvar (flag == _content) hits the expand cache
- cachename = None
- if flag == "_content":
- cachename = var
- else:
- cachename = var + "[" + flag + "]"
- value = self.expand(value, cachename)
+ parser = None
+ if expand or retparser:
+ parser = self.expandWithRefs(value, cachename)
+ if expand:
+ value = parser.value
- if value and flag == "_content" and local_var is not None and "_remove" in local_var:
- removes = []
+ if value and flag == "_content" and local_var is not None and "_remove" in local_var and not parsing:
self.need_overrides()
for (r, o) in local_var["_remove"]:
match = True
@@ -803,26 +818,45 @@
if not o2 in self.overrides:
match = False
if match:
- removes.extend(self.expand(r).split())
+ removes.add(r)
- if removes:
- filtered = filter(lambda v: v not in removes,
- value.split())
- value = " ".join(filtered)
- if expand and var in self.expand_cache:
- # We need to ensure the expand cache has the correct value
- # flag == "_content" here
- self.expand_cache[var].value = value
+ if value and flag == "_content" and not parsing:
+ if removes and parser:
+ expanded_removes = {}
+ for r in removes:
+ expanded_removes[r] = self.expand(r).split()
+
+ parser.removes = set()
+ val = ""
+ for v in __whitespace_split__.split(parser.value):
+ skip = False
+ for r in removes:
+ if v in expanded_removes[r]:
+ parser.removes.add(r)
+ skip = True
+ if skip:
+ continue
+ val = val + v
+ parser.value = val
+ if expand:
+ value = parser.value
+
+ if parser:
+ self.expand_cache[cachename] = parser
+
+ if retparser:
+ return value, parser
+
return value
def delVarFlag(self, var, flag, **loginfo):
+ self.expand_cache = {}
if '_remote_data' in self.dict:
connector = self.dict["_remote_data"]["_content"]
res = connector.delVarFlag(var, flag)
if not res:
return
- self.expand_cache = {}
local_var, _ = self._findVar(var)
if not local_var:
return
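
A note on __whitespace_split__: splitting on the capturing group '(\s)' keeps
the separators in the result, so a _remove can drop tokens while leaving the
remaining whitespace intact byte-for-byte (which keeps values, and therefore
hashes, stable). For example:

    import re

    ws_split = re.compile(r'(\s)')   # raw-string form of the pattern above
    value = 'a  b\tc'
    removes = {'b'}

    val = ''.join(v for v in ws_split.split(value) if v not in removes)
    print(repr(val))                 # 'a  \tc'
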
diff --git a/poky/bitbake/lib/bb/event.py b/poky/bitbake/lib/bb/event.py
index 5d00496..5b1b094 100644
--- a/poky/bitbake/lib/bb/event.py
+++ b/poky/bitbake/lib/bb/event.py
@@ -141,6 +141,9 @@
logger = logging.getLogger("BitBake")
if not _uiready:
from bb.msg import BBLogFormatter
+ # Flush any existing buffered content
+ sys.stdout.flush()
+ sys.stderr.flush()
stdout = logging.StreamHandler(sys.stdout)
stderr = logging.StreamHandler(sys.stderr)
formatter = BBLogFormatter("%(levelname)s: %(message)s")
@@ -395,7 +398,7 @@
Event.__init__(self)
class RecipePreFinalise(RecipeEvent):
- """ Recipe Parsing Complete but not yet finialised"""
+ """ Recipe Parsing Complete but not yet finalised"""
class RecipeTaskPreProcess(RecipeEvent):
"""
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 72d6092..3b8389e 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -256,7 +256,7 @@
# Identify if the URI is relative or not
if urlp.scheme in self._relative_schemes and \
- re.compile("^\w+:(?!//)").match(uri):
+ re.compile(r"^\w+:(?!//)").match(uri):
self.relative = True
if not self.relative:
@@ -383,7 +383,7 @@
path = location
else:
host = location
- path = ""
+ path = "/"
if user:
m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
if m:
@@ -452,8 +452,8 @@
# Handle URL parameters
if i:
# Any specified URL parameters must match
- for k in uri_replace_decoded[loc]:
- if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
+ for k in uri_find_decoded[loc]:
+ if uri_decoded[loc][k] != uri_find_decoded[loc][k]:
return None
# Overwrite any specified replacement parameters
for k in uri_replace_decoded[loc]:
@@ -827,6 +827,7 @@
'NO_PROXY', 'no_proxy',
'ALL_PROXY', 'all_proxy',
'GIT_PROXY_COMMAND',
+ 'GIT_SSH',
'GIT_SSL_CAINFO',
'GIT_SMART_HTTP',
'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
@@ -837,14 +838,16 @@
if not cleanup:
cleanup = []
- # If PATH contains WORKDIR which contains PV which contains SRCPV we
+ # If PATH contains WORKDIR which contains PV-PR which contains SRCPV we
# can end up in circular recursion here so give the option of breaking it
# in a data store copy.
try:
d.getVar("PV")
+ d.getVar("PR")
except bb.data_smart.ExpansionError:
d = bb.data.createCopy(d)
d.setVar("PV", "fetcheravoidrecurse")
+ d.setVar("PR", "fetcheravoidrecurse")
origenv = d.getVar("BB_ORIGENV", False)
for var in exportvars:
@@ -1016,16 +1019,7 @@
origud.method.build_mirror_data(origud, ld)
return origud.localpath
# Otherwise the result is a local file:// and we symlink to it
- if not os.path.exists(origud.localpath):
- if os.path.islink(origud.localpath):
- # Broken symbolic link
- os.unlink(origud.localpath)
-
- # As per above, in case two tasks end up here simultaneously.
- try:
- os.symlink(ud.localpath, origud.localpath)
- except FileExistsError:
- pass
+ ensure_symlink(ud.localpath, origud.localpath)
update_stamp(origud, ld)
return ud.localpath
@@ -1059,6 +1053,22 @@
bb.utils.unlockfile(lf)
+def ensure_symlink(target, link_name):
+ if not os.path.exists(link_name):
+ if os.path.islink(link_name):
+ # Broken symbolic link
+ os.unlink(link_name)
+
+ # In case this is executing without any file locks held (as is
+ # the case for file:// URLs), two tasks may end up here at the
+ # same time, in which case we do not want the second task to
+ # fail when the link has already been created by the first task.
+ try:
+ os.symlink(target, link_name)
+ except FileExistsError:
+ pass
+
+
def try_mirrors(fetch, d, origud, mirrors, check = False):
"""
Try to use a mirrored version of the sources.
@@ -1088,7 +1098,9 @@
return True
pkgname = d.expand(d.getVar('PN', False))
- trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
+ trusted_hosts = None
+ if pkgname:
+ trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
if not trusted_hosts:
trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS')
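
The uri_replace fix: URL parameters are now matched against the find
expression (uri_find_decoded) rather than the replacement, so a mirror rule
only requires the parameters it actually names. Sketched with plain dicts
standing in for the decoded URIs:

    uri_params         = {'protocol': 'https', 'branch': 'master'}
    uri_find_params    = {'protocol': 'https'}   # what the rule requires
    uri_replace_params = {'protocol': 'git'}     # what it substitutes

    # The old code compared against uri_replace_params and wrongly rejected this
    matches = all(uri_params.get(k) == v for k, v in uri_find_params.items())
    print(matches)   # True
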
diff --git a/poky/bitbake/lib/bb/fetch2/bzr.py b/poky/bitbake/lib/bb/fetch2/bzr.py
index 16123f8..658502f 100644
--- a/poky/bitbake/lib/bb/fetch2/bzr.py
+++ b/poky/bitbake/lib/bb/fetch2/bzr.py
@@ -41,8 +41,9 @@
init bzr specific variable within url data
"""
# Create paths to bzr checkouts
+ bzrdir = d.getVar("BZRDIR") or (d.getVar("DL_DIR") + "/bzr")
relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(d.expand('${BZRDIR}'), ud.host, relpath)
+ ud.pkgdir = os.path.join(bzrdir, ud.host, relpath)
ud.setup_revisions(d)
@@ -57,7 +58,7 @@
command is "fetch", "update", "revno"
"""
- basecmd = d.expand('${FETCHCMD_bzr}')
+ basecmd = d.getVar("FETCHCMD_bzr") or "/usr/bin/env bzr"
proto = ud.parm.get('protocol', 'http')
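
bzr, cvs, osc, p4, repo and svn all move to the same "getVar(...) or default"
idiom here, so the fetchers keep working when FETCHCMD_* and the *DIR
variables are no longer set globally. The idiom, sketched with a stub
datastore:

    class StubData:
        def __init__(self, vals):
            self.vals = vals
        def getVar(self, key):
            return self.vals.get(key)

    d = StubData({'DL_DIR': '/srv/downloads'})   # BZRDIR deliberately unset
    bzrdir = d.getVar('BZRDIR') or (d.getVar('DL_DIR') + '/bzr')
    print(bzrdir)                                # /srv/downloads/bzr
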
diff --git a/poky/bitbake/lib/bb/fetch2/cvs.py b/poky/bitbake/lib/bb/fetch2/cvs.py
index 490c954..0e0a319 100644
--- a/poky/bitbake/lib/bb/fetch2/cvs.py
+++ b/poky/bitbake/lib/bb/fetch2/cvs.py
@@ -110,7 +110,7 @@
if ud.tag:
options.append("-r %s" % ud.tag)
- cvsbasecmd = d.getVar("FETCHCMD_cvs")
+ cvsbasecmd = d.getVar("FETCHCMD_cvs") or "/usr/bin/env cvs"
cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)
@@ -121,7 +121,8 @@
# create module directory
logger.debug(2, "Fetch: checking for module directory")
pkg = d.getVar('PN')
- pkgdir = os.path.join(d.getVar('CVSDIR'), pkg)
+ cvsdir = d.getVar("CVSDIR") or (d.getVar("DL_DIR") + "/cvs")
+ pkgdir = os.path.join(cvsdir, pkg)
moddir = os.path.join(pkgdir, localdir)
workdir = None
if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index 7b618c6..59a2ee8 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -261,7 +261,7 @@
gitsrcname = gitsrcname + '_' + ud.revisions[name]
dl_dir = d.getVar("DL_DIR")
- gitdir = d.getVar("GITDIR") or (dl_dir + "/git2/")
+ gitdir = d.getVar("GITDIR") or (dl_dir + "/git2")
ud.clonedir = os.path.join(gitdir, gitsrcname)
ud.localfile = ud.clonedir
@@ -299,17 +299,22 @@
return ud.clonedir
def need_update(self, ud, d):
+ return self.clonedir_need_update(ud, d) or self.shallow_tarball_need_update(ud) or self.tarball_need_update(ud)
+
+ def clonedir_need_update(self, ud, d):
if not os.path.exists(ud.clonedir):
return True
for name in ud.names:
if not self._contains_ref(ud, d, name, ud.clonedir):
return True
- if ud.shallow and ud.write_shallow_tarballs and not os.path.exists(ud.fullshallow):
- return True
- if ud.write_tarballs and not os.path.exists(ud.fullmirror):
- return True
return False
+ def shallow_tarball_need_update(self, ud):
+ return ud.shallow and ud.write_shallow_tarballs and not os.path.exists(ud.fullshallow)
+
+ def tarball_need_update(self, ud):
+ return ud.write_tarballs and not os.path.exists(ud.fullmirror)
+
def try_premirror(self, ud, d):
# If we don't do this, updating an existing checkout with only premirrors
# is not possible
@@ -322,16 +327,13 @@
def download(self, ud, d):
"""Fetch url"""
- no_clone = not os.path.exists(ud.clonedir)
- need_update = no_clone or self.need_update(ud, d)
-
# A current clone is preferred to either tarball, a shallow tarball is
# preferred to an out of date clone, and a missing clone will use
# either tarball.
- if ud.shallow and os.path.exists(ud.fullshallow) and need_update:
+ if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
ud.localpath = ud.fullshallow
return
- elif os.path.exists(ud.fullmirror) and no_clone:
+ elif os.path.exists(ud.fullmirror) and not os.path.exists(ud.clonedir):
bb.utils.mkdirhier(ud.clonedir)
runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)
@@ -353,6 +355,8 @@
for name in ud.names:
if not self._contains_ref(ud, d, name, ud.clonedir):
needupdate = True
+ break
+
if needupdate:
output = runfetchcmd("%s remote" % ud.basecmd, d, quiet=True, workdir=ud.clonedir)
if "origin" in output:
@@ -372,6 +376,7 @@
except OSError as exc:
if exc.errno != errno.ENOENT:
raise
+
for name in ud.names:
if not self._contains_ref(ud, d, name, ud.clonedir):
raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
@@ -471,11 +476,30 @@
if os.path.exists(destdir):
bb.utils.prunedir(destdir)
- if ud.shallow and (not os.path.exists(ud.clonedir) or self.need_update(ud, d)):
- bb.utils.mkdirhier(destdir)
- runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=destdir)
- else:
- runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
+ source_found = False
+ source_error = []
+
+ if not source_found:
+ clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
+ if clonedir_is_up_to_date:
+ runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
+ source_found = True
+ else:
+ source_error.append("clone directory not available or not up to date: " + ud.clonedir)
+
+ if not source_found:
+ if ud.shallow:
+ if os.path.exists(ud.fullshallow):
+ bb.utils.mkdirhier(destdir)
+ runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=destdir)
+ source_found = True
+ else:
+ source_error.append("shallow clone not available: " + ud.fullshallow)
+ else:
+ source_error.append("shallow clone not enabled")
+
+ if not source_found:
+ raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)
repourl = self._get_repo_url(ud)
runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index 0aff100..35729db 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -31,9 +31,12 @@
import os
import bb
+import copy
from bb.fetch2.git import Git
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
+from bb.fetch2 import Fetch
+from bb.fetch2 import BBFetchException
class GitSM(Git):
def supports(self, ud, d):
@@ -42,94 +45,206 @@
"""
return ud.type in ['gitsm']
- def uses_submodules(self, ud, d, wd):
- for name in ud.names:
- try:
- runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=wd)
- return True
- except bb.fetch.FetchError:
- pass
- return False
-
- def _set_relative_paths(self, repopath):
- """
- Fix submodule paths to be relative instead of absolute,
- so that when we move the repo it doesn't break
- (In Git 1.7.10+ this is done automatically)
- """
- submodules = []
- with open(os.path.join(repopath, '.gitmodules'), 'r') as f:
- for line in f.readlines():
- if line.startswith('[submodule'):
- submodules.append(line.split('"')[1])
-
- for module in submodules:
- repo_conf = os.path.join(repopath, module, '.git')
- if os.path.exists(repo_conf):
- with open(repo_conf, 'r') as f:
- lines = f.readlines()
- newpath = ''
- for i, line in enumerate(lines):
- if line.startswith('gitdir:'):
- oldpath = line.split(': ')[-1].rstrip()
- if oldpath.startswith('/'):
- newpath = '../' * (module.count('/') + 1) + '.git/modules/' + module
- lines[i] = 'gitdir: %s\n' % newpath
- break
- if newpath:
- with open(repo_conf, 'w') as f:
- for line in lines:
- f.write(line)
-
- repo_conf2 = os.path.join(repopath, '.git', 'modules', module, 'config')
- if os.path.exists(repo_conf2):
- with open(repo_conf2, 'r') as f:
- lines = f.readlines()
- newpath = ''
- for i, line in enumerate(lines):
- if line.lstrip().startswith('worktree = '):
- oldpath = line.split(' = ')[-1].rstrip()
- if oldpath.startswith('/'):
- newpath = '../' * (module.count('/') + 3) + module
- lines[i] = '\tworktree = %s\n' % newpath
- break
- if newpath:
- with open(repo_conf2, 'w') as f:
- for line in lines:
- f.write(line)
+ @staticmethod
+ def parse_gitmodules(gitmodules):
+ modules = {}
+ module = ""
+ for line in gitmodules.splitlines():
+ if line.startswith('[submodule'):
+ module = line.split('"')[1]
+ modules[module] = {}
+ elif module and line.strip().startswith('path'):
+ path = line.split('=')[1].strip()
+ modules[module]['path'] = path
+ elif module and line.strip().startswith('url'):
+ url = line.split('=')[1].strip()
+ modules[module]['url'] = url
+ return modules
def update_submodules(self, ud, d):
- # We have to convert bare -> full repo, do the submodule bit, then convert back
- tmpclonedir = ud.clonedir + ".tmp"
- gitdir = tmpclonedir + os.sep + ".git"
- bb.utils.remove(tmpclonedir, True)
- os.mkdir(tmpclonedir)
- os.rename(ud.clonedir, gitdir)
- runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
- runfetchcmd(ud.basecmd + " reset --hard", d, workdir=tmpclonedir)
- runfetchcmd(ud.basecmd + " checkout -f " + ud.revisions[ud.names[0]], d, workdir=tmpclonedir)
- runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=tmpclonedir)
- self._set_relative_paths(tmpclonedir)
- runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d, workdir=tmpclonedir)
- os.rename(gitdir, ud.clonedir,)
- bb.utils.remove(tmpclonedir, True)
+ submodules = []
+ paths = {}
+ uris = {}
+ local_paths = {}
+
+ for name in ud.names:
+ try:
+ gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=ud.clonedir)
+ except:
+ # No submodules to update
+ continue
+
+ for m, md in self.parse_gitmodules(gitmodules).items():
+ submodules.append(m)
+ paths[m] = md['path']
+ uris[m] = md['url']
+ if uris[m].startswith('..'):
+ newud = copy.copy(ud)
+ newud.path = os.path.realpath(os.path.join(newud.path, md['url']))
+ uris[m] = Git._get_repo_url(self, newud)
+
+ for module in submodules:
+ module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], paths[module]), d, quiet=True, workdir=ud.clonedir)
+ module_hash = module_hash.split()[2]
+
+ # Build new SRC_URI
+ proto = uris[module].split(':', 1)[0]
+ url = uris[module].replace('%s:' % proto, 'gitsm:', 1)
+ url += ';protocol=%s' % proto
+ url += ";name=%s" % module
+ url += ";bareclone=1;nocheckout=1;nobranch=1"
+
+ ld = d.createCopy()
+ # Not necessary to set SRC_URI, since we're passing the URI to
+ # Fetch.
+ #ld.setVar('SRC_URI', url)
+ ld.setVar('SRCREV_%s' % module, module_hash)
+
+ # Workaround for issues with SRCPV/SRCREV_FORMAT errors; the errors
+ # refer to 'multiple' repositories. Only the repository in the
+ # original SRC_URI actually matters...
+ ld.setVar('SRCPV', d.getVar('SRCPV'))
+ ld.setVar('SRCREV_FORMAT', module)
+
+ newfetch = Fetch([url], ld, cache=False)
+ newfetch.download()
+ local_paths[module] = newfetch.localpath(url)
+
+ # Correct the submodule references to the local download version...
+ runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_paths[module]}, d, workdir=ud.clonedir)
+
+ symlink_path = os.path.join(ud.clonedir, 'modules', paths[module])
+ if not os.path.exists(symlink_path):
+ try:
+ os.makedirs(os.path.dirname(symlink_path), exist_ok=True)
+ except OSError:
+ pass
+ os.symlink(local_paths[module], symlink_path)
+
+ return True
+
+ def need_update(self, ud, d):
+ main_repo_needs_update = Git.need_update(self, ud, d)
+
+ # First check that the main repository has enough history fetched. If it doesn't, then we don't
+ # even have the .gitmodules and gitlinks for the submodules to attempt asking whether the
+ # submodules' histories are recent enough.
+ if main_repo_needs_update:
+ return True
+
+ # Now check that the submodule histories are new enough. The git-submodule command doesn't have
+ # any clean interface for doing this aside from just attempting the checkout (with network
+ # fetching disabled).
+ return not self.update_submodules(ud, d)
def download(self, ud, d):
Git.download(self, ud, d)
if not ud.shallow or ud.localpath != ud.fullshallow:
- submodules = self.uses_submodules(ud, d, ud.clonedir)
- if submodules:
- self.update_submodules(ud, d)
+ self.update_submodules(ud, d)
+
+ def copy_submodules(self, submodules, ud, destdir, d):
+ if ud.bareclone:
+ repo_conf = destdir
+ else:
+ repo_conf = os.path.join(destdir, '.git')
+
+ if submodules and not os.path.exists(os.path.join(repo_conf, 'modules')):
+ os.mkdir(os.path.join(repo_conf, 'modules'))
+
+ for module, md in submodules.items():
+ srcpath = os.path.join(ud.clonedir, 'modules', md['path'])
+ modpath = os.path.join(repo_conf, 'modules', md['path'])
+
+ if os.path.exists(srcpath):
+ if os.path.exists(os.path.join(srcpath, '.git')):
+ srcpath = os.path.join(srcpath, '.git')
+
+ target = modpath
+ if os.path.exists(modpath):
+ target = os.path.dirname(modpath)
+
+ os.makedirs(os.path.dirname(target), exist_ok=True)
+ runfetchcmd("cp -fpLR %s %s" % (srcpath, target), d)
+ elif os.path.exists(modpath):
+ # Module already exists, likely unpacked from a shallow mirror clone
+ pass
+ else:
+ # This is fatal, as we do NOT want git-submodule to hit the network
+ raise bb.fetch2.FetchError('Submodule %s does not exist in %s or %s.' % (module, srcpath, modpath))
def clone_shallow_local(self, ud, dest, d):
super(GitSM, self).clone_shallow_local(ud, dest, d)
- runfetchcmd('cp -fpPRH "%s/modules" "%s/"' % (ud.clonedir, os.path.join(dest, '.git')), d)
+ # Copy over the submodules' fetched histories too.
+ repo_conf = os.path.join(dest, '.git')
+
+ submodules = []
+ for name in ud.names:
+ try:
+ gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revision), d, quiet=True, workdir=dest)
+ except:
+ # No submodules to update
+ continue
+
+ submodules = self.parse_gitmodules(gitmodules)
+ self.copy_submodules(submodules, ud, dest, d)
def unpack(self, ud, destdir, d):
Git.unpack(self, ud, destdir, d)
- if self.uses_submodules(ud, d, ud.destdir):
- runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=ud.destdir)
- runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=ud.destdir)
+ # Copy over the submodules' fetched histories too.
+ if ud.bareclone:
+ repo_conf = ud.destdir
+ else:
+ repo_conf = os.path.join(ud.destdir, '.git')
+
+ update_submodules = False
+ paths = {}
+ uris = {}
+ local_paths = {}
+ for name in ud.names:
+ try:
+ gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
+ except:
+ # No submodules to update
+ continue
+
+ submodules = self.parse_gitmodules(gitmodules)
+ self.copy_submodules(submodules, ud, ud.destdir, d)
+
+ submodules_queue = [(module, os.path.join(repo_conf, 'modules', md['path'])) for module, md in submodules.items()]
+ while len(submodules_queue) != 0:
+ module, modpath = submodules_queue.pop()
+
+ # add submodule children recursively
+ try:
+ gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=modpath)
+ for m, md in self.parse_gitmodules(gitmodules).items():
+ submodules_queue.append([m, os.path.join(modpath, 'modules', md['path'])])
+ except:
+ # no children
+ pass
+
+
+ # There are submodules to update
+ update_submodules = True
+
+ # Determine (from the submodule) the correct url to reference
+ try:
+ output = runfetchcmd("%(basecmd)s config remote.origin.url" % {'basecmd': ud.basecmd}, d, workdir=modpath)
+ except bb.fetch2.FetchError as e:
+ # No remote url defined in this submodule
+ continue
+
+ local_paths[module] = output
+
+ # Setup the local URL properly (like git submodule init or sync would do...)
+ runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_paths[module]}, d, workdir=ud.destdir)
+
+ # Ensure the submodule repository is NOT set to bare, since we're checking it out...
+ runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=modpath)
+
+ if update_submodules:
+ # Run submodule update, this sets up the directories -- without touching the config
+ runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
diff --git a/poky/bitbake/lib/bb/fetch2/hg.py b/poky/bitbake/lib/bb/fetch2/hg.py
index d0857e6..936d043 100644
--- a/poky/bitbake/lib/bb/fetch2/hg.py
+++ b/poky/bitbake/lib/bb/fetch2/hg.py
@@ -80,7 +80,7 @@
ud.fullmirror = os.path.join(d.getVar("DL_DIR"), mirrortarball)
ud.mirrortarballs = [mirrortarball]
- hgdir = d.getVar("HGDIR") or (d.getVar("DL_DIR") + "/hg/")
+ hgdir = d.getVar("HGDIR") or (d.getVar("DL_DIR") + "/hg")
ud.pkgdir = os.path.join(hgdir, hgsrcname)
ud.moddir = os.path.join(ud.pkgdir, ud.module)
ud.localfile = ud.moddir
diff --git a/poky/bitbake/lib/bb/fetch2/osc.py b/poky/bitbake/lib/bb/fetch2/osc.py
index 2b4f7d9..6c60456 100644
--- a/poky/bitbake/lib/bb/fetch2/osc.py
+++ b/poky/bitbake/lib/bb/fetch2/osc.py
@@ -32,8 +32,9 @@
ud.module = ud.parm["module"]
# Create paths to osc checkouts
+ oscdir = d.getVar("OSCDIR") or (d.getVar("DL_DIR") + "/osc")
relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(d.getVar('OSCDIR'), ud.host)
+ ud.pkgdir = os.path.join(oscdir, ud.host)
ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
if 'rev' in ud.parm:
@@ -54,7 +55,7 @@
command is "fetch", "update", "info"
"""
- basecmd = d.expand('${FETCHCMD_osc}')
+ basecmd = d.getVar("FETCHCMD_osc") or "/usr/bin/env osc"
proto = ud.parm.get('protocol', 'ocs')
diff --git a/poky/bitbake/lib/bb/fetch2/perforce.py b/poky/bitbake/lib/bb/fetch2/perforce.py
index 3debad5..903a8e6 100644
--- a/poky/bitbake/lib/bb/fetch2/perforce.py
+++ b/poky/bitbake/lib/bb/fetch2/perforce.py
@@ -43,13 +43,9 @@
provided by the env, use it. If P4PORT is specified by the recipe, use
its values, which may override the settings in P4CONFIG.
"""
- ud.basecmd = d.getVar('FETCHCMD_p4')
- if not ud.basecmd:
- ud.basecmd = "/usr/bin/env p4"
+ ud.basecmd = d.getVar("FETCHCMD_p4") or "/usr/bin/env p4"
- ud.dldir = d.getVar('P4DIR')
- if not ud.dldir:
- ud.dldir = '%s/%s' % (d.getVar('DL_DIR'), 'p4')
+ ud.dldir = d.getVar("P4DIR") or (d.getVar("DL_DIR") + "/p4")
path = ud.url.split('://')[1]
path = path.split(';')[0]
diff --git a/poky/bitbake/lib/bb/fetch2/repo.py b/poky/bitbake/lib/bb/fetch2/repo.py
index c22d9b5..8c7e818 100644
--- a/poky/bitbake/lib/bb/fetch2/repo.py
+++ b/poky/bitbake/lib/bb/fetch2/repo.py
@@ -45,6 +45,8 @@
"master".
"""
+ ud.basecmd = d.getVar("FETCHCMD_repo") or "/usr/bin/env repo"
+
ud.proto = ud.parm.get('protocol', 'git')
ud.branch = ud.parm.get('branch', 'master')
ud.manifest = ud.parm.get('manifest', 'default.xml')
@@ -60,8 +62,8 @@
logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
return
+ repodir = d.getVar("REPODIR") or (d.getVar("DL_DIR") + "/repo")
gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
- repodir = d.getVar("REPODIR") or os.path.join(d.getVar("DL_DIR"), "repo")
codir = os.path.join(repodir, gitsrcname, ud.manifest)
if ud.user:
@@ -72,11 +74,11 @@
repodir = os.path.join(codir, "repo")
bb.utils.mkdirhier(repodir)
if not os.path.exists(os.path.join(repodir, ".repo")):
- bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
- runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d, workdir=repodir)
+ bb.fetch2.check_network_access(d, "%s init -m %s -b %s -u %s://%s%s%s" % (ud.basecmd, ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
+ runfetchcmd("%s init -m %s -b %s -u %s://%s%s%s" % (ud.basecmd, ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d, workdir=repodir)
- bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
- runfetchcmd("repo sync", d, workdir=repodir)
+ bb.fetch2.check_network_access(d, "%s sync %s" % (ud.basecmd, ud.url), ud.url)
+ runfetchcmd("%s sync" % ud.basecmd, d, workdir=repodir)
scmdata = ud.parm.get("scmdata", "")
if scmdata == "keep":
diff --git a/poky/bitbake/lib/bb/fetch2/svn.py b/poky/bitbake/lib/bb/fetch2/svn.py
index 3f172ee..9dcf3eb 100644
--- a/poky/bitbake/lib/bb/fetch2/svn.py
+++ b/poky/bitbake/lib/bb/fetch2/svn.py
@@ -49,7 +49,7 @@
if not "module" in ud.parm:
raise MissingParameterError('module', ud.url)
- ud.basecmd = d.getVar('FETCHCMD_svn')
+ ud.basecmd = d.getVar("FETCHCMD_svn") or "/usr/bin/env svn --non-interactive --trust-server-cert"
ud.module = ud.parm["module"]
@@ -59,9 +59,13 @@
ud.path_spec = ud.parm["path_spec"]
# Create paths to svn checkouts
+ svndir = d.getVar("SVNDIR") or (d.getVar("DL_DIR") + "/svn")
relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(d.expand('${SVNDIR}'), ud.host, relpath)
+ ud.pkgdir = os.path.join(svndir, ud.host, relpath)
ud.moddir = os.path.join(ud.pkgdir, ud.module)
+ # Protects the repository from concurrent updates, e.g. from two
+ # recipes fetching different revisions at the same time
+ ud.svnlock = os.path.join(ud.pkgdir, "svn.lock")
ud.setup_revisions(d)
@@ -122,35 +126,40 @@
logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
- if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
- svnupdatecmd = self._buildsvncommand(ud, d, "update")
- logger.info("Update " + ud.url)
- # We need to attempt to run svn upgrade first in case its an older working format
- try:
- runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
- except FetchError:
- pass
- logger.debug(1, "Running %s", svnupdatecmd)
- bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
- runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
- else:
- svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
- logger.info("Fetch " + ud.url)
- # check out sources there
- bb.utils.mkdirhier(ud.pkgdir)
- logger.debug(1, "Running %s", svnfetchcmd)
- bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
- runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)
+ lf = bb.utils.lockfile(ud.svnlock)
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude='.svn'"
+ try:
+ if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
+ svnupdatecmd = self._buildsvncommand(ud, d, "update")
+ logger.info("Update " + ud.url)
+ # We need to attempt to run svn upgrade first in case it's an older working format
+ try:
+ runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
+ except FetchError:
+ pass
+ logger.debug(1, "Running %s", svnupdatecmd)
+ bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
+ runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
+ else:
+ svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
+ logger.info("Fetch " + ud.url)
+ # check out sources there
+ bb.utils.mkdirhier(ud.pkgdir)
+ logger.debug(1, "Running %s", svnfetchcmd)
+ bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
+ runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)
- # tar them up to a defined filename
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
- cleanup=[ud.localpath], workdir=ud.pkgdir)
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata == "keep":
+ tar_flags = ""
+ else:
+ tar_flags = "--exclude='.svn'"
+
+ # tar them up to a defined filename
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
+ cleanup=[ud.localpath], workdir=ud.pkgdir)
+ finally:
+ bb.utils.unlockfile(lf)
def clean(self, ud, d):
""" Clean SVN specific files and dirs """
diff --git a/poky/bitbake/lib/bb/msg.py b/poky/bitbake/lib/bb/msg.py
index f1723be..96f077e 100644
--- a/poky/bitbake/lib/bb/msg.py
+++ b/poky/bitbake/lib/bb/msg.py
@@ -40,6 +40,7 @@
VERBOSE = logging.INFO - 1
NOTE = logging.INFO
PLAIN = logging.INFO + 1
+ VERBNOTE = logging.INFO + 2
ERROR = logging.ERROR
WARNING = logging.WARNING
CRITICAL = logging.CRITICAL
@@ -51,6 +52,7 @@
VERBOSE: 'NOTE',
NOTE : 'NOTE',
PLAIN : '',
+ VERBNOTE: 'NOTE',
WARNING : 'WARNING',
ERROR : 'ERROR',
CRITICAL: 'ERROR',
@@ -66,6 +68,7 @@
VERBOSE : BASECOLOR,
NOTE : BASECOLOR,
PLAIN : BASECOLOR,
+ VERBNOTE: BASECOLOR,
WARNING : YELLOW,
ERROR : RED,
CRITICAL: RED,
diff --git a/poky/bitbake/lib/bb/parse/ast.py b/poky/bitbake/lib/bb/parse/ast.py
index 6690dc5..9d20c32 100644
--- a/poky/bitbake/lib/bb/parse/ast.py
+++ b/poky/bitbake/lib/bb/parse/ast.py
@@ -343,30 +343,31 @@
def finalize(fn, d, variant = None):
saved_handlers = bb.event.get_handlers().copy()
+ try:
+ for var in d.getVar('__BBHANDLERS', False) or []:
+ # try to add the handler
+ handlerfn = d.getVarFlag(var, "filename", False)
+ if not handlerfn:
+ bb.fatal("Undefined event handler function '%s'" % var)
+ handlerln = int(d.getVarFlag(var, "lineno", False))
+ bb.event.register(var, d.getVar(var, False), (d.getVarFlag(var, "eventmask") or "").split(), handlerfn, handlerln)
- for var in d.getVar('__BBHANDLERS', False) or []:
- # try to add the handler
- handlerfn = d.getVarFlag(var, "filename", False)
- if not handlerfn:
- bb.fatal("Undefined event handler function '%s'" % var)
- handlerln = int(d.getVarFlag(var, "lineno", False))
- bb.event.register(var, d.getVar(var, False), (d.getVarFlag(var, "eventmask") or "").split(), handlerfn, handlerln)
+ bb.event.fire(bb.event.RecipePreFinalise(fn), d)
- bb.event.fire(bb.event.RecipePreFinalise(fn), d)
+ bb.data.expandKeys(d)
+ runAnonFuncs(d)
- bb.data.expandKeys(d)
- runAnonFuncs(d)
+ tasklist = d.getVar('__BBTASKS', False) or []
+ bb.event.fire(bb.event.RecipeTaskPreProcess(fn, list(tasklist)), d)
+ bb.build.add_tasks(tasklist, d)
- tasklist = d.getVar('__BBTASKS', False) or []
- bb.event.fire(bb.event.RecipeTaskPreProcess(fn, list(tasklist)), d)
- bb.build.add_tasks(tasklist, d)
+ bb.parse.siggen.finalise(fn, d, variant)
- bb.parse.siggen.finalise(fn, d, variant)
+ d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))
- d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))
-
- bb.event.fire(bb.event.RecipeParsed(fn), d)
- bb.event.set_handlers(saved_handlers)
+ bb.event.fire(bb.event.RecipeParsed(fn), d)
+ finally:
+ bb.event.set_handlers(saved_handlers)
def _create_variants(datastores, names, function, onlyfinalise):
def create_variant(name, orig_d, arg = None):
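
The finalize() rework is purely structural: the body now sits in a try so the
saved event handlers are restored even when a fatal error or exception escapes
mid-parse, instead of leaking recipe-local handlers into the next parse. A
self-contained sketch of the shape (the helpers stand in for bb.event calls):

    handlers = {'active': []}

    def get_handlers():
        return handlers['active']

    def set_handlers(h):
        handlers['active'] = h

    saved = list(get_handlers())
    try:
        get_handlers().append('recipe-local-handler')
        raise RuntimeError('parse failure')
    except RuntimeError:
        pass
    finally:
        set_handlers(saved)           # restored even on the error path

    print(get_handlers())             # []
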
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index 480a851..4d5d876 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -94,13 +94,13 @@
self.active = self.active - 1
self.failed = self.failed + 1
- def taskCompleted(self, number = 1):
- self.active = self.active - number
- self.completed = self.completed + number
+ def taskCompleted(self):
+ self.active = self.active - 1
+ self.completed = self.completed + 1
- def taskSkipped(self, number = 1):
- self.active = self.active + number
- self.skipped = self.skipped + number
+ def taskSkipped(self):
+ self.active = self.active + 1
+ self.skipped = self.skipped + 1
def taskActive(self):
self.active = self.active + 1
@@ -134,6 +134,7 @@
self.prio_map = [self.rqdata.runtaskentries.keys()]
self.buildable = []
+ self.skip_maxthread = {}
self.stamps = {}
for tid in self.rqdata.runtaskentries:
(mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
@@ -150,8 +151,25 @@
self.buildable = [x for x in self.buildable if x not in self.rq.runq_running]
if not self.buildable:
return None
+
+ # Filter out tasks that have exceeded their max number of threads
+ skip_buildable = {}
+ for running in self.rq.runq_running.difference(self.rq.runq_complete):
+ rtaskname = taskname_from_tid(running)
+ if rtaskname not in self.skip_maxthread:
+ self.skip_maxthread[rtaskname] = self.rq.cfgData.getVarFlag(rtaskname, "number_threads")
+ if not self.skip_maxthread[rtaskname]:
+ continue
+ if rtaskname in skip_buildable:
+ skip_buildable[rtaskname] += 1
+ else:
+ skip_buildable[rtaskname] = 1
+
if len(self.buildable) == 1:
tid = self.buildable[0]
+ taskname = taskname_from_tid(tid)
+ if taskname in skip_buildable and skip_buildable[taskname] >= int(self.skip_maxthread[taskname]):
+ return None
stamp = self.stamps[tid]
if stamp not in self.rq.build_stamps.values():
return tid
@@ -164,6 +182,9 @@
best = None
bestprio = None
for tid in self.buildable:
+ taskname = taskname_from_tid(tid)
+ if taskname in skip_buildable and skip_buildable[taskname] >= int(self.skip_maxthread[taskname]):
+ continue
prio = self.rev_prio_map[tid]
if bestprio is None or bestprio > prio:
stamp = self.stamps[tid]
@@ -178,7 +199,7 @@
"""
Return the id of the task we should build next
"""
- if self.rq.stats.active < self.rq.number_tasks:
+ if self.rq.can_start_task():
return self.next_buildable_task()
def newbuildable(self, task):
@@ -581,6 +602,19 @@
if t in taskData[mc].taskentries:
depends.add(t)
+ def add_mc_dependencies(mc, tid):
+ mcdeps = taskData[mc].get_mcdepends()
+ for dep in mcdeps:
+ mcdependency = dep.split(':')
+ pn = mcdependency[3]
+ frommc = mcdependency[1]
+ mcdep = mcdependency[2]
+ deptask = mcdependency[4]
+ if mc == frommc:
+ fn = taskData[mcdep].build_targets[pn][0]
+ newdep = '%s:%s' % (fn,deptask)
+ taskData[mc].taskentries[tid].tdepends.append(newdep)
+
for mc in taskData:
for tid in taskData[mc].taskentries:
@@ -597,12 +631,16 @@
if fn in taskData[mc].failed_fns:
continue
+ # We add multiconfig dependencies before processing internal task deps (tdepends)
+ if 'mcdepends' in task_deps and taskname in task_deps['mcdepends']:
+ add_mc_dependencies(mc, tid)
+
# Resolve task internal dependencies
#
# e.g. addtask before X after Y
for t in taskData[mc].taskentries[tid].tdepends:
- (_, depfn, deptaskname, _) = split_tid_mcfn(t)
- depends.add(build_tid(mc, depfn, deptaskname))
+ (depmc, depfn, deptaskname, _) = split_tid_mcfn(t)
+ depends.add(build_tid(depmc, depfn, deptaskname))
# Resolve 'deptask' dependencies
#
@@ -1032,7 +1070,7 @@
msg += "\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs))
if self.warn_multi_bb:
- logger.warning(msg)
+ logger.verbnote(msg)
else:
logger.error(msg)
@@ -1040,7 +1078,7 @@
# Create a whitelist usable by the stamp checks
self.stampfnwhitelist = {}
- for mc in self.taskData:
+ for mc in self.taskData:
self.stampfnwhitelist[mc] = []
for entry in self.stampwhitelist.split():
if entry not in self.taskData[mc].build_targets:
@@ -1072,7 +1110,7 @@
bb.debug(1, "Task %s is marked nostamp, cannot invalidate this task" % taskname)
else:
logger.verbose("Invalidate task %s, %s", taskname, fn)
- bb.parse.siggen.invalidate_task(taskname, self.dataCaches[mc], fn)
+ bb.parse.siggen.invalidate_task(taskname, self.dataCaches[mc], taskfn)
self.init_progress_reporter.next_stage()
@@ -1717,6 +1755,10 @@
valid = bb.utils.better_eval(call, locs)
return valid
+ def can_start_task(self):
+ can_start = self.stats.active < self.number_tasks
+ return can_start
+
class RunQueueExecuteDummy(RunQueueExecute):
def __init__(self, rq):
self.rq = rq
@@ -1790,13 +1832,14 @@
bb.build.del_stamp(taskname, self.rqdata.dataCaches[mc], taskfn)
self.rq.scenequeue_covered.remove(tid)
- toremove = covered_remove
+ toremove = covered_remove | self.rq.scenequeue_notcovered
for task in toremove:
logger.debug(1, 'Not skipping task %s due to setsceneverify', task)
while toremove:
covered_remove = []
for task in toremove:
- removecoveredtask(task)
+ if task in self.rq.scenequeue_covered:
+ removecoveredtask(task)
for deptask in self.rqdata.runtaskentries[task].depends:
if deptask not in self.rq.scenequeue_covered:
continue
@@ -1866,14 +1909,13 @@
continue
if revdep in self.runq_buildable:
continue
- alldeps = 1
+ alldeps = True
for dep in self.rqdata.runtaskentries[revdep].depends:
if dep not in self.runq_complete:
- alldeps = 0
- if alldeps == 1:
+ alldeps = False
+ break
+ if alldeps:
self.setbuildable(revdep)
- fn = fn_from_tid(revdep)
- taskname = taskname_from_tid(revdep)
logger.debug(1, "Marking task %s as buildable", revdep)
def task_complete(self, task):
@@ -1897,8 +1939,8 @@
self.setbuildable(task)
bb.event.fire(runQueueTaskSkipped(task, self.stats, self.rq, reason), self.cfgData)
self.task_completeoutright(task)
- self.stats.taskCompleted()
self.stats.taskSkipped()
+ self.stats.taskCompleted()
def execute(self):
"""
@@ -2008,7 +2050,7 @@
self.build_stamps2.append(self.build_stamps[task])
self.runq_running.add(task)
self.stats.taskActive()
- if self.stats.active < self.number_tasks:
+ if self.can_start_task():
return True
if self.stats.active > 0:
@@ -2063,6 +2105,7 @@
# If we don't have any setscene functions, skip this step
if len(self.rqdata.runq_setscene_tids) == 0:
rq.scenequeue_covered = set()
+ rq.scenequeue_notcovered = set()
rq.state = runQueueRunInit
return
@@ -2278,10 +2321,15 @@
sq_hash.append(self.rqdata.runtaskentries[tid].hash)
sq_taskname.append(taskname)
sq_task.append(tid)
+
+ self.cooker.data.setVar("BB_SETSCENE_STAMPCURRENT_COUNT", len(stamppresent))
+
call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
valid = bb.utils.better_eval(call, locs)
+ self.cooker.data.delVar("BB_SETSCENE_STAMPCURRENT_COUNT")
+
valid_new = stamppresent
for v in valid:
valid_new.append(sq_task[v])
@@ -2343,8 +2391,8 @@
def task_failoutright(self, task):
self.runq_running.add(task)
self.runq_buildable.add(task)
- self.stats.taskCompleted()
self.stats.taskSkipped()
+ self.stats.taskCompleted()
self.scenequeue_notcovered.add(task)
self.scenequeue_updatecounters(task, True)
@@ -2352,8 +2400,8 @@
self.runq_running.add(task)
self.runq_buildable.add(task)
self.task_completeoutright(task)
- self.stats.taskCompleted()
self.stats.taskSkipped()
+ self.stats.taskCompleted()
def execute(self):
"""
@@ -2363,7 +2411,7 @@
self.rq.read_workers()
task = None
- if self.stats.active < self.number_tasks:
+ if self.can_start_task():
# Find the next setscene to run
for nexttask in self.rqdata.runq_setscene_tids:
if nexttask in self.runq_buildable and nexttask not in self.runq_running and self.stamps[nexttask] not in self.build_stamps.values():
@@ -2422,7 +2470,7 @@
self.build_stamps2.append(self.build_stamps[task])
self.runq_running.add(task)
self.stats.taskActive()
- if self.stats.active < self.number_tasks:
+ if self.can_start_task():
return True
if self.stats.active > 0:
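
Note on the BB_SETSCENE_STAMPCURRENT_COUNT change above: the variable is set only for the duration of the hash validation call, so the hook named in BB_HASHCHECK_FUNCTION can see how many setscene tasks were already satisfied by on-disk stamps. A minimal sketch of such a hook (my_hashcheck is hypothetical; OE's real implementation is sstate_checkhashes):

    # Sketch of a BB_HASHCHECK_FUNCTION-style hook; signature matches the
    # better_eval() call in the hunk above.
    def my_hashcheck(sq_fn, sq_task, sq_hash, sq_hashfn, d):
        # Number of setscene tasks already covered by existing stamps,
        # published by the runqueue for the duration of this call only.
        stamped = d.getVar("BB_SETSCENE_STAMPCURRENT_COUNT") or 0
        # A real hook would probe the sstate cache per task; here we
        # accept everything. 'stamped' lets progress be reported as a
        # fraction of all tasks, not just the unstamped ones.
        return list(range(len(sq_fn)))
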
diff --git a/poky/bitbake/lib/bb/server/process.py b/poky/bitbake/lib/bb/server/process.py
index 0749b5b..42d7907 100644
--- a/poky/bitbake/lib/bb/server/process.py
+++ b/poky/bitbake/lib/bb/server/process.py
@@ -377,11 +377,12 @@
if os.path.exists(sockname):
os.unlink(sockname)
+ # Place the log in the build directory alongside the lock file

+ logfile = os.path.join(os.path.dirname(self.bitbake_lock.name), "bitbake-cookerdaemon.log")
+
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
# AF_UNIX has path length issues so chdir here to workaround
cwd = os.getcwd()
- logfile = os.path.join(cwd, "bitbake-cookerdaemon.log")
-
try:
os.chdir(os.path.dirname(sockname))
self.sock.bind(os.path.basename(sockname))
@@ -394,11 +395,16 @@
bb.daemonize.createDaemon(self._startServer, logfile)
self.sock.close()
self.bitbake_lock.close()
+ os.close(self.readypipein)
ready = ConnectionReader(self.readypipe)
r = ready.poll(30)
if r:
- r = ready.get()
+ try:
+ r = ready.get()
+ except EOFError:
+ # Trap the child exiting/closing the pipe and error out
+ r = None
if not r or r != "ready":
ready.close()
bb.error("Unable to start bitbake server")
@@ -424,7 +430,6 @@
bb.error("Server log for this session (%s):\n%s" % (logfile, "".join(lines)))
raise SystemExit(1)
ready.close()
- os.close(self.readypipein)
def _startServer(self):
print(self.start_log_format % (os.getpid(), datetime.datetime.now().strftime(self.start_log_datetime_format)))
@@ -432,15 +437,11 @@
server = ProcessServer(self.bitbake_lock, self.sock, self.sockname)
self.configuration.setServerRegIdleCallback(server.register_idle_function)
+ os.close(self.readypipe)
writer = ConnectionWriter(self.readypipein)
- try:
- self.cooker = bb.cooker.BBCooker(self.configuration, self.featureset)
- writer.send("ready")
- except:
- writer.send("fail")
- raise
- finally:
- os.close(self.readypipein)
+ self.cooker = bb.cooker.BBCooker(self.configuration, self.featureset)
+ writer.send("ready")
+ writer.close()
server.cooker = self.cooker
server.server_timeout = self.configuration.server_timeout
server.xmlrpcinterface = self.configuration.xmlrpcinterface
@@ -455,9 +456,6 @@
# AF_UNIX has path length issues so chdir here to workaround
cwd = os.getcwd()
- readfd = writefd = readfd1 = writefd1 = readfd2 = writefd2 = None
- eq = command_chan_recv = command_chan = None
-
try:
try:
os.chdir(os.path.dirname(sockname))
@@ -465,6 +463,9 @@
finally:
os.chdir(cwd)
+ readfd = writefd = readfd1 = writefd1 = readfd2 = writefd2 = None
+ eq = command_chan_recv = command_chan = None
+
# Send an fd for the remote to write events to
readfd, writefd = os.pipe()
eq = BBUIEventQueue(readfd)
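
The process.py changes above tighten the readiness handshake: the parent now closes its copy of the write end before polling, so an early child exit surfaces as EOF instead of a hang. A stripped-down sketch of the same pattern using plain os.pipe/os.fork rather than bitbake's ConnectionReader/ConnectionWriter:

    import os, select

    readfd, writefd = os.pipe()
    pid = os.fork()
    if pid == 0:                    # child: the would-be daemon
        os.close(readfd)            # child only writes
        # ... daemon setup would happen here ...
        os.write(writefd, b"ready")
        os.close(writefd)
        os._exit(0)

    os.close(writefd)               # parent only reads; without this, EOF never arrives
    r, _, _ = select.select([readfd], [], [], 30)   # bounded wait, like ready.poll(30)
    status = os.read(readfd, 5) if r else b""       # empty read == EOF == child died early
    os.close(readfd)
    os.waitpid(pid, 0)
    if status != b"ready":
        raise SystemExit("Unable to start server")
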
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index 5ef82d7..fdbb2a3 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -110,42 +110,13 @@
ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1')
tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d)
- taskdeps = {}
- basehash = {}
+ taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basewhitelist, fn)
for task in tasklist:
- data = lookupcache[task]
-
- if data is None:
- bb.error("Task %s from %s seems to be empty?!" % (task, fn))
- data = ''
-
- gendeps[task] -= self.basewhitelist
- newdeps = gendeps[task]
- seen = set()
- while newdeps:
- nextdeps = newdeps
- seen |= nextdeps
- newdeps = set()
- for dep in nextdeps:
- if dep in self.basewhitelist:
- continue
- gendeps[dep] -= self.basewhitelist
- newdeps |= gendeps[dep]
- newdeps -= seen
-
- alldeps = sorted(seen)
- for dep in alldeps:
- data = data + dep
- var = lookupcache[dep]
- if var is not None:
- data = data + str(var)
- datahash = hashlib.md5(data.encode("utf-8")).hexdigest()
k = fn + "." + task
- if not ignore_mismatch and k in self.basehash and self.basehash[k] != datahash:
- bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (k, self.basehash[k], datahash))
- self.basehash[k] = datahash
- taskdeps[task] = alldeps
+ if not ignore_mismatch and k in self.basehash and self.basehash[k] != basehash[k]:
+ bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (k, self.basehash[k], basehash[k]))
+ self.basehash[k] = basehash[k]
self.taskdeps[fn] = taskdeps
self.gendeps[fn] = gendeps
@@ -193,19 +164,28 @@
return taint
def get_taskhash(self, fn, task, deps, dataCache):
+
+ mc = ''
+ if fn.startswith('multiconfig:'):
+ mc = fn.split(':')[1]
k = fn + "." + task
+
data = dataCache.basetaskhash[k]
self.basehash[k] = data
self.runtaskdeps[k] = []
self.file_checksum_values[k] = []
recipename = dataCache.pkg_fn[fn]
-
for dep in sorted(deps, key=clean_basepath):
- depname = dataCache.pkg_fn[self.pkgnameextract.search(dep).group('fn')]
+ pkgname = self.pkgnameextract.search(dep).group('fn')
+ if mc:
+ depmc = pkgname.split(':')[1]
+ if mc != depmc:
+ continue
+ depname = dataCache.pkg_fn[pkgname]
if not self.rundep_check(fn, recipename, task, dep, depname, dataCache):
continue
if dep not in self.taskhash:
- bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?", dep)
+ bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
data = data + self.taskhash[dep]
self.runtaskdeps[k].append(dep)
@@ -347,7 +327,7 @@
def stampcleanmask(self, stampbase, fn, taskname, extrainfo):
return self.stampfile(stampbase, fn, taskname, extrainfo, clean=True)
-
+
def invalidate_task(self, task, d, fn):
bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task))
bb.build.write_taint(task, d, fn)
@@ -362,10 +342,10 @@
def init_colors(enable_color):
"""Initialise colour dict for passing to compare_sigfiles()"""
# First set up the colours
- colors = {'color_title': '\033[1;37;40m',
- 'color_default': '\033[0;37;40m',
- 'color_add': '\033[1;32;40m',
- 'color_remove': '\033[1;31;40m',
+ colors = {'color_title': '\033[1m',
+ 'color_default': '\033[0m',
+ 'color_add': '\033[0;32m',
+ 'color_remove': '\033[0;31m',
}
# Leave all keys present but clear the values
if not enable_color:
@@ -636,7 +616,7 @@
if collapsed:
output.extend(recout)
else:
- # If a dependent hash changed, might as well print the line above and then defer to the changes in
+ # If a dependent hash changed, might as well print the line above and then defer to the changes in
# that hash since in all likelihood, they're the same changes this task also saw.
output = [output[-1]] + recout
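
The get_taskhash change above makes cross-multiconfig dependencies invisible to a task's hash. A small sketch of the filter it applies, assuming dependencies of a multiconfig recipe carry the same 'multiconfig:<mc>:' prefix:

    # Sketch only: True when dep_pkgname should be skipped because it
    # lives in a different multiconfig than the recipe fn.
    def cross_mc(fn, dep_pkgname):
        if not fn.startswith('multiconfig:'):
            return False            # filter is only applied for mc recipes
        mc = fn.split(':')[1]
        return dep_pkgname.split(':')[1] != mc

    # e.g. cross_mc('multiconfig:musl:/p/a.bb', 'multiconfig:glibc:/p/b.bb') -> True
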
diff --git a/poky/bitbake/lib/bb/taskdata.py b/poky/bitbake/lib/bb/taskdata.py
index 0ea6c0b..94e822c 100644
--- a/poky/bitbake/lib/bb/taskdata.py
+++ b/poky/bitbake/lib/bb/taskdata.py
@@ -70,6 +70,8 @@
self.skiplist = skiplist
+ self.mcdepends = []
+
def add_tasks(self, fn, dataCache):
"""
Add tasks for a given fn to the database
@@ -88,6 +90,13 @@
self.add_extra_deps(fn, dataCache)
+ def add_mcdepends(task):
+ for dep in task_deps['mcdepends'][task].split():
+ if len(dep.split(':')) != 5:
+ bb.msg.fatal("TaskData", "Error for %s:%s[%s], multiconfig dependency %s does not contain exactly four ':' characters.\n Task '%s' should be specified in the form 'multiconfig:fromMC:toMC:packagename:task'" % (fn, task, 'mcdepends', dep, 'mcdepends'))
+ if dep not in self.mcdepends:
+ self.mcdepends.append(dep)
+
# Common code for dep_name/depends = 'depends'/idepends and 'rdepends'/irdepends
def handle_deps(task, dep_name, depends, seen):
if dep_name in task_deps and task in task_deps[dep_name]:
@@ -110,16 +119,20 @@
parentids = []
for dep in task_deps['parents'][task]:
if dep not in task_deps['tasks']:
- bb.debug(2, "Not adding dependeny of %s on %s since %s does not exist" % (task, dep, dep))
+ bb.debug(2, "Not adding dependency of %s on %s since %s does not exist" % (task, dep, dep))
continue
parentid = "%s:%s" % (fn, dep)
parentids.append(parentid)
self.taskentries[tid].tdepends.extend(parentids)
+
# Touch all intertask dependencies
handle_deps(task, 'depends', self.taskentries[tid].idepends, self.seen_build_target)
handle_deps(task, 'rdepends', self.taskentries[tid].irdepends, self.seen_run_target)
+ if 'mcdepends' in task_deps and task in task_deps['mcdepends']:
+ add_mcdepends(task)
+
# Work out build dependencies
if not fn in self.depids:
dependids = set()
@@ -537,6 +550,9 @@
provmap[name] = provider[0]
return provmap
+ def get_mcdepends(self):
+ return self.mcdepends
+
def dump_data(self):
"""
Dump some debug information on the internal data structures
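
The mcdepends entries validated above use five ':'-separated fields. A sketch of a standalone parser for the same format (parse_mcdepend is illustrative, not a bitbake API):

    # Accepted form: 'multiconfig:fromMC:toMC:packagename:task',
    # i.e. exactly four ':' separators / five fields.
    def parse_mcdepend(dep):
        fields = dep.split(':')
        if len(fields) != 5:
            raise ValueError("bad mcdepends entry: %s" % dep)
        _, from_mc, to_mc, pn, task = fields
        return from_mc, to_mc, pn, task

    # e.g. parse_mcdepend('multiconfig::x86:core-image-minimal:do_rootfs')
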
diff --git a/poky/bitbake/lib/bb/tests/cooker.py b/poky/bitbake/lib/bb/tests/cooker.py
new file mode 100644
index 0000000..2b44236
--- /dev/null
+++ b/poky/bitbake/lib/bb/tests/cooker.py
@@ -0,0 +1,83 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# BitBake Tests for cooker.py
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+
+import unittest
+import tempfile
+import os
+import bb, bb.cooker
+import re
+import logging
+
+# Cooker tests
+class CookerTest(unittest.TestCase):
+ def setUp(self):
+ # At least one variable needs to be set
+ self.d = bb.data.init()
+ topdir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "testdata/cooker")
+ self.d.setVar('TOPDIR', topdir)
+
+ def test_CookerCollectFiles_sublayers(self):
+ '''Test that a sublayer of an existing layer does not trigger
+ No bb files matched ...'''
+
+ def append_collection(topdir, path, d):
+ collection = path.split('/')[-1]
+ pattern = "^" + topdir + "/" + path + "/"
+ regex = re.compile(pattern)
+ priority = 5
+
+ d.setVar('BBFILE_COLLECTIONS', (d.getVar('BBFILE_COLLECTIONS') or "") + " " + collection)
+ d.setVar('BBFILE_PATTERN_%s' % (collection), pattern)
+ d.setVar('BBFILE_PRIORITY_%s' % (collection), priority)
+
+ return (collection, pattern, regex, priority)
+
+ topdir = self.d.getVar("TOPDIR")
+
+ # Priorities: list of (collection, pattern, regex, priority)
+ bbfile_config_priorities = []
+ # Order is important for this test, shortest to longest is typical failure case
+ bbfile_config_priorities.append( append_collection(topdir, 'first', self.d) )
+ bbfile_config_priorities.append( append_collection(topdir, 'second', self.d) )
+ bbfile_config_priorities.append( append_collection(topdir, 'second/third', self.d) )
+
+ pkgfns = [ topdir + '/first/recipes/sample1_1.0.bb',
+ topdir + '/second/recipes/sample2_1.0.bb',
+ topdir + '/second/third/recipes/sample3_1.0.bb' ]
+
+ class LogHandler(logging.Handler):
+ def __init__(self):
+ logging.Handler.__init__(self)
+ self.logdata = []
+
+ def emit(self, record):
+ self.logdata.append(record.getMessage())
+
+ # Move cooker to use my special logging
+ logger = bb.cooker.logger
+ log_handler = LogHandler()
+ logger.addHandler(log_handler)
+ collection = bb.cooker.CookerCollectFiles(bbfile_config_priorities)
+ collection.collection_priorities(pkgfns, self.d)
+ logger.removeHandler(log_handler)
+
+ # Should be empty (no generated messages)
+ expected = []
+
+ self.assertEqual(log_handler.logdata, expected)
diff --git a/poky/bitbake/lib/bb/tests/data.py b/poky/bitbake/lib/bb/tests/data.py
index a4a9dd3..db3e201 100644
--- a/poky/bitbake/lib/bb/tests/data.py
+++ b/poky/bitbake/lib/bb/tests/data.py
@@ -281,7 +281,7 @@
def test_remove(self):
self.d.setVar("TEST", "${VAL} ${BAR}")
self.d.setVar("TEST_remove", "val")
- self.assertEqual(self.d.getVar("TEST"), "bar")
+ self.assertEqual(self.d.getVar("TEST"), " bar")
def test_remove_cleared(self):
self.d.setVar("TEST", "${VAL} ${BAR}")
@@ -300,7 +300,7 @@
self.d.setVar("TEST", "${VAL} ${BAR}")
self.d.setVar("TEST_remove", "val")
self.d.setVar("TEST_TEST", "${TEST} ${TEST}")
- self.assertEqual(self.d.getVar("TEST_TEST"), "bar bar")
+ self.assertEqual(self.d.getVar("TEST_TEST"), " bar  bar")
def test_empty_remove(self):
self.d.setVar("TEST", "")
@@ -311,13 +311,25 @@
self.d.setVar("BAR", "Z")
self.d.setVar("TEST", "${BAR}/X Y")
self.d.setVar("TEST_remove", "${BAR}/X")
- self.assertEqual(self.d.getVar("TEST"), "Y")
+ self.assertEqual(self.d.getVar("TEST"), " Y")
def test_remove_expansion_items(self):
self.d.setVar("TEST", "A B C D")
self.d.setVar("BAR", "B D")
self.d.setVar("TEST_remove", "${BAR}")
- self.assertEqual(self.d.getVar("TEST"), "A C")
+ self.assertEqual(self.d.getVar("TEST"), "A  C ")
+
+ def test_remove_preserve_whitespace(self):
+ # When the removal isn't active, the original value should be preserved
+ self.d.setVar("TEST", " A B")
+ self.d.setVar("TEST_remove", "C")
+ self.assertEqual(self.d.getVar("TEST"), " A B")
+
+ def test_remove_preserve_whitespace2(self):
+ # When the removal is active preserve the whitespace
+ self.d.setVar("TEST", " A B")
+ self.d.setVar("TEST_remove", "B")
+ self.assertEqual(self.d.getVar("TEST"), " A ")
class TestOverrides(unittest.TestCase):
def setUp(self):
@@ -374,6 +386,15 @@
self.d.setVar("OVERRIDES", "foo:bar:some_val")
self.assertEqual(self.d.getVar("TEST"), "testvalue3")
+ def test_remove_with_override(self):
+ self.d.setVar("TEST_bar", "testvalue2")
+ self.d.setVar("TEST_some_val", "testvalue3 testvalue5")
+ self.d.setVar("TEST_some_val_remove", "testvalue3")
+ self.d.setVar("TEST_foo", "testvalue4")
+ self.d.setVar("OVERRIDES", "foo:bar:some_val")
+ self.assertEqual(self.d.getVar("TEST"), " testvalue5")
+
+
class TestKeyExpansion(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()
@@ -443,6 +464,54 @@
self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x y z", True, False, self.d))
+class TaskHash(unittest.TestCase):
+ def test_taskhashes(self):
+ def gettask_bashhash(taskname, d):
+ tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d)
+ taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, set(), "somefile")
+ bb.warn(str(lookupcache))
+ return basehash["somefile." + taskname]
+
+ d = bb.data.init()
+ d.setVar("__BBTASKS", ["mytask"])
+ d.setVar("__exportlist", [])
+ d.setVar("mytask", "${MYCOMMAND}")
+ d.setVar("MYCOMMAND", "${VAR}; foo; bar; exit 0")
+ d.setVar("VAR", "val")
+ orighash = gettask_bashhash("mytask", d)
+
+ # Changing a variable should change the hash
+ d.setVar("VAR", "val2")
+ nexthash = gettask_bashhash("mytask", d)
+ self.assertNotEqual(orighash, nexthash)
+
+ d.setVar("VAR", "val")
+ # Adding an inactive removal shouldn't change the hash
+ d.setVar("BAR", "notbar")
+ d.setVar("MYCOMMAND_remove", "${BAR}")
+ nexthash = gettask_bashhash("mytask", d)
+ self.assertEqual(orighash, nexthash)
+
+ # Adding an active removal should change the hash
+ d.setVar("BAR", "bar;")
+ nexthash = gettask_bashhash("mytask", d)
+ self.assertNotEqual(orighash, nexthash)
+
+ # Setup an inactive contains()
+ d.setVar("VAR", "${@bb.utils.contains('VAR2', 'A', 'val', '', d)}")
+ orighash = gettask_bashhash("mytask", d)
+
+ # Activate the contains() and the hash should change
+ d.setVar("VAR2", "A")
+ nexthash = gettask_bashhash("mytask", d)
+ self.assertNotEqual(orighash, nexthash)
+
+ # The contains should be inactive but even though VAR2 has a
+ # different value the hash should match the original
+ d.setVar("VAR2", "B")
+ nexthash = gettask_bashhash("mytask", d)
+ self.assertEqual(orighash, nexthash)
+
class Serialize(unittest.TestCase):
def test_serialize(self):
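
The updated expectations above pin down the new _remove semantics: matched words are dropped, but every whitespace separator survives. A sketch of that behaviour outside the datastore (remove_words is illustrative only):

    import re

    def remove_words(value, removes):
        # Split while keeping each whitespace separator as its own token,
        # then drop only the tokens that match a removed word.
        removeset = set(removes.split())
        return ''.join(v for v in re.split(r'(\s)', value) if v not in removeset)

    # remove_words("val bar", "val")  -> " bar"
    # remove_words("A B C D", "B D")  -> "A  C "
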
diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py
index 68eb388..6848095 100644
--- a/poky/bitbake/lib/bb/tests/fetch.py
+++ b/poky/bitbake/lib/bb/tests/fetch.py
@@ -402,6 +402,12 @@
: "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http")
: "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
+ ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org")
+ : "http://somewhere2.org/somefile_1.2.3.tar.gz",
+ ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/")
+ : "http://somewhere2.org/somefile_1.2.3.tar.gz",
+ ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://git.openembedded.org/bitbake;protocol=http")
+ : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
#Renaming files doesn't work
#("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz"
@@ -457,6 +463,124 @@
'https://BBBB/B/B/B/bitbake/bitbake-1.0.tar.gz',
'http://AAAA/A/A/A/B/B/bitbake/bitbake-1.0.tar.gz'])
+
+class GitDownloadDirectoryNamingTest(FetcherTest):
+ def setUp(self):
+ super(GitDownloadDirectoryNamingTest, self).setUp()
+ self.recipe_url = "git://git.openembedded.org/bitbake"
+ self.recipe_dir = "git.openembedded.org.bitbake"
+ self.mirror_url = "git://github.com/openembedded/bitbake.git"
+ self.mirror_dir = "github.com.openembedded.bitbake.git"
+
+ self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
+
+ def setup_mirror_rewrite(self):
+ self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n")
+
+ @skipIfNoNetwork()
+ def test_that_directory_is_named_after_recipe_url_when_no_mirroring_is_used(self):
+ self.setup_mirror_rewrite()
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+
+ fetcher.download()
+
+ dir = os.listdir(self.dldir + "/git2")
+ self.assertIn(self.recipe_dir, dir)
+
+ @skipIfNoNetwork()
+ def test_that_directory_exists_for_mirrored_url_and_recipe_url_when_mirroring_is_used(self):
+ self.setup_mirror_rewrite()
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+
+ fetcher.download()
+
+ dir = os.listdir(self.dldir + "/git2")
+ self.assertIn(self.mirror_dir, dir)
+ self.assertIn(self.recipe_dir, dir)
+
+ @skipIfNoNetwork()
+ def test_that_recipe_directory_and_mirrored_directory_exists_when_mirroring_is_used_and_the_mirrored_directory_already_exists(self):
+ self.setup_mirror_rewrite()
+ fetcher = bb.fetch.Fetch([self.mirror_url], self.d)
+ fetcher.download()
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+
+ fetcher.download()
+
+ dir = os.listdir(self.dldir + "/git2")
+ self.assertIn(self.mirror_dir, dir)
+ self.assertIn(self.recipe_dir, dir)
+
+
+class TarballNamingTest(FetcherTest):
+ def setUp(self):
+ super(TarballNamingTest, self).setUp()
+ self.recipe_url = "git://git.openembedded.org/bitbake"
+ self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
+ self.mirror_url = "git://github.com/openembedded/bitbake.git"
+ self.mirror_tarball = "git2_github.com.openembedded.bitbake.git.tar.gz"
+
+ self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
+ self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
+
+ def setup_mirror_rewrite(self):
+ self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n")
+
+ @skipIfNoNetwork()
+ def test_that_the_recipe_tarball_is_created_when_no_mirroring_is_used(self):
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+
+ fetcher.download()
+
+ dir = os.listdir(self.dldir)
+ self.assertIn(self.recipe_tarball, dir)
+
+ @skipIfNoNetwork()
+ def test_that_the_mirror_tarball_is_created_when_mirroring_is_used(self):
+ self.setup_mirror_rewrite()
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+
+ fetcher.download()
+
+ dir = os.listdir(self.dldir)
+ self.assertIn(self.mirror_tarball, dir)
+
+
+class GitShallowTarballNamingTest(FetcherTest):
+ def setUp(self):
+ super(GitShallowTarballNamingTest, self).setUp()
+ self.recipe_url = "git://git.openembedded.org/bitbake"
+ self.recipe_tarball = "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz"
+ self.mirror_url = "git://github.com/openembedded/bitbake.git"
+ self.mirror_tarball = "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz"
+
+ self.d.setVar('BB_GIT_SHALLOW', '1')
+ self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1')
+ self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
+
+ def setup_mirror_rewrite(self):
+ self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n")
+
+ @skipIfNoNetwork()
+ def test_that_the_tarball_is_named_after_recipe_url_when_no_mirroring_is_used(self):
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+
+ fetcher.download()
+
+ dir = os.listdir(self.dldir)
+ self.assertIn(self.recipe_tarball, dir)
+
+ @skipIfNoNetwork()
+ def test_that_the_mirror_tarball_is_created_when_mirroring_is_used(self):
+ self.setup_mirror_rewrite()
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+
+ fetcher.download()
+
+ dir = os.listdir(self.dldir)
+ self.assertIn(self.mirror_tarball, dir)
+
+
class FetcherLocalTest(FetcherTest):
def setUp(self):
def touch(fn):
@@ -745,27 +869,27 @@
self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url)
@skipIfNoNetwork()
- def test_gitfetch_premirror(self):
- url1 = "git://git.openembedded.org/bitbake"
- url2 = "git://someserver.org/bitbake"
+ def test_gitfetch_finds_local_tarball_for_mirrored_url_when_previous_downloaded_by_the_recipe_url(self):
+ recipeurl = "git://git.openembedded.org/bitbake"
+ mirrorurl = "git://someserver.org/bitbake"
self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n")
- self.gitfetcher(url1, url2)
+ self.gitfetcher(recipeurl, mirrorurl)
@skipIfNoNetwork()
- def test_gitfetch_premirror2(self):
- url1 = url2 = "git://someserver.org/bitbake"
+ def test_gitfetch_finds_local_tarball_when_previous_downloaded_from_a_premirror(self):
+ recipeurl = "git://someserver.org/bitbake"
self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n")
- self.gitfetcher(url1, url2)
+ self.gitfetcher(recipeurl, recipeurl)
@skipIfNoNetwork()
- def test_gitfetch_premirror3(self):
+ def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self):
realurl = "git://git.openembedded.org/bitbake"
- dummyurl = "git://someserver.org/bitbake"
+ recipeurl = "git://someserver.org/bitbake"
self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git")
os.chdir(self.tempdir)
bb.process.run("git clone %s %s 2> /dev/null" % (realurl, self.sourcedir), shell=True)
- self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file \n" % (dummyurl, self.sourcedir))
- self.gitfetcher(dummyurl, dummyurl)
+ self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file \n" % (recipeurl, self.sourcedir))
+ self.gitfetcher(recipeurl, recipeurl)
@skipIfNoNetwork()
def test_git_submodule(self):
@@ -832,7 +956,7 @@
# decodeurl and we need to handle them
decodedata = datatable.copy()
decodedata.update({
- "http://somesite.net;someparam=1": ('http', 'somesite.net', '', '', '', {'someparam': '1'}),
+ "http://somesite.net;someparam=1": ('http', 'somesite.net', '/', '', '', {'someparam': '1'}),
})
def test_decodeurl(self):
@@ -861,12 +985,12 @@
("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
: "1.4.0",
# combination version pattern
- ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof;protocol=https", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
+ ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
: "1.2.0",
("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "")
: "2014.01",
# version pattern "yyyymmdd"
- ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info;protocol=https", "4ed19e11c2975105b71b956440acdb25d46a347d", "")
+ ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https", "4ed19e11c2975105b71b956440acdb25d46a347d", "")
: "20120614",
# packages with a valid UPSTREAM_CHECK_GITTAGREGEX
("xf86-video-omap", "git://anongit.freedesktop.org/xorg/driver/xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
@@ -1338,6 +1462,9 @@
smdir = os.path.join(self.tempdir, 'gitsubmodule')
bb.utils.mkdirhier(smdir)
self.git('init', cwd=smdir)
+ # Make this look like it was cloned from a remote...
+ self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir)
+ self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir)
self.add_empty_file('asub', cwd=smdir)
self.git('submodule init', cwd=self.srcdir)
@@ -1571,3 +1698,29 @@
self.assertNotEqual(orig_revs, revs)
self.assertRefs(['master', 'origin/master'])
self.assertRevCount(orig_revs - 1758)
+
+ def test_that_unpack_throws_an_error_when_the_git_clone_nor_shallow_tarball_exist(self):
+ self.add_empty_file('a')
+ fetcher, ud = self.fetch()
+ bb.utils.remove(self.gitdir, recurse=True)
+ bb.utils.remove(self.dldir, recurse=True)
+
+ with self.assertRaises(bb.fetch2.UnpackError) as context:
+ fetcher.unpack(self.d.getVar('WORKDIR'))
+
+ self.assertIn("No up to date source found", context.exception.msg)
+ self.assertIn("clone directory not available or not up to date", context.exception.msg)
+
+ @skipIfNoNetwork()
+ def test_that_unpack_does_work_when_using_git_shallow_tarball_but_tarball_is_not_available(self):
+ self.d.setVar('SRCREV', 'e5939ff608b95cdd4d0ab0e1935781ab9a276ac0')
+ self.d.setVar('BB_GIT_SHALLOW', '1')
+ self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1')
+ fetcher = bb.fetch.Fetch(["git://git.yoctoproject.org/fstests"], self.d)
+ fetcher.download()
+
+ bb.utils.remove(self.dldir + "/*.tar.gz")
+ fetcher.unpack(self.unpackdir)
+
+ dir = os.listdir(self.unpackdir + "/git/")
+ self.assertIn("fstests.doap", dir)
diff --git a/poky/bitbake/lib/bb/tests/parse.py b/poky/bitbake/lib/bb/tests/parse.py
index 8f16ba4..1bc4740 100644
--- a/poky/bitbake/lib/bb/tests/parse.py
+++ b/poky/bitbake/lib/bb/tests/parse.py
@@ -44,9 +44,13 @@
"""
def setUp(self):
+ self.origdir = os.getcwd()
self.d = bb.data.init()
bb.parse.siggen = bb.siggen.init(self.d)
+ def tearDown(self):
+ os.chdir(self.origdir)
+
def parsehelper(self, content, suffix = ".bb"):
f = tempfile.NamedTemporaryFile(suffix = suffix)
diff --git a/poky/bitbake/lib/bb/ui/buildinfohelper.py b/poky/bitbake/lib/bb/ui/buildinfohelper.py
index 524a5b0..31323d2 100644
--- a/poky/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/poky/bitbake/lib/bb/ui/buildinfohelper.py
@@ -1603,14 +1603,14 @@
mockevent.lineno = -1
self.store_log_event(mockevent)
- def store_log_event(self, event):
+ def store_log_event(self, event, cli_backlog=True):
self._ensure_build()
if event.levelno < formatter.WARNING:
return
# early return for CLI builds
- if self.brbe is None:
+ if cli_backlog and self.brbe is None:
if not 'backlog' in self.internal_state:
self.internal_state['backlog'] = []
self.internal_state['backlog'].append(event)
@@ -1622,7 +1622,7 @@
tempevent = self.internal_state['backlog'].pop()
logger.debug(1, "buildinfohelper: Saving stored event %s "
% tempevent)
- self.store_log_event(tempevent)
+ self.store_log_event(tempevent, cli_backlog)
else:
logger.info("buildinfohelper: All events saved")
del self.internal_state['backlog']
@@ -1987,7 +1987,8 @@
if 'backlog' in self.internal_state:
# we save missed events in the database for the current build
tempevent = self.internal_state['backlog'].pop()
- self.store_log_event(tempevent)
+ # Do not skip command line build events
+ self.store_log_event(tempevent, False)
if not connection.features.autocommits_when_autocommit_is_off:
transaction.set_autocommit(True)
diff --git a/poky/bitbake/lib/bb/ui/taskexp.py b/poky/bitbake/lib/bb/ui/taskexp.py
index 0e8e9d4..8305d70 100644
--- a/poky/bitbake/lib/bb/ui/taskexp.py
+++ b/poky/bitbake/lib/bb/ui/taskexp.py
@@ -103,9 +103,16 @@
self.pkg_treeview.get_selection().connect("changed", self.on_cursor_changed)
column = Gtk.TreeViewColumn("Package", Gtk.CellRendererText(), text=COL_PKG_NAME)
self.pkg_treeview.append_column(column)
- pane.add1(scrolled)
scrolled.add(self.pkg_treeview)
+ self.search_entry = Gtk.SearchEntry.new()
+ self.pkg_treeview.set_search_entry(self.search_entry)
+
+ left_panel = Gtk.VPaned()
+ left_panel.add(self.search_entry)
+ left_panel.add(scrolled)
+ pane.add1(left_panel)
+
box = Gtk.VBox(homogeneous=True, spacing=4)
# Task Depends
@@ -129,6 +136,7 @@
pane.add2(box)
self.show_all()
+ self.search_entry.grab_focus()
def on_package_activated(self, treeview, path, column, data_col):
model = treeview.get_model()
diff --git a/poky/bitbake/lib/bb/utils.py b/poky/bitbake/lib/bb/utils.py
index 2ff7e82..73b6cb4 100644
--- a/poky/bitbake/lib/bb/utils.py
+++ b/poky/bitbake/lib/bb/utils.py
@@ -497,7 +497,11 @@
if statinfo.st_ino == statinfo2.st_ino:
return lf
lf.close()
- except Exception:
+ except OSError as e:
+ if e.errno == errno.EACCES:
+ logger.error("Unable to acquire lock '%s', %s",
+ name, e.strerror)
+ sys.exit(1)
try:
lf.close()
except Exception:
@@ -906,6 +910,23 @@
newmtime = sstat[stat.ST_MTIME]
return newmtime
+def break_hardlinks(src, sstat = None):
+ """
+ Ensures src is the only hardlink to this file. Other hardlinks,
+ if any, are not affected (other than in their st_nlink value, of
+ course). Returns true on success and false on failure.
+
+ """
+ try:
+ if not sstat:
+ sstat = os.lstat(src)
+ except Exception as e:
+ logger.warning("break_hardlinks: stat of %s failed (%s)" % (src, e))
+ return False
+ if sstat[stat.ST_NLINK] == 1:
+ return True
+ return copyfile(src, src, sstat=sstat)
+
def which(path, item, direction = 0, history = False, executable=False):
"""
Locate `item` in the list of paths `path` (colon separated string like $PATH).
@@ -1290,7 +1311,7 @@
return updated
-def edit_bblayers_conf(bblayers_conf, add, remove):
+def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
"""Edit bblayers.conf, adding and/or removing layers
Parameters:
bblayers_conf: path to bblayers.conf file to edit
@@ -1298,6 +1319,8 @@
list to add nothing
remove: layer path (or list of layer paths) to remove; None or
empty list to remove nothing
+ edit_cb: optional callback function that will be called after
+ processing adds/removes once per existing entry.
Returns a tuple:
notadded: list of layers specified to be added but weren't
(because they were already in the list)
@@ -1361,6 +1384,17 @@
bblayers.append(addlayer)
del addlayers[:]
+ if edit_cb:
+ newlist = []
+ for layer in bblayers:
+ res = edit_cb(layer, canonicalise_path(layer))
+ if res != layer:
+ newlist.append(res)
+ updated = True
+ else:
+ newlist.append(layer)
+ bblayers = newlist
+
if updated:
if op == '+=' and not bblayers:
bblayers = None
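
The new edit_cb hook is called once per surviving entry with the raw and canonicalised layer paths, and a changed return value replaces the entry. A hypothetical usage sketch (the paths and the relocate callback are invented for illustration):

    import bb.utils

    def relocate(layer, canonical_path):
        # Rewrite layer paths that moved; leave everything else untouched.
        if canonical_path.startswith('/old/checkout/'):
            return layer.replace('/old/checkout/', '/new/checkout/')
        return layer

    notadded, notremoved = bb.utils.edit_bblayers_conf(
        'build/conf/bblayers.conf', None, None, edit_cb=relocate)
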