bitbake: runqueue: Print pseudo.log if fakeroot task failed

Currently, if pseudo fails we only see the path to pseudo.log in the error output. If
we have no access to the build server and can only rely on the bitbake log,
debugging becomes impossible. The printing has to happen at the runqueue
level, not inside task execution, because in some cases a task fails with a
pseudo abort so early that no task log is written at all.

In this change I'm adding pseudo log printing for every failed fakeroot task
whose log contains `mismatch`, `error` or `fatal`, because scanning the log is
the only way to tell whether pseudo itself failed. Only lines from the last
pseudo server execution are printed.
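
For reference, the detection works by reading pseudo.log backwards: lines are
collected until the marker that opens the most recent pseudo server run is
reached, and the collected chunk is attached to the failure only if it contains
one of the keywords above. A minimal standalone sketch of that logic (the
function name and example path are illustrative, not part of the patch):

    import os

    def collect_failed_pseudo_log(fakerootlog):
        # Return the portion of pseudo.log written by the most recent pseudo
        # server run, or None if that portion does not look like a failure.
        if not fakerootlog or not os.path.exists(fakerootlog):
            return None
        fakeroot_log = ""
        fakeroot_failed = False
        with open(fakerootlog) as f:
            # Walk the log backwards so earlier server runs are ignored.
            for line in reversed(f.readlines()):
                if any(err in line.lower() for err in ('mismatch', 'error', 'fatal')):
                    fakeroot_failed = True
                if 'doing new pid setup and server start' in line:
                    break
                fakeroot_log = line + fakeroot_log
        return fakeroot_log if fakeroot_failed else None

    # Illustrative usage; the path below is hypothetical:
    # print(collect_failed_pseudo_log("/build/tmp/work/.../pseudo/pseudo.log"))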

(Bitbake rev: e7c664a947903ed7b868abef62af2ff5f8ef0dc6)

Signed-off-by: Tomasz Dziendzielski <tomasz.dziendzielski@gmail.com>
Signed-off-by: Jan Brzezanski <jan.brzezanski@gmail.com>
Signed-off-by: Adrian Walag
Signed-off-by: Paulo Neves <ptsneves@gmail.com>
Signed-off-by: Mikolaj Lasota <mikolaj.lasota@protonmail.com>
Signed-off-by: Wiktor Baura <wbaura@gmail.com>
Signed-off-by: Kamil Kwiek <kamil.kwiek@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Author: Tomasz Dziendzielski, 2021-03-05 12:44:24 +01:00 (committed by Richard Purdie)
parent d3a0f074bc
commit 5386b3db50
2 changed files with 40 additions and 9 deletions

bitbake/lib/bb/cache.py

@@ -126,6 +126,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
         self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
         self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
         self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
+        self.fakerootlogs = self.getvar('FAKEROOTLOGS', metadata)
         self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
         self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)
@@ -163,6 +164,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
         cachedata.fakerootenv = {}
         cachedata.fakerootnoenv = {}
         cachedata.fakerootdirs = {}
+        cachedata.fakerootlogs = {}
         cachedata.extradepsfunc = {}

     def add_cacheData(self, cachedata, fn):
@@ -231,6 +233,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
         cachedata.fakerootenv[fn] = self.fakerootenv
         cachedata.fakerootnoenv[fn] = self.fakerootnoenv
         cachedata.fakerootdirs[fn] = self.fakerootdirs
+        cachedata.fakerootlogs[fn] = self.fakerootlogs
         cachedata.extradepsfunc[fn] = self.extradepsfunc

 def virtualfn2realfn(virtualfn):

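The cache changes above only plumb a new FAKEROOTLOGS variable from the recipe
metadata into the per-recipe cache data, next to FAKEROOTENV, FAKEROOTNOENV and
FAKEROOTDIRS. The metadata side is expected to point the variable at the
recipe's pseudo log; as an illustration only (this assignment is an assumption
about the metadata configuration, not part of this commit):

    # illustrative metadata assignment, not included in this commit
    FAKEROOTLOGS ?= "${WORKDIR}/pseudo/pseudo.log"
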
bitbake/lib/bb/runqueue.py

@@ -1242,6 +1242,7 @@ class RunQueue:
         magic = "decafbad"
         if self.cooker.configuration.profile:
             magic = "decafbadbad"
+        fakerootlogs = None
         if fakeroot:
             magic = magic + "beef"
             mcdata = self.cooker.databuilder.mcdata[mc]
@@ -1251,10 +1252,11 @@ class RunQueue:
             for key, value in (var.split('=') for var in fakerootenv):
                 env[key] = value
             worker = subprocess.Popen(fakerootcmd + ["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
+            fakerootlogs = self.rqdata.dataCaches[mc].fakerootlogs
         else:
             worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
         bb.utils.nonblockingfd(worker.stdout)
-        workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec)
+        workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec, fakerootlogs=fakerootlogs)

         workerdata = {
             "taskdeps" : self.rqdata.dataCaches[mc].task_deps,
@@ -1772,7 +1774,7 @@ class RunQueueExecute:
         self.sqdata = SQData()
         build_scenequeue_data(self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self)

-    def runqueue_process_waitpid(self, task, status):
+    def runqueue_process_waitpid(self, task, status, fakerootlog=None):
         # self.build_stamps[pid] may not exist when use shared work directory.
         if task in self.build_stamps:
@@ -1787,7 +1789,7 @@ class RunQueueExecute:
             self.sq_live.remove(task)
         else:
             if status != 0:
-                self.task_fail(task, status)
+                self.task_fail(task, status, fakerootlog=fakerootlog)
             else:
                 self.task_complete(task)
         return True
@@ -1908,14 +1910,31 @@ class RunQueueExecute:
         self.task_completeoutright(task)
         self.runq_tasksrun.add(task)

-    def task_fail(self, task, exitcode):
+    def task_fail(self, task, exitcode, fakerootlog=None):
         """
         Called when a task has failed
         Updates the state engine with the failure
         """
         self.stats.taskFailed()
         self.failed_tids.append(task)
-        bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq), self.cfgData)
+
+        fakeroot_log = ""
+        if fakerootlog and os.path.exists(fakerootlog):
+            with open(fakerootlog) as fakeroot_log_file:
+                fakeroot_failed = False
+                for line in reversed(fakeroot_log_file.readlines()):
+                    for fakeroot_error in ['mismatch', 'error', 'fatal']:
+                        if fakeroot_error in line.lower():
+                            fakeroot_failed = True
+                    if 'doing new pid setup and server start' in line:
+                        break
+                    fakeroot_log = line + fakeroot_log
+
+            if not fakeroot_failed:
+                fakeroot_log = None
+
+        bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq, fakeroot_log=fakeroot_log), self.cfgData)
+
         if self.rqdata.taskData[''].abort:
             self.rq.state = runQueueCleanUp
@@ -2883,12 +2902,16 @@ class runQueueTaskFailed(runQueueEvent):
     """
     Event notifying a task failed
     """
-    def __init__(self, task, stats, exitcode, rq):
+    def __init__(self, task, stats, exitcode, rq, fakeroot_log=None):
         runQueueEvent.__init__(self, task, stats, rq)
         self.exitcode = exitcode
+        self.fakeroot_log = fakeroot_log

     def __str__(self):
-        return "Task (%s) failed with exit code '%s'" % (self.taskstring, self.exitcode)
+        if self.fakeroot_log:
+            return "Task (%s) failed with exit code '%s' \nPseudo log:\n%s" % (self.taskstring, self.exitcode, self.fakeroot_log)
+        else:
+            return "Task (%s) failed with exit code '%s'" % (self.taskstring, self.exitcode)

 class sceneQueueTaskFailed(sceneQueueEvent):
     """
@@ -2940,7 +2963,7 @@ class runQueuePipe():
     """
     Abstraction for a pipe between a worker thread and the server
    """
-    def __init__(self, pipein, pipeout, d, rq, rqexec):
+    def __init__(self, pipein, pipeout, d, rq, rqexec, fakerootlogs=None):
         self.input = pipein
         if pipeout:
             pipeout.close()
@@ -2949,6 +2972,7 @@ class runQueuePipe():
         self.d = d
         self.rq = rq
         self.rqexec = rqexec
+        self.fakerootlogs = fakerootlogs

     def setrunqueueexec(self, rqexec):
         self.rqexec = rqexec
@@ -2994,7 +3018,11 @@ class runQueuePipe():
                 task, status = pickle.loads(self.queue[10:index])
             except (ValueError, pickle.UnpicklingError, AttributeError, IndexError) as e:
                 bb.msg.fatal("RunQueue", "failed load pickle '%s': '%s'" % (e, self.queue[10:index]))
-            self.rqexec.runqueue_process_waitpid(task, status)
+            (_, _, _, taskfn) = split_tid_mcfn(task)
+            fakerootlog = None
+            if self.fakerootlogs and taskfn and taskfn in self.fakerootlogs:
+                fakerootlog = self.fakerootlogs[taskfn]
+            self.rqexec.runqueue_process_waitpid(task, status, fakerootlog=fakerootlog)
             found = True
             self.queue = self.queue[index+11:]
             index = self.queue.find(b"</exitcode>")
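
Taken together: the worker pipe maps a failed task back to its recipe's pseudo
log, task_fail() extracts the lines from the last pseudo server run, and
runQueueTaskFailed appends them to the usual failure message. A rough
illustration of the resulting output format (the task name and log content
below are placeholders, not real output):

    # Illustrative only: reproduces the __str__ format above with placeholder values.
    taskstring = "/path/to/some-recipe.bb:do_install"
    exitcode = 1
    fakeroot_log = "<lines from the last pseudo server run would appear here>\n"
    print("Task (%s) failed with exit code '%s' \nPseudo log:\n%s" % (taskstring, exitcode, fakeroot_log))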