u-boot-qoriq: add 2019.10 recipe

u-boot-qoriq in meta-freescale has been upgraded to 2020.04. The patches
for lx2160acex7 cannot be applied on top of 2020.04, so restore the
2019.10 recipe to avoid breaking the build.

Signed-off-by: Ting Liu <ting.liu@nxp.com>
Ting Liu 2020-12-17 14:34:32 +05:30 committed by Otavio Salvador
parent a71856d16a
commit 0428b922a5
9 changed files with 1665 additions and 0 deletions

@@ -0,0 +1,29 @@
From 018921ee79d3f30893614b3b2b63b588d8544f73 Mon Sep 17 00:00:00 2001
From: Peter Robinson <pbrobinson@gmail.com>
Date: Thu, 30 Jan 2020 09:37:15 +0000
Subject: [PATCH] Remove redundant YYLOC global declaration
Same as the upstream fix for building dtc with gcc 10.
Upstream-Status: Backport
Signed-off-by: Peter Robinson <pbrobinson@gmail.com>
---
scripts/dtc/dtc-lexer.l | 1 -
1 file changed, 1 deletion(-)
diff --git a/scripts/dtc/dtc-lexer.l b/scripts/dtc/dtc-lexer.l
index fd825ebba69c..24af54997758 100644
--- a/scripts/dtc/dtc-lexer.l
+++ b/scripts/dtc/dtc-lexer.l
@@ -38,7 +38,6 @@ LINECOMMENT "//".*\n
#include "srcpos.h"
#include "dtc-parser.tab.h"
-YYLTYPE yylloc;
extern bool treesource_error;
/* CAUTION: this will stop working if we ever use yyless() or yyunput() */
--
2.26.2

@@ -0,0 +1,79 @@
From b6ee0cf89f9405094cbb6047076a13e14ebc030b Mon Sep 17 00:00:00 2001
From: Simon Glass <sjg@chromium.org>
Date: Thu, 31 Oct 2019 07:43:03 -0600
Subject: [PATCH] binman: Convert a few tests to Python 3
Some tests have crept in with Python 2 strings and constructs. Convert
them.
Upstream-Status: Backport
Signed-off-by: Simon Glass <sjg@chromium.org>
---
tools/binman/ftest.py | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/tools/binman/ftest.py b/tools/binman/ftest.py
index 93507993a0..80df0e3ca9 100644
--- a/tools/binman/ftest.py
+++ b/tools/binman/ftest.py
@@ -2113,7 +2113,7 @@ class TestFunctional(unittest.TestCase):
data = self.data = self._DoReadFileRealDtb('115_fdtmap.dts')
fdtmap_data = data[len(U_BOOT_DATA):]
magic = fdtmap_data[:8]
- self.assertEqual('_FDTMAP_', magic)
+ self.assertEqual(b'_FDTMAP_', magic)
self.assertEqual(tools.GetBytes(0, 8), fdtmap_data[8:16])
fdt_data = fdtmap_data[16:]
@@ -2156,7 +2156,7 @@ class TestFunctional(unittest.TestCase):
dtb = fdt.Fdt.FromData(fdt_data)
fdt_size = dtb.GetFdtObj().totalsize()
hdr_data = data[-8:]
- self.assertEqual('BinM', hdr_data[:4])
+ self.assertEqual(b'BinM', hdr_data[:4])
offset = struct.unpack('<I', hdr_data[4:])[0] & 0xffffffff
self.assertEqual(fdtmap_pos - 0x400, offset - (1 << 32))
@@ -2165,7 +2165,7 @@ class TestFunctional(unittest.TestCase):
data = self.data = self._DoReadFileRealDtb('117_fdtmap_hdr_start.dts')
fdtmap_pos = 0x100 + len(U_BOOT_DATA)
hdr_data = data[:8]
- self.assertEqual('BinM', hdr_data[:4])
+ self.assertEqual(b'BinM', hdr_data[:4])
offset = struct.unpack('<I', hdr_data[4:])[0]
self.assertEqual(fdtmap_pos, offset)
@@ -2174,7 +2174,7 @@ class TestFunctional(unittest.TestCase):
data = self.data = self._DoReadFileRealDtb('118_fdtmap_hdr_pos.dts')
fdtmap_pos = 0x100 + len(U_BOOT_DATA)
hdr_data = data[0x80:0x88]
- self.assertEqual('BinM', hdr_data[:4])
+ self.assertEqual(b'BinM', hdr_data[:4])
offset = struct.unpack('<I', hdr_data[4:])[0]
self.assertEqual(fdtmap_pos, offset)
@@ -2435,9 +2435,9 @@ class TestFunctional(unittest.TestCase):
' section 100 %x section 100' % section_size,
' cbfs 100 400 cbfs 0',
' u-boot 138 4 u-boot 38',
-' u-boot-dtb 180 10f u-boot-dtb 80 3c9',
+' u-boot-dtb 180 105 u-boot-dtb 80 3c9',
' u-boot-dtb 500 %x u-boot-dtb 400 3c9' % fdt_size,
-' fdtmap %x 3b4 fdtmap %x' %
+' fdtmap %x 3bd fdtmap %x' %
(fdtmap_offset, fdtmap_offset),
' image-header bf8 8 image-header bf8',
]
@@ -2522,7 +2522,7 @@ class TestFunctional(unittest.TestCase):
data = self._RunExtractCmd('section')
cbfs_data = data[:0x400]
cbfs = cbfs_util.CbfsReader(cbfs_data)
- self.assertEqual(['u-boot', 'u-boot-dtb', ''], cbfs.files.keys())
+ self.assertEqual(['u-boot', 'u-boot-dtb', ''], list(cbfs.files.keys()))
dtb_data = data[0x400:]
dtb = self._decompress(dtb_data)
self.assertEqual(EXTRACT_DTB_SIZE, len(dtb))
--
2.24.0
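
The converted assertions reflect two Python 3 behaviour changes: binary data read from an image only compares equal to bytes literals, and dict.keys() returns a view object rather than a list. A standalone sketch of both points (illustrative values, not code from the patch):

# Two Python 3 behaviour changes behind the assertEqual() conversions above.
data = b'_FDTMAP_' + bytes(8)            # image contents are a bytes object

# 1. bytes never compare equal to str in Python 3 (they did in Python 2).
assert data[:8] != '_FDTMAP_'            # silently unequal, no exception
assert data[:8] == b'_FDTMAP_'           # compare against a bytes literal

# 2. dict.keys() is a view, not a list, so wrap it in list() to compare.
files = {'u-boot': 0, 'u-boot-dtb': 1}
assert files.keys() != ['u-boot', 'u-boot-dtb']
assert list(files.keys()) == ['u-boot', 'u-boot-dtb']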

@@ -0,0 +1,30 @@
From 388560134b99dc4cc752627d3a7e9f8c8c2a89a7 Mon Sep 17 00:00:00 2001
From: Simon Glass <sjg@chromium.org>
Date: Thu, 31 Oct 2019 07:43:05 -0600
Subject: [PATCH] binman: Move to use Python 3
Update this tool to use Python 3 to meet the 2020 deadline.
Unfortunately this introduces a test failure due to a problem in pylibfdt
on Python 3. I will investigate.
Upstream-Status: Backport
Signed-off-by: Simon Glass <sjg@chromium.org>
---
tools/binman/binman.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tools/binman/binman.py b/tools/binman/binman.py
index 8bd5868df2..9e6fd72117 100755
--- a/tools/binman/binman.py
+++ b/tools/binman/binman.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2016 Google, Inc
--
2.24.0

@@ -0,0 +1,928 @@
From c05aa0364280803d8274e260a739553d588ea052 Mon Sep 17 00:00:00 2001
From: Simon Glass <sjg@chromium.org>
Date: Thu, 31 Oct 2019 07:42:53 -0600
Subject: [PATCH] buildman: Convert to Python 3
Convert buildman to Python 3 and make it use that, to meet the 2020
deadline.
Upstream-Status: Backport
Signed-off-by: Simon Glass <sjg@chromium.org>
---
tools/buildman/board.py | 9 +--
tools/buildman/bsettings.py | 20 +++----
tools/buildman/builder.py | 47 ++++++++--------
tools/buildman/builderthread.py | 24 ++++----
tools/buildman/buildman.py | 10 ++--
tools/buildman/control.py | 44 +++++++--------
tools/buildman/func_test.py | 16 +++---
tools/buildman/test.py | 22 ++++----
tools/buildman/toolchain.py | 99 +++++++++++++++++----------------
9 files changed, 146 insertions(+), 145 deletions(-)
diff --git a/tools/buildman/board.py b/tools/buildman/board.py
index 2a1d021574..447aaabea8 100644
--- a/tools/buildman/board.py
+++ b/tools/buildman/board.py
@@ -1,6 +1,7 @@
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2012 The Chromium OS Authors.
+from collections import OrderedDict
import re
class Expr:
@@ -120,7 +121,7 @@ class Boards:
Args:
fname: Filename of boards.cfg file
"""
- with open(fname, 'r') as fd:
+ with open(fname, 'r', encoding='utf-8') as fd:
for line in fd:
if line[0] == '#':
continue
@@ -155,7 +156,7 @@ class Boards:
key is board.target
value is board
"""
- board_dict = {}
+ board_dict = OrderedDict()
for board in self._boards:
board_dict[board.target] = board
return board_dict
@@ -166,7 +167,7 @@ class Boards:
Returns:
List of Board objects that are marked selected
"""
- board_dict = {}
+ board_dict = OrderedDict()
for board in self._boards:
if board.build_it:
board_dict[board.target] = board
@@ -259,7 +260,7 @@ class Boards:
due to each argument, arranged by argument.
List of errors found
"""
- result = {}
+ result = OrderedDict()
warnings = []
terms = self._BuildTerms(args)
diff --git a/tools/buildman/bsettings.py b/tools/buildman/bsettings.py
index 03d7439aa5..0b7208da37 100644
--- a/tools/buildman/bsettings.py
+++ b/tools/buildman/bsettings.py
@@ -1,9 +1,9 @@
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2012 The Chromium OS Authors.
-import ConfigParser
+import configparser
import os
-import StringIO
+import io
def Setup(fname=''):
@@ -15,20 +15,20 @@ def Setup(fname=''):
global settings
global config_fname
- settings = ConfigParser.SafeConfigParser()
+ settings = configparser.SafeConfigParser()
if fname is not None:
config_fname = fname
if config_fname == '':
config_fname = '%s/.buildman' % os.getenv('HOME')
if not os.path.exists(config_fname):
- print 'No config file found ~/.buildman\nCreating one...\n'
+ print('No config file found ~/.buildman\nCreating one...\n')
CreateBuildmanConfigFile(config_fname)
- print 'To install tool chains, please use the --fetch-arch option'
+ print('To install tool chains, please use the --fetch-arch option')
if config_fname:
settings.read(config_fname)
def AddFile(data):
- settings.readfp(StringIO.StringIO(data))
+ settings.readfp(io.StringIO(data))
def GetItems(section):
"""Get the items from a section of the config.
@@ -41,7 +41,7 @@ def GetItems(section):
"""
try:
return settings.items(section)
- except ConfigParser.NoSectionError as e:
+ except configparser.NoSectionError as e:
return []
except:
raise
@@ -68,10 +68,10 @@ def CreateBuildmanConfigFile(config_fname):
try:
f = open(config_fname, 'w')
except IOError:
- print "Couldn't create buildman config file '%s'\n" % config_fname
+ print("Couldn't create buildman config file '%s'\n" % config_fname)
raise
- print >>f, '''[toolchain]
+ print('''[toolchain]
# name = path
# e.g. x86 = /opt/gcc-4.6.3-nolibc/x86_64-linux
@@ -93,5 +93,5 @@ openrisc = or1k
# snapper-boards=ENABLE_AT91_TEST=1
# snapper9260=${snapper-boards} BUILD_TAG=442
# snapper9g45=${snapper-boards} BUILD_TAG=443
-'''
+''', file=f)
f.close();
diff --git a/tools/buildman/builder.py b/tools/buildman/builder.py
index fbb236676c..cfbe4c26b1 100644
--- a/tools/buildman/builder.py
+++ b/tools/buildman/builder.py
@@ -9,7 +9,7 @@ from datetime import datetime, timedelta
import glob
import os
import re
-import Queue
+import queue
import shutil
import signal
import string
@@ -92,11 +92,10 @@ u-boot/ source directory
"""
# Possible build outcomes
-OUTCOME_OK, OUTCOME_WARNING, OUTCOME_ERROR, OUTCOME_UNKNOWN = range(4)
+OUTCOME_OK, OUTCOME_WARNING, OUTCOME_ERROR, OUTCOME_UNKNOWN = list(range(4))
# Translate a commit subject into a valid filename (and handle unicode)
-trans_valid_chars = string.maketrans('/: ', '---')
-trans_valid_chars = trans_valid_chars.decode('latin-1')
+trans_valid_chars = str.maketrans('/: ', '---')
BASE_CONFIG_FILENAMES = [
'u-boot.cfg', 'u-boot-spl.cfg', 'u-boot-tpl.cfg'
@@ -122,8 +121,8 @@ class Config:
def __hash__(self):
val = 0
for fname in self.config:
- for key, value in self.config[fname].iteritems():
- print key, value
+ for key, value in self.config[fname].items():
+ print(key, value)
val = val ^ hash(key) & hash(value)
return val
@@ -293,8 +292,8 @@ class Builder:
self._re_dtb_warning = re.compile('(.*): Warning .*')
self._re_note = re.compile('(.*):(\d*):(\d*): note: this is the location of the previous.*')
- self.queue = Queue.Queue()
- self.out_queue = Queue.Queue()
+ self.queue = queue.Queue()
+ self.out_queue = queue.Queue()
for i in range(self.num_threads):
t = builderthread.BuilderThread(self, i, incremental,
per_board_out_dir)
@@ -781,7 +780,7 @@ class Builder:
config = {}
environment = {}
- for board in boards_selected.itervalues():
+ for board in boards_selected.values():
outcome = self.GetBuildOutcome(commit_upto, board.target,
read_func_sizes, read_config,
read_environment)
@@ -814,13 +813,13 @@ class Builder:
tconfig = Config(self.config_filenames, board.target)
for fname in self.config_filenames:
if outcome.config:
- for key, value in outcome.config[fname].iteritems():
+ for key, value in outcome.config[fname].items():
tconfig.Add(fname, key, value)
config[board.target] = tconfig
tenvironment = Environment(board.target)
if outcome.environment:
- for key, value in outcome.environment.iteritems():
+ for key, value in outcome.environment.items():
tenvironment.Add(key, value)
environment[board.target] = tenvironment
@@ -1040,12 +1039,12 @@ class Builder:
# We now have a list of image size changes sorted by arch
# Print out a summary of these
- for arch, target_list in arch_list.iteritems():
+ for arch, target_list in arch_list.items():
# Get total difference for each type
totals = {}
for result in target_list:
total = 0
- for name, diff in result.iteritems():
+ for name, diff in result.items():
if name.startswith('_'):
continue
total += diff
@@ -1250,7 +1249,7 @@ class Builder:
if self._show_unknown:
self.AddOutcome(board_selected, arch_list, unknown_boards, '?',
self.col.MAGENTA)
- for arch, target_list in arch_list.iteritems():
+ for arch, target_list in arch_list.items():
Print('%10s: %s' % (arch, target_list))
self._error_lines += 1
if better_err:
@@ -1283,13 +1282,13 @@ class Builder:
environment_minus = {}
environment_change = {}
base = tbase.environment
- for key, value in tenvironment.environment.iteritems():
+ for key, value in tenvironment.environment.items():
if key not in base:
environment_plus[key] = value
- for key, value in base.iteritems():
+ for key, value in base.items():
if key not in tenvironment.environment:
environment_minus[key] = value
- for key, value in base.iteritems():
+ for key, value in base.items():
new_value = tenvironment.environment.get(key)
if new_value and value != new_value:
desc = '%s -> %s' % (value, new_value)
@@ -1342,15 +1341,15 @@ class Builder:
config_minus = {}
config_change = {}
base = tbase.config[name]
- for key, value in tconfig.config[name].iteritems():
+ for key, value in tconfig.config[name].items():
if key not in base:
config_plus[key] = value
all_config_plus[key] = value
- for key, value in base.iteritems():
+ for key, value in base.items():
if key not in tconfig.config[name]:
config_minus[key] = value
all_config_minus[key] = value
- for key, value in base.iteritems():
+ for key, value in base.items():
new_value = tconfig.config.get(key)
if new_value and value != new_value:
desc = '%s -> %s' % (value, new_value)
@@ -1368,7 +1367,7 @@ class Builder:
summary[target] = '\n'.join(lines)
lines_by_target = {}
- for target, lines in summary.iteritems():
+ for target, lines in summary.items():
if lines in lines_by_target:
lines_by_target[lines].append(target)
else:
@@ -1392,7 +1391,7 @@ class Builder:
Print('%s:' % arch)
_OutputConfigInfo(lines)
- for lines, targets in lines_by_target.iteritems():
+ for lines, targets in lines_by_target.items():
if not lines:
continue
Print('%s :' % ' '.join(sorted(targets)))
@@ -1463,7 +1462,7 @@ class Builder:
commits: Selected commits to build
"""
# First work out how many commits we will build
- count = (self.commit_count + self._step - 1) / self._step
+ count = (self.commit_count + self._step - 1) // self._step
self.count = len(board_selected) * count
self.upto = self.warned = self.fail = 0
self._timestamps = collections.deque()
@@ -1566,7 +1565,7 @@ class Builder:
self.ProcessResult(None)
# Create jobs to build all commits for each board
- for brd in board_selected.itervalues():
+ for brd in board_selected.values():
job = builderthread.BuilderJob()
job.board = brd
job.commits = commits
diff --git a/tools/buildman/builderthread.py b/tools/buildman/builderthread.py
index 8a9d47cd5e..570c1f6595 100644
--- a/tools/buildman/builderthread.py
+++ b/tools/buildman/builderthread.py
@@ -28,7 +28,7 @@ def Mkdir(dirname, parents = False):
except OSError as err:
if err.errno == errno.EEXIST:
if os.path.realpath('.') == os.path.realpath(dirname):
- print "Cannot create the current working directory '%s'!" % dirname
+ print("Cannot create the current working directory '%s'!" % dirname)
sys.exit(1)
pass
else:
@@ -291,15 +291,13 @@ class BuilderThread(threading.Thread):
outfile = os.path.join(build_dir, 'log')
with open(outfile, 'w') as fd:
if result.stdout:
- # We don't want unicode characters in log files
- fd.write(result.stdout.decode('UTF-8').encode('ASCII', 'replace'))
+ fd.write(result.stdout)
errfile = self.builder.GetErrFile(result.commit_upto,
result.brd.target)
if result.stderr:
with open(errfile, 'w') as fd:
- # We don't want unicode characters in log files
- fd.write(result.stderr.decode('UTF-8').encode('ASCII', 'replace'))
+ fd.write(result.stderr)
elif os.path.exists(errfile):
os.remove(errfile)
@@ -314,17 +312,17 @@ class BuilderThread(threading.Thread):
else:
fd.write('%s' % result.return_code)
with open(os.path.join(build_dir, 'toolchain'), 'w') as fd:
- print >>fd, 'gcc', result.toolchain.gcc
- print >>fd, 'path', result.toolchain.path
- print >>fd, 'cross', result.toolchain.cross
- print >>fd, 'arch', result.toolchain.arch
+ print('gcc', result.toolchain.gcc, file=fd)
+ print('path', result.toolchain.path, file=fd)
+ print('cross', result.toolchain.cross, file=fd)
+ print('arch', result.toolchain.arch, file=fd)
fd.write('%s' % result.return_code)
# Write out the image and function size information and an objdump
env = result.toolchain.MakeEnvironment(self.builder.full_path)
with open(os.path.join(build_dir, 'env'), 'w') as fd:
for var in sorted(env.keys()):
- print >>fd, '%s="%s"' % (var, env[var])
+ print('%s="%s"' % (var, env[var]), file=fd)
lines = []
for fname in ['u-boot', 'spl/u-boot-spl']:
cmd = ['%snm' % self.toolchain.cross, '--size-sort', fname]
@@ -335,7 +333,7 @@ class BuilderThread(threading.Thread):
nm = self.builder.GetFuncSizesFile(result.commit_upto,
result.brd.target, fname)
with open(nm, 'w') as fd:
- print >>fd, nm_result.stdout,
+ print(nm_result.stdout, end=' ', file=fd)
cmd = ['%sobjdump' % self.toolchain.cross, '-h', fname]
dump_result = command.RunPipe([cmd], capture=True,
@@ -346,7 +344,7 @@ class BuilderThread(threading.Thread):
objdump = self.builder.GetObjdumpFile(result.commit_upto,
result.brd.target, fname)
with open(objdump, 'w') as fd:
- print >>fd, dump_result.stdout,
+ print(dump_result.stdout, end=' ', file=fd)
for line in dump_result.stdout.splitlines():
fields = line.split()
if len(fields) > 5 and fields[1] == '.rodata':
@@ -378,7 +376,7 @@ class BuilderThread(threading.Thread):
sizes = self.builder.GetSizesFile(result.commit_upto,
result.brd.target)
with open(sizes, 'w') as fd:
- print >>fd, '\n'.join(lines)
+ print('\n'.join(lines), file=fd)
# Write out the configuration files, with a special case for SPL
for dirname in ['', 'spl', 'tpl']:
diff --git a/tools/buildman/buildman.py b/tools/buildman/buildman.py
index f17aa15e7c..30a8690f93 100755
--- a/tools/buildman/buildman.py
+++ b/tools/buildman/buildman.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0+
#
# Copyright (c) 2012 The Chromium OS Authors.
@@ -6,6 +6,8 @@
"""See README for more information"""
+from __future__ import print_function
+
import multiprocessing
import os
import re
@@ -46,11 +48,11 @@ def RunTests(skip_net_tests):
suite = unittest.TestLoader().loadTestsFromTestCase(module)
suite.run(result)
- print result
+ print(result)
for test, err in result.errors:
- print err
+ print(err)
for test, err in result.failures:
- print err
+ print(err)
options, args = cmdline.ParseArgs()
diff --git a/tools/buildman/control.py b/tools/buildman/control.py
index 9787b86747..216012d001 100644
--- a/tools/buildman/control.py
+++ b/tools/buildman/control.py
@@ -30,7 +30,7 @@ def GetActionSummary(is_summary, commits, selected, options):
"""
if commits:
count = len(commits)
- count = (count + options.step - 1) / options.step
+ count = (count + options.step - 1) // options.step
commit_str = '%d commit%s' % (count, GetPlural(count))
else:
commit_str = 'current source'
@@ -59,31 +59,31 @@ def ShowActions(series, why_selected, boards_selected, builder, options,
board_warnings: List of warnings obtained from board selected
"""
col = terminal.Color()
- print 'Dry run, so not doing much. But I would do this:'
- print
+ print('Dry run, so not doing much. But I would do this:')
+ print()
if series:
commits = series.commits
else:
commits = None
- print GetActionSummary(False, commits, boards_selected,
- options)
- print 'Build directory: %s' % builder.base_dir
+ print(GetActionSummary(False, commits, boards_selected,
+ options))
+ print('Build directory: %s' % builder.base_dir)
if commits:
for upto in range(0, len(series.commits), options.step):
commit = series.commits[upto]
- print ' ', col.Color(col.YELLOW, commit.hash[:8], bright=False),
- print commit.subject
- print
+ print(' ', col.Color(col.YELLOW, commit.hash[:8], bright=False), end=' ')
+ print(commit.subject)
+ print()
for arg in why_selected:
if arg != 'all':
- print arg, ': %d boards' % len(why_selected[arg])
+ print(arg, ': %d boards' % len(why_selected[arg]))
if options.verbose:
- print ' %s' % ' '.join(why_selected[arg])
- print ('Total boards to build for each commit: %d\n' %
- len(why_selected['all']))
+ print(' %s' % ' '.join(why_selected[arg]))
+ print(('Total boards to build for each commit: %d\n' %
+ len(why_selected['all'])))
if board_warnings:
for warning in board_warnings:
- print col.Color(col.YELLOW, warning)
+ print(col.Color(col.YELLOW, warning))
def CheckOutputDir(output_dir):
"""Make sure that the output directory is not within the current directory
@@ -146,17 +146,17 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
if options.fetch_arch:
if options.fetch_arch == 'list':
sorted_list = toolchains.ListArchs()
- print col.Color(col.BLUE, 'Available architectures: %s\n' %
- ' '.join(sorted_list))
+ print(col.Color(col.BLUE, 'Available architectures: %s\n' %
+ ' '.join(sorted_list)))
return 0
else:
fetch_arch = options.fetch_arch
if fetch_arch == 'all':
fetch_arch = ','.join(toolchains.ListArchs())
- print col.Color(col.CYAN, '\nDownloading toolchains: %s' %
- fetch_arch)
+ print(col.Color(col.CYAN, '\nDownloading toolchains: %s' %
+ fetch_arch))
for arch in fetch_arch.split(','):
- print
+ print()
ret = toolchains.FetchAndInstall(arch)
if ret:
return ret
@@ -167,7 +167,7 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
toolchains.Scan(options.list_tool_chains and options.verbose)
if options.list_tool_chains:
toolchains.List()
- print
+ print()
return 0
# Work out how many commits to build. We want to build everything on the
@@ -191,7 +191,7 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
sys.exit(col.Color(col.RED, "Range '%s' has no commits" %
options.branch))
if msg:
- print col.Color(col.YELLOW, msg)
+ print(col.Color(col.YELLOW, msg))
count += 1 # Build upstream commit also
if not count:
@@ -268,7 +268,7 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
options.threads = min(multiprocessing.cpu_count(), len(selected))
if not options.jobs:
options.jobs = max(1, (multiprocessing.cpu_count() +
- len(selected) - 1) / len(selected))
+ len(selected) - 1) // len(selected))
if not options.step:
options.step = len(series.commits) - 1
diff --git a/tools/buildman/func_test.py b/tools/buildman/func_test.py
index f90b8ea7f5..4c3d497294 100644
--- a/tools/buildman/func_test.py
+++ b/tools/buildman/func_test.py
@@ -270,7 +270,7 @@ class TestFunctional(unittest.TestCase):
stdout=''.join(commit_log[:count]))
# Not handled, so abort
- print 'git log', args
+ print('git log', args)
sys.exit(1)
def _HandleCommandGitConfig(self, args):
@@ -286,7 +286,7 @@ class TestFunctional(unittest.TestCase):
stdout='refs/heads/master\n')
# Not handled, so abort
- print 'git config', args
+ print('git config', args)
sys.exit(1)
def _HandleCommandGit(self, in_args):
@@ -320,7 +320,7 @@ class TestFunctional(unittest.TestCase):
return command.CommandResult(return_code=0)
# Not handled, so abort
- print 'git', git_args, sub_cmd, args
+ print('git', git_args, sub_cmd, args)
sys.exit(1)
def _HandleCommandNm(self, args):
@@ -351,7 +351,7 @@ class TestFunctional(unittest.TestCase):
if pipe_list[1] == ['wc', '-l']:
wc = True
else:
- print 'invalid pipe', kwargs
+ print('invalid pipe', kwargs)
sys.exit(1)
cmd = pipe_list[0][0]
args = pipe_list[0][1:]
@@ -371,7 +371,7 @@ class TestFunctional(unittest.TestCase):
if not result:
# Not handled, so abort
- print 'unknown command', kwargs
+ print('unknown command', kwargs)
sys.exit(1)
if wc:
@@ -404,14 +404,14 @@ class TestFunctional(unittest.TestCase):
return command.CommandResult(return_code=0)
# Not handled, so abort
- print 'make', stage
+ print('make', stage)
sys.exit(1)
# Example function to print output lines
def print_lines(self, lines):
- print len(lines)
+ print(len(lines))
for line in lines:
- print line
+ print(line)
#self.print_lines(terminal.GetPrintTestLines())
def testNoBoards(self):
diff --git a/tools/buildman/test.py b/tools/buildman/test.py
index ed99b9375c..b4e28d6867 100644
--- a/tools/buildman/test.py
+++ b/tools/buildman/test.py
@@ -212,11 +212,11 @@ class TestBuild(unittest.TestCase):
self.assertEqual(lines[1].text, '02: %s' % commits[1][1])
col = terminal.Color()
- self.assertSummary(lines[2].text, 'sandbox', 'w+', ['board4'],
+ self.assertSummary(lines[2].text, 'arm', 'w+', ['board1'],
outcome=OUTCOME_WARN)
- self.assertSummary(lines[3].text, 'arm', 'w+', ['board1'],
+ self.assertSummary(lines[3].text, 'powerpc', 'w+', ['board2', 'board3'],
outcome=OUTCOME_WARN)
- self.assertSummary(lines[4].text, 'powerpc', 'w+', ['board2', 'board3'],
+ self.assertSummary(lines[4].text, 'sandbox', 'w+', ['board4'],
outcome=OUTCOME_WARN)
# Second commit: The warnings should be listed
@@ -226,10 +226,10 @@ class TestBuild(unittest.TestCase):
# Third commit: Still fails
self.assertEqual(lines[6].text, '03: %s' % commits[2][1])
- self.assertSummary(lines[7].text, 'sandbox', '+', ['board4'])
- self.assertSummary(lines[8].text, 'arm', '', ['board1'],
+ self.assertSummary(lines[7].text, 'arm', '', ['board1'],
outcome=OUTCOME_OK)
- self.assertSummary(lines[9].text, 'powerpc', '+', ['board2', 'board3'])
+ self.assertSummary(lines[8].text, 'powerpc', '+', ['board2', 'board3'])
+ self.assertSummary(lines[9].text, 'sandbox', '+', ['board4'])
# Expect a compiler error
self.assertEqual(lines[10].text, '+%s' %
@@ -237,8 +237,6 @@ class TestBuild(unittest.TestCase):
# Fourth commit: Compile errors are fixed, just have warning for board3
self.assertEqual(lines[11].text, '04: %s' % commits[3][1])
- self.assertSummary(lines[12].text, 'sandbox', 'w+', ['board4'],
- outcome=OUTCOME_WARN)
expect = '%10s: ' % 'powerpc'
expect += ' ' + col.Color(col.GREEN, '')
expect += ' '
@@ -246,7 +244,9 @@ class TestBuild(unittest.TestCase):
expect += ' ' + col.Color(col.YELLOW, 'w+')
expect += ' '
expect += col.Color(col.YELLOW, ' %s' % 'board3')
- self.assertEqual(lines[13].text, expect)
+ self.assertEqual(lines[12].text, expect)
+ self.assertSummary(lines[13].text, 'sandbox', 'w+', ['board4'],
+ outcome=OUTCOME_WARN)
# Compile error fixed
self.assertEqual(lines[14].text, '-%s' %
@@ -259,9 +259,9 @@ class TestBuild(unittest.TestCase):
# Fifth commit
self.assertEqual(lines[16].text, '05: %s' % commits[4][1])
- self.assertSummary(lines[17].text, 'sandbox', '+', ['board4'])
- self.assertSummary(lines[18].text, 'powerpc', '', ['board3'],
+ self.assertSummary(lines[17].text, 'powerpc', '', ['board3'],
outcome=OUTCOME_OK)
+ self.assertSummary(lines[18].text, 'sandbox', '+', ['board4'])
# The second line of errors[3] is a duplicate, so buildman will drop it
expect = errors[3].rstrip().split('\n')
diff --git a/tools/buildman/toolchain.py b/tools/buildman/toolchain.py
index a65737fdf8..cc26e2ede5 100644
--- a/tools/buildman/toolchain.py
+++ b/tools/buildman/toolchain.py
@@ -4,18 +4,19 @@
import re
import glob
-from HTMLParser import HTMLParser
+from html.parser import HTMLParser
import os
import sys
import tempfile
-import urllib2
+import urllib.request, urllib.error, urllib.parse
import bsettings
import command
import terminal
+import tools
(PRIORITY_FULL_PREFIX, PRIORITY_PREFIX_GCC, PRIORITY_PREFIX_GCC_PATH,
- PRIORITY_CALC) = range(4)
+ PRIORITY_CALC) = list(range(4))
# Simple class to collect links from a page
class MyHTMLParser(HTMLParser):
@@ -100,15 +101,15 @@ class Toolchain:
raise_on_error=False)
self.ok = result.return_code == 0
if verbose:
- print 'Tool chain test: ',
+ print('Tool chain test: ', end=' ')
if self.ok:
- print "OK, arch='%s', priority %d" % (self.arch,
- self.priority)
+ print("OK, arch='%s', priority %d" % (self.arch,
+ self.priority))
else:
- print 'BAD'
- print 'Command: ', cmd
- print result.stdout
- print result.stderr
+ print('BAD')
+ print('Command: ', cmd)
+ print(result.stdout)
+ print(result.stderr)
else:
self.ok = True
@@ -138,7 +139,7 @@ class Toolchain:
value = ''
for name, value in bsettings.GetItems('toolchain-wrapper'):
if not value:
- print "Warning: Wrapper not found"
+ print("Warning: Wrapper not found")
if value:
value = value + ' '
@@ -227,11 +228,11 @@ class Toolchains:
"""
toolchains = bsettings.GetItems('toolchain')
if show_warning and not toolchains:
- print ("Warning: No tool chains. Please run 'buildman "
+ print(("Warning: No tool chains. Please run 'buildman "
"--fetch-arch all' to download all available toolchains, or "
"add a [toolchain] section to your buildman config file "
"%s. See README for details" %
- bsettings.config_fname)
+ bsettings.config_fname))
paths = []
for name, value in toolchains:
@@ -272,10 +273,10 @@ class Toolchains:
if add_it:
self.toolchains[toolchain.arch] = toolchain
elif verbose:
- print ("Toolchain '%s' at priority %d will be ignored because "
+ print(("Toolchain '%s' at priority %d will be ignored because "
"another toolchain for arch '%s' has priority %d" %
(toolchain.gcc, toolchain.priority, toolchain.arch,
- self.toolchains[toolchain.arch].priority))
+ self.toolchains[toolchain.arch].priority)))
def ScanPath(self, path, verbose):
"""Scan a path for a valid toolchain
@@ -289,9 +290,9 @@ class Toolchains:
fnames = []
for subdir in ['.', 'bin', 'usr/bin']:
dirname = os.path.join(path, subdir)
- if verbose: print " - looking in '%s'" % dirname
+ if verbose: print(" - looking in '%s'" % dirname)
for fname in glob.glob(dirname + '/*gcc'):
- if verbose: print " - found '%s'" % fname
+ if verbose: print(" - found '%s'" % fname)
fnames.append(fname)
return fnames
@@ -321,9 +322,9 @@ class Toolchains:
Args:
verbose: True to print out progress information
"""
- if verbose: print 'Scanning for tool chains'
+ if verbose: print('Scanning for tool chains')
for name, value in self.prefixes:
- if verbose: print " - scanning prefix '%s'" % value
+ if verbose: print(" - scanning prefix '%s'" % value)
if os.path.exists(value):
self.Add(value, True, verbose, PRIORITY_FULL_PREFIX, name)
continue
@@ -335,10 +336,10 @@ class Toolchains:
for f in fname_list:
self.Add(f, True, verbose, PRIORITY_PREFIX_GCC_PATH, name)
if not fname_list:
- raise ValueError, ("No tool chain found for prefix '%s'" %
+ raise ValueError("No tool chain found for prefix '%s'" %
value)
for path in self.paths:
- if verbose: print " - scanning path '%s'" % path
+ if verbose: print(" - scanning path '%s'" % path)
fnames = self.ScanPath(path, verbose)
for fname in fnames:
self.Add(fname, True, verbose)
@@ -346,13 +347,13 @@ class Toolchains:
def List(self):
"""List out the selected toolchains for each architecture"""
col = terminal.Color()
- print col.Color(col.BLUE, 'List of available toolchains (%d):' %
- len(self.toolchains))
+ print(col.Color(col.BLUE, 'List of available toolchains (%d):' %
+ len(self.toolchains)))
if len(self.toolchains):
- for key, value in sorted(self.toolchains.iteritems()):
- print '%-10s: %s' % (key, value.gcc)
+ for key, value in sorted(self.toolchains.items()):
+ print('%-10s: %s' % (key, value.gcc))
else:
- print 'None'
+ print('None')
def Select(self, arch):
"""Returns the toolchain for a given architecture
@@ -370,7 +371,7 @@ class Toolchains:
return self.toolchains[alias]
if not arch in self.toolchains:
- raise ValueError, ("No tool chain found for arch '%s'" % arch)
+ raise ValueError("No tool chain found for arch '%s'" % arch)
return self.toolchains[arch]
def ResolveReferences(self, var_dict, args):
@@ -464,9 +465,9 @@ class Toolchains:
links = []
for version in versions:
url = '%s/%s/%s/' % (base, arch, version)
- print 'Checking: %s' % url
- response = urllib2.urlopen(url)
- html = response.read()
+ print('Checking: %s' % url)
+ response = urllib.request.urlopen(url)
+ html = tools.ToString(response.read())
parser = MyHTMLParser(fetch_arch)
parser.feed(html)
if fetch_arch == 'list':
@@ -488,14 +489,14 @@ class Toolchains:
Full path to the downloaded archive file in that directory,
or None if there was an error while downloading
"""
- print 'Downloading: %s' % url
+ print('Downloading: %s' % url)
leaf = url.split('/')[-1]
tmpdir = tempfile.mkdtemp('.buildman')
- response = urllib2.urlopen(url)
+ response = urllib.request.urlopen(url)
fname = os.path.join(tmpdir, leaf)
fd = open(fname, 'wb')
meta = response.info()
- size = int(meta.getheaders('Content-Length')[0])
+ size = int(meta.get('Content-Length'))
done = 0
block_size = 1 << 16
status = ''
@@ -504,19 +505,19 @@ class Toolchains:
while True:
buffer = response.read(block_size)
if not buffer:
- print chr(8) * (len(status) + 1), '\r',
+ print(chr(8) * (len(status) + 1), '\r', end=' ')
break
done += len(buffer)
fd.write(buffer)
- status = r'%10d MiB [%3d%%]' % (done / 1024 / 1024,
- done * 100 / size)
+ status = r'%10d MiB [%3d%%]' % (done // 1024 // 1024,
+ done * 100 // size)
status = status + chr(8) * (len(status) + 1)
- print status,
+ print(status, end=' ')
sys.stdout.flush()
fd.close()
if done != size:
- print 'Error, failed to download'
+ print('Error, failed to download')
os.remove(fname)
fname = None
return tmpdir, fname
@@ -565,11 +566,11 @@ class Toolchains:
"""
# Fist get the URL for this architecture
col = terminal.Color()
- print col.Color(col.BLUE, "Downloading toolchain for arch '%s'" % arch)
+ print(col.Color(col.BLUE, "Downloading toolchain for arch '%s'" % arch))
url = self.LocateArchUrl(arch)
if not url:
- print ("Cannot find toolchain for arch '%s' - use 'list' to list" %
- arch)
+ print(("Cannot find toolchain for arch '%s' - use 'list' to list" %
+ arch))
return 2
home = os.environ['HOME']
dest = os.path.join(home, '.buildman-toolchains')
@@ -580,28 +581,28 @@ class Toolchains:
tmpdir, tarfile = self.Download(url)
if not tarfile:
return 1
- print col.Color(col.GREEN, 'Unpacking to: %s' % dest),
+ print(col.Color(col.GREEN, 'Unpacking to: %s' % dest), end=' ')
sys.stdout.flush()
path = self.Unpack(tarfile, dest)
os.remove(tarfile)
os.rmdir(tmpdir)
- print
+ print()
# Check that the toolchain works
- print col.Color(col.GREEN, 'Testing')
+ print(col.Color(col.GREEN, 'Testing'))
dirpath = os.path.join(dest, path)
compiler_fname_list = self.ScanPath(dirpath, True)
if not compiler_fname_list:
- print 'Could not locate C compiler - fetch failed.'
+ print('Could not locate C compiler - fetch failed.')
return 1
if len(compiler_fname_list) != 1:
- print col.Color(col.RED, 'Warning, ambiguous toolchains: %s' %
- ', '.join(compiler_fname_list))
+ print(col.Color(col.RED, 'Warning, ambiguous toolchains: %s' %
+ ', '.join(compiler_fname_list)))
toolchain = Toolchain(compiler_fname_list[0], True, True)
# Make sure that it will be found by buildman
if not self.TestSettingsHasPath(dirpath):
- print ("Adding 'download' to config file '%s'" %
- bsettings.config_fname)
+ print(("Adding 'download' to config file '%s'" %
+ bsettings.config_fname))
bsettings.SetItem('toolchain', 'download', '%s/*/*' % dest)
return 0
--
2.24.0
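
Most of this patch is the same small set of mechanical Python 2 to 3 conversions repeated across the buildman sources. A self-contained sketch of the recurring idioms (the values are invented, none of this is code from the patch):

# Module renames: ConfigParser -> configparser, StringIO -> io.StringIO,
# Queue -> queue, HTMLParser -> html.parser, urllib2 -> urllib.request.
import configparser

settings = configparser.ConfigParser()
settings.read_string('[toolchain-alias]\nopenrisc = or1k\n')

# print is a function now; 'print >>fd, ...' becomes print(..., file=fd).
for key, value in settings.items('toolchain-alias'):   # .iteritems() is gone
    print('%-10s: %s' % (key, value))

# '/' is true division in Python 3; use '//' where an integer is required.
commit_count, step = 10, 3
assert (commit_count + step - 1) // step == 4

# string.maketrans() is replaced by str.maketrans() for text strings.
trans = str.maketrans('/: ', '---')
assert 'spl/u-boot: test'.translate(trans) == 'spl-u-boot--test'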

@@ -0,0 +1,228 @@
From 3b3e3c0f6c261a8c9f989d437dc261ba84467d4f Mon Sep 17 00:00:00 2001
From: Simon Glass <sjg@chromium.org>
Date: Thu, 31 Oct 2019 07:42:50 -0600
Subject: [PATCH] patman: Adjust 'command' to return strings instead of bytes
At present all the 'command' methods return bytes. Most of the time we
actually want strings, so change this. We still need to keep the internal
representation as bytes since otherwise unicode strings might break over
a read() boundary (e.g. 4KB), causing errors. But we can convert the end
result to strings.
Add a 'binary' parameter to cover the few cases where bytes are needed.
Upstream-Status: Backport
Signed-off-by: Simon Glass <sjg@chromium.org>
---
tools/binman/cbfs_util_test.py | 2 +-
tools/binman/ftest.py | 2 +-
tools/patman/command.py | 31 +++++++++++++++++++++++--------
tools/patman/tools.py | 29 +++++++++++++++++++++--------
4 files changed, 46 insertions(+), 18 deletions(-)
diff --git a/tools/binman/cbfs_util_test.py b/tools/binman/cbfs_util_test.py
index 772c794ece..ddc2e09e35 100755
--- a/tools/binman/cbfs_util_test.py
+++ b/tools/binman/cbfs_util_test.py
@@ -56,7 +56,7 @@ class TestCbfs(unittest.TestCase):
cls.have_lz4 = True
try:
tools.Run('lz4', '--no-frame-crc', '-c',
- tools.GetInputFilename('u-boot.bin'))
+ tools.GetInputFilename('u-boot.bin'), binary=True)
except:
cls.have_lz4 = False
--- a/tools/binman/ftest.py
+++ b/tools/binman/ftest.py
@@ -151,7 +151,7 @@ class TestFunctional(unittest.TestCase):
self.have_lz4 = True
try:
tools.Run('lz4', '--no-frame-crc', '-c',
- os.path.join(self._indir, 'u-boot.bin'))
+ os.path.join(self._indir, 'u-boot.bin'), binary=True)
except:
self.have_lz4 = False
diff --git a/tools/patman/command.py b/tools/patman/command.py
index 16299f3f5b..5fbd2c4a3e 100644
--- a/tools/patman/command.py
+++ b/tools/patman/command.py
@@ -4,6 +4,7 @@
import os
import cros_subprocess
+import tools
"""Shell command ease-ups for Python."""
@@ -31,6 +32,13 @@ class CommandResult:
self.return_code = return_code
self.exception = exception
+ def ToOutput(self, binary):
+ if not binary:
+ self.stdout = tools.ToString(self.stdout)
+ self.stderr = tools.ToString(self.stderr)
+ self.combined = tools.ToString(self.combined)
+ return self
+
# This permits interception of RunPipe for test purposes. If it is set to
# a function, then that function is called with the pipe list being
@@ -41,7 +49,7 @@ test_result = None
def RunPipe(pipe_list, infile=None, outfile=None,
capture=False, capture_stderr=False, oneline=False,
- raise_on_error=True, cwd=None, **kwargs):
+ raise_on_error=True, cwd=None, binary=False, **kwargs):
"""
Perform a command pipeline, with optional input/output filenames.
@@ -67,7 +75,7 @@ def RunPipe(pipe_list, infile=None, outfile=None,
else:
return test_result
# No result: fall through to normal processing
- result = CommandResult()
+ result = CommandResult(b'', b'', b'')
last_pipe = None
pipeline = list(pipe_list)
user_pipestr = '|'.join([' '.join(pipe) for pipe in pipe_list])
@@ -93,29 +101,36 @@ def RunPipe(pipe_list, infile=None, outfile=None,
if raise_on_error:
raise Exception("Error running '%s': %s" % (user_pipestr, str))
result.return_code = 255
- return result
+ return result.ToOutput(binary)
if capture:
result.stdout, result.stderr, result.combined = (
last_pipe.CommunicateFilter(None))
if result.stdout and oneline:
- result.output = result.stdout.rstrip('\r\n')
+ result.output = result.stdout.rstrip(b'\r\n')
result.return_code = last_pipe.wait()
else:
result.return_code = os.waitpid(last_pipe.pid, 0)[1]
if raise_on_error and result.return_code:
raise Exception("Error running '%s'" % user_pipestr)
- return result
+ return result.ToOutput(binary)
def Output(*cmd, **kwargs):
kwargs['raise_on_error'] = kwargs.get('raise_on_error', True)
return RunPipe([cmd], capture=True, **kwargs).stdout
def OutputOneLine(*cmd, **kwargs):
+ """Run a command and output it as a single-line string
+
+ The command us expected to produce a single line of output
+
+ Returns:
+ String containing output of command
+ """
raise_on_error = kwargs.pop('raise_on_error', True)
- return (RunPipe([cmd], capture=True, oneline=True,
- raise_on_error=raise_on_error,
- **kwargs).stdout.strip())
+ result = RunPipe([cmd], capture=True, oneline=True,
+ raise_on_error=raise_on_error, **kwargs).stdout.strip()
+ return result
def Run(*cmd, **kwargs):
return RunPipe([cmd], **kwargs).stdout
diff --git a/tools/patman/tools.py b/tools/patman/tools.py
index 4a7fcdad21..3feddb292f 100644
--- a/tools/patman/tools.py
+++ b/tools/patman/tools.py
@@ -186,7 +186,7 @@ def PathHasFile(path_spec, fname):
return True
return False
-def Run(name, *args):
+def Run(name, *args, **kwargs):
"""Run a tool with some arguments
This runs a 'tool', which is a program used by binman to process files and
@@ -201,13 +201,14 @@ def Run(name, *args):
CommandResult object
"""
try:
+ binary = kwargs.get('binary')
env = None
if tool_search_paths:
env = dict(os.environ)
env['PATH'] = ':'.join(tool_search_paths) + ':' + env['PATH']
all_args = (name,) + args
result = command.RunPipe([all_args], capture=True, capture_stderr=True,
- env=env, raise_on_error=False)
+ env=env, raise_on_error=False, binary=binary)
if result.return_code:
raise Exception("Error %d running '%s': %s" %
(result.return_code,' '.join(all_args),
@@ -375,7 +376,7 @@ def ToBytes(string):
"""Convert a str type into a bytes type
Args:
- string: string to convert value
+ string: string to convert
Returns:
Python 3: A bytes type
@@ -385,6 +386,18 @@ def ToBytes(string):
return string.encode('utf-8')
return string
+def ToString(bval):
+ """Convert a bytes type into a str type
+
+ Args:
+ bval: bytes value to convert
+
+ Returns:
+ Python 3: A bytes type
+ Python 2: A string type
+ """
+ return bval.decode('utf-8')
+
def Compress(indata, algo, with_header=True):
"""Compress some data using a given algorithm
@@ -406,14 +419,14 @@ def Compress(indata, algo, with_header=True):
fname = GetOutputFilename('%s.comp.tmp' % algo)
WriteFile(fname, indata)
if algo == 'lz4':
- data = Run('lz4', '--no-frame-crc', '-c', fname)
+ data = Run('lz4', '--no-frame-crc', '-c', fname, binary=True)
# cbfstool uses a very old version of lzma
elif algo == 'lzma':
outfname = GetOutputFilename('%s.comp.otmp' % algo)
Run('lzma_alone', 'e', fname, outfname, '-lc1', '-lp0', '-pb0', '-d8')
data = ReadFile(outfname)
elif algo == 'gzip':
- data = Run('gzip', '-c', fname)
+ data = Run('gzip', '-c', fname, binary=True)
else:
raise ValueError("Unknown algorithm '%s'" % algo)
if with_header:
@@ -446,13 +459,13 @@ def Decompress(indata, algo, with_header=True):
with open(fname, 'wb') as fd:
fd.write(indata)
if algo == 'lz4':
- data = Run('lz4', '-dc', fname)
+ data = Run('lz4', '-dc', fname, binary=True)
elif algo == 'lzma':
outfname = GetOutputFilename('%s.decomp.otmp' % algo)
Run('lzma_alone', 'd', fname, outfname)
- data = ReadFile(outfname)
+ data = ReadFile(outfname, binary=True)
elif algo == 'gzip':
- data = Run('gzip', '-cd', fname)
+ data = Run('gzip', '-cd', fname, binary=True)
else:
raise ValueError("Unknown algorithm '%s'" % algo)
return data
--
2.24.0
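
The commit message's point about the read() boundary is that a multi-byte UTF-8 character can be split across two reads of the pipe, so decoding each chunk on its own can fail, while accumulating bytes and decoding once at the end (which is what tools.ToString() is used for) always works. A standalone illustration with an artificially chosen split point:

# A multi-byte UTF-8 character split across a read() boundary.
payload = 'size: 4 KiB \u2192 8 KiB\n'.encode('utf-8')   # the arrow is 3 bytes

split = payload.index(b'\xe2') + 1       # cut one byte into the 3-byte sequence
chunk1, chunk2 = payload[:split], payload[split:]

try:
    chunk1.decode('utf-8') + chunk2.decode('utf-8')   # decoding chunk by chunk
except UnicodeDecodeError as exc:
    print('per-chunk decode fails:', exc)

# Keep bytes internally, decode once at the end: always safe.
assert (chunk1 + chunk2).decode('utf-8') == 'size: 4 KiB \u2192 8 KiB\n'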

@@ -0,0 +1,105 @@
From 3b1c0b09c99bfd30355a6ba87a15e9d408a51109 Mon Sep 17 00:00:00 2001
From: Simon Glass <sjg@chromium.org>
Date: Sat, 24 Aug 2019 07:22:41 -0600
Subject: [PATCH] patman: Drop binary parameter
Since cros_subprocess uses bytestrings now, this feature is not needed.
Drop it.
Upstream-Status: Backport
Signed-off-by: Simon Glass <sjg@chromium.org>
---
tools/patman/cros_subprocess.py | 3 +--
tools/patman/tools.py | 15 +++++++--------
2 files changed, 8 insertions(+), 10 deletions(-)
diff --git a/tools/patman/cros_subprocess.py b/tools/patman/cros_subprocess.py
index 06be64cc2c..0f0d60dfb7 100644
--- a/tools/patman/cros_subprocess.py
+++ b/tools/patman/cros_subprocess.py
@@ -54,7 +54,7 @@ class Popen(subprocess.Popen):
"""
def __init__(self, args, stdin=None, stdout=PIPE_PTY, stderr=PIPE_PTY,
- shell=False, cwd=None, env=None, binary=False, **kwargs):
+ shell=False, cwd=None, env=None, **kwargs):
"""Cut-down constructor
Args:
@@ -72,7 +72,6 @@ class Popen(subprocess.Popen):
"""
stdout_pty = None
stderr_pty = None
- self.binary = binary
if stdout == PIPE_PTY:
stdout_pty = pty.openpty()
diff --git a/tools/patman/tools.py b/tools/patman/tools.py
index 0d4705db76..97441ca796 100644
--- a/tools/patman/tools.py
+++ b/tools/patman/tools.py
@@ -186,7 +186,7 @@ def PathHasFile(path_spec, fname):
return True
return False
-def Run(name, *args, **kwargs):
+def Run(name, *args):
"""Run a tool with some arguments
This runs a 'tool', which is a program used by binman to process files and
@@ -196,7 +196,6 @@ def Run(name, *args, **kwargs):
Args:
name: Command name to run
args: Arguments to the tool
- kwargs: Options to pass to command.run()
Returns:
CommandResult object
@@ -206,8 +205,8 @@ def Run(name, *args, **kwargs):
if tool_search_paths:
env = dict(os.environ)
env['PATH'] = ':'.join(tool_search_paths) + ':' + env['PATH']
- return command.Run(name, *args, capture=True,
- capture_stderr=True, env=env, **kwargs)
+ return command.Run(name, *args, capture=True, capture_stderr=True,
+ env=env)
except:
if env and not PathHasFile(env['PATH'], name):
msg = "Please install tool '%s'" % name
@@ -401,14 +400,14 @@ def Compress(indata, algo, with_header=True):
fname = GetOutputFilename('%s.comp.tmp' % algo)
WriteFile(fname, indata)
if algo == 'lz4':
- data = Run('lz4', '--no-frame-crc', '-c', fname, binary=True)
+ data = Run('lz4', '--no-frame-crc', '-c', fname)
# cbfstool uses a very old version of lzma
elif algo == 'lzma':
outfname = GetOutputFilename('%s.comp.otmp' % algo)
Run('lzma_alone', 'e', fname, outfname, '-lc1', '-lp0', '-pb0', '-d8')
data = ReadFile(outfname)
elif algo == 'gzip':
- data = Run('gzip', '-c', fname, binary=True)
+ data = Run('gzip', '-c', fname)
else:
raise ValueError("Unknown algorithm '%s'" % algo)
if with_header:
@@ -441,13 +440,13 @@ def Decompress(indata, algo, with_header=True):
with open(fname, 'wb') as fd:
fd.write(indata)
if algo == 'lz4':
- data = Run('lz4', '-dc', fname, binary=True)
+ data = Run('lz4', '-dc', fname)
elif algo == 'lzma':
outfname = GetOutputFilename('%s.decomp.otmp' % algo)
Run('lzma_alone', 'd', fname, outfname)
data = ReadFile(outfname)
elif algo == 'gzip':
- data = Run('gzip', '-cd', fname, binary=True)
+ data = Run('gzip', '-cd', fname)
else:
raise ValueError("Unknown algorithm '%s'" % algo)
return data
--
2.24.0

@@ -0,0 +1,42 @@
From 6eace398072a62e74f10f412ffadfe51b7402395 Mon Sep 17 00:00:00 2001
From: Simon Glass <sjg@chromium.org>
Date: Sat, 24 Aug 2019 07:22:42 -0600
Subject: [PATCH] patman: Update command.Run() to handle failure better
At present tools are not expected to fail. If they do, an exception is
raised but there is no detail about what went wrong. This makes it hard
to debug if something does actually go wrong.
Fix this by outputting both stderr and stdout on failure.
Upstream-Status: Backport
Signed-off-by: Simon Glass <sjg@chromium.org>
---
tools/patman/tools.py | 10 ++++++++--
1 file changed, 8 insertions(+), 2 deletions(-)
diff --git a/tools/patman/tools.py b/tools/patman/tools.py
index 97441ca796..0952681579 100644
--- a/tools/patman/tools.py
+++ b/tools/patman/tools.py
@@ -205,8 +205,14 @@ def Run(name, *args):
if tool_search_paths:
env = dict(os.environ)
env['PATH'] = ':'.join(tool_search_paths) + ':' + env['PATH']
- return command.Run(name, *args, capture=True, capture_stderr=True,
- env=env)
+ all_args = (name,) + args
+ result = command.RunPipe([all_args], capture=True, capture_stderr=True,
+ env=env, raise_on_error=False)
+ if result.return_code:
+ raise Exception("Error %d running '%s': %s" %
+ (result.return_code,' '.join(all_args),
+ result.stderr))
+ return result.stdout
except:
if env and not PathHasFile(env['PATH'], name):
msg = "Please install tool '%s'" % name
--
2.24.0
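
The effect of the change is that a failing tool now reports its return code, the command line and its stderr instead of an opaque exception. A rough standalone sketch of the same pattern built on plain subprocess rather than patman's command module (illustrative only):

# Same error-reporting idea, using only the standard library.
import subprocess

def run_tool(name, *args):
    """Run a tool and raise a descriptive exception if it fails."""
    all_args = (name,) + args
    result = subprocess.run(all_args, capture_output=True, text=True)
    if result.returncode:
        raise Exception("Error %d running '%s': %s" %
                        (result.returncode, ' '.join(all_args), result.stderr))
    return result.stdout

try:
    run_tool('false')          # any failing command
except Exception as exc:
    print(exc)                 # e.g. "Error 1 running 'false': "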

@@ -0,0 +1,117 @@
From b4cf5f1df741e8781bed6149291823cd1a4b8baa Mon Sep 17 00:00:00 2001
From: Simon Glass <sjg@chromium.org>
Date: Thu, 31 Oct 2019 07:42:59 -0600
Subject: [PATCH] pylibfdt: Convert to Python 3
Build this swig module with Python 3.
Upstream-Status: Backport
Signed-off-by: Simon Glass <sjg@chromium.org>
---
scripts/dtc/pylibfdt/Makefile | 2 +-
scripts/dtc/pylibfdt/libfdt.i_shipped | 2 +-
scripts/dtc/pylibfdt/setup.py | 2 +-
tools/binman/entry.py | 16 ++--------------
tools/binman/entry_test.py | 15 ---------------
5 files changed, 5 insertions(+), 32 deletions(-)
diff --git a/scripts/dtc/pylibfdt/Makefile b/scripts/dtc/pylibfdt/Makefile
index 15e66ad44d..42342c75bb 100644
--- a/scripts/dtc/pylibfdt/Makefile
+++ b/scripts/dtc/pylibfdt/Makefile
@@ -21,7 +21,7 @@ quiet_cmd_pymod = PYMOD $@
CPPFLAGS="$(HOSTCFLAGS) -I$(LIBFDT_srcdir)" OBJDIR=$(obj) \
SOURCES="$(PYLIBFDT_srcs)" \
SWIG_OPTS="-I$(LIBFDT_srcdir) -I$(LIBFDT_srcdir)/.." \
- $(PYTHON2) $< --quiet build_ext --inplace
+ $(PYTHON3) $< --quiet build_ext --inplace
$(obj)/_libfdt.so: $(src)/setup.py $(PYLIBFDT_srcs) FORCE
$(call if_changed,pymod)
diff --git a/scripts/dtc/pylibfdt/libfdt.i_shipped b/scripts/dtc/pylibfdt/libfdt.i_shipped
index 76e61e98bd..53b70f8f5e 100644
--- a/scripts/dtc/pylibfdt/libfdt.i_shipped
+++ b/scripts/dtc/pylibfdt/libfdt.i_shipped
@@ -624,7 +624,7 @@ class Fdt(FdtRo):
Raises:
FdtException if no parent found or other error occurs
"""
- val = val.encode('utf-8') + '\0'
+ val = val.encode('utf-8') + b'\0'
return check_err(fdt_setprop(self._fdt, nodeoffset, prop_name,
val, len(val)), quiet)
diff --git a/scripts/dtc/pylibfdt/setup.py b/scripts/dtc/pylibfdt/setup.py
index 4f7cf042bf..992cdec30f 100755
--- a/scripts/dtc/pylibfdt/setup.py
+++ b/scripts/dtc/pylibfdt/setup.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
"""
setup.py file for SWIG libfdt
diff --git a/tools/binman/entry.py b/tools/binman/entry.py
index 409c0dca93..5bf5be4794 100644
--- a/tools/binman/entry.py
+++ b/tools/binman/entry.py
@@ -7,16 +7,7 @@
from __future__ import print_function
from collections import namedtuple
-
-# importlib was introduced in Python 2.7 but there was a report of it not
-# working in 2.7.12, so we work around this:
-# http://lists.denx.de/pipermail/u-boot/2016-October/269729.html
-try:
- import importlib
- have_importlib = True
-except:
- have_importlib = False
-
+import importlib
import os
import sys
@@ -119,10 +110,7 @@ class Entry(object):
old_path = sys.path
sys.path.insert(0, os.path.join(our_path, 'etype'))
try:
- if have_importlib:
- module = importlib.import_module(module_name)
- else:
- module = __import__(module_name)
+ module = importlib.import_module(module_name)
except ImportError as e:
raise ValueError("Unknown entry type '%s' in node '%s' (expected etype/%s.py, error '%s'" %
(etype, node_path, module_name, e))
diff --git a/tools/binman/entry_test.py b/tools/binman/entry_test.py
index 13f5864516..277e10b585 100644
--- a/tools/binman/entry_test.py
+++ b/tools/binman/entry_test.py
@@ -39,21 +39,6 @@ class TestEntry(unittest.TestCase):
else:
import entry
- def test1EntryNoImportLib(self):
- """Test that we can import Entry subclassess successfully"""
- sys.modules['importlib'] = None
- global entry
- self._ReloadEntry()
- entry.Entry.Create(None, self.GetNode(), 'u-boot')
- self.assertFalse(entry.have_importlib)
-
- def test2EntryImportLib(self):
- del sys.modules['importlib']
- global entry
- self._ReloadEntry()
- entry.Entry.Create(None, self.GetNode(), 'u-boot-spl')
- self.assertTrue(entry.have_importlib)
-
def testEntryContents(self):
"""Test the Entry bass class"""
import entry
--
2.24.0
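
The libfdt.i_shipped change is needed because Python 3 refuses to mix str and bytes: once the property value has been encoded, the NUL terminator appended to it must be a bytes literal as well. A minimal standalone illustration:

# str + bytes raises TypeError in Python 3, so the terminator must be bytes.
val = 'compatible-string'

try:
    val.encode('utf-8') + '\0'       # fine in Python 2, TypeError in Python 3
except TypeError as exc:
    print(exc)                       # can't concat str to bytes

packed = val.encode('utf-8') + b'\0'
assert packed.endswith(b'\0') and len(packed) == len(val) + 1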

@@ -0,0 +1,107 @@
require recipes-bsp/u-boot/u-boot.inc
DESCRIPTION = "U-Boot provided by Freescale with focus on QorIQ boards"
PROVIDES += "u-boot"
inherit fsl-u-boot-localversion
LICENSE = "GPLv2 & BSD-3-Clause & BSD-2-Clause & LGPL-2.0 & LGPL-2.1"
LIC_FILES_CHKSUM = " \
file://Licenses/gpl-2.0.txt;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
file://Licenses/bsd-2-clause.txt;md5=6a31f076f5773aabd8ff86191ad6fdd5 \
file://Licenses/bsd-3-clause.txt;md5=4a1190eac56a9db675d58ebe86eaf50c \
file://Licenses/lgpl-2.0.txt;md5=5f30f0716dfdd0d91eb439ebec522ec2 \
file://Licenses/lgpl-2.1.txt;md5=4fbd65380cdd255951079008b364516c \
"
SRC_URI = "git://source.codeaurora.org/external/qoriq/qoriq-components/u-boot;nobranch=1 \
file://0001-patman-Drop-binary-parameter.patch \
file://0001-patman-Update-command.Run-to-handle-failure-better.patch \
file://0001-patman-Adjust-command-to-return-strings-instead-of-b.patch \
file://0001-pylibfdt-Convert-to-Python-3.patch \
file://0001-binman-Convert-a-few-tests-to-Python-3.patch \
file://0001-binman-Move-to-use-Python-3.patch \
file://0001-buildman-Convert-to-Python-3.patch \
file://0001-Remove-redundant-YYLOC-global-declaration.patch \
"
SRCREV = "1e55b2f9e7f56b76569089b9e950f49c1579580e"
S = "${WORKDIR}/git"
B = "${WORKDIR}/build"
PV_append = "+fslgit"
LOCALVERSION = "+fsl"
INHIBIT_DEFAULT_DEPS = "1"
DEPENDS = "libgcc virtual/${TARGET_PREFIX}gcc bison-native bc-native swig-native python3-native"
DEPENDS_append_qoriq-arm64 = " dtc-native"
DEPENDS_append_qoriq-arm = " dtc-native"
DEPENDS_append_qoriq-ppc = " boot-format-native"
python () {
    if d.getVar("TCMODE") == "external-fsl":
        return
    ml = d.getVar("MULTILIB_VARIANTS")
    arch = d.getVar("OVERRIDES")
    # For machines built with the 64-bit e5500/e6500 tunes, u-boot is compiled
    # with the 32-bit (lib32) powerpc multilib cross toolchain.
    if "e5500-64b:" in arch or "e6500-64b:" in arch:
        if not "lib32" in ml:
            raise bb.parse.SkipPackage("Building the u-boot for this arch requires multilib to be enabled")
        sys_multilib = d.getVar('TARGET_VENDOR') + 'mllib32-linux'
        sys_original = d.getVar('TARGET_VENDOR') + '-' + d.getVar('TARGET_OS')
        workdir = d.getVar('WORKDIR')
        d.setVar('DEPENDS_append', ' lib32-gcc-cross-powerpc lib32-libgcc')
        d.setVar('PATH_append', ':' + d.getVar('STAGING_BINDIR_NATIVE') + '/powerpc' + sys_multilib)
        d.setVar('TOOLCHAIN_OPTIONS', '--sysroot=' + workdir + '/lib32-recipe-sysroot')
        d.setVar("WRAP_TARGET_PREFIX", 'powerpc' + sys_multilib + '-')
    # For machines using the 32-bit fsl-lsch2 tune, u-boot is compiled with the
    # 64-bit (lib64) aarch64 multilib cross toolchain.
    elif "fsl-lsch2-32b:" in arch:
        if not "lib64" in ml:
            raise bb.parse.SkipRecipe("Building the u-boot for this arch requires multilib to be enabled")
        sys_multilib = d.getVar('TARGET_VENDOR') + 'mllib64-linux'
        sys_original = d.getVar('TARGET_VENDOR') + '-' + d.getVar('TARGET_OS')
        workdir = d.getVar('WORKDIR')
        d.setVar('DEPENDS_append', ' lib64-gcc-cross-aarch64 lib64-libgcc')
        d.setVar('PATH_append', ':' + d.getVar('STAGING_BINDIR_NATIVE') + '/aarch64' + sys_multilib)
        d.setVar('TOOLCHAIN_OPTIONS', '--sysroot=' + workdir + '/lib64-recipe-sysroot')
        d.setVar("WRAP_TARGET_PREFIX", 'aarch64' + sys_multilib + '-')
}
LE_UBOOT_FOR_ARMBE_TARGET ?= "0"
ENDIANNESS_GCC = "${@bb.utils.contains("LE_UBOOT_FOR_ARMBE_TARGET", "1", "-mlittle-endian", "", d)}"
ENDIANNESS_LD = "${@bb.utils.contains("LE_UBOOT_FOR_ARMBE_TARGET", "1", "-EL", "", d)}"
WRAP_TARGET_PREFIX ?= "${TARGET_PREFIX}"
EXTRA_OEMAKE = 'CROSS_COMPILE=${WRAP_TARGET_PREFIX} CC="${WRAP_TARGET_PREFIX}gcc ${TOOLCHAIN_OPTIONS} ${ENDIANNESS_GCC}" LD="${WRAP_TARGET_PREFIX}ld ${ENDIANNESS_LD}" V=1'
EXTRA_OEMAKE += 'HOSTCC="${BUILD_CC} ${BUILD_CFLAGS} ${BUILD_LDFLAGS}"'
EXTRA_OEMAKE += 'STAGING_INCDIR=${STAGING_INCDIR_NATIVE} STAGING_LIBDIR=${STAGING_LIBDIR_NATIVE}'
do_compile_append_qoriq() {
    unset i j k
    for config in ${UBOOT_MACHINE}; do
        i=`expr $i + 1`;
        for type in ${UBOOT_CONFIG}; do
            j=`expr $j + 1`;
            for binary in ${UBOOT_BINARIES}; do
                k=`expr $k + 1`
                if [ $j -eq $i ] && [ $k -eq $i ]; then
                    if [ -n "${BOOTFORMAT_CONFIG}" ] && echo "${type}" |grep -q spi;then
                        # regenerate spi binary if BOOTFORMAT_CONFIG is set
                        boot_format ${STAGING_DATADIR_NATIVE}/boot_format/${BOOTFORMAT_CONFIG} \
                            ${config}/u-boot-${type}.${UBOOT_SUFFIX} -spi ${config}/u-boot.format.bin
                        cp ${config}/u-boot.format.bin ${config}/u-boot-${type}.${UBOOT_SUFFIX}
                    elif [ "qspi" = "${type}" ];then
                        cp ${config}/${binary} ${config}/u-boot-${type}-${PV}-${PR}.${UBOOT_SUFFIX}
                    fi
                fi
            done
            unset k
        done
        unset j
    done
    unset i
}
PACKAGES += "${PN}-images"
FILES_${PN}-images += "/boot"
COMPATIBLE_MACHINE = "(qoriq)"
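
The nested loop in do_compile_append_qoriq pairs the n-th entries of UBOOT_MACHINE, UBOOT_CONFIG and UBOOT_BINARIES by keeping three counters in step and only acting when all three indices match. The same pairing is easier to see as a zip over the three lists; the sketch below is illustrative only (the list values are invented) and is not part of the recipe:

# The shell loop above processes the i-th (machine, config, binary) triple.
uboot_machine  = 'ls1043ardb_defconfig ls1043ardb_sdcard_defconfig'.split()
uboot_config   = 'nor sdcard'.split()
uboot_binaries = 'u-boot-dtb.bin u-boot-with-spl-pbl.bin'.split()

for config, cfg_type, binary in zip(uboot_machine, uboot_config, uboot_binaries):
    if cfg_type == 'qspi':
        # mirrors: cp ${config}/${binary} ${config}/u-boot-${type}-${PV}-${PR}.${UBOOT_SUFFIX}
        print('copy %s/%s -> %s/u-boot-%s-<version>.bin' % (config, binary, config, cfg_type))
    else:
        print('%s (%s): no qspi post-processing' % (config, cfg_type))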