Buildroot long-term support release 2024.02.3

buildroot-2024.02.3.tar.gz

Signed-off-by: carbon <carbon@milkv.io>
This commit is contained in:
carbon
2024-06-14 22:30:26 +08:00
parent aac91c08cd
commit e42fe97d74
19479 changed files with 284259 additions and 232833 deletions

View File

@ -0,0 +1,111 @@
#!/usr/bin/env bash
set -e
# Add hash files for packages with custom versions for
# BR2_DOWNLOAD_FORCE_CHECK_HASHES=y
#
# Run in a configured Buildroot directory, E.G.
# make foo_defconfig; ./utils/add-custom-hashes
# print BR-style message
# message <info message>
# print BR-style message: highlighted with terminal standout mode when
# available, plain text otherwise
# message <info message>
message() {
    tput smso 2>/dev/null
    # printf instead of echo: safe for messages starting with '-' or
    # containing backslash sequences
    printf '%s\n' "$*"
    tput rmso 2>/dev/null
}
# print error message and exit
# die <error message>
# Report a fatal error on stderr and abort the whole script.
# die <error message>
die() {
    printf '%s\n' "Error: $*" >&2
    exit 1
}
# get package(s) for download file, if any
# get_pkgs <json> <file>
# List the package(s), if any, whose first download source is <file>.
# get_pkgs <json> <file>
get_pkgs() {
    local json="$1" fname="$2"
    jq -r --arg file "$fname" \
        'to_entries[] | select(.value.downloads[0].source == $file) | .key | strings' \
        "$json"
}
# get download dir for package
# get_pkg_dl_dir <json> <package>
# Print the download directory recorded in the JSON info for <package>.
# get_pkg_dl_dir <json> <package>
get_pkg_dl_dir() {
    local json="$1" package="$2"
    jq -r --arg pkg "$package" '.[$pkg].dl_dir | strings' "$json"
}
# generate hash file for download file
# gen_hash <dir> <file>
# Emit a Buildroot-style hash file entry for <file> located in <dir>:
# a "# Locally calculated" header followed by its sha256 line.
# gen_hash <dir> <file>
gen_hash() {
    local dir="$1" fname="$2"
    # subshell so the cd does not leak into the caller
    (
        cd "$dir" || exit
        printf '# Locally calculated\nsha256 '
        sha256sum "$fname"
    )
}
command -v jq >/dev/null || die 'Script needs jq'
[ -e .config ] || \
	die "No .config found, please run this in a configured Buildroot (O=) directory"
message Collecting data
# import TOPDIR, DL_DIR, BR_NO_CHECK_HASH_FOR and BR2_GLOBAL_PATCH_DIR
# from the Buildroot make infrastructure into this shell
eval "$(make -s VARS='TOPDIR DL_DIR BR_NO_CHECK_HASH_FOR BR2_GLOBAL_PATCH_DIR' QUOTED_VARS=YES printvars)"
# global patch dir may already have quotes
BR2_GLOBAL_PATCH_DIR=$(echo "$BR2_GLOBAL_PATCH_DIR" | tr -d '"')
[ -n "$BR2_GLOBAL_PATCH_DIR" ] || die "No BR2_GLOBAL_PATCH_DIR defined, nothing to do"
[ -n "$BR_NO_CHECK_HASH_FOR" ] || die "No packages without hashes found, nothing to do"
[ -d "$TOPDIR" ] || die "TOPDIR ($TOPDIR) does not look correct"
[ -d "$DL_DIR" ] || die "DL_DIR ($DL_DIR) does not look correct"
# patch dir may contain multiple dirs, use the last one
# shellcheck disable=SC2086 # we need the word splitting
set -- $BR2_GLOBAL_PATCH_DIR
if [ $# -gt 1 ]; then
	# ${!#} is the last positional parameter
	BR2_GLOBAL_PATCH_DIR="${!#}";
	message BR2_GLOBAL_PATCH_DIR contains multiple directories, using "$BR2_GLOBAL_PATCH_DIR"
fi
# patch dir may be relative to TOPDIR
case "$BR2_GLOBAL_PATCH_DIR" in
/*) ;;
*) BR2_GLOBAL_PATCH_DIR="$TOPDIR/$BR2_GLOBAL_PATCH_DIR"
	;;
esac
[ -d "$BR2_GLOBAL_PATCH_DIR" ] \
	|| die "BR2_GLOBAL_PATCH_DIR ($BR2_GLOBAL_PATCH_DIR) does not look correct"
# NOTE(review): the trap is installed before JSON is assigned; if mktemp
# failed, the trap would run 'rm -f ""' -- consider swapping these two lines.
trap 'rm -f "$JSON"' EXIT
JSON=$(mktemp)
# dump the per-package download metadata (sources, dl_dir) as JSON
make show-info > "$JSON"
# ensure files have been downloaded, but without checking
make BR2_DOWNLOAD_FORCE_CHECK_HASHES= source
message Updating hashes
for file in $BR_NO_CHECK_HASH_FOR; do
	for pkg in $(get_pkgs "$JSON" "$file"); do
		HASHFILE="$BR2_GLOBAL_PATCH_DIR/$pkg/$pkg.hash"
		PKG_DL_DIR=$(get_pkg_dl_dir "$JSON" "$pkg")
		message "Adding hash for $file to $HASHFILE"
		# ${HASHFILE%/*} strips the filename, leaving the directory
		mkdir -p "${HASHFILE%/*}"
		gen_hash "$DL_DIR/$PKG_DL_DIR" "$file" > "$HASHFILE"
	done
done
# Symlink linux-headers to linux if identical
linux_hash="$BR2_GLOBAL_PATCH_DIR/linux/linux.hash"
linux_headers_hash="$BR2_GLOBAL_PATCH_DIR/linux-headers/linux-headers.hash"
if [ -e "$linux_hash" ] && [ -e "$linux_headers_hash" ] \
	&& cmp -s "$linux_hash" "$linux_headers_hash"; then
	ln -sf ../linux/linux.hash "$linux_headers_hash"
fi
message Verifying hashes
# re-download from a clean tree with hash checking forced on, so a bad
# generated hash file fails here rather than on a user's machine
make clean
make BR2_DOWNLOAD_FORCE_CHECK_HASHES=y source

42
buildroot-2024.02/utils/brmake Executable file
View File

@ -0,0 +1,42 @@
#!/bin/bash
# (C) 2016, "Yann E. MORIN" <yann.morin.1998@free.fr>
# License: WTFPL, https://spdx.org/licenses/WTFPL.html
# Wrap 'make', timestamping every output line into br.log while showing only
# the top-level Buildroot (">>>") messages on the terminal, then print the
# elapsed time.
# Globals:   SECONDS (read); appends to ./br.log
# Arguments: forwarded verbatim to make
# Returns:   the exit status of make
main() {
    # 'sf' added to the local list: it was previously leaked as a global
    local ret start d h m mf sf
    # command -v is the portable/builtin replacement for 'which'
    if ! command -v unbuffer >/dev/null 2>&1; then
        printf "you need to install 'unbuffer' (from package expect or expect-dev)\n" >&2
        exit 1
    fi
    start=${SECONDS}
    # unbuffer keeps make's output line-buffered so timestamps are accurate;
    # the process substitution prefixes each line with an ISO-8601 timestamp,
    # logs everything, and filters the terminal output to ">>>" lines
    ( exec 2>&1; unbuffer make "${@}"; ) \
    > >( while read -r line; do
             printf "%(%Y-%m-%dT%H:%M:%S)T %s\n" -1 "${line}"
         done \
         |tee -a br.log \
         |grep --colour=never -E '>>>'
    )
    ret=${?}
    d=$((SECONDS-start))
    printf "Done in "
    h=$((d/3600))
    d=$((d%3600))
    # once a larger unit has been printed, zero-pad the smaller ones
    # (mf/sf expand to "02" -> "%02d", or empty -> "%d")
    [ ${h} -eq 0 ] || { printf "%dh " ${h}; mf="02"; }
    m=$((d/60))
    d=$((d%60))
    [ ${m} -eq 0 ] || { printf "%${mf}dmin " ${m}; sf="02"; }
    printf "%${sf}ds" ${d}
    if [ ${ret} -ne 0 ]; then
        printf " (error code: %s)" ${ret}
    fi
    printf "\n"
    return ${ret}
}
main "${@}"

View File

@ -0,0 +1,316 @@
#!/usr/bin/env python3
# See utils/checkpackagelib/readme.txt before editing this file.
import argparse
import inspect
import magic
import os
import re
import sys
import checkpackagelib.base
import checkpackagelib.lib_config
import checkpackagelib.lib_hash
import checkpackagelib.lib_ignore
import checkpackagelib.lib_mk
import checkpackagelib.lib_patch
import checkpackagelib.lib_python
import checkpackagelib.lib_shellscript
import checkpackagelib.lib_sysv
VERBOSE_LEVEL_TO_SHOW_IGNORED_FILES = 3
flags = None # Command line arguments.
# There are two Python packages called 'magic':
# https://pypi.org/project/file-magic/
# https://pypi.org/project/python-magic/
# Both allow to return a MIME file type, but with a slightly different
# interface. Detect which one of the two we have based on one of the
# attributes.
if hasattr(magic, 'FileMagic'):
    # https://pypi.org/project/file-magic/
    def get_filetype(fname):
        """Return the MIME type of fname, e.g. "text/x-shellscript"."""
        return magic.detect_from_filename(fname).mime_type
else:
    # https://pypi.org/project/python-magic/
    def get_filetype(fname):
        """Return the MIME type of fname, e.g. "text/x-shellscript"."""
        return magic.from_file(fname, mime=True)
def get_ignored_parsers_per_file(intree_only, ignore_filename):
    """Build a dict mapping absolute filenames to the list of check
    (warning) names that are expected to fail for that file.

    intree_only: unused here; kept for interface compatibility with callers.
    ignore_filename: path to the ignore file, or a false value for none.
        Each non-blank line has the format:
        "<filename relative to the ignore file> <warning> [<warning>...]".
    Returns an empty dict when no ignore file was given.
    """
    ignored = dict()
    if not ignore_filename:
        return ignored
    filename = os.path.abspath(ignore_filename)
    # entries in the ignore file are relative to the file's own directory
    entry_base_dir = os.path.dirname(filename)
    with open(filename, "r") as f:
        for line in f:
            if not line.strip():
                continue  # tolerate blank lines instead of raising ValueError
            entry, warnings_str = line.split(' ', 1)
            ignored[os.path.join(entry_base_dir, entry)] = warnings_str.split()
    return ignored
def parse_args():
    """Parse the command line and return the populated flags namespace.

    Also resolves flags.ignore_list from --ignore-list, and normalizes the
    debug options: --failed-only disables --dry-run and silences warnings.
    """
    parser = argparse.ArgumentParser()
    # Do not use argparse.FileType("r") here because only files with known
    # format will be open based on the filename.
    parser.add_argument("files", metavar="F", type=str, nargs="*",
                        help="list of files")
    parser.add_argument("--br2-external", "-b", dest='intree_only', action="store_false",
                        help="do not apply the pathname filters used for intree files")
    parser.add_argument("--ignore-list", dest='ignore_filename', action="store",
                        help='override the default list of ignored warnings')
    parser.add_argument("--manual-url", action="store",
                        default="https://nightly.buildroot.org/",
                        help="default: %(default)s")
    parser.add_argument("--verbose", "-v", action="count", default=0)
    parser.add_argument("--quiet", "-q", action="count", default=0)
    # Now the debug options in the order they are processed.
    parser.add_argument("--include-only", dest="include_list", action="append",
                        help="run only the specified functions (debug)")
    parser.add_argument("--exclude", dest="exclude_list", action="append",
                        help="do not run the specified functions (debug)")
    parser.add_argument("--dry-run", action="store_true", help="print the "
                        "functions that would be called for each file (debug)")
    parser.add_argument("--failed-only", action="store_true", help="print only"
                        " the name of the functions that failed (debug)")
    flags = parser.parse_args()
    # expand the ignore file into {absolute filename: [warning names]}
    flags.ignore_list = get_ignored_parsers_per_file(flags.intree_only, flags.ignore_filename)
    if flags.failed_only:
        # print a single summary line per file, nothing else
        flags.dry_run = False
        flags.verbose = -1
    return flags
def get_lib_from_filetype(fname):
    """Pick the check library for a regular file based on its MIME type,
    or return None when no library applies."""
    if not os.path.isfile(fname):
        return None
    lib_by_mime = {
        "text/x-shellscript": checkpackagelib.lib_shellscript,
        "text/x-python": checkpackagelib.lib_python,
        "text/x-script.python": checkpackagelib.lib_python,
    }
    return lib_by_mime.get(get_filetype(fname))
# filename looks like a Kconfig file (Config.in, Config.in.host, ...)
CONFIG_IN_FILENAME = re.compile(r"Config\.\S*$")
# in-tree path prefixes that are subject to check-package
DO_CHECK_INTREE = re.compile(r"|".join([
    r".checkpackageignore",
    r"Config.in",
    r"arch/",
    r"board/",
    r"boot/",
    r"fs/",
    r"linux/",
    r"package/",
    r"support/",
    r"system/",
    r"toolchain/",
    r"utils/",
]))
# in-tree files/paths exempt from check-package (infra files, test fixtures)
DO_NOT_CHECK_INTREE = re.compile(r"|".join([
    r"boot/barebox/barebox\.mk$",
    r"fs/common\.mk$",
    r"package/doc-asciidoc\.mk$",
    r"package/pkg-\S*\.mk$",
    r"support/dependencies/[^/]+\.mk$",
    r"support/gnuconfig/config\.",
    r"support/kconfig/",
    r"support/misc/[^/]+\.mk$",
    r"support/testing/tests/.*br2-external/",
    r"toolchain/helpers\.mk$",
    r"toolchain/toolchain-external/pkg-toolchain-external\.mk$",
]))
# SysV init script naming convention: S<two digits><name>
SYSV_INIT_SCRIPT_FILENAME = re.compile(r"/S\d\d[^/]+$")
def get_lib_from_filename(fname):
    """Return the check library to use for fname, or None to skip the file.

    In-tree runs apply the DO_CHECK_INTREE / DO_NOT_CHECK_INTREE path
    filters; br2-external runs only skip external.mk when it sits next to
    an external.desc.  Falls back to MIME-type based detection.
    """
    if flags.intree_only:
        if DO_CHECK_INTREE.match(fname) is None:
            return None
        if DO_NOT_CHECK_INTREE.match(fname):
            return None
    else:
        # fname[:-2] + "desc" turns ".../external.mk" into ".../external.desc"
        if os.path.basename(fname) == "external.mk" and \
           os.path.exists(fname[:-2] + "desc"):
            return None
    if fname == ".checkpackageignore":
        return checkpackagelib.lib_ignore
    if CONFIG_IN_FILENAME.search(fname):
        return checkpackagelib.lib_config
    if fname.endswith(".hash"):
        return checkpackagelib.lib_hash
    if fname.endswith(".mk"):
        return checkpackagelib.lib_mk
    if fname.endswith(".patch"):
        return checkpackagelib.lib_patch
    if SYSV_INIT_SCRIPT_FILENAME.search(fname):
        return checkpackagelib.lib_sysv
    return get_lib_from_filetype(fname)
def common_inspect_rules(m):
    """Filtering shared by check-function and tool discovery: skip private
    names and honour the --include-only / --exclude debug options."""
    name = m.__name__
    # do not call the base class
    if name.startswith("_"):
        return False
    if flags.include_list and name not in flags.include_list:
        return False
    if flags.exclude_list and name in flags.exclude_list:
        return False
    return True
def is_a_check_function(m):
    """True for classes derived from _CheckFunction that pass the
    common discovery filters."""
    return (inspect.isclass(m)
            and issubclass(m, checkpackagelib.base._CheckFunction)
            and common_inspect_rules(m))
def is_external_tool(m):
    """True for classes derived from _Tool that pass the common
    discovery filters."""
    return (inspect.isclass(m)
            and issubclass(m, checkpackagelib.base._Tool)
            and common_inspect_rules(m))
def print_warnings(warnings, xfail):
    """Print a warning (respecting verbosity) and report its counters.

    warnings: list returned by a check hook (first entry is the warning,
        later entries are extra detail shown at higher -v levels) or None.
    xfail: True when this check is expected to fail for this file.
    Returns a (warnings_printed, failures) pair, each 0 or 1.
    """
    if warnings is None:
        return 0, 0  # No warning generated.
    if xfail:
        return 0, 1  # Warning not generated, fail expected for this file.
    visible = [msg for level, msg in enumerate(warnings) if flags.verbose >= level]
    for msg in visible:
        print(msg.replace("\t", "< tab >").rstrip())
    return 1, 1  # One more warning to count.
def check_file_using_lib(fname):
    """Run every applicable check function and external tool on one file.

    Returns a (warnings, lines) pair with the number of warnings generated
    and the number of lines processed.
    """
    # Count number of warnings generated and lines processed.
    nwarnings = 0
    nlines = 0
    # names of checks expected to fail for this file (from --ignore-list)
    xfail = flags.ignore_list.get(os.path.abspath(fname), [])
    failed = set()
    lib = get_lib_from_filename(fname)
    if not lib:
        if flags.verbose >= VERBOSE_LEVEL_TO_SHOW_IGNORED_FILES:
            print("{}: ignored".format(fname))
        return nwarnings, nlines
    internal_functions = inspect.getmembers(lib, is_a_check_function)
    external_tools = inspect.getmembers(lib, is_external_tool)
    all_checks = internal_functions + external_tools
    if flags.dry_run:
        functions_to_run = [c[0] for c in all_checks]
        print("{}: would run: {}".format(fname, functions_to_run))
        return nwarnings, nlines
    # instantiate one check object per check class for this file
    objects = [[c[0], c[1](fname, flags.manual_url)] for c in internal_functions]
    for name, cf in objects:
        warn, fail = print_warnings(cf.before(), name in xfail)
        if fail > 0:
            failed.add(name)
        nwarnings += warn
    lastline = ""
    # surrogateescape lets us process files with arbitrary (non-UTF-8) bytes
    with open(fname, "r", errors="surrogateescape") as f:
        for lineno, text in enumerate(f):
            nlines += 1
            for name, cf in objects:
                # a "# check-package <Name>" comment on the previous line
                # disables that check for this line
                if cf.disable.search(lastline):
                    continue
                line_sts = cf.check_line(lineno + 1, text)
                warn, fail = print_warnings(line_sts, name in xfail)
                if fail > 0:
                    failed.add(name)
                nwarnings += warn
            lastline = text
    for name, cf in objects:
        warn, fail = print_warnings(cf.after(), name in xfail)
        if fail > 0:
            failed.add(name)
        nwarnings += warn
    # external tools take only the filename, and run once per file
    tools = [[c[0], c[1](fname)] for c in external_tools]
    for name, tool in tools:
        warn, fail = print_warnings(tool.run(), name in xfail)
        if fail > 0:
            failed.add(name)
        nwarnings += warn
    # a check that was expected to fail but did not is itself a warning:
    # the ignore list is probably stale
    for should_fail in xfail:
        if should_fail not in failed:
            print("{}:0: {} was expected to fail, did you fix the file and forget to update {}?"
                  .format(fname, should_fail, flags.ignore_filename))
            nwarnings += 1
    if flags.failed_only:
        if len(failed) > 0:
            f = " ".join(sorted(failed))
            print("{} {}".format(fname, f))
    return nwarnings, nlines
def __main__():
    """Entry point: check all files given on the command line and exit
    non-zero when warnings were generated (unless --failed-only)."""
    global flags
    flags = parse_args()
    if flags.intree_only:
        # change all paths received to be relative to the base dir
        base_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
        files_to_check = [os.path.relpath(os.path.abspath(f), base_dir) for f in flags.files]
        # move current dir so the script find the files
        os.chdir(base_dir)
    else:
        files_to_check = flags.files
    if len(files_to_check) == 0:
        print("No files to check style")
        sys.exit(1)
    # Accumulate number of warnings generated and lines processed.
    total_warnings = 0
    total_lines = 0
    for fname in files_to_check:
        nwarnings, nlines = check_file_using_lib(fname)
        total_warnings += nwarnings
        total_lines += nlines
    # The warning messages are printed to stdout and can be post-processed
    # (e.g. counted by 'wc'), so for stats use stderr. Wait all warnings are
    # printed, for the case there are many of them, before printing stats.
    sys.stdout.flush()
    if not flags.quiet:
        print("{} lines processed".format(total_lines), file=sys.stderr)
        print("{} warnings generated".format(total_warnings), file=sys.stderr)
    if total_warnings > 0 and not flags.failed_only:
        sys.exit(1)
__main__()

View File

@ -0,0 +1,78 @@
#!/usr/bin/env python3
import argparse
import os
import sys
import checksymbolslib.file as file
from checksymbolslib.db import DB
def parse_args():
    """Parse the command line; --search optionally holds a regexp of
    symbols/filenames to print."""
    p = argparse.ArgumentParser()
    p.add_argument(
        '--search', action='store', default=None,
        help='print all symbols matching a given regular expression')
    return p.parse_args()
def change_to_top_dir():
    """chdir into the top directory (the parent of this script's dir),
    so all repository paths can be handled as relative paths."""
    here = os.path.realpath(__file__)
    os.chdir(os.path.dirname(os.path.dirname(here)))
def get_full_db(files_to_process):
    """Parse every given file, accumulating all symbol definitions and
    usages into a single DB instance."""
    db = DB()
    for path in files_to_process:
        file.populate_db_from_file(db, path)
    return db
def print_filenames_with_pattern(all_files, files_to_process, pattern):
    """Print the processed and the ignored filenames matching pattern."""
    ignored = file.get_list_of_filenames_with_pattern(all_files, files_to_process, pattern)
    processed = file.get_list_of_filenames_with_pattern(files_to_process, [], pattern)
    print('========== filenames found with pattern "{}": {}'.format(pattern, len(processed)))
    for name in processed:
        print(name)
    print('========== ignored filenames with pattern "{}": {}'.format(pattern, len(ignored)))
    for name in ignored:
        print(name)
def print_symbols_with_pattern(db, pattern):
    """Print every symbol from the DB whose name matches pattern."""
    matches = db.get_symbols_with_pattern(pattern)
    print('========== symbols with pattern "{}": {}'.format(pattern, len(matches)))
    for sym in matches:
        print(sym, str(matches[sym]))
def __main__():
    """Entry point: build the symbol DB for the whole tree, print all
    warnings to stderr, and exit non-zero if any were found."""
    flags = parse_args()
    change_to_top_dir()
    all_files = file.get_list_of_files_in_the_repo()
    files_to_process = file.get_list_of_files_to_process(all_files)
    db = get_full_db(files_to_process)
    if flags.search:
        print_filenames_with_pattern(all_files, files_to_process, flags.search)
        print_symbols_with_pattern(db, flags.search)
        print('========== warnings:')
    warnings = []
    warnings += db.get_warnings_for_choices_selected()
    warnings += db.get_warnings_for_legacy_symbols_being_defined()
    warnings += db.get_warnings_for_legacy_symbols_being_used()
    warnings += db.get_warnings_for_symbols_with_legacy_note_and_no_comment_on_usage()
    warnings += db.get_warnings_for_symbols_with_legacy_note_and_no_usage()
    warnings += db.get_warnings_for_symbols_without_definition()
    warnings += db.get_warnings_for_symbols_without_usage()
    # each warning is a (filename, lineno, message) tuple
    for filename, lineno, msg in sorted(warnings):
        print('{}:{}: {}'.format(filename, lineno, msg), file=sys.stderr)
    if len(warnings) > 0:
        sys.exit(1)

if __name__ == '__main__':
    __main__()

View File

@ -0,0 +1,29 @@
# See utils/checkpackagelib/readme.txt before editing this file.
import re
class _CheckFunction(object):
def __init__(self, filename, url_to_manual):
self.filename = filename
self.url_to_manual = url_to_manual
self.disable = re.compile(r"^\s*# check-package .*\b{}\b".format(self.__class__.__name__))
def before(self):
pass
def check_line(self, lineno, text):
pass
def after(self):
pass
class _Tool(object):
def __init__(self, filename):
self.filename = filename
def run(self):
pass
def hint(self):
return ""

View File

@ -0,0 +1,68 @@
# See utils/checkpackagelib/readme.txt before editing this file.
from checkpackagelib.base import _CheckFunction
class ConsecutiveEmptyLines(_CheckFunction):
    """Warn when two (or more) empty lines appear in a row."""

    def before(self):
        self.lastline = "non empty"

    def check_line(self, lineno, text):
        both_blank = self.lastline.strip() == "" and text.strip() == ""
        self.lastline = text
        if both_blank:
            return ["{}:{}: consecutive empty lines"
                    .format(self.filename, lineno)]
class EmptyLastLine(_CheckFunction):
    """Warn when the very last line of the file is empty."""

    def before(self):
        self.lastlineno = 0
        self.lastline = "non empty"

    def check_line(self, lineno, text):
        # just remember the last line seen; the decision is made in after()
        self.lastlineno, self.lastline = lineno, text

    def after(self):
        if not self.lastline.strip():
            return ["{}:{}: empty line at end of file"
                    .format(self.filename, self.lastlineno)]
class NewlineAtEof(_CheckFunction):
    """Warn when the file does not end with a newline character."""

    def before(self):
        self.lastlineno = 0
        self.lastline = "\n"

    def check_line(self, lineno, text):
        self.lastlineno, self.lastline = lineno, text

    def after(self):
        # if stripping newline characters changes nothing, none were present
        if self.lastline.rstrip("\r\n") == self.lastline:
            return ["{}:{}: missing newline at end of file"
                    .format(self.filename, self.lastlineno),
                    self.lastline]
class TrailingSpace(_CheckFunction):
    """Warn about whitespace before the end-of-line."""

    def check_line(self, lineno, text):
        without_newline = text.rstrip("\r\n")
        if without_newline != without_newline.rstrip():
            return ["{}:{}: line contains trailing whitespace"
                    .format(self.filename, lineno),
                    text]
class Utf8Characters(_CheckFunction):
    """Warn about lines containing non-ASCII characters."""

    def is_ascii(self, s):
        # treat anything that cannot be scanned char-by-char (e.g. bytes,
        # where ord() raises TypeError) as non-ASCII
        try:
            for c in s:
                if ord(c) >= 128:
                    return False
            return True
        except TypeError:
            return False

    def check_line(self, lineno, text):
        if self.is_ascii(text):
            return
        return ["{}:{}: line contains UTF-8 characters"
                .format(self.filename, lineno),
                text]

View File

@ -0,0 +1,273 @@
# See utils/checkpackagelib/readme.txt before editing this file.
# Kconfig generates errors if someone introduces a typo like "boool" instead of
# "bool", so below check functions don't need to check for things already
# checked by running "make menuconfig".
import re
from checkpackagelib.base import _CheckFunction
from checkpackagelib.lib import ConsecutiveEmptyLines # noqa: F401
from checkpackagelib.lib import EmptyLastLine # noqa: F401
from checkpackagelib.lib import NewlineAtEof # noqa: F401
from checkpackagelib.lib import TrailingSpace # noqa: F401
from checkpackagelib.tool import NotExecutable # noqa: F401
def _empty_or_comment(text):
line = text.strip()
# ignore empty lines and comment lines indented or not
return line == "" or line.startswith("#")
def _part_of_help_text(text):
return text.startswith("\t ")
# used in more than one check
# Kconfig keywords that start a new entry and therefore must not be
# indented in Config.in files
entries_that_should_not_be_indented = [
    "choice", "comment", "config", "endchoice", "endif", "endmenu", "if",
    "menu", "menuconfig", "source"]
class AttributesOrder(_CheckFunction):
    """Check that Config.in entry attributes follow the conventional
    order: type, default, depends on, select, help."""

    attributes_order_convention = {
        "bool": 1, "prompt": 1, "string": 1, "default": 2, "depends": 3,
        "select": 4, "help": 5}

    def before(self):
        self.state = 0

    def check_line(self, lineno, text):
        if _empty_or_comment(text) or _part_of_help_text(text):
            return
        attribute = text.split()[0]
        if attribute in entries_that_should_not_be_indented:
            # a new entry starts: reset the ordering state
            self.state = 0
            return
        rank = self.attributes_order_convention.get(attribute)
        if rank is None:
            return
        # save to process next line
        previous, self.state = self.state, rank
        if previous > rank:
            return ["{}:{}: attributes order: type, default, depends on,"
                    " select, help ({}#_config_files)"
                    .format(self.filename, lineno, self.url_to_manual),
                    text]
class CommentsMenusPackagesOrder(_CheckFunction):
    # Enforce alphabetical ordering of the packages sourced from the few
    # top-level Config.in menus.  Nesting of menu/if/comment blocks is
    # tracked in self.state as a "-menu-if-comment"-style string; per-level
    # bookkeeping lives in parallel lists indexed by self.level.
    def before(self):
        self.level = 0
        self.menu_of_packages = ["The top level menu"]
        self.new_package = ""
        self.package = [""]
        self.print_package_warning = [True]
        self.state = ""

    def get_level(self):
        # nesting depth == number of "-"-separated tokens in the state
        return len(self.state.split('-')) - 1

    def initialize_package_level_elements(self, text):
        # (re)initialize the bookkeeping for the current level, growing
        # the lists on first entry to a deeper level
        try:
            self.menu_of_packages[self.level] = text[:-1]
            self.package[self.level] = ""
            self.print_package_warning[self.level] = True
        except IndexError:
            self.menu_of_packages.append(text[:-1])
            self.package.append("")
            self.print_package_warning.append(True)

    def initialize_level_elements(self, text):
        self.level = self.get_level()
        self.initialize_package_level_elements(text)

    def check_line(self, lineno, text):
        # We only want to force sorting for the top-level menus
        if self.filename not in ["fs/Config.in",
                                 "package/Config.in",
                                 "package/Config.in.host",
                                 "package/kodi/Config.in"]:
            return
        source_line = re.match(r'^\s*source ".*/([^/]*)/Config.in(.host)?"', text)
        if text.startswith("comment "):
            if not self.state.endswith("-comment"):
                self.state += "-comment"
            self.initialize_level_elements(text)
        elif text.startswith("if "):
            self.state += "-if"
            self.initialize_level_elements(text)
        elif text.startswith("menu "):
            # a comment block implicitly ends when a menu starts
            if self.state.endswith("-comment"):
                self.state = self.state[:-8]
            self.state += "-menu"
            self.initialize_level_elements(text)
        elif text.startswith("endif") or text.startswith("endmenu"):
            if self.state.endswith("-comment"):
                self.state = self.state[:-8]
            # pop "-if" (3 chars) or "-menu" (5 chars) from the state
            if text.startswith("endif"):
                self.state = self.state[:-3]
            elif text.startswith("endmenu"):
                self.state = self.state[:-5]
            self.level = self.get_level()
        elif source_line:
            self.new_package = source_line.group(1)
            # We order _ before A, so replace it with .
            new_package_ord = self.new_package.replace('_', '.')
            # only the first out-of-order package per menu is reported
            if self.package[self.level] != "" and \
               self.print_package_warning[self.level] and \
               new_package_ord < self.package[self.level]:
                self.print_package_warning[self.level] = False
                prefix = "{}:{}: ".format(self.filename, lineno)
                spaces = " " * len(prefix)
                return ["{prefix}Packages in: {menu},\n"
                        "{spaces}are not alphabetically ordered;\n"
                        "{spaces}correct order: '-', '_', digits, capitals, lowercase;\n"
                        "{spaces}first incorrect package: {package}"
                        .format(prefix=prefix, spaces=spaces,
                                menu=self.menu_of_packages[self.level],
                                package=self.new_package),
                        text]
            self.package[self.level] = new_package_ord
class HelpText(_CheckFunction):
    """Check that help text lines are wrapped as <tab><2 spaces> with at
    most 62 characters of content; long URLs on their own line are OK."""

    HELP_TEXT_FORMAT = re.compile(r"^\t .{,62}$")
    URL_ONLY = re.compile(r"^(http|https|git)://\S*$")

    def before(self):
        self.help_text = False

    def check_line(self, lineno, text):
        if _empty_or_comment(text):
            return
        first_word = text.split()[0]
        if first_word in entries_that_should_not_be_indented:
            # a new entry ends any help text
            self.help_text = False
            return
        if text.strip() == "help":
            self.help_text = True
            return
        if not self.help_text:
            return
        well_formed = (self.HELP_TEXT_FORMAT.match(text.rstrip()) or
                       self.URL_ONLY.match(text.strip()))
        if well_formed:
            return
        return ["{}:{}: help text: <tab><2 spaces><62 chars>"
                " ({}#writing-rules-config-in)"
                .format(self.filename, lineno, self.url_to_manual),
                text,
                "\t " + "123456789 " * 6 + "12"]
class Indent(_CheckFunction):
    # Check Config.in indentation: attributes are indented with one tab,
    # entry keywords are not indented, and continuation lines (after a
    # trailing backslash) must be indented with tabs.
    ENDS_WITH_BACKSLASH = re.compile(r"^[^#].*\\$")
    entries_that_should_be_indented = [
        "bool", "default", "depends", "help", "prompt", "select", "string"]

    def before(self):
        self.backslash = False

    def check_line(self, lineno, text):
        if _empty_or_comment(text) or _part_of_help_text(text):
            self.backslash = False
            return
        entry = text.split()[0]
        last_line_ends_in_backslash = self.backslash
        # calculate for next line
        if self.ENDS_WITH_BACKSLASH.search(text):
            self.backslash = True
        else:
            self.backslash = False
        if last_line_ends_in_backslash:
            if text.startswith("\t"):
                return
            return ["{}:{}: continuation line should be indented using tabs"
                    .format(self.filename, lineno),
                    text]
        if entry in self.entries_that_should_be_indented:
            if not text.startswith("\t{}".format(entry)):
                return ["{}:{}: should be indented with one tab"
                        " ({}#_config_files)"
                        .format(self.filename, lineno, self.url_to_manual),
                        text]
        elif entry in entries_that_should_not_be_indented:
            if not text.startswith(entry):
                # four Config.in files have a special but legitimate indentation rule
                if self.filename in ["package/Config.in",
                                     "package/Config.in.host",
                                     "package/kodi/Config.in",
                                     "package/x11r7/Config.in"]:
                    return
                return ["{}:{}: should not be indented"
                        .format(self.filename, lineno),
                        text]
class RedefinedConfig(_CheckFunction):
    # Warn when the same BR2_* symbol is declared twice under the same
    # chain of enclosing "if" conditions.
    CONFIG = re.compile(r"^\s*(menu|)config\s+(BR2_\w+)\b")
    IF = re.compile(r"^\s*if\s+([^#]*)\b")
    ENDIF = re.compile(r"^\s*endif\b")

    def before(self):
        # {(symbol, condition chain): first declaration line}
        self.configs = {}
        # stack of the currently active "if" conditions
        self.conditional = []

    def check_line(self, lineno, text):
        if _empty_or_comment(text) or _part_of_help_text(text):
            return
        m = self.IF.search(text)
        if m is not None:
            condition = m.group(1)
            self.conditional.append(condition)
            return
        m = self.ENDIF.search(text)
        if m is not None:
            self.conditional.pop()
            return
        m = self.CONFIG.search(text)
        if m is None:
            return
        config = m.group(2)
        # keying on (symbol, condition chain) allows legitimate
        # redefinitions under different conditions
        key = (config, ' AND '.join(self.conditional))
        if key in self.configs.keys():
            previous_line = self.configs[key]
            return ["{}:{}: config {} redeclared (previous line: {})"
                    .format(self.filename, lineno, config, previous_line),
                    text]
        self.configs[key] = lineno

View File

@ -0,0 +1,70 @@
# See utils/checkpackagelib/readme.txt before editing this file.
# The validity of the hashes itself is checked when building, so below check
# functions don't need to check for things already checked by running
# "make package-dirclean package-source".
import re
from checkpackagelib.base import _CheckFunction
from checkpackagelib.lib import ConsecutiveEmptyLines # noqa: F401
from checkpackagelib.lib import EmptyLastLine # noqa: F401
from checkpackagelib.lib import NewlineAtEof # noqa: F401
from checkpackagelib.lib import TrailingSpace # noqa: F401
from checkpackagelib.tool import NotExecutable # noqa: F401
def _empty_line_or_comment(text):
return text.strip() == "" or text.startswith("#")
class HashNumberOfFields(_CheckFunction):
    """Every hash entry must have exactly three fields:
    <type> <hash> <filename>."""

    def check_line(self, lineno, text):
        if _empty_line_or_comment(text):
            return
        if len(text.split()) != 3:
            return ["{}:{}: expected three fields ({}#adding-packages-hash)"
                    .format(self.filename, lineno, self.url_to_manual),
                    text]
class HashType(_CheckFunction):
    """The hash value must be hex and its length must match the declared
    hash type."""

    len_of_hash = {"md5": 32, "sha1": 40, "sha224": 56, "sha256": 64,
                   "sha384": 96, "sha512": 128}

    def check_line(self, lineno, text):
        if _empty_line_or_comment(text):
            return
        fields = text.split()
        if len(fields) < 2:
            return
        htype, hexa = fields[0], fields[1]
        expected_len = self.len_of_hash.get(htype)
        if expected_len is None:
            return ["{}:{}: unexpected type of hash ({}#adding-packages-hash)"
                    .format(self.filename, lineno, self.url_to_manual),
                    text]
        if not re.match("^[0-9A-Fa-f]{%s}$" % expected_len, hexa):
            return ["{}:{}: hash size does not match type "
                    "({}#adding-packages-hash)"
                    .format(self.filename, lineno, self.url_to_manual),
                    text,
                    "expected {} hex digits".format(expected_len)]
class HashSpaces(_CheckFunction):
    # Check the separation between the three fields of a hash line.
    def check_line(self, lineno, text):
        if _empty_line_or_comment(text):
            return
        fields = text.split()
        if len(fields) != 3:
            # Handled by HashNumberOfFields
            return
        # NOTE(review): upstream uses two spaces between the fields in this
        # format string; this copy shows single spaces, which may be a
        # whitespace-collapsing artifact of this source -- confirm against
        # upstream before relying on it.
        if not re.match(re.escape("{} {} {}".format(*fields)), text):
            return ["{}:{}: separation does not match expectation "
                    "({}#adding-packages-hash)"
                    .format(self.filename, lineno, self.url_to_manual), text]

View File

@ -0,0 +1,14 @@
# See utils/checkpackagelib/readme.txt before editing this file.
import os
from checkpackagelib.base import _CheckFunction
class IgnoreMissingFile(_CheckFunction):
    """Warn when a file listed in .checkpackageignore no longer exists
    (the ignore entry is stale and should be removed)."""

    def check_line(self, lineno, text):
        fields = text.split()
        if not fields:
            # tolerate blank lines instead of raising IndexError
            return
        if not os.path.exists(fields[0]):
            return ["{}:{}: ignored file {} is missing"
                    .format(self.filename, lineno, fields[0]),
                    text]

View File

@ -0,0 +1,441 @@
# See utils/checkpackagelib/readme.txt before editing this file.
# There are already dependency checks during the build, so below check
# functions don't need to check for things already checked by exploring the
# menu options using "make menuconfig" and by running "make" with appropriate
# packages enabled.
import os
import re
from checkpackagelib.base import _CheckFunction
from checkpackagelib.lib import ConsecutiveEmptyLines # noqa: F401
from checkpackagelib.lib import EmptyLastLine # noqa: F401
from checkpackagelib.lib import NewlineAtEof # noqa: F401
from checkpackagelib.lib import TrailingSpace # noqa: F401
from checkpackagelib.lib import Utf8Characters # noqa: F401
from checkpackagelib.tool import NotExecutable # noqa: F401
# used in more than one check
# make directives that open a conditional block
start_conditional = ["ifdef", "ifeq", "ifndef", "ifneq"]
# make directives that continue a conditional block
continue_conditional = ["elif", "else"]
# make directives that close a conditional block
end_conditional = ["endif"]
class DoNotInstallToHostdirUsr(_CheckFunction):
    """Files must be installed to $(HOST_DIR)/, not $(HOST_DIR)/usr."""

    INSTALL_TO_HOSTDIR_USR = re.compile(r"^[^#].*\$\(HOST_DIR\)/usr")

    def check_line(self, lineno, text):
        if self.INSTALL_TO_HOSTDIR_USR.match(text.rstrip()) is None:
            return
        return ["{}:{}: install files to $(HOST_DIR)/ instead of $(HOST_DIR)/usr/"
                .format(self.filename, lineno),
                text]
class Ifdef(_CheckFunction):
    """Suggest ifeq/ifneq against $(SYMBOL) instead of ifdef/ifndef."""

    IFDEF = re.compile(r"^\s*(else\s+|)(ifdef|ifndef)\s")

    def check_line(self, lineno, text):
        m = self.IFDEF.search(text)
        if m is None:
            return
        if m.group(2) == 'ifdef':
            advice = "use ifeq ($(SYMBOL),y) instead of ifdef SYMBOL"
        else:
            advice = "use ifneq ($(SYMBOL),y) instead of ifndef SYMBOL"
        return ["{}:{}: {}".format(self.filename, lineno, advice),
                text]
class Indent(_CheckFunction):
    # Check .mk indentation: tabs are expected inside define..endef blocks,
    # after a line ending in backslash, and in rule recipes; tabs are
    # unexpected everywhere else (conditional directives reset this).
    COMMENT = re.compile(r"^\s*#")
    CONDITIONAL = re.compile(r"^\s*({})\s".format("|".join(start_conditional + end_conditional + continue_conditional)))
    ENDS_WITH_BACKSLASH = re.compile(r"^[^#].*\\$")
    END_DEFINE = re.compile(r"^\s*endef\s")
    MAKEFILE_TARGET = re.compile(r"^[^# \t]+:\s")
    START_DEFINE = re.compile(r"^\s*define\s")

    def before(self):
        self.define = False
        self.backslash = False
        self.makefile_target = False

    def check_line(self, lineno, text):
        if self.START_DEFINE.search(text):
            self.define = True
            return
        if self.END_DEFINE.search(text):
            self.define = False
            return
        expect_tabs = False
        if self.define or self.backslash or self.makefile_target:
            expect_tabs = True
        # a conditional directive is never indented, unless it continues
        # a backslash line
        if not self.backslash and self.CONDITIONAL.search(text):
            expect_tabs = False
        # calculate for next line
        if self.ENDS_WITH_BACKSLASH.search(text):
            self.backslash = True
        else:
            self.backslash = False
        if self.MAKEFILE_TARGET.search(text):
            self.makefile_target = True
            return
        # a blank line ends a rule's recipe
        if text.strip() == "":
            self.makefile_target = False
            return
        # comment can be indented or not inside define ... endef, so ignore it
        if self.define and self.COMMENT.search(text):
            return
        if expect_tabs:
            if not text.startswith("\t"):
                return ["{}:{}: expected indent with tabs"
                        .format(self.filename, lineno),
                        text]
        else:
            if text.startswith("\t"):
                return ["{}:{}: unexpected indent with tabs"
                        .format(self.filename, lineno),
                        text]
class OverriddenVariable(_CheckFunction):
    # Track conditional vs unconditional variable assignments and warn
    # about assignments that silently discard previously set values.
    CONCATENATING = re.compile(r"^([A-Z0-9_]+)\s*(\+|:|)=\s*\$\(\1\)")
    END_CONDITIONAL = re.compile(r"^\s*({})".format("|".join(end_conditional)))
    OVERRIDING_ASSIGNMENTS = [':=', "="]
    START_CONDITIONAL = re.compile(r"^\s*({})".format("|".join(start_conditional)))
    VARIABLE = re.compile(r"^([A-Z0-9_]+)\s*((\+|:|)=)")
    # suffixes that are conventionally overridden inside conditionals
    USUALLY_OVERRIDDEN = re.compile(r"^[A-Z0-9_]+({})".format("|".join([
        r"_ARCH\s*=\s*",
        r"_CPU\s*=\s*",
        r"_SITE\s*=\s*",
        r"_SOURCE\s*=\s*",
        r"_VERSION\s*=\s*"])))
    # suffixes that must never be conditionally overridden
    FORBIDDEN_OVERRIDDEN = re.compile(r"^[A-Z0-9_]+({})".format("|".join([
        r"_CONF_OPTS\s*=\s*",
        r"_DEPENDENCIES\s*=\s*"])))

    def before(self):
        # depth of nested make conditionals at the current line
        self.conditional = 0
        self.unconditionally_set = []
        self.conditionally_set = []

    def check_line(self, lineno, text):
        if self.START_CONDITIONAL.search(text):
            self.conditional += 1
            return
        if self.END_CONDITIONAL.search(text):
            self.conditional -= 1
            return
        m = self.VARIABLE.search(text)
        if m is None:
            return
        variable, assignment = m.group(1, 2)
        if self.conditional == 0:
            if variable in self.conditionally_set:
                self.unconditionally_set.append(variable)
                if assignment in self.OVERRIDING_ASSIGNMENTS:
                    return ["{}:{}: unconditional override of variable {} previously conditionally set"
                            .format(self.filename, lineno, variable),
                            text]
            if variable not in self.unconditionally_set:
                self.unconditionally_set.append(variable)
                return
            if assignment in self.OVERRIDING_ASSIGNMENTS:
                return ["{}:{}: unconditional override of variable {}"
                        .format(self.filename, lineno, variable),
                        text]
        else:
            if self.FORBIDDEN_OVERRIDDEN.search(text):
                return ["{}:{}: conditional override of variable {}"
                        .format(self.filename, lineno, variable),
                        text]
            if variable not in self.unconditionally_set:
                self.conditionally_set.append(variable)
                return
            # FOO := $(FOO) something -- should use += instead
            if self.CONCATENATING.search(text):
                return ["{}:{}: immediate assignment to append to variable {}"
                        .format(self.filename, lineno, variable),
                        text]
            if self.USUALLY_OVERRIDDEN.search(text):
                return
            if assignment in self.OVERRIDING_ASSIGNMENTS:
                return ["{}:{}: conditional override of variable {}"
                        .format(self.filename, lineno, variable),
                        text]
class PackageHeader(_CheckFunction):
    """Check the standard 6-line comment header of package .mk files:
    an 80-hash line, a '#' line, the package name line, a '#' line,
    another 80-hash line, then a blank separator line."""

    def before(self):
        # set to True for include-only wrapper files, which have no header
        self.skip = False

    def check_line(self, lineno, text):
        if self.skip or lineno > 6:
            return

        stripped = text.rstrip()
        if lineno in (1, 5):
            if lineno == 1 and text.startswith("include "):
                self.skip = True
                return
            if stripped != "#" * 80:
                return ["{}:{}: should be 80 hashes ({}#writing-rules-mk)"
                        .format(self.filename, lineno, self.url_to_manual),
                        text,
                        "#" * 80]
        elif lineno in (2, 4):
            if stripped != "#":
                return ["{}:{}: should be 1 hash ({}#writing-rules-mk)"
                        .format(self.filename, lineno, self.url_to_manual),
                        text]
        elif lineno == 6:
            if stripped != "":
                return ["{}:{}: should be a blank line ({}#writing-rules-mk)"
                        .format(self.filename, lineno, self.url_to_manual),
                        text]
class RemoveDefaultPackageSourceVariable(_CheckFunction):
    """Warn when <PKG>_SOURCE is explicitly set to the exact value the
    package infrastructure would infer anyway."""

    # toolchain packages share one .mk across several versions/dirs, so
    # they may legitimately spell out the default _SOURCE value
    packages_that_may_contain_default_source = ["binutils", "gcc", "gdb"]

    def before(self):
        pkg_name, _ = os.path.splitext(os.path.basename(self.filename))
        upper_name = pkg_name.replace("-", "_").upper()
        self.package = pkg_name
        # matches e.g.: FOO_SOURCE = foo-$(FOO_VERSION).tar.gz
        self.FIND_SOURCE = re.compile(
            r"^{}_SOURCE\s*=\s*{}-\$\({}_VERSION\)\.tar\.gz"
            .format(upper_name, pkg_name, upper_name))

    def check_line(self, lineno, text):
        if not self.FIND_SOURCE.search(text):
            return
        if self.package in self.packages_that_may_contain_default_source:
            return
        return ["{}:{}: remove default value of _SOURCE variable "
                "({}#generic-package-reference)"
                .format(self.filename, lineno, self.url_to_manual),
                text]
class SpaceBeforeBackslash(_CheckFunction):
    """Warn when a continuation backslash is preceded by a tab or by more
    than one space; exactly one space is the accepted style."""

    # Two spaces, or a tab optionally followed by one space, right before
    # the trailing backslash. A single " \" must NOT match, otherwise the
    # accepted style itself would be flagged (the previous single-space
    # alternative contradicted the warning message below).
    TAB_OR_MULTIPLE_SPACES_BEFORE_BACKSLASH = re.compile(r"^.*(  |\t ?)\\$")

    def check_line(self, lineno, text):
        # rstrip() so trailing whitespace after the backslash (checked by
        # another function) does not hide the '\' at end-of-line
        if self.TAB_OR_MULTIPLE_SPACES_BEFORE_BACKSLASH.match(text.rstrip()):
            return ["{}:{}: use only one space before backslash"
                    .format(self.filename, lineno),
                    text]
class TrailingBackslash(_CheckFunction):
    """Warn when a line-continuation backslash is followed by a line that
    is effectively empty, i.e. the continuation was useless."""

    # non-comment line ending with a backslash
    ENDS_WITH_BACKSLASH = re.compile(r"^[^#].*\\$")

    def before(self):
        self.backslash = False

    def check_line(self, lineno, text):
        previous_continued = self.backslash
        # record whether the current line continues into the next one
        self.backslash = bool(self.ENDS_WITH_BACKSLASH.search(text))
        if self.backslash:
            self.lastline = text
            return
        if previous_continued and text.strip() == "":
            # point at the previous line, which carries the stray backslash
            return ["{}:{}: remove trailing backslash"
                    .format(self.filename, lineno - 1),
                    self.lastline]
class TypoInPackageVariable(_CheckFunction):
    # Variables a package .mk file may legitimately set/extend even though
    # they do not carry the package-name prefix. Note: match() makes these
    # effectively prefix matches.
    ALLOWED = re.compile(r"|".join([
        "ACLOCAL_DIR",
        "ACLOCAL_HOST_DIR",
        "ACLOCAL_PATH",
        "BR_CCACHE_INITIAL_SETUP",
        "BR_LIBC",
        "BR_NO_CHECK_HASH_FOR",
        "GCC_TARGET",
        "LINUX_EXTENSIONS",
        "LINUX_POST_PATCH_HOOKS",
        "LINUX_TOOLS",
        "LUA_RUN",
        "MKFS_JFFS2",
        "MKIMAGE_ARCH",
        "PACKAGES_PERMISSIONS_TABLE",
        "PKG_CONFIG_HOST_BINARY",
        "SUMTOOL",
        "TARGET_FINALIZE_HOOKS",
        "TARGETS_ROOTFS",
        "XTENSA_CORE_NAME"]))
    # matches "VAR_NAME ..." or "define VAR_NAME"; group 2 is the variable
    VARIABLE = re.compile(r"^(define\s+)?([A-Z0-9_]+_[A-Z0-9_]+)")

    def before(self):
        # derive the expected variable prefix from the .mk filename
        package, _ = os.path.splitext(os.path.basename(self.filename))
        package = package.replace("-", "_").upper()
        # linux tools do not use LINUX_TOOL_ prefix for variables
        package = package.replace("LINUX_TOOL_", "")
        # linux extensions do not use LINUX_EXT_ prefix for variables
        package = package.replace("LINUX_EXT_", "")
        self.package = package
        # a correctly-prefixed variable, optionally with HOST_/ROOTFS_ prefix
        self.REGEX = re.compile(r"(HOST_|ROOTFS_)?({}_[A-Z0-9_]+)".format(package))
        self.FIND_VIRTUAL = re.compile(
            r"^{}_PROVIDES\s*(\+|)=\s*(.*)".format(package))
        # names of virtual packages this package declares it provides
        self.virtual = []

    def check_line(self, lineno, text):
        m = self.VARIABLE.search(text)
        if m is None:
            return
        variable = m.group(2)

        # allow to set variables for virtual package this package provides
        v = self.FIND_VIRTUAL.search(text)
        if v:
            self.virtual += v.group(2).upper().split()
            return
        for virtual in self.virtual:
            if variable.startswith("{}_".format(virtual)):
                return
        if self.ALLOWED.match(variable):
            return
        # warn only when the line mentions no correctly-prefixed variable,
        # so the found name is likely a misspelling of the package prefix
        if self.REGEX.search(text) is None:
            return ["{}:{}: possible typo: {} -> *{}*"
                    .format(self.filename, lineno, variable, self.package),
                    text]
class UselessFlag(_CheckFunction):
    """Warn when a package sets an infra flag to its default value, which
    is redundant — unless it is done conditionally (to restore the default
    after a conditional override)."""

    DEFAULT_GENERIC_FLAG = re.compile(r"^.*{}".format("|".join([
        r"_INSTALL_IMAGES\s*=\s*NO",
        r"_INSTALL_REDISTRIBUTE\s*=\s*YES",
        r"_INSTALL_STAGING\s*=\s*NO",
        r"_INSTALL_TARGET\s*=\s*YES"])))
    DEFAULT_AUTOTOOLS_FLAG = re.compile(r"^.*{}".format("|".join([
        r"_AUTORECONF\s*=\s*NO",
        r"_LIBTOOL_PATCH\s*=\s*YES"])))
    END_CONDITIONAL = re.compile(r"^\s*({})".format("|".join(end_conditional)))
    START_CONDITIONAL = re.compile(r"^\s*({})".format("|".join(start_conditional)))

    def before(self):
        self.conditional = 0

    def check_line(self, lineno, text):
        # keep track of conditional nesting depth
        if self.START_CONDITIONAL.search(text):
            self.conditional += 1
            return
        if self.END_CONDITIONAL.search(text):
            self.conditional -= 1
            return
        # allow non-default conditionally overridden by default
        if self.conditional > 0:
            return

        if self.DEFAULT_GENERIC_FLAG.search(text):
            return ["{}:{}: useless default value ({}#"
                    "_infrastructure_for_packages_with_specific_build_systems)"
                    .format(self.filename, lineno, self.url_to_manual),
                    text]

        is_host_variable = text.lstrip().startswith("HOST_")
        if not is_host_variable and self.DEFAULT_AUTOTOOLS_FLAG.search(text):
            return ["{}:{}: useless default value "
                    "({}#_infrastructure_for_autotools_based_packages)"
                    .format(self.filename, lineno, self.url_to_manual),
                    text]
class VariableWithBraces(_CheckFunction):
    """Warn about ${VAR}-style references in make code; make convention
    is $(VAR). A leading '$$' (escaped shell variable) is not flagged."""

    VARIABLE_WITH_BRACES = re.compile(r"^[^#].*[^$]\${\w+}")

    def check_line(self, lineno, text):
        stripped = text.rstrip()
        if not self.VARIABLE_WITH_BRACES.match(stripped):
            return
        return ["{}:{}: use $() to delimit variables, not ${{}}"
                .format(self.filename, lineno),
                text]
class CPEVariables(_CheckFunction):
    """
    Check that the values for the CPE variables are not the default.
    - CPE_ID_* variables must not be set to their default
    - CPE_ID_VALID must not be set if a non-default CPE_ID variable is set
    """
    def before(self):
        # package name derived from the .mk filename
        pkg, _ = os.path.splitext(os.path.basename(self.filename))
        # default value for each CPE_ID_<field>; VERSION is None until the
        # package's <PKG>_VERSION line has been seen
        self.CPE_fields_defaults = {
            "VALID": "NO",
            "PREFIX": "cpe:2.3:a",
            "VENDOR": f"{pkg}_project",
            "PRODUCT": pkg,
            "VERSION": None,
            "UPDATE": "*",
        }
        # line info of a non-default CPE_ID_VALID setting, if one was seen
        self.valid = None
        # count of CPE_ID fields set to non-default values (VALID included)
        self.non_defaults = 0
        self.CPE_FIELDS_RE = re.compile(
            r"^\s*(.+_CPE_ID_({}))\s*=\s*(.+)$"
            .format("|".join(self.CPE_fields_defaults)),
        )
        self.VERSION_RE = re.compile(
            rf"^(HOST_)?{pkg.upper().replace('-', '_')}_VERSION\s*=\s*(.+)$",
        )
        self.COMMENT_RE = re.compile(r"^\s*#.*")

    def check_line(self, lineno, text):
        # drop trailing whitespace and blank out whole-line comments
        text = self.COMMENT_RE.sub('', text.rstrip())
        # WARNING! The VERSION_RE can _also_ match the same lines as CPE_FIELDS_RE,
        # but not the other way around. So we must first check for CPE_FIELDS_RE,
        # and if not matched, then and only then check for VERSION_RE.
        match = self.CPE_FIELDS_RE.match(text)
        if match:
            var, field, val = match.groups()
            return self._check_field(lineno, text, field, var, val)
        match = self.VERSION_RE.match(text)
        if match:
            # the package version is the default for CPE_ID_VERSION
            self.CPE_fields_defaults["VERSION"] = match.groups()[1]

    def after(self):
        # "VALID" counts in the non-defaults; so when "VALID" is present,
        # 1 non-default means only "VALID" is present, so that's OK.
        if self.valid and self.non_defaults > 1:
            return ["{}:{}: 'YES' is implied when a non-default CPE_ID field is specified: {} ({}#cpe-id)".format(
                self.filename,
                self.valid["lineno"],
                self.valid["text"],
                self.url_to_manual,
            )]

    def _check_field(self, lineno, text, field, var, val):
        # CPE_ID_VERSION defaults to the package version, so that line
        # must appear first for the comparison below to make sense
        if field == "VERSION" and self.CPE_fields_defaults[field] is None:
            return ["{}:{}: expecting package version to be set before CPE_ID_VERSION".format(
                self.filename,
                lineno,
            )]
        if val == self.CPE_fields_defaults[field]:
            return ["{}:{}: '{}' is the default value for {} ({}#cpe-id)".format(
                self.filename,
                lineno,
                val,
                var,
                self.url_to_manual,
            )]
        else:
            if field == "VALID":
                # remember it; whether it is redundant is decided in after()
                self.valid = {"lineno": lineno, "text": text}
            self.non_defaults += 1

View File

@ -0,0 +1,82 @@
# See utils/checkpackagelib/readme.txt before editing this file.
# The format of the patch files is tested during the build, so below check
# functions don't need to check for things already checked by running
# "make package-dirclean package-patch".
import os
import re
from checkpackagelib.base import _CheckFunction
from checkpackagelib.lib import NewlineAtEof # noqa: F401
from checkpackagelib.tool import NotExecutable # noqa: F401
class ApplyOrder(_CheckFunction):
    """Check that the patch filename starts with an apply-order number."""

    # 1 to 4 digits, a dash, then the rest of the basename
    APPLY_ORDER = re.compile(r"\d{1,4}-[^/]*$")

    def before(self):
        basename = os.path.basename(self.filename)
        if self.APPLY_ORDER.match(basename):
            return
        return ["{}:0: use name <number>-<description>.patch "
                "({}#_providing_patches)"
                .format(self.filename, self.url_to_manual)]
class NumberedSubject(_CheckFunction):
    """Warn when a git-formatted patch has a numbered [PATCH n/m] subject;
    patches should be generated with 'git format-patch -N'."""

    NUMBERED_PATCH = re.compile(r"Subject:\s*\[PATCH\s*\d+/\d+\]")

    def before(self):
        self.git_patch = False
        self.lineno = 0
        self.text = None

    def check_line(self, lineno, text):
        if text.startswith("diff --git"):
            self.git_patch = True
        elif self.NUMBERED_PATCH.search(text):
            # remember where the numbered subject was, warn in after()
            self.lineno = lineno
            self.text = text

    def after(self):
        if not (self.git_patch and self.text):
            return
        return ["{}:{}: generate your patches with 'git format-patch -N'"
                .format(self.filename, self.lineno),
                self.text]
class Sob(_CheckFunction):
    """Check that the patch header carries a Signed-off-by line."""

    SOB_ENTRY = re.compile(r"^Signed-off-by: .*$")

    def before(self):
        self.found = False

    def check_line(self, lineno, text):
        if not self.found and self.SOB_ENTRY.search(text):
            self.found = True

    def after(self):
        if self.found:
            return
        return ["{}:0: missing Signed-off-by in the header "
                "({}#_format_and_licensing_of_the_package_patches)"
                .format(self.filename, self.url_to_manual)]
class Upstream(_CheckFunction):
    """Check that the patch header carries an Upstream: status line."""

    UPSTREAM_ENTRY = re.compile(r"^Upstream: .*$")

    def before(self):
        self.found = False

    def check_line(self, lineno, text):
        if not self.found and self.UPSTREAM_ENTRY.search(text):
            self.found = True

    def after(self):
        if self.found:
            return
        return ["{}:0: missing Upstream in the header "
                "({}#_additional_patch_documentation)"
                .format(self.filename, self.url_to_manual)]

View File

@ -0,0 +1 @@
from checkpackagelib.tool import Flake8 # noqa: F401

View File

@ -0,0 +1,5 @@
from checkpackagelib.lib import ConsecutiveEmptyLines # noqa: F401
from checkpackagelib.lib import EmptyLastLine # noqa: F401
from checkpackagelib.lib import NewlineAtEof # noqa: F401
from checkpackagelib.lib import TrailingSpace # noqa: F401
from checkpackagelib.tool import Shellcheck # noqa: F401

View File

@ -0,0 +1,72 @@
import os
import re
from checkpackagelib.base import _CheckFunction
from checkpackagelib.lib import ConsecutiveEmptyLines # noqa: F401
from checkpackagelib.lib import EmptyLastLine # noqa: F401
from checkpackagelib.lib import NewlineAtEof # noqa: F401
from checkpackagelib.lib import TrailingSpace # noqa: F401
import checkpackagelib.tool
from checkpackagelib.tool import Shellcheck # noqa: F401
class Indent(_CheckFunction):
    """Init scripts must be indented with tabs, not spaces."""

    # any number of leading tabs followed by at least one space
    INDENTED_WITH_SPACES = re.compile(r"^[\t]* ")

    def check_line(self, lineno, text):
        if not self.INDENTED_WITH_SPACES.search(text.rstrip()):
            return
        return ["{}:{}: should be indented with tabs ({}#adding-packages-start-script)"
                .format(self.filename, lineno, self.url_to_manual),
                text]
class NotExecutable(checkpackagelib.tool.NotExecutable):
    # Init scripts are installed with the right mode by the .mk file, so the
    # copy in etc/init.d/ in the tree does not need the executable bit.
    def ignore(self):
        return 'etc/init.d/' in self.filename

    def hint(self):
        # appended to the base-class warning message
        return ", just make sure you use '$(INSTALL) -D -m 0755' in the .mk file"
class Variables(_CheckFunction):
    """Check DAEMON and PIDFILE variables in SysV init scripts.

    DAEMON must be defined exactly once and without a path component, the
    script filename must be S<NN><daemon name>, and PIDFILE (when present)
    must be /var/run/$DAEMON.pid.
    """
    DAEMON_VAR = re.compile(r"^DAEMON=[\"']{0,1}([^\"']*)[\"']{0,1}")
    # Fix: the '.' before "pid" was unescaped, so any character was accepted
    # (e.g. "/var/run/$DAEMONxpid" passed the check); escape it to require a
    # literal dot.
    PIDFILE_PATTERN = re.compile(r"/var/run/(\$DAEMON|\$\{DAEMON\})\.pid")
    PIDFILE_VAR = re.compile(r"^PIDFILE=[\"']{0,1}([^\"']*)[\"']{0,1}")

    def before(self):
        # daemon name, filled in once a DAEMON= line has been seen
        self.name = None

    def check_line(self, lineno, text):
        name_found = self.DAEMON_VAR.search(text.rstrip())
        if name_found:
            if self.name:
                return ["{}:{}: DAEMON variable redefined ({}#adding-packages-start-script)"
                        .format(self.filename, lineno, self.url_to_manual),
                        text]
            self.name = name_found.group(1)
            if '/' in self.name:
                self.name = os.path.basename(self.name)  # to be used in after() to check the expected filename
                return ["{}:{}: Do not include path in DAEMON ({}#adding-packages-start-script)"
                        .format(self.filename, lineno, self.url_to_manual),
                        text,
                        'DAEMON="{}"'.format(self.name)]
            return
        pidfile_found = self.PIDFILE_VAR.search(text.rstrip())
        if pidfile_found:
            pidfile = pidfile_found.group(1)
            if not self.PIDFILE_PATTERN.match(pidfile):
                return ["{}:{}: Incorrect PIDFILE value ({}#adding-packages-start-script)"
                        .format(self.filename, lineno, self.url_to_manual),
                        text,
                        'PIDFILE="/var/run/$DAEMON.pid"']

    def after(self):
        if self.name is None:
            return ["{}:0: DAEMON variable not defined ({}#adding-packages-start-script)"
                    .format(self.filename, self.url_to_manual)]
        expected_filename = re.compile(r"S\d\d{}$".format(self.name))
        if not expected_filename.match(os.path.basename(self.filename)):
            return ["{}:0: filename should be S<number><number><daemon name> ({}#adding-packages-start-script)"
                    .format(self.filename, self.url_to_manual),
                    "expecting S<number><number>{}".format(self.name)]

View File

@ -0,0 +1,73 @@
How the scripts are structured:
- check-package is the main engine, called by the user.
For each input file, this script decides which parser should be used and it
collects all classes declared in the library file and instantiates them.
The main engine opens the input files and it serves each raw line (including
newline!) to the method check_line() of every check object.
Two special methods before() and after() are used to call the initialization
of variables (for the case it needs to keep data across calls) and the
equivalent finalization (e.g. for the case a warning must be issued if some
pattern is not in the input file).
- base.py contains the base class for all check functions.
- lib.py contains the classes for common check functions.
Each check function is explicitly included in a given type-parsing library.
Do not include every single check function in this file, a class that will
only parse hash files should be implemented in the hash-parsing library.
When a warning must be issued, the check function returns an array of strings.
Each string is a warning message and is displayed if the corresponding verbose
level is active. When the script is called without --verbose only the first
warning in the returned array is printed; when called with --verbose both
first and second warnings are printed; when called with -vv up to the third
warning is printed; and so on.
Helper functions can be defined and will not be called by the main script.
- lib_type.py contains check functions specific to files of this type.
Some hints when changing this code:
- prefer O(n) algorithms, where n is the total number of lines in the files
processed.
- when there is no other reason for ordering, use alphabetical order (e.g. keep
the check functions in alphabetical order, keep the imports in alphabetical
order, and so on).
- keep in mind that for every class the method before() will be called before
any line is served to be checked by the method check_line(). A class that
checks the filename should only implement the method before(). A function that
needs to keep data across calls (e.g. keep the last line before the one being
processed) should initialize all variables using this method.
- keep in mind that for every class the method after() will be called after all
lines were served to be checked by the method check_line(). A class that
checks the absence of a pattern in the file will need to use this method.
- try to avoid false warnings. It's better to not issue a warning message to a
corner case than have too many false warnings. The second can make users stop
using the script.
- do not check spacing in the input line in every single function. Trailing
whitespace and wrong indentation should be checked by separate functions.
- avoid duplicate tests. Try to test only one thing in each function.
- in the warning message, include the url to a section from the manual, when
applicable. It potentially will make more people know the manual.
- use short sentences in the warning messages. A complete explanation can be
added to show when --verbose is used.
- when testing, verify the error message is displayed when the error pattern is
  found, but also verify the error message is not displayed for a few
  well-formatted packages... there are many of these, just pick your favorite
as golden package that should not trigger any warning message.
- check the url displayed by the warning message works.
Usage examples:
- to get a list of check functions that would be called without actually
calling them you can use the --dry-run option:
$ utils/check-package --dry-run package/yourfavorite/*
- when you just added a new check function, e.g. Something, check how it behaves
for all current packages:
$ utils/check-package --include-only Something $(find package -type f)
- the effective processing time (when the .pyc were already generated and all
  files to be processed are cached in RAM) should stay in the order of a few
  seconds:
$ utils/check-package $(find package -type f) >/dev/null ; \
time utils/check-package $(find package -type f) >/dev/null
- vim users can navigate the warnings (most editors probably have similar
function) since warnings are generated in the form 'path/file:line: warning':
$ find package/ -name 'Config.*' > filelist && vim -c \
'set makeprg=utils/check-package\ $(cat\ filelist)' -c make -c copen

View File

@ -0,0 +1,212 @@
import pytest
import checkpackagelib.test_util as util
import checkpackagelib.lib as m
# Test cases: (test name, filename given to the check, file content,
# expected list of warnings).
ConsecutiveEmptyLines = [
    ('1 line (no newline)',
     'any',
     '',
     []),
    ('1 line',
     'any',
     '\n',
     []),
    ('2 lines',
     'any',
     '\n'
     '\n',
     [['any:2: consecutive empty lines']]),
    ('more than 2 consecutive',
     'any',
     '\n'
     '\n'
     '\n',
     [['any:2: consecutive empty lines'],
      ['any:3: consecutive empty lines']]),
    ('ignore whitespace 1',
     'any',
     '\n'
     ' ',
     [['any:2: consecutive empty lines']]),
    ('ignore whitespace 2',
     'any',
     ' \n'
     '\t\n',
     [['any:2: consecutive empty lines']]),
    ]


@pytest.mark.parametrize('testname,filename,string,expected', ConsecutiveEmptyLines)
def test_ConsecutiveEmptyLines(testname, filename, string, expected):
    warnings = util.check_file(m.ConsecutiveEmptyLines, filename, string)
    assert warnings == expected
# Test cases: (test name, filename given to the check, file content,
# expected list of warnings).
EmptyLastLine = [
    ('ignore empty file',
     'any',
     '',
     []),
    ('empty line (newline)',
     'any',
     '\n',
     [['any:1: empty line at end of file']]),
    ('empty line (space, newline)',
     'any',
     ' \n',
     [['any:1: empty line at end of file']]),
    ('empty line (space, no newline)',
     'any',
     ' ',
     [['any:1: empty line at end of file']]),
    ('warn for the last of 2',
     'any',
     '\n'
     '\n',
     [['any:2: empty line at end of file']]),
    ('warn for the last of 3',
     'any',
     '\n'
     '\n'
     '\n',
     [['any:3: empty line at end of file']]),
    ('ignore whitespace',
     'any',
     ' \n'
     '\t\n',
     [['any:2: empty line at end of file']]),
    ]


@pytest.mark.parametrize('testname,filename,string,expected', EmptyLastLine)
def test_EmptyLastLine(testname, filename, string, expected):
    warnings = util.check_file(m.EmptyLastLine, filename, string)
    assert warnings == expected
# Test cases: (test name, filename given to the check, file content,
# expected list of warnings).
NewlineAtEof = [
    ('good',
     'any',
     'text\n',
     []),
    ('text (bad)',
     'any',
     '\n'
     'text',
     [['any:2: missing newline at end of file',
       'text']]),
    ('space (bad)',
     'any',
     '\n'
     ' ',
     [['any:2: missing newline at end of file',
       ' ']]),
    ('tab (bad)',
     'any',
     '\n'
     '\t',
     [['any:2: missing newline at end of file',
       '\t']]),
    ('even for file with one line',
     'any',
     ' ',
     [['any:1: missing newline at end of file',
       ' ']]),
    ]


@pytest.mark.parametrize('testname,filename,string,expected', NewlineAtEof)
def test_NewlineAtEof(testname, filename, string, expected):
    warnings = util.check_file(m.NewlineAtEof, filename, string)
    assert warnings == expected
# Test cases: (test name, filename given to the check, file content,
# expected list of warnings).
TrailingSpace = [
    ('good',
     'any',
     'text\n',
     []),
    ('ignore missing newline',
     'any',
     '\n'
     'text',
     []),
    ('spaces',
     'any',
     'text \n',
     [['any:1: line contains trailing whitespace',
       'text \n']]),
    ('tabs after text',
     'any',
     'text\t\t\n',
     [['any:1: line contains trailing whitespace',
       'text\t\t\n']]),
    ('mix of tabs and spaces',
     'any',
     ' \n'
     ' ',
     [['any:1: line contains trailing whitespace',
       ' \n'],
      ['any:2: line contains trailing whitespace',
       ' ']]),
    ('blank line with tabs',
     'any',
     '\n'
     '\t',
     [['any:2: line contains trailing whitespace',
       '\t']]),
    ]


@pytest.mark.parametrize('testname,filename,string,expected', TrailingSpace)
def test_TrailingSpace(testname, filename, string, expected):
    warnings = util.check_file(m.TrailingSpace, filename, string)
    assert warnings == expected
# Test cases: (test name, filename given to the check, file content,
# expected list of warnings).
Utf8Characters = [
    ('usual',
     'any',
     'text\n',
     []),
    ('acceptable character',
     'any',
     '\x60',
     []),
    ('unacceptable character',
     'any',
     '\x81',
     [['any:1: line contains UTF-8 characters',
       '\x81']]),
    ('2 warnings',
     'any',
     'text\n'
     'text \xc8 text\n'
     '\xc9\n',
     [['any:2: line contains UTF-8 characters',
       'text \xc8 text\n'],
      ['any:3: line contains UTF-8 characters',
       '\xc9\n']]),
    ]


@pytest.mark.parametrize('testname,filename,string,expected', Utf8Characters)
def test_Utf8Characters(testname, filename, string, expected):
    warnings = util.check_file(m.Utf8Characters, filename, string)
    assert warnings == expected
def test_all_check_functions_are_used():
    # Every class exported by the common lib must appear (by name) in at
    # least one of the type-specific libs, which re-import what they use.
    import inspect
    import checkpackagelib.lib_config as lib_config
    import checkpackagelib.lib_hash as lib_hash
    import checkpackagelib.lib_mk as lib_mk
    import checkpackagelib.lib_patch as lib_patch
    type_libs = (lib_config, lib_hash, lib_mk, lib_patch)
    used_names = {name
                  for lib in type_libs
                  for name, _ in inspect.getmembers(lib, inspect.isclass)}
    common_names = {name for name, _ in inspect.getmembers(m, inspect.isclass)}
    assert common_names <= used_names

View File

@ -0,0 +1,465 @@
import pytest
import checkpackagelib.test_util as util
import checkpackagelib.lib_config as m
# Test cases: (test name, filename given to the check, Config.in content,
# expected list of warnings).
AttributesOrder = [
    ('good example',
     'any',
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'default y\n'
     'depends on BR2_USE_BAR # runtime\n'
     'select BR2_PACKAGE_BAZ\n'
     'help\n'
     '\t help text\n',
     []),
    ('depends before default',
     'any',
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'depends on BR2_USE_BAR\n'
     'default y\n',
     [['any:4: attributes order: type, default, depends on, select, help (url#_config_files)',
       'default y\n']]),
    ('select after help',
     'any',
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'help\n'
     '\t help text\n'
     'select BR2_PACKAGE_BAZ\n',
     [['any:5: attributes order: type, default, depends on, select, help (url#_config_files)',
       'select BR2_PACKAGE_BAZ\n']]),
    ('string',
     'any',
     'config BR2_PACKAGE_FOO_PLUGINS\n'
     'string "foo plugins"\n'
     'default "all"\n',
     []),
    ('ignore tabs',
     'any',
     'config\tBR2_PACKAGE_FOO_PLUGINS\n'
     'default\t"all"\n'
     'string\t"foo plugins"\n',
     [['any:3: attributes order: type, default, depends on, select, help (url#_config_files)',
       'string\t"foo plugins"\n']]),
    ('choice',
     'any',
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'if BR2_PACKAGE_FOO\n'
     '\n'
     'choice\n'
     'prompt "type of foo"\n'
     'default BR2_PACKAGE_FOO_STRING\n'
     '\n'
     'config BR2_PACKAGE_FOO_NONE\n'
     'bool "none"\n'
     '\n'
     'config BR2_PACKAGE_FOO_STRING\n'
     'bool "string"\n'
     '\n'
     'endchoice\n'
     '\n'
     'endif\n'
     '\n',
     []),
    ('type after default',
     'any',
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'if BR2_PACKAGE_FOO\n'
     '\n'
     'choice\n'
     'default BR2_PACKAGE_FOO_STRING\n'
     'prompt "type of foo"\n',
     [['any:7: attributes order: type, default, depends on, select, help (url#_config_files)',
       'prompt "type of foo"\n']]),
    ('menu',
     'any',
     'menuconfig BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'help\n'
     '\t help text\n'
     '\t help text\n'
     '\n'
     'if BR2_PACKAGE_FOO\n'
     '\n'
     'menu "foo plugins"\n'
     'config BR2_PACKAGE_FOO_COUNTER\n'
     'bool "counter"\n'
     '\n'
     'endmenu\n'
     '\n'
     'endif\n',
     []),
    ]


@pytest.mark.parametrize('testname,filename,string,expected', AttributesOrder)
def test_AttributesOrder(testname, filename, string, expected):
    warnings = util.check_file(m.AttributesOrder, filename, string)
    assert warnings == expected
# Test cases: (test name, filename given to the check, Config.in content,
# expected list of warnings). Only package/Config.in* files are checked.
CommentsMenusPackagesOrder = [
    ('top menu (good)',
     'package/Config.in',
     'menu "Target packages"\n'
     'source "package/busybox/Config.in"\n'
     'source "package/skeleton/Config.in"\n',
     []),
    ('top menu (bad)',
     'package/Config.in',
     'source "package/skeleton/Config.in"\n'
     'source "package/busybox/Config.in"\n',
     [['package/Config.in:2: Packages in: The top level menu,\n'
       ' are not alphabetically ordered;\n'
       " correct order: '-', '_', digits, capitals, lowercase;\n"
       ' first incorrect package: busybox',
       'source "package/busybox/Config.in"\n']]),
    ('menu (bad)',
     'package/Config.in',
     'menu "Target packages"\n'
     'source "package/skeleton/Config.in"\n'
     'source "package/busybox/Config.in"\n',
     [['package/Config.in:3: Packages in: menu "Target packages",\n'
       ' are not alphabetically ordered;\n'
       " correct order: '-', '_', digits, capitals, lowercase;\n"
       ' first incorrect package: busybox',
       'source "package/busybox/Config.in"\n']]),
    ('underscore (good)',
     'package/Config.in.host',
     'menu "Hardware handling"\n'
     'menu "Firmware"\n'
     'endmenu\n'
     'source "package/usb_modeswitch/Config.in"\n'
     'source "package/usbmount/Config.in"\n',
     []),
    ('underscore (bad)',
     'package/Config.in.host',
     'menu "Hardware handling"\n'
     'menu "Firmware"\n'
     'endmenu\n'
     'source "package/usbmount/Config.in"\n'
     'source "package/usb_modeswitch/Config.in"\n',
     [['package/Config.in.host:5: Packages in: menu "Hardware handling",\n'
       ' are not alphabetically ordered;\n'
       " correct order: '-', '_', digits, capitals, lowercase;\n"
       ' first incorrect package: usb_modeswitch',
       'source "package/usb_modeswitch/Config.in"\n']]),
    ('ignore other files',
     'any other file',
     'menu "Hardware handling"\n'
     'source "package/bbb/Config.in"\n'
     'source "package/aaa/Config.in"\n',
     []),
    ('dash (bad)',
     'package/Config.in',
     'menu "packages"\n'
     'source "package/a_a/Config.in"\n'
     'source "package/a-a/Config.in"\n'
     'source "package/a1a/Config.in"\n'
     'source "package/aAa/Config.in"\n'
     'source "package/aaa/Config.in"\n',
     [['package/Config.in:3: Packages in: menu "packages",\n'
       ' are not alphabetically ordered;\n'
       " correct order: '-', '_', digits, capitals, lowercase;\n"
       ' first incorrect package: a-a',
       'source "package/a-a/Config.in"\n']]),
    ('underscore (bad)',
     'package/Config.in',
     'menu "packages"\n'
     'source "package/a-a/Config.in"\n'
     'source "package/a1a/Config.in"\n'
     'source "package/a_a/Config.in"\n'
     'source "package/aAa/Config.in"\n'
     'source "package/aaa/Config.in"\n',
     [['package/Config.in:4: Packages in: menu "packages",\n'
       ' are not alphabetically ordered;\n'
       " correct order: '-', '_', digits, capitals, lowercase;\n"
       ' first incorrect package: a_a',
       'source "package/a_a/Config.in"\n']]),
    ('digit (bad)',
     'package/Config.in',
     'menu "packages"\n'
     'source "package/a-a/Config.in"\n'
     'source "package/a_a/Config.in"\n'
     'source "package/aAa/Config.in"\n'
     'source "package/a1a/Config.in"\n'
     'source "package/aaa/Config.in"\n',
     [['package/Config.in:5: Packages in: menu "packages",\n'
       ' are not alphabetically ordered;\n'
       " correct order: '-', '_', digits, capitals, lowercase;\n"
       ' first incorrect package: a1a',
       'source "package/a1a/Config.in"\n']]),
    ('capitals (bad)',
     'package/Config.in',
     'menu "packages"\n'
     'source "package/a-a/Config.in"\n'
     'source "package/a_a/Config.in"\n'
     'source "package/a1a/Config.in"\n'
     'source "package/aaa/Config.in"\n'
     'source "package/aAa/Config.in"\n',
     [['package/Config.in:6: Packages in: menu "packages",\n'
       ' are not alphabetically ordered;\n'
       " correct order: '-', '_', digits, capitals, lowercase;\n"
       ' first incorrect package: aAa',
       'source "package/aAa/Config.in"\n']]),
    ('digits, capitals, underscore (good)',
     'package/Config.in',
     'menu "packages"\n'
     'source "package/a-a/Config.in"\n'
     'source "package/a_a/Config.in"\n'
     'source "package/a1a/Config.in"\n'
     'source "package/aAa/Config.in"\n'
     'source "package/aaa/Config.in"\n',
     []),
    ('conditional menu (good)',
     'package/Config.in',
     'menu "Other"\n'
     'source "package/linux-pam/Config.in"\n'
     'if BR2_PACKAGE_LINUX_PAM\n'
     'comment "linux-pam plugins"\n'
     'source "package/libpam-radius-auth/Config.in"\n'
     'source "package/libpam-tacplus/Config.in"\n'
     'endif\n'
     'source "package/liquid-dsp/Config.in"\n',
     []),
    ('conditional menu (bad)',
     'package/Config.in',
     'menu "Other"\n'
     'source "package/linux-pam/Config.in"\n'
     'if BR2_PACKAGE_LINUX_PAM\n'
     'comment "linux-pam plugins"\n'
     'source "package/libpam-tacplus/Config.in"\n'
     'source "package/libpam-radius-auth/Config.in"\n'
     'endif\n'
     'source "package/liquid-dsp/Config.in"\n',
     [['package/Config.in:6: Packages in: comment "linux-pam plugins",\n'
       ' are not alphabetically ordered;\n'
       " correct order: '-', '_', digits, capitals, lowercase;\n"
       ' first incorrect package: libpam-radius-auth',
       'source "package/libpam-radius-auth/Config.in"\n']]),
    ('no conditional (bad)',
     'package/Config.in',
     'menu "Other"\n'
     'source "package/linux-pam/Config.in"\n'
     'source "package/libpam-radius-auth/Config.in"\n'
     'source "package/libpam-tacplus/Config.in"\n'
     'source "package/liquid-dsp/Config.in"\n',
     [['package/Config.in:3: Packages in: menu "Other",\n'
       ' are not alphabetically ordered;\n'
       " correct order: '-', '_', digits, capitals, lowercase;\n"
       ' first incorrect package: libpam-radius-auth',
       'source "package/libpam-radius-auth/Config.in"\n']]),
    ]


@pytest.mark.parametrize('testname,filename,string,expected', CommentsMenusPackagesOrder)
def test_CommentsMenusPackagesOrder(testname, filename, string, expected):
    warnings = util.check_file(m.CommentsMenusPackagesOrder, filename, string)
    assert warnings == expected
# Test cases: (test name, filename given to the check, Config.in content,
# expected list of warnings).
HelpText = [
    ('single line',
     'any',
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'default y\n'
     'depends on BR2_USE_BAR # runtime\n'
     'select BR2_PACKAGE_BAZ\n'
     'help\n'
     '\t help text\n',
     []),
    ('larger than 72',
     'any',
     'help\n'
     '\t 123456789 123456789 123456789 123456789 123456789 123456789 12\n'
     '\t 123456789 123456789 123456789 123456789 123456789 123456789 123\n'
     '\t help text\n',
     [['any:3: help text: <tab><2 spaces><62 chars> (url#writing-rules-config-in)',
       '\t 123456789 123456789 123456789 123456789 123456789 123456789 123\n',
       '\t 123456789 123456789 123456789 123456789 123456789 123456789 12']]),
    ('long url at beginning of line',
     'any',
     'help\n'
     '\t 123456789 123456789 123456789 123456789 123456789 123456789 12\n'
     '\t http://url.that.is.longer.than.seventy.two.characthers/folder_name\n'
     '\t https://url.that.is.longer.than.seventy.two.characthers/folder_name\n'
     '\t git://url.that.is.longer.than.seventy.two.characthers/folder_name\n',
     []),
    ('long url not at beginning of line',
     'any',
     'help\n'
     '\t 123456789 123456789 123456789 123456789 123456789 123456789 12\n'
     '\t refer to http://url.that.is.longer.than.seventy.two.characthers/folder_name\n'
     '\n'
     '\t http://url.that.is.longer.than.seventy.two.characthers/folder_name\n',
     [['any:3: help text: <tab><2 spaces><62 chars> (url#writing-rules-config-in)',
       '\t refer to http://url.that.is.longer.than.seventy.two.characthers/folder_name\n',
       '\t 123456789 123456789 123456789 123456789 123456789 123456789 12']]),
    ('allow beautified items',
     'any',
     'help\n'
     '\t 123456789 123456789 123456789 123456789 123456789 123456789 12\n'
     '\t summary:\n'
     '\t - enable that config\n'
     '\t - built it\n',
     []),
    ]


@pytest.mark.parametrize('testname,filename,string,expected', HelpText)
def test_HelpText(testname, filename, string, expected):
    warnings = util.check_file(m.HelpText, filename, string)
    assert warnings == expected
Indent = [
('good example',
'any',
'config BR2_PACKAGE_FOO\n'
'\tbool "foo"\n'
'\tdefault y\n'
'\tdepends on BR2_TOOLCHAIN_HAS_THREADS\n'
'\tdepends on BR2_INSTALL_LIBSTDCPP\n'
'# very useful comment\n'
'\tselect BR2_PACKAGE_BAZ\n'
'\thelp\n'
'\t help text\n'
'\n'
'comment "foo needs toolchain w/ C++, threads"\n'
'\tdepends on !BR2_INSTALL_LIBSTDCPP || \\\n'
'\t\t!BR2_TOOLCHAIN_HAS_THREADS\n'
'\n'
'source "package/foo/bar/Config.in"\n',
[]),
('spaces',
'any',
'config BR2_PACKAGE_FOO\n'
' bool "foo"\n',
[['any:2: should be indented with one tab (url#_config_files)',
' bool "foo"\n']]),
('without indent',
'any',
'config BR2_PACKAGE_FOO\n'
'default y\n',
[['any:2: should be indented with one tab (url#_config_files)',
'default y\n']]),
('too much tabs',
'any',
'config BR2_PACKAGE_FOO\n'
'\t\tdepends on BR2_TOOLCHAIN_HAS_THREADS\n',
[['any:2: should be indented with one tab (url#_config_files)',
'\t\tdepends on BR2_TOOLCHAIN_HAS_THREADS\n']]),
('help',
'any',
'config BR2_PACKAGE_FOO\n'
' help\n',
[['any:2: should be indented with one tab (url#_config_files)',
' help\n']]),
('continuation line',
'any',
'comment "foo needs toolchain w/ C++, threads"\n'
'\tdepends on !BR2_INSTALL_LIBSTDCPP || \\\n'
' !BR2_TOOLCHAIN_HAS_THREADS\n',
[['any:3: continuation line should be indented using tabs',
' !BR2_TOOLCHAIN_HAS_THREADS\n']]),
('comment with tabs',
'any',
'\tcomment "foo needs toolchain w/ C++, threads"\n',
[['any:1: should not be indented',
'\tcomment "foo needs toolchain w/ C++, threads"\n']]),
('comment with spaces',
'any',
' comment "foo needs toolchain w/ C++, threads"\n',
[['any:1: should not be indented',
' comment "foo needs toolchain w/ C++, threads"\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', Indent)
def test_Indent(testname, filename, string, expected):
    """Each Kconfig Indent case produces exactly the expected warnings."""
    assert util.check_file(m.Indent, filename, string) == expected
# Cases for the RedefinedConfig check: a config symbol declared twice under
# equivalent conditionals is flagged; different conditionals are allowed.
RedefinedConfig = [
    ('no redefinition',
     'any',
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'config BR2_PACKAGE_FOO_BAR\n'
     'bool "foo"\n',
     []),
    ('no conditional',
     'any',
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'config BR2_PACKAGE_BAR\n'
     'bool "bar"\n'
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n',
     [['any:5: config BR2_PACKAGE_FOO redeclared (previous line: 1)',
       'config BR2_PACKAGE_FOO\n']]),
    ('three times',
     'any',
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n',
     [['any:3: config BR2_PACKAGE_FOO redeclared (previous line: 1)',
       'config BR2_PACKAGE_FOO\n'],
      ['any:5: config BR2_PACKAGE_FOO redeclared (previous line: 1)',
       'config BR2_PACKAGE_FOO\n']]),
    ('same conditional',
     'any',
     'if BR2_PACKAGE_BAZ\n'
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'config BR2_PACKAGE_BAR\n'
     'bool "bar"\n'
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'endif\n',
     [['any:6: config BR2_PACKAGE_FOO redeclared (previous line: 2)',
       'config BR2_PACKAGE_FOO\n']]),
    ('equivalent conditional',
     'any',
     'if BR2_PACKAGE_BAZ\n'
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'endif\n'
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'if BR2_PACKAGE_BAZ\n'
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'endif\n',
     [['any:8: config BR2_PACKAGE_FOO redeclared (previous line: 2)',
       'config BR2_PACKAGE_FOO\n']]),
    ('not equivalent conditional',
     'any',
     'if BR2_PACKAGE_BAZ\n'
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'endif\n'
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'if !BR2_PACKAGE_BAZ\n'
     'config BR2_PACKAGE_FOO\n'
     'bool "foo"\n'
     'endif\n',
     []),
]
@pytest.mark.parametrize('testname,filename,string,expected', RedefinedConfig)
def test_RedefinedConfig(testname, filename, string, expected):
    """Each RedefinedConfig case produces exactly the expected warnings."""
    assert util.check_file(m.RedefinedConfig, filename, string) == expected

View File

@ -0,0 +1,183 @@
import pytest
import checkpackagelib.test_util as util
import checkpackagelib.lib_hash as m
# Cases for HashNumberOfFields: every non-comment line of a .hash file must
# have exactly three whitespace-separated fields (type, hash, filename).
HashNumberOfFields = [
    ('empty file',
     'any',
     '',
     []),
    ('empty line',
     'any',
     '\n',
     []),
    ('ignore whitespace',
     'any',
     '\t\n',
     []),
    ('ignore comments',
     'any',
     '# text\n',
     []),
    ('1 field',
     'any',
     'field1\n',
     [['any:1: expected three fields (url#adding-packages-hash)',
       'field1\n']]),
    ('2 fields',
     'any',
     'field1 field2\n',
     [['any:1: expected three fields (url#adding-packages-hash)',
       'field1 field2\n']]),
    ('4 fields',
     'any',
     'field1 field2 field3 field4\n',
     [['any:1: expected three fields (url#adding-packages-hash)',
       'field1 field2 field3 field4\n']]),
    ('with 1 space',
     'any',
     'field1 field2 field3\n',
     []),
    ('many spaces',
     'any',
     ' field1 field2 field3\n',
     []),
    ('tabs',
     'any',
     'field1\tfield2\tfield3\n',
     []),
    ('mix of tabs and spaces',
     'any',
     '\tfield1\t field2\t field3 \n',
     []),
]
@pytest.mark.parametrize('testname,filename,string,expected', HashNumberOfFields)
def test_HashNumberOfFields(testname, filename, string, expected):
    """Each HashNumberOfFields case produces exactly the expected warnings."""
    assert util.check_file(m.HashNumberOfFields, filename, string) == expected
HashType = [
('ignore empty files',
'any',
'',
[]),
('ignore 1 field',
'any',
'text\n',
[]),
('wrong type',
'any',
'text text\n',
[['any:1: unexpected type of hash (url#adding-packages-hash)',
'text text\n']]),
('md5 (good)',
'any',
'md5 12345678901234567890123456789012\n',
[]),
('md5 (short)',
'any',
'md5 123456\n',
[['any:1: hash size does not match type (url#adding-packages-hash)',
'md5 123456\n',
'expected 32 hex digits']]),
('ignore space before',
'any',
' md5 12345678901234567890123456789012\n',
[]),
('2 spaces',
'any',
'md5 12345678901234567890123456789012\n',
[]),
('ignore tabs',
'any',
'md5\t12345678901234567890123456789012\n',
[]),
('common typo',
'any',
'md5sum 12345678901234567890123456789012\n',
[['any:1: unexpected type of hash (url#adding-packages-hash)',
'md5sum 12345678901234567890123456789012\n']]),
('md5 (too long)',
'any',
'md5 123456789012345678901234567890123\n',
[['any:1: hash size does not match type (url#adding-packages-hash)',
'md5 123456789012345678901234567890123\n',
'expected 32 hex digits']]),
('sha1 (good)',
'any',
'sha1 1234567890123456789012345678901234567890\n',
[]),
('sha256',
'any',
'sha256 1234567890123456789012345678901234567890123456789012345678901234\n',
[]),
('sha384',
'any',
'sha384 123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456\n',
[]),
('sha512',
'any',
'sha512 1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678'
'9012345678\n',
[]),
]
@pytest.mark.parametrize('testname,filename,string,expected', HashType)
def test_HashType(testname, filename, string, expected):
    """Each HashType case produces exactly the expected warnings."""
    assert util.check_file(m.HashType, filename, string) == expected
HashSpaces = [
('ignore empty files',
'any',
'',
[]),
('ignore 1 field',
'any',
'text\n',
[]),
('ignore comments',
'any',
'# type 1234567890123456789012345678901234567890 file\n',
[]),
('ignore trailing space',
'any',
'type 1234567890123456789012345678901234567890 file\t \n',
[]),
('2 spaces',
'any',
'type 1234567890123456789012345678901234567890 file\n',
[]),
('1 space',
'any',
'type 1234567890123456789012345678901234567890 file\n',
[['any:1: separation does not match expectation (url#adding-packages-hash)',
'type 1234567890123456789012345678901234567890 file\n']]),
('3 spaces',
'any',
'type 1234567890123456789012345678901234567890 file\n',
[['any:1: separation does not match expectation (url#adding-packages-hash)',
'type 1234567890123456789012345678901234567890 file\n']]),
('tabs',
'any',
'type\t1234567890123456789012345678901234567890\tfile\n',
[['any:1: separation does not match expectation (url#adding-packages-hash)',
'type\t1234567890123456789012345678901234567890\tfile\n']]),
('mixed tabs and spaces',
'any',
'type\t 1234567890123456789012345678901234567890 \tfile\n',
[['any:1: separation does not match expectation (url#adding-packages-hash)',
'type\t 1234567890123456789012345678901234567890 \tfile\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', HashSpaces)
def test_HashSpaces(testname, filename, string, expected):
    """Each HashSpaces case produces exactly the expected warnings."""
    assert util.check_file(m.HashSpaces, filename, string) == expected

View File

@ -0,0 +1,18 @@
import pytest
import checkpackagelib.test_util as util
import checkpackagelib.lib_ignore as m
# Cases for IgnoreMissingFile: entries in .checkpackageignore must refer to
# files that still exist in the tree.
IgnoreMissingFile = [
    ('missing ignored file',
     '.checkpackageignore',
     'this-file-does-not-exist SomeTest',
     [['.checkpackageignore:1: ignored file this-file-does-not-exist is missing',
       'this-file-does-not-exist SomeTest']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', IgnoreMissingFile)
def test_IgnoreMissingFile(testname, filename, string, expected):
    """Each IgnoreMissingFile case produces exactly the expected warnings."""
    assert util.check_file(m.IgnoreMissingFile, filename, string) == expected

View File

@ -0,0 +1,661 @@
import pytest
import checkpackagelib.test_util as util
import checkpackagelib.lib_mk as m
# Cases for DoNotInstallToHostdirUsr: .mk files must install to $(HOST_DIR)/
# rather than the legacy $(HOST_DIR)/usr/ prefix.
DoNotInstallToHostdirUsr = [
    ('real case',
     'libapparmor.mk',
     'LIBAPPARMOR_CONF_OPTS += \\\n'
     '\t--with-python \\\n'
     '\tPYTHON=$(HOST_DIR)/usr/bin/python3 \\\n'
     '\tPYTHON_CONFIG=$(STAGING_DIR)/usr/bin/python3-config \\\n'
     '\tSWIG=$(SWIG)\n',
     [['libapparmor.mk:3: install files to $(HOST_DIR)/ instead of $(HOST_DIR)/usr/',
       '\tPYTHON=$(HOST_DIR)/usr/bin/python3 \\\n']]),
    ('ignore comment',
     'any',
     '# following code do not install to $(HOST_DIR)/usr/\n',
     []),
]
@pytest.mark.parametrize('testname,filename,string,expected', DoNotInstallToHostdirUsr)
def test_DoNotInstallToHostdirUsr(testname, filename, string, expected):
    """Each DoNotInstallToHostdirUsr case produces the expected warnings."""
    assert util.check_file(m.DoNotInstallToHostdirUsr, filename, string) == expected
# Cases for Ifdef: .mk files should test config symbols with
# ifeq ($(SYMBOL),y) / ifneq, never with ifdef / ifndef.
Ifdef = [
    ('ignore commented line',
     'any',
     '# ifdef\n',
     []),
    ('simple',
     'any',
     '\n'
     'ifdef BR2_PACKAGE_FWTS_EFI_RUNTIME_MODULE\n'
     'endif\n',
     [['any:2: use ifeq ($(SYMBOL),y) instead of ifdef SYMBOL',
       'ifdef BR2_PACKAGE_FWTS_EFI_RUNTIME_MODULE\n']]),
    ('ignore indentation',
     'any',
     ' ifdef FOO\n'
     ' endif\n'
     '\tifdef BAR\n'
     'endif\n',
     [['any:1: use ifeq ($(SYMBOL),y) instead of ifdef SYMBOL',
       ' ifdef FOO\n'],
      ['any:3: use ifeq ($(SYMBOL),y) instead of ifdef SYMBOL',
       '\tifdef BAR\n']]),
    ('typo',
     'any',
     '\n'
     'ifndef ($(BR2_ENABLE_LOCALE),y)\n'
     'endif\n',
     [['any:2: use ifneq ($(SYMBOL),y) instead of ifndef SYMBOL',
       'ifndef ($(BR2_ENABLE_LOCALE),y)\n']]),
    ('else ifdef',
     'any',
     'else ifdef SYMBOL # comment\n',
     [['any:1: use ifeq ($(SYMBOL),y) instead of ifdef SYMBOL',
       'else ifdef SYMBOL # comment\n']]),
    ('else ifndef',
     'any',
     '\t else ifndef\t($(SYMBOL),y) # comment\n',
     [['any:1: use ifneq ($(SYMBOL),y) instead of ifndef SYMBOL',
       '\t else ifndef\t($(SYMBOL),y) # comment\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', Ifdef)
def test_Ifdef(testname, filename, string, expected):
    """Each Ifdef case produces exactly the expected warnings."""
    assert util.check_file(m.Ifdef, filename, string) == expected
# Cases for the .mk Indent check (lib_mk): recipe/define/continuation lines
# must be indented with tabs, conditional bodies must not be indented.
Indent = [
    ('ignore comment at beginning of line',
     'any',
     '# very useful comment\n',
     []),
    ('ignore comment at end of line',
     'any',
     ' # very useful comment\n',
     []),
    ('do not indent on conditional (good)',
     'any',
     'ifeq ($(BR2_TOOLCHAIN_HAS_THREADS),y)\n'
     'FOO_CONF_OPTS += something\n'
     'endef\n',
     []),
    ('do not indent on conditional (bad)',
     'any',
     'ifeq ($(BR2_TOOLCHAIN_HAS_THREADS),y)\n'
     '\tFOO_CONF_OPTS += something\n'
     'endef\n',
     [['any:2: unexpected indent with tabs',
       '\tFOO_CONF_OPTS += something\n']]),
    ('indent after line that ends in backslash (good)',
     'any',
     'FOO_CONF_OPTS += \\\n'
     '\tsomething\n',
     []),
    ('indent after line that ends in backslash (bad)',
     'any',
     'FOO_CONF_OPTS += \\\n'
     'something\n',
     [['any:2: expected indent with tabs',
       'something\n']]),
    ('indent after 2 lines that ends in backslash (good)',
     'any',
     'FOO_CONF_OPTS += \\\n'
     '\tsomething \\\n'
     '\tsomething_else\n',
     []),
    ('indent after 2 lines that ends in backslash (bad)',
     'any',
     'FOO_CONF_OPTS += \\\n'
     '\tsomething \\\n'
     '\tsomething_else \\\n'
     'FOO_CONF_OPTS += another_thing\n',
     [['any:4: expected indent with tabs',
       'FOO_CONF_OPTS += another_thing\n']]),
    ('indent inside define (good)',
     'any',
     'define FOO_SOMETHING\n'
     '\tcommand\n'
     '\tcommand \\\n'
     '\t\targuments\n'
     'endef\n'
     'FOO_POST_PATCH_HOOKS += FOO_SOMETHING\n',
     []),
    ('indent inside define (bad, no indent)',
     'any',
     'define FOO_SOMETHING\n'
     'command\n'
     'endef\n',
     [['any:2: expected indent with tabs',
       'command\n']]),
    ('indent inside define (bad, spaces)',
     'any',
     'define FOO_SOMETHING\n'
     ' command\n'
     'endef\n',
     [['any:2: expected indent with tabs',
       ' command\n']]),
    ('indent make target (good)',
     'any',
     'make_target:\n'
     '\tcommand\n'
     '\n',
     []),
    ('indent make target (bad)',
     'any',
     'make_target:\n'
     ' command\n'
     '\n',
     [['any:2: expected indent with tabs',
       ' command\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', Indent)
def test_Indent(testname, filename, string, expected):
    """Each .mk Indent case produces exactly the expected warnings."""
    assert util.check_file(m.Indent, filename, string) == expected
# Cases for OverriddenVariable: unconditional reassignment of a variable is
# flagged; += appends are fine; := self-appends belong inside conditionals.
OverriddenVariable = [
    ('simple assignment',
     'any.mk',
     'VAR_1 = VALUE1\n',
     []),
    ('unconditional override (variable without underscore)',
     'any.mk',
     'VAR1 = VALUE1\n'
     'VAR1 = VALUE1\n',
     [['any.mk:2: unconditional override of variable VAR1',
       'VAR1 = VALUE1\n']]),
    ('unconditional override (variable with underscore, same value)',
     'any.mk',
     'VAR_1 = VALUE1\n'
     'VAR_1 = VALUE1\n',
     [['any.mk:2: unconditional override of variable VAR_1',
       'VAR_1 = VALUE1\n']]),
    ('unconditional override (variable with underscore, different value)',
     'any.mk',
     'VAR_1 = VALUE1\n'
     'VAR_1 = VALUE2\n',
     [['any.mk:2: unconditional override of variable VAR_1',
       'VAR_1 = VALUE2\n']]),
    ('warn for unconditional override even with wrong number of spaces',
     'any.mk',
     'VAR_1= VALUE1\n'
     'VAR_1 =VALUE2\n',
     [['any.mk:2: unconditional override of variable VAR_1',
       'VAR_1 =VALUE2\n']]),
    ('warn for := override',
     'any.mk',
     'VAR_1 = VALUE1\n'
     'VAR_1 := VALUE2\n',
     [['any.mk:2: unconditional override of variable VAR_1',
       'VAR_1 := VALUE2\n']]),
    ('append values outside conditional (good)',
     'any.mk',
     'VAR_1 = VALUE1\n'
     'VAR_1 += VALUE2\n',
     []),
    ('append values outside conditional (bad)',
     'any.mk',
     'VAR_1 = VALUE1\n'
     'VAR_1 := $(VAR_1), VALUE2\n',
     [['any.mk:2: unconditional override of variable VAR_1',
       'VAR_1 := $(VAR_1), VALUE2\n']]),
    ('immediate assignment inside conditional',
     'any.mk',
     'VAR_1 = VALUE1\n'
     'ifeq (condition)\n'
     'VAR_1 := $(VAR_1), VALUE2\n',
     [['any.mk:3: immediate assignment to append to variable VAR_1',
       'VAR_1 := $(VAR_1), VALUE2\n']]),
    ('immediate assignment inside conditional and unconditional override outside',
     'any.mk',
     'VAR_1 = VALUE1\n'
     'ifeq (condition)\n'
     'VAR_1 := $(VAR_1), VALUE2\n'
     'endif\n'
     'VAR_1 := $(VAR_1), VALUE2\n',
     [['any.mk:3: immediate assignment to append to variable VAR_1',
       'VAR_1 := $(VAR_1), VALUE2\n'],
      ['any.mk:5: unconditional override of variable VAR_1',
       'VAR_1 := $(VAR_1), VALUE2\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', OverriddenVariable)
def test_OverriddenVariable(testname, filename, string, expected):
    """Each OverriddenVariable case produces exactly the expected warnings."""
    assert util.check_file(m.OverriddenVariable, filename, string) == expected
# Cases for PackageHeader: .mk files must start with the canonical 5-line
# 80-hash banner followed by a blank line; bare include files are exempt.
PackageHeader = [
    ('first line (good)',
     'any',
     80 * '#' + '\n',
     []),
    ('first line (bad)',
     'any',
     '# very useful comment\n',
     [['any:1: should be 80 hashes (url#writing-rules-mk)',
       '# very useful comment\n',
       80 * '#']]),
    ('second line (bad)',
     'any',
     80 * '#' + '\n'
     '# package\n',
     [['any:2: should be 1 hash (url#writing-rules-mk)',
       '# package\n']]),
    ('full header (good)',
     'any',
     80 * '#' + '\n'
     '#\n'
     '# package\n'
     '#\n' +
     80 * '#' + '\n'
     '\n',
     []),
    ('blank line after header (good)',
     'any',
     80 * '#' + '\n'
     '#\n'
     '# package\n'
     '#\n' +
     80 * '#' + '\n'
     '\n'
     'FOO_VERSION = 1\n',
     []),
    ('blank line after header (bad)',
     'any',
     80 * '#' + '\n'
     '#\n'
     '# package\n'
     '#\n' +
     80 * '#' + '\n'
     'FOO_VERSION = 1\n',
     [['any:6: should be a blank line (url#writing-rules-mk)',
       'FOO_VERSION = 1\n']]),
    ('wrong number of hashes',
     'any',
     79 * '#' + '\n'
     '#\n'
     '# package\n'
     '#\n' +
     81 * '#' + '\n'
     '\n',
     [['any:1: should be 80 hashes (url#writing-rules-mk)',
       79 * '#' + '\n',
       80 * '#'],
      ['any:5: should be 80 hashes (url#writing-rules-mk)',
       81 * '#' + '\n',
       80 * '#']]),
    ('allow include without header',
     'any',
     'include $(sort $(wildcard package/foo/*/*.mk))\n',
     []),
]
@pytest.mark.parametrize('testname,filename,string,expected', PackageHeader)
def test_PackageHeader(testname, filename, string, expected):
    """Each PackageHeader case produces exactly the expected warnings."""
    assert util.check_file(m.PackageHeader, filename, string) == expected
# Cases for RemoveDefaultPackageSourceVariable: a _SOURCE assignment equal
# to the infra default must be dropped; gcc/binutils/gdb are exempt.
RemoveDefaultPackageSourceVariable = [
    ('bad',
     'any.mk',
     'ANY_SOURCE = any-$(ANY_VERSION).tar.gz\n',
     [['any.mk:1: remove default value of _SOURCE variable (url#generic-package-reference)',
       'ANY_SOURCE = any-$(ANY_VERSION).tar.gz\n']]),
    ('bad with path',
     './any.mk',
     'ANY_SOURCE = any-$(ANY_VERSION).tar.gz\n',
     [['./any.mk:1: remove default value of _SOURCE variable (url#generic-package-reference)',
       'ANY_SOURCE = any-$(ANY_VERSION).tar.gz\n']]),
    ('warn for correct line',
     './any.mk',
     '\n'
     '\n'
     '\n'
     'ANY_SOURCE = any-$(ANY_VERSION).tar.gz\n',
     [['./any.mk:4: remove default value of _SOURCE variable (url#generic-package-reference)',
       'ANY_SOURCE = any-$(ANY_VERSION).tar.gz\n']]),
    ('warn ignoring missing spaces',
     './any.mk',
     'ANY_SOURCE=any-$(ANY_VERSION).tar.gz\n',
     [['./any.mk:1: remove default value of _SOURCE variable (url#generic-package-reference)',
       'ANY_SOURCE=any-$(ANY_VERSION).tar.gz\n']]),
    ('good',
     './any.mk',
     'ANY_SOURCE = aNy-$(ANY_VERSION).tar.gz\n',
     []),
    ('gcc exception',
     'gcc.mk',
     'GCC_SOURCE = gcc-$(GCC_VERSION).tar.gz\n',
     []),
    ('binutils exception',
     './binutils.mk',
     'BINUTILS_SOURCE = binutils-$(BINUTILS_VERSION).tar.gz\n',
     []),
    ('gdb exception',
     'gdb/gdb.mk',
     'GDB_SOURCE = gdb-$(GDB_VERSION).tar.gz\n',
     []),
    ('package name with dash',
     'python-subprocess32.mk',
     'PYTHON_SUBPROCESS32_SOURCE = python-subprocess32-$(PYTHON_SUBPROCESS32_VERSION).tar.gz\n',
     [['python-subprocess32.mk:1: remove default value of _SOURCE variable (url#generic-package-reference)',
       'PYTHON_SUBPROCESS32_SOURCE = python-subprocess32-$(PYTHON_SUBPROCESS32_VERSION).tar.gz\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', RemoveDefaultPackageSourceVariable)
def test_RemoveDefaultPackageSourceVariable(testname, filename, string, expected):
    """Each RemoveDefaultPackageSourceVariable case gives expected warnings."""
    assert util.check_file(m.RemoveDefaultPackageSourceVariable, filename, string) == expected
SpaceBeforeBackslash = [
('no backslash',
'any.mk',
'\n',
[]),
('ignore missing indent',
'any.mk',
'define ANY_SOME_FIXUP\n'
'for i in $$(find $(STAGING_DIR)/usr/lib* -name "any*.la"); do \\\n',
[]),
('ignore missing space',
'any.mk',
'ANY_CONF_ENV= \\\n'
'\tap_cv_void_ptr_lt_long=no \\\n',
[]),
('variable',
'any.mk',
'\n'
'ANY = \\\n',
[]),
('2 spaces',
'any.mk',
'ANY = \\\n',
[['any.mk:1: use only one space before backslash',
'ANY = \\\n']]),
('warn about correct line',
'any.mk',
'\n'
'ANY = \\\n',
[['any.mk:2: use only one space before backslash',
'ANY = \\\n']]),
('tab',
'any.mk',
'ANY =\t\\\n',
[['any.mk:1: use only one space before backslash',
'ANY =\t\\\n']]),
('tabs',
'any.mk',
'ANY =\t\t\\\n',
[['any.mk:1: use only one space before backslash',
'ANY =\t\t\\\n']]),
('spaces and tabs',
'any.mk',
'ANY = \t\t\\\n',
[['any.mk:1: use only one space before backslash',
'ANY = \t\t\\\n']]),
('mixed spaces and tabs 1',
'any.mk',
'ANY = \t \t\\\n',
[['any.mk:1: use only one space before backslash',
'ANY = \t \t\\\n']]),
('mixed spaces and tabs 2',
'any.mk',
'ANY = \t \\\n',
[['any.mk:1: use only one space before backslash',
'ANY = \t \\\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', SpaceBeforeBackslash)
def test_SpaceBeforeBackslash(testname, filename, string, expected):
    """Each SpaceBeforeBackslash case produces exactly the expected warnings."""
    assert util.check_file(m.SpaceBeforeBackslash, filename, string) == expected
# Cases for TrailingBackslash: the last line of a continuation sequence
# must not end with a backslash (i.e. continue into a blank line).
TrailingBackslash = [
    ('no backslash',
     'any.mk',
     'ANY = \n',
     []),
    ('one line',
     'any.mk',
     'ANY = \\\n',
     []),
    ('2 lines',
     'any.mk',
     'ANY = \\\n'
     '\\\n',
     []),
    ('empty line after',
     'any.mk',
     'ANY = \\\n'
     '\n',
     [['any.mk:1: remove trailing backslash',
       'ANY = \\\n']]),
    ('line with spaces after',
     'any.mk',
     'ANY = \\\n'
     ' \n',
     [['any.mk:1: remove trailing backslash',
       'ANY = \\\n']]),
    ('line with tabs after',
     'any.mk',
     'ANY = \\\n'
     '\t\n',
     [['any.mk:1: remove trailing backslash',
       'ANY = \\\n']]),
    ('ignore if commented',
     'any.mk',
     '# ANY = \\\n'
     '\n',
     []),
    ('real example',
     'any.mk',
     'ANY_CONF_ENV= \t\\\n'
     '\tap_cv_void_ptr_lt_long=no \\\n'
     '\n',
     [['any.mk:2: remove trailing backslash',
       '\tap_cv_void_ptr_lt_long=no \\\n']]),
    ('ignore whitespace 1',
     'any.mk',
     'ANY = \t\t\\\n',
     []),
    ('ignore whitespace 2',
     'any.mk',
     'ANY = \t \t\\\n',
     []),
    ('ignore whitespace 3',
     'any.mk',
     'ANY = \t \\\n',
     []),
]
@pytest.mark.parametrize('testname,filename,string,expected', TrailingBackslash)
def test_TrailingBackslash(testname, filename, string, expected):
    """Each TrailingBackslash case produces exactly the expected warnings."""
    assert util.check_file(m.TrailingBackslash, filename, string) == expected
# Cases for TypoInPackageVariable: variables assigned in foo.mk must start
# with FOO_ (or HOST_FOO_/ROOTFS_/a _PROVIDES alias), else a typo is flagged.
TypoInPackageVariable = [
    ('good',
     'any.mk',
     'ANY_VAR = \n',
     []),
    ('good with path 1',
     './any.mk',
     'ANY_VAR += \n',
     []),
    ('good with path 2',
     'any/any.mk',
     'ANY_VAR = \n',
     []),
    ('bad =',
     'any.mk',
     'OTHER_VAR = \n',
     [['any.mk:1: possible typo: OTHER_VAR -> *ANY*',
       'OTHER_VAR = \n']]),
    ('bad +=',
     'any.mk',
     'OTHER_VAR += \n',
     [['any.mk:1: possible typo: OTHER_VAR -> *ANY*',
       'OTHER_VAR += \n']]),
    ('ignore missing space',
     'any.mk',
     'OTHER_VAR= \n',
     [['any.mk:1: possible typo: OTHER_VAR -> *ANY*',
       'OTHER_VAR= \n']]),
    ('use path in the warning',
     './any.mk',
     'OTHER_VAR = \n',
     [['./any.mk:1: possible typo: OTHER_VAR -> *ANY*',
       'OTHER_VAR = \n']]),
    ('another name',
     'other.mk',
     'ANY_VAR = \n',
     [['other.mk:1: possible typo: ANY_VAR -> *OTHER*',
       'ANY_VAR = \n']]),
    ('libc exception',
     './any.mk',
     'BR_LIBC = \n',
     []),
    ('rootfs exception',
     'any.mk',
     'ROOTFS_ANY_VAR += \n',
     []),
    ('host (good)',
     'any.mk',
     'HOST_ANY_VAR += \n',
     []),
    ('host (bad)',
     'any.mk',
     'HOST_OTHER_VAR = \n',
     [['any.mk:1: possible typo: HOST_OTHER_VAR -> *ANY*',
       'HOST_OTHER_VAR = \n']]),
    ('provides',
     'any.mk',
     'ANY_PROVIDES = other thing\n'
     'OTHER_VAR = \n',
     []),
    ('ignore space',
     'any.mk',
     'ANY_PROVIDES = thing other \n'
     'OTHER_VAR = \n',
     []),
    ('wrong provides',
     'any.mk',
     'ANY_PROVIDES = other\n'
     'OTHERS_VAR = \n',
     [['any.mk:2: possible typo: OTHERS_VAR -> *ANY*',
       'OTHERS_VAR = \n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', TypoInPackageVariable)
def test_TypoInPackageVariable(testname, filename, string, expected):
    """Each TypoInPackageVariable case produces exactly the expected warnings."""
    assert util.check_file(m.TypoInPackageVariable, filename, string) == expected
# Cases for UselessFlag: assignments that merely restate an infra default
# (e.g. FOO_INSTALL_TARGET = YES) are flagged unless inside a conditional.
UselessFlag = [
    ('autoreconf no',
     'any.mk',
     'ANY_AUTORECONF=NO\n',
     [['any.mk:1: useless default value (url#_infrastructure_for_autotools_based_packages)',
       'ANY_AUTORECONF=NO\n']]),
    ('host autoreconf no',
     'any.mk',
     'HOST_ANY_AUTORECONF\n',
     []),
    ('autoreconf yes',
     'any.mk',
     'ANY_AUTORECONF=YES\n',
     []),
    ('libtool_patch yes',
     'any.mk',
     'ANY_LIBTOOL_PATCH\t= YES\n',
     [['any.mk:1: useless default value (url#_infrastructure_for_autotools_based_packages)',
       'ANY_LIBTOOL_PATCH\t= YES\n']]),
    ('libtool_patch no',
     'any.mk',
     'ANY_LIBTOOL_PATCH= \t NO\n',
     []),
    ('generic',
     'any.mk',
     'ANY_INSTALL_IMAGES = NO\n'
     'ANY_INSTALL_REDISTRIBUTE = YES\n'
     'ANY_INSTALL_STAGING = NO\n'
     'ANY_INSTALL_TARGET = YES\n',
     [['any.mk:1: useless default value (url#_infrastructure_for_packages_with_specific_build_systems)',
       'ANY_INSTALL_IMAGES = NO\n'],
      ['any.mk:2: useless default value (url#_infrastructure_for_packages_with_specific_build_systems)',
       'ANY_INSTALL_REDISTRIBUTE = YES\n'],
      ['any.mk:3: useless default value (url#_infrastructure_for_packages_with_specific_build_systems)',
       'ANY_INSTALL_STAGING = NO\n'],
      ['any.mk:4: useless default value (url#_infrastructure_for_packages_with_specific_build_systems)',
       'ANY_INSTALL_TARGET = YES\n']]),
    ('conditional',
     'any.mk',
     'ifneq (condition)\n'
     'ANY_INSTALL_IMAGES = NO\n'
     'endif\n'
     'ANY_INSTALL_REDISTRIBUTE = YES\n',
     [['any.mk:4: useless default value (url#_infrastructure_for_packages_with_specific_build_systems)',
       'ANY_INSTALL_REDISTRIBUTE = YES\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', UselessFlag)
def test_UselessFlag(testname, filename, string, expected):
    """Each UselessFlag case produces exactly the expected warnings."""
    assert util.check_file(m.UselessFlag, filename, string) == expected
# Cases for VariableWithBraces: make variables use $(); ${} is reserved for
# values expanded by the shell at build time.
VariableWithBraces = [
    ('good',
     'xmlstarlet.mk',
     'XMLSTARLET_CONF_OPTS += \\\n'
     '\t--with-libxml-prefix=$(STAGING_DIR)/usr \\\n',
     []),
    ('bad',
     'xmlstarlet.mk',
     'XMLSTARLET_CONF_OPTS += \\\n'
     '\t--with-libxml-prefix=${STAGING_DIR}/usr \\\n',
     [['xmlstarlet.mk:2: use $() to delimit variables, not ${}',
       '\t--with-libxml-prefix=${STAGING_DIR}/usr \\\n']]),
    ('expanded by the shell',
     'sg3_utils.mk',
     '\tfor prog in xcopy zone; do \\\n'
     '\t\t$(RM) $(TARGET_DIR)/usr/bin/sg_$${prog} ; \\\n'
     '\tdone\n',
     []),
    ('comments',
     'any.mk',
     '#\t--with-libxml-prefix=${STAGING_DIR}/usr \\\n',
     []),
]
@pytest.mark.parametrize('testname,filename,string,expected', VariableWithBraces)
def test_VariableWithBraces(testname, filename, string, expected):
    """Each VariableWithBraces case produces exactly the expected warnings."""
    assert util.check_file(m.VariableWithBraces, filename, string) == expected

View File

@ -0,0 +1,118 @@
import pytest
import checkpackagelib.test_util as util
import checkpackagelib.lib_patch as m
# Cases for ApplyOrder: patch files must be named <number>-<description>.patch
# so they apply in a deterministic order.
ApplyOrder = [
    ('standard',  # catches https://bugs.busybox.net/show_bug.cgi?id=11271
     '0001-description.patch',
     '',
     []),
    ('standard with path',
     'path/0001-description.patch',
     '',
     []),
    ('acceptable format',
     '1-description.patch',
     '',
     []),
    ('acceptable format with path',
     'path/1-description.patch',
     '',
     []),
    ('old format',
     'package-0001-description.patch',
     '',
     [['package-0001-description.patch:0: use name <number>-<description>.patch (url#_providing_patches)']]),
    ('old format with path',
     'path/package-0001-description.patch',
     '',
     [['path/package-0001-description.patch:0: use name <number>-<description>.patch (url#_providing_patches)']]),
    ('missing number',
     'description.patch',
     '',
     [['description.patch:0: use name <number>-<description>.patch (url#_providing_patches)']]),
    ('missing number with path',
     'path/description.patch',
     '',
     [['path/description.patch:0: use name <number>-<description>.patch (url#_providing_patches)']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', ApplyOrder)
def test_ApplyOrder(testname, filename, string, expected):
    """Each ApplyOrder case produces exactly the expected warnings."""
    assert util.check_file(m.ApplyOrder, filename, string) == expected
# Cases for NumberedSubject: git patches should be generated with
# 'git format-patch -N' so the subject has no [PATCH n/m] numbering.
NumberedSubject = [
    ('no subject',
     'patch',
     '',
     []),
    ('acceptable because it is not a git patch',
     'patch',
     'Subject: [PATCH 24/105] text\n',
     []),
    ('good',
     'patch',
     'Subject: [PATCH] text\n'
     'diff --git a/configure.ac b/configure.ac\n',
     []),
    ('bad',
     'patch',
     'Subject: [PATCH 24/105] text\n'
     'diff --git a/configure.ac b/configure.ac\n',
     [["patch:1: generate your patches with 'git format-patch -N'",
       'Subject: [PATCH 24/105] text\n']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', NumberedSubject)
def test_NumberedSubject(testname, filename, string, expected):
    """Each NumberedSubject case produces exactly the expected warnings."""
    assert util.check_file(m.NumberedSubject, filename, string) == expected
# Cases for Sob: every patch header must carry a Signed-off-by line.
Sob = [
    ('good',
     'patch',
     'Signed-off-by: John Doe <johndoe@example.com>\n',
     []),
    ('empty',
     'patch',
     '',
     [['patch:0: missing Signed-off-by in the header (url#_format_and_licensing_of_the_package_patches)']]),
    ('bad',
     'patch',
     'Subject: [PATCH 24/105] text\n',
     [['patch:0: missing Signed-off-by in the header (url#_format_and_licensing_of_the_package_patches)']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', Sob)
def test_Sob(testname, filename, string, expected):
    """Each Sob case produces exactly the expected warnings."""
    assert util.check_file(m.Sob, filename, string) == expected
# Cases for Upstream: every patch header must document its upstream status
# with an Upstream: line.
Upstream = [
    ('good',
     'patch',
     'Upstream: https://some/amazing/patch/submission\n',
     []),
    ('empty',
     'patch',
     '',
     [['patch:0: missing Upstream in the header (url#_additional_patch_documentation)']]),
    ('bad',
     'patch',
     'Subject: [PATCH 24/105] text\n',
     [['patch:0: missing Upstream in the header (url#_additional_patch_documentation)']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', Upstream)
def test_Upstream(testname, filename, string, expected):
    """Each Upstream case produces exactly the expected warnings."""
    assert util.check_file(m.Upstream, filename, string) == expected

View File

@ -0,0 +1,131 @@
import os
import pytest
import re
import tempfile
import checkpackagelib.test_util as util
import checkpackagelib.lib_sysv as m
from checkpackagelib.test_tool import check_file as tool_check_file
# Scratch directory for tests that need real files on disk.
# NOTE(review): mkdtemp() creates this directory at import time and nothing
# visible here removes it; presumably later tests in this file use it and
# rely on process exit for cleanup — confirm and consider TemporaryDirectory.
workdir = os.path.join(tempfile.mkdtemp(suffix='-checkpackagelib-test-sysv'))
# Matches the mkdtemp path so warnings can be normalized to a stable 'dir'.
workdir_regex = re.compile(r'/tmp/tmp[^/]*-checkpackagelib-test-sysv')
# Cases for the SysV init-script Indent check: case arms and bodies must be
# indented with tabs only.
Indent = [
    ('empty file',
     'any',
     '',
     []),
    ('empty line',
     'any',
     '\n',
     []),
    ('ignore whitespace',
     'any',
     ' \n',
     []),
    ('spaces',
     'any',
     'case "$1" in\n'
     ' start)',
     [['any:2: should be indented with tabs (url#adding-packages-start-script)',
       ' start)']]),
    ('tab',
     'any',
     'case "$1" in\n'
     '\tstart)',
     []),
    ('tabs and spaces',
     'any',
     'case "$1" in\n'
     '\t start)',
     [['any:2: should be indented with tabs (url#adding-packages-start-script)',
       '\t start)']]),
    ('spaces and tabs',
     'any',
     'case "$1" in\n'
     ' \tstart)',
     [['any:2: should be indented with tabs (url#adding-packages-start-script)',
       ' \tstart)']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', Indent)
def test_Indent(testname, filename, string, expected):
    """Each SysV Indent case produces exactly the expected warnings."""
    assert util.check_file(m.Indent, filename, string) == expected
# Cases for the SysV NotExecutable check: init scripts get their mode from
# '$(INSTALL) -D -m 0755' in the .mk file, not from the file itself.
NotExecutable = [
    ('SysV',
     'sh-shebang.sh',
     0o775,
     '#!/bin/sh',
     ["dir/sh-shebang.sh:0: This file does not need to be executable,"
      " just make sure you use '$(INSTALL) -D -m 0755' in the .mk file"]),
]
@pytest.mark.parametrize('testname,filename,permissions,string,expected', NotExecutable)
def test_NotExecutable(testname, filename, permissions, string, expected):
    """The SysV NotExecutable tool flags executable init scripts."""
    assert tool_check_file(m.NotExecutable, filename, string, permissions) == expected
# Cases for Variables: an init script must define DAEMON (basename only,
# once), may define PIDFILE in terms of $DAEMON, and must be named
# S<number><number><daemon name>.
Variables = [
    ('empty file',
     'any',
     '',
     [['any:0: DAEMON variable not defined (url#adding-packages-start-script)']]),
    ('daemon and pidfile ok',
     'package/busybox/S01syslogd',
     'DAEMON="syslogd"\n'
     'PIDFILE="/var/run/$DAEMON.pid"\n',
     []),
    ('wrong filename',
     'package/busybox/S01syslog',
     'DAEMON="syslogd"\n'
     'PIDFILE="/var/run/${DAEMON}.pid"\n',
     [['package/busybox/S01syslog:0: filename should be S<number><number><daemon name> (url#adding-packages-start-script)',
       'expecting S<number><number>syslogd']]),
    ('no pidfile ok',
     'S99something',
     'DAEMON="something"\n',
     []),
    ('hardcoded pidfile',
     'S99something',
     'DAEMON="something"\n'
     'PIDFILE="/var/run/something.pid"\n',
     [['S99something:2: Incorrect PIDFILE value (url#adding-packages-start-script)',
       'PIDFILE="/var/run/something.pid"\n',
       'PIDFILE="/var/run/$DAEMON.pid"']]),
    ('redefined daemon',
     'S50any',
     'DAEMON="any"\n'
     'DAEMON="other"\n',
     [['S50any:2: DAEMON variable redefined (url#adding-packages-start-script)',
       'DAEMON="other"\n']]),
    ('daemon name with dash',
     'S82cups-browsed',
     'DAEMON="cups-browsed"',
     []),
    ('daemon with path',
     'S50avahi-daemon',
     'DAEMON=/usr/sbin/avahi-daemon',
     [['S50avahi-daemon:1: Do not include path in DAEMON (url#adding-packages-start-script)',
       'DAEMON=/usr/sbin/avahi-daemon',
       'DAEMON="avahi-daemon"']]),
    ('daemon with path and wrong filename',
     'S50avahi',
     'DAEMON=/usr/sbin/avahi-daemon',
     [['S50avahi:1: Do not include path in DAEMON (url#adding-packages-start-script)',
       'DAEMON=/usr/sbin/avahi-daemon',
       'DAEMON="avahi-daemon"'],
      ['S50avahi:0: filename should be S<number><number><daemon name> (url#adding-packages-start-script)',
       'expecting S<number><number>avahi-daemon']]),
]
@pytest.mark.parametrize('testname,filename,string,expected', Variables)
def test_Variables(testname, filename, string, expected):
    """Each Variables case produces exactly the expected warnings."""
    assert util.check_file(m.Variables, filename, string) == expected

View File

@ -0,0 +1,140 @@
import os
import pytest
import re
import tempfile
import checkpackagelib.tool as m
# Matches the TemporaryDirectory path so tool output can be normalized.
workdir_regex = re.compile(r'/tmp/tmp[^/]*-checkpackagelib-test-tool')


def check_file(tool, filename, string, permissions=None):
    """Run a checkpackagelib tool class over a throwaway file.

    Writes *string* (encoded) to *filename* inside a temporary directory,
    optionally chmods it to *permissions*, runs ``tool(path).run()`` while
    the file still exists, and returns the warnings with the temporary
    directory path rewritten to the stable token 'dir'. A ``None`` result
    from the tool is normalized to an empty list.
    """
    with tempfile.TemporaryDirectory(suffix='-checkpackagelib-test-tool') as workdir:
        target = os.path.join(workdir, filename)
        with open(target, 'wb') as handle:
            handle.write(string.encode())
        if permissions:
            os.chmod(target, permissions)
        outcome = tool(target).run()
    if outcome is None:
        return []
    return [workdir_regex.sub('dir', warning) for warning in outcome]
# Cases for the generic NotExecutable tool: .mk files must not carry the
# executable bit.
NotExecutable = [
    ('664',
     'package.mk',
     0o664,
     '',
     []),
    ('775',
     'package.mk',
     0o775,
     '',
     ["dir/package.mk:0: This file does not need to be executable"]),
]
@pytest.mark.parametrize('testname,filename,permissions,string,expected', NotExecutable)
def test_NotExecutable(testname, filename, permissions, string, expected):
    """NotExecutable flags files that carry the executable bit."""
    assert check_file(m.NotExecutable, filename, string, permissions) == expected
# Cases checking that a hint() override is appended verbatim to the
# NotExecutable warning text.
NotExecutable_hint = [
    ('no hint',
     "",
     'sh-shebang.sh',
     0o775,
     '#!/bin/sh',
     ["dir/sh-shebang.sh:0: This file does not need to be executable"]),
    ('hint',
     ", very special hint",
     'sh-shebang.sh',
     0o775,
     '#!/bin/sh',
     ["dir/sh-shebang.sh:0: This file does not need to be executable, very special hint"]),
]
@pytest.mark.parametrize('testname,hint,filename,permissions,string,expected', NotExecutable_hint)
def test_NotExecutable_hint(testname, hint, filename, permissions, string, expected):
    """A hint() override is appended to the NotExecutable warning text."""
    class HintedNotExecutable(m.NotExecutable):
        def hint(self):
            return hint

    assert check_file(HintedNotExecutable, filename, string, permissions) == expected
# Cases for the Flake8 tool wrapper: runs flake8 on the file and prefixes
# its raw output with a single summary warning.
Flake8 = [
    ('empty',
     'empty.py',
     '',
     []),
    ('W391',
     'blank-line.py',
     '\n',
     ["dir/blank-line.py:0: run 'flake8' and fix the warnings",
      "dir/blank-line.py:1:1: W391 blank line at end of file"]),
    ('more than one warning',
     'file',
     'import os\n'
     'import re\n'
     '\n',
     ["dir/file:0: run 'flake8' and fix the warnings",
      "dir/file:1:1: F401 'os' imported but unused\n"
      "dir/file:2:1: F401 're' imported but unused\n"
      'dir/file:3:1: W391 blank line at end of file']),
]
@pytest.mark.parametrize('testname,filename,string,expected', Flake8)
def test_Flake8(testname, filename, string, expected):
    """The Flake8 tool reports flake8 findings for the file."""
    assert check_file(m.Flake8, filename, string) == expected
# (testname, filename, content, expected warnings) cases for the
# Shellcheck tool; expected strings reproduce shellcheck's report verbatim
# (with the temp dir normalized to 'dir' by check_file).
Shellcheck = [
    ('missing shebang',
     'empty.sh',
     '',
     ["dir/empty.sh:0: run 'shellcheck' and fix the warnings",
      "In dir/empty.sh line 1:\n"
      "^-- SC2148: Tips depend on target shell and yours is unknown. Add a shebang or a 'shell' directive.\n"
      "For more information:\n"
      "  https://www.shellcheck.net/wiki/SC2148 -- Tips depend on target shell and y..."]),
    ('sh shebang',
     'sh-shebang.sh',
     '#!/bin/sh',
     []),
    ('bash shebang',
     'bash-shebang.sh',
     '#!/bin/bash',
     []),
    ('2 warnings',
     'unused.sh',
     'unused=""',
     ["dir/unused.sh:0: run 'shellcheck' and fix the warnings",
      "In dir/unused.sh line 1:\n"
      'unused=""\n'
      "^-- SC2148: Tips depend on target shell and yours is unknown. Add a shebang or a 'shell' directive.\n"
      "^----^ SC2034: unused appears unused. Verify use (or export if used externally).\n"
      "For more information:\n"
      "  https://www.shellcheck.net/wiki/SC2148 -- Tips depend on target shell and y...\n"
      "  https://www.shellcheck.net/wiki/SC2034 -- unused appears unused. Verify use..."]),
    ('tab',
     'tab.sh',
     '\t#!/bin/sh',
     ["dir/tab.sh:0: run 'shellcheck' and fix the warnings",
      "In dir/tab.sh line 1:\n"
      '\t#!/bin/sh\n'
      "^-- SC1114: Remove leading spaces before the shebang.\n"
      "For more information:\n"
      "  https://www.shellcheck.net/wiki/SC1114 -- Remove leading spaces before the ..."]),
]


@pytest.mark.parametrize('testname,filename,string,expected', Shellcheck)
def test_Shellcheck(testname, filename, string, expected):
    warnings = check_file(m.Shellcheck, filename, string)
    assert warnings == expected

View File

@ -0,0 +1,8 @@
def check_file(check_function, filename, string):
    """Feed 'string' line by line through a checkpackagelib check class and
    collect every non-None warning emitted by before(), check_line() and
    after().  Line numbers passed to check_line() are 1-based."""
    checker = check_function(filename, 'url')
    raw = [checker.before()]
    # splitlines(True) keeps the newline on each line, as the real
    # file-driven runner does.
    raw.extend(checker.check_line(lineno, line)
               for lineno, line in enumerate(string.splitlines(True), start=1))
    raw.append(checker.after())
    return [warning for warning in raw if warning is not None]

View File

@ -0,0 +1,44 @@
import flake8.main.application
import os
import subprocess
import tempfile
from checkpackagelib.base import _Tool
class NotExecutable(_Tool):
    """Warn when the file carries an executable bit it does not need."""

    def ignore(self):
        # Extension point: subclasses may exempt specific files.
        return False

    def run(self):
        if self.ignore():
            return
        if not os.access(self.filename, os.X_OK):
            return
        # self.hint() lets subclasses append extra guidance to the message.
        return ["{}:0: This file does not need to be executable{}".format(self.filename, self.hint())]
class Flake8(_Tool):
    # Runs the flake8 Python linter on self.filename and reformats its
    # report as checkpackage warnings.
    def run(self):
        with tempfile.NamedTemporaryFile() as output:
            app = flake8.main.application.Application()
            # flake8 writes its report into the temp file; read it back
            # while the file still exists.
            app.run(['--output-file={}'.format(output.name), self.filename])
            stdout = output.readlines()
        processed_output = [str(line.decode().rstrip()) for line in stdout if line]
        if len(stdout) == 0:
            # no report lines means no findings
            return
        return ["{}:0: run 'flake8' and fix the warnings".format(self.filename),
                '\n'.join(processed_output)]
class Shellcheck(_Tool):
    """Run 'shellcheck' on the file and report its findings as warnings."""

    def run(self):
        try:
            proc = subprocess.run(['shellcheck', self.filename],
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE)
        except FileNotFoundError:
            # shellcheck is not installed on this system
            return ["{}:0: failed to call 'shellcheck'".format(self.filename)]
        if proc.returncode == 0:
            return
        findings = [line.decode().rstrip() for line in proc.stdout.splitlines() if line]
        return ["{}:0: run 'shellcheck' and fix the warnings".format(self.filename),
                '\n'.join(findings)]

View File

@ -0,0 +1,143 @@
import os
import re
# Directories whose files are never scanned by the symbol checker.
ignored_directories = [
    'support/testing/',
]

# Makefile
symbols_used_only_in_source_code = [
    'BR2_USE_CCACHE',
]

# package/skeleton/Config.in
symbols_used_only_for_host_variant = [
    'BR2_PACKAGE_SKELETON',
]

# Symbols the user may set on the make command line only.
# Makefile
# package/pkg-generic.mk
symbols_defined_only_at_command_line = [
    'BR2_GRAPH_ALT',
    'BR2_GRAPH_DEPS_OPTS',
    'BR2_GRAPH_DOT_OPTS',
    'BR2_GRAPH_OUT',
    'BR2_GRAPH_SIZE_OPTS',
    'BR2_INSTRUMENTATION_SCRIPTS',
]

# Makefile
symbols_defined_only_when_using_br2_external = [
    'BR2_EXTERNAL',
    'BR2_EXTERNAL_DIRS',
    'BR2_EXTERNAL_MKS',
    'BR2_EXTERNAL_NAMES',
]

# boot/barebox/barebox.mk
symbols_defined_only_for_barebox_variant = [
    'BR2_TARGET_BAREBOX_AUX_BAREBOXENV',
]

# toolchain/toolchain/toolchain.mk
# toolchain/toolchain-buildroot/toolchain-buildroot.mk
# toolchain/toolchain-bare-metal-buildroot/toolchain-bare-metal-buildroot.mk
symbols_not_defined_for_fake_virtual_packages = [
    'BR2_PACKAGE_HAS_TOOLCHAIN',
    'BR2_PACKAGE_HAS_TOOLCHAIN_BUILDROOT',
    'BR2_PACKAGE_HAS_TOOLCHAIN_BARE_METAL_BUILDROOT',
    'BR2_PACKAGE_PROVIDES_TOOLCHAIN',
    'BR2_PACKAGE_PROVIDES_TOOLCHAIN_BUILDROOT',
    'BR2_PACKAGE_PROVIDES_TOOLCHAIN_BARE_METAL_BUILDROOT',
]

# Per-compression suffixes that fs/common.mk defines only for some rootfs
# types.
# fs/common.mk
suffixes_not_defined_for_all_rootfs_types = [
    '_BZIP2',
    '_GZIP',
    '_LZ4',
    '_LZMA',
    '_LZO',
    '_XZ',
    '_ZSTD',
]

# fs/common.mk
rootfs_prefix = 'BR2_TARGET_ROOTFS_'
# package/pkg-generic.mk
package_prefix = 'BR2_PACKAGE_'
# package/pkg-generic.mk
boot_prefix = 'BR2_TARGET_'
# package/pkg-generic.mk
toolchain_prefix = 'BR2_'

# boot/barebox/barebox.mk
barebox_infra_suffixes = [
    '',
    '_BAREBOXENV',
    '_BOARD_DEFCONFIG',
    '_CONFIG_FRAGMENT_FILES',
    '_CUSTOM_CONFIG_FILE',
    '_CUSTOM_EMBEDDED_ENV_PATH',
    '_CUSTOM_ENV',
    '_CUSTOM_ENV_PATH',
    '_IMAGE_FILE',
    '_USE_CUSTOM_CONFIG',
    '_USE_DEFCONFIG',
]

# Any BR2_* word is treated as a kconfig symbol.
re_kconfig_symbol = re.compile(r'\b(BR2_\w+)\b')
# Example lines to be handled:
#  config BR2_TOOLCHAIN_EXTERNAL_PREFIX
#  menuconfig BR2_PACKAGE_GST1_PLUGINS_BASE
re_kconfig_config = re.compile(r'^\s*(menu|)config\s+(BR2_\w+)')
# Example lines to be handled:
#  default "uclibc" if BR2_TOOLCHAIN_BUILDROOT_UCLIBC
#  default BR2_TARGET_GRUB2_BUILTIN_MODULES if BR2_TARGET_GRUB2_BUILTIN_MODULES != ""
#  default y if BR2_HOSTARCH = "powerpc"
re_kconfig_default = re.compile(r'^\s*default\s')
re_kconfig_default_before_conditional = re.compile(r'^.*\bif\b')
re_kconfig_default_legacy_comment = re.compile(r'#\s*legacy')
# Example lines to be handled:
#  depends on !(BR2_TOOLCHAIN_USES_GLIBC && BR2_TOOLCHAIN_USES_MUSL)
#  depends on BR2_HOSTARCH = "x86_64" || BR2_HOSTARCH = "x86"
re_kconfig_depends = re.compile(r'^\s*depends on\s')
# Example lines to be handled:
#  select BR2_PACKAGE_HOST_NODEJS if BR2_PACKAGE_NODEJS_MODULES_ADDITIONAL != ""
#  select BR2_PACKAGE_LIBDRM if !(BR2_arm && BR2_PACKAGE_IMX_GPU_VIV_OUTPUT_FB)
#  select BR2_PACKAGE_OPENSSL if !(BR2_PACKAGE_GNUTLS || BR2_PACKAGE_MBEDTLS)
re_kconfig_select = re.compile(r'^\s*select\s')
re_kconfig_select_conditional = re.compile(r'\bif\s.*')
# Example lines to be handled:
#  if !BR2_SKIP_LEGACY
#  if (BR2_PACKAGE_FREESCALE_IMX_PLATFORM_IMX51 || BR2_PACKAGE_FREESCALE_IMX_PLATFORM_IMX53)
#  if BR2_PACKAGE_HAS_LUAINTERPRETER && !BR2_STATIC_LIBS
#  if BR2_PACKAGE_QEMU_CUSTOM_TARGETS = ""
re_kconfig_if = re.compile(r'^\s*if\s')
# Example lines to be handled:
#  source "$BR2_BASE_DIR/.br2-external.in.jpeg"
re_kconfig_source = re.compile(r'^\s*source\b')
re_kconfig_choice = re.compile(r'^\s*choice\b')
re_kconfig_endchoice = re.compile(r'^\s*endchoice\b')
re_makefile_eval = re.compile(r'^\s*\$\(eval\b')
re_menu = re.compile(r'^\s*menu\b')
re_endmenu = re.compile(r'^\s*endmenu\b')
re_comments = re.compile(r'#.*$')
# "BR2_FOO ... still referenced" comments in Config.in.legacy.
re_legacy_special_comment = re.compile(r'#.*(BR2_\w+)\s.*still referenced')
re_host_symbol = re.compile(r'(BR2_PACKAGE_HOST_\w+|BR2_PACKAGE_HAS_HOST_\w+)')
re_makefile_symbol_usage = re.compile(r'\$\((BR2_\w+)\)')
re_makefile_symbol_export = re.compile(r'export\s*(BR2_\w+)')
re_makefile_symbol_attribution = re.compile(r'^\s*(BR2_\w+)\s*[?:=]')
def get_package_from_filename(filename):
    """Derive the BR2 symbol stem (upper-case, '-' -> '_') from the
    basename of a package .mk file path."""
    stem = os.path.basename(filename)[:-3]  # drop the '.mk' suffix
    return stem.replace('-', '_').upper()
def is_an_optional_symbol_for_a_roofts(symbol):
    """True for BR2_TARGET_ROOTFS_* symbols whose suffix fs/common.mk only
    defines for some rootfs types (so a missing definition is fine)."""
    if not symbol.startswith(rootfs_prefix):
        return False
    return any(symbol.endswith(suffix)
               for suffix in suffixes_not_defined_for_all_rootfs_types)
def file_belongs_to_an_ignored_diretory(filename):
    """True when filename lives under a directory excluded from checks."""
    return any(filename.startswith(directory)
               for directory in ignored_directories)

View File

@ -0,0 +1,205 @@
import re
import checksymbolslib.br as br
# Human-readable labels for the entry types stored per symbol in DB.
choice = 'part of a choice'
definition = 'definition'
helper = 'possible config helper'
legacy_definition = 'legacy definition'
legacy_note = 'legacy note'
legacy_usage = 'legacy usage'
select = 'selected'
usage = 'normal usage'
usage_in_legacy = 'usage inside legacy'
virtual = 'virtual'
class DB:
    """In-memory index of BR2_* symbol sightings across the tree.

    all_symbols maps a symbol name to a dict keyed by entry type (the
    module-level labels above); each entry type maps a filename to the
    list of line numbers where that entry was seen.  Iteration order is
    insertion order everywhere, which fixes the order of the emitted
    warnings.
    """

    def __init__(self):
        self.all_symbols = {}

    def __str__(self):
        return str(self.all_symbols)

    def add_symbol_entry(self, symbol, filename, lineno, entry_type):
        """Record one occurrence of symbol in filename at lineno."""
        if symbol not in self.all_symbols:
            self.all_symbols[symbol] = {}
        if entry_type not in self.all_symbols[symbol]:
            self.all_symbols[symbol][entry_type] = {}
        if filename not in self.all_symbols[symbol][entry_type]:
            self.all_symbols[symbol][entry_type][filename] = []
        self.all_symbols[symbol][entry_type][filename].append(lineno)

    def add_symbol_choice(self, symbol, filename, lineno):
        self.add_symbol_entry(symbol, filename, lineno, choice)

    def add_symbol_definition(self, symbol, filename, lineno):
        self.add_symbol_entry(symbol, filename, lineno, definition)

    def add_symbol_helper(self, symbol, filename, lineno):
        self.add_symbol_entry(symbol, filename, lineno, helper)

    def add_symbol_legacy_definition(self, symbol, filename, lineno):
        self.add_symbol_entry(symbol, filename, lineno, legacy_definition)

    def add_symbol_legacy_note(self, symbol, filename, lineno):
        self.add_symbol_entry(symbol, filename, lineno, legacy_note)

    def add_symbol_legacy_usage(self, symbol, filename, lineno):
        self.add_symbol_entry(symbol, filename, lineno, legacy_usage)

    def add_symbol_select(self, symbol, filename, lineno):
        self.add_symbol_entry(symbol, filename, lineno, select)

    def add_symbol_usage(self, symbol, filename, lineno):
        self.add_symbol_entry(symbol, filename, lineno, usage)

    def add_symbol_usage_in_legacy(self, symbol, filename, lineno):
        self.add_symbol_entry(symbol, filename, lineno, usage_in_legacy)

    def add_symbol_virtual(self, symbol, filename, lineno):
        self.add_symbol_entry(symbol, filename, lineno, virtual)

    def get_symbols_with_pattern(self, pattern):
        """Return the subset of all_symbols whose name matches the regex
        pattern (re.search semantics), preserving insertion order."""
        re_pattern = re.compile(pattern)
        return {symbol: entries
                for symbol, entries in self.all_symbols.items()
                if re_pattern.search(symbol)}

    def _collect_warnings(self, sources, message, required=(), excluded=(), skip=None):
        """Shared scan used by all get_warnings_* methods.

        For every symbol that has all 'required' entry types, none of the
        'excluded' ones, and is not rejected by the optional
        skip(symbol, entries) predicate, emit one
        (filename, lineno, message.format(symbol)) tuple per occurrence of
        the entry types listed in 'sources', in that order.
        """
        warnings = []
        for symbol, entries in self.all_symbols.items():
            if any(entry_type not in entries for entry_type in required):
                continue
            if any(entry_type in entries for entry_type in excluded):
                continue
            if skip is not None and skip(symbol, entries):
                continue
            all_items = []
            for entry_type in sources:
                all_items += entries.get(entry_type, {}).items()
            for filename, linenos in all_items:
                for lineno in linenos:
                    warnings.append((filename, lineno, message.format(symbol)))
        return warnings

    def get_warnings_for_choices_selected(self):
        """Symbols inside a 'choice' must not be the target of a select."""
        return self._collect_warnings(
            sources=[select],
            message='{} is part of a "choice" and should not be "select"ed',
            required=[choice, select])

    def get_warnings_for_legacy_symbols_being_used(self):
        """Legacy symbols must not be referenced by non-legacy files."""
        return self._collect_warnings(
            sources=[usage],
            message='{} is a legacy symbol and should not be referenced',
            required=[legacy_definition, usage])

    def get_warnings_for_legacy_symbols_being_defined(self):
        """Legacy symbols must not be redefined outside Config.in.legacy."""
        return self._collect_warnings(
            sources=[definition],
            message='{} is a legacy symbol and should not be redefined',
            required=[legacy_definition, definition])

    def get_warnings_for_symbols_without_definition(self):
        """Referenced symbols must be defined somewhere, except for the
        well-known families that are defined implicitly."""
        def skip(symbol, entries):
            # Symbols legitimately defined elsewhere: host variants,
            # optional rootfs suffixes, command-line-only symbols,
            # br2-external symbols, barebox variants, fake virtuals.
            return (br.re_host_symbol.search(symbol) is not None
                    or br.is_an_optional_symbol_for_a_roofts(symbol)
                    or symbol in br.symbols_defined_only_at_command_line
                    or symbol in br.symbols_defined_only_when_using_br2_external
                    or symbol in br.symbols_defined_only_for_barebox_variant
                    or symbol in br.symbols_not_defined_for_fake_virtual_packages)
        return self._collect_warnings(
            sources=[usage, legacy_usage, usage_in_legacy],
            message='{} referenced but not defined',
            excluded=[definition, legacy_definition, virtual],
            skip=skip)

    def get_warnings_for_symbols_without_usage(self):
        """Defined symbols must be referenced somewhere, except helpers,
        choice members and the known source-code-only symbols."""
        def skip(symbol, entries):
            return (symbol in br.symbols_used_only_in_source_code
                    or symbol in br.symbols_used_only_for_host_variant)
        return self._collect_warnings(
            sources=[definition, legacy_definition],
            message='{} defined but not referenced',
            excluded=[usage, usage_in_legacy, legacy_usage, helper, choice],
            skip=skip)

    def get_warnings_for_symbols_with_legacy_note_and_no_comment_on_usage(self):
        """Usages of noted legacy symbols must carry a '# legacy' comment."""
        return self._collect_warnings(
            sources=[usage],
            message='{} missing "# legacy"',
            required=[legacy_note],
            excluded=[legacy_usage])

    def get_warnings_for_symbols_with_legacy_note_and_no_usage(self):
        """A 'still referenced' note must match an actual reference."""
        return self._collect_warnings(
            sources=[legacy_note],
            message='{} not referenced but has a comment stating it is',
            required=[legacy_note],
            excluded=[legacy_usage, usage])

View File

@ -0,0 +1,83 @@
import re
import subprocess
import checksymbolslib.br as br
import checksymbolslib.kconfig as kconfig
import checksymbolslib.makefile as makefile
# Per-file-type parser modules; each one provides check_filename() and
# populate_db() with the same signatures.
file_types = [
    kconfig,
    makefile,
]
def get_list_of_files_in_the_repo():
    """Return every path tracked by git in the current repository.

    stderr is captured and discarded; a failing git call simply yields an
    empty list, as before.
    """
    proc = subprocess.run(['git', 'ls-files'],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
    return [line.decode().rstrip() for line in proc.stdout.splitlines() if line]
def get_list_of_files_to_process(all_files):
    """Filter all_files down to the ones accepted by at least one
    file-type handler, skipping ignored directories."""
    selected = []
    for filename in all_files:
        if br.file_belongs_to_an_ignored_diretory(filename):
            continue
        if any(handler.check_filename(filename) for handler in file_types):
            selected.append(filename)
    return selected
def get_list_of_filenames_with_pattern(all_files, exclude_list, pattern):
    """Return the filenames matching the regex pattern (re.search), minus
    the ones listed in exclude_list, preserving input order."""
    matcher = re.compile(pattern)
    return [filename for filename in all_files
            if matcher.search(filename) and filename not in exclude_list]
def read_file(filename):
    """Read filename and return [[lineno, text], ...] with 1-based line
    numbers; undecodable bytes survive via surrogateescape."""
    with open(filename, 'r', errors='surrogateescape') as f:
        return [[lineno, text] for lineno, text in enumerate(f, start=1)]
def cleanup_file_content(file_content_raw):
    """Join backslash-continued lines into logical lines.

    Takes [[lineno, text], ...] as produced by read_file() and returns
    [[lineno, text], ...] where each entry is a complete logical line,
    stripped of its trailing newline, with lineno being the first physical
    line of the (possibly joined) logical line.
    """
    cleaned_up_content = []
    continuation = False
    last_line = None
    first_lineno = None
    for cur_lineno, cur_line in file_content_raw:
        if continuation:
            line = last_line + cur_line
            lineno = first_lineno
        else:
            line = cur_line
            lineno = cur_lineno
        continuation = False
        last_line = None
        first_lineno = None
        clean_line = line.rstrip('\n')
        if clean_line.endswith('\\'):
            # Accumulate: strip the continuation backslash(es) and join
            # with the next physical line.
            continuation = True
            last_line = clean_line.rstrip('\\')
            first_lineno = lineno
            continue
        cleaned_up_content.append([lineno, clean_line])
    # Bug fix: a backslash on the very last physical line used to drop the
    # whole pending logical line; flush it instead of losing content.
    if continuation:
        cleaned_up_content.append([first_lineno, last_line])
    return cleaned_up_content
def populate_db_from_file(db, filename):
    """Parse filename with every file-type handler that accepts it and
    feed the resulting symbol entries into db."""
    content = cleanup_file_content(read_file(filename))
    for handler in file_types:
        if handler.check_filename(filename):
            handler.populate_db(db, filename, content)

View File

@ -0,0 +1,139 @@
import os
import checksymbolslib.br as br
def all_symbols_from(line):
    """Return every BR2_* symbol found on line, ignoring '#' comments."""
    return br.re_kconfig_symbol.findall(br.re_comments.sub('', line))
def handle_definition(db, filename, lineno, line, legacy):
    """Record every symbol on a config line as a (legacy) definition."""
    record = db.add_symbol_legacy_definition if legacy else db.add_symbol_definition
    for symbol in all_symbols_from(line):
        record(symbol, filename, lineno)
def handle_usage(db, filename, lineno, line, legacy):
    """Record every symbol on line as a usage (inside-legacy when the file
    is Config.in.legacy)."""
    record = db.add_symbol_usage_in_legacy if legacy else db.add_symbol_usage
    for symbol in all_symbols_from(line):
        record(symbol, filename, lineno)
def handle_default(db, filename, lineno, line, legacy):
    """Record symbols of a 'default' line.

    A '# legacy' comment on a non-legacy file marks the symbols after the
    'if' conditional as legacy usages instead of normal usages.
    """
    if legacy or not br.re_kconfig_default_legacy_comment.search(line):
        handle_usage(db, filename, lineno, line, legacy)
        return
    # Only the symbols in the conditional part are the legacy references.
    conditional_part = br.re_kconfig_default_before_conditional.sub('', line)
    for symbol in all_symbols_from(conditional_part):
        db.add_symbol_legacy_usage(symbol, filename, lineno)
def handle_select(db, filename, lineno, line, legacy):
    """Record a 'select' line: every symbol counts as a usage, and the
    symbols before any 'if' conditional are also select targets."""
    handle_usage(db, filename, lineno, line, legacy)
    unconditional_part = br.re_kconfig_select_conditional.sub('', line)
    for symbol in all_symbols_from(unconditional_part):
        db.add_symbol_select(symbol, filename, lineno)
# Dispatch table: kconfig line pattern -> handler.  A line may match more
# than one pattern and then runs through every matching handler.
line_type_handlers = {
    br.re_kconfig_config: handle_definition,
    br.re_kconfig_default: handle_default,
    br.re_kconfig_depends: handle_usage,
    br.re_kconfig_if: handle_usage,
    br.re_kconfig_select: handle_select,
    br.re_kconfig_source: handle_usage,
}
def handle_line(db, filename, lineno, line, legacy):
    """Dispatch one kconfig line to every handler whose pattern matches;
    lines without any BR2_* symbol are skipped early."""
    if br.re_kconfig_symbol.search(line) is None:
        return
    for pattern, handler in line_type_handlers.items():
        if pattern.search(line):
            handler(db, filename, lineno, line, legacy)
def handle_config_helper(db, filename, file_content):
    # Two-state machine flagging "config helper" symbols: a config entry
    # that is followed by a 'select' line (before any other config entry)
    # is assumed to exist only to select other symbols, so it is excused
    # from the "defined but not referenced" warning.
    symbol = None
    lineno = None
    state = 'none'
    for cur_lineno, line in file_content:
        if state == 'none':
            # Look for the start of a config/menuconfig entry.
            m = br.re_kconfig_config.search(line)
            if m is not None:
                symbol = m.group(2)
                lineno = cur_lineno
                state = 'config'
            continue
        if state == 'config':
            if br.re_kconfig_select.search(line):
                # First select after the config: record it as a helper and
                # go back to scanning.
                db.add_symbol_helper(symbol, filename, lineno)
                state = 'none'
                continue
            # Another config before any select: track the new entry instead.
            m = br.re_kconfig_config.search(line)
            if m is not None:
                symbol = m.group(2)
                lineno = cur_lineno
            continue
def handle_config_choice(db, filename, file_content):
    """Record every config symbol defined inside a choice...endchoice
    group, so selects against them can be flagged later."""
    inside_choice = False
    for lineno, line in file_content:
        if not inside_choice:
            if br.re_kconfig_choice.search(line):
                inside_choice = True
            continue
        if br.re_kconfig_endchoice.search(line):
            inside_choice = False
            continue
        m = br.re_kconfig_config.search(line)
        if m is not None:
            db.add_symbol_choice(m.group(2), filename, lineno)
def handle_note(db, filename, file_content):
    """Record 'BR2_FOO ... still referenced' comments found inside the
    legacy menu...endmenu section."""
    inside_menu = False
    for lineno, line in file_content:
        if not inside_menu:
            if br.re_menu.search(line):
                inside_menu = True
            continue
        if br.re_endmenu.search(line):
            inside_menu = False
            continue
        m = br.re_legacy_special_comment.search(line)
        if m is not None:
            db.add_symbol_legacy_note(m.group(1), filename, lineno)
def populate_db(db, filename, file_content):
    """Feed one kconfig file into db: per-line symbol handling plus the
    whole-file passes (helpers, choices, and notes for legacy files)."""
    is_legacy = filename.endswith('.legacy')
    for lineno, text in file_content:
        handle_line(db, filename, lineno, text, is_legacy)
    handle_config_helper(db, filename, file_content)
    handle_config_choice(db, filename, file_content)
    if is_legacy:
        handle_note(db, filename, file_content)
def check_filename(filename):
    """True for kconfig files: any basename starting with 'Config.'."""
    return os.path.basename(filename).startswith('Config.')

View File

@ -0,0 +1,100 @@
import checksymbolslib.br as br
def handle_eval(db, filename, lineno, line):
    # Expand the symbols implicitly referenced by a $(eval $(...-package))
    # line, mimicking the Buildroot pkg-* infrastructures.  The order of
    # the elif branches below is significant and mirrors that logic.
    def add_multiple_symbol_usages(package, prefixes=None, suffixes=None):
        # Record prefix+package+suffix as a usage for every combination.
        for prefix in prefixes or ['']:
            for sufix in suffixes or ['']:
                symbol = prefix + package + sufix
                db.add_symbol_usage(symbol, filename, lineno)
    package = br.get_package_from_filename(filename)
    if '$(rootfs)' in line:
        # fs/common.mk: base symbol plus the optional compression suffixes.
        suffixes = [''] + br.suffixes_not_defined_for_all_rootfs_types
        add_multiple_symbol_usages(package, prefixes=[br.rootfs_prefix], suffixes=suffixes)
        return
    if '$(kernel-module)' in line:
        add_multiple_symbol_usages(package, prefixes=[br.package_prefix])
        return
    if '$(barebox-package)' in line:
        add_multiple_symbol_usages(package, prefixes=[br.boot_prefix], suffixes=br.barebox_infra_suffixes)
        return
    if '-package)' not in line:
        # Not a package infrastructure eval; nothing implicit to record.
        return
    if package == 'LINUX':
        # very special case at package/pkg-generic.mk
        add_multiple_symbol_usages('BR2_LINUX_KERNEL')
        return
    # mimic package/pkg-generic.mk and package/pkg-virtual.mk
    if '$(virtual-' in line:
        prefixes = ['BR2_PACKAGE_PROVIDES_', 'BR2_PACKAGE_HAS_']
        if filename.startswith('toolchain/'):
            prefix = br.toolchain_prefix
        else:
            prefix = br.package_prefix
        symbol = prefix + package
        # Virtual packages may legitimately have no config definition.
        db.add_symbol_virtual(symbol, filename, lineno)
        prefixes.append(prefix)
    elif '$(host-virtual-' in line:
        prefixes = ['BR2_PACKAGE_HOST_', 'BR2_PACKAGE_PROVIDES_HOST_', 'BR2_PACKAGE_HAS_HOST_']
    elif '$(host-' in line:
        prefixes = ['BR2_PACKAGE_HOST_']
    elif filename.startswith('boot/'):
        # NOTE: directory checks come before the $(toolchain- check, so a
        # toolchain-package eval under boot/ gets the boot prefix.
        prefixes = [br.boot_prefix]
    elif filename.startswith('toolchain/'):
        prefixes = [br.toolchain_prefix]
    elif '$(toolchain-' in line:
        prefixes = [br.toolchain_prefix]
    else:
        prefixes = [br.package_prefix]
    add_multiple_symbol_usages(package, prefixes=prefixes)
def handle_definition(db, filename, lineno, line, legacy):
    """Record symbols assigned ('=', ':=', '?=') or exported on this
    makefile line as (legacy) definitions."""
    record = db.add_symbol_legacy_definition if legacy else db.add_symbol_definition
    found = br.re_makefile_symbol_attribution.findall(line)
    found += br.re_makefile_symbol_export.findall(line)
    for symbol in found:
        record(symbol, filename, lineno)
def handle_usage(db, filename, lineno, line, legacy):
    """Record $(BR2_*) references on this makefile line; $(eval ...) lines
    are expanded by handle_eval() instead."""
    if br.re_makefile_eval.search(line):
        handle_eval(db, filename, lineno, line)
        return
    record = db.add_symbol_usage_in_legacy if legacy else db.add_symbol_usage
    for symbol in br.re_makefile_symbol_usage.findall(line):
        record(symbol, filename, lineno)
def populate_db(db, filename, file_content):
    """Scan one makefile: strip comments from each logical line, then
    record both definitions and usages found on it."""
    is_legacy = filename.endswith('.legacy')
    for lineno, raw_line in file_content:
        stripped = br.re_comments.sub('', raw_line)
        handle_definition(db, filename, lineno, stripped, is_legacy)
        handle_usage(db, filename, lineno, stripped, is_legacy)
def check_filename(filename):
    """True for makefiles the symbol checker should parse: *.mk, *.mk.in,
    arch/arch.mk.* and a few explicitly named top-level makefiles."""
    if filename in ('Makefile', 'Makefile.legacy', 'package/Makefile.in'):
        return True
    return (filename.endswith('.mk')
            or filename.endswith('.mk.in')
            or filename.startswith('arch/arch.mk.'))

View File

@ -0,0 +1,286 @@
import checksymbolslib.db as m
def test_empty_db():
    # A fresh DB holds no symbols at all.  Note: all assertions in this
    # file compare str(dict), so they rely on dict insertion order.
    db = m.DB()
    assert str(db) == '{}'


def test_one_definition():
    # A single add creates the nested symbol/entry-type/file structure.
    db = m.DB()
    db.add_symbol_definition('BR2_foo', 'foo/Config.in', 7)
    assert str(db) == str({
        'BR2_foo': {'definition': {'foo/Config.in': [7]}},
    })


def test_three_definitions():
    # Repeated adds accumulate line numbers per file; distinct symbols get
    # their own top-level entries.
    db = m.DB()
    db.add_symbol_definition('BR2_foo', 'foo/Config.in', 7)
    db.add_symbol_definition('BR2_foo', 'foo/Config.in', 9)
    db.add_symbol_definition('BR2_bar', 'bar/Config.in', 5)
    assert str(db) == str({
        'BR2_foo': {'definition': {'foo/Config.in': [7, 9]}},
        'BR2_bar': {'definition': {'bar/Config.in': [5]}},
    })


def test_definition_and_usage():
    # Different entry types for the same symbol live side by side.
    db = m.DB()
    db.add_symbol_definition('BR2_foo', 'foo/Config.in', 7)
    db.add_symbol_usage('BR2_foo', 'foo/Config.in', 9)
    assert str(db) == str({
        'BR2_foo': {'definition': {'foo/Config.in': [7]}, 'normal usage': {'foo/Config.in': [9]}},
    })
def test_all_entry_types():
    # Exercise every add_symbol_* wrapper once; str(db) exposes the
    # entries in insertion order under their human-readable labels.
    db = m.DB()
    db.add_symbol_choice('BR2_foo', 'foo/Config.in', 7)
    db.add_symbol_definition('BR2_foo', 'foo/Config.in', 7)
    db.add_symbol_definition('BR2_bar', 'bar/Config.in', 700)
    db.add_symbol_helper('BR2_bar', 'bar/Config.in', 700)
    db.add_symbol_legacy_definition('BR2_baz', 'Config.in.legacy', 7000)
    db.add_symbol_legacy_note('BR2_baz', 'Config.in.legacy', 7001)
    db.add_symbol_legacy_usage('BR2_bar', 'Config.in.legacy', 7001)
    db.add_symbol_select('BR2_bar', 'Config.in.legacy', 7001)
    db.add_symbol_usage('BR2_foo', 'foo/Config.in', 9)
    db.add_symbol_usage_in_legacy('BR2_bar', 'Config.in.legacy', 9)
    db.add_symbol_virtual('BR2_foo', 'foo/Config.in', 7)
    assert str(db) == str({
        'BR2_foo': {
            'part of a choice': {'foo/Config.in': [7]},
            'definition': {'foo/Config.in': [7]},
            'normal usage': {'foo/Config.in': [9]},
            'virtual': {'foo/Config.in': [7]}},
        'BR2_bar': {
            'definition': {'bar/Config.in': [700]},
            'possible config helper': {'bar/Config.in': [700]},
            'legacy usage': {'Config.in.legacy': [7001]},
            'selected': {'Config.in.legacy': [7001]},
            'usage inside legacy': {'Config.in.legacy': [9]}},
        'BR2_baz': {
            'legacy definition': {'Config.in.legacy': [7000]},
            'legacy note': {'Config.in.legacy': [7001]}},
    })
def test_get_symbols_with_pattern():
    # The pattern is a regex applied with search() to each symbol name.
    db = m.DB()
    db.add_symbol_definition('BR2_foo', 'foo/Config.in', 7)
    db.add_symbol_usage('BR2_foo', 'foo/Config.in', 9)
    db.add_symbol_definition('BR2_bar', 'bar/Config.in', 5)
    assert str(db) == str({
        'BR2_foo': {'definition': {'foo/Config.in': [7]}, 'normal usage': {'foo/Config.in': [9]}},
        'BR2_bar': {'definition': {'bar/Config.in': [5]}},
    })
    symbols = db.get_symbols_with_pattern('foo')
    assert str(symbols) == str({
        'BR2_foo': {'definition': {'foo/Config.in': [7]}, 'normal usage': {'foo/Config.in': [9]}},
    })
    # Matching is case-sensitive.
    symbols = db.get_symbols_with_pattern('FOO')
    assert str(symbols) == str({
    })
    symbols = db.get_symbols_with_pattern('foo|FOO')
    assert str(symbols) == str({
        'BR2_foo': {'definition': {'foo/Config.in': [7]}, 'normal usage': {'foo/Config.in': [9]}},
    })
    # search() is unanchored, but '^foo' cannot match 'BR2_foo'.
    symbols = db.get_symbols_with_pattern('^foo')
    assert str(symbols) == str({
    })
    symbols = db.get_symbols_with_pattern('foo|bar')
    assert str(symbols) == str({
        'BR2_foo': {'definition': {'foo/Config.in': [7]}, 'normal usage': {'foo/Config.in': [9]}},
        'BR2_bar': {'definition': {'bar/Config.in': [5]}},
    })
def test_get_warnings_for_choices_selected():
    # A symbol that is both part of a choice and the target of a select
    # is reported at the select location.
    db = m.DB()
    db.add_symbol_choice('BR2_foo', 'foo/Config.in', 1)
    db.add_symbol_choice('BR2_bar', 'bar/Config.in', 1)
    db.add_symbol_select('BR2_foo', 'bar/Config.in', 2)
    assert str(db) == str({
        'BR2_foo': {'part of a choice': {'foo/Config.in': [1]}, 'selected': {'bar/Config.in': [2]}},
        'BR2_bar': {'part of a choice': {'bar/Config.in': [1]}},
    })
    warnings = db.get_warnings_for_choices_selected()
    assert warnings == [
        ('bar/Config.in', 2, 'BR2_foo is part of a "choice" and should not be "select"ed'),
    ]


def test_get_warnings_for_legacy_symbols_being_used():
    # Only normal usages of legacy symbols warn; usages inside
    # Config.in.legacy itself are fine.
    db = m.DB()
    db.add_symbol_legacy_definition('BR2_foo', 'Config.in.legacy', 1)
    db.add_symbol_usage('BR2_foo', 'bar/Config.in', 2)
    db.add_symbol_legacy_definition('BR2_bar', 'Config.in.legacy', 10)
    db.add_symbol_usage_in_legacy('BR2_bar', 'Config.in.legacy', 11)
    assert str(db) == str({
        'BR2_foo': {'legacy definition': {'Config.in.legacy': [1]}, 'normal usage': {'bar/Config.in': [2]}},
        'BR2_bar': {'legacy definition': {'Config.in.legacy': [10]}, 'usage inside legacy': {'Config.in.legacy': [11]}},
    })
    warnings = db.get_warnings_for_legacy_symbols_being_used()
    assert warnings == [
        ('bar/Config.in', 2, 'BR2_foo is a legacy symbol and should not be referenced'),
    ]


def test_get_warnings_for_legacy_symbols_being_defined():
    # Every non-legacy re-definition of a legacy symbol warns.
    db = m.DB()
    db.add_symbol_legacy_definition('BR2_foo', 'Config.in.legacy', 1)
    db.add_symbol_legacy_definition('BR2_bar', 'Config.in.legacy', 10)
    db.add_symbol_definition('BR2_foo', 'foo/Config.in', 7)
    db.add_symbol_definition('BR2_foo', 'foo/Config.in', 8)
    assert str(db) == str({
        'BR2_foo': {'legacy definition': {'Config.in.legacy': [1]}, 'definition': {'foo/Config.in': [7, 8]}},
        'BR2_bar': {'legacy definition': {'Config.in.legacy': [10]}},
    })
    warnings = db.get_warnings_for_legacy_symbols_being_defined()
    assert warnings == [
        ('foo/Config.in', 7, 'BR2_foo is a legacy symbol and should not be redefined'),
        ('foo/Config.in', 8, 'BR2_foo is a legacy symbol and should not be redefined'),
    ]
def test_get_warnings_for_symbols_without_definition():
    # Undefined-but-referenced symbols warn, except the known families
    # (host, rootfs suffixes, command-line, br2-external, barebox,
    # fake virtual packages) and virtual symbols.  Warning order follows
    # symbol insertion order, then usage / legacy usage / usage-in-legacy.
    db = m.DB()
    db.add_symbol_definition('BR2_foo', 'foo/Config.in', 7)
    db.add_symbol_legacy_definition('BR2_bar', 'Config.in.legacy', 10)
    db.add_symbol_virtual('BR2_baz', 'baz/Config.in', 7)
    db.add_symbol_usage('BR2_foo', 'file', 1)
    db.add_symbol_usage('BR2_bar', 'file', 1)
    db.add_symbol_usage('BR2_baz', 'file', 1)
    db.add_symbol_usage('BR2_undef1', 'file', 1)
    db.add_symbol_legacy_usage('BR2_undef2', 'file', 2)
    db.add_symbol_usage_in_legacy('BR2_undef3', 'file', 3)
    db.add_symbol_usage('BR2_undef3', 'another', 1)
    db.add_symbol_legacy_usage('BR2_undef3', 'another', 2)
    db.add_symbol_usage('BR2_PACKAGE_HOST_undef', 'file', 1)
    db.add_symbol_usage('BR2_PACKAGE_HAS_HOST_undef', 'file', 1)
    db.add_symbol_usage('BR2_TARGET_ROOTFS_undef_XZ', 'file', 1)
    db.add_symbol_usage('BR2_GRAPH_ALT', 'file', 1)
    db.add_symbol_usage('BR2_EXTERNAL', 'file', 1)
    db.add_symbol_usage('BR2_TARGET_BAREBOX_AUX_BAREBOXENV', 'file', 1)
    db.add_symbol_usage('BR2_PACKAGE_HAS_TOOLCHAIN_BUILDROOT', 'file', 1)
    assert str(db) == str({
        'BR2_foo': {'definition': {'foo/Config.in': [7]}, 'normal usage': {'file': [1]}},
        'BR2_bar': {'legacy definition': {'Config.in.legacy': [10]}, 'normal usage': {'file': [1]}},
        'BR2_baz': {'virtual': {'baz/Config.in': [7]}, 'normal usage': {'file': [1]}},
        'BR2_undef1': {'normal usage': {'file': [1]}},
        'BR2_undef2': {'legacy usage': {'file': [2]}},
        'BR2_undef3': {'usage inside legacy': {'file': [3]}, 'normal usage': {'another': [1]}, 'legacy usage': {'another': [2]}},
        'BR2_PACKAGE_HOST_undef': {'normal usage': {'file': [1]}},
        'BR2_PACKAGE_HAS_HOST_undef': {'normal usage': {'file': [1]}},
        'BR2_TARGET_ROOTFS_undef_XZ': {'normal usage': {'file': [1]}},
        'BR2_GRAPH_ALT': {'normal usage': {'file': [1]}},
        'BR2_EXTERNAL': {'normal usage': {'file': [1]}},
        'BR2_TARGET_BAREBOX_AUX_BAREBOXENV': {'normal usage': {'file': [1]}},
        'BR2_PACKAGE_HAS_TOOLCHAIN_BUILDROOT': {'normal usage': {'file': [1]}},
    })
    warnings = db.get_warnings_for_symbols_without_definition()
    assert warnings == [
        ('file', 1, 'BR2_undef1 referenced but not defined'),
        ('file', 2, 'BR2_undef2 referenced but not defined'),
        ('another', 1, 'BR2_undef3 referenced but not defined'),
        ('another', 2, 'BR2_undef3 referenced but not defined'),
        ('file', 3, 'BR2_undef3 referenced but not defined'),
    ]
def test_get_warnings_for_symbols_without_usage():
    # Defined-but-unreferenced symbols warn, except the known
    # source-code-only / host-variant symbols, config helpers and choice
    # members.  Any kind of usage (normal, legacy, in-legacy) is enough.
    db = m.DB()
    db.add_symbol_definition('BR2_a', 'a/Config.in', 1)
    db.add_symbol_definition('BR2_a', 'a/Config.in', 2)
    db.add_symbol_usage('BR2_a', 'file', 1)
    db.add_symbol_usage('BR2_a', 'file', 2)
    db.add_symbol_definition('BR2_b', 'b/Config.in', 2)
    db.add_symbol_usage_in_legacy('BR2_b', 'file', 1)
    db.add_symbol_definition('BR2_c', 'c/Config.in', 2)
    db.add_symbol_legacy_usage('BR2_c', 'file', 1)
    db.add_symbol_definition('BR2_USE_CCACHE', 'file', 1)
    db.add_symbol_definition('BR2_PACKAGE_SKELETON', 'file', 1)
    db.add_symbol_definition('BR2_d', 'd/Config.in', 2)
    db.add_symbol_helper('BR2_d', 'd/Config.in', 2)
    db.add_symbol_definition('BR2_e', 'e/Config.in', 2)
    db.add_symbol_choice('BR2_e', 'e/Config.in', 2)
    db.add_symbol_definition('BR2_f', 'f/Config.in', 2)
    db.add_symbol_definition('BR2_g', 'g/Config.in', 2)
    db.add_symbol_definition('BR2_g', 'g/Config.in', 3)
    db.add_symbol_legacy_definition('BR2_h', 'Config.in.legacy', 1)
    db.add_symbol_usage('BR2_h', 'file', 2)
    db.add_symbol_usage('BR2_h', 'file', 3)
    db.add_symbol_legacy_definition('BR2_i', 'Config.in.legacy', 2)
    db.add_symbol_usage_in_legacy('BR2_i', 'file', 2)
    db.add_symbol_legacy_definition('BR2_j', 'Config.in.legacy', 2)
    db.add_symbol_legacy_usage('BR2_j', 'file', 2)
    db.add_symbol_legacy_definition('BR2_k', 'Config.in.legacy', 2)
    db.add_symbol_usage('BR2_k', 'file', 5)
    db.add_symbol_usage_in_legacy('BR2_k', 'file', 6)
    db.add_symbol_legacy_usage('BR2_k', 'file', 7)
    db.add_symbol_legacy_definition('BR2_l', 'Config.in.legacy', 2)
    assert str(db) == str({
        'BR2_a': {'definition': {'a/Config.in': [1, 2]}, 'normal usage': {'file': [1, 2]}},
        'BR2_b': {'definition': {'b/Config.in': [2]}, 'usage inside legacy': {'file': [1]}},
        'BR2_c': {'definition': {'c/Config.in': [2]}, 'legacy usage': {'file': [1]}},
        'BR2_USE_CCACHE': {'definition': {'file': [1]}},
        'BR2_PACKAGE_SKELETON': {'definition': {'file': [1]}},
        'BR2_d': {'definition': {'d/Config.in': [2]}, 'possible config helper': {'d/Config.in': [2]}},
        'BR2_e': {'definition': {'e/Config.in': [2]}, 'part of a choice': {'e/Config.in': [2]}},
        'BR2_f': {'definition': {'f/Config.in': [2]}},
        'BR2_g': {'definition': {'g/Config.in': [2, 3]}},
        'BR2_h': {'legacy definition': {'Config.in.legacy': [1]}, 'normal usage': {'file': [2, 3]}},
        'BR2_i': {'legacy definition': {'Config.in.legacy': [2]}, 'usage inside legacy': {'file': [2]}},
        'BR2_j': {'legacy definition': {'Config.in.legacy': [2]}, 'legacy usage': {'file': [2]}},
        'BR2_k': {
            'legacy definition': {'Config.in.legacy': [2]},
            'normal usage': {'file': [5]},
            'usage inside legacy': {'file': [6]},
            'legacy usage': {'file': [7]}},
        'BR2_l': {'legacy definition': {'Config.in.legacy': [2]}},
    })
    warnings = db.get_warnings_for_symbols_without_usage()
    assert warnings == [
        ('f/Config.in', 2, 'BR2_f defined but not referenced'),
        ('g/Config.in', 2, 'BR2_g defined but not referenced'),
        ('g/Config.in', 3, 'BR2_g defined but not referenced'),
        ('Config.in.legacy', 2, 'BR2_l defined but not referenced'),
    ]
def test_get_warnings_for_symbols_with_legacy_note_and_no_comment_on_usage():
    """Only normal usages lacking a '# legacy' comment generate warnings."""
    symbol_db = m.DB()
    symbol_db.add_symbol_legacy_note('BR2_foo', 'Config.in.legacy', 1)
    symbol_db.add_symbol_legacy_usage('BR2_foo', 'package/bar/Config.in', 2)
    symbol_db.add_symbol_legacy_note('BR2_baz', 'Config.in.legacy', 7001)
    symbol_db.add_symbol_usage('BR2_baz', 'package/foo/Config.in', 1)
    assert str(symbol_db) == str({
        'BR2_foo': {'legacy note': {'Config.in.legacy': [1]}, 'legacy usage': {'package/bar/Config.in': [2]}},
        'BR2_baz': {'legacy note': {'Config.in.legacy': [7001]}, 'normal usage': {'package/foo/Config.in': [1]}},
    })
    result = symbol_db.get_warnings_for_symbols_with_legacy_note_and_no_comment_on_usage()
    assert result == [
        ('package/foo/Config.in', 1, 'BR2_baz missing "# legacy"'),
    ]
def test_get_warnings_for_symbols_with_legacy_note_and_no_usage():
    """Legacy notes claiming a reference that does not actually exist warn."""
    symbol_db = m.DB()
    symbol_db.add_symbol_legacy_note('BR2_foo', 'Config.in.legacy', 1)
    symbol_db.add_symbol_legacy_usage('BR2_foo', 'package/bar/Config.in', 2)
    symbol_db.add_symbol_legacy_note('BR2_bar', 'Config.in.legacy', 1)
    symbol_db.add_symbol_usage_in_legacy('BR2_bar', 'Config.in.legacy', 7001)
    symbol_db.add_symbol_legacy_note('BR2_baz', 'Config.in.legacy', 7001)
    symbol_db.add_symbol_legacy_note('BR2_no_comment', 'Config.in.legacy', 1)
    symbol_db.add_symbol_usage('BR2_no_comment', 'package/bar/Config.in', 2)
    assert str(symbol_db) == str({
        'BR2_foo': {'legacy note': {'Config.in.legacy': [1]}, 'legacy usage': {'package/bar/Config.in': [2]}},
        'BR2_bar': {'legacy note': {'Config.in.legacy': [1]}, 'usage inside legacy': {'Config.in.legacy': [7001]}},
        'BR2_baz': {'legacy note': {'Config.in.legacy': [7001]}},
        'BR2_no_comment': {'legacy note': {'Config.in.legacy': [1]}, 'normal usage': {'package/bar/Config.in': [2]}},
    })
    result = symbol_db.get_warnings_for_symbols_with_legacy_note_and_no_usage()
    assert result == [
        ('Config.in.legacy', 1, 'BR2_bar not referenced but has a comment stating it is'),
        ('Config.in.legacy', 7001, 'BR2_baz not referenced but has a comment stating it is'),
    ]

View File

@ -0,0 +1,152 @@
import os
import pytest
import tempfile
import checksymbolslib.file as m
def test_get_list_of_files_in_the_repo():
    """Smoke test: the repo listing contains well-known files and is large."""
    repo_files = m.get_list_of_files_in_the_repo()
    assert 'Makefile' in repo_files
    assert 'package/Config.in' in repo_files
    assert len(repo_files) > 1000
# Cases for test_get_list_of_files_to_process: (testname, all_files, expected).
get_list_of_files_to_process = [
    ('unknown file type',
     ['a/file/Config.in',
      'another/file.mk',
      'unknown/file/type'],
     ['a/file/Config.in',
      'another/file.mk']),
    ('runtime test infra fixtures',
     ['a/file/Config.in',
      'support/testing/a/broken/Config.in',
      'another/file.mk'],
     ['a/file/Config.in',
      'another/file.mk']),
]
@pytest.mark.parametrize('testname,all_files,expected', get_list_of_files_to_process)
def test_get_list_of_files_to_process(testname, all_files, expected):
    """Filtering of the full file list down to the files worth processing."""
    result = m.get_list_of_files_to_process(all_files)
    assert result == expected
# Cases for test_get_list_of_filenames_with_pattern:
# (testname, all_files, files_to_process, pattern, expected).
get_list_of_filenames_with_pattern = [
    ('ignored directories',
     ['a/file/Config.in',
      'support/testing/a/broken/file/Config.in',
      'not/found.mk',
      'another/file.mk'],
     ['a/file/Config.in',
      'not/found.mk',
      'another/file.mk'],
     'file',
     ['support/testing/a/broken/file/Config.in']),
    ('processed files',
     ['a/file/Config.in',
      'not/found.mk',
      'another/file.mk'],
     [],
     'file',
     ['a/file/Config.in',
      'another/file.mk']),
    ('case sensitive',
     ['a/file/Config.in',
      'not/found.mk',
      'another/file.mk'],
     [],
     'FILE',
     []),
    ('or',
     ['a/file/Config.in',
      'not/found.mk',
      'another/file.mk'],
     [],
     'file|FILE',
     ['a/file/Config.in',
      'another/file.mk']),
    ('complex regexp',
     ['a/file/Config.in',
      'not/found.mk',
      'another/file.mk'],
     [],
     '^n[oO]+t.*mk$',
     ['not/found.mk']),
]
@pytest.mark.parametrize('testname,all_files,files_to_process,pattern,expected', get_list_of_filenames_with_pattern)
def test_get_list_of_filenames_with_pattern(testname, all_files, files_to_process, pattern, expected):
    """Regex search over filenames not already queued for processing."""
    result = m.get_list_of_filenames_with_pattern(all_files, files_to_process, pattern)
    assert result == expected
# Cases for test_read_file: (testname, filename, content, expected);
# expected is a list of [lineno, raw-line] pairs, whitespace preserved.
read_file = [
    ('indent',
     'file1',
     ' content1\n'
     '\t# comment1',
     [[1, ' content1\n'],
      [2, '\t# comment1']]),
    ('trailing space',
     'file2',
     'content2 \n'
     '# comment2\t\n',
     [[1, 'content2 \n'],
      [2, '# comment2\t\n']]),
    ('empty line',
     'file3',
     '\n'
     '\n',
     [[1, '\n'],
      [2, '\n']]),
    ('missing newline at EOF',
     'file4',
     '\n'
     ' text\t',
     [[1, '\n'],
      [2, ' text\t']]),
]
# BUG FIX: the parametrize spec contained a doubled comma
# ('testname,filename,content,,expected'), i.e. an empty argname that does
# not match the 4 test-function parameters and makes pytest fail collection.
@pytest.mark.parametrize('testname,filename,content,expected', read_file)
def test_read_file(testname, filename, content, expected):
    """read_file() returns [lineno, raw line] pairs, preserving whitespace."""
    with tempfile.TemporaryDirectory(suffix='-checksymbolslib-test-file') as workdir:
        full_filename = os.path.join(workdir, filename)
        # Write bytes so the content reaches the file exactly as specified.
        with open(full_filename, 'wb') as f:
            f.write(content.encode())
        read_file_content = m.read_file(full_filename)
        assert read_file_content == expected
# Cases for test_cleanup_file_content: (testname, file_content_raw, expected).
# Newlines are stripped and backslash-continuations are joined into one entry.
cleanup_file_content = [
    ('empty file',
     [],
     []),
    ('empty line',
     [[5, '\n']],
     [[5, '']]),
    ('trailing space',
     [[3, ' \n']],
     [[3, ' ']]),
    ('trailing tab',
     [[3, '\t\n']],
     [[3, '\t']]),
    ('1 continuation',
     [[1, 'foo \\\n'],
      [2, 'bar\n']],
     [[1, 'foo bar']]),
    ('2 continuations',
     [[1, 'foo \\\n'],
      [2, 'bar \\\n'],
      [3, 'baz\n']],
     [[1, 'foo bar baz']]),
]
@pytest.mark.parametrize('testname,file_content_raw,expected', cleanup_file_content)
def test_cleanup_file_content(testname, file_content_raw, expected):
    """Newline stripping and continuation-line joining."""
    result = m.cleanup_file_content(file_content_raw)
    assert result == expected

View File

@ -0,0 +1,438 @@
import pytest
from unittest.mock import Mock
from unittest.mock import call
from checksymbolslib.test_util import assert_db_calls
import checksymbolslib.kconfig as m
# Cases for test_all_symbols_from: (testname, line, expected).
# Only BR2_-prefixed symbols count; comments are ignored; duplicates kept.
all_symbols_from = [
    ('no prefix',
     'config PACKAGE_FOO',
     []),
    ('simple',
     'config BR2_PACKAGE_FOO',
     ['BR2_PACKAGE_FOO']),
    ('ignore comment',
     'config BR2_PACKAGE_FOO # BR2_PACKAGE_BAR',
     ['BR2_PACKAGE_FOO']),
    ('ignore whitespace',
     '\tconfig BR2_PACKAGE_FOO\t # BR2_PACKAGE_BAR',
     ['BR2_PACKAGE_FOO']),
    ('2 occurrences',
     '\tdefault BR2_PACKAGE_FOO_BAR if BR2_PACKAGE_FOO_BAR != ""',
     ['BR2_PACKAGE_FOO_BAR', 'BR2_PACKAGE_FOO_BAR']),
]
@pytest.mark.parametrize('testname,line,expected', all_symbols_from)
def test_all_symbols_from(testname, line, expected):
    """Extraction of BR2_* symbol names from a single line."""
    result = m.all_symbols_from(line)
    assert result == expected
# Cases for test_handle_definition:
# (testname, filename, lineno, line, legacy, expected_calls).
handle_definition = [
    ('config',
     'package/foo/Config.in',
     5,
     'config BR2_PACKAGE_FOO',
     False,
     {'add_symbol_definition': [call('BR2_PACKAGE_FOO', 'package/foo/Config.in', 5)]}),
    ('ignore comment',
     'package/foo/Config.in',
     5,
     'config BR2_PACKAGE_FOO # BR2_PACKAGE_BAR',
     False,
     {'add_symbol_definition': [call('BR2_PACKAGE_FOO', 'package/foo/Config.in', 5)]}),
    ('ignore whitespace',
     'package/foo/Config.in',
     5,
     '\tconfig BR2_PACKAGE_FOO\t # BR2_PACKAGE_BAR',
     False,
     {'add_symbol_definition': [call('BR2_PACKAGE_FOO', 'package/foo/Config.in', 5)]}),
    ('menuconfig',
     'package/gd/Config.in',
     1,
     'menuconfig BR2_PACKAGE_GD',
     False,
     {'add_symbol_definition': [call('BR2_PACKAGE_GD', 'package/gd/Config.in', 1)]}),
    ('menu',
     'package/Config.in',
     100,
     'menu "Database"',
     False,
     {}),
    ('legacy config',
     'Config.in.legacy',
     50,
     'config BR2_PACKAGE_FOO',
     True,
     {'add_symbol_legacy_definition': [call('BR2_PACKAGE_FOO', 'Config.in.legacy', 50)]}),
]
@pytest.mark.parametrize('testname,filename,lineno,line,legacy,expected_calls', handle_definition)
def test_handle_definition(testname, filename, lineno, line, legacy, expected_calls):
    """config/menuconfig lines are recorded as normal or legacy definitions."""
    mock_db = Mock()
    m.handle_definition(mock_db, filename, lineno, line, legacy)
    assert_db_calls(mock_db, expected_calls)
# Cases for test_handle_usage:
# (testname, filename, lineno, line, legacy, expected_calls).
handle_usage = [
    ('default with comparison',
     'package/openblas/Config.in',
     60,
     '\tdefault y if BR2_PACKAGE_OPENBLAS_DEFAULT_TARGET != ""',
     False,
     {'add_symbol_usage': [call('BR2_PACKAGE_OPENBLAS_DEFAULT_TARGET', 'package/openblas/Config.in', 60)]}),
    ('default with logical operators',
     'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options',
     47,
     '\tdefault y if BR2_i386 && !BR2_x86_i486 && !BR2_x86_i586 && !BR2_x86_x1000 && !BR2_x86_pentium_mmx && !BR2_x86_geode '
     '&& !BR2_x86_c3 && !BR2_x86_winchip_c6 && !BR2_x86_winchip2',
     False,
     {'add_symbol_usage': [
         call('BR2_i386', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47),
         call('BR2_x86_c3', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47),
         call('BR2_x86_geode', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47),
         call('BR2_x86_i486', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47),
         call('BR2_x86_i586', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47),
         call('BR2_x86_pentium_mmx', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47),
         call('BR2_x86_winchip2', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47),
         call('BR2_x86_winchip_c6', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47),
         call('BR2_x86_x1000', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47)]}),
    ('legacy depends on',
     'Config.in.legacy',
     3000,
     '\tdepends on BR2_LINUX_KERNEL',
     True,
     {'add_symbol_usage_in_legacy': [call('BR2_LINUX_KERNEL', 'Config.in.legacy', 3000)]}),
    ('legacy if',
     'Config.in.legacy',
     97,
     'if !BR2_SKIP_LEGACY',
     True,
     {'add_symbol_usage_in_legacy': [call('BR2_SKIP_LEGACY', 'Config.in.legacy', 97)]}),
    ('source',
     'system/Config.in',
     152,
     'source "$BR2_BASE_DIR/.br2-external.in.init"',
     False,
     {'add_symbol_usage': [call('BR2_BASE_DIR', 'system/Config.in', 152)]}),
]
@pytest.mark.parametrize('testname,filename,lineno,line,legacy,expected_calls', handle_usage)
def test_handle_usage(testname, filename, lineno, line, legacy, expected_calls):
    """Symbols referenced in expressions and sourced paths count as usages."""
    mock_db = Mock()
    m.handle_usage(mock_db, filename, lineno, line, legacy)
    assert_db_calls(mock_db, expected_calls)
# Cases for test_handle_default:
# (testname, filename, lineno, line, legacy, expected_calls).
# A trailing '# legacy' comment turns a usage into a legacy usage.
handle_default = [
    ('default with comparison',
     'package/openblas/Config.in',
     60,
     '\tdefault y if BR2_PACKAGE_OPENBLAS_DEFAULT_TARGET != ""',
     False,
     {'add_symbol_usage': [call('BR2_PACKAGE_OPENBLAS_DEFAULT_TARGET', 'package/openblas/Config.in', 60)]}),
    ('default with logical operators',
     'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options',
     47,
     '\tdefault y if BR2_i386 && !BR2_x86_i486 && !BR2_x86_i586 && !BR2_x86_x1000 && !BR2_x86_pentium_mmx && !BR2_x86_geode '
     '&& !BR2_x86_c3 && !BR2_x86_winchip_c6 && !BR2_x86_winchip2',
     False,
     {'add_symbol_usage': [
         call('BR2_i386', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47),
         call('BR2_x86_c3', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47),
         call('BR2_x86_geode', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47),
         call('BR2_x86_i486', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47),
         call('BR2_x86_i586', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47),
         call('BR2_x86_pentium_mmx', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47),
         call('BR2_x86_winchip2', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47),
         call('BR2_x86_winchip_c6', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47),
         call('BR2_x86_x1000', 'toolchain/toolchain-external/toolchain-external-bootlin/Config.in.options', 47)]}),
    ('legacy default',
     'Config.in.legacy',
     3000,
     'default y if BR2_PACKAGE_REFPOLICY_POLICY_VERSION != ""',
     True,
     {'add_symbol_usage_in_legacy': [call('BR2_PACKAGE_REFPOLICY_POLICY_VERSION', 'Config.in.legacy', 3000)]}),
    ('legacy handling on package',
     'package/uboot-tools/Config.in.host',
     105,
     '\tdefault BR2_TARGET_UBOOT_BOOT_SCRIPT_SOURCE if BR2_TARGET_UBOOT_BOOT_SCRIPT_SOURCE != "" # legacy',
     False,
     {'add_symbol_legacy_usage': [call('BR2_TARGET_UBOOT_BOOT_SCRIPT_SOURCE', 'package/uboot-tools/Config.in.host', 105)]}),
    ('default on package',
     'package/uboot-tools/Config.in.host',
     105,
     '\tdefault BR2_TARGET_UBOOT_BOOT_SCRIPT_SOURCE if BR2_TARGET_UBOOT_BOOT_SCRIPT_SOURCE != ""',
     False,
     {'add_symbol_usage': [
         call('BR2_TARGET_UBOOT_BOOT_SCRIPT_SOURCE', 'package/uboot-tools/Config.in.host', 105),
         call('BR2_TARGET_UBOOT_BOOT_SCRIPT_SOURCE', 'package/uboot-tools/Config.in.host', 105)]}),
]
@pytest.mark.parametrize('testname,filename,lineno,line,legacy,expected_calls', handle_default)
def test_handle_default(testname, filename, lineno, line, legacy, expected_calls):
    """default lines record usages; '# legacy' marks them as legacy usage."""
    mock_db = Mock()
    m.handle_default(mock_db, filename, lineno, line, legacy)
    assert_db_calls(mock_db, expected_calls)
# Cases for test_handle_select:
# (testname, filename, lineno, line, legacy, expected_calls).
# A select records both a select and a usage for every symbol on the line.
handle_select = [
    ('select with comparison',
     'package/bcusdk/Config.in',
     6,
     '\tselect BR2_PACKAGE_ARGP_STANDALONE if BR2_TOOLCHAIN_USES_UCLIBC || BR2_TOOLCHAIN_USES_MUSL',
     False,
     {'add_symbol_select': [call('BR2_PACKAGE_ARGP_STANDALONE', 'package/bcusdk/Config.in', 6)],
      'add_symbol_usage': [
          call('BR2_PACKAGE_ARGP_STANDALONE', 'package/bcusdk/Config.in', 6),
          call('BR2_TOOLCHAIN_USES_UCLIBC', 'package/bcusdk/Config.in', 6),
          call('BR2_TOOLCHAIN_USES_MUSL', 'package/bcusdk/Config.in', 6)]}),
    ('legacy select',
     'Config.in.legacy',
     100,
     '\tselect BR2_PACKAGE_WPA_SUPPLICANT_DBUS if BR2_TOOLCHAIN_HAS_THREADS',
     True,
     {'add_symbol_select': [call('BR2_PACKAGE_WPA_SUPPLICANT_DBUS', 'Config.in.legacy', 100)],
      'add_symbol_usage_in_legacy': [
          call('BR2_PACKAGE_WPA_SUPPLICANT_DBUS', 'Config.in.legacy', 100),
          call('BR2_TOOLCHAIN_HAS_THREADS', 'Config.in.legacy', 100)]}),
]
@pytest.mark.parametrize('testname,filename,lineno,line,legacy,expected_calls', handle_select)
def test_handle_select(testname, filename, lineno, line, legacy, expected_calls):
    """select lines record a select plus usages of every referenced symbol."""
    mock_db = Mock()
    m.handle_select(mock_db, filename, lineno, line, legacy)
    assert_db_calls(mock_db, expected_calls)
# Cases for test_handle_line:
# (testname, filename, lineno, line, legacy, expected_calls).
# Comment-only lines (and commented-out keywords) must produce no DB calls.
handle_line = [
    ('select with comparison',
     'package/bcusdk/Config.in',
     6,
     '\tselect BR2_PACKAGE_ARGP_STANDALONE if BR2_TOOLCHAIN_USES_UCLIBC || BR2_TOOLCHAIN_USES_MUSL',
     False,
     {'add_symbol_select': [call('BR2_PACKAGE_ARGP_STANDALONE', 'package/bcusdk/Config.in', 6)],
      'add_symbol_usage': [
          call('BR2_PACKAGE_ARGP_STANDALONE', 'package/bcusdk/Config.in', 6),
          call('BR2_TOOLCHAIN_USES_UCLIBC', 'package/bcusdk/Config.in', 6),
          call('BR2_TOOLCHAIN_USES_MUSL', 'package/bcusdk/Config.in', 6)]}),
    ('legacy select',
     'Config.in.legacy',
     100,
     '\tselect BR2_PACKAGE_WPA_SUPPLICANT_DBUS if BR2_TOOLCHAIN_HAS_THREADS',
     True,
     {'add_symbol_select': [call('BR2_PACKAGE_WPA_SUPPLICANT_DBUS', 'Config.in.legacy', 100)],
      'add_symbol_usage_in_legacy': [
          call('BR2_PACKAGE_WPA_SUPPLICANT_DBUS', 'Config.in.legacy', 100),
          call('BR2_TOOLCHAIN_HAS_THREADS', 'Config.in.legacy', 100)]}),
    ('comment with symbol',
     'Config.in',
     6,
     '\tselect # BR2_PACKAGE_ARGP_STANDALONE if BR2_TOOLCHAIN_USES_UCLIBC || BR2_TOOLCHAIN_USES_MUSL',
     False,
     {}),
    ('comment',
     'Config.in',
     6,
     '# just a comment',
     False,
     {}),
]
@pytest.mark.parametrize('testname,filename,lineno,line,legacy,expected_calls', handle_line)
def test_handle_line(testname, filename, lineno, line, legacy, expected_calls):
    """Per-line dispatch; comment-only lines cause no DB calls."""
    mock_db = Mock()
    m.handle_line(mock_db, filename, lineno, line, legacy)
    assert_db_calls(mock_db, expected_calls)
# Cases for test_handle_config_helper:
# (testname, filename, file_content, expected_calls).
# A config immediately followed by select line(s) is a "config helper".
handle_config_helper = [
    ('no select',
     'package/foo/Config.in',
     [[5, 'config BR2_PACKAGE_FOO']],
     {}),
    ('select',
     'package/foo/Config.in',
     [[5, 'config BR2_PACKAGE_FOO'],
      [6, '\tselect BR2_PACKAGE_BAR']],
     {'add_symbol_helper': [call('BR2_PACKAGE_FOO', 'package/foo/Config.in', 5)]}),
    ('ignore comment',
     'package/foo/Config.in',
     [[5, 'config BR2_PACKAGE_FOO # BR2_PACKAGE_BAR'],
      [6, '\tselect BR2_PACKAGE_BAR # BR2_PACKAGE_FOO']],
     {'add_symbol_helper': [call('BR2_PACKAGE_FOO', 'package/foo/Config.in', 5)]}),
    ('correct symbol',
     'package/foo/Config.in',
     [[5, 'config BR2_PACKAGE_FOO'],
      [6, 'config BR2_PACKAGE_BAR'],
      [7, '\tselect BR2_PACKAGE_BAZ']],
     {'add_symbol_helper': [call('BR2_PACKAGE_BAR', 'package/foo/Config.in', 6)]}),
    ('2 selects',
     'package/foo/Config.in',
     [[5, 'config BR2_PACKAGE_FOO'],
      [6, '\tselect BR2_PACKAGE_BAR'],
      [7, ' select BR2_PACKAGE_BAR']],
     {'add_symbol_helper': [call('BR2_PACKAGE_FOO', 'package/foo/Config.in', 5)]}),
]
@pytest.mark.parametrize('testname,filename,file_content,expected_calls', handle_config_helper)
def test_handle_config_helper(testname, filename, file_content, expected_calls):
    """configs directly followed by select lines are flagged as helpers."""
    mock_db = Mock()
    m.handle_config_helper(mock_db, filename, file_content)
    assert_db_calls(mock_db, expected_calls)
# Cases for test_handle_config_choice:
# (testname, filename, file_content, expected_calls).
# Only configs between choice/endchoice are recorded as choice members.
handle_config_choice = [
    ('no choice',
     'package/foo/Config.in',
     [[5, 'config BR2_PACKAGE_FOO']],
     {}),
    ('after',
     'package/foo/Config.in',
     [[3, 'choice'],
      [4, '\tprompt "your choice"'],
      [5, 'config BR2_PACKAGE_FOO'],
      [6, 'config BR2_PACKAGE_BAR'],
      [10, 'endchoice'],
      [19, 'config BR2_PACKAGE_BAZ']],
     {'add_symbol_choice': [
         call('BR2_PACKAGE_FOO', 'package/foo/Config.in', 5),
         call('BR2_PACKAGE_BAR', 'package/foo/Config.in', 6)]}),
    ('before',
     'package/foo/Config.in',
     [[1, 'config BR2_PACKAGE_BAZ'],
      [3, 'choice'],
      [4, '\tprompt "your choice"'],
      [5, 'config BR2_PACKAGE_FOO'],
      [6, 'config BR2_PACKAGE_BAR'],
      [10, 'endchoice']],
     {'add_symbol_choice': [
         call('BR2_PACKAGE_FOO', 'package/foo/Config.in', 5),
         call('BR2_PACKAGE_BAR', 'package/foo/Config.in', 6)]}),
]
@pytest.mark.parametrize('testname,filename,file_content,expected_calls', handle_config_choice)
def test_handle_config_choice(testname, filename, file_content, expected_calls):
    """configs inside a choice/endchoice block become choice members."""
    mock_db = Mock()
    m.handle_config_choice(mock_db, filename, file_content)
    assert_db_calls(mock_db, expected_calls)
# Cases for test_handle_note: (testname, filename, file_content, expected_calls).
# '# Note: ... still referenced' comments count only inside the legacy menu.
handle_note = [
    ('example',
     'Config.in.legacy',
     [[51, '# # Note: BR2_FOO_1 is still referenced from package/foo/Config.in']],
     {}),
    ('ok',
     'Config.in.legacy',
     [[112, 'menu "Legacy config options"'],
      [2132, '# Note: BR2_PACKAGE_FOO is still referenced from package/foo/Config.in'],
      [4958, 'endmenu']],
     {'add_symbol_legacy_note': [call('BR2_PACKAGE_FOO', 'Config.in.legacy', 2132)]}),
    ('before and after',
     'Config.in.legacy',
     [[100, '# Note: BR2_PACKAGE_BAR is still referenced from package/foo/Config.in'],
      [112, 'menu "Legacy config options"'],
      [2132, '# Note: BR2_PACKAGE_FOO is still referenced from package/foo/Config.in'],
      [4958, 'endmenu'],
      [5000, '# Note: BR2_PACKAGE_BAR is still referenced from package/foo/Config.in']],
     {'add_symbol_legacy_note': [call('BR2_PACKAGE_FOO', 'Config.in.legacy', 2132)]}),
]
@pytest.mark.parametrize('testname,filename,file_content,expected_calls', handle_note)
def test_handle_note(testname, filename, file_content, expected_calls):
    """'# Note: … still referenced' comments, only inside the legacy menu."""
    mock_db = Mock()
    m.handle_note(mock_db, filename, file_content)
    assert_db_calls(mock_db, expected_calls)
# Cases for test_populate_db: (testname, filename, file_content, expected_calls).
# End-to-end kconfig processing: definitions, usages, helpers, choices, notes.
populate_db = [
    ('legacy',
     'Config.in.legacy',
     [[112, 'menu "Legacy config options"'],
      [2100, 'config BR2_PACKAGE_FOO'],
      [2101, '\tselect BR2_PACKAGE_BAR'],
      [2132, '# Note: BR2_PACKAGE_FOO is still referenced from package/foo/Config.in'],
      [4958, 'endmenu']],
     {'add_symbol_legacy_note': [call('BR2_PACKAGE_FOO', 'Config.in.legacy', 2132)],
      'add_symbol_helper': [call('BR2_PACKAGE_FOO', 'Config.in.legacy', 2100)],
      'add_symbol_legacy_definition': [call('BR2_PACKAGE_FOO', 'Config.in.legacy', 2100)],
      'add_symbol_usage_in_legacy': [call('BR2_PACKAGE_BAR', 'Config.in.legacy', 2101)],
      'add_symbol_select': [call('BR2_PACKAGE_BAR', 'Config.in.legacy', 2101)]}),
    ('normal',
     'package/foo/Config.in',
     [[1, 'config BR2_PACKAGE_BAZ'],
      [3, 'choice'],
      [4, '\tprompt "your choice"'],
      [5, 'config BR2_PACKAGE_FOO'],
      [6, 'config BR2_PACKAGE_BAR'],
      [7, '\t select BR2_PACKAGE_FOO_BAR'],
      [10, 'endchoice']],
     {'add_symbol_choice': [
         call('BR2_PACKAGE_FOO', 'package/foo/Config.in', 5),
         call('BR2_PACKAGE_BAR', 'package/foo/Config.in', 6)],
      'add_symbol_usage': [
          call('BR2_PACKAGE_FOO_BAR', 'package/foo/Config.in', 7)],
      'add_symbol_select': [
          call('BR2_PACKAGE_FOO_BAR', 'package/foo/Config.in', 7)],
      'add_symbol_definition': [
          call('BR2_PACKAGE_BAZ', 'package/foo/Config.in', 1),
          call('BR2_PACKAGE_FOO', 'package/foo/Config.in', 5),
          call('BR2_PACKAGE_BAR', 'package/foo/Config.in', 6)],
      'add_symbol_helper': [
          call('BR2_PACKAGE_BAR', 'package/foo/Config.in', 6)]}),
]
@pytest.mark.parametrize('testname,filename,file_content,expected_calls', populate_db)
def test_populate_db(testname, filename, file_content, expected_calls):
    """Whole-file kconfig processing combines all per-line handlers."""
    mock_db = Mock()
    m.populate_db(mock_db, filename, file_content)
    assert_db_calls(mock_db, expected_calls)
# Cases for test_check_filename: (testname, filename, expected).
# True for kconfig-like files; False for makefiles and patches.
check_filename = [
    ('Config.in',
     'Config.in',
     True),
    ('Config.in.legacy',
     'Config.in.legacy',
     True),
    ('arch/Config.in.microblaze',
     'arch/Config.in.microblaze',
     True),
    ('package/php/Config.ext',
     'package/php/Config.ext',
     True),
    ('package/pru-software-support/Config.in.host',
     'package/pru-software-support/Config.in.host',
     True),
    ('toolchain/toolchain-external/toolchain-external-custom/Config.in.options',
     'toolchain/toolchain-external/toolchain-external-custom/Config.in.options',
     True),
    ('package/foo/0001-Config.patch',
     'package/foo/0001-Config.patch',
     False),
    ('package/pkg-generic.mk',
     'package/pkg-generic.mk',
     False),
    ('Makefile',
     'Makefile',
     False),
]
@pytest.mark.parametrize('testname,filename,expected', check_filename)
def test_check_filename(testname, filename, expected):
    """Only kconfig-like filenames are selected for parsing."""
    result = m.check_filename(filename)
    assert result == expected

View File

@ -0,0 +1,301 @@
import pytest
from unittest.mock import Mock
from unittest.mock import call
from checksymbolslib.test_util import assert_db_calls
import checksymbolslib.makefile as m
# Cases for test_handle_eval: (testname, filename, lineno, line, expected_calls).
# A $(eval $(<infra>-package)) line implies usage of the config symbol(s)
# derived from the .mk file's path (package/, boot/, fs/, toolchain/, linux/).
handle_eval = [
    ('generic',
     'package/foo/foo.mk',
     5,
     '$(eval $(generic-package))',
     {'add_symbol_usage': [call('BR2_PACKAGE_FOO', 'package/foo/foo.mk', 5)]}),
    ('ignore trailing whitespace',
     'package/foo/foo.mk',
     5,
     '$(eval $(generic-package)) ',
     {'add_symbol_usage': [call('BR2_PACKAGE_FOO', 'package/foo/foo.mk', 5)]}),
    ('ignore indent',
     'package/foo/foo.mk',
     5,
     '\t$(eval $(generic-package))',
     {'add_symbol_usage': [call('BR2_PACKAGE_FOO', 'package/foo/foo.mk', 5)]}),
    ('rootfs',
     'fs/foo/foo.mk',
     5,
     '$(eval $(rootfs))',
     {'add_symbol_usage': [
         call('BR2_TARGET_ROOTFS_FOO', 'fs/foo/foo.mk', 5),
         call('BR2_TARGET_ROOTFS_FOO_BZIP2', 'fs/foo/foo.mk', 5),
         call('BR2_TARGET_ROOTFS_FOO_GZIP', 'fs/foo/foo.mk', 5),
         call('BR2_TARGET_ROOTFS_FOO_LZ4', 'fs/foo/foo.mk', 5),
         call('BR2_TARGET_ROOTFS_FOO_LZMA', 'fs/foo/foo.mk', 5),
         call('BR2_TARGET_ROOTFS_FOO_LZO', 'fs/foo/foo.mk', 5),
         call('BR2_TARGET_ROOTFS_FOO_XZ', 'fs/foo/foo.mk', 5),
         call('BR2_TARGET_ROOTFS_FOO_ZSTD', 'fs/foo/foo.mk', 5)]}),
    ('kernel module',
     'package/foo/foo.mk',
     6,
     '$(eval $(kernel-module))',
     {'add_symbol_usage': [call('BR2_PACKAGE_FOO', 'package/foo/foo.mk', 6)]}),
    ('not an eval for package infra',
     'docs/manual/manual.mk',
     10,
     '$(eval $(call asciidoc-document))',
     {}),
    ('linux',
     'linux/linux.mk',
     617,
     '$(eval $(kconfig-package))',
     {'add_symbol_usage': [call('BR2_LINUX_KERNEL', 'linux/linux.mk', 617)]}),
    ('virtual toolchain',
     'toolchain/toolchain-external/toolchain-external.mk',
     18,
     '$(eval $(virtual-package))',
     {'add_symbol_usage': [
         call('BR2_PACKAGE_PROVIDES_TOOLCHAIN_EXTERNAL', 'toolchain/toolchain-external/toolchain-external.mk', 18),
         call('BR2_PACKAGE_HAS_TOOLCHAIN_EXTERNAL', 'toolchain/toolchain-external/toolchain-external.mk', 18),
         call('BR2_TOOLCHAIN_EXTERNAL', 'toolchain/toolchain-external/toolchain-external.mk', 18)],
      'add_symbol_virtual': [call('BR2_TOOLCHAIN_EXTERNAL', 'toolchain/toolchain-external/toolchain-external.mk', 18)]}),
    ('virtual package',
     'package/foo/foo.mk',
     18,
     '$(eval $(virtual-package))',
     {'add_symbol_usage': [
         call('BR2_PACKAGE_PROVIDES_FOO', 'package/foo/foo.mk', 18),
         call('BR2_PACKAGE_HAS_FOO', 'package/foo/foo.mk', 18),
         call('BR2_PACKAGE_FOO', 'package/foo/foo.mk', 18)],
      'add_symbol_virtual': [call('BR2_PACKAGE_FOO', 'package/foo/foo.mk', 18)]}),
    ('host virtual package',
     'package/foo/foo.mk',
     18,
     '$(eval $(host-virtual-package))',
     {'add_symbol_usage': [
         call('BR2_PACKAGE_PROVIDES_HOST_FOO', 'package/foo/foo.mk', 18),
         call('BR2_PACKAGE_HAS_HOST_FOO', 'package/foo/foo.mk', 18),
         call('BR2_PACKAGE_HOST_FOO', 'package/foo/foo.mk', 18)]}),
    ('host generic package',
     'package/foo/foo.mk',
     18,
     '$(eval $(host-package))',
     {'add_symbol_usage': [call('BR2_PACKAGE_HOST_FOO', 'package/foo/foo.mk', 18)]}),
    ('boot package',
     'boot/foo/foo.mk',
     18,
     '$(eval $(generic-package))',
     {'add_symbol_usage': [call('BR2_TARGET_FOO', 'boot/foo/foo.mk', 18)]}),
    ('toolchain package',
     'toolchain/foo/foo.mk',
     18,
     '$(eval $(generic-package))',
     {'add_symbol_usage': [call('BR2_FOO', 'toolchain/foo/foo.mk', 18)]}),
    ('generic package',
     'package/foo/foo.mk',
     18,
     '$(eval $(generic-package))',
     {'add_symbol_usage': [call('BR2_PACKAGE_FOO', 'package/foo/foo.mk', 18)]}),
    ('cmake package',
     'package/foo/foo.mk',
     18,
     '$(eval $(cmake-package))',
     {'add_symbol_usage': [call('BR2_PACKAGE_FOO', 'package/foo/foo.mk', 18)]}),
]
@pytest.mark.parametrize('testname,filename,lineno,line,expected_calls', handle_eval)
def test_handle_eval(testname, filename, lineno, line, expected_calls):
    """$(eval $(…-package)) implies usage of symbols derived from the path."""
    mock_db = Mock()
    m.handle_eval(mock_db, filename, lineno, line)
    assert_db_calls(mock_db, expected_calls)
# Cases for test_handle_definition (makefiles):
# (testname, filename, lineno, line, legacy, expected_calls).
# Variable assignments (=, :=, ?=) and exports define a symbol.
handle_definition = [
    ('legacy attribution',
     'Makefile.legacy',
     9,
     'BR2_LEGACY_FOO := foo',
     True,
     {'add_symbol_legacy_definition': [call('BR2_LEGACY_FOO', 'Makefile.legacy', 9)]}),
    ('attribution 1',
     'Makefile',
     9,
     'BR2_FOO ?= foo',
     False,
     {'add_symbol_definition': [call('BR2_FOO', 'Makefile', 9)]}),
    ('attribution 2',
     'Makefile',
     9,
     'BR2_FOO = $(BR2_BAR)',
     False,
     {'add_symbol_definition': [call('BR2_FOO', 'Makefile', 9)]}),
    ('attribution 3',
     'Makefile',
     9,
     'BR2_FOO := foo',
     False,
     {'add_symbol_definition': [call('BR2_FOO', 'Makefile', 9)]}),
    ('normal export',
     'Makefile',
     90,
     'export BR2_FOO',
     False,
     {'add_symbol_definition': [call('BR2_FOO', 'Makefile', 90)]}),
    ('legacy export',
     'Makefile.legacy',
     90,
     'export BR2_FOO',
     True,
     {'add_symbol_legacy_definition': [call('BR2_FOO', 'Makefile.legacy', 90)]}),
]
@pytest.mark.parametrize('testname,filename,lineno,line,legacy,expected_calls', handle_definition)
def test_handle_definition(testname, filename, lineno, line, legacy, expected_calls):
    """make assignments and exports define symbols (legacy-aware)."""
    mock_db = Mock()
    m.handle_definition(mock_db, filename, lineno, line, legacy)
    assert_db_calls(mock_db, expected_calls)
# Cases for test_handle_usage (makefiles):
# (testname, filename, lineno, line, legacy, expected_calls).
handle_usage = [
    ('legacy',
     'Makefile.legacy',
     8,
     'ifeq ($(BR2_LEGACY),y)',
     True,
     {'add_symbol_usage_in_legacy': [call('BR2_LEGACY', 'Makefile.legacy', 8)]}),
    ('attribution',
     'Makefile',
     9,
     'BR2_FOO = $(BR2_BAR)',
     False,
     {'add_symbol_usage': [call('BR2_BAR', 'Makefile', 9)]}),
    ('host virtual package',
     'package/foo/foo.mk',
     18,
     '$(eval $(host-virtual-package))',
     False,
     {'add_symbol_usage': [
         call('BR2_PACKAGE_PROVIDES_HOST_FOO', 'package/foo/foo.mk', 18),
         call('BR2_PACKAGE_HAS_HOST_FOO', 'package/foo/foo.mk', 18),
         call('BR2_PACKAGE_HOST_FOO', 'package/foo/foo.mk', 18)]}),
]
@pytest.mark.parametrize('testname,filename,lineno,line,legacy,expected_calls', handle_usage)
def test_handle_usage(testname, filename, lineno, line, legacy, expected_calls):
    """$(BR2_*) expansions and package evals count as symbol usages."""
    mock_db = Mock()
    m.handle_usage(mock_db, filename, lineno, line, legacy)
    assert_db_calls(mock_db, expected_calls)
# Cases for test_populate_db (makefiles):
# (testname, filename, file_content, expected_calls).
populate_db = [
    ('legacy',
     'Makefile.legacy',
     [[8, 'ifeq ($(BR2_LEGACY),y)'],
      [9, 'BR2_LEGACY_FOO := foo'],
      [34, 'ifneq ($(BUILDROOT_CONFIG),$(BR2_CONFIG))']],
     {'add_symbol_usage_in_legacy': [
         call('BR2_LEGACY', 'Makefile.legacy', 8),
         call('BR2_CONFIG', 'Makefile.legacy', 34)],
      'add_symbol_legacy_definition': [call('BR2_LEGACY_FOO', 'Makefile.legacy', 9)]}),
    ('attribution',
     'Makefile',
     [[9, 'BR2_FOO = $(BR2_BAR)']],
     {'add_symbol_definition': [call('BR2_FOO', 'Makefile', 9)],
      'add_symbol_usage': [call('BR2_BAR', 'Makefile', 9)]}),
    ('legacy attribution',
     'Makefile.legacy',
     [[9, 'BR2_FOO = $(BR2_BAR)']],
     {'add_symbol_legacy_definition': [call('BR2_FOO', 'Makefile.legacy', 9)],
      'add_symbol_usage_in_legacy': [call('BR2_BAR', 'Makefile.legacy', 9)]}),
    ('generic',
     'package/foo/foo.mk',
     [[3, 'ifeq ($(BR2_PACKAGE_FOO_BAR):$(BR2_BAR),y:)'],
      [4, 'export BR2_PACKAGE_FOO_BAZ'],
      [5, '$(eval $(generic-package))']],
     {'add_symbol_usage': [
         call('BR2_PACKAGE_FOO_BAR', 'package/foo/foo.mk', 3),
         call('BR2_BAR', 'package/foo/foo.mk', 3),
         call('BR2_PACKAGE_FOO', 'package/foo/foo.mk', 5)],
      'add_symbol_definition': [call('BR2_PACKAGE_FOO_BAZ', 'package/foo/foo.mk', 4)]}),
    ('rootfs',
     'fs/foo/foo.mk',
     [[4, 'ifeq ($(BR2_TARGET_ROOTFS_FOO_LZ4),y)'],
      [5, '$(eval $(rootfs))']],
     {'add_symbol_usage': [
         call('BR2_TARGET_ROOTFS_FOO', 'fs/foo/foo.mk', 5),
         call('BR2_TARGET_ROOTFS_FOO_BZIP2', 'fs/foo/foo.mk', 5),
         call('BR2_TARGET_ROOTFS_FOO_GZIP', 'fs/foo/foo.mk', 5),
         call('BR2_TARGET_ROOTFS_FOO_LZ4', 'fs/foo/foo.mk', 4),
         call('BR2_TARGET_ROOTFS_FOO_LZ4', 'fs/foo/foo.mk', 5),
         call('BR2_TARGET_ROOTFS_FOO_LZMA', 'fs/foo/foo.mk', 5),
         call('BR2_TARGET_ROOTFS_FOO_LZO', 'fs/foo/foo.mk', 5),
         call('BR2_TARGET_ROOTFS_FOO_XZ', 'fs/foo/foo.mk', 5),
         call('BR2_TARGET_ROOTFS_FOO_ZSTD', 'fs/foo/foo.mk', 5)]}),
]
@pytest.mark.parametrize('testname,filename,file_content,expected_calls', populate_db)
def test_populate_db(testname, filename, file_content, expected_calls):
    """Whole-file makefile processing combines definitions, usages and evals."""
    mock_db = Mock()
    m.populate_db(mock_db, filename, file_content)
    assert_db_calls(mock_db, expected_calls)
# Cases for test_check_filename (makefiles): (testname, filename, expected).
# True for make-like files; False for kconfig files and patches.
check_filename = [
    ('arch/arch.mk.riscv',
     'arch/arch.mk.riscv',
     True),
    ('fs/cramfs/cramfs.mk',
     'fs/cramfs/cramfs.mk',
     True),
    ('linux/linux-ext-fbtft.mk',
     'linux/linux-ext-fbtft.mk',
     True),
    ('package/ace/ace.mk',
     'package/ace/ace.mk',
     True),
    ('package/linux-tools/linux-tool-hv.mk.in',
     'package/linux-tools/linux-tool-hv.mk.in',
     True),
    ('package/pkg-generic.mk',
     'package/pkg-generic.mk',
     True),
    ('package/x11r7/xlib_libXt/xlib_libXt.mk',
     'package/x11r7/xlib_libXt/xlib_libXt.mk',
     True),
    ('support/dependencies/check-host-make.mk',
     'support/dependencies/check-host-make.mk',
     True),
    ('toolchain/toolchain-external/toolchain-external-arm-aarch64-be/toolchain-external-arm-aarch64-be.mk',
     'toolchain/toolchain-external/toolchain-external-arm-aarch64-be/toolchain-external-arm-aarch64-be.mk',
     True),
    ('Makefile.legacy',
     'Makefile.legacy',
     True),
    ('boot/common.mk',
     'boot/common.mk',
     True),
    ('fs/common.mk',
     'fs/common.mk',
     True),
    ('Makefile',
     'Makefile',
     True),
    ('package/Makefile.in',
     'package/Makefile.in',
     True),
    ('Config.in',
     'Config.in',
     False),
    ('package/foo/0001-Makefile.patch',
     'package/foo/0001-Makefile.patch',
     False),
]
@pytest.mark.parametrize('testname,filename,expected', check_filename)
def test_check_filename(testname, filename, expected):
    """Only make-like filenames are selected for parsing."""
    result = m.check_filename(filename)
    assert result == expected

View File

@ -0,0 +1,15 @@
def assert_calls(method, expected_calls):
    """Check that *method* received exactly *expected_calls*, in any order."""
    expected = list(expected_calls)
    method.assert_has_calls(expected, any_order=True)
    # assert_has_calls() only checks inclusion; also require the exact count.
    assert len(expected) == method.call_count
def assert_db_calls(db, expected_calls):
    """Check every symbol-recording method on the mocked *db*.

    Methods absent from *expected_calls* must not have been called at all.
    """
    for method_name in (
        'add_symbol_legacy_definition',
        'add_symbol_definition',
        'add_symbol_usage_in_legacy',
        'add_symbol_usage',
        'add_symbol_legacy_usage',
        'add_symbol_select',
        'add_symbol_helper',
        'add_symbol_legacy_note',
        'add_symbol_virtual',
    ):
        assert_calls(getattr(db, method_name), expected_calls.get(method_name, []))

202
buildroot-2024.02/utils/config Executable file
View File

@ -0,0 +1,202 @@
#!/bin/bash
# Manipulate options in a .config file from the command line
# Script name without its directory, used in usage/help messages.
myname=${0##*/}
# If no prefix forced via the environment, use the default BR2_
# (plain "-" expansion: an explicitly-empty BR2_PREFIX is kept as-is).
BR2_PREFIX="${BR2_PREFIX-BR2_}"
# Print the help text to stderr and exit with an error status.
# Also called on any argument-parsing error.
usage() {
    cat >&2 <<EOL
Manipulate options in a .config file from the command line.
Usage:
$myname options command ...
commands:
--enable|-e option Enable option
--disable|-d option Disable option
--set-str option string
Set option to "string"
--set-val option value
Set option to value
--undefine|-u option Undefine option
--state|-s option Print state of option (n,y,m,undef)
--enable-after|-E beforeopt option
Enable option directly after other option
--disable-after|-D beforeopt option
Disable option directly after other option
commands can be repeated multiple times
options:
--file config-file .config file to change (default .config)
--keep-case|-k Keep next symbols' case (dont' upper-case it)
--package|-p Operate on package (set prefix to BR2_PACKAGE_)
$myname doesn't check the validity of the .config file. This is done at next
make time.
By default, $myname will upper-case the given symbol. Use --keep-case to keep
the case of all following symbols unchanged.
$myname uses 'BR2_' as the default symbol prefix. Set the environment
variable BR2_PREFIX to the prefix to use. Eg.: BR2_PREFIX="FOO_" $myname ...
EOL
    exit 1
}
# Validate a symbol argument and normalise it into the global ARG:
# upper-case it (unless MUNGE_CASE=no) and strip a leading BR2_PREFIX.
# Aborts via usage() when the argument is empty.
checkarg() {
    ARG=$1
    [ -n "$ARG" ] || usage
    if [ "$MUNGE_CASE" = "yes" ]; then
        # lower-case letters -> upper-case, '-' -> '_'
        ARG=$(echo "$ARG" | tr a-z- A-Z_)
    fi
    case "$ARG" in
    ${BR2_PREFIX}*) ARG="${ARG/${BR2_PREFIX}/}" ;;
    esac
}
# Append $2 as a new line after every line of file $3 matching regex $1.
txt_append() {
    local pattern="$1" text="$2" file="$3"
    # Build the sed append command: 'a\' + newline + text
    # (the %b of "\n" expands to a real newline inside the format string).
    cmd="$(printf "a\\%b$text" "\n")"
    sed -i -e "/$pattern/$cmd" "$file"
}
# Replace the first match of regex $1 with $2 on every line of file $3.
txt_subst() {
    local from="$1" to="$2" file="$3"
    # ':' as the s-command delimiter so '/' may appear in the patterns
    sed -i -e "s:$from:$to:" "$file"
}
# Delete every line of file $2 matching regex $1.
txt_delete() {
    local pattern="$1" file="$2"
    sed -i -e "/$pattern/d" "$file"
}
# Set symbol $1 to the literal config line $2 in $FN.
# If anchor symbol $3 is given and present in the file, insert the new line
# right after it; else if the symbol already exists, rewrite it in place;
# otherwise append the line to the end of the file.
set_var() {
    local name=$1 new=$2 before=$3
    name_re="^($name=|# $name is not set)"
    before_re="^($before=|# $before is not set)"
    if [ -n "$before" ] && grep -Eq "$before_re" "$FN"; then
        # Insert after the anchor, whichever form (set / not set) it has.
        txt_append "^$before=" "$new" "$FN"
        txt_append "^# $before is not set" "$new" "$FN"
        return
    fi
    if grep -Eq "$name_re" "$FN"; then
        # Symbol already present: rewrite it in place.
        txt_subst "^$name=.*" "$new" "$FN"
        txt_subst "^# $name is not set" "$new" "$FN"
    else
        echo "$new" >>"$FN"
    fi
}
# Remove symbol $1 from $FN entirely (both '=' and 'is not set' forms).
undef_var() {
    local name=$1
    txt_delete "^$name=" "$FN"
    txt_delete "^# $name is not set" "$FN"
}
# --- option and command processing -----------------------------------------

# The only global option that must come first: the config file to edit.
if [ "$1" = "--file" ]; then
    FN="$2"
    if [ "$FN" = "" ] ; then
        usage
    fi
    shift 2
else
    FN=.config
fi

if [ "$1" = "" ] ; then
    usage
fi

# By default, symbols given on the command line are upper-cased.
MUNGE_CASE=yes

# Process the remaining arguments as a sequence of commands, each taking
# zero, one or two symbol arguments.
while [ "$1" != "" ] ; do
    CMD="$1"
    shift
    case "$CMD" in
    --keep-case|-k)
        MUNGE_CASE=no
        continue
        ;;
    --package|-p)
        BR2_PREFIX="BR2_PACKAGE_"
        continue
        ;;
    # Commands taking two symbols: A is the anchor, B the symbol to set.
    --*-after|-E|-D|-M)
        checkarg "$1"
        A=$ARG
        checkarg "$2"
        B=$ARG
        shift 2
        ;;
    # All other dash-commands take a single symbol argument.
    -*)
        checkarg "$1"
        shift
        ;;
    esac
    case "$CMD" in
    --enable|-e)
        set_var "${BR2_PREFIX}$ARG" "${BR2_PREFIX}$ARG=y"
        ;;
    --disable|-d)
        set_var "${BR2_PREFIX}$ARG" "# ${BR2_PREFIX}$ARG is not set"
        ;;
    --set-str)
        # sed swallows one level of escaping, so we need double-escaping
        set_var "${BR2_PREFIX}$ARG" "${BR2_PREFIX}$ARG=\"${1//\"/\\\\\"}\""
        shift
        ;;
    --set-val)
        set_var "${BR2_PREFIX}$ARG" "${BR2_PREFIX}$ARG=$1"
        shift
        ;;
    --undefine|-u)
        undef_var "${BR2_PREFIX}$ARG"
        ;;
    --state|-s)
        # Quote "$FN" so config files whose path contains spaces or glob
        # characters still work (it was unquoted before).
        if grep -q "# ${BR2_PREFIX}$ARG is not set" "$FN" ; then
            echo n
        else
            if V="$(grep "^${BR2_PREFIX}$ARG=" "$FN")"; then
                # Strip the 'SYMBOL=' part and unquote the value.
                V="${V/#${BR2_PREFIX}$ARG=/}"
                V="${V/#\"/}"
                V="${V/%\"/}"
                V="${V//\\\"/\"}"
                echo "${V}"
            else
                echo undef
            fi
        fi
        ;;
    --enable-after|-E)
        set_var "${BR2_PREFIX}$B" "${BR2_PREFIX}$B=y" "${BR2_PREFIX}$A"
        ;;
    --disable-after|-D)
        set_var "${BR2_PREFIX}$B" "# ${BR2_PREFIX}$B is not set" "${BR2_PREFIX}$A"
        ;;
    *)
        usage
        ;;
    esac
done

View File

@ -0,0 +1,138 @@
#!/usr/bin/env python3
#
# diffconfig - a tool to compare .config files.
#
# originally written in 2006 by Matt Mackall
# (at least, this was in his bloatwatch source code)
# last worked on 2008 by Tim Bird for the Linux kernel
# Adapted to Buildroot 2017 by Marcus Folkesson
#
import sys, os
def usage():
    """Print the help text to stdout and exit successfully."""
    print("""Usage: diffconfig [-h] [-m] [<config1> <config2>]
Diffconfig is a simple utility for comparing two .config files.
Using standard diff to compare .config files often includes extraneous and
distracting information. This utility produces sorted output with only the
changes in configuration values between the two files.
Added and removed items are shown with a leading plus or minus, respectively.
Changed items show the old and new values on a single line.
If -m is specified, then output will be in "merge" style, which has the
changed and new values in kernel config option format.
If no config files are specified, .config and .config.old are used.
Example usage:
$ diffconfig .config config-with-some-changes
-BR2_LINUX_KERNEL_INTREE_DTS_NAME "vexpress-v2p-ca9"
BR2_LINUX_KERNEL_DTS_SUPPORT y -> n
BR2_LINUX_KERNEL_USE_INTREE_DTS y -> n
BR2_PACKAGE_DFU_UTIL n -> y
BR2_PACKAGE_LIBUSB n -> y
BR2_TARGET_GENERIC_HOSTNAME "buildroot" -> "Tuxie"
BR2_TARGET_GENERIC_ISSUE "Welcome to Buildroot" -> "Welcome to CustomBoard"
+BR2_PACKAGE_LIBUSB_COMPAT n
""")
    sys.exit(0)
def readconfig(config_file):
    """Return a dict of config-symbol name/value pairs read from the file.

    Symbols appearing as '# FOO is not set' are recorded with value "n";
    blank lines and other comment lines are skipped.
    """
    options = {}
    for raw in config_file:
        entry = raw.strip()
        if not entry:
            continue
        if entry.endswith(" is not set"):
            # '# FOO is not set' -> FOO = "n" (drop '# ' and the suffix)
            options[entry[2:-11]] = "n"
        elif not entry.startswith("#"):
            key, value = entry.split("=", 1)
            options[key] = value
    return options
def print_config(op, config, value, new_value):
    """Print one config difference, honouring the global merge_style flag.

    op is "-" (removed), "+" (added) or anything else (changed).
    """
    global merge_style
    if merge_style:
        # Merge style: emit kernel-config formatted lines for the new value;
        # a removed symbol (no new value) produces no output.
        if not new_value:
            return
        if new_value == "n":
            print("# %s is not set" % config)
        else:
            print("%s=%s" % (config, new_value))
    elif op == "-":
        print("-%s %s" % (config, value))
    elif op == "+":
        print("+%s %s" % (config, new_value))
    else:
        print(" %s %s -> %s" % (config, value, new_value))
def main():
    """Compare two .config files and print their differences, sorted.

    Reads file names from sys.argv (or defaults to .config.old/.config),
    then prints removed, changed and added symbols in that order.
    """
    global merge_style
    # parse command line args
    if ("-h" in sys.argv or "--help" in sys.argv):
        usage()
    merge_style = 0
    if "-m" in sys.argv:
        merge_style = 1
        sys.argv.remove("-m")
    argc = len(sys.argv)
    if not (argc==1 or argc == 3):
        print("Error: incorrect number of arguments or unrecognized option")
        usage()
    if argc == 1:
        # if no filenames given, assume .config and .config.old
        build_dir=""
        if "KBUILD_OUTPUT" in os.environ:
            build_dir = os.environ["KBUILD_OUTPUT"]+"/"
        configa_filename = build_dir + ".config.old"
        configb_filename = build_dir + ".config"
    else:
        configa_filename = sys.argv[1]
        configb_filename = sys.argv[2]
    try:
        a = readconfig(open(configa_filename))
        b = readconfig(open(configb_filename))
    except (IOError):
        e = sys.exc_info()[1]
        print("I/O error[%s]: %s\n" % (e.args[0],e.args[1]))
        usage()
    # print items in a but not b (accumulate, sort and print)
    old = []
    for config in a:
        if config not in b:
            old.append(config)
    old.sort()
    for config in old:
        print_config("-", config, a[config], None)
        del a[config]
    # print items that changed (accumulate, sort, and print)
    changed = []
    for config in a:
        if a[config] != b[config]:
            changed.append(config)
        else:
            # identical value: drop from b so it is not reported as new below
            del b[config]
    changed.sort()
    for config in changed:
        print_config("->", config, a[config], b[config])
        del b[config]
    # now print items in b but not in a
    # (items from b that were in a were removed above)
    new = sorted(b.keys())
    for config in new:
        print_config("+", config, None, b[config])
main()

View File

@ -0,0 +1,87 @@
#!/usr/bin/env bash
# Run the given command inside the Buildroot reference Docker image, with
# the source tree (and its git metadata) bind-mounted at the same paths.
set -o errexit -o pipefail
DIR=$(dirname "${0}")
MAIN_DIR=$(readlink -f "${DIR}/..")
if [ -L "${MAIN_DIR}/.git/config" ]; then
    # Support git-new-workdir
    GIT_DIR="$(dirname "$(realpath "${MAIN_DIR}/.git/config")")"
else
    # Support git-worktree
    GIT_DIR="$(cd "${MAIN_DIR}" && git rev-parse --no-flags --git-common-dir)"
fi
# Use the same container image as the GitLab CI, unless overridden.
if test -z "${IMAGE}" ; then
    # shellcheck disable=SC2016
    IMAGE=$(grep ^image: "${MAIN_DIR}/.gitlab-ci.yml" | \
    sed -e 's,^image: ,,g' | sed -e 's,\$CI_REGISTRY,registry.gitlab.com,g')
fi
declare -a docker_opts=(
    -i
    --rm
    --user "$(id -u):$(id -g)"
    --workdir "$(pwd)"
    --security-opt label=disable
    --network host
)
declare -a mountpoints=(
    "${MAIN_DIR}"
    "$(pwd)"
)
# curl lists (and recognises and uses) other types of *_proxy variables,
# but only those make sense for Buildroot:
for env in all_proxy http_proxy https_proxy ftp_proxy no_proxy; do
    if [ "${!env}" ]; then
        docker_opts+=( --env "${env}" )
        # The lower-case variant takes precedence on the upper-case one
        # (dixit curl)
        continue
    fi
    # http_proxy is only lower-case (dixit curl)
    if [ "${env}" = http_proxy ]; then
        continue
    fi
    # All the others also exist in the upper-case variant
    env="${env^^}"
    if [ "${!env}" ]; then
        docker_opts+=( --env "${env}" )
    fi
done
# Empty GIT_DIR means that we are not in a workdir, *and* git is too old
# to know about worktrees, so we're not in a worktree either. So it means
# we're in the main git working copy, and thus we don't need to mount the
# .git directory.
if [ "${GIT_DIR}" ]; then
    # GIT_DIR in the main working copy (when git supports worktrees) will
    # be just '.git', but 'docker run' needs an absolute path. If it is
    # not absolute, GIT_DIR is relative to MAIN_DIR. If it's an absolute
    # path already (in a workdir), then that's a noop.
    GIT_DIR="$(cd "${MAIN_DIR}"; readlink -e "${GIT_DIR}")"
    mountpoints+=( "${GIT_DIR}" )
    # 'repo' stores .git/objects separately.
    if [ -L "${GIT_DIR}/objects" ]; then
        # GIT_DIR is already an absolute path, but for symmetry
        # with the above, keep the same cd+readlink construct.
        OBJECTS_DIR="$(cd "${MAIN_DIR}"; readlink -e "${GIT_DIR}/objects")"
        mountpoints+=( "${OBJECTS_DIR}" )
    fi
fi
# Make the Buildroot download directory available in the container, if set.
if [ "${BR2_DL_DIR}" ]; then
    mountpoints+=( "${BR2_DL_DIR}" )
    docker_opts+=( --env BR2_DL_DIR )
fi
# Bind-mount every collected directory at the same path in the container.
# shellcheck disable=SC2013 # can't use while-read because of the assignment
for dir in $(printf '%s\n' "${mountpoints[@]}" |LC_ALL=C sort -u); do
    docker_opts+=( --mount "type=bind,src=${dir},dst=${dir}" )
done
# Allocate a pty only when we actually have one (interactive use).
if tty -s; then
    docker_opts+=( -t )
fi
exec docker run "${docker_opts[@]}" "${IMAGE}" "${@}"

View File

@ -0,0 +1,814 @@
#!/usr/bin/env python3
# Copyright (C) 2014 by Thomas Petazzoni <thomas.petazzoni@free-electrons.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# This script generates a random configuration for testing Buildroot.
from binascii import hexlify
import asyncio
import csv
import os
from random import randint
import sys
import traceback
class SystemInfo:
    """Collects information about which host programs are available."""

    DEFAULT_NEEDED_PROGS = ["make", "git", "gcc", "timeout"]
    DEFAULT_OPTIONAL_PROGS = ["bzr", "java", "javac", "jar", "diffoscope"]

    def __init__(self):
        self.needed_progs = list(self.__class__.DEFAULT_NEEDED_PROGS)
        self.optional_progs = list(self.__class__.DEFAULT_OPTIONAL_PROGS)
        # Cache of program name -> path (or False/None when unusable),
        # filled lazily by has().
        self.progs = {}

    def find_prog(self, name, flags=os.X_OK, env=os.environ):
        """Search PATH for an executable named 'name'.

        Returns the full path of the first match, or None if not found.
        Raises ValueError for an empty or absolute name.
        """
        if not name or name[0] == os.sep:
            raise ValueError(name)
        prog_path = env.get("PATH", None)
        # for windows compatibility, we'd need to take PATHEXT into account
        if prog_path:
            for prog_dir in filter(None, prog_path.split(os.pathsep)):
                # os.join() not necessary: non-empty prog_dir
                # and name[0] != os.sep
                prog = prog_dir + os.sep + name
                if os.access(prog, flags):
                    return prog
        # --
        return None

    async def has(self, prog):
        """Checks whether a program is available.
        Lazily evaluates missing entries.
        Returns: None if prog not found, else path to the program [evaluates
        to True]
        """
        try:
            return self.progs[prog]
        except KeyError:
            pass
        have_it = self.find_prog(prog)
        # java[c] needs special care: a gcj-based java is not usable
        if have_it and prog in ('java', 'javac'):
            proc = await asyncio.create_subprocess_shell(
                "%s -version | grep gcj" % prog,
                stdout=asyncio.subprocess.DEVNULL,
                stderr=asyncio.subprocess.DEVNULL)
            ret = await proc.wait()
            if ret != 1:
                have_it = False
        # --
        self.progs[prog] = have_it
        return have_it

    def check_requirements(self):
        """Checks program dependencies.
        Returns: True if all mandatory programs are present, else False.
        """
        # Bug fix: has() is a coroutine, so calling it from this synchronous
        # method returned an always-truthy (and never-awaited) coroutine
        # object, meaning missing programs were never detected.  Use the
        # synchronous find_prog() directly instead.
        missing_requirements = False
        for prog in self.needed_progs:
            if not self.find_prog(prog):
                print("ERROR: your system lacks the '%s' program" % prog)
                missing_requirements = True
        # Probe the optional programs too, so problems surface early;
        # workers will re-check (and cache) them through has().
        for prog in self.optional_progs:
            self.find_prog(prog)
        return not missing_requirements
def get_toolchain_configs(toolchains_csv, buildrootdir):
    """Fetch and return the possible toolchain configurations

    Reads the toolchain CSV (columns: defconfig-fragment path, host arch)
    and returns a list of configurations, each one being the list of lines
    of the corresponding defconfig fragment; only toolchains usable on the
    current host architecture are kept.
    """
    with open(toolchains_csv) as csv_file:
        # Drop blank lines and '#' comments before handing over to csv.
        candidate_rows = [line for line in csv_file.readlines()
                          if line.strip() and not line.startswith('#')]

    hostarch = os.uname()[4]
    # ~2015 distros report x86 when on a 32bit install
    if hostarch in ('i686', 'i386', 'x86'):
        hostarch = 'x86'

    configs = []
    for row in csv.reader(candidate_rows):
        configfile, config_hostarch = row[0], row[1]
        # A toolchain is usable when host-independent ("any"), matching the
        # host architecture, or a 32-bit x86 one on an x86_64 build machine.
        usable = (config_hostarch == "any" or
                  config_hostarch == hostarch or
                  (hostarch == 'x86_64' and config_hostarch == "x86"))
        if not usable:
            continue
        if not os.path.isabs(configfile):
            configfile = os.path.join(buildrootdir, configfile)
        with open(configfile) as fragment:
            configs.append(fragment.readlines())
    return configs
async def is_toolchain_usable(configfile, config):
    """Check if the toolchain is actually usable.

    Returns True when every line of the toolchain fragment 'config' is
    still present in 'configfile'; otherwise warns on stderr and returns
    False.
    """
    with open(configfile) as cf:
        current = cf.readlines()
    # Report (only) the first fragment line that is no longer present.
    for fragment_line in config:
        if fragment_line in current:
            continue
        print("WARN: toolchain can't be used", file=sys.stderr)
        print(" Missing: %s" % fragment_line.strip(), file=sys.stderr)
        return False
    return True
async def fixup_config(sysinfo, configfile):
"""Finalize the configuration and reject any problematic combinations
This function returns 'True' when the configuration has been
accepted, and 'False' when the configuration has not been accepted because
it is known to fail (in which case another random configuration will be
generated).
"""
with open(configfile) as configf:
configlines = configf.readlines()
ROOTFS_SIZE = '5G'
BR2_TOOLCHAIN_EXTERNAL_URL = 'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/'
if "BR2_NEEDS_HOST_JAVA=y\n" in configlines and not await sysinfo.has("java"):
return False
# The ctng toolchain is affected by PR58854
if 'BR2_PACKAGE_LTTNG_TOOLS=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'armv5-ctng-linux-gnueabi.tar.xz"\n' in configlines:
return False
# The ctng toolchain tigger an assembler error with guile package when compiled with -Os (same issue as for CS ARM 2014.05-29)
if 'BR2_PACKAGE_GUILE=y\n' in configlines and \
'BR2_OPTIMIZE_S=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'armv5-ctng-linux-gnueabi.tar.xz"\n' in configlines:
return False
# The ctng toolchain is affected by PR58854
if 'BR2_PACKAGE_LTTNG_TOOLS=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'armv6-ctng-linux-uclibcgnueabi.tar.xz"\n' in configlines:
return False
# The ctng toolchain is affected by PR58854
if 'BR2_PACKAGE_LTTNG_TOOLS=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'armv7-ctng-linux-gnueabihf.tar.xz"\n' in configlines:
return False
# The ctng toolchain is affected by PR60155
if 'BR2_PACKAGE_SDL=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'powerpc-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# The ctng toolchain is affected by PR60155
if 'BR2_PACKAGE_LIBMPEG2=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'powerpc-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS toolchain uses eglibc-2.18 which lacks SYS_getdents64
if 'BR2_PACKAGE_STRONGSWAN=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
return False
# This MIPS toolchain uses eglibc-2.18 which lacks SYS_getdents64
if 'BR2_PACKAGE_PYTHON3=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
return False
# libffi not available on ARMv7-M, but propagating libffi arch
# dependencies in Buildroot is really too much work, so we handle
# this here.
if 'BR2_ARM_CPU_ARMV7M=y\n' in configlines and \
'BR2_PACKAGE_LIBFFI=y\n' in configlines:
return False
# libopenssl needs atomic, but propagating this dependency in
# Buildroot is really too much work, so we handle this here.
if 'BR2_PACKAGE_LIBOPENSSL=y\n' in configlines and \
'BR2_TOOLCHAIN_HAS_ATOMIC=y\n' not in configlines:
return False
if 'BR2_PACKAGE_SUNXI_BOARDS=y\n' in configlines:
configlines.remove('BR2_PACKAGE_SUNXI_BOARDS_FEX_FILE=""\n')
configlines.append('BR2_PACKAGE_SUNXI_BOARDS_FEX_FILE="a10/hackberry.fex"\n')
# This MIPS uClibc toolchain fails to build the gdb package
if 'BR2_PACKAGE_GDB=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS uClibc toolchain fails to build the rt-tests package
if 'BR2_PACKAGE_RT_TESTS=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS uClibc toolchain fails to build the civetweb package
if 'BR2_PACKAGE_CIVETWEB=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS ctng toolchain fails to build the python3 package
if 'BR2_PACKAGE_PYTHON3=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
return False
# This MIPS uClibc toolchain fails to build the strace package
if 'BR2_PACKAGE_STRACE=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS uClibc toolchain fails to build the cdrkit package
if 'BR2_PACKAGE_CDRKIT=y\n' in configlines and \
'BR2_STATIC_LIBS=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# uClibc vfork static linking issue
if 'BR2_PACKAGE_ALSA_LIB=y\n' in configlines and \
'BR2_STATIC_LIBS=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'i486-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS uClibc toolchain fails to build the weston package
if 'BR2_PACKAGE_WESTON=y\n' in configlines and \
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# The cs nios2 2017.02 toolchain is affected by binutils PR19405
if 'BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_NIOSII=y\n' in configlines and \
'BR2_PACKAGE_BOOST=y\n' in configlines:
return False
# The cs nios2 2017.02 toolchain is affected by binutils PR19405
if 'BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_NIOSII=y\n' in configlines and \
'BR2_PACKAGE_QT5BASE_GUI=y\n' in configlines:
return False
# The cs nios2 2017.02 toolchain is affected by binutils PR19405
if 'BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_NIOSII=y\n' in configlines and \
'BR2_PACKAGE_FLANN=y\n' in configlines:
return False
# No C library for internal toolchain
if 'BR2_TOOLCHAIN_BUILDROOT_NONE=y' in configlines:
return False
# Xtensa custom cores require an overlay file with internal
# toolchains
if 'BR2_XTENSA_CUSTOM=y' in configlines and \
'BR2_TOOLCHAIN_BUILDROOT=y' in configlines:
return False
if 'BR2_TOOLCHAIN_BARE_METAL_BUILDROOT=y\n' in configlines:
configlines.remove('BR2_TOOLCHAIN_BARE_METAL_BUILDROOT_ARCH=""\n')
configlines.append('BR2_TOOLCHAIN_BARE_METAL_BUILDROOT_ARCH="microblazeel-xilinx-elf"\n')
if 'BR2_PACKAGE_AUFS_UTIL=y\n' in configlines and \
'BR2_PACKAGE_AUFS_UTIL_VERSION=""\n' in configlines:
return False
if 'BR2_PACKAGE_A10DISP=y\n' in configlines:
return False
if 'BR2_PACKAGE_HOST_UBOOT_TOOLS_ENVIMAGE=y\n' in configlines and \
'BR2_PACKAGE_HOST_UBOOT_TOOLS_ENVIMAGE_SOURCE=""\n' in configlines and \
'BR2_PACKAGE_HOST_UBOOT_TOOLS_ENVIMAGE_SIZE=""\n' in configlines:
bootenv = os.path.join(args.outputdir, "boot_env.txt")
with open(bootenv, "w+") as bootenvf:
bootenvf.write("prop=value")
configlines.remove('BR2_PACKAGE_HOST_UBOOT_TOOLS_ENVIMAGE_SOURCE=""\n')
configlines.append('BR2_PACKAGE_HOST_UBOOT_TOOLS_ENVIMAGE_SOURCE="%s"\n' % bootenv)
configlines.remove('BR2_PACKAGE_HOST_UBOOT_TOOLS_ENVIMAGE_SIZE=""\n')
configlines.append('BR2_PACKAGE_HOST_UBOOT_TOOLS_ENVIMAGE_SIZE="0x1000"\n')
if 'BR2_PACKAGE_HOST_UBOOT_TOOLS_BOOT_SCRIPT=y\n' in configlines and \
'BR2_PACKAGE_HOST_UBOOT_TOOLS_BOOT_SCRIPT_SOURCE=""\n' in configlines:
bootscr = os.path.join(args.outputdir, "boot_script.txt")
with open(bootscr, "w+") as bootscrf:
bootscrf.write("prop=value")
configlines.remove('BR2_PACKAGE_HOST_UBOOT_TOOLS_BOOT_SCRIPT_SOURCE=""\n')
configlines.append('BR2_PACKAGE_HOST_UBOOT_TOOLS_BOOT_SCRIPT_SOURCE="%s"\n' % bootscr)
if 'BR2_ROOTFS_SKELETON_CUSTOM=y\n' in configlines and \
'BR2_ROOTFS_SKELETON_CUSTOM_PATH=""\n' in configlines:
configlines.remove('BR2_ROOTFS_SKELETON_CUSTOM=y\n')
configlines.remove('BR2_ROOTFS_SKELETON_CUSTOM_PATH=""\n')
if 'BR2_LINUX_KERNEL=y\n' in configlines and \
'BR2_LINUX_KERNEL_USE_CUSTOM_CONFIG=y\n' in configlines and \
'BR2_LINUX_KERNEL_CUSTOM_CONFIG_FILE=""\n' in configlines:
configlines.remove('BR2_LINUX_KERNEL_USE_CUSTOM_CONFIG=y\n')
configlines.append('BR2_LINUX_KERNEL_USE_ARCH_DEFAULT_CONFIG=y\n')
configlines.remove('BR2_LINUX_KERNEL_CUSTOM_CONFIG_FILE=""\n')
if 'BR2_LINUX_KERNEL=y\n' in configlines and \
'BR2_LINUX_KERNEL_USE_DEFCONFIG=y\n' in configlines and \
'BR2_LINUX_KERNEL_DEFCONFIG=""\n' in configlines:
configlines.remove('BR2_LINUX_KERNEL_USE_DEFCONFIG=y\n')
configlines.append('BR2_LINUX_KERNEL_USE_ARCH_DEFAULT_CONFIG=y\n')
configlines.remove('BR2_LINUX_KERNEL_DEFCONFIG=""\n')
if 'BR2_LINUX_KERNEL=y\n' in configlines and \
'BR2_LINUX_KERNEL_CUSTOM_GIT=y\n' in configlines and \
'BR2_LINUX_KERNEL_CUSTOM_REPO_URL=""\n' in configlines:
configlines.remove('BR2_LINUX_KERNEL_CUSTOM_GIT=y\n')
configlines.append('BR2_LINUX_KERNEL_LATEST_VERSION=y\n')
configlines.remove('BR2_LINUX_KERNEL_CUSTOM_REPO_URL=""\n')
if 'BR2_LINUX_KERNEL=y\n' in configlines and \
'BR2_LINUX_KERNEL_CUSTOM_HG=y\n' in configlines and \
'BR2_LINUX_KERNEL_CUSTOM_REPO_URL=""\n' in configlines:
configlines.remove('BR2_LINUX_KERNEL_CUSTOM_HG=y\n')
configlines.append('BR2_LINUX_KERNEL_LATEST_VERSION=y\n')
configlines.remove('BR2_LINUX_KERNEL_CUSTOM_REPO_URL=""\n')
if 'BR2_LINUX_KERNEL=y\n' in configlines and \
'BR2_LINUX_KERNEL_CUSTOM_SVN=y\n' in configlines and \
'BR2_LINUX_KERNEL_CUSTOM_REPO_URL=""\n' in configlines:
configlines.remove('BR2_LINUX_KERNEL_CUSTOM_SVN=y\n')
configlines.append('BR2_LINUX_KERNEL_LATEST_VERSION=y\n')
configlines.remove('BR2_LINUX_KERNEL_CUSTOM_REPO_URL=""\n')
if 'BR2_LINUX_KERNEL=y\n' in configlines and \
'BR2_LINUX_KERNEL_CUSTOM_TARBALL=y\n' in configlines and \
'BR2_LINUX_KERNEL_CUSTOM_TARBALL_LOCATION=""\n' in configlines:
configlines.remove('BR2_LINUX_KERNEL_CUSTOM_TARBALL=y\n')
configlines.append('BR2_LINUX_KERNEL_LATEST_VERSION=y\n')
configlines.remove('BR2_LINUX_KERNEL_CUSTOM_TARBALL_LOCATION=""\n')
if 'BR2_LINUX_KERNEL=y\n' in configlines and \
'BR2_LINUX_KERNEL_CUSTOM_VERSION=y\n' in configlines and \
'BR2_LINUX_KERNEL_CUSTOM_VERSION_VALUE=""\n' in configlines:
configlines.remove('BR2_LINUX_KERNEL_CUSTOM_VERSION=y\n')
configlines.append('BR2_LINUX_KERNEL_LATEST_VERSION=y\n')
configlines.remove('BR2_LINUX_KERNEL_CUSTOM_VERSION_VALUE=""\n')
if 'BR2_LINUX_KERNEL=y\n' in configlines and \
'BR2_LINUX_KERNEL_DTS_SUPPORT=y\n' in configlines and \
'BR2_LINUX_KERNEL_INTREE_DTS_NAME=""\n' in configlines and \
'BR2_LINUX_KERNEL_CUSTOM_DTS_PATH=""\n' in configlines:
configlines.remove('BR2_LINUX_KERNEL_DTS_SUPPORT=y\n')
configlines.remove('BR2_LINUX_KERNEL_INTREE_DTS_NAME=""\n')
configlines.remove('BR2_LINUX_KERNEL_CUSTOM_DTS_PATH=""\n')
if 'BR2_LINUX_KERNEL_APPENDED_UIMAGE=y\n' in configlines:
configlines.remove('BR2_LINUX_KERNEL_APPENDED_UIMAGE=y\n')
configlines.append('BR2_LINUX_KERNEL_UIMAGE=y\n')
if 'BR2_LINUX_KERNEL_APPENDED_ZIMAGE=y\n' in configlines:
configlines.remove('BR2_LINUX_KERNEL_APPENDED_ZIMAGE=y\n')
configlines.append('BR2_LINUX_KERNEL_ZIMAGE=y\n')
if 'BR2_LINUX_KERNEL_CUIMAGE=y\n' in configlines:
configlines.remove('BR2_LINUX_KERNEL_CUIMAGE=y\n')
configlines.append('BR2_LINUX_KERNEL_UIMAGE=y\n')
if 'BR2_LINUX_KERNEL_SIMPLEIMAGE=y\n' in configlines:
configlines.remove('BR2_LINUX_KERNEL_SIMPLEIMAGE=y\n')
configlines.append('BR2_LINUX_KERNEL_VMLINUX=y\n')
if 'BR2_LINUX_KERNEL_EXT_AUFS=y\n' in configlines and \
'BR2_LINUX_KERNEL_EXT_AUFS_VERSION=""\n' in configlines:
configlines.remove('BR2_LINUX_KERNEL_EXT_AUFS=y\n')
configlines.remove('BR2_LINUX_KERNEL_EXT_AUFS_VERSION=""\n')
if 'BR2_PACKAGE_LINUX_BACKPORTS=y\n' in configlines and \
'BR2_PACKAGE_LINUX_BACKPORTS_USE_CUSTOM_CONFIG=y\n' in configlines and \
'BR2_PACKAGE_LINUX_BACKPORTS_CUSTOM_CONFIG_FILE=""\n' in configlines:
configlines.remove('BR2_PACKAGE_LINUX_BACKPORTS=y\n')
configlines.remove('BR2_PACKAGE_LINUX_BACKPORTS_USE_CUSTOM_CONFIG=y\n')
configlines.remove('BR2_PACKAGE_LINUX_BACKPORTS_CUSTOM_CONFIG_FILE=""\n')
if 'BR2_PACKAGE_LINUX_BACKPORTS=y\n' in configlines and \
'BR2_PACKAGE_LINUX_BACKPORTS_USE_DEFCONFIG=y\n' in configlines and \
'BR2_PACKAGE_LINUX_BACKPORTS_DEFCONFIG=""\n' in configlines:
configlines.remove('BR2_PACKAGE_LINUX_BACKPORTS=y\n')
configlines.remove('BR2_PACKAGE_LINUX_BACKPORTS_USE_DEFCONFIG=y\n')
configlines.remove('BR2_PACKAGE_LINUX_BACKPORTS_DEFCONFIG=""\n')
if 'BR2_KERNEL_HEADERS_VERSION=y\n' in configlines and \
'BR2_DEFAULT_KERNEL_VERSION=""\n' in configlines:
configlines.remove('BR2_KERNEL_HEADERS_VERSION=y\n')
configlines.remove('BR2_DEFAULT_KERNEL_VERSION=""\n')
if 'BR2_KERNEL_HEADERS_CUSTOM_GIT=y\n' in configlines and \
'BR2_KERNEL_HEADERS_CUSTOM_REPO_URL=""\n':
configlines.remove('BR2_KERNEL_HEADERS_CUSTOM_GIT=y\n')
configlines.remove('BR2_KERNEL_HEADERS_CUSTOM_REPO_URL=""\n')
if 'BR2_KERNEL_HEADERS_CUSTOM_TARBALL=y\n' in configlines and \
'BR2_KERNEL_HEADERS_CUSTOM_TARBALL_LOCATION=""\n' in configlines:
configlines.remove('BR2_KERNEL_HEADERS_CUSTOM_TARBALL=y\n')
configlines.remove('BR2_KERNEL_HEADERS_CUSTOM_TARBALL_LOCATION=""\n')
if 'BR2_TARGET_ARM_TRUSTED_FIRMWARE=y\n' in configlines and \
'BR2_TARGET_ARM_TRUSTED_FIRMWARE_PLATFORM=""\n' in configlines:
return False
if 'BR2_TARGET_ARM_TRUSTED_FIRMWARE=y\n' in configlines and \
'BR2_TARGET_ARM_TRUSTED_FIRMWARE_CUSTOM_VERSION=y\n' in configlines and \
'BR2_TARGET_ARM_TRUSTED_FIRMWARE_CUSTOM_VERSION_VALUE=""\n' in configlines:
configlines.remove('BR2_TARGET_ARM_TRUSTED_FIRMWARE_CUSTOM_VERSION=y\n')
configlines.append('BR2_TARGET_ARM_TRUSTED_FIRMWARE_LATEST_VERSION=y\n')
configlines.remove('BR2_TARGET_ARM_TRUSTED_FIRMWARE_CUSTOM_VERSION_VALUE=""\n')
if 'BR2_TARGET_ARM_TRUSTED_FIRMWARE=y\n' in configlines and \
'BR2_TARGET_ARM_TRUSTED_FIRMWARE_CUSTOM_TARBALL=y\n' in configlines and \
'BR2_TARGET_ARM_TRUSTED_FIRMWARE_CUSTOM_TARBALL_LOCATION=""\n' in configlines:
configlines.remove('BR2_TARGET_ARM_TRUSTED_FIRMWARE_CUSTOM_TARBALL=y\n')
configlines.append('BR2_TARGET_ARM_TRUSTED_FIRMWARE_LATEST_VERSION=y\n')
configlines.remove('BR2_TARGET_ARM_TRUSTED_FIRMWARE_CUSTOM_TARBALL_LOCATION=""\n')
if 'BR2_TARGET_ARM_TRUSTED_FIRMWARE=y\n' in configlines and \
'BR2_TARGET_ARM_TRUSTED_FIRMWARE_CUSTOM_GIT=y\n' in configlines and \
'BR2_TARGET_ARM_TRUSTED_FIRMWARE_CUSTOM_REPO_URL=""\n' in configlines:
configlines.remove('BR2_TARGET_ARM_TRUSTED_FIRMWARE_CUSTOM_GIT=y\n')
configlines.append('BR2_TARGET_ARM_TRUSTED_FIRMWARE_LATEST_VERSION=y\n')
configlines.remove('BR2_TARGET_ARM_TRUSTED_FIRMWARE_CUSTOM_REPO_URL=""\n')
if 'BR2_TARGET_AT91BOOTSTRAP3=y\n' in configlines and \
'BR2_TARGET_AT91BOOTSTRAP3_DEFCONFIG=""\n' in configlines:
configlines.remove('BR2_TARGET_AT91BOOTSTRAP3=y\n')
configlines.remove('BR2_TARGET_AT91BOOTSTRAP3_DEFCONFIG=""\n')
if 'BR2_TARGET_BAREBOX=y\n' in configlines and \
'BR2_TARGET_BAREBOX_USE_CUSTOM_CONFIG=y\n' in configlines and \
'BR2_TARGET_BAREBOX_CUSTOM_CONFIG_FILE=""\n' in configlines:
configlines.remove('BR2_TARGET_BAREBOX=y\n')
configlines.remove('BR2_TARGET_BAREBOX_USE_CUSTOM_CONFIG=y\n')
configlines.remove('BR2_TARGET_BAREBOX_CUSTOM_CONFIG_FILE=""\n')
if 'BR2_TARGET_BAREBOX=y\n' in configlines and \
'BR2_TARGET_BAREBOX_USE_DEFCONFIG=y\n' in configlines and \
'BR2_TARGET_BAREBOX_BOARD_DEFCONFIG=""\n' in configlines:
configlines.remove('BR2_TARGET_BAREBOX=y\n')
configlines.remove('BR2_TARGET_BAREBOX_USE_DEFCONFIG=y\n')
configlines.remove('BR2_TARGET_BAREBOX_BOARD_DEFCONFIG=""\n')
if 'BR2_TARGET_BOOT_WRAPPER_AARCH64=y\n' in configlines and \
'BR2_TARGET_BOOT_WRAPPER_AARCH64_DTS=""\n' in configlines:
return False
if 'BR2_TARGET_OPTEE_OS=y\n' in configlines and \
'BR2_TARGET_OPTEE_OS_CUSTOM_TARBALL=y\n' in configlines and \
'BR2_TARGET_OPTEE_OS_CUSTOM_TARBALL_LOCATION=""\n' in configlines:
configlines.remove('BR2_TARGET_OPTEE_OS_CUSTOM_TARBALL=y\n')
configlines.append('BR2_TARGET_OPTEE_OS_LATEST=y\n')
configlines.remove('BR2_TARGET_OPTEE_OS_CUSTOM_TARBALL_LOCATION=""\n')
if 'BR2_TARGET_OPTEE_OS=y\n' in configlines and \
'BR2_TARGET_OPTEE_OS_PLATFORM=""\n' in configlines:
configlines.remove('BR2_TARGET_OPTEE_OS=y\n')
configlines.remove('BR2_TARGET_OPTEE_OS_PLATFORM=""\n')
if 'BR2_TARGET_ROOTFS_CRAMFS=y\n' in configlines:
configlines.remove('BR2_TARGET_ROOTFS_CRAMFS=y\n')
if 'BR2_TARGET_ROOTFS_EXT2=y\n' in configlines and \
'BR2_TARGET_ROOTFS_EXT2_SIZE="60M"\n' in configlines:
configlines.remove('BR2_TARGET_ROOTFS_EXT2_SIZE="60M"\n')
configlines.append('BR2_TARGET_ROOTFS_EXT2_SIZE="%s"\n' % ROOTFS_SIZE)
if 'BR2_TARGET_ROOTFS_F2FS=y\n' in configlines and \
'BR2_TARGET_ROOTFS_F2FS_SIZE="100M"\n' in configlines:
configlines.remove('BR2_TARGET_ROOTFS_F2FS_SIZE="100M"\n')
configlines.append('BR2_TARGET_ROOTFS_F2FS_SIZE="%s"\n' % ROOTFS_SIZE)
if 'BR2_TARGET_ROOTFS_UBIFS=y\n' in configlines and \
'BR2_TARGET_ROOTFS_UBIFS_MAXLEBCNT=2048\n' in configlines:
configlines.remove('BR2_TARGET_ROOTFS_UBIFS_MAXLEBCNT=2048\n')
configlines.append('BR2_TARGET_ROOTFS_UBIFS_MAXLEBCNT=41610\n')
if 'BR2_TARGET_ROOTFS_UBI=y\n' in configlines and \
'BR2_TARGET_ROOTFS_UBI_USE_CUSTOM_CONFIG=y\n' in configlines and \
'BR2_TARGET_ROOTFS_UBI_CUSTOM_CONFIG_FILE=""\n' in configlines:
configlines.remove('BR2_TARGET_ROOTFS_UBI_USE_CUSTOM_CONFIG=y\n')
configlines.remove('BR2_TARGET_ROOTFS_UBI_CUSTOM_CONFIG_FILE=""\n')
if 'BR2_TARGET_S500_BOOTLOADER=y\n' in configlines and \
'BR2_TARGET_S500_BOOTLOADER_BOARD=""\n' in configlines:
configlines.remove('BR2_TARGET_S500_BOOTLOADER=y\n')
configlines.remove('BR2_TARGET_S500_BOOTLOADER_BOARD=""\n')
if 'BR2_TARGET_TI_K3_R5_LOADER=y\n' in configlines and \
'BR2_TARGET_TI_K3_R5_LOADER_USE_DEFCONFIG=y\n' in configlines and \
'BR2_TARGET_TI_K3_R5_LOADER_BOARD_DEFCONFIG=""\n' in configlines:
return False
if 'BR2_TARGET_UBOOT=y\n' in configlines and \
'BR2_TARGET_UBOOT_BUILD_SYSTEM_KCONFIG=y\n' in configlines and \
'BR2_TARGET_UBOOT_USE_CUSTOM_CONFIG=y\n' in configlines and \
'BR2_TARGET_UBOOT_CUSTOM_CONFIG_FILE=""\n' in configlines:
configlines.remove('BR2_TARGET_UBOOT=y\n')
configlines.remove('BR2_TARGET_UBOOT_BUILD_SYSTEM_KCONFIG=y\n')
configlines.remove('BR2_TARGET_UBOOT_USE_CUSTOM_CONFIG=y\n')
configlines.remove('BR2_TARGET_UBOOT_CUSTOM_CONFIG_FILE=""\n')
if 'BR2_TARGET_UBOOT=y\n' in configlines and \
'BR2_TARGET_UBOOT_BUILD_SYSTEM_KCONFIG=y\n' in configlines and \
'BR2_TARGET_UBOOT_USE_DEFCONFIG=y\n' in configlines and \
'BR2_TARGET_UBOOT_BOARD_DEFCONFIG=""\n' in configlines:
configlines.remove('BR2_TARGET_UBOOT=y\n')
configlines.remove('BR2_TARGET_UBOOT_BUILD_SYSTEM_KCONFIG=y\n')
configlines.remove('BR2_TARGET_UBOOT_USE_DEFCONFIG=y\n')
configlines.remove('BR2_TARGET_UBOOT_BOARD_DEFCONFIG=""\n')
if 'BR2_TARGET_UBOOT=y\n' in configlines and \
'BR2_TARGET_UBOOT_BUILD_SYSTEM_LEGACY=y\n' in configlines and \
'BR2_TARGET_UBOOT_BOARDNAME=""\n' in configlines:
configlines.remove('BR2_TARGET_UBOOT=y\n')
configlines.remove('BR2_TARGET_UBOOT_BUILD_SYSTEM_LEGACY=y\n')
configlines.remove('BR2_TARGET_UBOOT_BOARDNAME=""\n')
if 'BR2_TOOLCHAIN_EXTERNAL=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_PREINSTALLED=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_PATH=""\n' in configlines:
configlines.remove('BR2_TOOLCHAIN_EXTERNAL=y\n')
configlines.remove('BR2_TOOLCHAIN_EXTERNAL_PREINSTALLED=y\n')
configlines.remove('BR2_TOOLCHAIN_EXTERNAL_PATH=""\n')
if 'BR2_ARCH_HAS_NO_TOOLCHAIN_BUILDROOT=y\n' in configlines:
return False
if 'BR2_TOOLCHAIN_EXTERNAL=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL=""\n' in configlines:
configlines.remove('BR2_TOOLCHAIN_EXTERNAL=y\n')
configlines.remove('BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y\n')
configlines.remove('BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y\n')
configlines.remove('BR2_TOOLCHAIN_EXTERNAL_URL=""\n')
if 'BR2_ARCH_HAS_NO_TOOLCHAIN_BUILDROOT=y\n' in configlines:
return False
if 'BR2_TARGET_MXS_BOOTLETS=y\n' in configlines and \
'BR2_TARGET_MXS_BOOTLETS_CUSTOM_BOARD=y\n' in configlines and \
'BR2_TARGET_MXS_BOOTLETS_CUSTOM_BOARD_NAME=""\n' in configlines:
configlines.remove('BR2_TARGET_MXS_BOOTLETS_CUSTOM_BOARD=y\n')
configlines.append('BR2_TARGET_MXS_BOOTLETS_STMP37xx=y\n')
configlines.remove('BR2_TARGET_MXS_BOOTLETS_CUSTOM_BOARD_NAME=""\n')
if 'BR2_TARGET_MXS_BOOTLETS=y\n' in configlines and \
'BR2_TARGET_MXS_BOOTLETS_CUSTOM_GIT=y\n' in configlines and \
'BR2_TARGET_MXS_BOOTLETS_CUSTOM_GIT_URL=""\n' in configlines:
configlines.remove('BR2_TARGET_MXS_BOOTLETS_CUSTOM_GIT=y\n')
configlines.append('BR2_TARGET_MXS_BOOTLETS_FREESCALE=y\n')
configlines.remove('BR2_TARGET_MXS_BOOTLETS_CUSTOM_GIT_URL=""\n')
if 'BR2_TARGET_MXS_BOOTLETS=y\n' in configlines and \
'BR2_TARGET_MXS_BOOTLETS_CUSTOM_TARBALL=y\n' in configlines and \
'BR2_TARGET_MXS_BOOTLETS_CUSTOM_TARBALL_URL=""\n' in configlines:
configlines.remove('BR2_TARGET_MXS_BOOTLETS_CUSTOM_TARBALL=y\n')
configlines.append('BR2_TARGET_MXS_BOOTLETS_FREESCALE=y\n')
configlines.remove('BR2_TARGET_MXS_BOOTLETS_CUSTOM_TARBALL_URL=""\n')
if 'BR2_TARGET_OPENSBI=y\n' in configlines and \
'BR2_TARGET_OPENSBI_CUSTOM_GIT=y\n' in configlines and \
'BR2_TARGET_OPENSBI_CUSTOM_REPO_URL=""\n' in configlines:
configlines.remove('BR2_TARGET_OPENSBI_CUSTOM_GIT=y\n')
configlines.append('BR2_TARGET_OPENSBI_LATEST_VERSION=y\n')
configlines.remove('BR2_TARGET_OPENSBI_CUSTOM_REPO_URL=""\n')
if 'BR2_TARGET_OPENSBI=y\n' in configlines and \
'BR2_TARGET_OPENSBI_CUSTOM_TARBALL=y\n' in configlines and \
'BR2_TARGET_OPENSBI_CUSTOM_TARBALL_LOCATION=""\n' in configlines:
configlines.remove('BR2_TARGET_OPENSBI_CUSTOM_TARBALL=y\n')
configlines.append('BR2_TARGET_OPENSBI_LATEST_VERSION=y\n')
configlines.remove('BR2_TARGET_OPENSBI_CUSTOM_TARBALL_LOCATION=""\n')
if 'BR2_TARGET_OPENSBI=y\n' in configlines and \
'BR2_TARGET_OPENSBI_CUSTOM_VERSION=y\n' in configlines and \
'BR2_TARGET_OPENSBI_CUSTOM_VERSION_VALUE=""\n' in configlines:
configlines.remove('BR2_TARGET_OPENSBI_CUSTOM_VERSION=y\n')
configlines.append('BR2_TARGET_OPENSBI_LATEST_VERSION=y\n')
configlines.remove('BR2_TARGET_OPENSBI_CUSTOM_VERSION_VALUE=""\n')
if 'BR2_PACKAGE_REFPOLICY=y\n' in configlines and \
'BR2_PACKAGE_REFPOLICY_CUSTOM_GIT=y\n' in configlines and \
'BR2_PACKAGE_REFPOLICY_CUSTOM_REPO_URL=""\n' in configlines:
configlines.remove('BR2_PACKAGE_REFPOLICY_CUSTOM_GIT=y\n')
configlines.append('BR2_PACKAGE_REFPOLICY_UPSTREAM_VERSION=y\n')
configlines.remove('BR2_PACKAGE_REFPOLICY_CUSTOM_REPO_URL=""\n')
if 'BR2_PACKAGE_XENOMAI=y\n' in configlines and \
'BR2_PACKAGE_XENOMAI_CUSTOM_GIT=y\n' in configlines and \
'BR2_PACKAGE_XENOMAI_REPOSITORY=""\n' in configlines:
configlines.remove('BR2_PACKAGE_XENOMAI_CUSTOM_GIT=y\n')
configlines.append('BR2_PACKAGE_XENOMAI_LATEST_VERSION=y\n')
configlines.remove('BR2_PACKAGE_XENOMAI_REPOSITORY=""\n')
if 'BR2_PACKAGE_XENOMAI=y\n' in configlines and \
'BR2_PACKAGE_XENOMAI_CUSTOM_TARBALL=y\n' in configlines and \
'BR2_PACKAGE_XENOMAI_CUSTOM_TARBALL_URL=""\n' in configlines:
configlines.remove('BR2_PACKAGE_XENOMAI_CUSTOM_TARBALL=y\n')
configlines.append('BR2_PACKAGE_XENOMAI_LATEST_VERSION=y\n')
configlines.remove('BR2_PACKAGE_XENOMAI_CUSTOM_TARBALL_URL=""\n')
if 'BR2_PACKAGE_XENOMAI=y\n' in configlines and \
'BR2_PACKAGE_XENOMAI_CUSTOM_VERSION=y\n' in configlines and \
'BR2_PACKAGE_XENOMAI_CUSTOM_VERSION_VALUE=""\n' in configlines:
configlines.remove('BR2_PACKAGE_XENOMAI_CUSTOM_VERSION=y\n')
configlines.append('BR2_PACKAGE_XENOMAI_LATEST_VERSION=y\n')
configlines.remove('BR2_PACKAGE_XENOMAI_CUSTOM_VERSION_VALUE=""\n')
if 'BR2_PACKAGE_XVISOR=y\n' in configlines and \
'BR2_PACKAGE_XVISOR_USE_CUSTOM_CONFIG=y\n' in configlines and \
'BR2_PACKAGE_XVISOR_CUSTOM_CONFIG_FILE=""\n' in configlines:
configlines.remove('BR2_PACKAGE_XVISOR_USE_CUSTOM_CONFIG=y\n')
configlines.append('BR2_PACKAGE_XVISOR_USE_DEFCONFIG=y\n')
configlines.remove('BR2_PACKAGE_XVISOR_CUSTOM_CONFIG_FILE=""\n')
# Don't build igh-ethercat driver as they are highly
# kernel-version specific
for opt in ['8139TOO', 'E100', 'E1000', 'E1000E', 'R8169']:
optstr = 'BR2_PACKAGE_IGH_ETHERCAT_%s=y\n' % opt
if optstr in configlines:
configlines.remove(optstr)
with open(configfile, "w+") as configf:
configf.writelines(configlines)
return True
async def gen_config(args):
    """Generate a new random configuration

    This function generates the configuration, by choosing a random
    toolchain configuration and then generating a random selection of
    packages.

    Returns 0 on success, 2 when the selected toolchain is not usable
    on this machine, 1 when no valid configuration could be produced,
    or the non-zero exit status of a failing 'make' invocation.
    """
    sysinfo = SystemInfo()
    if args.toolchains_csv:
        # Select a random toolchain configuration
        configs = get_toolchain_configs(args.toolchains_csv, args.buildrootdir)
        i = randint(0, len(configs) - 1)
        toolchainconfig = configs[i]
    else:
        toolchainconfig = []
    configlines = list(toolchainconfig)
    # Combine with the minimal configuration
    minimalconfigfile = os.path.join(args.buildrootdir,
                                     'support/config-fragments/minimal.config')
    with open(minimalconfigfile) as minimalf:
        configlines += minimalf.readlines()
    # Allow hosts with old certificates to download over https
    configlines.append("BR2_WGET=\"wget --passive-ftp -nd -t 3 --no-check-certificate\"\n")
    # Per-package folder
    if randint(0, 15) == 0:
        configlines.append("BR2_PER_PACKAGE_DIRECTORIES=y\n")
    # Amend the configuration with a few things. Each randint(0, N)
    # gives the option a 1-in-(N+1) chance of being enabled; the order
    # of the randint() calls is significant for reproducibility of the
    # random stream, so do not reorder them.
    if randint(0, 20) == 0:
        configlines.append("BR2_ENABLE_DEBUG=y\n")
    if randint(0, 20) == 0:
        configlines.append("BR2_ENABLE_RUNTIME_DEBUG=y\n")
    if randint(0, 1) == 0:
        configlines.append("BR2_INIT_BUSYBOX=y\n")
    elif randint(0, 15) == 0:
        configlines.append("BR2_INIT_SYSTEMD=y\n")
    elif randint(0, 10) == 0:
        configlines.append("BR2_ROOTFS_DEVICE_CREATION_DYNAMIC_EUDEV=y\n")
    if randint(0, 20) == 0:
        configlines.append("BR2_STATIC_LIBS=y\n")
    if randint(0, 20) == 0:
        configlines.append("BR2_PACKAGE_PYTHON3_PY_ONLY=y\n")
    if randint(0, 5) == 0:
        configlines.append("BR2_OPTIMIZE_2=y\n")
    if randint(0, 4) == 0:
        configlines.append("BR2_SYSTEM_ENABLE_NLS=y\n")
    if randint(0, 4) == 0:
        configlines.append("BR2_FORTIFY_SOURCE_2=y\n")
    # Randomly enable BR2_REPRODUCIBLE 10% of times
    # also enable tar filesystem images for testing
    if await sysinfo.has("diffoscope") and randint(0, 10) == 0:
        configlines.append("BR2_REPRODUCIBLE=y\n")
        configlines.append("BR2_TARGET_ROOTFS_TAR=y\n")
    # Write out the configuration file
    if not os.path.exists(args.outputdir):
        os.makedirs(args.outputdir)
    # When building in the default output/ directory, write .config in
    # the Buildroot tree itself, as a plain 'make' would do.
    if args.outputdir == os.path.abspath(os.path.join(args.buildrootdir, "output")):
        configfile = os.path.join(args.buildrootdir, ".config")
    else:
        configfile = os.path.join(args.outputdir, ".config")
    with open(configfile, "w+") as configf:
        configf.writelines(configlines)
    # Expand the fragment into a full configuration
    proc = await asyncio.create_subprocess_exec(
        "make", "O=%s" % args.outputdir, "-C", args.buildrootdir, "olddefconfig")
    ret = await proc.wait()
    if ret:
        return ret
    if not await is_toolchain_usable(configfile, toolchainconfig):
        return 2
    # Now, generate the random selection of packages, and fixup
    # things if needed.
    # Safe-guard, in case we can not quickly come to a valid
    # configuration: allow at most 100 (arbitrary) iterations.
    bounded_loop = 100
    while True:
        if bounded_loop == 0:
            print("ERROR: cannot generate random configuration after 100 iterations",
                  file=sys.stderr)
            return 1
        bounded_loop -= 1
        proc = await asyncio.create_subprocess_exec(
            "make", "O=%s" % args.outputdir, "-C", args.buildrootdir,
            "KCONFIG_SEED=0x%s" % hexlify(os.urandom(4)).decode("ascii").upper(),
            "KCONFIG_PROBABILITY=%d" % randint(1, 20),
            "randpackageconfig" if args.toolchains_csv else "randconfig")
        ret = await proc.wait()
        if ret:
            return ret
        # fixup_config() returns False when the configuration must be
        # re-rolled (e.g. unbuildable combination was selected).
        if await fixup_config(sysinfo, configfile):
            break
    # Normalize the fixed-up configuration, save the resulting
    # defconfig and pre-fetch the required downloads.
    proc = await asyncio.create_subprocess_exec(
        "make", "O=%s" % args.outputdir, "-C", args.buildrootdir, "olddefconfig")
    ret = await proc.wait()
    if ret:
        return ret
    proc = await asyncio.create_subprocess_exec(
        "make", "O=%s" % args.outputdir, "-C", args.buildrootdir, "savedefconfig")
    ret = await proc.wait()
    if ret:
        return ret
    proc = await asyncio.create_subprocess_exec(
        "make", "O=%s" % args.outputdir, "-C", args.buildrootdir, "dependencies")
    return await proc.wait()
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description="Generate a random configuration")
    parser.add_argument("--outputdir", "-o",
                        help="Output directory (relative to current directory)",
                        type=str, default='output')
    parser.add_argument("--buildrootdir", "-b",
                        help="Buildroot directory (relative to current directory)",
                        type=str, default='.')
    # --toolchains-csv and --no-toolchains-csv are mutually exclusive:
    # the latter stores False in the same 'toolchains_csv' destination,
    # which gen_config() treats as "generate a random toolchain".
    toolchains_csv = parser.add_mutually_exclusive_group(required=False)
    toolchains_csv.add_argument("--toolchains-csv",
                                dest="toolchains_csv",
                                help="Path of the toolchain configuration file",
                                type=str)
    toolchains_csv.add_argument("--no-toolchains-csv",
                                dest="toolchains_csv",
                                help="Generate random toolchain configuration",
                                action='store_false')
    parser.set_defaults(toolchains_csv="support/config-fragments/autobuild/toolchain-configs.csv")
    args = parser.parse_args()
    # We need the absolute path to use with O=, because the relative
    # path to the output directory here is not relative to the
    # Buildroot sources, but to the current directory.
    args.outputdir = os.path.abspath(args.outputdir)
    try:
        # asyncio.run() only exists since Python 3.7; fall back to the
        # event-loop API on older interpreters.
        if sys.version_info < (3, 7):
            loop = asyncio.get_event_loop()
            ret = loop.run_until_complete(gen_config(args))
        else:
            ret = asyncio.run(gen_config(args))
    except Exception:
        traceback.print_exc()
        parser.exit(1)
    parser.exit(ret)

View File

@ -0,0 +1,117 @@
#!/usr/bin/env python3
import argparse
import getdeveloperlib
import sys
def parse_args():
    """Parse the get-developers command line.

    At most one action option is expected; enforcing that is left to
    the caller (__main__).
    """
    cli = argparse.ArgumentParser()
    cli.add_argument('patches',
                     metavar='P',
                     type=argparse.FileType('r'),
                     nargs='*',
                     help='list of patches (use - to read patches from stdin)')
    cli.add_argument('-a',
                     dest='architecture',
                     action='store',
                     help='find developers in charge of this architecture')
    cli.add_argument('-p',
                     dest='package',
                     action='store',
                     help='find developers in charge of this package')
    cli.add_argument('-f',
                     dest='files',
                     nargs='*',
                     help='find developers in charge of these files')
    cli.add_argument('-c',
                     dest='check',
                     action='store_const',
                     const=True,
                     help='list files not handled by any developer')
    cli.add_argument('-e',
                     dest='email',
                     action='store_const',
                     const=True,
                     help='only list affected developer email addresses')
    cli.add_argument('-v',
                     dest='validate',
                     action='store_const',
                     const=True,
                     help='validate syntax of DEVELOPERS file')
    cli.add_argument('-d',
                     dest='filename',
                     action='store',
                     default=None,
                     help='override the default DEVELOPERS file (for debug)')
    return cli.parse_args()
def __main__():
    """Entry point: dispatch to exactly one of the supported actions."""
    args = parse_args()
    # Check that only one action is given
    action = 0
    if args.architecture is not None:
        action += 1
    if args.package is not None:
        action += 1
    if args.files:
        action += 1
    if args.check:
        action += 1
    if args.validate:
        action += 1
    if len(args.patches) != 0:
        action += 1
    if action > 1:
        print("Cannot do more than one action")
        return
    if action == 0:
        print("No action specified")
        return
    devs = getdeveloperlib.parse_developers(args.filename)
    if devs is None:
        sys.exit(1)
    # Validation is done by parse_developers() above and we error out
    # if the validation didn't work, so if we reach here, it means
    # validation passed, so we can simply bail out in success.
    if args.validate:
        return
    # Handle the check action
    # NOTE(review): no 'return' here; harmless since the other action
    # branches cannot also be active (action count is exactly 1).
    if args.check:
        files = getdeveloperlib.check_developers(devs)
        for f in files:
            print(f)
    # Handle the architecture action
    if args.architecture is not None:
        for dev in devs:
            if args.architecture in dev.architectures:
                print(dev.name)
        return
    # Handle the package action
    if args.package is not None:
        for dev in devs:
            if args.package in dev.packages:
                print(dev.name)
        return
    # Handle the files action: a developer is printed once as soon as
    # one of the given files matches.
    if args.files is not None:
        for dev in devs:
            for f in args.files:
                if dev.hasfile(f):
                    print(dev.name)
                    break
    # Handle the patches action
    if len(args.patches) != 0:
        (files, infras) = getdeveloperlib.analyze_patches(args.patches)
        matching_devs = set()
        for dev in devs:
            # See if we have developers matching by package name
            for f in files:
                if dev.hasfile(f):
                    matching_devs.add(dev.name)
            # See if we have developers matching by package infra
            for i in infras:
                if i in dev.infras:
                    matching_devs.add(dev.name)
        if args.email:
            for dev in matching_devs:
                print(dev)
        else:
            result = "--to buildroot@buildroot.org"
            for dev in matching_devs:
                result += " --cc \"%s\"" % dev
            # NOTE(review): this condition is always true, since result
            # is initialized to a non-empty string above.
            if result != "":
                print("git send-email %s" % result)


__main__()

View File

@ -0,0 +1,293 @@
from io import open
import os
import re
import glob
import subprocess
import sys
import unittest
brpath = os.path.normpath(os.path.join(os.path.dirname(__file__), ".."))
#
# Patch parsing functions
#
FIND_INFRA_IN_PATCH = re.compile(r"^\+\$\(eval \$\((host-)?([^-]*)-package\)\)$")


def analyze_patch(patch):
    """Parse one patch and return the list of files modified, added or
    removed by the patch, together with the set of package
    infrastructures the patch introduces (if any).

    patch -- iterable over the lines of the patch
    Returns (files, infras), both sets of strings.
    """
    files = set()
    infras = set()
    for line in patch:
        # If the patch is adding a package, find which infra it is
        m = FIND_INFRA_IN_PATCH.match(line)
        if m:
            infras.add(m.group(2))
        # Only "--- a/<file>" and "+++ b/<file>" header lines name the
        # touched files; everything else is diff content.
        if not line.startswith("+++ ") and not line.startswith("--- "):
            continue
        # Drop everything up to and including the first '/' (the "a/"
        # or "b/" prefix); "dev/null" appears for added/removed files
        # and is not a real path.
        # (The original code had a no-op bare 'line.strip()' statement
        # here whose result was discarded; it has been removed.)
        fname = line[line.find("/") + 1:].strip()
        if fname == "dev/null":
            continue
        files.add(fname)
    return (files, infras)
FIND_INFRA_IN_MK = re.compile(r"^\$\(eval \$\((host-)?([^-]*)-package\)\)$")


def fname_get_package_infra(fname):
    """Return the package infrastructure used by the Buildroot .mk file
    fname, or None when fname is not an existing package .mk file."""
    if not fname.endswith(".mk") or not os.path.exists(fname):
        return None
    with open(fname, "r") as mkfile:
        for raw in mkfile:
            match = FIND_INFRA_IN_MK.match(raw.strip())
            if match:
                return match.group(2)
    return None
def analyze_patches(patches):
    """Parse a list of patches and return the union of the files they
    modify, add or remove, together with the union of the package
    infrastructures they use (if any)."""
    touched_files = set()
    used_infras = set()
    for patch in patches:
        files, infras = analyze_patch(patch)
        touched_files.update(files)
        used_infras.update(infras)
    return (touched_files, used_infras)
#
# Unit-test parsing functions
#
def get_all_test_cases(suite):
"""Generate all test-cases from a given test-suite.
:return: (test.module, test.name)"""
if issubclass(type(suite), unittest.TestSuite):
for test in suite:
for res in get_all_test_cases(test):
yield res
else:
yield (suite.__module__, suite.__class__.__name__)
def list_unittests():
    """Use the unittest module to retrieve all test cases found below
    the support/testing directory of the Buildroot tree."""
    discovered = unittest.TestLoader().discover(
        os.path.join(brpath, "support", "testing"))
    tests = {}
    for module, test in get_all_test_cases(discovered):
        module_path = os.path.join("support", "testing", *module.split('.'))
        tests.setdefault(module_path, []).append('%s.%s' % (module, test))
    return tests


# Mapping of test-file path -> list of test case names; populated by
# parse_developers() through list_unittests().
unittests = {}
#
# DEVELOPERS file parsing functions
#
class Developer:
    """One entry of the DEVELOPERS file: a developer's name plus the
    file patterns they handle, and the packages, architectures,
    infrastructures, runtime tests and defconfigs derived from those
    patterns."""

    def __init__(self, name, files):
        self.name = name
        self.files = files
        self.packages = parse_developer_packages(files)
        self.architectures = parse_developer_architectures(files)
        self.infras = parse_developer_infras(files)
        self.runtime_tests = parse_developer_runtime_tests(files)
        self.defconfigs = parse_developer_defconfigs(files)

    def hasfile(self, f):
        """Return True when f falls under one of this developer's file
        patterns (prefix match)."""
        return any(f.startswith(prefix) for prefix in self.files)

    def __repr__(self):
        short_name = "'" + self.name.split(' <')[0][:20] + "'"
        categories = (
            ('files', self.files),
            ('pkgs', self.packages),
            ('archs', self.architectures),
            ('infras', self.infras),
            ('tests', self.runtime_tests),
            ('defconfigs', self.defconfigs),
        )
        things = ['{} {}'.format(len(items), label)
                  for label, items in categories if len(items)]
        if things:
            return 'Developer <{} ({})>'.format(short_name, ', '.join(things))
        return 'Developer <' + short_name + '>'
def parse_developer_packages(fnames):
    """Travel through the Buildroot source tree to find which packages
    are implemented by the given file patterns, and return the set of
    those package names."""
    packages = set()
    for pattern in fnames:
        for root, _dirs, files in os.walk(os.path.join(brpath, pattern)):
            for basename in files:
                # Only .mk files that declare a package infra count.
                if fname_get_package_infra(os.path.join(root, basename)):
                    packages.add(os.path.splitext(basename)[0])
    return packages
def parse_arches_from_config_in(fname):
    """Given a path to an arch/Config.in.* file, parse it and return
    the set of BR2_ARCH values this architecture provides."""
    arch_values = set()
    with open(fname, "r") as config_in:
        # 'default "<arch>"' lines are only collected while we are
        # inside a 'config BR2_ARCH' entry; any other line ends it.
        collecting = False
        for raw in config_in:
            stripped = raw.strip()
            if stripped == "config BR2_ARCH":
                collecting = True
                continue
            if not collecting:
                continue
            match = re.match(r"^\s*default \"([^\"]*)\".*", stripped)
            if match:
                arch_values.add(match.group(1))
            else:
                collecting = False
    return arch_values
def parse_developer_architectures(fnames):
    """Find the 'arch/Config.in.*' entries among the given file names,
    and use them to determine which architectures a developer is
    working on; return the set of BR2_ARCH values."""
    arches = set()
    for fname in fnames:
        if re.match(r"^.*/arch/Config\.in\..*$", fname):
            arches |= parse_arches_from_config_in(fname)
    return arches
def parse_developer_infras(fnames):
    """Return the set of package infrastructure names (from
    package/pkg-<infra>.mk entries) among the given file names."""
    return {m.group(1)
            for m in (re.match(r"^package/pkg-([^.]*).mk$", fname)
                      for fname in fnames)
            if m}
def parse_developer_defconfigs(fnames):
    """Given a list of file names, return the set of config names
    corresponding to defconfigs."""
    configs = set()
    for fname in fnames:
        if fname.endswith('_defconfig'):
            # Strip the '_defconfig' suffix (10 chars) and the leading
            # directories.
            configs.add(os.path.basename(fname[:-10]))
    return configs
def parse_developer_runtime_tests(fnames):
    """Given a list of file names, return the set of runtime test names
    corresponding to those files."""
    # Expand directories into the files they contain, recursively.
    candidates = []
    for fname in fnames:
        if os.path.isdir(fname):
            for root, _dirs, files in os.walk(os.path.join(brpath, fname)):
                candidates.extend(os.path.join(root, f) for f in files)
        else:
            candidates.append(fname)
    # Keep the tests whose module path is known to the unittest loader.
    runtimes = set()
    for path in candidates:
        stem = os.path.splitext(path)[0]
        if stem in unittests:
            runtimes |= set(unittests[stem])
    return runtimes
def parse_developers(filename=None):
    """Parse the DEVELOPERS file and return a list of Developer objects.

    filename -- alternative DEVELOPERS file path (debug); defaults to
                the DEVELOPERS file at the top of the Buildroot tree.
    Returns the list of Developer objects, or None on syntax error
    (an error message is printed on stderr in that case).
    """
    developers = []
    linen = 0
    # Refresh the module-level test-case index used by
    # parse_developer_runtime_tests().
    global unittests
    unittests = list_unittests()
    developers_fname = filename or os.path.join(brpath, 'DEVELOPERS')
    with open(developers_fname, mode='r', encoding='utf_8') as f:
        files = []
        name = None
        for line in f:
            linen += 1
            line = line.strip()
            if line.startswith("#"):
                continue
            elif line.startswith("N:"):
                # A new N: line while the previous entry is still open
                # means a missing blank separator line.
                # NOTE(review): the message points at linen - 1, i.e.
                # the line before the offending N: line — presumably
                # where the missing blank line should be; confirm.
                if name is not None or len(files) != 0:
                    print("Syntax error in DEVELOPERS file, line %d" % (linen - 1),
                          file=sys.stderr)
                    return None
                name = line[2:].strip()
            elif line.startswith("F:"):
                fname = line[2:].strip()
                dev_files = glob.glob(os.path.join(brpath, fname))
                if len(dev_files) == 0:
                    print("WARNING: '%s' doesn't match any file, line %d" % (fname, linen),
                          file=sys.stderr)
                # NOTE: this inner 'f' shadows the outer file handle;
                # harmless, as the outer loop already holds its iterator.
                for f in dev_files:
                    dev_file = os.path.relpath(f, brpath)
                    dev_file = dev_file.replace(os.sep, '/')  # force unix sep
                    if f[-1] == '/':  # relpath removes the trailing /
                        dev_file = dev_file + '/'
                    files.append(dev_file)
            elif line == "":
                # Blank line terminates the current developer entry.
                if not name:
                    continue
                developers.append(Developer(name, files))
                files = []
                name = None
            else:
                print("Syntax error in DEVELOPERS file, line %d: '%s'" % (linen, line),
                      file=sys.stderr)
                return None
    # handle last developer
    if name is not None:
        developers.append(Developer(name, files))
    return developers
def check_developers(developers, basepath=None):
    """Look at the list of files versioned in Buildroot, and return the
    list of files that are not handled by any developer.

    basepath -- root of the git checkout to inspect; defaults to the
                current working directory.
    """
    if basepath is None:
        basepath = os.getcwd()
    cmd = ["git", "--git-dir", os.path.join(basepath, ".git"), "ls-files"]
    tracked = subprocess.check_output(cmd).decode(sys.stdout.encoding).strip().split("\n")
    return [f for f in tracked
            if not any(dev.hasfile(f) for dev in developers)]

View File

@ -0,0 +1,53 @@
This directory contains various useful scripts and tools for working
with Buildroot. You need not add this directory in your PATH to use
any of those tools, but you may do so if you want.
brmake
a script that can be run instead of make, that prepends the date in
front of each line, redirects all of the build output to a file
('br.log' in the current directory), and just outputs the Buildroot
messages (those lines starting with >>>) on stdout.
Do not run this script for interactive configuration (e.g. menuconfig)
or on an unconfigured directory. The output is redirected so you will see
nothing.
check-package
a script that checks the coding style across the buildroot tree. It
checks package's Config.in and .mk files, runs shellcheck for all shell
scripts, flake8 for python files, checks for typos, etc.
It checks the .checkpackageignore file if errors should be ignored and
errors if there's a file listed that doesn't produce an error.
docker-run
a script that runs a command (like make check-package) inside the
buildroot CI docker container; pass no command to get an interactive
shell.
genrandconfig
a script that generates a random configuration, used by the autobuilders
(http://autobuild.buildroot.org). It selects a random toolchain from
support/config-fragments/autobuild and randomly selects packages to build.
get-developers
a script to return the list of people interested in a specific part
of Buildroot, so they can be Cc:ed on a mail. Accepts a patch as
input, a package name or an architecture name.
scancpan
a script to create a Buildroot package by scanning a CPAN module
description.
scanpypi
a script to create a Buildroot package by scanning a PyPI package
description.
size-stats-compare
a script to compare the rootfs size between two different Buildroot
configurations. This can be used to identify the size impact of
a specific option, of a set of specific options, or of an update
to a newer Buildroot version...
test-pkg
a script that tests a specific package against a set of various
toolchains, with the goal to detect toolchain-related dependencies
(wchar, threads...)

1000
buildroot-2024.02/utils/scancpan Executable file

File diff suppressed because it is too large Load Diff

836
buildroot-2024.02/utils/scanpypi Executable file
View File

@ -0,0 +1,836 @@
#!/usr/bin/env python3
"""
Utility for building Buildroot packages for existing PyPI packages
Any package built by scanpypi should be manually checked for
errors.
"""
import argparse
import json
import sys
import os
import shutil
import tarfile
import zipfile
import errno
import hashlib
import re
import textwrap
import tempfile
import imp
from functools import wraps
import six.moves.urllib.request
import six.moves.urllib.error
import six.moves.urllib.parse
from six.moves import map
from six.moves import zip
from six.moves import input
if six.PY2:
import StringIO
else:
import io
BUF_SIZE = 65536
try:
import spdx_lookup as liclookup
except ImportError:
# spdx_lookup is not installed
print('spdx_lookup module is not installed. This can lead to an '
'inaccurate licence detection. Please install it via\n'
'pip install spdx_lookup')
liclookup = None
def toml_load(f):
    """Parse the TOML file at path f and return its contents as a dict.

    Tries, in order: the stdlib tomllib (Python >= 3.11), the tomli
    package, pip's vendored copy of tomli, and finally the legacy toml
    package. The fallback order matters: the binary file handle is
    reused across attempts, rewound and wrapped in a text layer for the
    readers that need text input. Raises the tomli ImportError when no
    backend is available.
    """
    with open(f, 'rb') as fh:
        ex = None
        # Try standard library tomllib first
        try:
            from tomllib import load
            return load(fh)
        except ImportError:
            pass
        # Try regular tomli next
        try:
            from tomli import load
            return load(fh)
        except ImportError as e:
            # Remember this error: it is the one re-raised if every
            # other backend is missing too.
            ex = e
        # Try pip's vendored tomli
        try:
            from pip._vendor.tomli import load
            try:
                return load(fh)
            except TypeError:
                # Fallback to handle older version (expects a text
                # stream rather than a binary one)
                try:
                    fh.seek(0)
                    w = io.TextIOWrapper(fh, encoding="utf8", newline="")
                    return load(w)
                finally:
                    # detach() so closing the wrapper does not close fh
                    w.detach()
        except ImportError:
            pass
        # Try regular toml last
        try:
            from toml import load
            fh.seek(0)
            w = io.TextIOWrapper(fh, encoding="utf8", newline="")
            try:
                return load(w)
            finally:
                w.detach()
        except ImportError:
            pass
        print('This package needs tomli')
        raise ex
def setup_decorator(func, method):
    """
    Decorator for distutils.core.setup and setuptools.setup.

    Returns a replacement for setup() that records the keyword
    arguments it is called with into BuildrootPackage.setup_args,
    keyed by package name, and tags them with 'method'. The wrapped
    func is deliberately never invoked: executing a package's setup.py
    must only capture its metadata, not perform an installation.

    Keyword arguments:
    func -- either setuptools.setup or distutils.core.setup
    method -- either 'setuptools' or 'distutils'
    """
    @wraps(func)
    def closure(*args, **kwargs):
        # 'name' is mandatory for any setup() call; use it as the key.
        pkg_name = kwargs['name']
        kwargs['method'] = method
        BuildrootPackage.setup_args[pkg_name] = kwargs
    return closure
# monkey patch: replace both setup() entry points so that executing a
# package's setup.py records its arguments instead of installing.
import setuptools  # noqa E402
setuptools.setup = setup_decorator(setuptools.setup, 'setuptools')
import distutils  # noqa E402
# NOTE(review): this wraps setuptools.setup (already decorated above)
# rather than distutils.core.setup — presumably harmless since the
# closure never calls through to the wrapped function; confirm.
distutils.core.setup = setup_decorator(setuptools.setup, 'distutils')
def find_file_upper_case(filenames, path='./'):
    """
    Generator yielding the relative path of every file below path whose
    upper-cased name appears in filenames.

    Keyword arguments:
    filenames -- List of (upper-case) filenames to be found
    path -- Path to the directory to search
    """
    for dirpath, _dirs, entries in os.walk(path):
        for entry in entries:
            if entry.upper() in filenames:
                yield os.path.join(dirpath, entry)
def pkg_buildroot_name(pkg_name):
    """
    Return the Buildroot package name for the PyPI package pkg_name.

    Lowers the name, strips every character that is not alphanumeric,
    '_' or '-', maps '_' to '-', and prepends the 'python-' prefix
    when not already present.

    Keyword arguments:
    pkg_name -- String to rename
    """
    cleaned = re.sub(r'[^\w-]', '', pkg_name.lower()).replace('_', '-')
    if cleaned and not cleaned.startswith('python-'):
        cleaned = 'python-' + cleaned
    return cleaned
class DownloadFailed(Exception):
    """Raised when no source archive for the package could be downloaded."""
    pass
class BuildrootPackage():
"""This class's methods are not meant to be used individually please
use them in the correct order:
__init__
download_package
extract_package
load_module
get_requirements
create_package_mk
create_hash_file
create_config_in
"""
setup_args = {}
def __init__(self, real_name, pkg_folder):
self.real_name = real_name
self.buildroot_name = pkg_buildroot_name(self.real_name)
self.pkg_dir = os.path.join(pkg_folder, self.buildroot_name)
self.mk_name = self.buildroot_name.upper().replace('-', '_')
self.as_string = None
self.md5_sum = None
self.metadata = None
self.metadata_name = None
self.metadata_url = None
self.pkg_req = None
self.setup_metadata = None
self.tmp_extract = None
self.used_url = None
self.filename = None
self.url = None
self.version = None
self.license_files = []
def fetch_package_info(self):
"""
Fetch a package's metadata from the python package index
"""
self.metadata_url = 'https://pypi.org/pypi/{pkg}/json'.format(
pkg=self.real_name)
try:
pkg_json = six.moves.urllib.request.urlopen(self.metadata_url).read().decode()
except six.moves.urllib.error.HTTPError as error:
print('ERROR:', error.getcode(), error.msg, file=sys.stderr)
print('ERROR: Could not find package {pkg}.\n'
'Check syntax inside the python package index:\n'
'https://pypi.python.org/pypi/ '
.format(pkg=self.real_name))
raise
except six.moves.urllib.error.URLError:
print('ERROR: Could not find package {pkg}.\n'
'Check syntax inside the python package index:\n'
'https://pypi.python.org/pypi/ '
.format(pkg=self.real_name))
raise
self.metadata = json.loads(pkg_json)
self.version = self.metadata['info']['version']
self.metadata_name = self.metadata['info']['name']
def download_package(self):
"""
Download a package using metadata from pypi
"""
download = None
try:
self.metadata['urls'][0]['filename']
except IndexError:
print(
'Non-conventional package, ',
'please check carefully after creation')
self.metadata['urls'] = [{
'packagetype': 'sdist',
'url': self.metadata['info']['download_url'],
'digests': None}]
# In this case, we can't get the name of the downloaded file
# from the pypi api, so we need to find it, this should work
urlpath = six.moves.urllib.parse.urlparse(
self.metadata['info']['download_url']).path
# urlparse().path give something like
# /path/to/file-version.tar.gz
# We use basename to remove /path/to
self.metadata['urls'][0]['filename'] = os.path.basename(urlpath)
for download_url in self.metadata['urls']:
if 'bdist' in download_url['packagetype']:
continue
try:
print('Downloading package {pkg} from {url}...'.format(
pkg=self.real_name, url=download_url['url']))
download = six.moves.urllib.request.urlopen(download_url['url'])
except six.moves.urllib.error.HTTPError as http_error:
download = http_error
else:
self.used_url = download_url
self.as_string = download.read()
if not download_url['digests']['md5']:
break
self.md5_sum = hashlib.md5(self.as_string).hexdigest()
if self.md5_sum == download_url['digests']['md5']:
break
if download is None:
raise DownloadFailed('Failed to download package {pkg}: '
'No source archive available'
.format(pkg=self.real_name))
elif download.__class__ == six.moves.urllib.error.HTTPError:
raise download
self.filename = self.used_url['filename']
self.url = self.used_url['url']
def check_archive(self, members):
"""
Check archive content before extracting
Keyword arguments:
members -- list of archive members
"""
# Protect against https://github.com/snyk/zip-slip-vulnerability
# Older python versions do not validate that the extracted files are
# inside the target directory. Detect and error out on evil paths
evil = [e for e in members if os.path.relpath(e).startswith(('/', '..'))]
if evil:
print('ERROR: Refusing to extract {} with suspicious members {}'.format(
self.filename, evil))
sys.exit(1)
def extract_package(self, tmp_path):
"""
Extract the package contents into a directrory
Keyword arguments:
tmp_path -- directory where you want the package to be extracted
"""
if six.PY2:
as_file = StringIO.StringIO(self.as_string)
else:
as_file = io.BytesIO(self.as_string)
if self.filename[-3:] == 'zip':
with zipfile.ZipFile(as_file) as as_zipfile:
tmp_pkg = os.path.join(tmp_path, self.buildroot_name)
try:
os.makedirs(tmp_pkg)
except OSError as exception:
if exception.errno != errno.EEXIST:
print("ERROR: ", exception.strerror, file=sys.stderr)
return
print('WARNING:', exception.strerror, file=sys.stderr)
print('Removing {pkg}...'.format(pkg=tmp_pkg))
shutil.rmtree(tmp_pkg)
os.makedirs(tmp_pkg)
self.check_archive(as_zipfile.namelist())
as_zipfile.extractall(tmp_pkg)
pkg_filename = self.filename.split(".zip")[0]
else:
with tarfile.open(fileobj=as_file) as as_tarfile:
tmp_pkg = os.path.join(tmp_path, self.buildroot_name)
try:
os.makedirs(tmp_pkg)
except OSError as exception:
if exception.errno != errno.EEXIST:
print("ERROR: ", exception.strerror, file=sys.stderr)
return
print('WARNING:', exception.strerror, file=sys.stderr)
print('Removing {pkg}...'.format(pkg=tmp_pkg))
shutil.rmtree(tmp_pkg)
os.makedirs(tmp_pkg)
self.check_archive(as_tarfile.getnames())
as_tarfile.extractall(tmp_pkg)
pkg_filename = self.filename.split(".tar")[0]
tmp_extract = '{folder}/{name}'
self.tmp_extract = tmp_extract.format(
folder=tmp_pkg,
name=pkg_filename)
def load_setup(self):
    """
    Loads the corresponding setup and store its metadata
    """
    # Run the package's setup.py from inside its extracted tree, with the
    # tree on sys.path so its local imports resolve.
    current_dir = os.getcwd()
    os.chdir(self.tmp_extract)
    sys.path.insert(0, self.tmp_extract)
    try:
        # Import setup.py as __main__ so its setup() call executes;
        # presumably setup() is monkey-patched elsewhere in this script to
        # record its keyword arguments into self.setup_args (see the
        # "monkey path" remark in main()) — TODO confirm.
        # NOTE(review): the 'imp' module is deprecated and removed in
        # Python 3.12; migrating to importlib would be needed there.
        s_file, s_path, s_desc = imp.find_module('setup', [self.tmp_extract])
        imp.load_module('__main__', s_file, s_path, s_desc)
        # PyPI names use '-' and '_' interchangeably; normalise
        # self.metadata_name to whichever spelling setup() actually used.
        if self.metadata_name in self.setup_args:
            pass
        elif self.metadata_name.replace('_', '-') in self.setup_args:
            self.metadata_name = self.metadata_name.replace('_', '-')
        elif self.metadata_name.replace('-', '_') in self.setup_args:
            self.metadata_name = self.metadata_name.replace('-', '_')
        try:
            self.setup_metadata = self.setup_args[self.metadata_name]
        except KeyError:
            # This means setup was not called
            print('ERROR: Could not determine package metadata for {pkg}.\n'
                  .format(pkg=self.real_name))
            raise
    finally:
        # Always restore the working directory and import path.
        os.chdir(current_dir)
        sys.path.remove(self.tmp_extract)
def load_pyproject(self):
    """
    Loads the corresponding pyproject.toml and store its metadata

    Reads the [project] table for name/metadata and the [build-system]
    table to classify the build method ('flit', 'pep517' or 'unknown').
    Raises FileNotFoundError when the tree has no pyproject.toml and
    KeyError when the metadata cannot be determined.
    """
    current_dir = os.getcwd()
    os.chdir(self.tmp_extract)
    sys.path.insert(0, self.tmp_extract)
    # Fixed: restore cwd and sys.path in a finally block, as load_setup()
    # does. Previously a FileNotFoundError/KeyError escaped before the
    # chdir/remove lines ran, leaving the process in the extracted tree
    # and the tree on sys.path for all subsequent packages.
    try:
        pyproject_data = toml_load('pyproject.toml')
        try:
            self.setup_metadata = pyproject_data.get('project', {})
            self.metadata_name = self.setup_metadata.get('name', self.real_name)
            build_system = pyproject_data.get('build-system', {})
            build_backend = build_system.get('build-backend', None)
            if build_backend and build_backend == 'flit_core.buildapi':
                self.setup_metadata['method'] = 'flit'
            elif build_system.get('backend-path', None):
                # An in-tree backend implies a generic PEP 517 build
                self.setup_metadata['method'] = 'pep517'
            else:
                self.setup_metadata['method'] = 'unknown'
        except KeyError:
            print('ERROR: Could not determine package metadata for {pkg}.\n'
                  .format(pkg=self.real_name))
            raise
    finally:
        os.chdir(current_dir)
        sys.path.remove(self.tmp_extract)
def get_requirements(self, pkg_folder):
    """
    Retrieve dependencies from the metadata found in the setup.py script of
    a pypi package.

    Keyword Arguments:
    pkg_folder -- location of the already created packages

    Returns the set of (PyPI-named) requirements for which no buildroot
    package directory exists yet; also stores the buildroot-named list in
    self.pkg_req (or None when the metadata has no install_requires).
    """
    if 'install_requires' not in self.setup_metadata:
        self.pkg_req = None
        return set()
    # Keep only the bare distribution name of each requirement specifier
    # (drop version constraints, extras, markers...).
    raw_names = [re.sub(r'([-.\w]+).*', r'\1', spec)
                 for spec in self.setup_metadata['install_requires']]
    # get rid of commented lines and also strip the package strings
    names = {entry.strip() for entry in raw_names
             if len(entry) > 0 and entry[0] != '#'}
    # Pair each PyPI name with its buildroot package name; both spellings
    # are needed when checking whether the dependency already exists or is
    # already queued for download.
    self.pkg_req = list(map(pkg_buildroot_name, names))
    pairs = list(zip(names, self.pkg_req))
    missing = set(pypi_name for pypi_name, br_name in pairs
                  if not os.path.isdir(br_name))
    return missing
def __create_mk_header(self):
    """Return the comment banner lines that open the <package_name>.mk file."""
    bar = '#' * 80 + '\n'
    return [
        bar,
        '#\n',
        '# {name}\n'.format(name=self.buildroot_name),
        '#\n',
        bar,
        '\n',
    ]
def __create_mk_download_info(self):
    """
    Create the lines refering to the download information of the
    <package_name>.mk file: <PKG>_VERSION, optionally <PKG>_SOURCE,
    and <PKG>_SITE.
    """
    lines = []
    version_line = '{name}_VERSION = {version}\n'.format(
        name=self.mk_name,
        version=self.version)
    lines.append(version_line)
    if self.buildroot_name != self.real_name:
        # The buildroot name differs from the PyPI name, so the default
        # _SOURCE guess would be wrong: spell out the tarball explicitly,
        # with the version substituted by the make variable.
        targz = self.filename.replace(
            self.version,
            '$({name}_VERSION)'.format(name=self.mk_name))
        # Fixed: the format string had lost its {filename} placeholder, so
        # the generated .mk contained literal junk instead of the tarball
        # name (the filename= argument was passed but never used).
        targz_line = '{name}_SOURCE = {filename}\n'.format(
            name=self.mk_name,
            filename=targz)
        lines.append(targz_line)
    if self.filename not in self.url:
        # Sometimes the filename is in the url, sometimes it's not
        site_url = self.url
    else:
        site_url = self.url[:self.url.find(self.filename)]
    site_line = '{name}_SITE = {url}'.format(name=self.mk_name,
                                             url=site_url)
    # Drop a trailing '/' so the .mk stays tidy
    site_line = site_line.rstrip('/') + '\n'
    lines.append(site_line)
    return lines
def __create_mk_setup(self):
    """
    Create the line refering to the setup method of the package of the
    <package_name>.mk file

    There are two things you can use to make an installer
    for a python package: distutils or setuptools
    distutils comes with python but does not support dependencies.
    distutils is mostly still there for backward support.
    setuptools is what smart people use,
    but it is not shipped with python :(
    """
    return ['{name}_SETUP_TYPE = {method}\n'.format(
        name=self.mk_name,
        method=self.setup_metadata['method'])]
def __get_license_names(self, license_files):
    """
    Try to determine the related license name.

    There are two possibilities. Either the script tries to
    get license name from package's metadata or, if spdx_lookup
    package is available, the script compares license files with
    SPDX database.
    """
    license_line = ''
    if liclookup is None:
        # No spdx_lookup available: map the PyPI trove classifiers to
        # SPDX-style identifiers.
        license_dict = {
            'Apache Software License': 'Apache-2.0',
            'BSD License': 'FIXME: please specify the exact BSD version',
            'European Union Public Licence 1.0': 'EUPL-1.0',
            'European Union Public Licence 1.1': 'EUPL-1.1',
            "GNU General Public License": "GPL",
            "GNU General Public License v2": "GPL-2.0",
            "GNU General Public License v2 or later": "GPL-2.0+",
            "GNU General Public License v3": "GPL-3.0",
            "GNU General Public License v3 or later": "GPL-3.0+",
            "GNU Lesser General Public License v2": "LGPL-2.1",
            "GNU Lesser General Public License v2 or later": "LGPL-2.1+",
            "GNU Lesser General Public License v3": "LGPL-3.0",
            "GNU Lesser General Public License v3 or later": "LGPL-3.0+",
            "GNU Library or Lesser General Public License": "LGPL-2.0",
            "ISC License": "ISC",
            "MIT License": "MIT",
            "Mozilla Public License 1.0": "MPL-1.0",
            "Mozilla Public License 1.1": "MPL-1.1",
            "Mozilla Public License 2.0": "MPL-2.0",
            "Zope Public License": "ZPL"
        }
        # Pull the license name out of "License :: ... :: <name>" classifiers
        regexp = re.compile(r'^License :* *.* *:+ (.*)( \(.*\))?$')
        classifiers_licenses = [regexp.sub(r"\1", lic)
                                for lic in self.metadata['info']['classifiers']
                                if regexp.match(lic)]
        licenses = [license_dict[x] if x in license_dict else x for x in classifiers_licenses]
        if not len(licenses):
            # No usable classifier: fall back to the free-form 'license'
            # metadata field.
            # Fixed: assign the fallback *before* printing the warning, so
            # the message shows the license actually used instead of an
            # empty string.
            licenses = [self.metadata['info']['license']]
            print('WARNING: License has been set to "{license}". It is most'
                  ' likely wrong, please change it if need be'.format(
                      license=', '.join(licenses)))
        licenses = set(licenses)
        license_line = '{name}_LICENSE = {license}\n'.format(
            name=self.mk_name,
            license=', '.join(licenses))
    else:
        # spdx_lookup is available: identify each license file against the
        # SPDX database, accepting only confident matches.
        license_names = []
        for license_file in license_files:
            with open(license_file) as lic_file:
                match = liclookup.match(lic_file.read())
            if match is not None and match.confidence >= 90.0:
                license_names.append(match.license.id)
            else:
                license_names.append("FIXME: license id couldn't be detected")
        license_names = set(license_names)
        if len(license_names) > 0:
            license_line = ('{name}_LICENSE ='
                            ' {names}\n'.format(
                                name=self.mk_name,
                                names=', '.join(license_names)))
    return license_line
def __create_mk_license(self):
    """
    Create the lines referring to the package's license informations of the
    <package_name>.mk file

    The license's files are found by searching the package (case insensitive)
    for files named license, license.txt etc. If more than one license file
    is found, the user is asked to select which ones he wants to use.
    """
    lines = []
    filenames = ['LICENCE', 'LICENSE', 'LICENSE.MD', 'LICENSE.RST',
                 'LICENCE.TXT', 'LICENSE.TXT', 'COPYING', 'COPYING.TXT']
    self.license_files = list(find_file_upper_case(filenames, self.tmp_extract))
    lines.append(self.__get_license_names(self.license_files))
    # Paths relative to the extracted package root (strip the leading '/')
    license_files = [license.replace(self.tmp_extract, '')[1:]
                     for license in self.license_files]
    if len(license_files) > 0:
        if len(license_files) > 1:
            print('More than one file found for license:',
                  ', '.join(license_files))
        license_file_line = ('{name}_LICENSE_FILES ='
                             ' {files}\n'.format(
                                 name=self.mk_name,
                                 files=' '.join(license_files)))
        lines.append(license_file_line)
    else:
        print('WARNING: No license file found,'
              ' please specify it manually afterwards')
        license_file_line = '# No license file found\n'
        # Fixed: this placeholder comment was built but never added to the
        # output, so the generated .mk silently lacked any license-file note.
        lines.append(license_file_line)
    return lines
def __create_mk_requirements(self):
    """
    Create the lines referring to the dependencies of the of the
    <package_name>.mk file

    Keyword Arguments:
    pkg_name -- name of the package
    pkg_req -- dependencies of the package
    """
    deps = ' '.join(self.pkg_req)
    return ['{name}_DEPENDENCIES = {reqs}\n'.format(name=self.mk_name,
                                                    reqs=deps)]
def create_package_mk(self):
    """
    Create the lines corresponding to the <package_name>.mk file
    and write them out under the package directory.
    """
    mk_path = os.path.join(
        self.pkg_dir, '{name}.mk'.format(name=self.buildroot_name))
    print('Creating {file}...'.format(file=mk_path))
    # Assemble the file section by section: banner, download info,
    # setup type, license, then the python-package eval.
    content = self.__create_mk_header()
    content += self.__create_mk_download_info()
    content += self.__create_mk_setup()
    content += self.__create_mk_license()
    content.extend(['\n', '$(eval $(python-package))', '\n'])
    with open(mk_path, 'w') as mk_file:
        mk_file.writelines(content)
def create_hash_file(self):
    """
    Create the lines corresponding to the <package_name>.hash files
    and write them out under the package directory.
    """
    pkg_hash = '{name}.hash'.format(name=self.buildroot_name)
    path_to_hash = os.path.join(self.pkg_dir, pkg_hash)
    # Fixed: this print and the hash lines below had lost their {filename}
    # placeholder, so the hash file contained literal junk instead of the
    # file names (the filename= arguments were passed but never used).
    print('Creating {filename}...'.format(filename=path_to_hash))
    lines = []
    # Upstream digests, as published in the PyPI metadata, for the archive.
    if self.used_url['digests']['md5'] and self.used_url['digests']['sha256']:
        hash_header = '# md5, sha256 from {url}\n'.format(
            url=self.metadata_url)
        lines.append(hash_header)
        hash_line = '{method} {digest} {filename}\n'.format(
            method='md5',
            digest=self.used_url['digests']['md5'],
            filename=self.filename)
        lines.append(hash_line)
        hash_line = '{method} {digest} {filename}\n'.format(
            method='sha256',
            digest=self.used_url['digests']['sha256'],
            filename=self.filename)
        lines.append(hash_line)
    # Locally-computed sha256 of each license file, read in BUF_SIZE
    # chunks to keep memory use bounded.
    if self.license_files:
        lines.append('# Locally computed sha256 checksums\n')
        for license_file in self.license_files:
            sha256 = hashlib.sha256()
            with open(license_file, 'rb') as lic_f:
                while True:
                    data = lic_f.read(BUF_SIZE)
                    if not data:
                        break
                    sha256.update(data)
            hash_line = '{method} {digest} {filename}\n'.format(
                method='sha256',
                digest=sha256.hexdigest(),
                # path relative to the extracted package root
                filename=license_file.replace(self.tmp_extract, '')[1:])
            lines.append(hash_line)
    with open(path_to_hash, 'w') as hash_file:
        hash_file.writelines(lines)
def create_config_in(self):
    """
    Creates the Config.in file of a package: the config symbol, runtime
    select lines for its dependencies, and a help text built from the
    PyPI summary and home page.
    """
    path_to_config = os.path.join(self.pkg_dir, 'Config.in')
    print('Creating {file}...'.format(file=path_to_config))
    lines = []
    config_line = 'config BR2_PACKAGE_{name}\n'.format(
        name=self.mk_name)
    lines.append(config_line)
    bool_line = '\tbool "{name}"\n'.format(name=self.buildroot_name)
    lines.append(bool_line)
    if self.pkg_req:
        # Sorted so the select list is deterministic
        self.pkg_req.sort()
        for dep in self.pkg_req:
            dep_line = '\tselect BR2_PACKAGE_{req} # runtime\n'.format(
                req=dep.upper().replace('-', '_'))
            lines.append(dep_line)
    lines.append('\thelp\n')
    md_info = self.metadata['info']
    help_lines = textwrap.wrap(md_info['summary'], 62,
                               initial_indent='\t ',
                               subsequent_indent='\t ')
    # make sure a help text is terminated with a full stop.
    # Fixed: guard against an empty summary — textwrap.wrap('') returns an
    # empty list, which made help_lines[-1] raise IndexError.
    if help_lines and help_lines[-1][-1] != '.':
        help_lines[-1] += '.'
    # Prefer the plain home_page field; newer metadata puts it under
    # project_urls['Homepage'] instead.
    home_page = md_info.get('home_page', None)
    if not home_page:
        project_urls = md_info.get('project_urls', None)
        if project_urls:
            home_page = project_urls.get('Homepage', None)
    if home_page:
        # \t + two spaces is 3 char long
        help_lines.append('')
        help_lines.append('\t ' + home_page)
    help_lines = [x + '\n' for x in help_lines]
    lines += help_lines
    with open(path_to_config, 'w') as config_file:
        config_file.writelines(lines)
def main():
    """Create Buildroot packages for the requested PyPI packages and,
    recursively, for any of their missing dependencies."""
    # Building the parser
    parser = argparse.ArgumentParser(
        description="Creates buildroot packages from the metadata of "
        "an existing PyPI packages and include it "
        "in menuconfig")
    parser.add_argument("packages",
                        help="list of packages to be created",
                        nargs='+')
    parser.add_argument("-o", "--output",
                        help="""
Output directory for packages.
Default is ./package
""",
                        default='./package')
    args = parser.parse_args()
    packages = list(set(args.packages))

    # tmp_path is where we'll extract the files later
    tmp_prefix = 'scanpypi-'
    pkg_folder = args.output
    tmp_path = tempfile.mkdtemp(prefix=tmp_prefix)
    try:
        for real_pkg_name in packages:
            package = BuildrootPackage(real_pkg_name, pkg_folder)
            print('buildroot package name for {}:'.format(package.real_name),
                  package.buildroot_name)
            # First we download the package
            # Most of the info we need can only be found inside the package
            print('Package:', package.buildroot_name)
            print('Fetching package', package.real_name)
            try:
                package.fetch_package_info()
            except (six.moves.urllib.error.URLError, six.moves.urllib.error.HTTPError):
                continue
            if package.metadata_name.lower() == 'setuptools':
                # setuptools imports itself, that does not work very well
                # with the monkey path at the begining
                print('Error: setuptools cannot be built using scanPyPI')
                continue

            try:
                package.download_package()
            except six.moves.urllib.error.HTTPError as error:
                print('Error: {code} {reason}'.format(code=error.code,
                                                      reason=error.reason))
                print('Error downloading package :', package.buildroot_name)
                print()
                continue

            # extract the tarball
            try:
                package.extract_package(tmp_path)
            except (tarfile.ReadError, zipfile.BadZipfile):
                print('Error extracting package {}'.format(package.real_name))
                print()
                continue

            # Loading the package install info from the package
            try:
                package.load_setup()
            except ImportError as err:
                if 'buildutils' in str(err):
                    print('This package needs buildutils')
                    continue
                else:
                    # No importable setup.py: fall back to pyproject.toml
                    try:
                        package.load_pyproject()
                    except Exception:
                        raise
            except (AttributeError, KeyError) as error:
                print('Error: Could not install package {pkg}: {error}'.format(
                    pkg=package.real_name, error=error))
                continue

            # Package requirement are an argument of the setup function
            req_not_found = package.get_requirements(pkg_folder)
            req_not_found = req_not_found.difference(packages)
            # Queue newly discovered dependencies for processing too
            packages += req_not_found
            if req_not_found:
                print('Added packages \'{pkgs}\' as dependencies of {pkg}'
                      .format(pkgs=", ".join(req_not_found),
                              pkg=package.buildroot_name))
            print('Checking if package {name} already exists...'.format(
                name=package.pkg_dir))
            try:
                os.makedirs(package.pkg_dir)
            except OSError as exception:
                if exception.errno != errno.EEXIST:
                    # Fixed: OSError has no .message attribute on Python 3
                    # (this line raised AttributeError); use .strerror like
                    # every other makedirs error path in this file.
                    print("ERROR: ", exception.strerror, file=sys.stderr)
                    continue
                print('Error: Package {name} already exists'
                      .format(name=package.pkg_dir))
                del_pkg = input(
                    'Do you want to delete existing package ? [y/N]')
                if del_pkg.lower() == 'y':
                    shutil.rmtree(package.pkg_dir)
                    os.makedirs(package.pkg_dir)
                else:
                    continue
            package.create_package_mk()
            package.create_hash_file()
            package.create_config_in()
            print("NOTE: Remember to also make an update to the DEVELOPERS file")
            print(" and include an entry for the pkg in packages/Config.in")
            print()
            # printing an empty line for visual confort
    finally:
        # Always remove the temporary extraction tree
        shutil.rmtree(tmp_path)


if __name__ == "__main__":
    main()

View File

@ -0,0 +1,144 @@
#!/usr/bin/env python3
# Copyright (C) 2016 Thomas De Schampheleire <thomas.de.schampheleire@gmail.com>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# TODO (improvements)
# - support K,M,G size suffixes for threshold
# - output CSV file in addition to stdout reporting
import csv
import argparse
import sys
def read_file_size_csv(inputf, detail=None):
    """Extract package or file sizes from CSV file into size dictionary.

    With detail set, returns one entry per file keyed by
    (filename, package) holding the per-file size; otherwise one entry
    per package keyed by (None, package) holding the package size.
    Exits with an error if the header does not match the
    file-size-stats.csv format produced by 'make graph-size'.
    """
    reader = csv.reader(inputf)
    header = next(reader)
    expected = ['File name', 'Package name', 'File size', 'Package size']
    if header[:4] != expected:
        print(("Input file %s does not contain the expected header. Are you "
               "sure this file corresponds to the file-size-stats.csv "
               "file created by 'make graph-size'?") % inputf.name)
        sys.exit(1)

    if detail:
        return {(row[0], row[1]): int(row[2]) for row in reader}
    return {(None, row[1]): int(row[3]) for row in reader}
def compare_sizes(old, new):
    """Return a delta dictionary built from two input size dictionaries.

    Each key maps to a (flag, size difference) tuple where flag is ''
    for entries present in both, 'added' for entries only in new, and
    'removed' (with a negated size) for entries only in old.
    """
    old_keys = set(old)
    new_keys = set(new)
    result = {}
    # packages/files in both
    for key in old_keys & new_keys:
        result[key] = ('', new[key] - old[key])
    # packages/files only in new
    for key in new_keys - old_keys:
        result[key] = ('added', new[key])
    # packages/files only in old
    for key in old_keys - new_keys:
        result[key] = ('removed', -old[key])
    return result
def print_results(result, threshold):
    """Print the given result dictionary sorted by size, ignoring any entries
    below or equal to threshold.

    result maps (filename, pkgname) -> (flag, size difference); filename
    may be None if no detail is requested.
    """
    # Fixed: dropped the needless 'six' dependency — this is a Python 3
    # script (see shebang) and dict.items() is all that is needed.
    list_result = list(result.items())
    maxpkgname = max(len(pkgname) for filename, pkgname in result)
    # Sort entries by size difference, most shrinkage first
    for entry in sorted(list_result, key=lambda entry: entry[1][1]):
        data = dict(
            filename=entry[0][0],
            pkgname=entry[0][1],
            action=entry[1][0],
            size=entry[1][1],
            maxpkgname=maxpkgname,
        )
        if threshold is not None and abs(data['size']) <= threshold:
            continue
        if data['filename']:
            # Fixed: the detail line had lost its {filename} placeholder,
            # so the per-file report printed literal junk instead of the
            # file name (data['filename'] was computed but unused).
            print('{size:12d} {action:7s} {pkgname:{maxpkgname}s} {filename}'.format(**data))
        else:
            print('{size:12d} {action:7s} {pkgname}'.format(**data))
# main #########################################################################

# User-facing description for --help output.
description = """
Compare rootfs size between Buildroot compilations, for example after changing
configuration options or after switching to another Buildroot release.
This script compares the file-size-stats.csv file generated by 'make graph-size'
with the corresponding file from another Buildroot compilation.
The size differences can be reported per package or per file.
Size differences smaller or equal than a given threshold can be ignored.
"""

parser = argparse.ArgumentParser(description=description,
                                 formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-d', '--detail', action='store_true',
                    help='''report differences for individual files rather than
packages''')
parser.add_argument('-t', '--threshold', type=int,
                    help='''ignore size differences smaller or equal than this
value (bytes)''')
parser.add_argument('old_file_size_csv', type=argparse.FileType('r'),
                    metavar='old-file-size-stats.csv',
                    help="""old CSV file with file and package size statistics,
generated by 'make graph-size'""")
parser.add_argument('new_file_size_csv', type=argparse.FileType('r'),
                    metavar='new-file-size-stats.csv',
                    help='new CSV file with file and package size statistics')
args = parser.parse_args()

# Reporting granularity: per-file with -d, otherwise per-package.
if args.detail:
    keyword = 'file'
else:
    keyword = 'package'

old_sizes = read_file_size_csv(args.old_file_size_csv, args.detail)
new_sizes = read_file_size_csv(args.new_file_size_csv, args.detail)

delta = compare_sizes(old_sizes, new_sizes)

print('Size difference per %s (bytes), threshold = %s' % (keyword, args.threshold))
print(80*'-')
print_results(delta, args.threshold)
print(80*'-')
# Grand total over all entries; never filtered by the threshold.
print_results({(None, 'TOTAL'): ('', sum(new_sizes.values()) - sum(old_sizes.values()))},
              threshold=None)

303
buildroot-2024.02/utils/test-pkg Executable file
View File

@ -0,0 +1,303 @@
#!/usr/bin/env bash
set -e
# Default CSV listing the autobuild toolchain config fragments
# (one "path,arch,libc" entry per line); overridable with -t.
TOOLCHAINS_CSV='support/config-fragments/autobuild/toolchain-configs.csv'
# Path of the temporary config snippet created when -c is not given;
# removed by do_clean() via the EXIT trap.
TEMP_CONF=""
# Cleanup handler (installed as a trap in main): remove the temporary
# config snippet, if one was created.
do_clean() {
    if [ -n "${TEMP_CONF}" ]; then
        rm -f "${TEMP_CONF}"
    fi
}
#######################################
# Parse options, select the toolchains to test, and build the config
# snippet against each of them, printing a per-toolchain status line
# and a final summary.
# Globals:   TOOLCHAINS_CSV (read), TEMP_CONF (written), my_name (read)
# Arguments: command-line options (see help)
# Returns:   number of build + legal-info failures
#######################################
main() {
    local o O opts
    local cfg dir pkg random toolchains_csv toolchain all number mode prepare_only
    local ret nb nb_skip nb_fail nb_legal nb_show nb_tc build_dir keep
    local -a toolchains
    local pkg_br_name

    # Short/long option definitions for getopt(1)
    o='hakc:d:n:p:r:t:'
    O='help,all,keep,prepare-only,config-snippet:,build-dir:,number:,package:,random:,toolchains-csv:'
    opts="$(getopt -n "${my_name}" -o "${o}" -l "${O}" -- "${@}")"
    eval set -- "${opts}"

    # Option defaults
    random=0
    all=0
    keep=0
    number=0
    mode=0
    prepare_only=0
    toolchains_csv="${TOOLCHAINS_CSV}"
    while [ ${#} -gt 0 ]; do
        case "${1}" in
        (-h|--help)
            help; exit 0
            ;;
        (-a|--all)
            all=1; shift 1
            ;;
        (-k|--keep)
            keep=1; shift 1
            ;;
        (--prepare-only)
            prepare_only=1; shift 1
            ;;
        (-c|--config-snippet)
            cfg="${2}"; shift 2
            ;;
        (-d|--build-dir)
            dir="${2}"; shift 2
            ;;
        (-n|--number)
            number="${2}"; shift 2
            ;;
        (-p|--package)
            pkg="${2}"; shift 2
            ;;
        (-r|--random)
            random="${2}"; shift 2
            ;;
        (-t|--toolchains-csv)
            toolchains_csv="${2}"; shift 2
            ;;
        (--)
            shift; break
            ;;
        esac
    done

    # Remove the temporary config snippet (if any) on any exit path
    trap do_clean INT TERM HUP EXIT

    # Without -c, generate a one-line snippet enabling just the package
    if [ -z "${cfg}" ]; then
        pkg_br_name="${pkg//-/_}"
        pkg_br_name="BR2_PACKAGE_${pkg_br_name^^}"
        TEMP_CONF="$(mktemp /tmp/test-"${pkg}"-config.XXXXXX)"
        echo "${pkg_br_name}=y" > "${TEMP_CONF}"
        cfg="${TEMP_CONF}"
    fi
    if [ ! -e "${cfg}" ]; then
        printf "error: %s: no such file\n" "${cfg}" >&2; exit 1
    fi
    if [ -z "${dir}" ]; then
        dir="${HOME}/br-test-pkg"
    fi

    # -a, -n and -r select how many toolchains to test; count how many of
    # them were given so we can reject conflicting combinations.
    if [ "${random}" -gt 0 ]; then
        mode=$((mode+1))
    fi
    if [ "${number}" -gt 0 ]; then
        mode=$((mode+1))
    fi
    if [ "${all}" -eq 1 ]; then
        mode=$((mode+1))
    fi
    # Default mode is to test the N first toolchains, which have been
    # chosen to be a good selection of toolchains.
    if [ ${mode} -eq 0 ] ; then
        number=6
    elif [ ${mode} -gt 1 ] ; then
        printf "error: --all, --number and --random are mutually exclusive\n" >&2; exit 1
    fi

    # Extract the URLs of the toolchains; drop internal toolchains
    # E.g.: http://server/path/to/name.config,arch,libc
    # --> http://server/path/to/name.config
    mapfile -t toolchains < <(sed -r -e 's/,.*//; /internal/d; /^#/d; /^$/d;' "${toolchains_csv}" \
                              | if [ "${random}" -gt 0 ]; then \
                                    sort -R | head -n "${random}"
                                elif [ "${number}" -gt 0 ]; then \
                                    head -n "${number}"
                                else
                                    sort
                                fi
                             )

    nb_tc="${#toolchains[@]}"
    if [ "${nb_tc}" -eq 0 ]; then
        printf "error: no toolchain found (networking issue?)\n" >&2; exit 1
    fi

    # Build against each selected toolchain, tallying the outcomes
    # (see build_one for the meaning of the return codes).
    nb=0
    nb_skip=0
    nb_fail=0
    nb_legal=0
    nb_show=0
    for toolchainconfig in "${toolchains[@]}"; do
        : $((nb++))
        toolchain="$(basename "${toolchainconfig}" .config)"
        build_dir="${dir}/${toolchain}"
        printf "%40s [%*d/%d]: " "${toolchain}" ${#nb_tc} "${nb}" "${nb_tc}"
        build_one "${build_dir}" "${toolchainconfig}" "${cfg}" "${pkg}" "${prepare_only}" && ret=0 || ret=${?}
        case ${ret} in
        (0) printf "OK\n";;
        (1) : $((nb_skip++)); printf "SKIPPED\n";;
        (2) : $((nb_fail++)); printf "FAILED\n";;
        (3) : $((nb_legal++)); printf "FAILED\n";;
        (4) : $((nb_show++)); printf "FAILED\n";;
        esac
    done

    printf "%d builds, %d skipped, %d build failed, %d legal-info failed, %d show-info failed\n" \
        "${nb}" "${nb_skip}" "${nb_fail}" "${nb_legal}" "${nb_show}"

    # Overall status: non-zero if any build or legal-info step failed
    return $((nb_fail + nb_legal))
}
#######################################
# Build the config snippet against one toolchain.
# Globals:   keep (read, set by main)
# Arguments: build dir, toolchain config fragment, config snippet,
#            package name (may be empty), prepare-only flag
# Returns:   0 OK, 1 skipped (snippet not applicable), 2 build failed,
#            3 legal-info failed, 4 show-info failed
#######################################
build_one() {
    local dir="${1}"
    local toolchainconfig="${2}"
    local cfg="${3}"
    local pkg="${4}"
    local prepare_only="${5}"

    mkdir -p "${dir}"

    # Merge toolchain fragment + minimal config + package snippet into
    # a full .config in the build dir.
    CONFIG_="" support/kconfig/merge_config.sh -O "${dir}" \
        "${toolchainconfig}" "support/config-fragments/minimal.config" "${cfg}" \
        >> "${dir}/logfile" 2>&1

    # We want all the options from the snippet to be present as-is (set
    # or not set) in the actual .config; if one of them is not, it means
    # some dependency from the toolchain or arch is not available, in
    # which case this config is untestable and we skip it.
    # We don't care about the locale to sort in, as long as both sort are
    # done in the same locale.
    comm -23 <(sort "${cfg}") <(sort "${dir}/.config") >"${dir}/missing.config"
    if [ -s "${dir}/missing.config" ]; then
        if [ ${keep} -ne 1 ]; then
            # Invalid configuration, drop it
            rm -f "${dir}/.config"
        fi
        return 1
    fi
    # Remove file, it's empty anyway.
    rm -f "${dir}/missing.config"

    # Defer building the job to the caller (e.g. a gitlab pipeline)
    if [ "${prepare_only}" -eq 1 ]; then
        return 0
    fi

    # With -p, rebuild the package from scratch so a rerun after a fix
    # actually re-tests it.
    if [ -n "${pkg}" ]; then
        if ! make O="${dir}" "${pkg}-dirclean" >> "${dir}/logfile" 2>&1; then
            return 2
        fi
    fi

    # shellcheck disable=SC2086
    if ! BR_FORCE_CHECK_DEPENDENCIES=YES make O="${dir}" ${pkg} >> "${dir}/logfile" 2>&1; then
        return 2
    fi

    # legal-info done systematically, because some packages have different
    # sources depending on the configuration (e.g. lua-5.2 vs. lua-5.3)
    if ! make O="${dir}" legal-info >> "${dir}/logfile" 2>&1; then
        return 3
    fi

    # Validate that we generate proper json as show-info
    { tput smso; printf '>>> Running show-info\n'; tput rmso; } >> "${dir}/logfile" 2> /dev/null;
    # Use the system jq when available, otherwise build host-jq
    JQ="$(which jq 2> /dev/null)"
    if [ -z "${JQ}" ]; then
        make O="${dir}" host-jq >> "${dir}/logfile" 2>&1
        JQ="${dir}/host/bin/jq"
    fi
    if ! make O="${dir}" "${pkg:+${pkg}-}show-info" > "${dir}/info.json" 2>> "${dir}/logfile"; then
        return 4
    fi
    if ! "${JQ}" . < "${dir}/info.json" >> "${dir}/logfile" 2>&1; then
        return 4
    fi

    # If we get here, the build was successful. Clean up the build/host
    # directories to save disk space, unless 'keep' was set.
    if [ ${keep} -ne 1 ]; then
        make O="${dir}" clean >> "${dir}/logfile" 2>&1
    fi
}
# Print the usage text. The heredoc is unquoted on purpose so that
# ${TOOLCHAINS_CSV} expands to the actual default CSV path.
help() {
    cat <<_EOF_
test-pkg: test-build a package against various toolchains and architectures
The supplied config snippet is appended to each toolchain config, the
resulting configuration is checked to ensure it still contains all options
specified in the snippet; if any is missing, the build is skipped, on the
assumption that the package under test requires a toolchain or architecture
feature that is missing.
In case failures are noticed, you can fix the package and just re-run the
same command again; it will re-run the test where it failed. If you did
specify a package (with -p), the package build dir will be removed first.
The list of toolchains is retrieved from ${TOOLCHAINS_CSV}.
Only the external toolchains are tried, because building a Buildroot toolchain
would take too long. An alternative toolchains CSV file can be specified with
the -t option. This file should have lines consisting of the path to the
toolchain config fragment and the required host architecture, separated by a
comma. The config fragments should contain only the toolchain and architecture
settings.
By default, a useful subset of toolchains is tested. If needed, all
toolchains can be tested (-a), an arbitrary number of toolchains (-n
in order, -r for random).
Options:
-h, --help
Print this help.
-c CFG, --config-snippet CFG
Use the CFG file as the source for the config snippet. This file
should contain all the config options required to build a package.
-d DIR, --build-dir DIR
Do the builds in directory DIR, one sub-dir per toolchain.
If not specified, defaults to \${HOME}/br-test-pkg
-p PKG, --package PKG
Test-build the package PKG, by running 'make PKG'; if not specified,
just runs 'make'.
-a, --all
Test all toolchains, instead of the default subset defined by
Buildroot developers.
-n N, --number N
Test N toolchains, in the order defined in the toolchain CSV
file.
-r N, --random N
Limit the tests to the N randomly selected toolchains.
-t CSVFILE, --toolchains-csv CSVFILE
CSV file containing the paths to config fragments of toolchains to
try. If not specified, the toolchains in ${TOOLCHAINS_CSV} will be
used.
-k, --keep
Keep the build directories even if the build succeeds.
Note: the logfile and configuration is always retained, even without
this option.
--prepare-only
Only prepare the .config files, but do not build them. Output the
list of build directories to stdout, and the status on stderr.
Example:
Testing libcec would require a config snippet that contains:
BR2_PACKAGE_LIBCEC=y
Testing libcurl with openSSL support would require a snippet such as:
BR2_PACKAGE_OPENSSL=y
BR2_PACKAGE_LIBCURL=y
_EOF_
}
# Script basename, used by getopt for its error messages.
my_name="${0##*/}"

main "${@}"