[build] add cvitek build scripts
Change-Id: If63ce4a669e5d4d72b8e3b9253336dd99bf74c30
148
build/scripts/atf.mk
Normal file
@@ -0,0 +1,148 @@
################################################################################
# BLD targets
################################################################################
BM_BLD_OUTPUT := ${BM_BLD_PATH}/out

bld-build:
	${Q}mkdir -p $(RELEASE_BIN_BLD_DIR)
	${Q}mkdir -p $(RELEASE_BIN_BLDP_DIR)
	${Q}mkdir -p $(RELEASE_BIN_BLP_DIR)

bld: export ARCH=$(patsubst "%",%,$(CONFIG_ARCH))
bld: bld-build
	$(call print_target)
	${Q}$(MAKE) -C ${BM_BLD_PATH} bld
	${Q}cp ${BM_BLD_OUTPUT}/bldp.bin ${RELEASE_BIN_BLDP_DIR}/bldp_${CHIP_ARCH_L}_${CHIP}_${DDR_CFG}.bin
	${Q}cp ${BM_BLD_OUTPUT}/blp.bin ${RELEASE_BIN_BLP_DIR}/blp_${CHIP_ARCH_L}_${CHIP}.bin
	${Q}git -C ${BM_BLD_PATH} log --pretty=oneline -n 1 > ${RELEASE_BIN_BLDP_DIR}/bldp_${CHIP_ARCH_L}_${CHIP}_${DDR_CFG}.txt
	${Q}git -C ${BM_BLD_PATH} log --pretty=oneline -n 1 > ${RELEASE_BIN_BLP_DIR}/blp_${CHIP_ARCH_L}_${CHIP}.txt

bld-clean:
	$(call print_target)
	${Q}$(MAKE) -C ${BM_BLD_PATH} clean

################################################################################
# arm-trusted-firmware targets
################################################################################
ATF_FIP_PATH := ${ATF_PATH}/build/${CHIP}_${SUBTYPE}/release/fip.bin

# clear is not encrypted
# key0 is encrypted by keys in ATF git repo
export ATF_KEY_SEL := $(subst default,,${ATF_KEY_SEL})

ifeq ($(CHIP_ARCH_L),$(filter $(CHIP_ARCH_L),cv183x cv75x1 cv952x))
define atf_post_action
	${Q}mv ${ATF_FIP_PATH} ${ATF_FIP_PATH}.ori
	# append blp.bin and bldp.bin to fip.bin
	${Q}dd if=${ATF_FIP_PATH}.ori of=${ATF_FIP_PATH} bs=2K conv=sync
	${Q}dd if=${BLP_PATH} bs=2K conv=sync >> ${ATF_FIP_PATH}
	${Q}dd if=${DDRC_PATH} bs=2K conv=sync >> ${ATF_FIP_PATH}
endef
ATF_KEY_SEL := $(or ${ATF_KEY_SEL},key0)
else
ATF_KEY_SEL := $(or ${ATF_KEY_SEL},clear)
endif

export IMG_ENC_KPATH :=
export IMG_ENC_KSRC := dev

ifeq (${ATF_KEY_SEL},clear)
export ATF_TBBR := 0
export IMG_ENC := 0
export ATF_CRC := 1
else
export ATF_TBBR := 1
export IMG_ENC := 1
export ATF_CRC := 0
endif

# ARM ATF TBBR configuration
TBBR_MAKE_OPT :=
TBBR_MAKE_TGT :=
ifeq (${ATF_TBBR},1)
TBBR_MAKE_OPT := TRUSTED_BOARD_BOOT=1
#TGT is short for TarGeT
TBBR_MAKE_TGT := certificates
endif

# ARM ATF bl32
SPD_MAKE_OPT :=
ifeq (${ATF_BL32},1)
SPD_MAKE_OPT := SPD=opteed
endif

FAKE_BL31_32_TGT :=
ifeq (${FAKE_BL31_32},1)
FAKE_BL31_32_TGT := fake_bl31_32
endif

ifeq ($(wildcard ${BM_BLD_PATH}/*),)
arm-trusted-firmware-build: export BLP_PATH=${ATF_PATH}/tools/blp.bin
arm-trusted-firmware-build: export DDRC_PATH=${ATF_PATH}/tools/bldp.bin
arm-trusted-firmware: export SCP_BL2=${ATF_PATH}/tools/bld.bin
else
ifeq (${CONFIG_FIP_V1},y)
arm-trusted-firmware-build: bld
arm-trusted-firmware-build: export BLP_PATH=${BM_BLD_OUTPUT}/blp.bin
arm-trusted-firmware-build: export DDRC_PATH=${BM_BLD_OUTPUT}/bldp.bin
arm-trusted-firmware: export RTC_CORE_SRAM_BIN_PATH=${BM_BLD_OUTPUT}/blds.bin
else
arm-trusted-firmware-build: export BLP_PATH=${ATF_PATH}/tools/blp.bin
arm-trusted-firmware-build: export DDRC_PATH=${ATF_PATH}/tools/bldp.bin
arm-trusted-firmware-build: export SCP_BL2=${ATF_PATH}/tools/bld.bin
arm-trusted-firmware-build: export RTC_CORE_SRAM_BIN_PATH=${ATF_PATH}/tools/bld.bin
endif

ifeq (${CONFIG_MULTI_FIP},y)
ATF_DEFAULT_SUFFIX := ${ATF_DEFAULT_SUFFIX}_single
arm-trusted-firmware: export MULTI_FIP=1
arm-trusted-firmware: export SCP_BL2=${ATF_PATH}/tools/fastboot/fake_bld_enc.bin
arm-trusted-firmware: export DDR_INIT=${BM_BLD_OUTPUT}/bld.bin
else
arm-trusted-firmware: export SCP_BL2=${BM_BLD_OUTPUT}/bld.bin
endif

endif

arm-trusted-firmware-build: export CROSS_COMPILE=${CROSS_COMPILE_64}
arm-trusted-firmware-build: export BL33=${ATF_PATH}/build/fake_bl33.bin
arm-trusted-firmware-build:
	$(call print_target)
	${Q}mkdir -p $(dir ${BL33})
	${Q}mkdir -p ${RELEASE_BIN_ATF_DIR}
	${Q}printf 'BL33............' > ${BL33}
	${Q}echo "SCP_BL2=${SCP_BL2}"
	${Q}$(MAKE) -j${NPROC} -C ${ATF_PATH} \
		CRC=${ATF_CRC} IMG_BLD=1 DEBUG=0 ENABLE_ASSERTIONS=1 \
		${SPD_MAKE_OPT} ${TBBR_MAKE_TGT} ${TBBR_MAKE_OPT} ${FAKE_BL31_32_TGT} all fip
	$(call atf_post_action)

ifeq (${ATF_TBBR},0)
ATF_DEFAULT_SUFFIX := clear
else
ATF_DEFAULT_SUFFIX := key0
endif

ifeq (${CONFIG_FIP_V1},y)
arm-trusted-firmware-pack: arm-trusted-firmware-build
	$(call print_target)
	${Q}cp ${ATF_FIP_PATH} ${RELEASE_BIN_ATF_DIR}/fip_atf_${CHIP_ARCH_L}_${ATF_DEFAULT_SUFFIX}.bin
ifneq ($(wildcard ${BM_BLD_PATH}/*),)
	${Q}python3 ${TOOLS_PATH}/${CHIP_ARCH_L}/pack_fip/pack_fip.py $(if ${CONFIG_MULTI_FIP},--multibin) \
		--tar-bld ${ATF_FIP_PATH} \
		--output ${RELEASE_BIN_BLD_DIR}/bld_${CHIP_ARCH_L}_${CHIP}_${BOARD}_${ATF_DEFAULT_SUFFIX}.tar
	${Q}git -C ${BM_BLD_PATH} log --pretty=oneline -n 1 > ${RELEASE_BIN_BLD_DIR}/bld_${CHIP_ARCH_L}_${CHIP}_${BOARD}_${ATF_DEFAULT_SUFFIX}.txt
endif
	${Q}git -C ${ATF_PATH} log --pretty=oneline -n 1 > ${RELEASE_BIN_ATF_DIR}/fip_atf_${CHIP_ARCH_L}_${ATF_DEFAULT_SUFFIX}.txt

arm-trusted-firmware: arm-trusted-firmware-pack
endif

arm-trusted-firmware: arm-trusted-firmware-build

arm-trusted-firmware-clean: bld-clean
	$(call print_target)
	${Q}$(MAKE) -C ${ATF_PATH} clean
	${Q}$(MAKE) -C ${ATF_PATH}/tools/fiptool clean
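Note: the key-selection logic above reduces to a small decision table. The Python sketch below is not part of the commit; it only restates the conditionals in atf.mk for quick reference.

# How atf.mk derives its exports from ATF_KEY_SEL (summary of the block above).
ATF_KEY_SEL_EFFECTS = {
    "clear": {"ATF_TBBR": 0, "IMG_ENC": 0, "ATF_CRC": 1},  # unsigned FIP, CRC padding enabled
    "key0":  {"ATF_TBBR": 1, "IMG_ENC": 1, "ATF_CRC": 0},  # signed/encrypted with keys from the ATF repo
}
# Default selection: key0 on cv183x/cv75x1/cv952x (where atf_post_action also appends
# blp.bin and bldp.bin to fip.bin); clear on every other chip architecture.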
659
build/scripts/boards_scan.py
Executable file
@@ -0,0 +1,659 @@
#!/usr/bin/env python3
# PYTHON_ARGCOMPLETE_OK

import logging
import os
import re
import glob
import argparse
import itertools
import collections
import json
import os.path
from datetime import datetime

import build_helper
import kconfiglib

try:
    import argcomplete
except ImportError:
    argcomplete = None


build_helper.check_python_min_version()

Board = collections.namedtuple("Board", "chip, board, ddr_cfg, info")
Arch = collections.namedtuple("Arch", "chip, board")

ENVS_FROM_CONFIG = [
    "CHIP",
    "ARCH",
    "BOARD",
    "DDR_CFG",
    "ATF_SRC",
    "ATF_KEY_SEL",
    "KERNEL_SRC",
    "UBOOT_SRC",
    "USE_CCACHE",
    "MULTI_FIP",
    "STORAGE_TYPE",
    "NANDFLASH_PAGESIZE",
    "MW_VER",
    "SDK_VER",
    "SENSOR_TUNING_PARAM",
    "BUILD_TURNKEY_ACCESSGUARD",
    "BUILD_TURNKEY_IPC",
    "FLASH_SIZE_SHRINK",
    "BUILD_FOR_DEBUG",
    "DDR_64MB_SIZE",
    "PANEL_TUNING_PARAM",
    "PANEL_LANE_NUM_TUNING_PARAM",
    "PANEL_LANE_SWAP_TUNING_PARAM",
    "MTRACE",
]


def parse_args():
    parser = argparse.ArgumentParser(
        description="Scan boards to generate env and configs"
    )
    parser.add_argument(
        "-v",
        "--verbose",
        default="INFO",
        choices=["CRITICAL", "DEBUG", "ERROR", "INFO", "NOTSET", "WARNING"],
    )
    parser.add_argument("--logfile", type=str)
    parser.add_argument("--gen-build-kconfig", action="store_true")
    parser.add_argument("--scan-boards-config", action="store_true")
    parser.add_argument("--gen-board-env", type=str)
    parser.add_argument("--print-usage", action="store_true")
    parser.add_argument("--list-chip-arch", action="store_true")
    parser.add_argument("--get-chip-arch", action="store_true")
    parser.add_argument("--list-boards", type=str)
    parser.add_argument("--gen-board-its", dest="arch")
    parser.add_argument("--gen_single_board_its", action="store_true")
    parser.add_argument("--chip_name", dest="chip_name", type=str)
    parser.add_argument("--board_name", dest="board_name", type=str)
    parser.add_argument("--skip_ramdisk", action="store_true")

    if argcomplete:
        argcomplete.autocomplete(parser)

    return parser.parse_args()


def load_board_config(path):
    logging.debug("load %s", path)

    kconf = kconfiglib.Kconfig(
        build_helper.KCONFIG_PATH, suppress_traceback=True, warn=True
    )
    kconf.load_config(path)

    return kconf


def check_board_path(board_dir, chip, board):
    full_board_name = os.path.basename(board_dir)
    full_board_name2 = "%s_%s" % (chip, board)
    logging.debug("full_board_name=%s %s", full_board_name, full_board_name2)
    if full_board_name != full_board_name2:
        raise Exception(
            "The CHIP(%s)/BOARD(%s) in .config are not same as %s"
            % (chip, board, full_board_name)
        )


def scan_boards_config():
    configs_saved = sorted(glob.glob(build_helper.BOARD_KCONFIG_SAVED_GLOB))

    boards = {}

    for n, path in enumerate(configs_saved):
        *_, arch, board, conf = path.split("/")
        if arch == "default":
            continue

        kconf = load_board_config(path)

        check_board_path(
            os.path.dirname(path),
            kconf.syms["CHIP"].str_value,
            kconf.syms["BOARD"].str_value,
        )

        br = Board(
            kconf.syms["CHIP"].str_value,
            kconf.syms["BOARD"].str_value,
            kconf.syms["DDR_CFG"].str_value,
            "",
        )

        logging.debug("%d: %s", n, br)
        boards.setdefault(br.chip, []).append(br)

    return boards


kconfig_tmpl = """
#
# Automatically generated by boards_scan.py; DO NOT EDIT.
#

choice
prompt "Chip selection"
{chip_choice}
endchoice

{chip_arch_config}

config CHIP
string
{chip_config}

choice
prompt "Board selection"
{board_choice}
endchoice

config BOARD
string
{board_config}

choice
prompt "DDR configuration selection"
{ddr_cfg_choice}
endchoice

config DDR_CFG
string
{ddr_cfg_config}

"""


def board_dir_to_name(board_dir):
    chips = build_helper.get_chip_list()
    chip_list = list(itertools.chain(*chips.values()))

    m = re.search(
        r"^([0-9a-z]+)_(.+)$", os.path.basename(board_dir), flags=re.IGNORECASE
    )
    chip, br_name = m.groups()
    if chip not in chip_list:
        raise Exception(
            "%r of %r is unknown (missing in chip_list.json?)" % (chip, board_dir)
        )

    for chip_arch, xlist in chips.items():
        if chip in xlist:
            break
    else:
        raise Exception("Can't find CHIP_ARCH for %r" % chip)

    return chip_arch, chip, br_name


def gen_build_kconfig():
    board_list = collections.OrderedDict()
    config_str = {
        "chip_choice": "",
        "chip_arch_config": "",
        "chip_config": "",
        "board_choice": "",
        "board_config": "",
        "ddr_cfg_choice": "",
    }

    board_list.setdefault("none", []).append(Board("none", "none", "none", "none"))

    os.makedirs(build_helper.BUILD_OUTPUT_DIR, exist_ok=True)

    kconfig_path = os.path.join(build_helper.BUILD_OUTPUT_DIR, "Kconfig")

    for chip_arch in build_helper.get_chip_list():
        _dir = os.path.join(build_helper.BOARD_DIR, chip_arch)

        for board_dir in sorted(os.listdir(_dir)):
            if board_dir.strip() == "default":
                continue
            board_dir = os.path.join(build_helper.BOARD_DIR, chip_arch, board_dir)
            if not os.path.isdir(board_dir):
                continue

            logging.debug("board_dir=%r", board_dir)
            _, chip, br_name = board_dir_to_name(board_dir)

            cj_path = os.path.join(board_dir, "config.json")
            with open(cj_path, "r", encoding="utf-8") as fp:
                cj = json.load(fp)

            br = Board(chip, br_name, cj["ddr_cfg_list"], cj["board_information"])
            board_list.setdefault(chip, []).append(br)

    chip_list = build_helper.get_chip_list()
    chip_list["none"] = ["none"]
    chip_list_r = {c: k for k, v in chip_list.items() for c in v}

    config_str["chip_choice"] = "\n ".join(
        (
            'config CHIP_{chip}\n bool "{chip}"\n select CHIP_ARCH_{chip_arch}'.format(
                chip=chip, chip_arch=chip_list_r[chip]
            ).strip()
            for chip in board_list.keys()
        )
    )

    config_str["chip_config"] = "\n ".join(
        [
            'default "{chip}" if CHIP_{chip}'.format(chip=chip).strip()
            for chip in board_list.keys()
        ]
    )

    config_str["chip_arch_config"] = "\n".join(
        (
            "config CHIP_ARCH_{chip_arch}\n def_bool n".format(
                chip_arch=chip_arch
            ).strip()
            for chip_arch in chip_list
        )
    )

    config_str["board_choice"] = "\n ".join(
        [
            'config BOARD_{br}\n bool "{br} ({br_info})"\n depends on CHIP_{chip}'.format(
                chip=chip, br=br.board, br_info=br.info if br.info else "none"
            ).strip()
            for chip, br_list in board_list.items()
            for br in br_list
        ]
    )

    config_str["board_config"] = "\n ".join(
        [
            'default "{br}" if BOARD_{br}'.format(br=br.board).strip()
            for _, br_list in board_list.items()
            for br in br_list
        ]
    )

    config_str["ddr_cfg_choice"] = "\n ".join(
        [
            'config DDR_CFG_{ddf_cfg}\n bool "{ddf_cfg}"\n depends on CHIP_{chip} && BOARD_{br}'.format(
                chip=chip, br=br.board, ddf_cfg=ddr_cfg if ddr_cfg else "none"
            ).strip()
            for chip, br_list in board_list.items()
            for br in br_list
            for ddr_cfg in br.ddr_cfg
        ]
    )

    config_str["ddr_cfg_config"] = "\n ".join(
        [
            'default "{ddf_cfg}" if DDR_CFG_{ddf_cfg}'.format(
                ddf_cfg=ddr_cfg if ddr_cfg else "none"
            ).strip()
            for chip, br_list in board_list.items()
            for br in br_list
            for ddr_cfg in br.ddr_cfg
        ]
    )

    kconfig = kconfig_tmpl.format(**config_str)
    with open(kconfig_path, "w") as fp:
        fp.write(kconfig)


def gen_build_env(boards):
    chips = build_helper.get_chip_list()

    # Chip definition
    for chip_arch, chip_list in sorted(chips.items()):
        chip_list = " ".join(sorted(chip_list))
        print("chip_%s=(%s)" % (chip_arch, chip_list))

    chip_cv_str = " ".join(sorted(itertools.chain(*chips.values())))
    print("chip_cv=(%s)" % chip_cv_str)
    # compatible with the original shell script
    print("chip_sel=(%s)" % chip_cv_str)

    # Platform definition
    print("subtype_sel=(palladium fpga asic)")

    # Board definition and information
    for chip, br_list in boards.items():
        n = 0
        br_list = [
            i for i in br_list if all(j not in i.board for j in ["palladium", "fpga"])
        ]
        br_list.sort()
        for n, br in enumerate(br_list):
            print('%s_board_sel[%d]="%s"' % (chip, n, br.board))
            print('%s_board_info[%d]="%s"' % (chip, n, br.info))
            print('%s_board_ddr_cfg[%d]="%s"' % (chip, n, br.ddr_cfg))


def gen_board_env(full_board_name):
    logging.debug("full_board_name=%s", full_board_name)

    config_path = os.path.join(build_helper.BUILD_REPO_DIR, ".config")
    with open(config_path, "r"):
        pass
    kconf = load_board_config(config_path)

    chips = build_helper.get_chip_list()
    chip = kconf.syms["CHIP"].str_value

    for chip_arch, chip_list in chips.items():
        if chip in chip_list:
            print('export CHIP_ARCH="%s"' % chip_arch.upper())
            break
    else:
        raise Exception("Can't find CHIP_ARCH for %r" % chip)

    print('export CHIP_SEGMENT="%s"' % build_helper.get_segment_from_chip(chip))

    for name in ENVS_FROM_CONFIG:
        print('export %s="%s"' % (name, kconf.syms[name].str_value))

    board = kconf.syms["BOARD"].str_value
    subtype = [i for i in ["palladium", "fpga"] if i in board]
    if subtype:
        subtype = subtype[0]
    else:
        subtype = "asic"
    print('export SUBTYPE="%s"' % subtype)


def get_chip_arch(board):
    if not board:
        return
    board_split, *_ = board.split("_")
    if board == board_split:
        return
    for arch, chips in build_helper.get_chip_list().items():
        if board_split in chips:
            print(arch)
            return


def list_chip_arch():
    for arch, chips in build_helper.get_chip_list().items():
        print(" ** %6s ** -> %s" % (arch, chips))


def list_boards_by_chip_arch(chip_arch):
    boards = {}
    if chip_arch not in build_helper.get_chip_list():
        print(" \033[1;31;47m Input chip arch '", chip_arch, "' is ERROR\033[0m")
        return
    for arch in build_helper.get_chip_list()[chip_arch]:
        boards[arch] = []
    board_dir = os.path.join(build_helper.BOARD_DIR, chip_arch)
    for board in sorted(os.listdir(board_dir)):
        m = re.search(r"^([0-9a-z]+)_(.+)$", board, flags=re.IGNORECASE)
        chip, _ = m.groups()

        conf_path = os.path.join(board_dir, board, "config.json")
        with open(conf_path, "r", encoding="utf-8") as fp:
            conf = json.load(fp)
        boards[chip].append({"board": board, "info": conf["board_information"]})

    print("\033[93m*", chip_arch, "* the avaliable cvitek EVB boards\033[0m")
    for chip, board_list in boards.items():
        if not board_list:
            continue

        jump = 0
        print("%8s - " % chip, end="")
        for board in board_list:
            jump = jump + 1
            if jump > 1:
                print(
                    " ",
                    board["board"],
                    " [",
                    board["info"],
                    "]",
                    end="\n",
                    sep="",
                )
            else:
                print(board["board"], " [", board["info"], "]", end="\n", sep="")


def print_usage():
    chips = build_helper.get_chip_list()
    chip_list = list(itertools.chain(*chips.values()))

    # Initialize the dictionary
    map_name = dict()
    map_info = dict()
    for what in chip_list:
        map_name[what] = []

    for board_dir in sorted(os.listdir(build_helper.BOARD_DIR)):
        if board_dir.strip() == "default":
            continue
        board_dir = os.path.join(build_helper.BOARD_DIR, board_dir)
        if not os.path.isdir(board_dir):
            continue
        m = re.search(
            r"^([0-9a-z]+)_(.+)$", os.path.basename(board_dir), flags=re.IGNORECASE
        )
        chip, br_name = m.groups()
        map_name[chip].append(br_name)
        cj_path = os.path.join(board_dir, "config.json")
        with open(cj_path, "r", encoding="utf-8") as fp:
            cj = json.load(fp)
        map_info[chip + br_name] = cj["board_information"]

    print("\033[93m- The avaliable cvitek EVB boards\033[0m")
    for chip in sorted(map_name):
        jump = 0
        print("%8s - " % chip, end="")
        for boards in sorted(map_name[chip]):
            jump = jump + 1
            if jump > 1:
                print(
                    " ",
                    chip,
                    "_",
                    boards,
                    " [",
                    map_info[chip + boards],
                    "]",
                    end="\n",
                    sep="",
                )
            else:
                print(
                    chip,
                    "_",
                    boards,
                    " [",
                    map_info[chip + boards],
                    "]",
                    end="\n",
                    sep="",
                )


config_list_tmpl = """
configurations {{
{config}
}};
"""


fdt_list_tmpl = """
{fdt}
"""


fdt_tmpl = """
fdt-{chip}_{board} {{
description = "cvitek device tree - {chip}_{board}";
data = /incbin/("./{chip}_{board}.dtb");
type = "flat_dt";
arch = "arm64";
compression = "none";
hash-1 {{
algo = "{hash_algo}";
}};
}};
"""


config_tmpl = """
config-{chip}_{board} {{
description = "boot cvitek system with board {chip}_{board}";
kernel = "kernel-1";
ramdisk = "ramdisk-1";
fdt = "fdt-{chip}_{board}";
}};
"""


config_noramdisk_tmpl = """
config-{chip}_{board} {{
description = "boot cvitek system with board {chip}_{board}";
kernel = "kernel-1";
fdt = "fdt-{chip}_{board}";
}};
"""


def insertAfter(string, keyword, replacement):
    i = string.find(keyword)
    return string[: i + len(keyword)] + replacement + string[i + len(keyword) :]


def gen_single_board_its(chip, board, skip_ramdisk=False):
    its_str = {
        "fdt": "",
        "config": "",
    }

    os.makedirs(build_helper.BUILD_OUTPUT_DIR, exist_ok=True)
    its_path = os.path.join(build_helper.BUILD_OUTPUT_DIR, "multi.its.tmp")

    cfg_tmpl = config_noramdisk_tmpl if skip_ramdisk else config_tmpl
    its_str["fdt"] = fdt_tmpl.format(
        chip=chip, board=board, hash_algo=get_hash_algo(board)
    )
    its_str["config"] = cfg_tmpl.format(chip=chip, board=board)

    config_list = config_list_tmpl.format(**its_str)
    fdt_list = fdt_list_tmpl.format(**its_str)

    with open(its_path, "r") as fp:
        FileString = fp.read()
        replaceTmp = insertAfter(FileString, "/*FDT*/", fdt_list)
        replaceDone = insertAfter(replaceTmp, "/*CFG*/", config_list)

    with open(its_path, "w") as fp:
        fp.write(replaceDone)


def get_hash_algo(br_name):
    if "fpga" in br_name:
        return "crc32"
    elif "palladium" in br_name:
        return "crc32"
    return "sha256"


def gen_board_its(input_arch, skip_ramdisk=False):
    its_str = {
        "fdt": "",
        "config": "",
    }
    os.makedirs(build_helper.BUILD_OUTPUT_DIR, exist_ok=True)
    its_path = os.path.join(build_helper.BUILD_OUTPUT_DIR, "multi.its.tmp")

    board_list = []

    for _arch in build_helper.get_chip_list():
        _dir = os.path.join(build_helper.BOARD_DIR, _arch)

        for board_dir in sorted(os.listdir(_dir)):
            if board_dir.strip() == "default":
                continue
            board_dir = os.path.join(build_helper.BOARD_DIR, _arch, board_dir)
            if not os.path.isdir(board_dir):
                continue

            chip_arch, chip, br_name = board_dir_to_name(board_dir)
            if chip_arch == input_arch:
                board_list.append(Arch(chip, br_name))

    cfg_tmpl = config_noramdisk_tmpl if skip_ramdisk else config_tmpl
    its_str["fdt"] = "\n".join(
        fdt_tmpl.format(chip=chip, board=board, hash_algo=get_hash_algo(board))
        for chip, board in board_list
    )
    its_str["config"] = "\n".join(
        cfg_tmpl.format(chip=chip, board=board) for chip, board in board_list
    )

    config_list = config_list_tmpl.format(**its_str)
    fdt_list = fdt_list_tmpl.format(**its_str)

    with open(its_path, "r") as fp:
        FileString = fp.read()
        replaceTmp = insertAfter(FileString, "/*FDT*/", fdt_list)
        replaceDone = insertAfter(replaceTmp, "/*CFG*/", config_list)

    with open(its_path, "w") as fp:
        fp.write(replaceDone)


def main():
    args = parse_args()

    # build_helper.init_logging(args.logfile, stdout_level=args.verbose)
    # build_helper.dump_debug_info()
    logging.debug("[%s] start", datetime.now().isoformat())

    # The location of the top Kconfig
    os.environ["srctree"] = build_helper.BUILD_REPO_DIR

    if args.gen_build_kconfig:
        gen_build_kconfig()

    if args.scan_boards_config:
        boards = scan_boards_config()
        gen_build_env(boards)

    if args.gen_board_env:
        gen_board_env(args.gen_board_env)

    if args.print_usage:
        print_usage()

    if args.list_chip_arch:
        list_chip_arch()

    if args.list_boards:
        list_boards_by_chip_arch(args.list_boards)

    if args.arch:
        gen_board_its(args.arch.lower(), args.skip_ramdisk)

    if args.get_chip_arch:
        get_chip_arch(args.board_name)

    if args.gen_single_board_its:
        gen_single_board_its(
            args.chip_name.lower(), args.board_name.lower(), args.skip_ramdisk
        )

    logging.debug("[%s] finished", datetime.now().isoformat())


if __name__ == "__main__":
    main()
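Note: a minimal illustration (not part of the commit) of what one entry produced by gen_build_kconfig() looks like, using the same chip_choice format string as above; cv1835/cv183x are example values taken from elsewhere in these scripts.

entry = (
    'config CHIP_{chip}\n bool "{chip}"\n select CHIP_ARCH_{chip_arch}'
    .format(chip="cv1835", chip_arch="cv183x")
)
# entry expands to:
# config CHIP_cv1835
#  bool "cv1835"
#  select CHIP_ARCH_cv183x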
100
build/scripts/build_helper.py
Normal file
@@ -0,0 +1,100 @@
#!/usr/bin/env python3

import logging
import sys
import os
from os.path import normpath, split, join
import json

try:
    import coloredlogs
except ImportError:
    coloredlogs = None


# Ubuntu 16.04 LTS contains Python v3.5.2 by default
PYTHON_MIN_VERSION = (3, 5, 2)

MODULE_PATH = normpath(split(__file__)[0])

BUILD_REPO_DIR = normpath(join(MODULE_PATH, ".."))
BUILD_OUTPUT_DIR = join(BUILD_REPO_DIR, "output")
KCONFIG_PATH = join(BUILD_REPO_DIR, "Kconfig")
BOARD_DIR = join(BUILD_REPO_DIR, "boards")
BOARD_KCONFIG_SAVED_GLOB = join(BOARD_DIR, "*/*/*_defconfig")
CHIP_LIST_PATH = join(BOARD_DIR, "chip_list.json")
SENSOR_LIST_PATH = join(BUILD_REPO_DIR, "sensors/sensor_list.json")
SENSOR_KCONFIG_PATH = join(BUILD_REPO_DIR, "output/Kconfig.sensors")
PANEL_LIST_PATH = join(BUILD_REPO_DIR, "panels/panel_list.json")
PANEL_KCONFIG_PATH = join(BUILD_REPO_DIR, "output/Kconfig.panels")


def check_python_min_version():
    if sys.version_info < PYTHON_MIN_VERSION:
        print("Python >= %r is required" % (PYTHON_MIN_VERSION,))
        sys.exit(-1)


def dump_debug_info():
    logging.debug("MODULE_PATH=%s", MODULE_PATH)
    logging.debug("BUILD_REPO_DIR=%s", BUILD_REPO_DIR)
    logging.debug("BOARD_DIR=%s", BOARD_DIR)
    logging.debug("BOARD_KCONFIG_SAVED_GLOB=%s", BOARD_KCONFIG_SAVED_GLOB)


def init_logging(log_file=None, file_level="DEBUG", stdout_level="WARNING"):
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.NOTSET)

    fmt = "%(asctime)s %(levelname)8s:%(module)s:%(message)s"

    if log_file is not None:
        file_handler = logging.FileHandler(log_file, encoding="utf-8")
        file_handler.setFormatter(logging.Formatter(fmt))
        file_handler.setLevel(file_level)
        root_logger.addHandler(file_handler)

    if coloredlogs:
        os.environ["COLOREDLOGS_DATE_FORMAT"] = "%H:%M:%S"

        field_styles = {
            "asctime": {"color": "green"},
            "hostname": {"color": "magenta"},
            "levelname": {"color": "black", "bold": True},
            "name": {"color": "blue"},
            "module": {"color": "blue"},
            "programname": {"color": "cyan"},
        }
        level_styles = coloredlogs.DEFAULT_LEVEL_STYLES
        level_styles["debug"]["color"] = "cyan"

        coloredlogs.install(
            level=stdout_level,
            fmt=fmt,
            field_styles=field_styles,
            level_styles=level_styles,
            milliseconds=True,
        )


def get_segment_from_chip(chip):
    with open(CHIP_LIST_PATH, "r", encoding="utf-8") as fp:
        din = json.load(fp)

    for arch, segments in din.items():
        for seg, chips in segments.items():
            if chip in chips:
                return seg

    raise IndexError("%s is not in chip_list.json" % chip)


def get_chip_list():
    with open(CHIP_LIST_PATH, "r", encoding="utf-8") as fp:
        din = json.load(fp)

    chips = {}
    for arch, segments in din.items():
        chips[arch] = [j for i in segments.values() for j in i]

    return chips
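Note: chip_list.json itself is not part of this commit. The sketch below shows the two-level layout that get_chip_list() and get_segment_from_chip() assume (architecture -> segment -> chips); the segment name is a placeholder, while cv183x/cv1835/cv1822 appear elsewhere in these scripts.

# Hypothetical build/boards/chip_list.json content:
chip_list_example = {
    "cv183x": {                        # chip architecture
        "soc": ["cv1835", "cv1822"],   # segment name -> list of chips (illustrative)
    },
}
# get_chip_list()                 -> {"cv183x": ["cv1835", "cv1822"]}
# get_segment_from_chip("cv1835") -> "soc"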
43
build/scripts/defconfig.py
Executable file
@@ -0,0 +1,43 @@
#!/usr/bin/env python3

# Copyright (c) 2019, Ulf Magnusson
# SPDX-License-Identifier: ISC

"""
Reads a specified configuration file, then writes a new configuration file.
This can be used to initialize the configuration from e.g. an arch-specific
configuration file. This input configuration file would usually be a minimal
configuration file, as generated by e.g. savedefconfig.

The default output filename is '.config'. A different filename can be passed in
the KCONFIG_CONFIG environment variable.
"""
import argparse

import kconfiglib


def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=__doc__)

    parser.add_argument(
        "--kconfig",
        default="Kconfig",
        help="Top-level Kconfig file (default: Kconfig)")

    parser.add_argument(
        "config",
        metavar="CONFIGURATION",
        help="Input configuration file")

    args = parser.parse_args()

    kconf = kconfiglib.Kconfig(args.kconfig, suppress_traceback=True)
    print(kconf.load_config(args.config))
    print(kconf.write_config())


if __name__ == "__main__":
    main()
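Note: a rough usage sketch (not part of the commit) restating what defconfig.py does with kconfiglib; the defconfig path is a placeholder, and KCONFIG_CONFIG redirects the written file as the docstring notes.

import os
import kconfiglib

os.environ["KCONFIG_CONFIG"] = ".config"              # optional; ".config" is the default anyway
kconf = kconfiglib.Kconfig("Kconfig", suppress_traceback=True)
print(kconf.load_config("path/to/board_defconfig"))   # placeholder input defconfig
print(kconf.write_config())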
79
build/scripts/fip_v1.mk
Normal file
@@ -0,0 +1,79 @@
ifeq (${CONFIG_ATF_SRC},)
FIP_PRE_MERGE_DEPS :=
else ifeq (${CONFIG_ATF_SRC},y)
# Build arm-trusted-firmware and bm_bld and put into rel-bin/* git repo
FIP_PRE_MERGE_DEPS := arm-trusted-firmware
else
$(error CONFIG_ATF_SRC=${CONFIG_ATF_SRC} is not used)
endif

FIP_PRE_BIN_DIR := ${OUTPUT_DIR}/fip_pre

${FIP_PRE_BIN_DIR}:
	${Q}mkdir -p $@

ifeq (${ATF_KEY_SEL},clear)
define pad_atf_crc_action
	${BUILD_PATH}/scripts/pad_atf_crc.py --pad-crc ${1}
endef
else
define pad_atf_crc_action
endef
endif

FIP_PRE_SUFFIX := $(if ${CONFIG_MULTI_FIP},_single)
FIP_PRE_SUFFIX_EXTRA :=

ifeq ($(ENABLE_ALIOS_FASTBOOT), y)
FIP_PRE_SUFFIX_EXTRA := _alios
endif

ATF_FIP_BIN_PATH := ${RELEASE_BIN_ATF_DIR}/fip_atf_${CHIP_ARCH_L}_${ATF_KEY_SEL}${FIP_PRE_SUFFIX}${FIP_PRE_SUFFIX_EXTRA}.bin
BLD_TAR_PATH := ${RELEASE_BIN_BLD_DIR}/bld_${CHIP_ARCH_L}_${CHIP}_${BOARD}_${ATF_KEY_SEL}${FIP_PRE_SUFFIX}.tar

# Generate fip_pre.bin from rel-bin/* git repo
fip-pre-merge-build: ${FIP_PRE_BIN_DIR} ${FIP_PRE_MERGE_DEPS}
	$(call print_target)
	${Q}cp ${RELEASE_BIN_BLP_DIR}/blp_${CHIP_ARCH_L}_${CHIP}.bin ${FIP_PRE_BIN_DIR}/blp.bin
	${Q}cp ${RELEASE_BIN_BLDP_DIR}/bldp_${CHIP_ARCH_L}_${CHIP}_${DDR_CFG}.bin ${FIP_PRE_BIN_DIR}/bldp.bin
	${call pad_atf_crc_action,${FIP_PRE_BIN_DIR}/blp.bin}
	${call pad_atf_crc_action,${FIP_PRE_BIN_DIR}/bldp.bin}
	${Q}python3 ${TOOLS_PATH}/${CHIP_ARCH_L}/pack_fip/pack_fip.py \
		${ATF_FIP_BIN_PATH} \
		$(if ${CONFIG_MULTI_FIP},--multibin) \
		--output=${FIP_PRE_BIN_DIR}/fip_pre.bin \
		--add-license_file=${LICENSE_PATH} \
		--add-blp=${FIP_PRE_BIN_DIR}/blp.bin \
		--add-ddrc=${FIP_PRE_BIN_DIR}/bldp.bin \
		--add-bld-tar=${BLD_TAR_PATH}
	${Q}ls -l ${FIP_PRE_BIN_DIR}/fip_pre.bin

ifeq ($(wildcard ${RELEASE_BIN_BLD_DIR}/*),)
# No rel_bin. Use install/../fip_pre/
fip-pre-merge:
else
# rel_bin. Use rel_bin/....
fip-pre-merge: fip-pre-merge-build

ifeq ($(wildcard ${RELEASE_BIN_LICENSE_DIR}/*),)
fip-pre-merge: export LICENSE_PATH=${ATF_PATH}/tools/license.scm.enc
else
fip-pre-merge: export LICENSE_PATH=${RELEASE_BIN_LICENSE_DIR}/license_${CHIP_SEGMENT}_for_Customer.scm.enc
endif

endif

u-boot-dep: fip-pre-merge u-boot-build $(if ${CONFIG_ENABLE_FREERTOS},rtos)
	$(call print_target)
	$(call uboot_compress_action)
ifeq (${CONFIG_MULTI_FIP},y)
	${Q}python3 ${TOOLS_PATH}/${CHIP_ARCH_L}/pack_fip/pack_fip_multibin.py --multibin \
		${FIP_PRE_BIN_DIR}/fip_pre.bin \
		--fastboot=${FREERTOS_PATH}/cvirtos.bin \
		--bl33 ${UBOOT_PATH}/${UBOOT_OUTPUT_FOLDER}/u-boot.bin --output ${FIP_PRE_BIN_DIR}/fip.bin
	${Q}python3 ${IMGTOOL_PATH}/raw2cimg.py ${FIP_PRE_BIN_DIR}/fip_2nd.bin ${OUTPUT_DIR} ${FLASH_PARTITION_XML}
	${Q}cp ${FIP_PRE_BIN_DIR}/fip_1st.bin ${OUTPUT_DIR}/fip.bin
else
	${Q}python3 ${TOOLS_PATH}/${CHIP_ARCH_L}/pack_fip/pack_fip.py ${FIP_PRE_BIN_DIR}/fip_pre.bin \
		--add-bl33 ${UBOOT_PATH}/${UBOOT_OUTPUT_FOLDER}/u-boot.bin --output ${OUTPUT_DIR}/fip.bin
endif
50
build/scripts/fip_v2.mk
Normal file
@@ -0,0 +1,50 @@
opensbi: export CROSS_COMPILE=$(CONFIG_CROSS_COMPILE_SDK)
opensbi: u-boot-build
	$(call print_target)
	${Q}$(MAKE) -j${NPROC} -C ${OPENSBI_PATH} PLATFORM=generic \
		FW_PAYLOAD_PATH=${UBOOT_PATH}/${UBOOT_OUTPUT_FOLDER}/u-boot-raw.bin \
		FW_FDT_PATH=${UBOOT_PATH}/${UBOOT_OUTPUT_FOLDER}/arch/riscv/dts/${CHIP}_${BOARD}.dtb

opensbi-clean:
	$(call print_target)
	${Q}$(MAKE) -C ${OPENSBI_PATH} PLATFORM=generic distclean

FSBL_OUTPUT_PATH = ${FSBL_PATH}/build/${PROJECT_FULLNAME}
ifeq ($(call qstrip,${CONFIG_ARCH}),riscv)
fsbl-build: opensbi
endif
ifeq (${CONFIG_ENABLE_FREERTOS},y)
fsbl-build: rtos
fsbl%: export BLCP_2ND_PATH=${FREERTOS_PATH}/cvitek/install/bin/cvirtos.bin
fsbl%: export RTOS_DUMP_PRINT_ENABLE=$(CONFIG_ENABLE_RTOS_DUMP_PRINT)
fsbl%: export RTOS_DUMP_PRINT_SZ_IDX=$(CONFIG_DUMP_PRINT_SZ_IDX)
fsbl%: export RTOS_FAST_IMAGE_TYPE=${CONFIG_FAST_IMAGE_TYPE}
fsbl%: export RTOS_ENABLE_FREERTOS=${CONFIG_ENABLE_FREERTOS}
endif
fsbl%: export FSBL_SECURE_BOOT_SUPPORT=${CONFIG_FSBL_SECURE_BOOT_SUPPORT}
fsbl%: export ARCH=$(call qstrip,${CONFIG_ARCH})
fsbl%: export OD_CLK_SEL=${CONFIG_OD_CLK_SEL}
fsbl%: export VC_CLK_OVERDRIVE=${CONFIG_VC_CLK_OVERDRIVE}
fsbl-build: u-boot-build memory-map
	$(call print_target)
	${Q}mkdir -p ${FSBL_PATH}/build
	${Q}ln -snrf -t ${FSBL_PATH}/build ${CVI_BOARD_MEMMAP_H_PATH}
	${Q}$(MAKE) -j${NPROC} -C ${FSBL_PATH} O=${FSBL_OUTPUT_PATH} BLCP_2ND_PATH=${BLCP_2ND_PATH} \
		LOADER_2ND_PATH=${UBOOT_PATH}/${UBOOT_OUTPUT_FOLDER}/u-boot-raw.bin
	${Q}cp ${FSBL_OUTPUT_PATH}/fip.bin ${OUTPUT_DIR}/

fsbl-clean: rtos-clean
	$(call print_target)


u-boot-dep: fsbl-build ${OUTPUT_DIR}/elf
	$(call print_target)
ifeq ($(call qstrip,${CONFIG_ARCH}),riscv)
	${Q}cp ${OPENSBI_PATH}/build/platform/generic/firmware/fw_payload.bin ${OUTPUT_DIR}/fw_payload_uboot.bin
	${Q}cp ${OPENSBI_PATH}/build/platform/generic/firmware/fw_payload.elf ${OUTPUT_DIR}/elf/fw_payload_uboot.elf
endif

ifeq ($(call qstrip,${CONFIG_ARCH}),riscv)
u-boot-clean: opensbi-clean
endif
u-boot-clean: fsbl-clean
123
build/scripts/gen_panel_config.py
Executable file
@@ -0,0 +1,123 @@
#!/usr/bin/env python3
# PYTHON_ARGCOMPLETE_OK

import json
import build_helper

kconfig_tmpl = """
#
# Automatically generated by gen_panel_config.py; DO NOT EDIT.
#

menu "Panel settings"
{0}

{1}

endmenu
"""

kconfig_choice_tmpl = """
choice
prompt "{0}"
{1}
endchoice
"""

kconfig_config_bool_tmpl = """
config {0}
    bool "{1}"
    help
        "y" Config {1}.
"""

kconfig_config_str_tmpl = """
config {1}
string{0}
"""

param_default_str_tmpl = """
default "{0}" if {1}"""


def gen_panel_list(panel_intf_list):
    kconfig_panel_list = ""

    for panel_intf in panel_intf_list:
        panel_list = panel_intf_list[panel_intf]
        panel_intf = panel_intf.upper()

        kconfig_panel_config_list = ""
        for panel in panel_list:
            panel_name_u = panel.upper()
            panel_name_l = panel.lower()

            kconfig_panel_config_list = (
                kconfig_panel_config_list
                + kconfig_config_bool_tmpl.format(
                    panel_intf + "_PANEL_" + panel_name_u,
                    panel_intf + "_panel_" + panel_name_l))

        kconfig_panel_list = kconfig_panel_list + kconfig_panel_config_list
    kconfig_panel_list = kconfig_choice_tmpl.format("Panel selecting", kconfig_panel_list)

    return kconfig_panel_list


def gen_panel_tuning_list(panel_intf_list, tuning_param):
    kconfig_panel_tuning_list = ""
    param_default_str = ""

    for panel_intf in panel_intf_list:
        panel_list = panel_intf_list[panel_intf]
        panel_intf = panel_intf.upper()

        for panel in panel_list:
            panel_u = panel.upper()
            panel_l = panel.lower()
            param_default_str = (
                param_default_str
                + param_default_str_tmpl.format(panel_intf + "_panel_" + panel_l, panel_intf + "_PANEL_" + panel_u))

    kconfig_panel_tuning_list = kconfig_config_str_tmpl.format(param_default_str, tuning_param)

    return kconfig_panel_tuning_list


def gen_panel_tuning_param_list(panel_param_list, tuning_param):
    param_default_str = ""

    for panel_param in panel_param_list:
        panel_param_u = panel_param.upper()
        panel_param_l = panel_param.lower()
        param_default_str = (
            param_default_str
            + param_default_str_tmpl.format("MIPI_panel_" + panel_param_l, "MIPI_PANEL_" + panel_param_u))

    kconfig_panel_tuning_param_list = kconfig_config_str_tmpl.format(param_default_str, tuning_param)

    return kconfig_panel_tuning_param_list


def main():

    with open(build_helper.PANEL_LIST_PATH, "r", encoding="utf-8") as fp:
        panel_list_json = json.load(fp)
    panel_intf_list = panel_list_json['panel_list']
    panel_param_list = panel_list_json['panel_param']
    kconfig_panel_list = gen_panel_list(panel_intf_list)
    kconfig_panel_tuning_param_list = (
        gen_panel_tuning_list(panel_intf_list, "PANEL_TUNING_PARAM")
        + gen_panel_tuning_param_list(panel_param_list['lane_num'], "PANEL_LANE_NUM_TUNING_PARAM")
        + gen_panel_tuning_param_list(panel_param_list['lane_swap'], "PANEL_LANE_SWAP_TUNING_PARAM"))

    kconfig = kconfig_tmpl.format(
        kconfig_panel_list, kconfig_panel_tuning_param_list
    )

    with open(build_helper.PANEL_KCONFIG_PATH, "w") as fp:
        fp.write(kconfig)


if __name__ == "__main__":
    main()
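Note: panels/panel_list.json itself is not in this commit; the sketch below shows the shape main() expects (interface -> panel names, plus lane_num/lane_swap parameter lists). All names are placeholders.

panel_list_example = {
    "panel_list": {
        "mipi": ["panel_a", "panel_b"],   # yields MIPI_PANEL_PANEL_A / MIPI_PANEL_PANEL_B bools
        "lvds": ["panel_c"],
    },
    "panel_param": {
        "lane_num": ["2lane", "4lane"],   # feeds PANEL_LANE_NUM_TUNING_PARAM defaults
        "lane_swap": ["no_swap"],         # feeds PANEL_LANE_SWAP_TUNING_PARAM defaults
    },
}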
433
build/scripts/gen_release_bin.py
Executable file
@@ -0,0 +1,433 @@
#!/usr/bin/env python3
# PYTHON_ARGCOMPLETE_OK

import logging
import os
import re
import argparse
import itertools
import collections
import json
import os.path
import subprocess
import glob
from datetime import datetime

import build_helper
import sign_fip

try:
    import argcomplete
except ImportError:
    argcomplete = None


build_helper.check_python_min_version()

Board = collections.namedtuple("Board", "chip, board, ddr_cfg, info")
Target = collections.namedtuple("Target", "chip_arch chip is_multi_fip atf_key")

GEN_REL_BIN_EXCLUDE_CHIPS = [
    "cv181x",
    "cv180x",
]

GEN_REL_BIN_EXCLUDE = [
    "cv1835_fpga",
    "cv1835_palladium",
    "cv1822_fpga",
    "cv1822_palladium",
    "cv181x_fpga",
    "cv181x_fpga_c906",
    "cv181x_riscv-fpga",
    "cv181x_palladium",
    "cv1826_wevb_0005a_alios_spinand",
]

ATF_REPO_BRANCH = {}

ATF_REPO_PATH = "arm-trusted-firmware"
BLD_REPO_PATH = "bm_bld"
REL_BIN_ATF = "rel_bin/release_bin_atf"
REL_BIN_BLDS = {
    "bld": "rel_bin/release_bin_bld",
    "bldp": "rel_bin/release_bin_bldp",
    "blp": "rel_bin/release_bin_blp",
}

KEYSERVER = "10.18.98.102"
KEYSERVER_SSHKEY_PATH = os.path.join(
    ATF_REPO_PATH, "tools/build_script/service_sign@cvi_keyserver.pem"
)


def parse_args():
    parser = argparse.ArgumentParser(
        description="Scan boards to generate env and configs"
    )
    parser.add_argument(
        "-v",
        "--verbose",
        default="INFO",
        choices=["CRITICAL", "DEBUG", "ERROR", "INFO", "NOTSET", "WARNING"],
    )
    parser.add_argument("--logfile", type=str)
    parser.add_argument("--gen-atf", action="store_true")
    parser.add_argument("--gen-bld", action="store_true")
    parser.add_argument("--push", action="store_true")
    parser.add_argument("--exclude", action="append")

    if argcomplete:
        argcomplete.autocomplete(parser)

    return parser.parse_args()


def board_dir_to_name(board_dir):
    chips = build_helper.get_chip_list()
    chip_list = list(itertools.chain(*chips.values()))

    m = re.search(
        r"^([0-9a-z]+)_(.+)$", os.path.basename(board_dir), flags=re.IGNORECASE
    )
    chip, br_name = m.groups()
    if chip not in chip_list:
        raise Exception(
            "%r of %r is unknown (missing in chip_list.json?)" % (chip, board_dir)
        )

    for chip_arch, xlist in chips.items():
        if chip in xlist:
            break
    else:
        raise Exception("Can't find CHIP_ARCH for %r" % chip)

    return chip_arch, chip, br_name


def get_current_branch(git_path):
    ret = subprocess.run(
        ["git", "rev-parse", "--abbrev-ref", "HEAD"],
        cwd=git_path,
        check=True,
        stdout=subprocess.PIPE,
    )
    return ret.stdout.decode().strip()


def get_atf_branch(chip_arch, is_multi_fip):
    if is_multi_fip:
        return "cv1835_multibin_boot"

    try:
        atf_branch = [b for b, a in ATF_REPO_BRANCH.items() if chip_arch in a][0]
    except IndexError:
        atf_branch = "master"

    return atf_branch


def checkout_atf_branch(atf_branch):
    current = get_current_branch(ATF_REPO_PATH)

    if current == atf_branch:
        return

    logging.info("checkout_atf_branch: %s", atf_branch)

    if atf_branch != "master":
        git_cmd = [
            "git",
            "fetch",
            "--depth",
            "1",
            "origin",
            "%s:%s" % (atf_branch, atf_branch),
        ]
        subprocess.run(git_cmd, cwd=ATF_REPO_PATH, check=True)

    subprocess.run(["git", "checkout", atf_branch], cwd=ATF_REPO_PATH, check=True)
    subprocess.run(["git", "clean", "-fd"], cwd=ATF_REPO_PATH, check=True)


def list_board_dirs():
    for board_conf in sorted(glob.glob(build_helper.BOARD_KCONFIG_SAVED_GLOB)):
        board_dir = os.path.dirname(board_conf)

        if "/default/" in board_dir:
            continue

        if not os.path.isdir(board_dir):
            continue

        yield board_dir


def gen_bld_for_board(chip_arch, board, atf_key):
    logging.info("gen_bld_rel_bin_for_board=%s (%s)", board, atf_key)

    for i in [ATF_REPO_PATH, BLD_REPO_PATH]:
        if not os.path.exists(i):
            raise Exception("%s doesn't exist" % i)

    chip_arch, chip, br_name = board_dir_to_name(board)
    logging.info("%r", [chip_arch, chip, br_name])
    atf_branch = get_atf_branch(chip_arch, "rtos" in br_name)

    checkout_atf_branch(atf_branch)

    cj_path = os.path.join(
        "build", build_helper.BOARD_DIR, chip_arch, board, "config.json"
    )
    with open(cj_path, "r", encoding="utf-8") as fp:
        cj = json.load(fp)
    ddr_cfg_list = cj["ddr_cfg_list"]

    ddr_cfg_list = [i for i in ddr_cfg_list if i]
    if not ddr_cfg_list:
        ddr_cfg_list = ["none"]

    # Build for all DDR_CFG
    if atf_key != "":
        atf_key = "setconfig ATF_KEY_SEL_%s=y" % atf_key

    for d in ddr_cfg_list:
        logging.info("ddr_cfg=%s", d)

        script = """
set -eo pipefail
source build/envsetup_soc.sh f
defconfig %(board)s
setconfig DDR_CFG_%(ddr_cfg)s=y
%(atf_key)s
clean_bld
clean_atf
build_atf
""" % {
            "board": board,
            "ddr_cfg": d,
            "atf_key": atf_key,
        }
        subprocess.run(["bash"], input=script.encode(), shell=True, check=True)


def gen_bld(push):
    boards = [board_dir_to_name(e) for e in list_board_dirs()]
    boards = [
        (get_atf_branch(chip_arch, False), chip_arch, chip, br)
        for chip_arch, chip, br in boards
    ]
    boards.sort()
    for n, (_, chip_arch, chip, br) in enumerate(boards):
        if chip in GEN_REL_BIN_EXCLUDE_CHIPS:
            continue

        fullname = chip + "_" + br
        if fullname in GEN_REL_BIN_EXCLUDE:
            continue

        gen_bld_for_board(chip_arch, fullname, "")
        if chip_arch == "cv183x" and "rtos" not in fullname:
            gen_bld_for_board(chip_arch, fullname, "clear")

    checkout_atf_branch("master")

    for i in sorted(glob.glob(os.path.join(REL_BIN_BLDS["bld"], "*_key0.tar"))):
        if "_single" in i:
            continue
        if "_rtos" in i:
            continue

        b = re.findall(r'^bld_(.*?)_(.*)_key0.tar', os.path.basename(i))[0][1]
        chip_arch, chip, _ = board_dir_to_name(b)
        sign_fip.sign_bld(i, chip_arch, chip)

    git_cmd = ["git", "log", r"--pretty=format:%h %aI%n%s%n%b", "-n", "1"]
    ret = subprocess.run(git_cmd, cwd=BLD_REPO_PATH, check=True, stdout=subprocess.PIPE)
    message = ret.stdout.decode()
    message = "\n".join([">>> " + i for i in message.split("\n")])
    logging.info("message=%r", message)

    for path in REL_BIN_BLDS.values():
        subprocess.run(["git", "add", "."], cwd=path, check=True)
        ret = subprocess.run(
            ["git", "status", "--porcelain"],
            cwd=path,
            check=True,
            stdout=subprocess.PIPE,
        )

        if not ret.stdout.decode().strip():
            logging.info("%s: nothing to commit", path)
            continue

        subprocess.run(
            ["git", "commit", "-F", "-"],
            cwd=path,
            input=message.encode(),
            check=True,
        )
        if push:
            subprocess.run(
                ["git", "push", "origin", "HEAD:master"], cwd=path, check=True
            )


def gen_atf_for_chip(target):
    logging.info("gen_atf_for_chip=%r", target)

    for i in [ATF_REPO_PATH, BLD_REPO_PATH]:
        if not os.path.exists(i):
            raise Exception("%s doesn't exist" % i)

    boards = [board_dir_to_name(e) for e in list_board_dirs()]

    for _, chip, br in boards:
        if "fpga" in br or "palladium" in br:
            continue

        if target.chip in chip:
            break
    else:
        logging.warning("No board for %s", target.chip)
        return

    board = chip + "_" + br

    atf_branch = get_atf_branch(target.chip_arch, target.is_multi_fip)
    logging.info("atf_branch=%s board=%s", atf_branch, board)
    checkout_atf_branch(atf_branch)

    git_cmd = ["git", "log", r"--pretty=format:%h %aI%n%s%n%b", "-n", "1"]
    ret = subprocess.run(git_cmd, cwd=ATF_REPO_PATH, check=True, stdout=subprocess.PIPE)
    message = ret.stdout.decode()

    # Build fip.bin
    atf_key = ""
    if target.atf_key:
        atf_key = "setconfig ATF_KEY_SEL_%s=y" % target.atf_key

    script = """
set -eo pipefail
source build/envsetup_soc.sh f
defconfig %(board)s
%(multibin)s
%(atf_key)s
clean_bld
clean_atf
build_atf
""" % {
        "board": board,
        "multibin": "setconfig MULTI_FIP=y" if target.is_multi_fip else "",
        "atf_key": atf_key,
    }
    subprocess.run(["bash"], input=script.encode(), shell=True, check=True)

    # Check status
    ret = subprocess.run(
        ["git", "status", "--porcelain", "-z"],
        cwd=REL_BIN_ATF,
        check=True,
        stdout=subprocess.PIPE,
    )
    out = ret.stdout.strip(b"\0").split(b"\0")
    out = [i.decode() for i in out]

    fip = []
    txt = []
    for i in out:
        if re.search(r"^( [MA]|\?\?) .+\.bin", i):
            fip.append(i)
        elif re.search(r"^( [MA]|\?\?) .+\.txt", i):
            txt.append(i)

    if len(fip) != 1:
        raise ValueError("Only one fip.bin should be generated (%r)", fip)
    if len(txt) > 1:
        raise ValueError("Only one fip.txt should be generated (%r)", txt)

    # Add fip.bin
    logging.info("add fip %s", fip[0])
    subprocess.run(["git", "add", fip[0][3:]], cwd=REL_BIN_ATF, check=True)
    if txt:
        logging.info("add txt %s", txt[0])
        subprocess.run(["git", "add", txt[0][3:]], cwd=REL_BIN_ATF, check=True)

    if fip[0][3:].endswith("_key0.bin"):
        sign_fip.sign_atf(os.path.join(REL_BIN_ATF, fip[0][3:]))
        subprocess.run(
            ["git", "add", fip[0][3:].replace("_key0", "_key1")],
            cwd=REL_BIN_ATF,
            check=True,
        )

    return message


def gen_atf(push):
    chip_list = build_helper.get_chip_list()

    xlist = [
        (get_atf_branch(chip_arch, False), chip_arch, chips, False)
        for chip_arch, chips in chip_list.items()
    ]
    xlist.sort(reverse=True)

    targets = [Target("cv183x", "cv1835", None, "clear")]
    for _, chip_arch, chips, is_multi_fip in xlist:
        if not chips:
            continue
        t = Target(chip_arch, chips[0], is_multi_fip, None)
        targets.append(t)

    message = set()
    for t in targets:
        m = gen_atf_for_chip(t)
        if not m:
            continue
        message.add(m.strip())

    message = "\n\n".join(message)
    message = "\n".join([">>> " + i for i in message.split("\n")])
    logging.info("message=%r", message)

    subprocess.run(
        ["git", "commit", "-F", "-"],
        cwd=REL_BIN_ATF,
        input=message.encode(),
        check=True,
    )

    if push:
        subprocess.run(
            ["git", "push", "origin", "HEAD:master"], cwd=REL_BIN_ATF, check=True
        )


def main():
    args = parse_args()

    build_helper.init_logging(args.logfile, stdout_level=args.verbose)
    logging.debug("[%s] start", datetime.now().isoformat())

    # The location of the top Kconfig
    os.environ["srctree"] = build_helper.BUILD_REPO_DIR

    os.environ["KEYSERVER"] = KEYSERVER
    os.environ["KEYSERVER_SSHKEY_PATH"] = KEYSERVER_SSHKEY_PATH
    os.environ["RELEASE_BIN_ATF_DIR"] = REL_BIN_ATF

    if args.exclude:
        GEN_REL_BIN_EXCLUDE.extend(args.exclude)

    if args.gen_bld:
        gen_bld(args.push)

    if args.gen_atf:
        gen_atf(args.push)

    logging.debug("[%s] finished", datetime.now().isoformat())


if __name__ == "__main__":
    main()
159
build/scripts/gen_sensor_config.py
Executable file
@@ -0,0 +1,159 @@
#!/usr/bin/env python3
# PYTHON_ARGCOMPLETE_OK

import os
import json
import build_helper

kconfig_tmpl = """
#
# Automatically generated by gen_sensor_config.py; DO NOT EDIT.
#

menu "Sensor settings"
menu "Sensor support list"
{0}
endmenu

menu "Sensor tuning param config"
{1}
endmenu
endmenu
"""

kconfig_sensor_tmpl = """
config SENSOR_{0}
    bool "Choose sensor {1}"
    default n
    help
        "y" Add sensor {1} to libsns_full.so.
"""

kconfig_param_config_menu_tmpl = """
if {0}
menu "{1}"
choice
    prompt "sensor tuning param"
{2}
endchoice
endmenu
endif
"""

kconfig_param_config_tmpl = """
config SENSOR_TUNING_PARAM_{0}
    bool "{1}"
"""

kconfig_param_config_str_tmpl = """
config SENSOR_TUNING_PARAM
    string{0}"""

kconfig_param_config_str_item_tmpl = """
    default "{1}" if SENSOR_TUNING_PARAM_{0}"""

kconfig_param_config_default_tmpl = """
menu "src"
choice
    prompt "sensor tuning param"

config SENSOR_TUNING_PARAM_cv183x_src_sony_imx307
    bool "sony_imx307"

endchoice
endmenu

config SENSOR_TUNING_PARAM
    string
    default "sony_imx307" if SENSOR_TUNING_PARAM_cv183x_src_sony_imx307
"""


def gen_sensor_support_list():
    with open(build_helper.SENSOR_LIST_PATH, "r", encoding="utf-8") as fp:
        sensor_list_json = json.load(fp)

    sensor_list = sensor_list_json['sensor_list']

    kconfig_sensor_list = ""

    for sensor in sensor_list:
        sensor_name_u = sensor.upper()
        sensor_name_l = sensor.lower()
        kconfig_sensor_list = (kconfig_sensor_list
                               + kconfig_sensor_tmpl.format(sensor_name_u, sensor_name_l))

    return kconfig_sensor_list


def gen_sensor_tuning_param_list():
    menu_list = ""
    param_str = ""

    isp_tuning_path = os.path.normpath(os.path.join(build_helper.BUILD_REPO_DIR, "../isp_tuning"))

    if not os.path.exists(isp_tuning_path):
        print("isp_tuning_path: " + isp_tuning_path + " does not exist")
        return kconfig_param_config_default_tmpl

    chip_list = os.listdir(isp_tuning_path)

    for arch in chip_list:
        if os.path.isdir(os.path.join(isp_tuning_path, arch)) and arch != ".git":
            temp_path = os.path.join(isp_tuning_path, arch)
            customers_list = os.listdir(temp_path)
            for customers in customers_list:
                temp_path = os.path.join(isp_tuning_path, arch)
                if os.path.isdir(os.path.join(temp_path, customers)):
                    param_config_list = ""
                    temp_path = os.path.join(temp_path, customers)
                    param_list = os.listdir(temp_path)
                    for param in param_list:
                        if os.path.isdir(os.path.join(temp_path, param)):
                            temp_str = kconfig_param_config_tmpl.format(
                                arch + "_" + customers + "_" + param,
                                param
                            )
                            param_config_list = param_config_list + temp_str

                            temp_str = kconfig_param_config_str_item_tmpl.format(
                                arch + "_" + customers + "_" + param,
                                param
                            )
                            param_str = param_str + temp_str

                    chips = build_helper.get_chip_list()
                    temp_chip_list = []
                    for chip_arch, xlist in chips.items():
                        if chip_arch.upper() == arch.upper():
                            for x in xlist:
                                temp_chip_list.append("CHIP_" + x)

                    temp_str = kconfig_param_config_menu_tmpl.format(
                        " || ".join(temp_chip_list),
                        customers,
                        param_config_list
                    )
                    menu_list = menu_list + temp_str

    param_str = kconfig_param_config_str_tmpl.format(param_str)

    return (menu_list + param_str)


def main():
    kconfig_sensor_list = gen_sensor_support_list()
    kconfig_param_list = gen_sensor_tuning_param_list()

    kconfig = kconfig_tmpl.format(
        kconfig_sensor_list,
        kconfig_param_list
    )

    with open(build_helper.SENSOR_KCONFIG_PATH, "w") as fp:
        fp.write(kconfig)


if __name__ == "__main__":
    main()
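For reference, a minimal sketch (not part of the commit) of what one entry of the generated "Sensor support list" menu expands to. The sensor name sony_imx327 is only a placeholder; real names come from the "sensor_list" array in the JSON file at build_helper.SENSOR_LIST_PATH.

# Illustrative sketch only: expand one sensor entry using the same template
# text as gen_sensor_config.py above. "SONY_IMX327" is a placeholder name.
kconfig_sensor_tmpl = """
config SENSOR_{0}
    bool "Choose sensor {1}"
    default n
    help
        "y" Add sensor {1} to libsns_full.so.
"""

if __name__ == "__main__":
    # Prints one Kconfig fragment of the kind collected into the menu.
    print(kconfig_sensor_tmpl.format("SONY_IMX327", "sony_imx327"))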
103
build/scripts/gencmakeconfig.py
Executable file
103
build/scripts/gencmakeconfig.py
Executable file
@ -0,0 +1,103 @@
#!/usr/bin/env python3
# PYTHON_ARGCOMPLETE_OK
import argparse
import os
import logging
from os.path import join

import build_helper
import kconfiglib

build_helper.check_python_min_version()

try:
    import argcomplete
except ImportError:
    argcomplete = None


def _cmake_contents(kconfig, header):
    chunks = [header]
    add = chunks.append
    config_vars = []

    for sym in kconfig.unique_defined_syms:
        # _write_to_conf is determined when the value is calculated. This
        # is a hidden function call due to property magic.
        val = sym.str_value
        if not sym._write_to_conf:
            continue
        if sym.orig_type in (kconfiglib.BOOL, kconfiglib.TRISTATE) and val == "n":
            val = ""
        add('set({}{} "{}")\n'.format(kconfig.config_prefix, sym.name, val))
        config_vars.append(str(kconfig.config_prefix + sym.name))
    add("set(CONFIGS_LIST {})\n".format(";".join(config_vars)))
    return "".join(chunks)


def write_cmake(kconfig, filename, gui):
    cmake_conf_header = "# Generated by gencmakeconfig.py\n"
    cmake_conf_header += "### DO NOT edit this file!! ###\n\n"
    cmake_conf_content = _cmake_contents(kconfig, cmake_conf_header)
    # Don't rewrite the file (and bump its timestamp) if the config is unchanged.
    if os.path.exists(filename):
        with open(filename) as f:
            if f.read() == cmake_conf_content:
                return
    with open(filename, "w") as f:
        f.write(cmake_conf_content)


def write_all_configs(kconfig):
    conf = {}

    for sym in kconfig.unique_defined_syms:
        conf[str(sym.name)] = "<{}>".format(kconfiglib.TYPE_TO_STR[sym.orig_type])

    with open(join(build_helper.BUILD_OUTPUT_DIR, "config_map.sh"), "w") as fp:
        fp.write(
            """#!/bin/bash
unset _BUILD_KCONFIG_MAP
declare -g -A _BUILD_KCONFIG_MAP
\n"""
        )
        fp.write(
            "\n".join(
                ("_BUILD_KCONFIG_MAP['{0}=']='{1}'".format(k, v) for k, v in conf.items())
            )
        )


def load_board_config(path):
    logging.debug("load %s", path)

    kconf = kconfiglib.Kconfig(
        build_helper.KCONFIG_PATH, suppress_traceback=True, warn=True
    )
    kconf.load_config(path)

    return kconf


def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter, description=__doc__
    )

    parser.add_argument(
        "config", metavar="CONFIGURATION", help="Input configuration file"
    )

    if argcomplete:
        argcomplete.autocomplete(parser)
    args = parser.parse_args()

    kconfig = load_board_config(args.config)

    write_cmake(kconfig, "config.cmake", False)
    write_all_configs(kconfig)


if __name__ == "__main__":
    main()
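For reference, a hedged sketch of the config.cmake shape that write_cmake() produces, rebuilt from a plain dict so it runs without a Kconfig tree; the symbol names and values, and the literal "CONFIG_" prefix (kconfiglib's usual config_prefix), are placeholders rather than output captured from the real build.

# Illustrative sketch only: mimic the set()/CONFIGS_LIST layout emitted above.
symbols = {"CHIP_ARCH": "cv181x", "DDR_CFG": "ddr3_1866_x16"}  # placeholders

lines = ["# Generated by gencmakeconfig.py", "### DO NOT edit this file!! ###", ""]
for name, val in symbols.items():
    lines.append('set(CONFIG_{} "{}")'.format(name, val))
lines.append("set(CONFIGS_LIST {})".format(";".join("CONFIG_" + n for n in symbols)))

print("\n".join(lines))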
2319
build/scripts/guiconfig.py
Executable file
2319
build/scripts/guiconfig.py
Executable file
File diff suppressed because it is too large
7160
build/scripts/kconfiglib.py
Executable file
7160
build/scripts/kconfiglib.py
Executable file
File diff suppressed because it is too large
3278
build/scripts/menuconfig.py
Executable file
3278
build/scripts/menuconfig.py
Executable file
File diff suppressed because it is too large
30
build/scripts/mmap.mk
Normal file
30
build/scripts/mmap.mk
Normal file
@ -0,0 +1,30 @@
.PHONY: memory-map

CVI_BOARD_MEMMAP_H_PATH := ${BUILD_PATH}/output/${PROJECT_FULLNAME}/cvi_board_memmap.h
CVI_BOARD_MEMMAP_CONF_PATH := ${BUILD_PATH}/output/${PROJECT_FULLNAME}/cvi_board_memmap.conf
CVI_BOARD_MEMMAP_LD_PATH := ${BUILD_PATH}/output/${PROJECT_FULLNAME}/cvi_board_memmap.ld

BOARD_MMAP_PATH := ${BORAD_FOLDER_PATH}/memmap.py
MMAP_CONV_PY := ${BUILD_PATH}/scripts/mmap_conv.py


${CVI_BOARD_MEMMAP_H_PATH}: ${BOARD_MMAP_PATH} ${MMAP_CONV_PY}
	$(call print_target)
	mkdir -p $(dir $@)
	@${MMAP_CONV_PY} --type h $< $@

${CVI_BOARD_MEMMAP_CONF_PATH}: ${BOARD_MMAP_PATH} ${MMAP_CONV_PY}
	$(call print_target)
	@mkdir -p $(dir $@)
	@${MMAP_CONV_PY} --type conf $< $@

${CVI_BOARD_MEMMAP_LD_PATH}: ${BOARD_MMAP_PATH} ${MMAP_CONV_PY}
	$(call print_target)
	@mkdir -p $(dir $@)
	@${MMAP_CONV_PY} --type ld $< $@

ifeq ($(wildcard ${BOARD_MMAP_PATH}),)
memory-map:
else
memory-map: ${CVI_BOARD_MEMMAP_H_PATH} ${CVI_BOARD_MEMMAP_CONF_PATH} ${CVI_BOARD_MEMMAP_LD_PATH}
endif
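A hypothetical board memmap.py of the kind these rules expect to find under ${BORAD_FOLDER_PATH}: mmap_conv.py only requires a MemoryMap class whose public attributes are integers, plus an optional _no_prefix list of attribute names that should not receive the CVIMMAP_ prefix. Every name and value below is a placeholder, not a real board layout.

# Hypothetical memmap.py consumed by the rules above via mmap_conv.py.
class MemoryMap:
    _no_prefix = ["CONFIG_SYS_TEXT_BASE"]  # emitted without the CVIMMAP_ prefix

    DRAM_BASE = 0x80000000
    DRAM_SIZE = 64 * 1024 * 1024
    FREERTOS_ADDR = DRAM_BASE + 60 * 1024 * 1024
    CONFIG_SYS_TEXT_BASE = DRAM_BASE + 0x40000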
131
build/scripts/mmap_conv.py
Executable file
131
build/scripts/mmap_conv.py
Executable file
@ -0,0 +1,131 @@
#!/usr/bin/env python3

import logging
import argparse
import importlib.util
from os.path import basename, splitext, abspath
from collections import OrderedDict
import random


MEMMAP_PREFIX = "CVIMMAP_"


def sort_mmap(mlist):
    base = mlist.get("CVIMMAP_DRAM_BASE")
    if not base:
        base = 0

    mm = mlist.items()

    mm = sorted(mm, key=lambda x: x[1])
    mm = sorted(mm, key=lambda x: x[0])

    return OrderedDict(mm)


def parse_mmap(mmap_module):
    try:
        mmap = mmap_module.MemoryMap
    except AttributeError:
        logging.error("Memory map file must have 'class MemoryMap'")
        raise

    no_prefix = getattr(mmap_module.MemoryMap, "_no_prefix", [])

    mlist = OrderedDict()

    for attr in mmap.__dict__:
        if attr.startswith("_"):
            continue

        value = getattr(mmap, attr)
        if attr not in no_prefix:
            attr = MEMMAP_PREFIX + attr
        mlist[attr] = int(value)

    return sort_mmap(mlist)


def int_to_si(n):
    off = ""

    for i in [0x80000000, 0x100000000]:
        if n >= i:
            off = "offset "
            n -= i
            break

    if n < 1024 * 1024:
        s = "{0}KiB".format(n / 1024)
    else:
        s = "{0}MiB".format(n / (1024 * 1024))

    return off + s


def mmap_to_ld(mlist):
    mlist = ["{0:s} = {1:#x};".format(a, v) for a, v in mlist.items()]

    conf = "\n".join(mlist)
    return conf


def mmap_to_conf(mlist):
    mlist = ["{0:s}={1:#x}".format(a, v) for a, v in mlist.items()]

    conf = "\n".join(mlist)
    return conf


def mmap_to_h(mlist):
    mlist = ["#define {0:s} {1:#x} /* {2} */".format(a, v, int_to_si(v)) for a, v in mlist.items()]

    r = random.randint(0x80000000, 0xFFFFFFFF)

    conf = (
        "#ifndef __BOARD_MMAP__{0:08x}__\n"
        "#define __BOARD_MMAP__{0:08x}__\n\n"
        "{1}\n\n"
        "#endif /* __BOARD_MMAP__{0:08x}__ */\n".format(r, "\n".join(mlist))
    )
    return conf


def main():
    logging.basicConfig(
        format="%(levelname)8s:%(module)s: %(message)s", level=logging.NOTSET
    )

    parser = argparse.ArgumentParser(description="Generate mmap.h")
    parser.add_argument("--type", choices=["h", "conf", "ld"], required=True)
    parser.add_argument("MAP_FILE", type=str, nargs=1)
    parser.add_argument("OUTPUT", type=str, nargs=1)
    args = parser.parse_args()

    map_file_path = args.MAP_FILE[0]
    logging.info("map_file_path is at %s", map_file_path)
    map_name = splitext(basename(map_file_path))[0]

    # Load map_file as python module
    spec = importlib.util.spec_from_file_location(map_name, map_file_path)
    mmap_module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(mmap_module)

    mlist = parse_mmap(mmap_module)

    if args.type == "h":
        out = mmap_to_h(mlist)
    elif args.type == "conf":
        out = mmap_to_conf(mlist)
    elif args.type == "ld":
        out = mmap_to_ld(mlist)

    out_path = abspath(args.OUTPUT[0])
    logging.info("Generate to %s", out_path)
    with open(out_path, "w") as fp:
        fp.write(out)


if __name__ == "__main__":
    main()
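A small usage sketch of the three output flavors, assuming mmap_conv.py is importable (e.g. build/scripts is on PYTHONPATH); the two entries are placeholders, not a real board layout.

# Illustrative sketch only.
from collections import OrderedDict
import mmap_conv

mlist = OrderedDict([("CVIMMAP_DRAM_BASE", 0x80000000),
                     ("CVIMMAP_DRAM_SIZE", 64 * 1024 * 1024)])

print(mmap_conv.mmap_to_h(mlist))     # C header wrapped in a randomized include guard
print(mmap_conv.mmap_to_conf(mlist))  # NAME=0x... assignment lines
print(mmap_conv.mmap_to_ld(mlist))    # NAME = 0x...; assignments for a linker script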
246
build/scripts/oldconfig.py
Executable file
246
build/scripts/oldconfig.py
Executable file
@ -0,0 +1,246 @@
#!/usr/bin/env python3

# Copyright (c) 2018-2019, Ulf Magnusson
# SPDX-License-Identifier: ISC

"""
Implements oldconfig functionality.

  1. Loads existing .config
  2. Prompts for the value of all modifiable symbols/choices that
     aren't already set in the .config
  3. Writes an updated .config

The default input/output filename is '.config'. A different filename can be
passed in the KCONFIG_CONFIG environment variable.

When overwriting a configuration file, the old version is saved to
<filename>.old (e.g. .config.old).

Entering '?' displays the help text of the symbol/choice, if any.

Unlike 'make oldconfig', this script doesn't print menu titles and comments,
but gives Kconfig definition locations. Printing menus and comments would be
pretty easy to add: Look at the parents of each item, and print all menu
prompts and comments unless they have already been printed (assuming you want
to skip "irrelevant" menus).
"""
from __future__ import print_function

import sys

from kconfiglib import Symbol, Choice, BOOL, TRISTATE, HEX, standard_kconfig


# Python 2/3 compatibility hack
if sys.version_info[0] < 3:
    input = raw_input


def _main():
    # Earlier symbols in Kconfig files might depend on later symbols and become
    # visible if their values change. This flag is set to True if the value of
    # any symbol changes, in which case we rerun the oldconfig to check for new
    # visible symbols.
    global conf_changed

    kconf = standard_kconfig(__doc__)
    print(kconf.load_config())

    while True:
        conf_changed = False

        for node in kconf.node_iter():
            oldconfig(node)

        if not conf_changed:
            break

    print(kconf.write_config())


def oldconfig(node):
    """
    Prompts the user for a value if node.item is a visible symbol/choice with
    no user value.
    """
    # See main()
    global conf_changed

    # Only symbols and choices can be configured
    if not isinstance(node.item, (Symbol, Choice)):
        return

    # Skip symbols and choices that aren't visible
    if not node.item.visibility:
        return

    # Skip symbols and choices that don't have a prompt (at this location)
    if not node.prompt:
        return

    if isinstance(node.item, Symbol):
        sym = node.item

        # Skip symbols that already have a user value
        if sym.user_value is not None:
            return

        # Skip symbols that can only have a single value, due to selects
        if len(sym.assignable) == 1:
            return

        # Skip symbols in choices in y mode. We ask once for the entire choice
        # instead.
        if sym.choice and sym.choice.tri_value == 2:
            return

        # Loop until the user enters a valid value or enters a blank string
        # (for the default value)
        while True:
            val = input("{} ({}) [{}] ".format(
                node.prompt[0], _name_and_loc_str(sym),
                _default_value_str(sym)))

            if val == "?":
                _print_help(node)
                continue

            # Substitute a blank string with the default value the symbol
            # would get
            if not val:
                val = sym.str_value

            # Automatically add a "0x" prefix for hex symbols, like the
            # menuconfig interface does. This isn't done when loading .config
            # files, hence why set_value() doesn't do it automatically.
            if sym.type == HEX and not val.startswith(("0x", "0X")):
                val = "0x" + val

            old_str_val = sym.str_value

            # Kconfiglib itself will print a warning here if the value
            # is invalid, so we don't need to bother
            if sym.set_value(val):
                # Valid value input. We're done with this node.

                if sym.str_value != old_str_val:
                    conf_changed = True

                return

    else:
        choice = node.item

        # Skip choices that already have a visible user selection...
        if choice.user_selection and choice.user_selection.visibility == 2:
            # ...unless there are new visible symbols in the choice. (We know
            # they have y (2) visibility in that case, because m-visible
            # symbols get demoted to n-visibility in y-mode choices, and the
            # user-selected symbol had visibility y.)
            for sym in choice.syms:
                if sym is not choice.user_selection and sym.visibility and \
                        sym.user_value is None:
                    # New visible symbols in the choice
                    break
            else:
                # No new visible symbols in the choice
                return

        # Get a list of available selections. The mode of the choice limits
        # the visibility of the choice value symbols, so this will indirectly
        # skip choices in n and m mode.
        options = [sym for sym in choice.syms if sym.visibility == 2]

        if not options:
            # No y-visible choice value symbols
            return

        # Loop until the user enters a valid selection or a blank string (for
        # the default selection)
        while True:
            print("{} ({})".format(node.prompt[0], _name_and_loc_str(choice)))

            for i, sym in enumerate(options, 1):
                print("{} {}. {} ({})".format(
                    ">" if sym is choice.selection else " ",
                    i,
                    # Assume people don't define choice symbols with multiple
                    # prompts. That generates a warning anyway.
                    sym.nodes[0].prompt[0],
                    sym.name))

            sel_index = input("choice[1-{}]: ".format(len(options)))

            if sel_index == "?":
                _print_help(node)
                continue

            # Pick the default selection if the string is blank
            if not sel_index:
                choice.selection.set_value(2)
                break

            try:
                sel_index = int(sel_index)
            except ValueError:
                print("Bad index", file=sys.stderr)
                continue

            if not 1 <= sel_index <= len(options):
                print("Bad index", file=sys.stderr)
                continue

            # Valid selection

            if options[sel_index - 1].tri_value != 2:
                conf_changed = True

            options[sel_index - 1].set_value(2)
            break

        # Give all of the non-selected visible choice symbols the user value n.
        # This makes it so that the choice is no longer considered new once we
        # do additional passes, if the reason that it was considered new was
        # that it had new visible choice symbols.
        #
        # Only giving visible choice symbols the user value n means we will
        # prompt for the choice again if later user selections make more new
        # choice symbols visible, which is correct.
        for sym in choice.syms:
            if sym is not choice.user_selection and sym.visibility:
                sym.set_value(0)


def _name_and_loc_str(sc):
    # Helper for printing the name of the symbol/choice 'sc' along with the
    # location(s) in the Kconfig files where it is defined. Unnamed choices
    # return "choice" instead of the name.

    return "{}, defined at {}".format(
        sc.name or "choice",
        ", ".join("{}:{}".format(node.filename, node.linenr)
                  for node in sc.nodes))


def _print_help(node):
    print("\n" + (node.help or "No help text\n"))


def _default_value_str(sym):
    # Returns the "m/M/y" string in e.g.
    #
    #   TRISTATE_SYM prompt (TRISTATE_SYM, defined at Kconfig:9) [n/M/y]:
    #
    # For string/int/hex, returns the default value as-is.

    if sym.type in (BOOL, TRISTATE):
        return "/".join(("NMY" if sym.tri_value == tri else "nmy")[tri]
                        for tri in sym.assignable)

    # string/int/hex
    return sym.str_value


if __name__ == "__main__":
    _main()
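A usage sketch (paths are placeholders): running the oldconfig pass against a saved board configuration by pointing KCONFIG_CONFIG at it, as the docstring above describes.

# Illustrative sketch only; the config path and Kconfig path are placeholders.
import os
import subprocess

env = dict(os.environ, KCONFIG_CONFIG="build/output/demo_board/.config")
subprocess.run(["python3", "build/scripts/oldconfig.py", "build/Kconfig"],
               env=env, check=True)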
28
build/scripts/olddefconfig.py
Executable file
28
build/scripts/olddefconfig.py
Executable file
@ -0,0 +1,28 @@
#!/usr/bin/env python3

# Copyright (c) 2018-2019, Ulf Magnusson
# SPDX-License-Identifier: ISC

"""
Updates an old .config file or creates a new one, by filling in default values
for all new symbols. This is the same as picking the default selection for all
symbols in oldconfig, or entering the menuconfig interface and immediately
saving.

The default input/output filename is '.config'. A different filename can be
passed in the KCONFIG_CONFIG environment variable.

When overwriting a configuration file, the old version is saved to
<filename>.old (e.g. .config.old).
"""
import kconfiglib


def main():
    kconf = kconfiglib.standard_kconfig(__doc__)
    print(kconf.load_config())
    print(kconf.write_config())


if __name__ == "__main__":
    main()
57
build/scripts/pad_atf_crc.py
Executable file
57
build/scripts/pad_atf_crc.py
Executable file
@ -0,0 +1,57 @@
#!/usr/bin/env python3
# PYTHON_ARGCOMPLETE_OK

import logging
import argparse
import binascii
from struct import pack

import build_helper

try:
    import argcomplete
except ImportError:
    argcomplete = None


build_helper.check_python_min_version()


def parse_args():
    parser = argparse.ArgumentParser(description="Pad ATF CRC")
    parser.add_argument("-v", "--verbose", default="INFO")
    parser.add_argument("--logfile", type=str)
    parser.add_argument("--pad-crc", type=str)

    if argcomplete:
        argcomplete.autocomplete(parser)

    return parser.parse_args()


def pad_crc(path):
    logging.info("Pad CRC16 to %s", path)
    with open(path, "rb") as fp:
        din = fp.read()
    c = binascii.crc_hqx(din, 0)
    logging.info("CRC16=0x%04x", c)

    dout = din + pack("<H", c) + b"\xFE\xCA"
    with open(path, "wb") as fp:
        fp.write(dout)


def main():
    args = parse_args()

    build_helper.init_logging(args.logfile, stdout_level=args.verbose)
    build_helper.dump_debug_info()

    if args.pad_crc:
        pad_crc(args.pad_crc)

    logging.info("END")


if __name__ == "__main__":
    main()
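A sketch of how the trailer appended by pad_crc() could be checked afterwards: the last four bytes are the binascii.crc_hqx checksum of the original image (packed little-endian) followed by the bytes FE CA. The fip.bin path is a placeholder.

# Illustrative sketch only.
import binascii
from struct import unpack

with open("fip.bin", "rb") as fp:
    data = fp.read()

payload, trailer = data[:-4], data[-4:]
crc, = unpack("<H", trailer[:2])        # CRC16 stored little-endian
assert trailer[2:] == b"\xFE\xCA"       # marker bytes appended after the CRC
assert crc == binascii.crc_hqx(payload, 0)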
13
build/scripts/rtos.mk
Normal file
13
build/scripts/rtos.mk
Normal file
@ -0,0 +1,13 @@
rtos: memory-map
	$(call print_target)
ifeq ($(CHIP_ARCH_L),$(filter $(CHIP_ARCH_L), cv180x))
	cd ${FREERTOS_PATH}/cvitek && ./build_cv180x.sh
else
	cd ${FREERTOS_PATH}/cvitek && ./build_cv181x.sh
endif

rtos-clean:
ifeq (${CONFIG_ENABLE_FREERTOS},y)
	$(call print_target)
	cd ${FREERTOS_PATH}/cvitek && rm -rf build
endif
49
build/scripts/savedefconfig.py
Executable file
49
build/scripts/savedefconfig.py
Executable file
@ -0,0 +1,49 @@
#!/usr/bin/env python3

# Copyright (c) 2019, Ulf Magnusson
# SPDX-License-Identifier: ISC

"""
Saves a minimal configuration file that only lists symbols that differ in value
from their defaults. Loading such a configuration file is equivalent to loading
the "full" configuration file.

Minimal configuration files are handy to start from when editing configuration
files by hand.

The default input configuration file is '.config'. A different input filename
can be passed in the KCONFIG_CONFIG environment variable.

Note: Minimal configurations can also be generated from within the menuconfig
interface.
"""
import argparse

import kconfiglib


def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=__doc__)

    parser.add_argument(
        "--kconfig",
        default="Kconfig",
        help="Top-level Kconfig file (default: Kconfig)")

    parser.add_argument(
        "--out",
        metavar="MINIMAL_CONFIGURATION",
        default="defconfig",
        help="Output filename for minimal configuration (default: defconfig)")

    args = parser.parse_args()

    kconf = kconfiglib.Kconfig(args.kconfig, suppress_traceback=True)
    print(kconf.load_config())
    print(kconf.write_min_config(args.out))


if __name__ == "__main__":
    main()
92
build/scripts/setconfig.py
Executable file
92
build/scripts/setconfig.py
Executable file
@ -0,0 +1,92 @@
#!/usr/bin/env python3

# Copyright (c) 2019, Ulf Magnusson
# SPDX-License-Identifier: ISC

"""
Simple utility for setting configuration values from the command line.

Sample usage:

  $ setconfig FOO_SUPPORT=y BAR_BITS=8

Note: Symbol names should not be prefixed with 'CONFIG_'.

The exit status on errors is 1.

The default input/output configuration file is '.config'. A different filename
can be passed in the KCONFIG_CONFIG environment variable.

When overwriting a configuration file, the old version is saved to
<filename>.old (e.g. .config.old).
"""
import argparse
import sys

import kconfiglib


def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=__doc__)

    parser.add_argument(
        "--kconfig",
        default="Kconfig",
        help="Top-level Kconfig file (default: Kconfig)")

    parser.add_argument(
        "--no-check-exists",
        dest="check_exists",
        action="store_false",
        help="Ignore assignments to non-existent symbols instead of erroring "
             "out")

    parser.add_argument(
        "--no-check-value",
        dest="check_value",
        action="store_false",
        help="Ignore assignments that didn't \"take\" (where the symbol got a "
             "different value, e.g. due to unsatisfied dependencies) instead "
             "of erroring out")

    parser.add_argument(
        "assignments",
        metavar="ASSIGNMENT",
        nargs="*",
        help="A 'NAME=value' assignment")

    args = parser.parse_args()

    kconf = kconfiglib.Kconfig(args.kconfig, suppress_traceback=True)
    print(kconf.load_config())

    for arg in args.assignments:
        if "=" not in arg:
            sys.exit("error: no '=' in assignment: '{}'".format(arg))
        name, value = arg.split("=", 1)

        if name not in kconf.syms:
            if not args.check_exists:
                continue
            sys.exit("error: no symbol '{}' in configuration".format(name))

        sym = kconf.syms[name]

        if not sym.set_value(value):
            sys.exit("error: '{}' is an invalid value for the {} symbol {}"
                     .format(value, kconfiglib.TYPE_TO_STR[sym.orig_type],
                             name))

        if args.check_value and sym.str_value != value:
            sys.exit("error: {} was assigned the value '{}', but got the "
                     "value '{}'. Check the symbol's dependencies, and make "
                     "sure that it has a prompt."
                     .format(name, value, sym.str_value))

    print(kconf.write_config())


if __name__ == "__main__":
    main()
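A sketch of the programmatic equivalent of `setconfig FOO_SUPPORT=y`, using the same kconfiglib calls the script relies on; FOO_SUPPORT and the top-level Kconfig path are placeholders.

# Illustrative sketch only.
import kconfiglib

kconf = kconfiglib.Kconfig("Kconfig")        # placeholder Kconfig path
print(kconf.load_config())                   # reads .config (or $KCONFIG_CONFIG)
kconf.syms["FOO_SUPPORT"].set_value("y")     # same set path as setconfig.py
print(kconf.write_config())                  # writes .config, keeping a .old copy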
194
build/scripts/sign_fip.py
Executable file
194
build/scripts/sign_fip.py
Executable file
@ -0,0 +1,194 @@
#!/usr/bin/env python3
# PYTHON_ARGCOMPLETE_OK

import logging
import argparse
import os
import stat
import re
import subprocess
import tempfile
import os.path
import tarfile

import build_helper

try:
    import argcomplete
except ImportError:
    argcomplete = None


build_helper.check_python_min_version()


def parse_args():
    parser = argparse.ArgumentParser(
        description="Sign FIP/BLD images with the key server"
    )
    parser.add_argument("-v", "--verbose", default="INFO")
    parser.add_argument("--logfile", type=str)
    parser.add_argument("--sign-atf", type=str)
    parser.add_argument("--sign-bld", type=str)

    if argcomplete:
        argcomplete.autocomplete(parser)

    return parser.parse_args()


def sign_atf(fip_path):
    keyserver = os.environ["KEYSERVER"]
    keyserver_sshkey_path = os.environ["KEYSERVER_SSHKEY_PATH"]
    logging.info(
        "keyserver=%s keyserver_sshkey_path=%s", keyserver, keyserver_sshkey_path
    )

    os.chmod(keyserver_sshkey_path, stat.S_IRUSR)

    logging.info("scp %s to keyserver", fip_path)
    ret = subprocess.run(
        [
            "scp",
            "-i",
            keyserver_sshkey_path,
            fip_path,
            "service_sign@%s:fip.bin" % keyserver,
        ],
        stderr=subprocess.PIPE,
        check=True,
    )
    print(ret.stderr.decode())

    m = re.search(r"TOKEN:(.{32})", ret.stderr.decode())
    token = m.group(1)
    logging.info("token=%s", token)

    logging.info("sign in keyserver")
    ret = subprocess.run(
        [
            "ssh",
            "-i",
            keyserver_sshkey_path,
            "service_sign@%s" % keyserver,
            "sign_fip",
            "--chip=cv1835",
            "--token=%s" % token,
        ],
        check=False,
    )
    logging.debug("%r" % ret)

    fip_signed_path = os.path.splitext(fip_path.replace("_key0", ""))
    fip_signed_path = fip_signed_path[0] + "_key1" + fip_signed_path[1]

    logging.info("copy %s from keyserver", fip_signed_path)
    ret = subprocess.run(
        [
            "scp",
            "-i",
            keyserver_sshkey_path,
            "service_sign@%s:fip_ID%s_signed_encrypted.bin" % (keyserver, token),
            fip_signed_path,
        ],
        check=False,
    )
    logging.debug("%r" % ret)


def extract_bld_bin(bld_tar_path, dst):
    with tarfile.open(bld_tar_path, "r") as tf:
        tf.extract("BLD.bin", dst)

    return os.path.join(dst, "BLD.bin")


def sign_bld(bld_path, chip_arch, chip):
    logging.info("chip_arch=%s:%s bld_path=%s", chip_arch, chip, bld_path)
    fip_atf_key0_path = os.path.join(
        os.environ["RELEASE_BIN_ATF_DIR"],
        "fip_atf_%s_key0.bin" % chip_arch,
    )

    logging.debug("fip_atf_key0_path=%s", fip_atf_key0_path)
    assert os.path.exists(fip_atf_key0_path)

    with tempfile.TemporaryDirectory() as tmpdir:
        logging.debug("tmpdir=%s", tmpdir)

        fip_w_bld_path = os.path.join(
            tmpdir, os.path.basename(fip_atf_key0_path).replace("_key0", "")
        )
        pack_fip_path = os.path.join("build/tools", chip_arch, "pack_fip", "pack_fip.py")

        bld_bin_path = bld_path
        if os.path.splitext(bld_path)[1] == ".tar":
            bld_bin_path = extract_bld_bin(bld_path, tmpdir)
        else:
            bld_bin_path = bld_path

        if os.path.getsize(bld_bin_path) % 16 != 0:
            raise ValueError("bld_bin_path size is %d" % os.path.getsize(bld_bin_path))

        subprocess.run(
            [
                "python3",
                pack_fip_path,
                fip_atf_key0_path,
                "--output",
                fip_w_bld_path,
                "--add-bld",
                bld_bin_path,
            ],
            check=True,
        )

        subprocess.run(
            ["python3", pack_fip_path, "--parse", fip_w_bld_path], check=True
        )

        sign_atf(fip_w_bld_path)

        fip_signed_path = os.path.splitext(fip_w_bld_path)
        fip_signed_path = fip_signed_path[0] + "_key1" + fip_signed_path[1]
        logging.debug("fip_signed_path=%s", fip_signed_path)
        assert os.path.exists(fip_signed_path)

        subprocess.run(
            ["python3", pack_fip_path, "--unpack", fip_signed_path],
            check=True,
        )

        bld_tar_path = os.path.splitext(bld_path)[0].replace("_key0", "") + "_key1.tar"
        logging.info("bld_tar_path=%s", bld_tar_path)
        members = [
            "BLD_CONTENT_CERT.bin",
            "BLD2_KEY_CERT.bin",
            "BLD1_KEY_CERT.bin",
            "BLD.bin",
        ]
        with tarfile.open(bld_tar_path, "w") as tf:
            for m in members:
                s = os.path.splitext(fip_signed_path)[0] + "_" + m
                logging.debug("Add %s", s)
                tf.add(s, arcname=m, recursive=False)


def main():
    args = parse_args()

    build_helper.init_logging(args.logfile, stdout_level=args.verbose)

    if args.sign_atf:
        sign_atf(args.sign_atf)

    if args.sign_bld:
        chip_arch = os.environ["CHIP_ARCH_L"]
        chip = os.environ["CHIP"]
        sign_bld(args.sign_bld, chip_arch, chip)

    logging.info("END")


if __name__ == "__main__":
    main()
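A small sketch of the key0 -> key1 naming convention that sign_atf() assumes when it fetches the signed image back from the key server; the input filename is a placeholder.

# Illustrative sketch only.
import os.path

fip_path = "fip_atf_cv181x_key0.bin"                 # placeholder input name
root, ext = os.path.splitext(fip_path.replace("_key0", ""))
fip_signed_path = root + "_key1" + ext
print(fip_signed_path)                               # -> fip_atf_cv181x_key1.bin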
12
build/scripts/sync_git_ver.py
Normal file
12
build/scripts/sync_git_ver.py
Normal file
@ -0,0 +1,12 @@
import os

with open("git_version.txt") as fd:
    dataList = fd.readlines()

rootdir = os.getcwd()
for i in range(0, len(dataList), 3):
    os.chdir("{location}".format(location=dataList[i].split(' ')[1].strip()))
    os.system("git reset --hard {commit}".format(
        commit=dataList[i + 1].split(' ')[0])
    )
    os.chdir(rootdir)
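A hypothetical git_version.txt in the three-lines-per-repository layout this script parses: the repository path is the second token of the first line, the commit hash is the first token of the second line, and the third line is a separator. The repository names and hashes below are placeholders.

# Illustrative sketch only: show how the three-line stride is consumed.
sample = (
    "path: u-boot\n"
    "1111111111111111111111111111111111111111 commit subject\n"
    "\n"
    "path: linux\n"
    "2222222222222222222222222222222222222222 commit subject\n"
    "\n"
)

lines = sample.splitlines(keepends=True)
for i in range(0, len(lines), 3):
    location = lines[i].split(' ')[1].strip()   # second token -> repo path
    commit = lines[i + 1].split(' ')[0]         # first token -> commit hash
    print(location, commit)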