[build] add cvitek build scripts
Change-Id: If63ce4a669e5d4d72b8e3b9253336dd99bf74c30
build/tools/common/image_tool/XmlParser.py  (new file, +127 lines)
@@ -0,0 +1,127 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import logging
import os
import re
import sys
import xml.etree.ElementTree as ET

FORMAT = "%(levelname)s: %(message)s"
logging.basicConfig(level=logging.INFO, format=FORMAT)
Storage_EMMC = 0
Storage_SPINAND = 1
Storage_SPINOR = 2
LBA_SIZE = 512


class XmlParser:
    @staticmethod
    def parse_size(size):
        units = {"B": 1, "K": 2 ** 10, "M": 2 ** 20, "G": 2 ** 30, "T": 2 ** 40}
        size = size.upper()
        logging.debug("parsing size %s" % size)
        if not re.match(r" ", size):
            size = re.sub(r"([BKMGT])", r" \1", size)
        try:
            number, unit = [string.strip() for string in size.split()]
        except ValueError:
            number = size
            unit = "B"

        ret = int(float(number) * units[unit])

        return ret

    def parse(self, install=None):
        try:
            tree = ET.parse(self.xml)
        except Exception:
            logging.error(self.xml + " is not a valid xml file")
            raise

        root = tree.getroot()
        self.storage = root.attrib["type"]
        install_dir = install
        parts = []
        for part in root:
            p = dict()
            if "size_in_kb" in part.attrib:
                p["part_size"] = int(part.attrib["size_in_kb"]) * 1024
            elif "size_in_b" in part.attrib:
                p["part_size"] = int(part.attrib["size_in_b"])
            else:
                p["part_size"] = sys.maxsize
                # No size attribute means take the biggest possible size

            if part.attrib["file"] and install_dir is not None:
                path = os.path.join(install_dir, part.attrib["file"])
                try:
                    file_size = os.stat(path).st_size
                except Exception:
                    file_size = 0
                if file_size > p["part_size"]:
                    logging.error(
                        "Image: %s(%d) is larger than partition size(%d)"
                        % (part.attrib["file"], file_size, p["part_size"])
                    )
                    raise OverflowError
                p["file_path"] = path
                logging.debug("size of " + path + " : " + str(file_size))
            else:
                file_size = 0

            p["file_size"] = int(file_size)
            p["file_name"] = part.attrib["file"]
            p["label"] = part.attrib["label"]
            p["mountpoint"] = (
                part.attrib["mountpoint"] if "mountpoint" in part.attrib else None
            )
            p["type"] = part.attrib["type"] if "type" in part.attrib else ""
            p["options"] = part.attrib["options"] if "options" in part.attrib else None

            parts.append(p)

        if self.storage == "emmc":
            self.__calEmmcOffset(parts)
        elif self.storage == "spinor":
            self.__calNorOffset(parts)
        elif self.storage == "spinand":
            self.__calNandOffset(parts)
        elif self.storage == "sd":
            self.__calNandOffset(parts)
        elif self.storage == "none":
            self.__calNandOffset(parts)

        else:
            logging.error("Unknown storage type")
            raise ValueError(self.storage)
        for p in parts:
            self.parts[p["label"]] = p
        return parts

    def __calEmmcOffset(self, parts):
        # eMMC programs the GPT at the beginning of the device, starting from 8192
        start = 0
        for p in parts:
            p["offset"] = start
            start += p["part_size"]

    def __calNandOffset(self, parts):
        start = 0
        for p in parts:
            p["offset"] = start
            start += p["part_size"]

    def __calNorOffset(self, parts):
        start = 0
        for p in parts:
            p["offset"] = start
            start += p["part_size"]

    def getStorage(self):
        return self.storage

    def __init__(self, xml):
        self.xml = xml
        self.storage = "emmc"
        self.parts = dict()
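Reviewer note: a minimal usage sketch of XmlParser for readers who have not seen the partition XML it consumes. The attribute names (type, label, size_in_kb, size_in_b, file, mountpoint) are the ones the parser reads above; the element tag names, file names and paths below are illustrative only, not part of the commit.

# --- sketch (not part of the commit) ---
# Example partition.xml the parser would accept (tag names are arbitrary,
# only the attributes matter to XmlParser):
#   <physical_partition type="spinand">
#       <partition label="fip"    size_in_kb="1024"  file="fip.bin"/>
#       <partition label="ROOTFS" size_in_kb="30720" file="rootfs.sqsh"
#                  mountpoint="/" type="squashfs"/>
#   </physical_partition>
from XmlParser import XmlParser

parser = XmlParser("partition.xml")
parts = parser.parse(install="install/images")   # file sizes resolved under this dir
print(parser.getStorage())
for p in parts:
    print(p["label"], hex(p["offset"]), p["part_size"])
# --- end sketch ---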
build/tools/common/image_tool/alios/mk_imgHeader_alios.py  (new file, +104 lines)
@@ -0,0 +1,104 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import argparse
import os
import re


class partition:
    name = ""
    addr = ""
    size = ""
    filename = ""

    def __init__(self, name, addr, size, filename):
        self.name = name
        self.addr = addr
        self.size = size
        self.filename = filename


def parse_args():
    parser = argparse.ArgumentParser(description="Create imgs.h for u-boot")
    parser.add_argument("part_table", help="path to partition xml")
    parser.add_argument("output", help="output folder")

    args = parser.parse_args()

    return args


def gen_imgs_h(output, part_list):
    with open(os.path.join(output, "imgs.h"), "w") as of:
        of.write("/* this file is generated by mk_imgHeader.py, ")
        of.write("please do not modify this file manually*/\n\n")
        of.write("#ifndef __IMGS_H__\n")
        of.write("#define __IMGS_H__\n\n")
        of.write("char *imgs[] = {};\n\n")
        of.write("struct img_type {\n")
        of.write("\tchar *part_name;\n")
        of.write("\tchar *filename;\n")
        of.write("\tunsigned int offset;\n")
        of.write("};\n\n")

        of.write("struct img_type rtos_imgs[] = {\n")
        for p in part_list:
            if p.filename != "" and p.name != "":
                of.write('\t{"%s", ' % p.name)
                of.write('"%s", ' % p.filename)
            else:
                continue
            if p.addr != "":
                of.write('%s},\n' % p.addr)

        of.write("};\n\n")
        of.write("#endif\n")


def get_part(str_list):
    name = ""
    addr = ""
    size = ""
    filename = ""
    s = ''.join(str_list).replace(" ", "")
    kv_list = s.split(",")
    for i in kv_list:
        key = i.split(":")[0]
        val = i.split(":")[1]
        if key == "name":
            name = val
        elif key == "address":
            addr = val
        elif key == "size":
            size = val
        elif key == "filename":
            filename = val
    part = partition(name, addr, size, filename)

    return part


def parse_part_table(file):
    part_list = []
    fp = open(file, "r")
    sample = fp.readlines()
    p = re.compile(r'[{](.*?)[}]', re.S)
    for i in sample:
        if "-" in i:
            str_list = re.findall(p, i)
            part = get_part(str_list)
            part_list.append(part)
    fp.close()

    return part_list


def main():
    args = parse_args()
    part_list = parse_part_table(args.part_table)

    gen_imgs_h(args.output, part_list)


if __name__ == "__main__":
    main()
build/tools/common/image_tool/alios/mkcvipart_alios.py  (new file, +115 lines)
@@ -0,0 +1,115 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import argparse
import logging
import os
import re


class partition:
    name = ""
    addr = ""
    size = ""
    filename = ""

    def __init__(self, name, addr, size):
        self.name = name
        self.addr = addr
        self.size = size


def parse_args():
    parser = argparse.ArgumentParser(description="Create cvipart.h")
    parser.add_argument("part_table", help="path to partition xml")
    parser.add_argument("output", help="output folder")

    args = parser.parse_args()

    return args


def gen_cvipart_h(output, part_list):
    logging.info("generating cvipart.h")
    with open(os.path.join(output, "cvipart.h"), "w") as of:
        of.write("/* this file is generated by mkcvipart.py, ")
        of.write("please do not modify this file manually*/\n\n")
        of.write("#ifndef CVIPART_H\n")
        of.write("#define CVIPART_H\n")
        env_exist = True

        # If no ENV or U-BOOT ENV has been set in partition.xml, we assume
        # there is no env support
        of.write("#ifndef CONFIG_ENV_IS_NOWHERE\n#define CONFIG_ENV_IS_NOWHERE\n#endif\n")
        of.write("#define CONFIG_ENV_SIZE 0x20000\n")
        env_exist = False

        if env_exist:
            of.write("#define CONFIG_ENV_IS_IN_SPI_FLASH\n")
            of.write("#define CONFIG_ENV_SECT_SIZE 0x10000\n")
        # Generating MTDPART
        of.write("#define PART_LAYOUT ")
        of.write('"mtdparts=10000000.cvi-spif:"\n')
        of.write('#define ROOTFS_DEV ""\n')

        # Generating PART_ENV
        of.write("#define PARTS_OFFSET \\\n")
        for i, p in enumerate(part_list):
            of.write('"%s_PART_OFFSET=%s\\0" \\\n' % (p.name, p.addr))
            if i == len(part_list) - 1:
                of.write(
                    '"%s_PART_SIZE=%s\\0"\n'
                    % (p.name, p.size)
                )
            else:
                of.write(
                    '"%s_PART_SIZE=%s\\0" \\\n'
                    % (p.name, p.size)
                )

        of.write("#endif")
        logging.info("Done!")


def get_part(str_list):
    name = addr = size = ""
    s = ''.join(str_list).replace(" ", "")
    kv_list = s.split(",")
    for i in kv_list:
        key = i.split(":")[0]
        val = i.split(":")[1]
        if key == "name":
            name = val
        elif key == "address":
            addr = val
        elif key == "size":
            size = val

    part = partition(name, addr, size)

    return part


def parse_part_table(file):
    part_list = []
    fp = open(file, "r")
    sample = fp.readlines()
    p = re.compile(r'[{](.*?)[}]', re.S)
    for i in sample:
        if "-" in i:
            str_list = re.findall(p, i)
            part = get_part(str_list)
            part_list.append(part)
    fp.close()

    return part_list


def main():
    args = parse_args()
    part_list = parse_part_table(args.part_table)

    gen_cvipart_h(args.output, part_list)


if __name__ == "__main__":
    main()
build/tools/common/image_tool/cimg2raw.py  (new executable file, +83 lines)
@@ -0,0 +1,83 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import logging
import argparse
import errno
from os import getcwd, path, makedirs

MAX_LOAD_SIZE = 100 * 1024 * 1024
CHUNK_TYPE_DONT_CARE = 0
CHUNK_TYPE_CRC_CHECK = 1
FORMAT = "%(levelname)s: %(message)s"
logging.basicConfig(level=logging.INFO, format=FORMAT)


def parse_Args():
    parser = argparse.ArgumentParser(description="Create CVITEK device image")

    parser.add_argument(
        "file_path",
        metavar="file_path",
        type=str,
        help="the file you want to pack with cvitek image header",
    )
    parser.add_argument(
        "--output_dir",
        metavar="output_folder_path",
        type=str,
        help="the folder path to save output, default will be ./rawimages",
        default=path.join(getcwd(), "rawimages"),
    )
    parser.add_argument(
        "-v", "--verbose", help="increase output verbosity", action="store_true"
    )
    args = parser.parse_args()
    if args.verbose:
        logging.debug("Enable more verbose output")
        logging.getLogger().setLevel(level=logging.DEBUG)

    return args


class ImagerRemover(object):
    @staticmethod
    def removeHeader(img, out):
        """
        Header format, 64 bytes in total
        4 Bytes: Magic
        4 Bytes: Version
        4 Bytes: Chunk header size
        4 Bytes: Total chunks
        4 Bytes: File size
        32 Bytes: Extra Flags
        12 Bytes: Reserved
        """
        with open(img, "rb") as fd:
            magic = fd.read(4)
            if magic != b"CIMG":
                logging.error("%s is not a cvitek image!!" % img)
                raise TypeError

            with open(out, "wb") as fo:
                fd.seek(64)  # Skip Header
                # Skip each chunk header, copy only the chunk data
                while fd.read(64):
                    fo.write(fd.read(MAX_LOAD_SIZE))


def main():
    args = parse_Args()
    output_path = path.join(args.output_dir, path.basename(args.file_path))
    logging.debug("Input %s, Output %s\n" % (args.file_path, output_path))
    logging.debug("Creating folder for output\n")
    try:
        makedirs(args.output_dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    ImagerRemover.removeHeader(args.file_path, output_path)
    logging.info("Write %s Done" % output_path)


if __name__ == "__main__":
    main()
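Reviewer note: the 64-byte header documented in the docstring above can be inspected with a few lines of struct. This is a sketch only: the field order comes from the docstring (and matches ImagerBuilder.packHeader in raw2cimg.py), the byte order is assumed little-endian (array("I") writes native order on the build host), and the file path is illustrative.

# --- sketch (not part of the commit) ---
import struct

with open("rawimages/boot.emmc", "rb") as f:   # path is illustrative
    hdr = f.read(64)
# 4s magic, 4 x uint32 (version, chunk header size, total chunks, file size),
# 32s extra flags (partition label), 12x reserved
magic, version, chunk_hdr_sz, total_chunks, file_sz, flags = struct.unpack(
    "<4s4I32s12x", hdr)
print(magic, version, total_chunks, file_sz, flags.rstrip(b"\0"))
# --- end sketch ---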
build/tools/common/image_tool/create_automount.py  (new executable file, +140 lines)
@@ -0,0 +1,140 @@
#!/usr/bin/python3

import logging
import argparse
from os import path, chmod
from XmlParser import XmlParser

FORMAT = "%(levelname)s: %(message)s"
logging.basicConfig(level=logging.INFO, format=FORMAT)


def parse_Args():
    parser = argparse.ArgumentParser(description="Create CVITEK device image")
    parser.add_argument("xml", help="path to partition xml")
    parser.add_argument(
        "output",
        metavar="output",
        type=str,
        help="the output folder for saving the fstab",
    )
    args = parser.parse_args()
    return args


def genCase(case, out, parts, storage):
    out.write("%s)\n" % case)
    ubi_cnt = 0
    for i, p in enumerate(parts):
        if p["label"] in ("BOOT", "MISC", "ROOTFS", "fip"):
            continue
        if not p["mountpoint"]:
            continue
        if case == "start":
            # out.write('DL_FLAG=`devmem 0x0e000030`\n')
            out.write(
                "ENV_DLFLAG=`fw_printenv dl_flag 2>/dev/null |awk -F= '{print $2}'`\n"
            )
            out.write('printf "Mounting %s partition\\n"\n' % p["label"])
            if storage == "emmc":
                source = "/dev/mmcblk0p" + str((i + 1))
                if i == len(parts) - 1:
                    out.write(
                        "if [ $DL_FLAG == '0x50524F47' ] || [ -z $ENV_DLFLAG ] || [ $ENV_DLFLAG == 'prog' ]; then\n"
                    )
                    out.write(
                        'printf "OK\\nFix\\n" | parted ---pretend-input-tty /dev/mmcblk0 print\n'
                    )
                    out.write("parted -s /dev/mmcblk0 resizepart %d 100%% \n" % (i + 1))
                    out.write("fi\n")
                out.write("e2fsck.static -y %s\n" % source)
                out.write(
                    "mount -t %s -o sync %s %s\n" % (p["type"], source, p["mountpoint"])
                )
                out.write("if [ $? != 0 ]; then\n")
                out.write(
                    "echo 'Mount %s failed, Try formatting and remounting'\n"
                    % p["label"]
                )
                out.write("mke2fs -T %s %s\n" % (p["type"], source))
                out.write(
                    "mount -t %s -o sync %s %s\n" % (p["type"], source, p["mountpoint"])
                )
                out.write("resize2fs %s\n" % (source))
                out.write(
                    "elif [ $DL_FLAG == '0x50524F47' ] || [ -z $ENV_DLFLAG ] || [ $ENV_DLFLAG == 'prog' ]; then\n"
                )
                out.write("resize2fs %s\n" % (source))
                out.write("fi\n")
            elif storage == "spinand":
                ubi_cnt += 1
                source = "/dev/ubi" + str(ubi_cnt) + "_0"
                out.write("ubiattach /dev/ubi_ctrl -m %d\n" % (i))
                if p["type"] != "ubifs":
                    raise AssertionError("Only ubifs is supported")
                out.write("if [ $? != 0 ]; then\n")
                out.write("ubiformat -y /dev/mtd%d\n" % (i))
                out.write("ubiattach /dev/ubi_ctrl -m %d\n" % (i))
                out.write("ubimkvol /dev/ubi%d -N %s -m\n" % (ubi_cnt, p["label"]))
                out.write("fi\n")
                out.write("if [ ! -c %s ]; then\n" % source)
                out.write("mdev -s\n")
                out.write("fi\n")
                out.write("mount -t ubifs -o sync %s %s\n" % (source, p["mountpoint"]))
                out.write("if [ $? != 0 ]; then\n")
                out.write(
                    "echo 'Mount %s failed, Try formatting and remounting'\n"
                    % p["label"]
                )
                out.write("ubimkvol /dev/ubi%d -N %s -m\n" % (ubi_cnt, p["label"]))
                out.write("mount -t ubifs -o sync %s %s\n" % (source, p["mountpoint"]))
                out.write("fi\n")
            elif storage == "spinor":
                script = """
if ! mount -t {filesystem} /dev/mtdblock{dev_no} {dev_path}; then
    echo 'Mount {label} failed, Try erasing and remounting'
    flash_erase -j /dev/mtd{dev_no} 0 0
    mount -t {filesystem} /dev/mtdblock{dev_no} {dev_path}
fi
""".format(
                    filesystem="jffs2",
                    dev_no=i,
                    dev_path=p["mountpoint"],
                    label=p["label"]
                )
                out.write(script)
        else:
            out.write('printf "Unmounting %s partition\\n"\n' % p["label"])
            out.write("umount %s\n" % p["mountpoint"])

    # Set DL_FLAG to complete
    if case == "start" and storage == "emmc":
        # out.write("devmem 0x0e000030 32 0x444F4E45\n")
        out.write("fw_setenv dl_flag done\n")
    out.write(";;\n")


def main():
    args = parse_Args()
    xmlParser = XmlParser(args.xml)
    parts = xmlParser.parse()
    out_path = path.join(args.output, "S10auto_mount")
    try:
        out = open(out_path, "w")
    except Exception:
        logging.error("Create S10auto_mount failed")
        raise
    out.write(
        "#!/bin/sh\n"
        "${CVI_SHOPTS}\n"
        "# This file is automatically generated by create_automount.py\n"
        "# Please do not modify this file manually!\n"
        'case "$1" in\n')
    genCase("start", out, parts, xmlParser.getStorage())
    out.write("esac\n")
    out.close()
    chmod(out_path, 0o755)


if __name__ == "__main__":
    main()
build/tools/common/image_tool/mk_imgHeader.py  (new file, +32 lines)
@@ -0,0 +1,32 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import argparse
import logging
import os
from XmlParser import XmlParser

FORMAT = "%(levelname)s: %(message)s"
logging.basicConfig(level=logging.INFO, format=FORMAT)


def main():
    parser = argparse.ArgumentParser(description="Create imgs.h for u-boot")
    parser.add_argument("xml", help="path to partition xml")
    parser.add_argument("output", help="output folder")
    args = parser.parse_args()

    parser = XmlParser(args.xml)
    parts = parser.parse()
    storage = parser.getStorage()
    with open(os.path.join(args.output, "imgs.h"), "w") as of:
        of.write("char imgs[][255] = {")
        if storage == "emmc":
            of.write('"fip.bin",\n')
        for p in parts:
            if p["file_name"] != "":
                of.write('"%s",\n' % p["file_name"])
        of.write("};")


if __name__ == "__main__":
    main()
build/tools/common/image_tool/mk_package.py  (new file, +117 lines)
@@ -0,0 +1,117 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from zipfile import ZipFile, ZIP_DEFLATED
from XmlParser import XmlParser
from hashlib import md5
from os import path
from tempfile import NamedTemporaryFile
import logging
import argparse
from raw2cimg import ImagerBuilder

FORMAT = "%(levelname)s: %(message)s"
logging.basicConfig(level=logging.INFO, format=FORMAT)


def argparser():
    parser = argparse.ArgumentParser(description="Pack CVI upgrade package")
    parser.add_argument("xml", help="path to partition xml")
    parser.add_argument("input", metavar="image_folder", help="path to images folder")
    parser.add_argument(
        "-v", "--verbose", help="increase output verbosity", action="store_true"
    )
    parser.add_argument(
        "-o",
        "--output",
        help="path to output file, default is upgrade.zip",
        default="upgrade.zip",
    )
    parser.add_argument(
        "-f",
        "--file",
        nargs=2,
        metavar=("FOLDER IN ZIP", "FILE"),
        help="extra files you want to add to the upgrade.zip, "
        "all the files will be added to the utils folder.",
        action="append",
    )
    args = parser.parse_args()
    if args.verbose:
        logging.debug("Enable more verbose output")
        logging.getLogger().setLevel(level=logging.DEBUG)

    return args


def getMD5Sum(file_path: str) -> str:
    m = md5()
    # Calculate the md5sum in chunks to keep memory usage low
    with open(file_path, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            m.update(chunk)

    return m.hexdigest()


def main():
    args = argparser()
    parser = XmlParser(args.xml)
    parts = parser.parse(install=args.input)
    storage = parser.getStorage()
    logging.debug(args)

    imgBuilder = ImagerBuilder(storage, args.input)
    # create a ZipFile object
    with ZipFile(args.output, "w", ZIP_DEFLATED) as zipObj:
        # create a metadata file to record md5sums
        metadata = NamedTemporaryFile(prefix="meta")

        # Since emmc does not define fip in partition.xml, add it
        # manually.
        if storage == "emmc":
            fip_path = path.join(args.input, "fip.bin")
            if path.isfile(fip_path):
                zipObj.write(fip_path, "fip.bin")

        # Add partition files to the zip
        for p in parts:
            # Skip entries whose file size is zero (file does not exist)
            if p["file_size"] == 0:
                continue
            # Pack the header first so users cannot copy an image without a header
            if p["file_name"] != "fip.bin":
                imgBuilder.packHeader(p)

            # Add file to zipfile
            zipObj.write(p["file_path"], path.basename(p["file_path"]))

            # get MD5sum
            m = getMD5Sum(p["file_path"])
            logging.debug("%s %s" % (path.basename(p["file_path"]), m))
            with open(metadata.name, "a") as meta:
                meta.write("%s %s\n" % (m, path.basename(p["file_path"])))

        # Add extra files to the zip
        if args.file:
            for folder, f in args.file:
                logging.debug(f)
                m = getMD5Sum(f)
                in_zip_path = path.join(folder, path.basename(f))
                logging.debug("%s %s\n" % (m, in_zip_path))
                with open(metadata.name, "a") as meta:
                    meta.write("%s %s\n" % (m, in_zip_path))
                zipObj.write(f, in_zip_path)

        # Add metadata.txt and partition.xml
        zipObj.write(metadata.name, path.join("META", "metadata.txt"))
        zipObj.write(args.xml, path.basename(args.xml))
        # Show zipinfo messages
        if args.verbose:
            for info in zipObj.infolist():
                logging.debug(info)
    logging.info("Packing %s done!" % args.output)
    return


if __name__ == "__main__":
    main()
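Reviewer note: a sketch of how the META/metadata.txt written by mk_package.py could be verified on the receiving side. The line format (md5 digest followed by the in-zip path) comes from the code above; the archive name is illustrative, and the paths assume the package was built on a POSIX host, so os.path.join produced forward slashes.

# --- sketch (not part of the commit) ---
from hashlib import md5
from zipfile import ZipFile

with ZipFile("upgrade.zip") as z:                 # archive name is illustrative
    for line in z.read("META/metadata.txt").decode().splitlines():
        digest, name = line.split(" ", 1)
        m = md5()
        with z.open(name) as f:
            for chunk in iter(lambda: f.read(4096), b""):
                m.update(chunk)
        print(name, "OK" if m.hexdigest() == digest else "MISMATCH")
# --- end sketch ---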
build/tools/common/image_tool/mkcvipart.py  (new file, +219 lines)
@@ -0,0 +1,219 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import logging
import os
import sys
from XmlParser import XmlParser

FORMAT = "%(levelname)s: %(message)s"
logging.basicConfig(level=logging.INFO, format=FORMAT)


def parse_args():
    parser = argparse.ArgumentParser(description="Create cvipart.h and fw_env.config")
    parser.add_argument("xml", help="path to partition xml")
    parser.add_argument("output", help="output folder")
    parser.add_argument(
        "--fw_env",
        help="create fw_env.config; without this flag the script creates cvipart.h for u-boot",
        action="store_true",
    )
    parser.add_argument(
        "--block_size",
        type=int,
        help="set block size for spinand, default is 128KB",
        default=128 * 1024,
    )

    parser.add_argument(
        "-v", "--verbose", help="increase output verbosity", action="store_true"
    )
    args = parser.parse_args()
    if args.verbose:
        logging.debug("Enable more verbose output")
        logging.getLogger().setLevel(level=logging.DEBUG)

    return args


def gen_cvipart_h(output, parser):
    logging.info("generating cvipart.h")
    parts = parser.parse()
    with open(os.path.join(output, "cvipart.h"), "w") as of:
        of.write("/* this file is generated by mkcvipart.py, ")
        of.write("please do not modify this file manually*/\n\n")
        of.write("#ifndef CVIPART_H\n")
        of.write("#define CVIPART_H\n")
        env_exist = True
        env_bak = False

        # Generate ENV_OFFSET
        if "ENV" in parser.parts:
            label = "ENV"
        elif "U-BOOT ENV" in parser.parts:
            label = "U-BOOT ENV"
        else:
            # If no ENV or U-BOOT ENV has been set in partition.xml, we assume
            # there is no env support
            of.write("#ifndef CONFIG_ENV_IS_NOWHERE\n#define CONFIG_ENV_IS_NOWHERE\n#endif\n")
            of.write("#define CONFIG_ENV_SIZE 0x20000\n")
            env_exist = False

        if env_exist and "ENV_BAK" in parser.parts:
            env_bak = True

        LBA_SIZE = 1
        if parser.getStorage() == "emmc":
            if env_exist:
                of.write("#define CONFIG_ENV_IS_IN_MMC\n")
                of.write("#define CONFIG_ENV_SECT_SIZE 0x40000\n")
                of.write("#define CONFIG_SYS_MMC_ENV_DEV 0\n")
                of.write("#define CONFIG_SYS_MMC_ENV_PART 0\n")

            # Generating BLKDEV
            of.write("#define PART_LAYOUT ")
            of.write("\"blkdevparts=mmcblk0:")
            for i, p in enumerate(parts):
                if p["part_size"] != sys.maxsize:
                    part_size = str(int(p["part_size"] / 1024)) + "K"
                else:
                    part_size = "-"
                if part_size == "-" or i == len(parts) - 1:
                    comma = ";"
                else:
                    comma = ","
                of.write("%s(%s)%s" % (part_size, p["label"], comma))
            of.write("mmcblk0boot0:1M(fip),1M(fip_bak);\"")
            of.write("\n")

            for i, p in enumerate(parts):
                if p["label"] == "ROOTFS":
                    of.write('#define ROOTFS_DEV "/dev/mmcblk0p%d"\n' % (i + 1))

        elif parser.getStorage() == "spinand":
            if env_exist:
                of.write("#define CONFIG_ENV_IS_IN_NAND\n")
                of.write("#define CONFIG_ENV_SECT_SIZE 0x40000\n")
            # Generating MTDPART
            of.write("#define PART_LAYOUT ")
            of.write('"mtdparts=cvsnfc:')
            for i, p in enumerate(parts):
                if p["part_size"] != sys.maxsize:
                    part_size = str(int(p["part_size"] / 1024)) + "K"
                else:
                    part_size = "-"
                if part_size == "-" or i == len(parts) - 1:
                    comma = "\"\n"
                else:
                    comma = ","
                of.write("%s(%s)%s" % (part_size, p["label"], comma))

        elif parser.getStorage() == "spinor":
            if env_exist:
                of.write("#define CONFIG_ENV_IS_IN_SPI_FLASH\n")
                of.write("#define CONFIG_ENV_SECT_SIZE 0x10000\n")
            # Generating MTDPART
            of.write("#define PART_LAYOUT ")
            of.write('"mtdparts=10000000.cvi-spif:')
            for i, p in enumerate(parts):
                if p["part_size"] != sys.maxsize:
                    part_size = str(int(p["part_size"] / 1024)) + "K"
                else:
                    part_size = "-"
                if part_size == "-" or i == len(parts) - 1:
                    comma = '"\n'
                else:
                    comma = ","
                of.write("%s(%s)%s" % (part_size, p["label"], comma))

            for i, p in enumerate(parts):
                if p["label"] == "ROOTFS":
                    of.write('#define ROOTFS_DEV "/dev/mtdblock%d"\n' % i)
                    break

        elif parser.getStorage() == "none":
            of.write('#define PART_LAYOUT ""\n')
            of.write('#define ROOTFS_DEV ""\n')
            of.write('#define PARTS_OFFSET ""\n')

        if env_exist:
            of.write(
                "#define CONFIG_ENV_OFFSET 0x%X\n"
                % (parser.parts[label]["offset"])
            )
            if env_bak:
                of.write(
                    "#define CONFIG_ENV_OFFSET_REDUND 0x%X\n"
                    % (parser.parts["ENV_BAK"]["offset"] * LBA_SIZE)
                )
                of.write("#define CONFIG_SYS_REDUNDAND_ENVIRONMENT\n")
            of.write(
                "#define CONFIG_ENV_SIZE 0x%X\n" % parser.parts[label]["part_size"]
            )

        # Generating PART_ENV
        if parser.getStorage() == "emmc":
            LBA_SIZE = 512

        if parser.getStorage() != "none":
            of.write("#define PARTS_OFFSET \\\n")
            for i, p in enumerate(parts):
                of.write('"%s_PART_OFFSET=0x%x\\0" \\\n' % (p["label"], int(p["offset"] / LBA_SIZE)))
                if i == len(parts) - 1:
                    of.write(
                        '"%s_PART_SIZE=0x%x\\0"\n'
                        % (p["label"], int(p["part_size"] / LBA_SIZE))
                    )
                else:
                    of.write(
                        '"%s_PART_SIZE=0x%x\\0" \\\n'
                        % (p["label"], int(p["part_size"] / LBA_SIZE))
                    )

        of.write("#endif")
        logging.info("Done!")


def gen_fw_config(output, parser, block_size=128 * 1024):
    logging.info("generating fw_env.config")
    parts = parser.parse()
    part_index = -1
    with open(os.path.join(output, "fw_env.config"), "w") as of:
        for i in range(len(parts)):
            if parts[i]["label"] in ("ENV", "U-BOOT ENV", "ENV_BAK"):
                part_index = i
                if parser.storage == "spinand":
                    of.write(
                        "/dev/mtd%d 0x%x 0x%x 0x%x\n"
                        % (part_index, 0, parts[part_index]["part_size"], block_size)
                    )
                elif parser.storage == "emmc":
                    of.write(
                        "/dev/mmcblk0 0x%x 0x%x\n"
                        % ((parts[part_index]["offset"] * 512), parts[part_index]["part_size"])
                    )
                elif parser.storage == "spinor":
                    of.write(
                        "/dev/mtd%d 0x%x 0x%x 0x%x\n"
                        % (part_index, 0, parts[part_index]["part_size"], 64 * 1024)
                    )
    if part_index == -1:
        logging.info(
            "There is no ENV or U-BOOT ENV partition in partition.xml; skip generating fw_env.config"
        )
        if os.path.isfile("fw_env.config"):
            os.remove("fw_env.config")
        return


def main():
    args = parse_args()
    parser = XmlParser(args.xml)
    if not args.fw_env:
        gen_cvipart_h(args.output, parser)
    else:
        gen_fw_config(args.output, parser, args.block_size)


if __name__ == "__main__":
    main()
build/tools/common/image_tool/pack_images.py  (new file, +239 lines)
@@ -0,0 +1,239 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import logging
import argparse
import sys
from os import path, stat
from array import array
from XmlParser import XmlParser


FORMAT = "%(levelname)s: %(message)s"
logging.basicConfig(level=logging.INFO, format=FORMAT)
LBA_SIZE = 512
MAX_WRITE_SIZE = 50 * 1024 * 1024
DEFAULT_BLOCK_SIZE = 128 * 1024
FIP_BACKUP_BLOCK_POS = 10
SV_BLOCK_NUM = 4
BLOCK_SIZE_FOR_4K_NAND = 262144

chip_list = ["cv183x", "cv182x", "cv181x"]


def parse_Args():
    parser = argparse.ArgumentParser(description="Create CVITEK device image for burning")
    parser.add_argument("chip", help="the target chip")
    parser.add_argument("xml", help="path to partition xml")
    parser.add_argument("images_path", help="path to images")
    parser.add_argument(
        "output",
        metavar="output",
        type=str,
        help="the output folder for saving Data.bin and Boot.bin",
    )
    parser.add_argument(
        "-m",
        "--max_write_size",
        type=int,
        help="max write buffer size when generating the file. "
        "Increasing the size speeds up the procedure "
        "but uses more system memory. "
        "Default is 50MB.",
        default=MAX_WRITE_SIZE,
    )
    parser.add_argument(
        "-b",
        "--block_size",
        type=int,
        help="block size, only for nand, default is 128K",
        default=DEFAULT_BLOCK_SIZE,
    )
    parser.add_argument(
        "-v", "--verbose", help="increase output verbosity", action="store_true"
    )
    args = parser.parse_args()
    if args.verbose:
        logging.debug("Enable more verbose output")
        logging.getLogger().setLevel(level=logging.DEBUG)

    return args


def handle_fip_for_4k_page(images_path, blocksize: int):

    logging.info("handle fip for 4K page....")
    fill_array = array("B", [0xFF for _ in range(2048)])
    fip_path = path.join(images_path, "fip.bin")
    new_fip_path = path.join(images_path, "fip_4k.bin")
    if path.exists(fip_path):
        with open(fip_path, "rb") as fip:
            with open(new_fip_path, "wb") as new_fip:
                off = 0
                while True:
                    data = fip.read(2048)
                    if data == b"":
                        break

                    new_fip.seek(off, 0)
                    new_fip.write(data)
                    fill_array.tofile(new_fip)
                    new_fip.flush()
                    off = off + 4096

    new_fip_size = stat(new_fip_path).st_size
    print("new fip size is %d " % new_fip_size)
    append_array = array("B", [0xFF for _ in range(blocksize * 5 - new_fip_size)])
    with open(new_fip_path, "ab") as f:
        append_array.tofile(f)


def raw_image_check(f, filename):

    if filename == "fip.bin":
        return
    # CIMG
    head = f.read(4)
    if head == b"CIMG":
        logging.info("%s is not a raw image, please use a raw image" % filename)
        sys.exit(-1)


def genDataBin(
    out,
    parts,
    images_path,
    storage_type,
    max_write_size=50 * 1024 * 1024,
    block_size=128 * 1024,
    chip="cv182x",
):

    if chip not in chip_list:
        logging.info("do not support %s" % chip)
        logging.info("only support %s" % chip_list)
        sys.exit(-1)

    sv_array = array("B")
    if storage_type == "spinand" and chip != "cv181x":
        sv_path = path.join(images_path, "sv.bin")
        if path.exists(sv_path):
            logging.info("sv.bin exists!")
            sv_size = stat(sv_path).st_size
            with open(sv_path, "rb") as f:
                sv_array.fromfile(f, sv_size)
            out.seek(0, 0)
            sv_array.tofile(out)
        else:
            logging.info("there is no sv.bin, please add it")

    for i, p in enumerate(parts):
        file_array = array("B")
        skip_size = 0
        # we change the offset of fip for nand
        if p["file_name"] == "fip.bin" and storage_type == "spinand" and chip != "cv181x":
            p["offset"] = block_size * SV_BLOCK_NUM
            skip_size = block_size * SV_BLOCK_NUM
            # handle fip for 4K page
            if block_size == BLOCK_SIZE_FOR_4K_NAND:
                handle_fip_for_4k_page(images_path, block_size)
                # use fip_4k.bin to pack
                p["file_name"] = "fip_4k.bin"

        if p["file_name"] == "":
            continue

        logging.debug("file name is %s" % p["file_name"])
        file_path = path.join(images_path, p["file_name"])

        if not path.exists(file_path):
            continue

        logging.debug("%s exists" % file_path)
        file_size = stat(file_path).st_size

        logging.debug("%s size is %d" % (p["file_name"], file_size))
        logging.debug("Packing %s" % p["label"])
        if path.exists(file_path):
            with open(file_path, "rb") as f:
                raw_image_check(f, p["file_name"])
                f.seek(0)
                file_array.fromfile(f, file_size)
            out.seek(p["offset"], 0)
            logging.info("Writing %s to pos %d" % (p["label"], p["offset"]))
            file_array.tofile(out)
            # for fip.bin on SPI NAND, keep a backup copy at block FIP_BACKUP_BLOCK_POS
            if i == 0 and storage_type == "spinand":
                out.seek(block_size * FIP_BACKUP_BLOCK_POS, 0)
                file_array.tofile(out)
                logging.info(
                    "do a backup for fip.bin at %d"
                    % (block_size * FIP_BACKUP_BLOCK_POS)
                )

        # Only append 0xff when the partition is not the last partition.
        if i != len(parts) - 1 and p["file_name"] != "fip.bin":
            append_size = p["part_size"] - file_size - skip_size
            # This may seem clumsy, but it works when the image is too large
            # to keep in memory.
            for j in range(0, append_size, max_write_size):
                append_byte = array(
                    "B", [0xFF for _ in range(min(max_write_size, append_size - j))]
                )
                append_byte.tofile(out)
    logging.info("generating Data.bin done!")


def genBootBin(out, images_path):
    file_path = path.join(images_path, "fip.bin")
    try:
        file_array = array("B")
        fip_size = stat(file_path).st_size
        with open(file_path, "rb") as f:
            file_array.fromfile(f, fip_size)
        for _ in range(0x800 * LBA_SIZE - stat(file_path).st_size):
            file_array.append(0xFF)
        file_array.tofile(out)
        # write twice to back up fip
        file_array.tofile(out)
        logging.info("generating Boot.bin done!")
        out.close()
    except FileNotFoundError as e:
        logging.error("fip.bin does not exist")
        raise e


def main():
    args = parse_Args()
    xmlParser = XmlParser(args.xml)
    parts = xmlParser.parse(args.images_path)
    out_path = path.join(args.output, "Data.bin")

    storage_type = xmlParser.getStorage()
    logging.info("storage type is %s " % storage_type)

    with open(out_path, "wb") as out:
        genDataBin(
            out,
            parts,
            args.images_path,
            storage_type,
            args.max_write_size,
            args.block_size,
            args.chip,
        )

    # append fip to 1M for emmc
    if storage_type == "emmc":
        fip_out_path = path.join(args.output, "Boot.bin")
        try:
            fd = open(fip_out_path, "wb")
        except Exception:
            logging.error("Create %s failed!", fip_out_path)
            raise
        genBootBin(fd, args.images_path)


if __name__ == "__main__":
    main()
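Reviewer note: for readers skimming genDataBin, a tiny sketch of the SPI NAND layout it produces with the default 128 KiB block size. The offsets are computed from the constants above (SV_BLOCK_NUM, FIP_BACKUP_BLOCK_POS) and only apply when chip is not "cv181x" and sv.bin is present.

# --- sketch (not part of the commit) ---
BLOCK = 128 * 1024                             # DEFAULT_BLOCK_SIZE
print("sv.bin     -> 0x%08x" % 0)              # written at the start of Data.bin
print("fip.bin    -> 0x%08x" % (BLOCK * 4))    # SV_BLOCK_NUM blocks in
print("fip backup -> 0x%08x" % (BLOCK * 10))   # FIP_BACKUP_BLOCK_POS blocks in
# --- end sketch ---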
build/tools/common/image_tool/raw2cimg.py  (new executable file, +170 lines)
@@ -0,0 +1,170 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import logging
import argparse
import os
from array import array
import binascii
from XmlParser import XmlParser
from tempfile import TemporaryDirectory
import shutil

MAX_LOAD_SIZE = 16 * 1024 * 1024
CHUNK_TYPE_DONT_CARE = 0
CHUNK_TYPE_CRC_CHECK = 1
FORMAT = "%(levelname)s: %(message)s"
logging.basicConfig(level=logging.INFO, format=FORMAT)


def parse_Args():
    parser = argparse.ArgumentParser(description="Create CVITEK device image")

    parser.add_argument(
        "file_path",
        metavar="file_path",
        type=str,
        help="the file you want to pack with cvitek image header",
    )
    parser.add_argument(
        "output_dir",
        metavar="output_folder_path",
        type=str,
        help="the folder path to the install dir that includes fip, rootfs and kernel",
    )
    parser.add_argument("xml", help="path to partition xml")
    parser.add_argument(
        "-v", "--verbose", help="increase output verbosity", action="store_true"
    )
    args = parser.parse_args()
    if args.verbose:
        logging.debug("Enable more verbose output")
        logging.getLogger().setLevel(level=logging.DEBUG)

    return args


class ImagerBuilder(object):
    def __init__(self, storage: int, output_path):
        self.storage = storage
        self.output_path = output_path

    def packHeader(self, part):
        """
        Header format, 64 bytes in total
        4 Bytes: Magic
        4 Bytes: Version
        4 Bytes: Chunk header size
        4 Bytes: Total chunks
        4 Bytes: File size
        32 Bytes: Extra Flags
        12 Bytes: Reserved
        """
        with open(part["file_path"], "rb") as fd:
            magic = fd.read(4)
            if magic == b"CIMG":
                logging.debug("%s has been packed, skip it!" % part["file_name"])
                return
            fd.seek(0)
            Magic = array("b", [ord(c) for c in "CIMG"])
            Version = array("I", [1])
            chunk_header_sz = 64
            Chunk_sz = array("I", [chunk_header_sz])
            chunk_counts = part["file_size"] // MAX_LOAD_SIZE
            remain = part["file_size"] - MAX_LOAD_SIZE * chunk_counts
            if remain != 0:
                chunk_counts = chunk_counts + 1
            Total_chunk = array("I", [chunk_counts])
            File_sz = array("I", [part["file_size"] + (chunk_counts * chunk_header_sz)])
            try:
                label = part["label"]
            except KeyError:
                label = "gpt"
            Extra_flags = array("B", [ord(c) for c in label])
            for _ in range(len(label), 32):
                Extra_flags.append(ord("\0"))

            img = open(os.path.join(self.output_path, part["file_name"]), "wb")
            # Write Header
            for h in [Magic, Version, Chunk_sz, Total_chunk, File_sz, Extra_flags]:
                h.tofile(img)
            img.seek(64)
            total_size = part["file_size"]
            offset = part["offset"]
            part_sz = part["part_size"]
            while total_size:
                chunk_sz = min(MAX_LOAD_SIZE, total_size)
                chunk = fd.read(chunk_sz)
                crc = binascii.crc32(chunk) & 0xFFFFFFFF
                chunk_header = self._getChunkHeader(chunk_sz, offset, part_sz, crc)
                img.write(chunk_header)
                img.write(chunk)
                total_size -= chunk_sz
                offset += chunk_sz
            img.close()

    def _getChunkHeader(self, size: int, offset: int, part_sz: int, crc32: int):
        """
        Chunk header format, 64 bytes in total
        4 Bytes: Chunk Type
        4 Bytes: Chunk data size
        4 Bytes: Program part offset
        4 Bytes: Program part size
        4 Bytes: Crc32 checksum
        """
        logging.info("size:%x, offset:%x, part_sz:%x, crc:%x" % (size, offset, part_sz, crc32))
        # 5 meaningful fields plus 11 zero words of padding = 16 * 4 = 64 bytes
        Chunk = array(
            "I",
            [CHUNK_TYPE_CRC_CHECK, size, offset, part_sz, crc32] + [0] * 11,
        )
        return Chunk


def main():
    args = parse_Args()
    xmlParser = XmlParser(args.xml)
    install_dir = os.path.dirname(args.file_path)
    parts = xmlParser.parse(install_dir)
    storage = xmlParser.getStorage()
    tmp = TemporaryDirectory()
    imgBuilder = ImagerBuilder(storage, tmp.name)
    for p in parts:
        # Since the xml parser works with absolute paths and the user input path
        # can be relative, use the file name to check.
        if os.path.basename(args.file_path) == p["file_name"]:
            if (
                storage != "emmc" and storage != "spinor"
                and p["file_size"] > p["part_size"] - 128 * 1024
                and p["mountpoint"]
                and p["mountpoint"] != ""
            ):
                logging.error(
                    "Image is too big, mounting the partition will fail!!"
                )
                raise ValueError
            imgBuilder.packHeader(p)
            tmp_path = os.path.join(tmp.name, p["file_name"])
            out_path = os.path.join(args.output_dir, p["file_name"])
            logging.debug("Moving %s -> %s" % (tmp_path, out_path))
            shutil.move(tmp_path, out_path)
            logging.info("Packing %s done!" % (p["file_name"]))


if __name__ == "__main__":
    main()
build/tools/common/image_tool/sv_cv182x_2k_pg.bin  (new binary file, not shown)
build/tools/common/image_tool/sv_cv182x_4k_pg.bin  (new binary file, not shown)