Merge pull request #2436 from proddy/dev

show localized elapsed time in version
Authored by Proddy on 2025-03-02 15:21:47 +01:00, committed by GitHub
21 changed files with 220 additions and 173 deletions

View File

@@ -16,7 +16,7 @@ T := $(shell $(MAKE) $(MAKECMDGOALS) --no-print-directory \
ECHO="COUNTTHIS" | grep -c "COUNTTHIS")
N := x
C = $(words $N)$(eval N := x $N)
ECHO = python3 $(I)/echo_progress.py --stepno=$C --nsteps=$T
ECHO = python3 $(I)/scripts/echo_progress.py --stepno=$C --nsteps=$T
endif
# determine number of parallel compiles based on OS
@@ -151,7 +151,7 @@ COMPILE.cpp = $(CXX) $(CXX_STANDARD) $(CXXFLAGS) $(DEPFLAGS) -c $< -o $@
.SILENT: $(OUTPUT)
all: $(OUTPUT)
@$(ECHO) All done
@$(ECHO) Build complete.
$(OUTPUT): $(OBJS)
@mkdir -p $(@D)
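
For context, this Makefile counts the total number of build steps by running a dry sub-make with ECHO="COUNTTHIS" and grepping the output, then passes the running step number ($C) and the total ($T) to scripts/echo_progress.py, whose path is what changes here. A rough TypeScript sketch of the progress line that script prints (the function name and sample values are illustrative, not part of the repo):

// Rough sketch of the line scripts/echo_progress.py emits; the real script is
// the Python file added later in this diff.
function progressLine(stepno: number, nsteps: number, rest: string[]): string {
  const width = String(nsteps).length;                          // pad step number to the width of nsteps
  const pct = ((100 * stepno) / nsteps).toFixed(2).padStart(6); // e.g. " 25.00"
  return `[${String(stepno).padStart(width)}/${nsteps}](${pct}%) ${rest.join(' ')}`;
}
// progressLine(3, 12, ['CXX', 'src/main.cpp']) -> "[ 3/12]( 25.00%) CXX src/main.cpp"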

View File

@@ -1,28 +0,0 @@
"""
Print makefile progress
From https://stackoverflow.com/questions/451413/make-makefile-progress-indication
"""
import argparse
import math
import sys
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("--stepno", type=int, required=True)
parser.add_argument("--nsteps", type=int, required=True)
parser.add_argument("remainder", nargs=argparse.REMAINDER)
args = parser.parse_args()
nchars = int(math.log(args.nsteps, 10)) + 1
fmt_str = "[{:Xd}/{:Xd}]({:6.2f}%)".replace("X", str(nchars))
progress = 100 * args.stepno / args.nsteps
sys.stdout.write(fmt_str.format(args.stepno, args.nsteps, progress))
for item in args.remainder:
sys.stdout.write(" ")
sys.stdout.write(item)
sys.stdout.write("\n")
if __name__ == "__main__":
main()

View File

@@ -31,7 +31,7 @@ import { FormLoader, SectionContent, useLayoutTitle } from 'components';
import { useI18nContext } from 'i18n/i18n-react';
const Version = () => {
const { LL } = useI18nContext();
const { LL, locale } = useI18nContext();
const [restarting, setRestarting] = useState<boolean>(false);
const [openInstallDialog, setOpenInstallDialog] = useState<boolean>(false);
const [usingDevVersion, setUsingDevVersion] = useState<boolean>(false);
@@ -92,6 +92,30 @@ const Version = () => {
}
}, [latestVersion, latestDevVersion]);
const rtf = new Intl.RelativeTimeFormat(locale, { numeric: 'auto' });
const DIVISIONS = [
{ amount: 60, name: 'seconds' },
{ amount: 60, name: 'minutes' },
{ amount: 24, name: 'hours' },
{ amount: 7, name: 'days' },
{ amount: 4.34524, name: 'weeks' },
{ amount: 12, name: 'months' },
{ amount: Number.POSITIVE_INFINITY, name: 'years' }
];
function formatTimeAgo(date) {
let duration = (date.getTime() - new Date().getTime()) / 1000;
for (let i = 0; i < DIVISIONS.length; i++) {
const division = DIVISIONS[i];
if (Math.abs(duration) < division.amount) {
return rtf.format(
Math.round(duration),
division.name as Intl.RelativeTimeFormatUnit
);
}
duration /= division.amount;
}
}
const getBinURL = () => {
if (!internetLive) {
return '';
@@ -274,7 +298,7 @@ const Version = () => {
</Grid>
<Grid size={{ xs: 8, md: 10 }}>
<FormControlLabel
disabled
disabled={!isDev}
control={
<Checkbox
sx={{
@@ -284,12 +308,17 @@ const Version = () => {
}}
/>
}
slotProps={{
typography: {
color: 'grey'
}
}}
checked={!isDev}
label={LL.STABLE()}
sx={{ '& .MuiSvgIcon-root': { fontSize: 18 } }}
sx={{ '& .MuiSvgIcon-root': { fontSize: 16 } }}
/>
<FormControlLabel
disabled
disabled={isDev}
control={
<Checkbox
sx={{
@@ -299,9 +328,14 @@ const Version = () => {
}}
/>
}
slotProps={{
typography: {
color: 'grey'
}
}}
checked={isDev}
label={LL.DEVELOPMENT()}
sx={{ '& .MuiSvgIcon-root': { fontSize: 18 } }}
sx={{ '& .MuiSvgIcon-root': { fontSize: 16 } }}
/>
</Grid>
</Grid>
@@ -332,14 +366,7 @@ const Version = () => {
{latestVersion.published_at && (
<Typography component="span" variant="caption">
&nbsp;(
{LL.DAYS_AGO(
Math.floor(
(Date.now() -
new Date(latestVersion.published_at).getTime()) /
(1000 * 60 * 60 * 24)
)
)}
)
{formatTimeAgo(new Date(latestVersion.published_at))})
</Typography>
)}
{!usingDevVersion && showButtons(false)}
@@ -357,14 +384,7 @@ const Version = () => {
{latestDevVersion.published_at && (
<Typography component="span" variant="caption">
&nbsp;(
{LL.DAYS_AGO(
Math.floor(
(Date.now() -
new Date(latestDevVersion.published_at).getTime()) /
(1000 * 60 * 60 * 24)
)
)}
)
{formatTimeAgo(new Date(latestDevVersion.published_at))})
</Typography>
)}
{showButtons(true)}
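
In short, the hand-rolled DAYS_AGO strings are replaced by Intl.RelativeTimeFormat, which localizes both the unit and the direction from the UI locale, so the per-language DAYS_AGO entries removed in the translation files below are no longer needed. A minimal standalone sketch of the behaviour (exact wording depends on the runtime's Intl locale data):

// Illustrative only; mirrors the formatTimeAgo() helper added above.
const rtfEn = new Intl.RelativeTimeFormat('en', { numeric: 'auto' });
console.log(rtfEn.format(-5, 'days')); // "5 days ago"
console.log(rtfEn.format(-1, 'days')); // "yesterday", thanks to numeric: 'auto'
const rtfDe = new Intl.RelativeTimeFormat('de', { numeric: 'auto' });
console.log(rtfDe.format(-5, 'days')); // e.g. "vor 5 Tagen"
// formatTimeAgo() walks DIVISIONS to pick the unit: a release published about
// 26 hours ago starts as roughly -93600 seconds and ends up rounded to -1 day.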

View File

@@ -351,7 +351,6 @@ const cz: Translation = {
RELEASE_TYPE: 'Typ sestavení',
REINSTALL: 'Přeinstalovat',
INTERNET_CONNECTION_REQUIRED: 'Pro automatickou kontrolu a instalaci aktualizací je třeba internetové připojení',
DAYS_AGO: '{0} dn{{y|í|í|í|í|í}} zpátky'
};
export default cz;

View File

@@ -351,7 +351,6 @@ const de: Translation = {
RELEASE_TYPE: 'Release Typ',
REINSTALL: 'Neu installieren',
INTERNET_CONNECTION_REQUIRED: 'Internetverbindung erforderlich für automatische Version-Überprüfung und -Aktualisierung',
DAYS_AGO: '{0} Tag{{e}} vorher'
};
export default de;

View File

@@ -351,7 +351,6 @@ const en: Translation = {
RELEASE_TYPE: 'Release Type',
REINSTALL: 'Re-install',
INTERNET_CONNECTION_REQUIRED: 'Internet connection required for automatic version checking and upgrading',
DAYS_AGO: '{0} day{{s}} ago'
};
export default en;

View File

@@ -351,7 +351,6 @@ const fr: Translation = {
RELEASE_TYPE: 'Release Type', // TODO translate
REINSTALL: 'Re-install', // TODO translate
INTERNET_CONNECTION_REQUIRED: 'Internet connection required for automatic version checking and upgrading',
DAYS_AGO: '{0} jour{{s}} avant'
};
export default fr;

View File

@@ -351,7 +351,6 @@ const it: Translation = {
RELEASE_TYPE: 'Release Type', // TODO translate
REINSTALL: 'Re-install', // TODO translate
INTERNET_CONNECTION_REQUIRED: 'Internet connection required for automatic version checking and upgrading',
DAYS_AGO: '{0} giorni{{s}} fa'
};
export default it;

View File

@@ -351,7 +351,6 @@ const nl: Translation = {
RELEASE_TYPE: 'Release Typ',
REINSTALL: 'Opnieuw Installeren',
INTERNET_CONNECTION_REQUIRED: 'Internetverbinding vereist voor automatische versiecontrole en -upgrade',
DAYS_AGO: '{0} dag{{en}} geleden'
};
export default nl;

View File

@@ -351,7 +351,6 @@ const no: Translation = {
RELEASE_TYPE: 'Release Type', // TODO translate
REINSTALL: 'Re-install', // TODO translate
INTERNET_CONNECTION_REQUIRED: 'Internet connection required for automatic version checking and upgrading',
DAYS_AGO: '{0} dag{{er}} siden'
};
export default no;

View File

@@ -351,7 +351,6 @@ const pl: BaseTranslation = {
RELEASE_TYPE: 'Release Type', // TODO translate
REINSTALL: 'Re-install', // TODO translate
INTERNET_CONNECTION_REQUIRED: 'Internet connection required for automatic version checking and upgrading', // TODO translate
DAYS_AGO: '{0} dzień{{s}} temu'
};
export default pl;

View File

@@ -351,7 +351,6 @@ const sk: Translation = {
RELEASE_TYPE: 'Typ vydania',
REINSTALL: 'Preinštalovať',
INTERNET_CONNECTION_REQUIRED: 'Internetové pripojenie je potrebné pre automatickú kontrolu a aktualizáciu',
DAYS_AGO: 'pred {0} d{{ňami|eň|ní|ní|ní|ní}}'
};
export default sk;

View File

@@ -351,7 +351,6 @@ const sv: Translation = {
RELEASE_TYPE: 'Release Type', // TODO translate
REINSTALL: 'Re-install', // TODO translate
INTERNET_CONNECTION_REQUIRED: 'Internetanslutning krävs för automatisk version kontroll och uppdatering',
DAYS_AGO: '{0} dag{{ar}} sedan'
};
export default sv;

View File

@@ -351,7 +351,6 @@ const tr: Translation = {
RELEASE_TYPE: 'Release Type', // TODO translate
REINSTALL: 'Re-install', // TODO translate
INTERNET_CONNECTION_REQUIRED: 'Internet connection required for automatic version checking and upgrading', // TODO translate
DAYS_AGO: '{0} gün{{ü|ü|ü|ü|ü|ü}} önce'
};
export default tr;

View File

@@ -269,10 +269,10 @@ function updateMask(entity: any, de: any, dd: any) {
const old_custom_name = dd.nodes[dd_objIndex].cn;
console.log(
'comparing names, old (' +
old_custom_name +
') with new (' +
new_custom_name +
')'
old_custom_name +
') with new (' +
new_custom_name +
')'
);
if (old_custom_name !== new_custom_name) {
changed = true;
@@ -367,15 +367,15 @@ function check_upgrade(version: string) {
const stable_version = version.split(',')[1];
console.log(
'latest dev version: ' +
dev_version +
', latest stable version: ' +
stable_version
dev_version +
', latest stable version: ' +
stable_version
);
console.log(
'Version upgrade check from version ' +
THIS_VERSION +
', upgradable: ' +
VERSION_IS_UPGRADEABLE
THIS_VERSION +
', upgradable: ' +
VERSION_IS_UPGRADEABLE
);
data = {
emsesp_version: THIS_VERSION,
@@ -5049,20 +5049,21 @@ router
});
// Mock GitHub API
// https://api.github.com/repos/emsesp/EMS-ESP32/releases
router
.get(GH_ENDPOINT_ROOT + '/tags/latest', () => {
const data = {
name: 'v' + LATEST_DEV_VERSION,
published_at: new Date().toISOString()
published_at: new Date().toISOString() // use today's date
};
console.log('returning latest development version: ', data);
console.log('returning latest development version (today): ', data);
return data;
})
.get(GH_ENDPOINT_ROOT + '/latest', () => {
const data = {
name: 'v' + LATEST_STABLE_VERSION,
published_at: '2025-02-07T20:09:46Z'
published_at: '2025-03-01T13:29:13.999Z'
};
console.log('returning latest stable version: ', data);
return data;
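
The mocked stable-release date moves from 2025-02-07 to 2025-03-01, presumably so the new relative-time rendering shows a recent value during development. An illustrative check, assuming "now" is near the commit date of 2025-03-02 (numbers here are not part of the change):

// Hypothetical walkthrough of the mocked dates above.
const publishedAt = new Date('2025-03-01T13:29:13.999Z');
const hoursAgo = (Date.now() - publishedAt.getTime()) / 3_600_000;
console.log(hoursAgo.toFixed(1)); // ~25 h when run on 2025-03-02
// formatTimeAgo(publishedAt) therefore falls into the "days" bucket and an
// English locale renders "yesterday"; the dev release uses new Date(), so it
// shows up as a seconds- or minutes-ago value.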

View File

@@ -1,3 +0,0 @@
{
"packageManager": "yarn@4.6.0"
}

View File

@@ -26,79 +26,87 @@ import re
import subprocess
import sys
RE_ELF_SECTION = re.compile(r"^\s*(?P<type>\w+)\s+(?P<offset>\w+)\s+(?P<virtaddr>\w+)\s+(?P<physaddr>\w+)\s+(?P<filesiz>\w+)\s+(?P<memsiz>\w+)\s+(?P<ndx>\w+)\s+")
RE_ELF_SECTION = re.compile(
r"^\s*(?P<type>\w+)\s+(?P<offset>\w+)\s+(?P<virtaddr>\w+)\s+(?P<physaddr>\w+)\s+(?P<filesiz>\w+)\s+(?P<memsiz>\w+)\s+(?P<ndx>\w+)\s+")
Symbol = collections.namedtuple("Symbol", ["value", "size", "line"])
RE_ELF_SYMBOL = re.compile(r"^(?P<before_value>\s*(?P<num>\w+):\s+)(?P<value>\w+)(?P<after_value>\s+(?P<size>\w+)\s+(?P<type>\w+)\s+(?P<bind>\w+)\s+(?P<visibility>\w+)\s+(?P<ndx>\w+)\s+(?P<name>\w+))")
RE_ELF_SYMBOL = re.compile(
r"^(?P<before_value>\s*(?P<num>\w+):\s+)(?P<value>\w+)(?P<after_value>\s+(?P<size>\w+)\s+(?P<type>\w+)\s+(?P<bind>\w+)\s+(?P<visibility>\w+)\s+(?P<ndx>\w+)\s+(?P<name>\w+))")
def print_tls_size(fw_elf):
tls_offset = None
width = 8
tls_offset = None
width = 8
lines = subprocess.run(["readelf", "-W", "--program-headers", fw_elf],
check=True, universal_newlines=True, stdout=subprocess.PIPE
).stdout.strip().split("\n")
lines = subprocess.run(["readelf", "-W", "--program-headers", fw_elf],
check=True, universal_newlines=True, stdout=subprocess.PIPE
).stdout.strip().split("\n")
for line in lines:
match = RE_ELF_SECTION.match(line)
if match:
if tls_offset is None and match["type"] == "TLS":
tls_offset = int(match["virtaddr"], 16)
for line in lines:
match = RE_ELF_SECTION.match(line)
if match:
if tls_offset is None and match["type"] == "TLS":
tls_offset = int(match["virtaddr"], 16)
header = True
lines = subprocess.run(["readelf", "-W", "--syms", "--dyn-syms", fw_elf],
check=True, universal_newlines=True, stdout=subprocess.PIPE
).stdout.strip().split("\n")
syms = set()
header = True
lines = subprocess.run(["readelf", "-W", "--syms", "--dyn-syms", fw_elf],
check=True, universal_newlines=True, stdout=subprocess.PIPE
).stdout.strip().split("\n")
syms = set()
for line in lines:
match = RE_ELF_SYMBOL.match(line)
if match:
header = False
for line in lines:
match = RE_ELF_SYMBOL.match(line)
if match:
header = False
if match["type"] == "TLS":
syms.add(Symbol(int(match["value"], 16), int(match["size"]), line))
width = len(match['value'])
elif tls_offset is not None and (match["type"] == "NOTYPE" and match["bind"] == "GLOBAL"
and match["visibility"] == "DEFAULT"
and match["name"] in set(["_thread_local_start", "_thread_local_end"])
):
value = int(match["value"], 16) - tls_offset
line = ("{1}{2:0{0}x}{3}").format(len(match['value']),
match["before_value"], value, match["after_value"])
syms.add(Symbol(value, int(match["size"]), line))
if match["type"] == "TLS":
syms.add(
Symbol(int(match["value"], 16), int(match["size"]), line))
width = len(match['value'])
elif tls_offset is not None and (match["type"] == "NOTYPE" and match["bind"] == "GLOBAL"
and match["visibility"] == "DEFAULT"
and match["name"] in set(["_thread_local_start", "_thread_local_end"])
):
value = int(match["value"], 16) - tls_offset
line = ("{1}{2:0{0}x}{3}").format(len(match['value']),
match["before_value"], value, match["after_value"])
syms.add(Symbol(value, int(match["size"]), line))
elif header:
print(line)
elif header:
print(line)
if syms:
syms = list(syms)
syms.sort()
size = (syms[-1].value + syms[-1].size) - syms[0].value
else:
size = 0
if syms:
syms = list(syms)
syms.sort()
size = (syms[-1].value + syms[-1].size) - syms[0].value
else:
size = 0
value = syms[0].value
for sym in syms:
if sym.value > value:
print("\t{1:0{0}x} {2:5d} TLS UNKNOWN".format(width, value, sym.value - value))
print(sym.line)
value = sym.value + sym.size
value = syms[0].value
for sym in syms:
if sym.value > value:
print("\t{1:0{0}x} {2:5d} TLS UNKNOWN".format(
width, value, sym.value - value))
print(sym.line)
value = sym.value + sym.size
print()
print(f"Total Thread-Local Storage size: {size} bytes")
print()
print(f"Total Thread-Local Storage size: {size} bytes")
def after_fw_elf(source, target, env):
fw_elf = str(target[0])
print_tls_size(fw_elf)
fw_elf = str(target[0])
print_tls_size(fw_elf)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Calculate size of Thread-Local Storage")
parser.add_argument("fw_elf", metavar="ELF", type=str, help="Firmware ELF filename")
parser = argparse.ArgumentParser(
description="Calculate size of Thread-Local Storage")
parser.add_argument("fw_elf", metavar="ELF", type=str,
help="Firmware ELF filename")
args = parser.parse_args()
print_tls_size(**vars(args))
args = parser.parse_args()
print_tls_size(**vars(args))
elif __name__ == "SCons.Script":
Import("env")
Import("env")
env.AddPostAction("${BUILD_DIR}/${PROGNAME}.elf", after_fw_elf)
env.AddPostAction("${BUILD_DIR}/${PROGNAME}.elf", after_fw_elf)

View File

@@ -3,6 +3,7 @@ import os
Import("env")
def buildWeb():
os.chdir("interface")
print("Building web interface...")
@@ -19,6 +20,7 @@ def buildWeb():
finally:
os.chdir("..")
# Don't build webUI if called from GitHub Actions
if "NO_BUILD_WEBUI" in os.environ:
print("!! Skipping the build of the web interface !!")

scripts/echo_progress.py (new file, 29 lines)
View File

@@ -0,0 +1,29 @@
"""
Print makefile progress
From https://stackoverflow.com/questions/451413/make-makefile-progress-indication
"""
import argparse
import math
import sys
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("--stepno", type=int, required=True)
parser.add_argument("--nsteps", type=int, required=True)
parser.add_argument("remainder", nargs=argparse.REMAINDER)
args = parser.parse_args()
nchars = int(math.log(args.nsteps, 10)) + 1
fmt_str = "[{:Xd}/{:Xd}]({:6.2f}%)".replace("X", str(nchars))
progress = 100 * args.stepno / args.nsteps
sys.stdout.write(fmt_str.format(args.stepno, args.nsteps, progress))
for item in args.remainder:
sys.stdout.write(" ")
sys.stdout.write(item)
sys.stdout.write("\n")
if __name__ == "__main__":
main()

View File

@@ -18,7 +18,8 @@ import tempfile
try:
from parttool import PARTITION_TABLE_OFFSET, PartitionName, PartitionType, ParttoolTarget
except ImportError:
COMPONENTS_PATH = os.path.expandvars(os.path.join('$IDF_PATH', 'components'))
COMPONENTS_PATH = os.path.expandvars(
os.path.join('$IDF_PATH', 'components'))
PARTTOOL_DIR = os.path.join(COMPONENTS_PATH, 'partition_table')
sys.path.append(PARTTOOL_DIR)
from parttool import PARTITION_TABLE_OFFSET, PartitionName, PartitionType, ParttoolTarget
@@ -49,7 +50,8 @@ class OtatoolTarget():
temp_file = tempfile.NamedTemporaryFile(delete=False)
temp_file.close()
try:
self.target.read_partition(OtatoolTarget.OTADATA_PARTITION, temp_file.name)
self.target.read_partition(
OtatoolTarget.OTADATA_PARTITION, temp_file.name)
with open(temp_file.name, 'rb') as f:
self.otadata = f.read()
finally:
@@ -101,7 +103,8 @@ class OtatoolTarget():
ota_partitions = list()
for i in range(gen.NUM_PARTITION_SUBTYPE_APP_OTA):
ota_partition = filter(lambda p: p.subtype == (gen.MIN_PARTITION_SUBTYPE_APP_OTA + i), partition_table)
ota_partition = filter(lambda p: p.subtype == (
gen.MIN_PARTITION_SUBTYPE_APP_OTA + i), partition_table)
try:
ota_partitions.append(list(ota_partition)[0])
@@ -118,9 +121,11 @@ class OtatoolTarget():
try:
if isinstance(ota_id, int):
ota_partition_next = filter(lambda p: p.subtype - gen.MIN_PARTITION_SUBTYPE_APP_OTA == ota_id, ota_partitions)
ota_partition_next = filter(
lambda p: p.subtype - gen.MIN_PARTITION_SUBTYPE_APP_OTA == ota_id, ota_partitions)
else:
ota_partition_next = filter(lambda p: p.name == ota_id, ota_partitions)
ota_partition_next = filter(
lambda p: p.name == ota_id, ota_partitions)
ota_partition_next = list(ota_partition_next)[0]
except IndexError:
@@ -173,7 +178,8 @@ class OtatoolTarget():
try:
with open(temp_file.name, 'wb') as otadata_next_file:
start = (1 if otadata_compute_base == 0 else 0) * (self.spi_flash_sec_size >> 1)
start = (1 if otadata_compute_base == 0 else 0) * \
(self.spi_flash_sec_size >> 1)
otadata_next_file.write(self.otadata)
@@ -185,15 +191,18 @@ class OtatoolTarget():
otadata_next_file.flush()
self.target.write_partition(OtatoolTarget.OTADATA_PARTITION, temp_file.name)
self.target.write_partition(
OtatoolTarget.OTADATA_PARTITION, temp_file.name)
finally:
os.unlink(temp_file.name)
def read_ota_partition(self, ota_id, output):
self.target.read_partition(self._get_partition_id_from_ota_id(ota_id), output)
self.target.read_partition(
self._get_partition_id_from_ota_id(ota_id), output)
def write_ota_partition(self, ota_id, input):
self.target.write_partition(self._get_partition_id_from_ota_id(ota_id), input)
self.target.write_partition(
self._get_partition_id_from_ota_id(ota_id), input)
def erase_ota_partition(self, ota_id):
self.target.erase_partition(self._get_partition_id_from_ota_id(ota_id))
@@ -204,7 +213,8 @@ def _read_otadata(target):
otadata_info = target._get_otadata_info()
print(' {:8s} \t {:8s} | \t {:8s} \t {:8s}'.format('OTA_SEQ', 'CRC', 'OTA_SEQ', 'CRC'))
print(' {:8s} \t {:8s} | \t {:8s} \t {:8s}'.format(
'OTA_SEQ', 'CRC', 'OTA_SEQ', 'CRC'))
print('Firmware: 0x{:08x} \t0x{:08x} | \t0x{:08x} \t 0x{:08x}'.format(otadata_info[0].seq, otadata_info[0].crc,
otadata_info[1].seq, otadata_info[1].crc))
@@ -238,46 +248,64 @@ def main():
parser = argparse.ArgumentParser('ESP-IDF OTA Partitions Tool')
parser.add_argument('--quiet', '-q', help='suppress stderr messages', action='store_true')
parser.add_argument('--esptool-args', help='additional main arguments for esptool', nargs='+')
parser.add_argument('--esptool-write-args', help='additional subcommand arguments for esptool write_flash', nargs='+')
parser.add_argument('--esptool-read-args', help='additional subcommand arguments for esptool read_flash', nargs='+')
parser.add_argument('--esptool-erase-args', help='additional subcommand arguments for esptool erase_region', nargs='+')
parser.add_argument(
'--quiet', '-q', help='suppress stderr messages', action='store_true')
parser.add_argument(
'--esptool-args', help='additional main arguments for esptool', nargs='+')
parser.add_argument('--esptool-write-args',
help='additional subcommand arguments for esptool write_flash', nargs='+')
parser.add_argument('--esptool-read-args',
help='additional subcommand arguments for esptool read_flash', nargs='+')
parser.add_argument('--esptool-erase-args',
help='additional subcommand arguments for esptool erase_region', nargs='+')
# There are two possible sources for the partition table: a device attached to the host
# or a partition table CSV/binary file. These sources are mutually exclusive.
parser.add_argument('--port', '-p', help='port where the device to read the partition table from is attached')
parser.add_argument(
'--port', '-p', help='port where the device to read the partition table from is attached')
parser.add_argument('--baud', '-b', help='baudrate to use', type=int)
parser.add_argument('--partition-table-offset', '-o', help='offset to read the partition table from', type=str)
parser.add_argument('--partition-table-offset', '-o',
help='offset to read the partition table from', type=str)
parser.add_argument('--partition-table-file', '-f', help='file (CSV/binary) to read the partition table from; \
overrides device attached to specified port as the partition table source when defined')
subparsers = parser.add_subparsers(dest='operation', help='run otatool -h for additional help')
subparsers = parser.add_subparsers(
dest='operation', help='run otatool -h for additional help')
spi_flash_sec_size = argparse.ArgumentParser(add_help=False)
spi_flash_sec_size.add_argument('--spi-flash-sec-size', help='value of SPI_FLASH_SEC_SIZE macro', type=str)
spi_flash_sec_size.add_argument(
'--spi-flash-sec-size', help='value of SPI_FLASH_SEC_SIZE macro', type=str)
# Specify the supported operations
subparsers.add_parser('read_otadata', help='read otadata partition', parents=[spi_flash_sec_size])
subparsers.add_parser('read_otadata', help='read otadata partition', parents=[
spi_flash_sec_size])
subparsers.add_parser('erase_otadata', help='erase otadata partition')
slot_or_name_parser = argparse.ArgumentParser(add_help=False)
slot_or_name_parser_args = slot_or_name_parser.add_mutually_exclusive_group()
slot_or_name_parser_args.add_argument('--slot', help='slot number of the ota partition', type=int)
slot_or_name_parser_args.add_argument('--name', help='name of the ota partition')
slot_or_name_parser_args.add_argument(
'--slot', help='slot number of the ota partition', type=int)
slot_or_name_parser_args.add_argument(
'--name', help='name of the ota partition')
subparsers.add_parser('switch_ota_partition', help='switch otadata partition', parents=[slot_or_name_parser, spi_flash_sec_size])
subparsers.add_parser('switch_ota_partition', help='switch otadata partition', parents=[
slot_or_name_parser, spi_flash_sec_size])
read_ota_partition_subparser = subparsers.add_parser('read_ota_partition', help='read contents of an ota partition', parents=[slot_or_name_parser])
read_ota_partition_subparser.add_argument('--output', help='file to write the contents of the ota partition to', required=True)
read_ota_partition_subparser = subparsers.add_parser(
'read_ota_partition', help='read contents of an ota partition', parents=[slot_or_name_parser])
read_ota_partition_subparser.add_argument(
'--output', help='file to write the contents of the ota partition to', required=True)
write_ota_partition_subparser = subparsers.add_parser('write_ota_partition', help='write contents to an ota partition', parents=[slot_or_name_parser])
write_ota_partition_subparser.add_argument('--input', help='file whose contents to write to the ota partition')
write_ota_partition_subparser = subparsers.add_parser(
'write_ota_partition', help='write contents to an ota partition', parents=[slot_or_name_parser])
write_ota_partition_subparser.add_argument(
'--input', help='file whose contents to write to the ota partition')
subparsers.add_parser('erase_ota_partition', help='erase contents of an ota partition', parents=[slot_or_name_parser])
subparsers.add_parser(
'erase_ota_partition', help='erase contents of an ota partition', parents=[slot_or_name_parser])
args = parser.parse_args()
@@ -298,7 +326,8 @@ def main():
target_args['partition_table_file'] = args.partition_table_file
if args.partition_table_offset:
target_args['partition_table_offset'] = int(args.partition_table_offset, 0)
target_args['partition_table_offset'] = int(
args.partition_table_offset, 0)
try:
if args.spi_flash_sec_size:
@@ -324,7 +353,7 @@ def main():
target = OtatoolTarget(**target_args)
# Create the operation table and execute the operation
common_args = {'target':target}
common_args = {'target': target}
ota_id = []
@@ -338,18 +367,18 @@ def main():
pass
otatool_ops = {
'read_otadata':(_read_otadata, []),
'erase_otadata':(_erase_otadata, []),
'switch_ota_partition':(_switch_ota_partition, ota_id),
'read_ota_partition':(_read_ota_partition, ['output'] + ota_id),
'write_ota_partition':(_write_ota_partition, ['input'] + ota_id),
'erase_ota_partition':(_erase_ota_partition, ota_id)
'read_otadata': (_read_otadata, []),
'erase_otadata': (_erase_otadata, []),
'switch_ota_partition': (_switch_ota_partition, ota_id),
'read_ota_partition': (_read_ota_partition, ['output'] + ota_id),
'write_ota_partition': (_write_ota_partition, ['input'] + ota_id),
'erase_ota_partition': (_erase_ota_partition, ota_id)
}
(op, op_args) = otatool_ops[args.operation]
for op_arg in op_args:
common_args.update({op_arg:vars(args)[op_arg]})
common_args.update({op_arg: vars(args)[op_arg]})
try:
common_args['ota_id'] = common_args.pop('name')

View File

@@ -161,6 +161,6 @@ def on_upload(source, target, env):
print()
if env.get('UPLOAD_PROTOCOL') == 'custom':
env.Replace(UPLOADCMD=on_upload)