Mirror of https://github.com/emsesp/EMS-ESP32.git
Merge pull request #2436 from proddy/dev
show localized elapsed time in version
Makefile (4 lines changed)
@@ -16,7 +16,7 @@ T := $(shell $(MAKE) $(MAKECMDGOALS) --no-print-directory \
 ECHO="COUNTTHIS" | grep -c "COUNTTHIS")
 N := x
 C = $(words $N)$(eval N := x $N)
-ECHO = python3 $(I)/echo_progress.py --stepno=$C --nsteps=$T
+ECHO = python3 $(I)/scripts/echo_progress.py --stepno=$C --nsteps=$T
 endif

 # determine number of parallel compiles based on OS
@@ -151,7 +151,7 @@ COMPILE.cpp = $(CXX) $(CXX_STANDARD) $(CXXFLAGS) $(DEPFLAGS) -c $< -o $@
 .SILENT: $(OUTPUT)

 all: $(OUTPUT)
-@$(ECHO) All done
+@$(ECHO) Build complete.

 $(OUTPUT): $(OBJS)
 @mkdir -p $(@D)
@@ -1,28 +0,0 @@
-"""
-Print makefile progress
-From https://stackoverflow.com/questions/451413/make-makefile-progress-indication
-"""
-
-import argparse
-import math
-import sys
-
-def main():
-    parser = argparse.ArgumentParser(description=__doc__)
-    parser.add_argument("--stepno", type=int, required=True)
-    parser.add_argument("--nsteps", type=int, required=True)
-    parser.add_argument("remainder", nargs=argparse.REMAINDER)
-    args = parser.parse_args()
-
-    nchars = int(math.log(args.nsteps, 10)) + 1
-    fmt_str = "[{:Xd}/{:Xd}]({:6.2f}%)".replace("X", str(nchars))
-    progress = 100 * args.stepno / args.nsteps
-    sys.stdout.write(fmt_str.format(args.stepno, args.nsteps, progress))
-    for item in args.remainder:
-        sys.stdout.write(" ")
-        sys.stdout.write(item)
-    sys.stdout.write("\n")
-
-if __name__ == "__main__":
-    main()
-
@@ -31,7 +31,7 @@ import { FormLoader, SectionContent, useLayoutTitle } from 'components';
 import { useI18nContext } from 'i18n/i18n-react';

 const Version = () => {
-  const { LL } = useI18nContext();
+  const { LL, locale } = useI18nContext();
   const [restarting, setRestarting] = useState<boolean>(false);
   const [openInstallDialog, setOpenInstallDialog] = useState<boolean>(false);
   const [usingDevVersion, setUsingDevVersion] = useState<boolean>(false);
@@ -92,6 +92,30 @@ const Version = () => {
       }
     }
   }, [latestVersion, latestDevVersion]);

+  const rtf = new Intl.RelativeTimeFormat(locale, { numeric: 'auto' });
+  const DIVISIONS = [
+    { amount: 60, name: 'seconds' },
+    { amount: 60, name: 'minutes' },
+    { amount: 24, name: 'hours' },
+    { amount: 7, name: 'days' },
+    { amount: 4.34524, name: 'weeks' },
+    { amount: 12, name: 'months' },
+    { amount: Number.POSITIVE_INFINITY, name: 'years' }
+  ];
+  function formatTimeAgo(date) {
+    let duration = (date.getTime() - new Date().getTime()) / 1000;
+    for (let i = 0; i < DIVISIONS.length; i++) {
+      const division = DIVISIONS[i];
+      if (Math.abs(duration) < division.amount) {
+        return rtf.format(
+          Math.round(duration),
+          division.name as Intl.RelativeTimeFormatUnit
+        );
+      }
+      duration /= division.amount;
+    }
+  }
+
   const getBinURL = () => {
     if (!internetLive) {
       return '';
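Note: the block above is the heart of the change. formatTimeAgo() converts a raw second delta into the largest sensible unit via the DIVISIONS table and hands it to Intl.RelativeTimeFormat, which supplies the localized wording. A minimal sketch of that primitive, assuming an 'en' locale here (the component passes the active locale from useI18nContext()):

    // Sketch only: the core call the new formatTimeAgo() helper ends up making.
    const rtf = new Intl.RelativeTimeFormat('en', { numeric: 'auto' });
    rtf.format(-45, 'seconds'); // "45 seconds ago"
    rtf.format(-1, 'days');     // "yesterday" (numeric: 'auto' picks the idiomatic form)
    rtf.format(-3, 'weeks');    // "3 weeks ago"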
@@ -274,7 +298,7 @@ const Version = () => {
             </Grid>
             <Grid size={{ xs: 8, md: 10 }}>
               <FormControlLabel
-                disabled
+                disabled={!isDev}
                 control={
                   <Checkbox
                     sx={{
@@ -284,12 +308,17 @@ const Version = () => {
                     }}
                   />
                 }
+                slotProps={{
+                  typography: {
+                    color: 'grey'
+                  }
+                }}
                 checked={!isDev}
                 label={LL.STABLE()}
-                sx={{ '& .MuiSvgIcon-root': { fontSize: 18 } }}
+                sx={{ '& .MuiSvgIcon-root': { fontSize: 16 } }}
               />
               <FormControlLabel
-                disabled
+                disabled={isDev}
                 control={
                   <Checkbox
                     sx={{
@@ -299,9 +328,14 @@ const Version = () => {
                     }}
                   />
                 }
+                slotProps={{
+                  typography: {
+                    color: 'grey'
+                  }
+                }}
                 checked={isDev}
                 label={LL.DEVELOPMENT()}
-                sx={{ '& .MuiSvgIcon-root': { fontSize: 18 } }}
+                sx={{ '& .MuiSvgIcon-root': { fontSize: 16 } }}
               />
             </Grid>
           </Grid>
@@ -332,14 +366,7 @@ const Version = () => {
             {latestVersion.published_at && (
               <Typography component="span" variant="caption">
                 (
-                {LL.DAYS_AGO(
-                  Math.floor(
-                    (Date.now() -
-                      new Date(latestVersion.published_at).getTime()) /
-                      (1000 * 60 * 60 * 24)
-                  )
-                )}
-                )
+                {formatTimeAgo(new Date(latestVersion.published_at))})
               </Typography>
             )}
             {!usingDevVersion && showButtons(false)}
@@ -357,14 +384,7 @@ const Version = () => {
             {latestDevVersion.published_at && (
               <Typography component="span" variant="caption">
                 (
-                {LL.DAYS_AGO(
-                  Math.floor(
-                    (Date.now() -
-                      new Date(latestDevVersion.published_at).getTime()) /
-                      (1000 * 60 * 60 * 24)
-                  )
-                )}
-                )
+                {formatTimeAgo(new Date(latestDevVersion.published_at))})
               </Typography>
             )}
             {showButtons(true)}
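Both call sites now drop the hand-rolled day arithmetic and the per-locale DAYS_AGO template in favour of the helper. Roughly, as a sketch (published is a hypothetical past Date; formatTimeAgo is the helper added above):

    // Before: whole days, computed by hand, fed into a translated template string
    const days = Math.floor((Date.now() - published.getTime()) / (1000 * 60 * 60 * 24));
    // After: one call that picks both the unit and the localized wording
    const label = formatTimeAgo(published); // e.g. "3 days ago", "yesterday", "2 months ago"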
@@ -351,7 +351,6 @@ const cz: Translation = {
   RELEASE_TYPE: 'Typ sestavení',
   REINSTALL: 'Přeinstalovat',
   INTERNET_CONNECTION_REQUIRED: 'Pro automatickou kontrolu a instalaci aktualizací je třeba internetové připojení',
-  DAYS_AGO: '{0} dn{{y|í|í|í|í|í}} zpátky'
 };

 export default cz;
@@ -351,7 +351,6 @@ const de: Translation = {
   RELEASE_TYPE: 'Release Typ',
   REINSTALL: 'Neu installieren',
   INTERNET_CONNECTION_REQUIRED: 'Internetverbindung erforderlich für automatische Version-Überprüfung und -Aktualisierung',
-  DAYS_AGO: '{0} Tag{{e}} vorher'
 };

 export default de;
@@ -351,7 +351,6 @@ const en: Translation = {
   RELEASE_TYPE: 'Release Type',
   REINSTALL: 'Re-install',
   INTERNET_CONNECTION_REQUIRED: 'Internet connection required for automatic version checking and upgrading',
-  DAYS_AGO: '{0} day{{s}} ago'
 };

 export default en;
@@ -351,7 +351,6 @@ const fr: Translation = {
   RELEASE_TYPE: 'Release Type', // TODO translate
   REINSTALL: 'Re-install', // TODO translate
   INTERNET_CONNECTION_REQUIRED: 'Internet connection required for automatic version checking and upgrading',
-  DAYS_AGO: '{0} jour{{s}} avant'
 };

 export default fr;
@@ -351,7 +351,6 @@ const it: Translation = {
   RELEASE_TYPE: 'Release Type', // TODO translate
   REINSTALL: 'Re-install', // TODO translate
   INTERNET_CONNECTION_REQUIRED: 'Internet connection required for automatic version checking and upgrading',
-  DAYS_AGO: '{0} giorni{{s}} fa'
 };

 export default it;
@@ -351,7 +351,6 @@ const nl: Translation = {
   RELEASE_TYPE: 'Release Typ',
   REINSTALL: 'Opnieuw Installeren',
   INTERNET_CONNECTION_REQUIRED: 'Internetverbinding vereist voor automatische versiecontrole en -upgrade',
-  DAYS_AGO: '{0} dag{{en}} geleden'
 };

 export default nl;
@@ -351,7 +351,6 @@ const no: Translation = {
   RELEASE_TYPE: 'Release Type', // TODO translate
   REINSTALL: 'Re-install', // TODO translate
   INTERNET_CONNECTION_REQUIRED: 'Internet connection required for automatic version checking and upgrading',
-  DAYS_AGO: '{0} dag{{er}} siden'
 };

 export default no;
@@ -351,7 +351,6 @@ const pl: BaseTranslation = {
   RELEASE_TYPE: 'Release Type', // TODO translate
   REINSTALL: 'Re-install', // TODO translate
   INTERNET_CONNECTION_REQUIRED: 'Internet connection required for automatic version checking and upgrading', // TODO translate
-  DAYS_AGO: '{0} dzień{{s}} temu'
 };

 export default pl;
@@ -351,7 +351,6 @@ const sk: Translation = {
   RELEASE_TYPE: 'Typ vydania',
   REINSTALL: 'Preinštalovať',
   INTERNET_CONNECTION_REQUIRED: 'Internetové pripojenie je potrebné pre automatickú kontrolu a aktualizáciu',
-  DAYS_AGO: 'pred {0} d{{ňami|eň|ní|ní|ní|ní}}'
 };

 export default sk;
@@ -351,7 +351,6 @@ const sv: Translation = {
   RELEASE_TYPE: 'Release Type', // TODO translate
   REINSTALL: 'Re-install', // TODO translate
   INTERNET_CONNECTION_REQUIRED: 'Internetanslutning krävs för automatisk version kontroll och uppdatering',
-  DAYS_AGO: '{0} dag{{ar}} sedan'
 };

 export default sv;
@@ -351,7 +351,6 @@ const tr: Translation = {
   RELEASE_TYPE: 'Release Type', // TODO translate
   REINSTALL: 'Re-install', // TODO translate
   INTERNET_CONNECTION_REQUIRED: 'Internet connection required for automatic version checking and upgrading', // TODO translate
-  DAYS_AGO: '{0} gün{{ü|ü|ü|ü|ü|ü}} önce'
 };

 export default tr;
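With the label produced by Intl.RelativeTimeFormat, the per-locale DAYS_AGO plural templates removed above become redundant: the browser already knows each locale's plural rules. A small sketch of the behaviour this relies on (locales picked only for illustration):

    const en = new Intl.RelativeTimeFormat('en', { numeric: 'auto' });
    const de = new Intl.RelativeTimeFormat('de', { numeric: 'auto' });
    en.format(-1, 'days'); // "yesterday"
    en.format(-5, 'days'); // "5 days ago"
    de.format(-5, 'days'); // "vor 5 Tagen"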
@@ -269,10 +269,10 @@ function updateMask(entity: any, de: any, dd: any) {
     const old_custom_name = dd.nodes[dd_objIndex].cn;
     console.log(
       'comparing names, old (' +
         old_custom_name +
         ') with new (' +
         new_custom_name +
         ')'
     );
     if (old_custom_name !== new_custom_name) {
       changed = true;
@@ -367,15 +367,15 @@ function check_upgrade(version: string) {
     const stable_version = version.split(',')[1];
     console.log(
       'latest dev version: ' +
         dev_version +
         ', latest stable version: ' +
         stable_version
     );
     console.log(
       'Version upgrade check from version ' +
         THIS_VERSION +
         ', upgradable: ' +
         VERSION_IS_UPGRADEABLE
     );
     data = {
       emsesp_version: THIS_VERSION,
@@ -5049,20 +5049,21 @@ router
   });

 // Mock GitHub API
+// https://api.github.com/repos/emsesp/EMS-ESP32/releases

 router
   .get(GH_ENDPOINT_ROOT + '/tags/latest', () => {
     const data = {
       name: 'v' + LATEST_DEV_VERSION,
-      published_at: new Date().toISOString()
+      published_at: new Date().toISOString() // use todays date
     };
-    console.log('returning latest development version: ', data);
+    console.log('returning latest development version (today): ', data);
     return data;
   })
   .get(GH_ENDPOINT_ROOT + '/latest', () => {
     const data = {
       name: 'v' + LATEST_STABLE_VERSION,
-      published_at: '2025-02-07T20:09:46Z'
+      published_at: '2025-03-01T13:29:13.999Z'
     };
     console.log('returning latest stable version: ', data);
     return data;
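The mock server now reports the dev build as published today and the stable build at a fixed date, which exercises both ends of the new relative label. A rough sketch of what the Version page would show for these timestamps, assuming an 'en' locale and the formatTimeAgo helper from the component above (the second label naturally depends on the current date):

    formatTimeAgo(new Date());                           // "now" — the mocked dev release
    formatTimeAgo(new Date('2025-03-01T13:29:13.999Z')); // e.g. "2 months ago"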
@@ -1,3 +0,0 @@
-{
-  "packageManager": "yarn@4.6.0"
-}
@@ -26,79 +26,87 @@ import re
 import subprocess
 import sys

-RE_ELF_SECTION = re.compile(r"^\s*(?P<type>\w+)\s+(?P<offset>\w+)\s+(?P<virtaddr>\w+)\s+(?P<physaddr>\w+)\s+(?P<filesiz>\w+)\s+(?P<memsiz>\w+)\s+(?P<ndx>\w+)\s+")
+RE_ELF_SECTION = re.compile(
+    r"^\s*(?P<type>\w+)\s+(?P<offset>\w+)\s+(?P<virtaddr>\w+)\s+(?P<physaddr>\w+)\s+(?P<filesiz>\w+)\s+(?P<memsiz>\w+)\s+(?P<ndx>\w+)\s+")
 Symbol = collections.namedtuple("Symbol", ["value", "size", "line"])
-RE_ELF_SYMBOL = re.compile(r"^(?P<before_value>\s*(?P<num>\w+):\s+)(?P<value>\w+)(?P<after_value>\s+(?P<size>\w+)\s+(?P<type>\w+)\s+(?P<bind>\w+)\s+(?P<visibility>\w+)\s+(?P<ndx>\w+)\s+(?P<name>\w+))")
+RE_ELF_SYMBOL = re.compile(
+    r"^(?P<before_value>\s*(?P<num>\w+):\s+)(?P<value>\w+)(?P<after_value>\s+(?P<size>\w+)\s+(?P<type>\w+)\s+(?P<bind>\w+)\s+(?P<visibility>\w+)\s+(?P<ndx>\w+)\s+(?P<name>\w+))")


 def print_tls_size(fw_elf):
     tls_offset = None
     width = 8

     lines = subprocess.run(["readelf", "-W", "--program-headers", fw_elf],
                            check=True, universal_newlines=True, stdout=subprocess.PIPE
                            ).stdout.strip().split("\n")

     for line in lines:
         match = RE_ELF_SECTION.match(line)
         if match:
             if tls_offset is None and match["type"] == "TLS":
                 tls_offset = int(match["virtaddr"], 16)

     header = True
     lines = subprocess.run(["readelf", "-W", "--syms", "--dyn-syms", fw_elf],
                            check=True, universal_newlines=True, stdout=subprocess.PIPE
                            ).stdout.strip().split("\n")
     syms = set()

     for line in lines:
         match = RE_ELF_SYMBOL.match(line)
         if match:
             header = False

             if match["type"] == "TLS":
-                syms.add(Symbol(int(match["value"], 16), int(match["size"]), line))
+                syms.add(
+                    Symbol(int(match["value"], 16), int(match["size"]), line))
                 width = len(match['value'])
             elif tls_offset is not None and (match["type"] == "NOTYPE" and match["bind"] == "GLOBAL"
                                              and match["visibility"] == "DEFAULT"
                                              and match["name"] in set(["_thread_local_start", "_thread_local_end"])
                                              ):
                 value = int(match["value"], 16) - tls_offset
                 line = ("{1}{2:0{0}x}{3}").format(len(match['value']),
                                                   match["before_value"], value, match["after_value"])
                 syms.add(Symbol(value, int(match["size"]), line))

         elif header:
             print(line)

     if syms:
         syms = list(syms)
         syms.sort()
         size = (syms[-1].value + syms[-1].size) - syms[0].value
     else:
         size = 0

     value = syms[0].value
     for sym in syms:
         if sym.value > value:
-            print("\t{1:0{0}x} {2:5d} TLS UNKNOWN".format(width, value, sym.value - value))
+            print("\t{1:0{0}x} {2:5d} TLS UNKNOWN".format(
+                width, value, sym.value - value))
         print(sym.line)
         value = sym.value + sym.size

     print()
     print(f"Total Thread-Local Storage size: {size} bytes")


 def after_fw_elf(source, target, env):
     fw_elf = str(target[0])
     print_tls_size(fw_elf)


 if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description="Calculate size of Thread-Local Storage")
-    parser.add_argument("fw_elf", metavar="ELF", type=str, help="Firmware ELF filename")
+    parser = argparse.ArgumentParser(
+        description="Calculate size of Thread-Local Storage")
+    parser.add_argument("fw_elf", metavar="ELF", type=str,
+                        help="Firmware ELF filename")

     args = parser.parse_args()
     print_tls_size(**vars(args))
 elif __name__ == "SCons.Script":
     Import("env")

     env.AddPostAction("${BUILD_DIR}/${PROGNAME}.elf", after_fw_elf)
@@ -3,6 +3,7 @@ import os

 Import("env")

+
 def buildWeb():
     os.chdir("interface")
     print("Building web interface...")
@@ -19,6 +20,7 @@ def buildWeb():
     finally:
         os.chdir("..")

+
 # Don't buuld webUI if called from GitHub Actions
 if "NO_BUILD_WEBUI" in os.environ:
     print("!! Skipping the build of the web interface !!")
scripts/echo_progress.py (new file, 29 lines)

@@ -0,0 +1,29 @@
+"""
+Print makefile progress
+From https://stackoverflow.com/questions/451413/make-makefile-progress-indication
+"""
+
+import argparse
+import math
+import sys
+
+
+def main():
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument("--stepno", type=int, required=True)
+    parser.add_argument("--nsteps", type=int, required=True)
+    parser.add_argument("remainder", nargs=argparse.REMAINDER)
+    args = parser.parse_args()
+
+    nchars = int(math.log(args.nsteps, 10)) + 1
+    fmt_str = "[{:Xd}/{:Xd}]({:6.2f}%)".replace("X", str(nchars))
+    progress = 100 * args.stepno / args.nsteps
+    sys.stdout.write(fmt_str.format(args.stepno, args.nsteps, progress))
+    for item in args.remainder:
+        sys.stdout.write(" ")
+        sys.stdout.write(item)
+    sys.stdout.write("\n")
+
+
+if __name__ == "__main__":
+    main()
@@ -18,7 +18,8 @@ import tempfile
 try:
     from parttool import PARTITION_TABLE_OFFSET, PartitionName, PartitionType, ParttoolTarget
 except ImportError:
-    COMPONENTS_PATH = os.path.expandvars(os.path.join('$IDF_PATH', 'components'))
+    COMPONENTS_PATH = os.path.expandvars(
+        os.path.join('$IDF_PATH', 'components'))
     PARTTOOL_DIR = os.path.join(COMPONENTS_PATH, 'partition_table')
     sys.path.append(PARTTOOL_DIR)
     from parttool import PARTITION_TABLE_OFFSET, PartitionName, PartitionType, ParttoolTarget
@@ -49,7 +50,8 @@ class OtatoolTarget():
         temp_file = tempfile.NamedTemporaryFile(delete=False)
         temp_file.close()
         try:
-            self.target.read_partition(OtatoolTarget.OTADATA_PARTITION, temp_file.name)
+            self.target.read_partition(
+                OtatoolTarget.OTADATA_PARTITION, temp_file.name)
             with open(temp_file.name, 'rb') as f:
                 self.otadata = f.read()
         finally:
@@ -101,7 +103,8 @@ class OtatoolTarget():
         ota_partitions = list()

         for i in range(gen.NUM_PARTITION_SUBTYPE_APP_OTA):
-            ota_partition = filter(lambda p: p.subtype == (gen.MIN_PARTITION_SUBTYPE_APP_OTA + i), partition_table)
+            ota_partition = filter(lambda p: p.subtype == (
+                gen.MIN_PARTITION_SUBTYPE_APP_OTA + i), partition_table)

             try:
                 ota_partitions.append(list(ota_partition)[0])
@@ -118,9 +121,11 @@ class OtatoolTarget():

         try:
             if isinstance(ota_id, int):
-                ota_partition_next = filter(lambda p: p.subtype - gen.MIN_PARTITION_SUBTYPE_APP_OTA == ota_id, ota_partitions)
+                ota_partition_next = filter(
+                    lambda p: p.subtype - gen.MIN_PARTITION_SUBTYPE_APP_OTA == ota_id, ota_partitions)
             else:
-                ota_partition_next = filter(lambda p: p.name == ota_id, ota_partitions)
+                ota_partition_next = filter(
+                    lambda p: p.name == ota_id, ota_partitions)

             ota_partition_next = list(ota_partition_next)[0]
         except IndexError:
@@ -173,7 +178,8 @@ class OtatoolTarget():

         try:
             with open(temp_file.name, 'wb') as otadata_next_file:
-                start = (1 if otadata_compute_base == 0 else 0) * (self.spi_flash_sec_size >> 1)
+                start = (1 if otadata_compute_base == 0 else 0) * \
+                    (self.spi_flash_sec_size >> 1)

                 otadata_next_file.write(self.otadata)

@@ -185,15 +191,18 @@ class OtatoolTarget():

                 otadata_next_file.flush()

-            self.target.write_partition(OtatoolTarget.OTADATA_PARTITION, temp_file.name)
+            self.target.write_partition(
+                OtatoolTarget.OTADATA_PARTITION, temp_file.name)
         finally:
             os.unlink(temp_file.name)

     def read_ota_partition(self, ota_id, output):
-        self.target.read_partition(self._get_partition_id_from_ota_id(ota_id), output)
+        self.target.read_partition(
+            self._get_partition_id_from_ota_id(ota_id), output)

     def write_ota_partition(self, ota_id, input):
-        self.target.write_partition(self._get_partition_id_from_ota_id(ota_id), input)
+        self.target.write_partition(
+            self._get_partition_id_from_ota_id(ota_id), input)

     def erase_ota_partition(self, ota_id):
         self.target.erase_partition(self._get_partition_id_from_ota_id(ota_id))
@@ -204,7 +213,8 @@ def _read_otadata(target):

    otadata_info = target._get_otadata_info()

-    print(' {:8s} \t {:8s} | \t {:8s} \t {:8s}'.format('OTA_SEQ', 'CRC', 'OTA_SEQ', 'CRC'))
+    print(' {:8s} \t {:8s} | \t {:8s} \t {:8s}'.format(
+        'OTA_SEQ', 'CRC', 'OTA_SEQ', 'CRC'))
     print('Firmware: 0x{:08x} \t0x{:08x} | \t0x{:08x} \t 0x{:08x}'.format(otadata_info[0].seq, otadata_info[0].crc,
                                                                           otadata_info[1].seq, otadata_info[1].crc))

@@ -238,46 +248,64 @@ def main():

     parser = argparse.ArgumentParser('ESP-IDF OTA Partitions Tool')

-    parser.add_argument('--quiet', '-q', help='suppress stderr messages', action='store_true')
-    parser.add_argument('--esptool-args', help='additional main arguments for esptool', nargs='+')
-    parser.add_argument('--esptool-write-args', help='additional subcommand arguments for esptool write_flash', nargs='+')
-    parser.add_argument('--esptool-read-args', help='additional subcommand arguments for esptool read_flash', nargs='+')
-    parser.add_argument('--esptool-erase-args', help='additional subcommand arguments for esptool erase_region', nargs='+')
+    parser.add_argument(
+        '--quiet', '-q', help='suppress stderr messages', action='store_true')
+    parser.add_argument(
+        '--esptool-args', help='additional main arguments for esptool', nargs='+')
+    parser.add_argument('--esptool-write-args',
+                        help='additional subcommand arguments for esptool write_flash', nargs='+')
+    parser.add_argument('--esptool-read-args',
+                        help='additional subcommand arguments for esptool read_flash', nargs='+')
+    parser.add_argument('--esptool-erase-args',
+                        help='additional subcommand arguments for esptool erase_region', nargs='+')

     # There are two possible sources for the partition table: a device attached to the host
     # or a partition table CSV/binary file. These sources are mutually exclusive.
-    parser.add_argument('--port', '-p', help='port where the device to read the partition table from is attached')
+    parser.add_argument(
+        '--port', '-p', help='port where the device to read the partition table from is attached')

     parser.add_argument('--baud', '-b', help='baudrate to use', type=int)

-    parser.add_argument('--partition-table-offset', '-o', help='offset to read the partition table from', type=str)
+    parser.add_argument('--partition-table-offset', '-o',
+                        help='offset to read the partition table from', type=str)

     parser.add_argument('--partition-table-file', '-f', help='file (CSV/binary) to read the partition table from; \
                         overrides device attached to specified port as the partition table source when defined')

-    subparsers = parser.add_subparsers(dest='operation', help='run otatool -h for additional help')
+    subparsers = parser.add_subparsers(
+        dest='operation', help='run otatool -h for additional help')

     spi_flash_sec_size = argparse.ArgumentParser(add_help=False)
-    spi_flash_sec_size.add_argument('--spi-flash-sec-size', help='value of SPI_FLASH_SEC_SIZE macro', type=str)
+    spi_flash_sec_size.add_argument(
+        '--spi-flash-sec-size', help='value of SPI_FLASH_SEC_SIZE macro', type=str)

     # Specify the supported operations
-    subparsers.add_parser('read_otadata', help='read otadata partition', parents=[spi_flash_sec_size])
+    subparsers.add_parser('read_otadata', help='read otadata partition', parents=[
+                          spi_flash_sec_size])
     subparsers.add_parser('erase_otadata', help='erase otadata partition')

     slot_or_name_parser = argparse.ArgumentParser(add_help=False)
     slot_or_name_parser_args = slot_or_name_parser.add_mutually_exclusive_group()
-    slot_or_name_parser_args.add_argument('--slot', help='slot number of the ota partition', type=int)
-    slot_or_name_parser_args.add_argument('--name', help='name of the ota partition')
+    slot_or_name_parser_args.add_argument(
+        '--slot', help='slot number of the ota partition', type=int)
+    slot_or_name_parser_args.add_argument(
+        '--name', help='name of the ota partition')

-    subparsers.add_parser('switch_ota_partition', help='switch otadata partition', parents=[slot_or_name_parser, spi_flash_sec_size])
+    subparsers.add_parser('switch_ota_partition', help='switch otadata partition', parents=[
+                          slot_or_name_parser, spi_flash_sec_size])

-    read_ota_partition_subparser = subparsers.add_parser('read_ota_partition', help='read contents of an ota partition', parents=[slot_or_name_parser])
-    read_ota_partition_subparser.add_argument('--output', help='file to write the contents of the ota partition to', required=True)
+    read_ota_partition_subparser = subparsers.add_parser(
+        'read_ota_partition', help='read contents of an ota partition', parents=[slot_or_name_parser])
+    read_ota_partition_subparser.add_argument(
+        '--output', help='file to write the contents of the ota partition to', required=True)

-    write_ota_partition_subparser = subparsers.add_parser('write_ota_partition', help='write contents to an ota partition', parents=[slot_or_name_parser])
-    write_ota_partition_subparser.add_argument('--input', help='file whose contents to write to the ota partition')
+    write_ota_partition_subparser = subparsers.add_parser(
+        'write_ota_partition', help='write contents to an ota partition', parents=[slot_or_name_parser])
+    write_ota_partition_subparser.add_argument(
+        '--input', help='file whose contents to write to the ota partition')

-    subparsers.add_parser('erase_ota_partition', help='erase contents of an ota partition', parents=[slot_or_name_parser])
+    subparsers.add_parser(
+        'erase_ota_partition', help='erase contents of an ota partition', parents=[slot_or_name_parser])

     args = parser.parse_args()

@@ -298,7 +326,8 @@ def main():
         target_args['partition_table_file'] = args.partition_table_file

     if args.partition_table_offset:
-        target_args['partition_table_offset'] = int(args.partition_table_offset, 0)
+        target_args['partition_table_offset'] = int(
+            args.partition_table_offset, 0)

     try:
         if args.spi_flash_sec_size:
@@ -324,7 +353,7 @@ def main():
     target = OtatoolTarget(**target_args)

     # Create the operation table and execute the operation
-    common_args = {'target':target}
+    common_args = {'target': target}

     ota_id = []

@@ -338,18 +367,18 @@ def main():
         pass

     otatool_ops = {
-        'read_otadata':(_read_otadata, []),
-        'erase_otadata':(_erase_otadata, []),
-        'switch_ota_partition':(_switch_ota_partition, ota_id),
-        'read_ota_partition':(_read_ota_partition, ['output'] + ota_id),
-        'write_ota_partition':(_write_ota_partition, ['input'] + ota_id),
-        'erase_ota_partition':(_erase_ota_partition, ota_id)
+        'read_otadata': (_read_otadata, []),
+        'erase_otadata': (_erase_otadata, []),
+        'switch_ota_partition': (_switch_ota_partition, ota_id),
+        'read_ota_partition': (_read_ota_partition, ['output'] + ota_id),
+        'write_ota_partition': (_write_ota_partition, ['input'] + ota_id),
+        'erase_ota_partition': (_erase_ota_partition, ota_id)
     }

     (op, op_args) = otatool_ops[args.operation]

     for op_arg in op_args:
-        common_args.update({op_arg:vars(args)[op_arg]})
+        common_args.update({op_arg: vars(args)[op_arg]})

     try:
         common_args['ota_id'] = common_args.pop('name')
@@ -161,6 +161,6 @@ def on_upload(source, target, env):

     print()


 if env.get('UPLOAD_PROTOCOL') == 'custom':
     env.Replace(UPLOADCMD=on_upload)
