This commit is contained in:
proddy
2025-03-22 10:32:03 +01:00
parent e418b7d8e7
commit eaa277fef0
281 changed files with 15297 additions and 21851 deletions

View File

@@ -26,79 +26,87 @@ import re
import subprocess
import sys
RE_ELF_SECTION = re.compile(
    r"^\s*(?P<type>\w+)\s+(?P<offset>\w+)\s+(?P<virtaddr>\w+)\s+(?P<physaddr>\w+)\s+(?P<filesiz>\w+)\s+(?P<memsiz>\w+)\s+(?P<ndx>\w+)\s+")

Symbol = collections.namedtuple("Symbol", ["value", "size", "line"])

RE_ELF_SYMBOL = re.compile(
    r"^(?P<before_value>\s*(?P<num>\w+):\s+)(?P<value>\w+)(?P<after_value>\s+(?P<size>\w+)\s+(?P<type>\w+)\s+(?P<bind>\w+)\s+(?P<visibility>\w+)\s+(?P<ndx>\w+)\s+(?P<name>\w+))")


def print_tls_size(fw_elf):
    tls_offset = None
    width = 8

    lines = subprocess.run(["readelf", "-W", "--program-headers", fw_elf],
                           check=True, universal_newlines=True, stdout=subprocess.PIPE
                           ).stdout.strip().split("\n")
    for line in lines:
        match = RE_ELF_SECTION.match(line)
        if match:
            if tls_offset is None and match["type"] == "TLS":
                tls_offset = int(match["virtaddr"], 16)

    header = True
    lines = subprocess.run(["readelf", "-W", "--syms", "--dyn-syms", fw_elf],
                           check=True, universal_newlines=True, stdout=subprocess.PIPE
                           ).stdout.strip().split("\n")
    syms = set()
    for line in lines:
        match = RE_ELF_SYMBOL.match(line)
        if match:
            header = False
            if match["type"] == "TLS":
                syms.add(
                    Symbol(int(match["value"], 16), int(match["size"]), line))
                width = len(match['value'])
            elif tls_offset is not None and (match["type"] == "NOTYPE" and match["bind"] == "GLOBAL"
                                             and match["visibility"] == "DEFAULT"
                                             and match["name"] in set(["_thread_local_start", "_thread_local_end"])
                                             ):
                value = int(match["value"], 16) - tls_offset
                line = ("{1}{2:0{0}x}{3}").format(len(match['value']),
                                                  match["before_value"], value, match["after_value"])
                syms.add(Symbol(value, int(match["size"]), line))
        elif header:
            print(line)

    if syms:
        syms = list(syms)
        syms.sort()
        size = (syms[-1].value + syms[-1].size) - syms[0].value
    else:
        size = 0

    value = syms[0].value
    for sym in syms:
        if sym.value > value:
            print("\t{1:0{0}x} {2:5d} TLS UNKNOWN".format(
                width, value, sym.value - value))
        print(sym.line)
        value = sym.value + sym.size

    print()
    print(f"Total Thread-Local Storage size: {size} bytes")


def after_fw_elf(source, target, env):
    fw_elf = str(target[0])
    print_tls_size(fw_elf)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Calculate size of Thread-Local Storage")
    parser.add_argument("fw_elf", metavar="ELF", type=str,
                        help="Firmware ELF filename")
    args = parser.parse_args()
    print_tls_size(**vars(args))
elif __name__ == "SCons.Script":
    Import("env")
    env.AddPostAction("${BUILD_DIR}/${PROGNAME}.elf", after_fw_elf)
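
For reference, the script can also be run standalone against a firmware ELF; the invocation below is illustrative, since the script's filename and the build output path are not shown on this page:

python3 scripts/show_tls_size.py .pio/build/s3_16M_P/firmware.elf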

View File

@@ -21,5 +21,9 @@ def buildWeb():
os.chdir("..")
# Don't build webUI if called from GitHub Actions
if "NO_BUILD_WEBUI" in os.environ:
    print("!! Skipping the build of the web interface !!")
else:
    if not (env.IsCleanTarget()):
        buildWeb()
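
As a usage sketch, a CI job can skip the web build by exporting the flag before invoking PlatformIO (the environment name here is illustrative):

NO_BUILD_WEBUI=1 pio run -e s3_16M_P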

scripts/echo_progress.py Normal file
View File

@@ -0,0 +1,29 @@
"""
Print makefile progress
From https://stackoverflow.com/questions/451413/make-makefile-progress-indication
"""
import argparse
import math
import sys
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("--stepno", type=int, required=True)
parser.add_argument("--nsteps", type=int, required=True)
parser.add_argument("remainder", nargs=argparse.REMAINDER)
args = parser.parse_args()
nchars = int(math.log(args.nsteps, 10)) + 1
fmt_str = "[{:Xd}/{:Xd}]({:6.2f}%)".replace("X", str(nchars))
progress = 100 * args.stepno / args.nsteps
sys.stdout.write(fmt_str.format(args.stepno, args.nsteps, progress))
for item in args.remainder:
sys.stdout.write(" ")
sys.stdout.write(item)
sys.stdout.write("\n")
if __name__ == "__main__":
main()
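
A quick sanity check of the progress format, with the output computed from the code above:

$ python3 scripts/echo_progress.py --stepno 3 --nsteps 29 Compiling main.cpp
[ 3/29]( 10.34%) Compiling main.cpp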

View File

@@ -1,11 +1,11 @@
#!/bin/sh
#
# Builds the dump_*.csv files, modbus headers and modbus documentation.
# Run as `sh scripts/generate_csv_and_headers.sh` from the root of the repository.

# create a dummy modbus_entity_parameters.hpp so the first pass compiles
cat >./src/core/modbus_entity_parameters.hpp <<EOL
#include "modbus.h"
#include "emsdevice.h"
@@ -31,29 +31,29 @@ const std::initializer_list<Modbus::EntityModbusInfo> Modbus::modbus_register_ma
EOL
# First generate Modbus entity parameters
# build the modbus_entity_parameters.hpp header file
make clean
make -s ARGS=-DEMSESP_MODBUS
rm -f ./src/core/modbus_entity_parameters.hpp ./docs/dump_entities.csv
echo "test entity_dump" | ./emsesp | python3 ./scripts/strip_csv.py > ./docs/dump_entities.csv
cat ./docs/dump_entities.csv | python3 ./scripts/update_modbus_registers.py > ./src/core/modbus_entity_parameters.hpp

# regenerate dump_entities.csv but without the Modbus entity parameters
make clean
make -s ARGS=-DEMSESP_STANDALONE
echo "test entity_dump" | ./emsesp | python3 ./scripts/strip_csv.py > ./docs/dump_entities.csv

# generate Modbus doc - Modbus-Entity-Registers.md used in the emsesp.org documentation
rm -f ./docs/Modbus-Entity-Registers.md
cat ./docs/dump_entities.csv | python3 ./scripts/generate-modbus-register-doc.py > ./docs/Modbus-Entity-Registers.md

# dump_telegrams.csv
rm -f ./docs/dump_telegrams.csv
echo "test telegram_dump" | ./emsesp | python3 ./scripts/strip_csv.py > ./docs/dump_telegrams.csv

ls -al ./src/core/modbus_entity_parameters.hpp
ls -al ./docs/Modbus-Entity-Registers.md
ls -al ./docs/dump_entities.csv
ls -al ./docs/dump_telegrams.csv

scripts/otatool.py Normal file
View File

@@ -0,0 +1,402 @@
#!/usr/bin/env python
#
# otatool is used to perform ota-level operations - flashing ota partition,
# erasing ota partition and switching ota partition
#
# SPDX-FileCopyrightText: 2018-2021 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from __future__ import division, print_function
import argparse
import binascii
import collections
import os
import struct
import sys
import tempfile
try:
from parttool import PARTITION_TABLE_OFFSET, PartitionName, PartitionType, ParttoolTarget
except ImportError:
COMPONENTS_PATH = os.path.expandvars(
os.path.join('$IDF_PATH', 'components'))
PARTTOOL_DIR = os.path.join(COMPONENTS_PATH, 'partition_table')
sys.path.append(PARTTOOL_DIR)
from parttool import PARTITION_TABLE_OFFSET, PartitionName, PartitionType, ParttoolTarget
__version__ = '2.0'
SPI_FLASH_SEC_SIZE = 0x2000
quiet = False
def status(msg):
if not quiet:
print(msg)
class OtatoolTarget():
OTADATA_PARTITION = PartitionType('data', 'ota')
def __init__(self, port=None, baud=None, partition_table_offset=PARTITION_TABLE_OFFSET, partition_table_file=None,
spi_flash_sec_size=SPI_FLASH_SEC_SIZE, esptool_args=[], esptool_write_args=[],
esptool_read_args=[], esptool_erase_args=[]):
self.target = ParttoolTarget(port, baud, partition_table_offset, partition_table_file, esptool_args,
esptool_write_args, esptool_read_args, esptool_erase_args)
self.spi_flash_sec_size = spi_flash_sec_size
temp_file = tempfile.NamedTemporaryFile(delete=False)
temp_file.close()
try:
self.target.read_partition(
OtatoolTarget.OTADATA_PARTITION, temp_file.name)
with open(temp_file.name, 'rb') as f:
self.otadata = f.read()
finally:
os.unlink(temp_file.name)
def _check_otadata_partition(self):
if not self.otadata:
raise Exception('No otadata partition found')
def erase_otadata(self):
self._check_otadata_partition()
self.target.erase_partition(OtatoolTarget.OTADATA_PARTITION)
def _get_otadata_info(self):
info = []
otadata_info = collections.namedtuple('otadata_info', 'seq crc')
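# the otadata partition stores two copies of the entry, one per half of
# spi_flash_sec_size; each copy keeps a 32-bit OTA sequence number at
# offset 0 and its CRC32 at offset 28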
for i in range(2):
start = i * (self.spi_flash_sec_size >> 1)
seq = bytearray(self.otadata[start:start + 4])
crc = bytearray(self.otadata[start + 28:start + 32])
seq = struct.unpack('I', seq)
crc = struct.unpack('I', crc)
info.append(otadata_info(seq[0], crc[0]))
return info
def _get_partition_id_from_ota_id(self, ota_id):
if isinstance(ota_id, int):
return PartitionType('app', 'ota_' + str(ota_id))
else:
return PartitionName(ota_id)
def switch_ota_partition(self, ota_id):
self._check_otadata_partition()
import gen_esp32part as gen
def is_otadata_info_valid(status):
seq = status.seq % (1 << 32)
crc = binascii.crc32(struct.pack('I', seq), 0xFFFFFFFF) % (1 << 32)
return seq < (int('0xFFFFFFFF', 16) % (1 << 32)) and status.crc == crc
partition_table = self.target.partition_table
ota_partitions = list()
for i in range(gen.NUM_PARTITION_SUBTYPE_APP_OTA):
ota_partition = filter(lambda p: p.subtype == (
gen.MIN_PARTITION_SUBTYPE_APP_OTA + i), partition_table)
try:
ota_partitions.append(list(ota_partition)[0])
except IndexError:
break
ota_partitions = sorted(ota_partitions, key=lambda p: p.subtype)
if not ota_partitions:
raise Exception('No ota app partitions found')
# Look for the app partition to switch to
ota_partition_next = None
try:
if isinstance(ota_id, int):
ota_partition_next = filter(
lambda p: p.subtype - gen.MIN_PARTITION_SUBTYPE_APP_OTA == ota_id, ota_partitions)
else:
ota_partition_next = filter(
lambda p: p.name == ota_id, ota_partitions)
ota_partition_next = list(ota_partition_next)[0]
except IndexError:
raise Exception('Partition to switch to not found')
otadata_info = self._get_otadata_info()
# Find the copy to base the computation for ota sequence number on
otadata_compute_base = -1
# Both are valid, take the max as computation base
if is_otadata_info_valid(otadata_info[0]) and is_otadata_info_valid(otadata_info[1]):
if otadata_info[0].seq >= otadata_info[1].seq:
otadata_compute_base = 0
else:
otadata_compute_base = 1
# Only one copy is valid, use that
elif is_otadata_info_valid(otadata_info[0]):
otadata_compute_base = 0
elif is_otadata_info_valid(otadata_info[1]):
otadata_compute_base = 1
# Both are invalid (could be initial state - all 0xFF's)
else:
pass
ota_seq_next = 0
ota_partitions_num = len(ota_partitions)
target_seq = (ota_partition_next.subtype & 0x0F) + 1
# Find the next ota sequence number
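# when a valid copy exists, advance in whole multiples of the OTA partition
# count until the value is at least the current sequence number, keeping the
# remainder that selects the target slot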
if otadata_compute_base == 0 or otadata_compute_base == 1:
base_seq = otadata_info[otadata_compute_base].seq % (1 << 32)
i = 0
while base_seq > target_seq % ota_partitions_num + i * ota_partitions_num:
i += 1
ota_seq_next = target_seq % ota_partitions_num + i * ota_partitions_num
else:
ota_seq_next = target_seq
# Create binary data from computed values
ota_seq_next = struct.pack('I', ota_seq_next)
ota_seq_crc_next = binascii.crc32(ota_seq_next, 0xFFFFFFFF) % (1 << 32)
ota_seq_crc_next = struct.pack('I', ota_seq_crc_next)
temp_file = tempfile.NamedTemporaryFile(delete=False)
temp_file.close()
try:
with open(temp_file.name, 'wb') as otadata_next_file:
start = (1 if otadata_compute_base == 0 else 0) * \
(self.spi_flash_sec_size >> 1)
otadata_next_file.write(self.otadata)
otadata_next_file.seek(start)
otadata_next_file.write(ota_seq_next)
otadata_next_file.seek(start + 28)
otadata_next_file.write(ota_seq_crc_next)
otadata_next_file.flush()
self.target.write_partition(
OtatoolTarget.OTADATA_PARTITION, temp_file.name)
finally:
os.unlink(temp_file.name)
def read_ota_partition(self, ota_id, output):
self.target.read_partition(
self._get_partition_id_from_ota_id(ota_id), output)
def write_ota_partition(self, ota_id, input):
self.target.write_partition(
self._get_partition_id_from_ota_id(ota_id), input)
def erase_ota_partition(self, ota_id):
self.target.erase_partition(self._get_partition_id_from_ota_id(ota_id))
def _read_otadata(target):
target._check_otadata_partition()
otadata_info = target._get_otadata_info()
print(' {:8s} \t {:8s} | \t {:8s} \t {:8s}'.format(
'OTA_SEQ', 'CRC', 'OTA_SEQ', 'CRC'))
print('Firmware: 0x{:08x} \t0x{:08x} | \t0x{:08x} \t 0x{:08x}'.format(otadata_info[0].seq, otadata_info[0].crc,
otadata_info[1].seq, otadata_info[1].crc))
def _erase_otadata(target):
target.erase_otadata()
status('Erased ota_data partition contents')
def _switch_ota_partition(target, ota_id):
target.switch_ota_partition(ota_id)
def _read_ota_partition(target, ota_id, output):
target.read_ota_partition(ota_id, output)
status('Read ota partition contents to file {}'.format(output))
def _write_ota_partition(target, ota_id, input):
target.write_ota_partition(ota_id, input)
status('Written contents of file {} to ota partition'.format(input))
def _erase_ota_partition(target, ota_id):
target.erase_ota_partition(ota_id)
status('Erased contents of ota partition')
def main():
global quiet
parser = argparse.ArgumentParser('ESP-IDF OTA Partitions Tool')
parser.add_argument(
'--quiet', '-q', help='suppress stderr messages', action='store_true')
parser.add_argument(
'--esptool-args', help='additional main arguments for esptool', nargs='+')
parser.add_argument('--esptool-write-args',
help='additional subcommand arguments for esptool write_flash', nargs='+')
parser.add_argument('--esptool-read-args',
help='additional subcommand arguments for esptool read_flash', nargs='+')
parser.add_argument('--esptool-erase-args',
help='additional subcommand arguments for esptool erase_region', nargs='+')
# There are two possible sources for the partition table: a device attached to the host
# or a partition table CSV/binary file. These sources are mutually exclusive.
parser.add_argument(
'--port', '-p', help='port where the device to read the partition table from is attached')
parser.add_argument('--baud', '-b', help='baudrate to use', type=int)
parser.add_argument('--partition-table-offset', '-o',
help='offset to read the partition table from', type=str)
parser.add_argument('--partition-table-file', '-f', help='file (CSV/binary) to read the partition table from; \
overrides device attached to specified port as the partition table source when defined')
subparsers = parser.add_subparsers(
dest='operation', help='run otatool -h for additional help')
spi_flash_sec_size = argparse.ArgumentParser(add_help=False)
spi_flash_sec_size.add_argument(
'--spi-flash-sec-size', help='value of SPI_FLASH_SEC_SIZE macro', type=str)
# Specify the supported operations
subparsers.add_parser('read_otadata', help='read otadata partition', parents=[
spi_flash_sec_size])
subparsers.add_parser('erase_otadata', help='erase otadata partition')
slot_or_name_parser = argparse.ArgumentParser(add_help=False)
slot_or_name_parser_args = slot_or_name_parser.add_mutually_exclusive_group()
slot_or_name_parser_args.add_argument(
'--slot', help='slot number of the ota partition', type=int)
slot_or_name_parser_args.add_argument(
'--name', help='name of the ota partition')
subparsers.add_parser('switch_ota_partition', help='switch otadata partition', parents=[
slot_or_name_parser, spi_flash_sec_size])
read_ota_partition_subparser = subparsers.add_parser(
'read_ota_partition', help='read contents of an ota partition', parents=[slot_or_name_parser])
read_ota_partition_subparser.add_argument(
'--output', help='file to write the contents of the ota partition to', required=True)
write_ota_partition_subparser = subparsers.add_parser(
'write_ota_partition', help='write contents to an ota partition', parents=[slot_or_name_parser])
write_ota_partition_subparser.add_argument(
'--input', help='file whose contents to write to the ota partition')
subparsers.add_parser(
'erase_ota_partition', help='erase contents of an ota partition', parents=[slot_or_name_parser])
args = parser.parse_args()
quiet = args.quiet
# No operation specified, display help and exit
if args.operation is None:
if not quiet:
parser.print_help()
sys.exit(1)
target_args = {}
if args.port:
target_args['port'] = args.port
if args.partition_table_file:
target_args['partition_table_file'] = args.partition_table_file
if args.partition_table_offset:
target_args['partition_table_offset'] = int(
args.partition_table_offset, 0)
try:
if args.spi_flash_sec_size:
target_args['spi_flash_sec_size'] = int(args.spi_flash_sec_size, 0)
except AttributeError:
pass
if args.esptool_args:
target_args['esptool_args'] = args.esptool_args
if args.esptool_write_args:
target_args['esptool_write_args'] = args.esptool_write_args
if args.esptool_read_args:
target_args['esptool_read_args'] = args.esptool_read_args
if args.esptool_erase_args:
target_args['esptool_erase_args'] = args.esptool_erase_args
if args.baud:
target_args['baud'] = args.baud
target = OtatoolTarget(**target_args)
# Create the operation table and execute the operation
common_args = {'target': target}
ota_id = []
try:
if args.name is not None:
ota_id = ['name']
else:
if args.slot is not None:
ota_id = ['slot']
except AttributeError:
pass
otatool_ops = {
'read_otadata': (_read_otadata, []),
'erase_otadata': (_erase_otadata, []),
'switch_ota_partition': (_switch_ota_partition, ota_id),
'read_ota_partition': (_read_ota_partition, ['output'] + ota_id),
'write_ota_partition': (_write_ota_partition, ['input'] + ota_id),
'erase_ota_partition': (_erase_ota_partition, ota_id)
}
(op, op_args) = otatool_ops[args.operation]
for op_arg in op_args:
common_args.update({op_arg: vars(args)[op_arg]})
try:
common_args['ota_id'] = common_args.pop('name')
except KeyError:
try:
common_args['ota_id'] = common_args.pop('slot')
except KeyError:
pass
if quiet:
# If exceptions occur, suppress and exit quietly
try:
op(**common_args)
except Exception:
sys.exit(2)
else:
op(**common_args)
if __name__ == '__main__':
main()
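
Typical invocations matching the subcommands defined above (port and slot values are illustrative):

python otatool.py --port /dev/ttyUSB0 read_otadata
python otatool.py --port /dev/ttyUSB0 switch_ota_partition --slot 1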

View File

@@ -100,17 +100,17 @@ def bin_copy(source, target, env):
file1.close()
# make a copy using the old 3.6.x filename format for backwards compatibility with the WebUI version check, e.g.
# create a EMS-ESP-<version>-ESP32_S3.bin if target is s3_16M_P (16MB, PSRAM)
# create a EMS-ESP-<version>-ESP32.bin if target is s_4M (4MB, no PSRAM), compatible only with S32 V1 and E32 V1.0,1.4,1.5
#
# Note: there is a chance newer E32V2s (which use the 16MB partition table and PSRAM) are running a custom build
# of the 3.6.5 firmware as 3.6.5 was released before production of the gateway board. Updating via the WebUI will break the system and require a manual update.
#
extra_variant = ""
if env.get('PIOENV') == "s3_16M_P":
extra_variant = "EMS-ESP-" + \
app_version.replace(".", "_") + "-ESP32_S3"
elif env.get('PIOENV') == "s_4M":
extra_variant = "EMS-ESP-" + app_version.replace(".", "_") + "-ESP32"
if extra_variant:

scripts/show_mem.sh Normal file
View File

@@ -0,0 +1,13 @@
#!/bin/sh
echo -n "Arduino: "
curl -s http://10.10.10.93/api/system/system/arduino | jq -r '.value'
echo -n "SDK: "
curl -s http://10.10.10.93/api/system/system/sdk | jq -r '.value'
echo -n "Free heap: "
curl -s http://10.10.10.93/api/system/system/freeMem | jq -r '.value'
echo -n "Max Alloc: "
curl -s http://10.10.10.93/api/system/system/maxAlloc | jq -r '.value'
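
The device IP is hard-coded; a hypothetical variant that takes it as a first argument instead:

IP=${1:-10.10.10.93}
curl -s "http://$IP/api/system/system/freeMem" | jq -r '.value'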

View File

@@ -1,5 +1,5 @@
# strips out lines between two markers
# pipe a file into it, for example: 'cat x | python3 strip_csv.py'
import fileinput
with fileinput.input() as f_input:
inRecordingMode = False

View File

@@ -1,11 +1,12 @@
#!/bin/sh
# run from root folder, like `sh ./scripts/update_all.sh`
# make sure ncu is installed globally (https://github.com/raineorshine/npm-check-updates)
# as well as GNUMake (make) and python3
cd interface
rm -rf yarn.lock node_modules
touch yarn.lock
ncu -u
yarn set version stable
yarn
@@ -14,6 +15,7 @@ yarn lint
cd ../mock-api
rm -rf yarn.lock node_modules
touch yarn.lock
ncu -u
yarn set version stable
yarn

View File

@@ -140,7 +140,7 @@ cpp_entry_template = Template(
# read translations
listNames = {}
transre = re.compile(r'^MAKE_TRANSLATION\(([^,\s]+)\s*,\s*\"([^\"]+)\"')
transf = open('./src/core/locale_translations.h', 'r')
while True:
line = transf.readline()
if not line:

View File

@@ -19,6 +19,8 @@ import requests
import hashlib
from urllib.parse import urlparse
import time
import os
Import("env")
try:
@@ -65,7 +67,7 @@ def on_upload(source, target, env):
'Host': host_ip,
'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/118.0',
'Accept': '*/*',
'Accept-Language': 'en-US',
'Accept-Encoding': 'gzip, deflate',
'Referer': f'{emsesp_url}',
'Content-Type': 'application/json',
@@ -116,7 +118,7 @@ def on_upload(source, target, env):
'Host': host_ip,
'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/118.0',
'Accept': '*/*',
'Accept-Language': 'en-US',
'Accept-Encoding': 'gzip, deflate',
'Referer': f'{emsesp_url}',
'Connection': 'keep-alive',
@@ -144,7 +146,7 @@ def on_upload(source, target, env):
'Host': host_ip,
'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/118.0',
'Accept': '*/*',
'Accept-Language': 'en-US',
'Accept-Encoding': 'gzip, deflate',
'Referer': f'{emsesp_url}',
'Content-Type': 'application/json',
@@ -160,4 +162,5 @@ def on_upload(source, target, env):
print()
if env.get('UPLOAD_PROTOCOL') == 'custom':
    env.Replace(UPLOADCMD=on_upload)

View File

@@ -41,7 +41,7 @@ def upload(file, ip, username, password):
'Host': host_ip,
'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/118.0',
'Accept': '*/*',
'Accept-Language': 'en-US',
'Accept-Encoding': 'gzip, deflate',
'Referer': f'{emsesp_url}',
'Content-Type': 'application/json',
@@ -90,7 +90,7 @@ def upload(file, ip, username, password):
'Host': host_ip,
'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/118.0',
'Accept': '*/*',
'Accept-Language': 'en-US',
'Accept-Encoding': 'gzip, deflate',
'Referer': f'{emsesp_url}',
'Connection': 'keep-alive',
@@ -116,7 +116,7 @@ def upload(file, ip, username, password):
'Host': host_ip,
'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/118.0',
'Accept': '*/*',
'Accept-Language': 'en-US',
'Accept-Encoding': 'gzip, deflate',
'Referer': f'{emsesp_url}',
'Content-Type': 'application/json',