Merge branch 'develop' into pioarduino

Austin, 2026-01-13 12:05:06 -05:00 (committed by GitHub)
188 changed files with 5208 additions and 273 deletions

View File

@@ -38,4 +38,4 @@ cp bin/device-install.* $OUTDIR/
cp bin/device-update.* $OUTDIR/
echo "Copying manifest"
-cp $BUILDDIR/$basename.mt.json $OUTDIR/$basename.mt.json
+cp $BUILDDIR/$basename.mt.json $OUTDIR/$basename.mt.json || true

View File

@@ -49,4 +49,4 @@ if (echo $1 | grep -q "rak4631"); then
fi
echo "Copying manifest"
-cp $BUILDDIR/$basename.mt.json $OUTDIR/$basename.mt.json
+cp $BUILDDIR/$basename.mt.json $OUTDIR/$basename.mt.json || true

View File

@@ -30,4 +30,4 @@ echo "Copying uf2 file"
cp $BUILDDIR/$basename.uf2 $OUTDIR/$basename.uf2
echo "Copying manifest"
-cp $BUILDDIR/$basename.mt.json $OUTDIR/$basename.mt.json
+cp $BUILDDIR/$basename.mt.json $OUTDIR/$basename.mt.json || true

View File

@@ -30,4 +30,4 @@ echo "Copying STM32 bin file"
cp $BUILDDIR/$basename.bin $OUTDIR/$basename.bin
echo "Copying manifest"
-cp $BUILDDIR/$basename.mt.json $OUTDIR/$basename.mt.json
+cp $BUILDDIR/$basename.mt.json $OUTDIR/$basename.mt.json || true

View File

@@ -105,6 +105,8 @@ Lora:
GPS:
  # SerialPath: /dev/ttyS0
  # ExtraPins:
  #   - 22
### Specify I2C device, or leave blank for none

View File

@@ -0,0 +1,11 @@
Lora:
  ### RAK13300 in Slot 1
  Module: sx1262
  IRQ: 22 # IO6
  Reset: 16 # IO4
  Busy: 24 # IO5
  # Ant_sw: 13 # IO3
  DIO3_TCXO_VOLTAGE: true
  DIO2_AS_RF_SWITCH: true
  spidev: spidev0.0

View File

@@ -0,0 +1,14 @@
Lora:
  Module: sx1262
  CS: 0
  IRQ: 6
  Reset: 2
  Busy: 4
  RXen: 1
  DIO2_AS_RF_SWITCH: true
  spidev: ch341
  DIO3_TCXO_VOLTAGE: true
  # USB_Serialnum: 12345678
  USB_PID: 0x5512
  USB_VID: 0x1A86
  SX126X_MAX_POWER: 22

View File

@@ -0,0 +1,15 @@
Lora:
  Module: sx1262
  CS: 0
  IRQ: 6
  Reset: 1
  Busy: 4
  RXen: 2
  DIO2_AS_RF_SWITCH: true
  spidev: ch341
  USB_PID: 0x5512
  USB_VID: 0x1A86
  DIO3_TCXO_VOLTAGE: true
  # USB_Serialnum: 12345678
  SX126X_MAX_POWER: 30
  # Reduce output power to improve EMI

View File

@@ -0,0 +1,15 @@
Lora:
  Module: sx1268
  CS: 0
  IRQ: 6
  Reset: 1
  Busy: 4
  RXen: 2
  DIO2_AS_RF_SWITCH: true
  spidev: ch341
  USB_PID: 0x5512
  USB_VID: 0x1A86
  DIO3_TCXO_VOLTAGE: true
  # USB_Serialnum: 12345678
  SX126X_MAX_POWER: 30
  # Reduce output power to improve EMI
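Note: these Lora blocks read as drop-in fragments for meshtasticd, which merges YAML placed under /etc/meshtasticd/config.d (the same directory the instance start script later in this commit resolves configs from). Assuming one of them is saved under an illustrative name, enabling it would look like:

sudo cp sx1262-ch341.yaml /etc/meshtasticd/config.d/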

bin/generate_release_notes.py (Executable file, 355 lines)
View File

@@ -0,0 +1,355 @@
#!/usr/bin/env python3
"""
Generate release notes from merged PRs on develop and master branches.
Categorizes PRs into Enhancements and Bug Fixes/Maintenance sections.
"""
import subprocess
import re
import json
import sys
from datetime import datetime


def get_last_release_tag():
    """Get the most recent release tag."""
    result = subprocess.run(
        ["git", "describe", "--tags", "--abbrev=0"],
        capture_output=True,
        text=True,
        check=True,
    )
    return result.stdout.strip()


def get_tag_date(tag):
    """Get the commit date (ISO 8601) of the tag."""
    result = subprocess.run(
        ["git", "show", "-s", "--format=%cI", tag],
        capture_output=True,
        text=True,
        check=True,
    )
    return result.stdout.strip()


def get_merged_prs_since_tag(tag, branch):
    """Get all merged PRs since the given tag on the specified branch."""
    # Get commits since tag on the branch - look for PR numbers in parentheses
    result = subprocess.run(
        [
            "git",
            "log",
            f"{tag}..origin/{branch}",
            "--oneline",
        ],
        capture_output=True,
        text=True,
    )
    prs = []
    seen_pr_numbers = set()
    for line in result.stdout.strip().split("\n"):
        if not line:
            continue
        # Extract PR number from commit message - format: "Title (#1234)"
        pr_match = re.search(r"\(#(\d+)\)", line)
        if pr_match:
            pr_number = pr_match.group(1)
            if pr_number not in seen_pr_numbers:
                seen_pr_numbers.add(pr_number)
                prs.append(pr_number)
    return prs


def get_pr_details(pr_number):
    """Get PR details from GitHub API via gh CLI."""
    try:
        result = subprocess.run(
            [
                "gh",
                "pr",
                "view",
                pr_number,
                "--json",
                "title,author,labels,url",
            ],
            capture_output=True,
            text=True,
            check=True,
        )
        return json.loads(result.stdout)
    except subprocess.CalledProcessError:
        return None


def should_exclude_pr(pr_details):
    """Check if PR should be excluded from release notes."""
    if not pr_details:
        return True
    title = pr_details.get("title", "").lower()
    # Exclude trunk update PRs
    if "upgrade trunk" in title or "update trunk" in title or "trunk update" in title:
        return True
    # Exclude protobuf update PRs
    if "update protobufs" in title or "update protobuf" in title:
        return True
    # Exclude automated version bump PRs
    if "bump release version" in title or "bump version" in title:
        return True
    return False


def is_dependency_update(pr_details):
    """Check if PR is a dependency/chore update."""
    if not pr_details:
        return False
    title = pr_details.get("title", "").lower()
    author = pr_details.get("author", {}).get("login", "").lower()
    labels = [label.get("name", "").lower() for label in pr_details.get("labels", [])]
    # Check for renovate or dependabot authors
    if "renovate" in author or "dependabot" in author:
        return True
    # Check for chore(deps) pattern
    if re.match(r"^chore\(deps\):", title):
        return True
    # Check for digest update patterns
    if re.match(r".*digest to [a-f0-9]+", title, re.IGNORECASE):
        return True
    # Check for dependency-related labels
    dependency_labels = ["dependencies", "deps", "renovate"]
    if any(dep in label for label in labels for dep in dependency_labels):
        return True
    return False


def is_enhancement(pr_details):
    """Determine if PR is an enhancement based on labels and title."""
    labels = [label.get("name", "").lower() for label in pr_details.get("labels", [])]
    # Check labels first
    enhancement_labels = ["enhancement", "feature", "feat", "new feature"]
    for label in labels:
        if any(enh in label for enh in enhancement_labels):
            return True
    # Check title prefixes
    title = pr_details.get("title", "")
    enhancement_prefixes = ["feat:", "feature:", "add:"]
    title_lower = title.lower()
    for prefix in enhancement_prefixes:
        if title_lower.startswith(prefix) or f" {prefix}" in title_lower:
            return True
    return False


def clean_title(title):
    """Clean up PR title for release notes."""
    # Remove common prefixes
    prefixes_to_remove = [
        r"^fix:\s*",
        r"^feat:\s*",
        r"^feature:\s*",
        r"^bug:\s*",
        r"^bugfix:\s*",
        r"^chore:\s*",
        r"^chore\([^)]+\):\s*",
        r"^refactor:\s*",
        r"^docs:\s*",
        r"^ci:\s*",
        r"^build:\s*",
        r"^perf:\s*",
        r"^style:\s*",
        r"^test:\s*",
    ]
    cleaned = title
    for prefix in prefixes_to_remove:
        cleaned = re.sub(prefix, "", cleaned, flags=re.IGNORECASE)
    # Ensure first letter is capitalized
    if cleaned:
        cleaned = cleaned[0].upper() + cleaned[1:]
    return cleaned.strip()


def format_pr_line(pr_details):
    """Format a PR as a markdown bullet point."""
    title = clean_title(pr_details.get("title", "Unknown"))
    author = pr_details.get("author", {}).get("login", "unknown")
    url = pr_details.get("url", "")
    return f"- {title} by @{author} in {url}"


def get_new_contributors(pr_details_list, tag, repo="meshtastic/firmware"):
    """Find contributors whose first merged PR lands in this release.

    GitHub usernames do not necessarily match git commit authors, so we use the
    GitHub search API via `gh` to see if the user has any merged PRs before the
    tag date. This mirrors how GitHub's "Generate release notes" feature works.
    """
    bot_authors = {"github-actions", "renovate", "dependabot", "app/renovate", "app/github-actions", "app/dependabot"}
    new_contributors = []
    seen_authors = set()
    try:
        tag_date = get_tag_date(tag)
    except subprocess.CalledProcessError:
        print(f"Warning: Could not determine tag date for {tag}; skipping new contributor detection", file=sys.stderr)
        return []
    for pr in pr_details_list:
        author = pr.get("author", {}).get("login", "")
        if not author or author in seen_authors:
            continue
        # Skip bots
        if author.lower() in bot_authors or author.startswith("app/"):
            continue
        seen_authors.add(author)
        try:
            # Search for merged PRs by this author created before the tag date
            search_query = f"is:pr author:{author} repo:{repo} closed:<=\"{tag_date}\""
            search = subprocess.run(
                [
                    "gh",
                    "search",
                    "issues",
                    "--json",
                    "number,mergedAt,createdAt",
                    "--state",
                    "closed",
                    "--limit",
                    "200",
                    search_query,
                ],
                capture_output=True,
                text=True,
            )
            if search.returncode != 0:
                # If gh fails, be conservative and skip adding to new contributors
                print(f"Warning: gh search failed for author {author}: {search.stderr.strip()}", file=sys.stderr)
                continue
            results = json.loads(search.stdout or "[]")
            # If any merged PR exists before or on tag date, not a new contributor
            had_prior_pr = any(item.get("mergedAt") for item in results)
            if not had_prior_pr:
                new_contributors.append((author, pr.get("url", "")))
        except Exception as e:
            print(f"Warning: Could not check contributor history for {author}: {e}", file=sys.stderr)
            continue
    return new_contributors


def main():
    if len(sys.argv) < 2:
        print("Usage: generate_release_notes.py <new_version>", file=sys.stderr)
        sys.exit(1)
    new_version = sys.argv[1]
    # Get last release tag
    try:
        last_tag = get_last_release_tag()
    except subprocess.CalledProcessError:
        print("Error: Could not find last release tag", file=sys.stderr)
        sys.exit(1)
    # Collect PRs from both branches
    all_pr_numbers = set()
    for branch in ["develop", "master"]:
        try:
            prs = get_merged_prs_since_tag(last_tag, branch)
            all_pr_numbers.update(prs)
        except Exception as e:
            print(f"Warning: Could not get PRs from {branch}: {e}", file=sys.stderr)
    # Get details for all PRs
    enhancements = []
    bug_fixes = []
    dependencies = []
    all_pr_details = []
    for pr_number in sorted(all_pr_numbers, key=int):
        details = get_pr_details(pr_number)
        if details and not should_exclude_pr(details):
            all_pr_details.append(details)
            if is_dependency_update(details):
                dependencies.append(details)
            elif is_enhancement(details):
                enhancements.append(details)
            else:
                bug_fixes.append(details)
    # Generate release notes
    output = []
    if enhancements:
        output.append("## 🚀 Enhancements\n")
        for pr in enhancements:
            output.append(format_pr_line(pr))
        output.append("")
    if bug_fixes:
        output.append("## 🐛 Bug fixes and maintenance\n")
        for pr in bug_fixes:
            output.append(format_pr_line(pr))
        output.append("")
    if dependencies:
        output.append("## ⚙️ Dependencies\n")
        for pr in dependencies:
            output.append(format_pr_line(pr))
        output.append("")
    # Find new contributors (GitHub-accurate check using merged PRs before tag date)
    new_contributors = get_new_contributors(all_pr_details, last_tag)
    if new_contributors:
        output.append("## New Contributors\n")
        for author, url in new_contributors:
            # Find first PR URL for this contributor
            first_pr_url = url
            for pr in all_pr_details:
                if pr.get("author", {}).get("login") == author:
                    first_pr_url = pr.get("url", url)
                    break
            output.append(f"- @{author} made their first contribution in {first_pr_url}")
        output.append("")
    # Add full changelog link
    output.append(
        f"**Full Changelog**: https://github.com/meshtastic/firmware/compare/{last_tag}...v{new_version}"
    )
    print("\n".join(output))


if __name__ == "__main__":
    main()
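
As a usage sketch (the version argument is illustrative), the script expects to run inside a clone with origin fetched and an authenticated gh CLI:

./bin/generate_release_notes.py 2.6.0 > release_notes.md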

bin/meshtasticd-start.sh (Executable file, 32 lines)
View File

@@ -0,0 +1,32 @@
#!/usr/bin/env sh
INSTANCE=$1
CONF_DIR="/etc/meshtasticd/config.d"
VFS_DIR="/var/lib"

# If no instance ID provided, start bare daemon and exit
if [ -z "${INSTANCE}" ]; then
    echo "no instance ID provided, starting bare meshtasticd service"
    /usr/bin/meshtasticd
    exit 0
fi

# Make VFS dir if it does not exist
if [ ! -d "${VFS_DIR}/meshtasticd-${INSTANCE}" ]; then
    echo "vfs for ${INSTANCE} does not exist, creating it."
    mkdir "${VFS_DIR}/meshtasticd-${INSTANCE}"
fi

# Abort if config for $INSTANCE does not exist
if [ ! -f "${CONF_DIR}/config-${INSTANCE}.yaml" ]; then
    echo "no config for ${INSTANCE} found in ${CONF_DIR}. refusing to start" >&2
    exit 1
fi

# Start meshtasticd with instance parameters
printf "starting meshtasticd-%s... " "${INSTANCE}"
if /usr/bin/meshtasticd --config="${CONF_DIR}/config-${INSTANCE}.yaml" --fsdir="${VFS_DIR}/meshtasticd-${INSTANCE}"; then
    echo "ok"
else
    echo "failed"
fi
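
A quick manual check of the instance path, assuming a config-lora0.yaml has been placed in /etc/meshtasticd/config.d (the lora0 instance name is illustrative):

sudo /usr/bin/meshtasticd-start.sh lora0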

View File

@@ -1,5 +1,5 @@
[Unit]
-Description=Meshtastic Native Daemon
+Description=Meshtastic %i Daemon
After=network-online.target
StartLimitInterval=200
StartLimitBurst=5
@@ -9,7 +9,7 @@ AmbientCapabilities=CAP_NET_BIND_SERVICE
User=meshtasticd
Group=meshtasticd
Type=simple
-ExecStart=/usr/bin/meshtasticd
+ExecStart=/usr/bin/meshtasticd-start.sh %i
Restart=always
RestartSec=3
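
With the %i specifier this becomes a systemd template unit: the instance name is forwarded to meshtasticd-start.sh, which resolves config-<instance>.yaml and a per-instance fsdir; when no instance is given, the script falls back to a bare daemon, so the same unit still works uninstantiated. Assuming the unit ships as meshtasticd@.service, starting a hypothetical lora0 instance would be:

sudo systemctl enable --now meshtasticd@lora0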

View File

@@ -1,6 +1,7 @@
#!/usr/bin/env bash
cp "release/meshtasticd_linux_$(uname -m)" /usr/bin/meshtasticd
cp "bin/meshtasticd-start.sh" /usr/bin/meshtasticd-start.sh
mkdir -p /etc/meshtasticd
if [[ -f "/etc/meshtasticd/config.yaml" ]]; then
cp bin/config-dist.yaml /etc/meshtasticd/config-upgrade.yaml

View File

@@ -2,11 +2,12 @@
# trunk-ignore-all(ruff/F821)
# trunk-ignore-all(flake8/F821): For SConstruct imports
import sys
-from os.path import join, basename, isfile
+from os.path import join
import subprocess
import json
import re
from datetime import datetime
from typing import Dict
from readprops import readProps
@@ -14,11 +15,59 @@ Import("env")
platform = env.PioPlatform()
progname = env.get("PROGNAME")
lfsbin = f"{progname.replace('firmware-', 'littlefs-')}.bin"
manifest_ran = False


def infer_architecture(board_cfg):
    try:
        mcu = board_cfg.get("build.mcu") if board_cfg else None
    except Exception:
        # Covers KeyError from a missing board option as well as any other lookup failure
        mcu = None
    if not mcu:
        return None
    mcu_l = str(mcu).lower()
    if "esp32s3" in mcu_l:
        return "esp32-s3"
    if "esp32c6" in mcu_l:
        return "esp32-c6"
    if "esp32c3" in mcu_l:
        return "esp32-c3"
    if "esp32" in mcu_l:
        return "esp32"
    if "rp2040" in mcu_l:
        return "rp2040"
    if "rp2350" in mcu_l:
        return "rp2350"
    if "nrf52" in mcu_l:
        return "nrf52840"
    if "stm32" in mcu_l:
        return "stm32"
    return None


def manifest_gather(source, target, env):
    global manifest_ran
    if manifest_ran:
        return
    # Skip manifest generation if we cannot determine architecture (host/native builds)
    board_arch = infer_architecture(env.BoardConfig())
    if not board_arch:
        print(f"Skipping mtjson generation for unknown architecture (env={env.get('PIOENV')})")
        manifest_ran = True
        return
    manifest_ran = True
    out = []
    board_platform = env.BoardConfig().get("platform")
    board_mcu = env.BoardConfig().get("build.mcu").lower()
    needs_ota_suffix = board_platform == "nordicnrf52"
    # Mapping of bin files to their target partition names
    # Maps the filename pattern to the partition name where it should be flashed
    partition_map = {
        f"{progname}.bin": "app0",  # primary application slot (app0 / OTA_0)
        lfsbin: "spiffs",  # filesystem image flashed to spiffs
    }
    check_paths = [
        progname,
        f"{progname}.elf",
@@ -29,7 +78,9 @@ def manifest_gather(source, target, env):
f"{progname}.uf2",
f"{progname}.factory.uf2",
f"{progname}.zip",
lfsbin
lfsbin,
f"mt-{board_mcu}-ota.bin",
"bleota-c3.bin"
]
for p in check_paths:
f = env.File(env.subst(f"$BUILD_DIR/{p}"))
@@ -42,19 +93,47 @@ def manifest_gather(source, target, env):
"md5": f.get_content_hash(), # Returns MD5 hash
"bytes": f.get_size() # Returns file size in bytes
}
# Add part_name if this file represents a partition that should be flashed
if p in partition_map:
d["part_name"] = partition_map[p]
out.append(d)
print(d)
manifest_write(out, env)
def manifest_write(files, env):
# Defensive: also skip manifest writing if we cannot determine architecture
def get_project_option(name):
try:
return env.GetProjectOption(name)
except Exception:
return None
def get_project_option_any(names):
for name in names:
val = get_project_option(name)
if val is not None:
return val
return None
def as_bool(val):
return str(val).strip().lower() in ("1", "true", "yes", "on")
def as_int(val):
try:
return int(str(val), 10)
except (TypeError, ValueError):
return None
def as_list(val):
return [item.strip() for item in str(val).split(",") if item.strip()]
manifest = {
"version": verObj["long"],
"build_epoch": build_epoch,
"board": env.get("PIOENV"),
"platformioTarget": env.get("PIOENV"),
"mcu": env.get("BOARD_MCU"),
"repo": repo_owner,
"files": files,
"part": None,
"has_mui": False,
"has_inkhud": False,
}
@@ -69,6 +148,51 @@ def manifest_write(files, env):
if "MESHTASTIC_INCLUDE_INKHUD" in env.get("CPPDEFINES", []):
manifest["has_inkhud"] = True
pioenv = env.get("PIOENV")
device_meta = {}
device_meta_fields = [
("hwModel", ["custom_meshtastic_hw_model"], as_int),
("hwModelSlug", ["custom_meshtastic_hw_model_slug"], str),
("architecture", ["custom_meshtastic_architecture"], str),
("activelySupported", ["custom_meshtastic_actively_supported"], as_bool),
("displayName", ["custom_meshtastic_display_name"], str),
("supportLevel", ["custom_meshtastic_support_level"], as_int),
("images", ["custom_meshtastic_images"], as_list),
("tags", ["custom_meshtastic_tags"], as_list),
("requiresDfu", ["custom_meshtastic_requires_dfu"], as_bool),
("partitionScheme", ["custom_meshtastic_partition_scheme"], str),
("url", ["custom_meshtastic_url"], str),
("key", ["custom_meshtastic_key"], str),
("variant", ["custom_meshtastic_variant"], str),
]
for manifest_key, option_keys, caster in device_meta_fields:
raw_val = get_project_option_any(option_keys)
if raw_val is None:
continue
parsed = caster(raw_val) if callable(caster) else raw_val
if parsed is not None and parsed != "":
device_meta[manifest_key] = parsed
# Determine architecture once; if we can't infer it, skip manifest generation
board_arch = device_meta.get("architecture") or infer_architecture(env.BoardConfig())
if not board_arch:
print(f"Skipping mtjson write for unknown architecture (env={env.get('PIOENV')})")
return
device_meta["architecture"] = board_arch
# Always set requiresDfu: true for nrf52840 targets
if board_arch == "nrf52840":
device_meta["requiresDfu"] = True
device_meta.setdefault("displayName", pioenv)
device_meta.setdefault("activelySupported", False)
if device_meta:
manifest.update(device_meta)
# Write the manifest to the build directory
with open(env.subst("$BUILD_DIR/${PROGNAME}.mt.json"), "w") as f:
json.dump(manifest, f, indent=2)
@@ -166,8 +290,12 @@ def load_boot_logo(source, target, env):
if ("HAS_TFT", 1) in env.get("CPPDEFINES", []):
env.AddPreAction(f"$BUILD_DIR/{lfsbin}", load_boot_logo)
mtjson_deps = ["buildprog"]
if platform.name == "espressif32":
board_arch = infer_architecture(env.BoardConfig())
should_skip_manifest = board_arch is None
# For host/native envs, avoid depending on 'buildprog' (some targets don't define it)
mtjson_deps = [] if should_skip_manifest else ["buildprog"]
if not should_skip_manifest and platform.name == "espressif32":
# Build littlefs image as part of mtjson target
# Equivalent to `pio run -t buildfs`
target_lfs = env.DataToBin(
@@ -176,11 +304,27 @@ if platform.name == "espressif32":
    # prepend the littlefs target to the mtjson dependencies
    # mtjson_deps.insert(0, target_lfs)

-env.AddCustomTarget(
-    name="mtjson",
-    dependencies=mtjson_deps,
-    actions=[manifest_gather],
-    title="Meshtastic Manifest",
-    description="Generating Meshtastic manifest JSON + Checksums",
-    always_build=False,
-)
+if should_skip_manifest:
+    def skip_manifest(source, target, env):
+        print(f"mtjson: skipped for native environment: {env.get('PIOENV')}")
+
+    env.AddCustomTarget(
+        name="mtjson",
+        dependencies=mtjson_deps,
+        actions=[skip_manifest],
+        title="Meshtastic Manifest (skipped)",
+        description="mtjson generation is skipped for native environments",
+        always_build=True,
+    )
+else:
+    env.AddCustomTarget(
+        name="mtjson",
+        dependencies=mtjson_deps,
+        actions=[manifest_gather],
+        title="Meshtastic Manifest",
+        description="Generating Meshtastic manifest JSON + Checksums",
+        always_build=True,
+    )
+
+# Run manifest generation as part of the default build pipeline for non-native builds.
+env.Default("mtjson")

View File

@@ -18,8 +18,9 @@ def readProps(prefsLoc):
    # Try to find current build SHA if the workspace is clean. This could fail if git is not installed
    try:
+        # Pin abbreviation length to keep local builds and CI matching (avoid auto-shortening)
        sha = (
-            subprocess.check_output(["git", "rev-parse", "--short", "HEAD"])
+            subprocess.check_output(["git", "rev-parse", "--short=7", "HEAD"])
            .decode("utf-8")
            .strip()
        )
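
The pinned length matters because plain --short lets git grow the abbreviation with repository size (and honor core.abbrev), so two machines can emit SHAs of different lengths; --short=7 makes the output deterministic:

git rev-parse --short=7 HEAD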