Merge branch 'develop' into sfpp

Author: Jonathan Bennett
Date: 2026-01-15 14:03:43 -06:00
Committer: GitHub
139 changed files with 4896 additions and 529 deletions

@@ -105,6 +105,8 @@ Lora:
GPS:
# SerialPath: /dev/ttyS0
# ExtraPins:
# - 22
### Specify I2C device, or leave blank for none

@@ -0,0 +1,15 @@
Lora:
  Module: sx1262
  CS: 0
  IRQ: 6
  Reset: 1
  Busy: 4
  RXen: 2
  DIO2_AS_RF_SWITCH: true
  spidev: ch341
  USB_PID: 0x5512
  USB_VID: 0x1A86
  DIO3_TCXO_VOLTAGE: true
  # USB_Serialnum: 12345678
  SX126X_MAX_POWER: 30
  # Reduce output power to improve EMI

@@ -0,0 +1,15 @@
Lora:
  Module: sx1268
  CS: 0
  IRQ: 6
  Reset: 1
  Busy: 4
  RXen: 2
  DIO2_AS_RF_SWITCH: true
  spidev: ch341
  USB_PID: 0x5512
  USB_VID: 0x1A86
  DIO3_TCXO_VOLTAGE: true
  # USB_Serialnum: 12345678
  SX126X_MAX_POWER: 30
  # Reduce output power to improve EMI
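
The two CH341 profiles above (sx1262 and sx1268) are per-device Lora configs for meshtasticd. As a minimal sketch of tying one of them into the per-instance startup flow added later in this commit: the local filename and the instance name "ch341" below are assumptions, while the config.d directory and the config-<instance>.yaml naming come from bin/meshtasticd-start.sh further down.
# Assumption: the sx1262 profile above was saved locally as sx1262-ch341.yaml;
# the start script added below looks for /etc/meshtasticd/config.d/config-<instance>.yaml.
sudo cp sx1262-ch341.yaml /etc/meshtasticd/config.d/config-ch341.yaml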

bin/generate_release_notes.py (new executable file, 355 lines)
@@ -0,0 +1,355 @@
#!/usr/bin/env python3
"""
Generate release notes from merged PRs on develop and master branches.
Categorizes PRs into Enhancements and Bug Fixes/Maintenance sections.
"""
import subprocess
import re
import json
import sys
from datetime import datetime


def get_last_release_tag():
    """Get the most recent release tag."""
    result = subprocess.run(
        ["git", "describe", "--tags", "--abbrev=0"],
        capture_output=True,
        text=True,
        check=True,
    )
    return result.stdout.strip()


def get_tag_date(tag):
    """Get the commit date (ISO 8601) of the tag."""
    result = subprocess.run(
        ["git", "show", "-s", "--format=%cI", tag],
        capture_output=True,
        text=True,
        check=True,
    )
    return result.stdout.strip()


def get_merged_prs_since_tag(tag, branch):
    """Get all merged PRs since the given tag on the specified branch."""
    # Get commits since tag on the branch - look for PR numbers in parentheses
    result = subprocess.run(
        [
            "git",
            "log",
            f"{tag}..origin/{branch}",
            "--oneline",
        ],
        capture_output=True,
        text=True,
    )
    prs = []
    seen_pr_numbers = set()
    for line in result.stdout.strip().split("\n"):
        if not line:
            continue
        # Extract PR number from commit message - format: "Title (#1234)"
        pr_match = re.search(r"\(#(\d+)\)", line)
        if pr_match:
            pr_number = pr_match.group(1)
            if pr_number not in seen_pr_numbers:
                seen_pr_numbers.add(pr_number)
                prs.append(pr_number)
    return prs


def get_pr_details(pr_number):
    """Get PR details from GitHub API via gh CLI."""
    try:
        result = subprocess.run(
            [
                "gh",
                "pr",
                "view",
                pr_number,
                "--json",
                "title,author,labels,url",
            ],
            capture_output=True,
            text=True,
            check=True,
        )
        return json.loads(result.stdout)
    except subprocess.CalledProcessError:
        return None


def should_exclude_pr(pr_details):
    """Check if PR should be excluded from release notes."""
    if not pr_details:
        return True
    title = pr_details.get("title", "").lower()
    # Exclude trunk update PRs
    if "upgrade trunk" in title or "update trunk" in title or "trunk update" in title:
        return True
    # Exclude protobuf update PRs
    if "update protobufs" in title or "update protobuf" in title:
        return True
    # Exclude automated version bump PRs
    if "bump release version" in title or "bump version" in title:
        return True
    return False


def is_dependency_update(pr_details):
    """Check if PR is a dependency/chore update."""
    if not pr_details:
        return False
    title = pr_details.get("title", "").lower()
    author = pr_details.get("author", {}).get("login", "").lower()
    labels = [label.get("name", "").lower() for label in pr_details.get("labels", [])]
    # Check for renovate or dependabot authors
    if "renovate" in author or "dependabot" in author:
        return True
    # Check for chore(deps) pattern
    if re.match(r"^chore\(deps\):", title):
        return True
    # Check for digest update patterns
    if re.match(r".*digest to [a-f0-9]+", title, re.IGNORECASE):
        return True
    # Check for dependency-related labels
    dependency_labels = ["dependencies", "deps", "renovate"]
    if any(dep in label for label in labels for dep in dependency_labels):
        return True
    return False


def is_enhancement(pr_details):
    """Determine if PR is an enhancement based on labels and title."""
    labels = [label.get("name", "").lower() for label in pr_details.get("labels", [])]
    # Check labels first
    enhancement_labels = ["enhancement", "feature", "feat", "new feature"]
    for label in labels:
        if any(enh in label for enh in enhancement_labels):
            return True
    # Check title prefixes
    title = pr_details.get("title", "")
    enhancement_prefixes = ["feat:", "feature:", "add:"]
    title_lower = title.lower()
    for prefix in enhancement_prefixes:
        if title_lower.startswith(prefix) or f" {prefix}" in title_lower:
            return True
    return False


def clean_title(title):
    """Clean up PR title for release notes."""
    # Remove common prefixes
    prefixes_to_remove = [
        r"^fix:\s*",
        r"^feat:\s*",
        r"^feature:\s*",
        r"^bug:\s*",
        r"^bugfix:\s*",
        r"^chore:\s*",
        r"^chore\([^)]+\):\s*",
        r"^refactor:\s*",
        r"^docs:\s*",
        r"^ci:\s*",
        r"^build:\s*",
        r"^perf:\s*",
        r"^style:\s*",
        r"^test:\s*",
    ]
    cleaned = title
    for prefix in prefixes_to_remove:
        cleaned = re.sub(prefix, "", cleaned, flags=re.IGNORECASE)
    # Ensure first letter is capitalized
    if cleaned:
        cleaned = cleaned[0].upper() + cleaned[1:]
    return cleaned.strip()


def format_pr_line(pr_details):
    """Format a PR as a markdown bullet point."""
    title = clean_title(pr_details.get("title", "Unknown"))
    author = pr_details.get("author", {}).get("login", "unknown")
    url = pr_details.get("url", "")
    return f"- {title} by @{author} in {url}"


def get_new_contributors(pr_details_list, tag, repo="meshtastic/firmware"):
    """Find contributors whose first merged PR lands in this release.

    GitHub usernames do not necessarily match git commit authors, so we use the
    GitHub search API via `gh` to see if the user has any merged PRs before the
    tag date. This mirrors how GitHub's "Generate release notes" feature works.
    """
    bot_authors = {"github-actions", "renovate", "dependabot", "app/renovate", "app/github-actions", "app/dependabot"}
    new_contributors = []
    seen_authors = set()
    try:
        tag_date = get_tag_date(tag)
    except subprocess.CalledProcessError:
        print(f"Warning: Could not determine tag date for {tag}; skipping new contributor detection", file=sys.stderr)
        return []
    for pr in pr_details_list:
        author = pr.get("author", {}).get("login", "")
        if not author or author in seen_authors:
            continue
        # Skip bots
        if author.lower() in bot_authors or author.startswith("app/"):
            continue
        seen_authors.add(author)
        try:
            # Search for merged PRs by this author created before the tag date
            search_query = f"is:pr author:{author} repo:{repo} closed:<=\"{tag_date}\""
            search = subprocess.run(
                [
                    "gh",
                    "search",
                    "issues",
                    "--json",
                    "number,mergedAt,createdAt",
                    "--state",
                    "closed",
                    "--limit",
                    "200",
                    search_query,
                ],
                capture_output=True,
                text=True,
            )
            if search.returncode != 0:
                # If gh fails, be conservative and skip adding to new contributors
                print(f"Warning: gh search failed for author {author}: {search.stderr.strip()}", file=sys.stderr)
                continue
            results = json.loads(search.stdout or "[]")
            # If any merged PR exists before or on tag date, not a new contributor
            had_prior_pr = any(item.get("mergedAt") for item in results)
            if not had_prior_pr:
                new_contributors.append((author, pr.get("url", "")))
        except Exception as e:
            print(f"Warning: Could not check contributor history for {author}: {e}", file=sys.stderr)
            continue
    return new_contributors


def main():
    if len(sys.argv) < 2:
        print("Usage: generate_release_notes.py <new_version>", file=sys.stderr)
        sys.exit(1)
    new_version = sys.argv[1]
    # Get last release tag
    try:
        last_tag = get_last_release_tag()
    except subprocess.CalledProcessError:
        print("Error: Could not find last release tag", file=sys.stderr)
        sys.exit(1)
    # Collect PRs from both branches
    all_pr_numbers = set()
    for branch in ["develop", "master"]:
        try:
            prs = get_merged_prs_since_tag(last_tag, branch)
            all_pr_numbers.update(prs)
        except Exception as e:
            print(f"Warning: Could not get PRs from {branch}: {e}", file=sys.stderr)
    # Get details for all PRs
    enhancements = []
    bug_fixes = []
    dependencies = []
    all_pr_details = []
    for pr_number in sorted(all_pr_numbers, key=int):
        details = get_pr_details(pr_number)
        if details and not should_exclude_pr(details):
            all_pr_details.append(details)
            if is_dependency_update(details):
                dependencies.append(details)
            elif is_enhancement(details):
                enhancements.append(details)
            else:
                bug_fixes.append(details)
    # Generate release notes
    output = []
    if enhancements:
        output.append("## 🚀 Enhancements\n")
        for pr in enhancements:
            output.append(format_pr_line(pr))
        output.append("")
    if bug_fixes:
        output.append("## 🐛 Bug fixes and maintenance\n")
        for pr in bug_fixes:
            output.append(format_pr_line(pr))
        output.append("")
    if dependencies:
        output.append("## ⚙️ Dependencies\n")
        for pr in dependencies:
            output.append(format_pr_line(pr))
        output.append("")
    # Find new contributors (GitHub-accurate check using merged PRs before tag date)
    new_contributors = get_new_contributors(all_pr_details, last_tag)
    if new_contributors:
        output.append("## New Contributors\n")
        for author, url in new_contributors:
            # Find first PR URL for this contributor
            first_pr_url = url
            for pr in all_pr_details:
                if pr.get("author", {}).get("login") == author:
                    first_pr_url = pr.get("url", url)
                    break
            output.append(f"- @{author} made their first contribution in {first_pr_url}")
        output.append("")
    # Add full changelog link
    output.append(
        f"**Full Changelog**: https://github.com/meshtastic/firmware/compare/{last_tag}...v{new_version}"
    )
    print("\n".join(output))


if __name__ == "__main__":
    main()
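
A usage sketch for the script above: the version string is only an example, and the script expects a reachable last release tag, origin/develop and origin/master refs, and an authenticated gh CLI.
# Generate notes for a hypothetical 2.7.0 release and save them for the release draft.
./bin/generate_release_notes.py 2.7.0 > release_notes.md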

bin/meshtasticd-start.sh (new executable file, 32 lines)
@@ -0,0 +1,32 @@
#!/usr/bin/env sh
INSTANCE=$1
CONF_DIR="/etc/meshtasticd/config.d"
VFS_DIR="/var/lib"
# If no instance ID provided, start bare daemon and exit
if [ -z "${INSTANCE}" ]; then
    echo "no instance ID provided, starting bare meshtasticd service"
    /usr/bin/meshtasticd
    exit 0
fi
# Make VFS dir if it does not exist
if [ ! -d "${VFS_DIR}/meshtasticd-${INSTANCE}" ]; then
    echo "vfs for ${INSTANCE} does not exist, creating it."
    mkdir "${VFS_DIR}/meshtasticd-${INSTANCE}"
fi
# Abort if config for $INSTANCE does not exist
if [ ! -f "${CONF_DIR}/config-${INSTANCE}.yaml" ]; then
    echo "no config for ${INSTANCE} found in ${CONF_DIR}. refusing to start" >&2
    exit 1
fi
# Start meshtasticd with instance parameters
printf "starting meshtasticd-%s... " "${INSTANCE}"
if /usr/bin/meshtasticd --config="${CONF_DIR}/config-${INSTANCE}.yaml" --fsdir="${VFS_DIR}/meshtasticd-${INSTANCE}"; then
    echo "ok"
else
    echo "failed"
fi
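
A usage sketch for the wrapper: the instance name "ch341" is hypothetical and must match a config-<instance>.yaml under /etc/meshtasticd/config.d.
# Start a named instance: reads /etc/meshtasticd/config.d/config-ch341.yaml
# and keeps its filesystem under /var/lib/meshtasticd-ch341.
sudo /usr/bin/meshtasticd-start.sh ch341
# With no argument, the script starts a bare meshtasticd instead.
sudo /usr/bin/meshtasticd-start.sh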

@@ -1,5 +1,5 @@
[Unit]
Description=Meshtastic Native Daemon
Description=Meshtastic %i Daemon
After=network-online.target
StartLimitInterval=200
StartLimitBurst=5
@@ -9,7 +9,7 @@ AmbientCapabilities=CAP_NET_BIND_SERVICE
User=meshtasticd
Group=meshtasticd
Type=simple
ExecStart=/usr/bin/meshtasticd
ExecStart=/usr/bin/meshtasticd-start.sh %i
Restart=always
RestartSec=3
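
With %i in the description and ExecStart delegating to the wrapper, this reads like a systemd template unit. A hedged sketch of enabling one instance, assuming the unit file ships as meshtasticd@.service (its filename is not visible in this view) and that a matching config.d entry exists:
# Assumed template and instance names; adjust to the actual unit filename.
sudo systemctl daemon-reload
sudo systemctl enable --now meshtasticd@ch341.service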

@@ -1,6 +1,7 @@
#!/usr/bin/env bash
cp "release/meshtasticd_linux_$(uname -m)" /usr/bin/meshtasticd
cp "bin/meshtasticd-start.sh" /usr/bin/meshtasticd-start.sh
mkdir -p /etc/meshtasticd
if [[ -f "/etc/meshtasticd/config.yaml" ]]; then
    cp bin/config-dist.yaml /etc/meshtasticd/config-upgrade.yaml

@@ -58,7 +58,16 @@ def manifest_gather(source, target, env):
    manifest_ran = True
    out = []
    board_platform = env.BoardConfig().get("platform")
    board_mcu = env.BoardConfig().get("build.mcu").lower()
    needs_ota_suffix = board_platform == "nordicnrf52"
    # Mapping of bin files to their target partition names
    # Maps the filename pattern to the partition name where it should be flashed
    partition_map = {
        f"{progname}.bin": "app0", # primary application slot (app0 / OTA_0)
        lfsbin: "spiffs", # filesystem image flashed to spiffs
    }
    check_paths = [
        progname,
        f"{progname}.elf",
@@ -69,7 +78,9 @@
        f"{progname}.uf2",
        f"{progname}.factory.uf2",
        f"{progname}.zip",
        lfsbin
        lfsbin,
        f"mt-{board_mcu}-ota.bin",
        "bleota-c3.bin"
    ]
    for p in check_paths:
        f = env.File(env.subst(f"$BUILD_DIR/{p}"))
@@ -82,6 +93,9 @@
                "md5": f.get_content_hash(), # Returns MD5 hash
                "bytes": f.get_size() # Returns file size in bytes
            }
            # Add part_name if this file represents a partition that should be flashed
            if p in partition_map:
                d["part_name"] = partition_map[p]
            out.append(d)
            print(d)
    manifest_write(out, env)