diff --git a/.github/workflows/podman.yml b/.github/workflows/podman.yml
index 46fc866c..03c4c752 100644
--- a/.github/workflows/podman.yml
+++ b/.github/workflows/podman.yml
@@ -18,28 +18,29 @@ env:
LOG_LEVEL: DEBUG
jobs:
- build:
- runs-on: ubuntu-latest
+ direct:
+ runs-on: ubuntu-24.04
- name: podman-compose integration test
+ name: podman-compose
steps:
- uses: actions/checkout@v4
- - name: Install ImageBuilder prereqs
- run: sudo apt-get install -y podman-compose jq
+ - name: Install prereqs
+ run: sudo apt-get install -y jq
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v5
- name: Build the containers
run: |
- podman-compose build
+ uv run podman-compose build
- name: Start the containers
run: |
- export CONTAINER_SOCKET_PATH="/tmp/podman.sock"
- podman system service --time=0 "unix://$CONTAINER_SOCKET_PATH" &
- echo "PUBLIC_PATH=$(pwd)/public" > .env
- echo "CONTAINER_SOCKET_PATH=$CONTAINER_SOCKET_PATH" >> .env
- podman-compose up -d
+ podman network create asu-build
+ podman system service --time=0 "unix://$(pwd)/podman.sock" &
+ uv run podman-compose up -d
- name: Let the containers start
run: sleep 30
@@ -55,7 +56,7 @@ jobs:
curl 'http://localhost:8000/api/v1/build' \
--request 'POST' \
--header 'Content-Type: application/json' \
- --data @tests/ci/openwrt-one-24.10.0.json | tee response.json | jq
+ --data @tests/ci/openwrt-one-25.12.2.json | tee response.json | jq
if [ "$(jq -r '.status' response.json)" -eq 200 ]; then
break
fi
@@ -64,3 +65,57 @@ jobs:
fi
sleep 10
done
+
+ cached:
+ runs-on: ubuntu-24.04
+
+ name: podman-compose with nginx cache
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Install prereqs
+ run: sudo apt-get install -y jq
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v5
+
+ - name: Build the containers
+ run: |
+ uv run podman-compose build
+
+ - name: Start the containers with cache
+ run: |
+ podman network create asu-build
+ podman system service --time=0 "unix://$(pwd)/podman.sock" &
+ cp tests/ci/asu-cache.toml asu.toml
+ uv run podman-compose -f podman-compose.yml -f podman-compose.cache.yml up -d
+
+ - name: Let the containers start
+ run: sleep 30
+
+ - name: Test startup
+ run: |
+ curl -s http://localhost:8000/api/v1/stats | tee response.json | jq
+ [ "$(jq -r '.queue_length' response.json)" -eq 0 ] || exit 1
+
+ - name: Test build (first request, cache miss)
+ run: |
+ for i in {1..20}; do
+ curl 'http://localhost:8000/api/v1/build' \
+ --request 'POST' \
+ --header 'Content-Type: application/json' \
+ --data @tests/ci/openwrt-one-25.12.2.json | tee response.json | jq
+ if [ "$(jq -r '.status' response.json)" -eq 200 ]; then
+ break
+ fi
+ if [ $i -eq 20 ]; then
+ exit 1
+ fi
+ sleep 10
+ done
+
+ - name: Verify cache was used
+ run: |
+ uv run podman-compose -f podman-compose.yml -f podman-compose.cache.yml logs cache 2>&1 | tee cache.log
+ grep -q "downloads.openwrt.org" cache.log
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 1c6ce639..5102ac43 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -24,8 +24,8 @@ jobs:
fail-fast: false
matrix:
python-version:
- - "3.11"
- - "3.12"
+ - "3.13"
+ - "3.14"
name: Python ${{ matrix.python-version }}
@@ -37,9 +37,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install uv
- uses: astral-sh/setup-uv@v4
- with:
- enable-cache: true
+ uses: astral-sh/setup-uv@v5
- name: Display Python version
run: python -c "import sys; print(sys.version)"
@@ -55,8 +53,8 @@ jobs:
- name: Test with pytest
run: |
- export CONTAINER_SOCKET_PATH="/tmp/podman.sock"
- podman system service --time=0 "unix://$CONTAINER_SOCKET_PATH" &
+ podman network create asu-build
+ podman system service --time=0 "unix://$(pwd)/podman.sock" &
uv run coverage run -m pytest -vv --runslow
uv run coverage xml
diff --git a/Containerfile b/Containerfile
index 462e75e8..b9122bb7 100644
--- a/Containerfile
+++ b/Containerfile
@@ -1,4 +1,4 @@
-FROM python:3.12-slim
+FROM python:3.14-slim
WORKDIR /app/
diff --git a/README.md b/README.md
index fd6073c7..f3d5e09e 100644
--- a/README.md
+++ b/README.md
@@ -115,10 +115,11 @@ The services are configured via environment variables, which can be set in a
`.env` file
```bash
-echo "PUBLIC_PATH=$(pwd)/public" > .env
-echo "CONTAINER_SOCKET_PATH=/run/user/$(id -u)/podman/podman.sock" >> .env
-# optionally allow custom scripts running on first boot
-echo "ALLOW_DEFAULTS=1" >> .env
+# symlink the podman socket into the asu directory
+ln -sf /run/user/$(id -u)/podman/podman.sock podman.sock
+
+# create isolated network for build containers (no access to Redis)
+podman network create asu-build
```
Now it's possible to run all services via `podman-compose`:
@@ -187,6 +188,12 @@ curl -LsSf https://astral.sh/uv/install.sh | sh
uv sync --extra dev
```
+#### Running Redis
+
+```bash
+podman run -d --name redis -p 6379:6379 redis:alpine
+```
+
#### Running the server
```bash
diff --git a/asu.example.toml b/asu.example.toml
new file mode 100644
index 00000000..97510806
--- /dev/null
+++ b/asu.example.toml
@@ -0,0 +1,38 @@
+# ASU Server Configuration
+#
+# Copy to asu.toml and adjust. Environment variables and .env
+# override values set here.
+
+upstream_url = "https://downloads.openwrt.org"
+allow_defaults = false
+log_level = "INFO"
+
+# Container settings
+base_container = "ghcr.io/openwrt/imagebuilder"
+
+# Caching proxy: to route package downloads through the nginx cache,
+# replace the upstream_url value above with "http://cache"
+
+# Build cache TTLs
+build_ttl = "7d"
+build_ttl_unversioned = "24h"
+build_defaults_ttl = "30m"
+build_failure_ttl = "1h"
+max_pending_jobs = 200
+job_timeout = "10m"
+
+# S3 storage (optional, default is local)
+# store_backend = "s3"
+# s3_endpoint = "https://s3.example.com"
+# s3_access_key = ""
+# s3_secret_key = ""
+# s3_bucket = "asu-store"
+# s3_region = "auto"
+# s3_public_url = "https://cdn.example.com"
+
+# Allowed external repository URL prefixes
+# repository_allow_list = [
+# "https://raw.githubusercontent.com/libremesh/",
+# "https://feed.libremesh.org/",
+# "https://buildbot.weimarnetz.de/",
+# ]
diff --git a/asu/build.py b/asu/build.py
index 7cb55b38..84c048fd 100644
--- a/asu/build.py
+++ b/asu/build.py
@@ -2,23 +2,33 @@
import json
import logging
import re
+import shutil
+import tarfile
+from io import BytesIO
from os import getenv
from pathlib import Path
from typing import Union
from time import perf_counter
from rq import get_current_job
-from rq.utils import parse_timeout
from podman import errors
+from rq.utils import parse_timeout
from asu.build_request import BuildRequest
from asu.config import settings
from asu.package_changes import apply_package_changes
+from asu.repositories import (
+ merge_repositories,
+ validate_repos,
+)
+from asu.store import LocalStore, get_store
from asu.util import (
add_timestamp,
add_build_event,
check_manifest,
+ check_package_errors,
diff_packages,
+ error_log,
fingerprint_pubkey_usign,
get_branch,
get_container_version_tag,
@@ -34,6 +44,88 @@
log = logging.getLogger("rq.worker")
+def _cleanup_container(container):
+ """Kill and remove a container with its volumes."""
+ try:
+ container.kill()
+ except Exception:
+ pass
+ try:
+ container.remove(v=True, force=True)
+ except Exception as e:
+ log.warning(f"Failed to remove container {container.id[:12]}: {e}")
+
+
+def _make_tar(files: dict[str, str | bytes]) -> bytes:
+ """Create an in-memory tar archive from a dict of {path: content}.
+
+ Args:
+ files: mapping of archive-relative paths to file contents
+ (str will be encoded to utf-8)
+
+ Returns:
+ bytes of a tar archive
+ """
+ buf = BytesIO()
+ with tarfile.TarFile(fileobj=buf, mode="w") as tar:
+ for name, content in files.items():
+ data = content.encode("utf-8") if isinstance(content, str) else content
+ info = tarfile.TarInfo(name=name)
+ info.size = len(data)
+ tar.addfile(info, BytesIO(data))
+ return buf.getvalue()
+
+
+def _detect_apk_mode(container) -> bool:
+ """Detect whether the ImageBuilder uses apk or opkg.
+
+ Checks for the presence of /builder/repositories (apk) vs
+ /builder/repositories.conf (opkg) inside the running container.
+ """
+ rc, _, _ = run_cmd(container, ["test", "-f", "/builder/repositories"])
+ return rc == 0
+
+
+def inject_files(container, build_request, job=None):
+ """Copy keys, repositories, and defaults into a running container.
+
+ Uses put_archive to inject files directly — no bind mounts needed,
+ so there are no host-path dependencies.
+ """
+ if build_request.repository_keys:
+ files = {}
+ for i, key in enumerate(build_request.repository_keys):
+ if key.strip().startswith("-----BEGIN"):
+ files[f"keys/custom-{i}.pem"] = key
+ else:
+ fingerprint = fingerprint_pubkey_usign(key)
+ files[f"keys/{fingerprint}"] = (
+ f"untrusted comment: {fingerprint}\n{key}"
+ )
+ if files:
+ container.put_archive("/builder/", _make_tar(files))
+
+ if build_request.repositories:
+ allowed = validate_repos(build_request.repositories)
+ apk_mode = _detect_apk_mode(container)
+ repo_file = "repositories" if apk_mode else "repositories.conf"
+
+ base = ""
+ if build_request.repositories_mode == "append":
+ _, base, _ = run_cmd(container, ["cat", repo_file])
+
+ merged = merge_repositories(base, allowed, apk_mode)
+ container.put_archive("/builder/", _make_tar({repo_file: merged}))
+
+ if build_request.defaults:
+ container.put_archive(
+ "/builder/",
+ _make_tar(
+ {"asu-files/etc/uci-defaults/99-asu-defaults": build_request.defaults}
+ ),
+ )
+
+
def _build(build_request: BuildRequest, job=None):
"""Build image request and setup ImageBuilders automatically
@@ -46,6 +138,7 @@ def _build(build_request: BuildRequest, job=None):
build_start: float = perf_counter()
request_hash = get_request_hash(build_request)
+
bin_dir: Path = settings.public_path / "store" / request_hash
bin_dir.mkdir(parents=True, exist_ok=True)
log.debug(f"Bin dir: {bin_dir}")
@@ -67,7 +160,6 @@ def _build(build_request: BuildRequest, job=None):
f"Container version: {container_version_tag} (requested {build_request.version})"
)
- mounts: list[dict[str, Union[str, bool]]] = []
environment: dict[str, str] = {}
image = f"{settings.base_container}:{build_request.target.replace('/', '-')}-{container_version_tag}"
@@ -82,15 +174,6 @@ def _build(build_request: BuildRequest, job=None):
}
)
- if settings.squid_cache:
- environment.update(
- {
- "UPSTREAM_URL": settings.upstream_url.replace("https", "http"),
- "use_proxy": "on",
- "http_proxy": "http://127.0.0.1:3128",
- }
- )
-
job.meta["imagebuilder_status"] = "container_setup"
job.save_meta()
@@ -104,69 +187,9 @@ def _build(build_request: BuildRequest, job=None):
)
log.info(f"Pulling {image}... done")
- bin_dir.mkdir(parents=True, exist_ok=True)
- log.debug("Created store path: %s", bin_dir)
-
- if build_request.repository_keys:
- log.debug("Found extra keys")
-
- (bin_dir / "keys").mkdir(parents=True, exist_ok=True)
-
- for key in build_request.repository_keys:
- fingerprint = fingerprint_pubkey_usign(key)
- log.debug(f"Found key {fingerprint}")
-
- (bin_dir / "keys" / fingerprint).write_text(
- f"untrusted comment: {fingerprint}\n{key}"
- )
-
- mounts.append(
- {
- "type": "bind",
- "source": str(bin_dir / "keys" / fingerprint),
- "target": "/builder/keys/" + fingerprint,
- "read_only": True,
- },
- )
-
- if build_request.repositories:
- log.debug("Found extra repos")
- repositories = ""
- for name, repo in build_request.repositories.items():
- if repo.startswith(tuple(settings.repository_allow_list)):
- repositories += f"src/gz {name} {repo}\n"
- else:
- report_error(job, f"Repository {repo} not allowed")
-
- repositories += "src imagebuilder file:packages\noption check_signature"
-
- (bin_dir / "repositories.conf").write_text(repositories)
-
- mounts.append(
- {
- "type": "bind",
- "source": str(bin_dir / "repositories.conf"),
- "target": "/builder/repositories.conf",
- "read_only": True,
- },
- )
-
- if build_request.defaults:
- log.debug("Found defaults")
-
- defaults_file = bin_dir / "files/etc/uci-defaults/99-asu-defaults"
- defaults_file.parent.mkdir(parents=True, exist_ok=True)
- defaults_file.write_text(build_request.defaults)
- mounts.append(
- {
- "type": "bind",
- "source": str(bin_dir / "files"),
- "target": str(bin_dir / "files"),
- "read_only": True,
- },
- )
-
- log.debug("Mounts: %s", mounts)
+ mounts: list[dict[str, Union[str, bool]]] = [
+ {"type": "tmpfs", "target": f"/builder/{request_hash}"},
+ ]
container = podman.containers.create(
image,
@@ -175,128 +198,150 @@ def _build(build_request: BuildRequest, job=None):
cap_drop=["all"],
no_new_privileges=True,
privileged=False,
- networks={"pasta": {}},
- auto_remove=True,
+ network_mode="bridge",
+ networks={"asu-build": {}},
environment=environment,
+ image_volume_mode="ignore",
)
- container.start()
-
- if is_snapshot_build(build_request.version):
- log.info("Running setup.sh for ImageBuilder")
- returncode, job.meta["stdout"], job.meta["stderr"] = run_cmd(
- container, ["sh", "setup.sh"]
- )
- if returncode:
- container.kill()
- report_error(job, "Could not set up ImageBuilder")
-
- returncode, job.meta["stdout"], job.meta["stderr"] = run_cmd(
- container, ["make", "info"]
- )
+ try:
+ container.start()
- job.meta["imagebuilder_status"] = "validate_revision"
- job.save_meta()
+ if is_snapshot_build(build_request.version):
+ log.info("Running setup.sh for ImageBuilder")
+ returncode, job.meta["stdout"], job.meta["stderr"] = run_cmd(
+ container, ["sh", "setup.sh"]
+ )
+ if returncode:
+ report_error(job, f"Could not set up ImageBuilder ({returncode=})")
- version_code = re.search('Current Revision: "(r.+)"', job.meta["stdout"]).group(1)
+ inject_files(container, build_request, job)
- if requested := build_request.version_code:
- if version_code != requested:
- report_error(
- job,
- f"Received incorrect version {version_code} (requested {requested})",
+ # If upstream_url is HTTP (caching proxy), rewrite repository URLs
+ # from https://host/path to http://cache/host/path
+ if settings.upstream_url.startswith("http://"):
+ cache_host = settings.upstream_url.rstrip("/")
+ repo_file = (
+ "repositories" if _detect_apk_mode(container) else "repositories.conf"
+ )
+ run_cmd(
+ container,
+ ["sed", "-i", f"s|https://|{cache_host}/|g", repo_file],
)
- default_packages = set(
- re.search(r"Default Packages: (.*)\n", job.meta["stdout"]).group(1).split()
- )
- log.debug(f"Default packages: {default_packages}")
-
- profile_packages = set(
- re.search(
- r"{}:\n .+\n Packages: (.*?)\n".format(build_request.profile),
- job.meta["stdout"],
- re.MULTILINE,
+ returncode, job.meta["stdout"], job.meta["stderr"] = run_cmd(
+ container, ["make", "info"]
)
- .group(1)
- .split()
- )
-
- apply_package_changes(build_request)
- build_cmd_packages = build_request.packages
+ job.meta["imagebuilder_status"] = "validate_revision"
+ job.save_meta()
- if build_request.diff_packages:
- build_cmd_packages: list[str] = diff_packages(
- build_request.packages, default_packages | profile_packages
+ version_code = re.search('Current Revision: "(r.+)"', job.meta["stdout"]).group(
+ 1
)
- log.debug(f"Diffed packages: {build_cmd_packages}")
- job.meta["imagebuilder_status"] = "validate_manifest"
- job.save_meta()
+ if requested := build_request.version_code:
+ if version_code != requested:
+ report_error(
+ job,
+ f"Received incorrect version {version_code} (requested {requested})",
+ )
- if settings.squid_cache and not is_snapshot_build(build_request.version):
- log.info("Disabling HTTPS for repositories")
- run_cmd(
- container,
- ["sed", "-i", "s|https|http|g", "repositories.conf", "repositories"],
+ default_packages = set(
+ re.search(r"Default Packages: (.*)\n", job.meta["stdout"]).group(1).split()
)
+ log.debug(f"Default packages: {default_packages}")
- returncode, job.meta["stdout"], job.meta["stderr"] = run_cmd(
- container,
- [
- "make",
- "manifest",
- f"PROFILE={build_request.profile}",
- f"PACKAGES={' '.join(build_cmd_packages)}",
- "STRIP_ABI=1",
- ],
- )
+ profile_packages = set(
+ re.search(
+ r"{}:\n .+\n Packages: (.*?)\n".format(build_request.profile),
+ job.meta["stdout"],
+ re.MULTILINE,
+ )
+ .group(1)
+ .split()
+ )
- job.save_meta()
+ apply_package_changes(build_request)
- if returncode:
- container.kill()
- report_error(job, "Impossible package selection")
+ extra_packages = (
+ set(build_request.packages) - default_packages - profile_packages
+ )
+ branch = get_branch(build_request.version)["name"]
+ for pkg in extra_packages:
+ if not pkg.startswith("-"):
+ add_timestamp(
+ f"stats:packages:{branch}:{pkg}",
+ {"stats": "packages", "branch": branch, "package": pkg},
+ )
+
+ build_cmd_packages = build_request.packages
+
+ if build_request.diff_packages:
+ build_cmd_packages: list[str] = diff_packages(
+ build_request.packages, default_packages | profile_packages
+ )
+ log.debug(f"Diffed packages: {build_cmd_packages}")
- manifest: dict[str, str] = parse_manifest(job.meta["stdout"])
- log.debug(f"Manifest: {manifest}")
+ job.meta["imagebuilder_status"] = "validate_manifest"
+ job.save_meta()
- # Check if all requested packages are in the manifest
- if err := check_manifest(manifest, build_request.packages_versions):
- report_error(job, err)
+ returncode, job.meta["stdout"], job.meta["stderr"] = run_cmd(
+ container,
+ [
+ "make",
+ "manifest",
+ f"PROFILE={build_request.profile}",
+ f"PACKAGES={' '.join(build_cmd_packages)}",
+ "STRIP_ABI=1",
+ ],
+ )
- packages_hash: str = get_packages_hash(manifest.keys())
- log.debug(f"Packages Hash: {packages_hash}")
+ job.save_meta()
- job.meta["build_cmd"] = [
- "make",
- "image",
- f"PROFILE={build_request.profile}",
- f"PACKAGES={' '.join(build_cmd_packages)}",
- f"EXTRA_IMAGE_NAME={packages_hash[:12]}",
- f"BIN_DIR=/builder/{request_hash}",
- ]
+ if returncode:
+ report_error(job, check_package_errors(job.meta["stderr"]))
- if build_request.defaults:
- job.meta["build_cmd"].append(f"FILES={bin_dir}/files")
+ manifest: dict[str, str] = parse_manifest(job.meta["stdout"])
+ log.debug(f"Manifest: {manifest}")
- # Check if custom rootfs size is requested
- if build_request.rootfs_size_mb:
- log.debug("Found custom rootfs size %d", build_request.rootfs_size_mb)
- job.meta["build_cmd"].append(f"ROOTFS_PARTSIZE={build_request.rootfs_size_mb}")
+ # Check if all requested packages are in the manifest
+ if err := check_manifest(manifest, build_request.packages_versions):
+ report_error(job, err)
- log.debug("Build command: %s", job.meta["build_cmd"])
+ packages_hash: str = get_packages_hash(manifest.keys())
+ log.debug(f"Packages Hash: {packages_hash}")
- job.meta["imagebuilder_status"] = "building_image"
- job.save_meta()
+ job.meta["build_cmd"] = [
+ "make",
+ "image",
+ f"PROFILE={build_request.profile}",
+ f"PACKAGES={' '.join(build_cmd_packages)}",
+ f"EXTRA_IMAGE_NAME={packages_hash[:12]}",
+ f"BIN_DIR=/builder/{request_hash}",
+ ]
+
+ if build_request.defaults:
+ job.meta["build_cmd"].append("FILES=/builder/asu-files")
+
+ # Check if custom rootfs size is requested
+ if build_request.rootfs_size_mb:
+ log.debug("Found custom rootfs size %d", build_request.rootfs_size_mb)
+ job.meta["build_cmd"].append(
+ f"ROOTFS_PARTSIZE={build_request.rootfs_size_mb}"
+ )
- returncode, job.meta["stdout"], job.meta["stderr"] = run_cmd(
- container,
- job.meta["build_cmd"],
- copy=["/builder/" + request_hash, bin_dir.parent],
- )
+ log.debug("Build command: %s", job.meta["build_cmd"])
- container.kill()
+ job.meta["imagebuilder_status"] = "building_image"
+ job.save_meta()
+
+ returncode, job.meta["stdout"], job.meta["stderr"] = run_cmd(
+ container,
+ job.meta["build_cmd"],
+ copy=["/builder/" + request_hash, bin_dir.parent],
+ )
+ finally:
+ _cleanup_container(container)
job.save_meta()
@@ -322,8 +367,10 @@ def _build(build_request: BuildRequest, job=None):
map(
lambda i: i["name"],
filter(
- lambda i: i["type"]
- in ["sysupgrade", "factory", "combined", "combined-efi", "sdcard"],
+ lambda i: (
+ i["type"]
+ in ["sysupgrade", "factory", "combined", "combined-efi", "sdcard"]
+ ),
json_content["profiles"][build_request.profile]["images"],
),
)
@@ -356,41 +403,50 @@ def _build(build_request: BuildRequest, job=None):
{
"type": "bind",
"source": str(bin_dir),
- "target": request_hash,
+ "target": "/work",
"read_only": False,
},
],
- user="root", # running as root to have write access to the mounted volume
- working_dir=request_hash,
+ user="root",
+ working_dir="/work",
environment={
"IMAGES_TO_SIGN": " ".join(images),
"PATH": "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/builder/staging_dir/host/bin",
},
- auto_remove=True,
- )
- returncode, job.meta["stdout"], job.meta["stderr"] = run_cmd(
- container,
- [
- "bash",
- "-c",
- (
- "env;"
- "for IMAGE in $IMAGES_TO_SIGN; do "
- "touch ${IMAGE}.test;"
- 'fwtool -t -s /dev/null "$IMAGE" && echo "sign entfern";'
- 'cp "/builder/key-build.ucert" "$IMAGE.ucert" && echo "moved";'
- 'usign -S -m "$IMAGE" -s "/builder/key-build" -x "$IMAGE.sig" && echo "usign";'
- 'ucert -A -c "$IMAGE.ucert" -x "$IMAGE.sig" && echo "ucert";'
- 'fwtool -S "$IMAGE.ucert" "$IMAGE" && echo "fwtool";'
- "done"
- ),
- ],
+ image_volume_mode="ignore",
)
- container.stop()
+ try:
+ container.start()
+ returncode, job.meta["stdout"], job.meta["stderr"] = run_cmd(
+ container,
+ [
+ "bash",
+ "-c",
+ (
+ "env;"
+ "for IMAGE in $IMAGES_TO_SIGN; do "
+ "touch ${IMAGE}.test;"
+ 'fwtool -t -s /dev/null "$IMAGE" && echo "sign entfern";'
+ 'cp "/builder/key-build.ucert" "$IMAGE.ucert" && echo "moved";'
+ 'usign -S -m "$IMAGE" -s "/builder/key-build" -x "$IMAGE.sig" && echo "usign";'
+ 'ucert -A -c "$IMAGE.ucert" -x "$IMAGE.sig" && echo "ucert";'
+ 'fwtool -S "$IMAGE.ucert" "$IMAGE" && echo "fwtool";'
+ "done"
+ ),
+ ],
+ )
+ finally:
+ _cleanup_container(container)
job.save_meta()
else:
log.warning("No build key found, skipping signing")
+ store = get_store()
+ store.upload_dir(bin_dir, request_hash)
+
+ if not isinstance(store, LocalStore):
+ shutil.rmtree(bin_dir, ignore_errors=True)
+
json_content.update({"manifest": manifest})
json_content.update(json_content["profiles"][build_request.profile])
json_content["id"] = build_request.profile
@@ -436,9 +492,10 @@ def _build(build_request: BuildRequest, job=None):
def build(build_request: BuildRequest, job=None):
try:
result = _build(build_request, job)
- except Exception:
+ except Exception as exc:
# Log all build errors, including internal server errors.
add_build_event("failures")
+ error_log.log_build_error(build_request, str(exc))
raise
else:
add_build_event("successes")
diff --git a/asu/build_request.py b/asu/build_request.py
index 1fc18437..1e5db7f7 100644
--- a/asu/build_request.py
+++ b/asu/build_request.py
@@ -1,4 +1,4 @@
-from typing import Annotated
+from typing import Annotated, Literal
from pydantic import BaseModel, Field
@@ -7,6 +7,8 @@
STRING_PATTERN = r"^[\w.,-]*$"
TARGET_PATTERN = r"^[\w]*/[\w]*$"
PKG_VERSION_PATTERN = r"^[\w+.,~-]*$"
+REPO_NAME_PATTERN = r"^[\w.-]+$"
+REPO_URL_PATTERN = r"^https?://\S+$"
class BuildRequest(BaseModel):
@@ -135,13 +137,27 @@ class BuildRequest(BaseModel):
),
] = None
repositories: Annotated[
- dict[str, str],
+ dict[
+ Annotated[str, Field(pattern=REPO_NAME_PATTERN)],
+ Annotated[str, Field(pattern=REPO_URL_PATTERN)],
+ ],
Field(
description="""
Additional repositories for user packages.
""".strip()
),
] = {}
+ repositories_mode: Annotated[
+ Literal["append", "replace"],
+ Field(
+ description="""
+ How to apply the requested repositories (only used when
+ `repositories` is non-empty):
+ - `append`: merge into existing ImageBuilder repositories
+ - `replace`: replace existing repositories entirely (default)
+ """.strip()
+ ),
+ ] = "replace"
repository_keys: Annotated[
list[str],
Field(
diff --git a/asu/config.py b/asu/config.py
index 92c57041..dd36be91 100644
--- a/asu/config.py
+++ b/asu/config.py
@@ -1,7 +1,7 @@
from pathlib import Path
from typing import Union
-from pydantic_settings import BaseSettings, SettingsConfigDict
+from pydantic_settings import BaseSettings, SettingsConfigDict, TomlConfigSettingsSource
# Adding a new entry to `package_changes_list` requires determining
# the revision at which the package appears, is removed or has been
@@ -61,9 +61,20 @@ def release(branch_off_rev, enabled=True):
class Settings(BaseSettings):
- model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8")
+ model_config = SettingsConfigDict(
+ toml_file="asu.toml",
+ extra="ignore",
+ )
- public_path: Path = Path.cwd() / "public"
+ @classmethod
+ def settings_customise_sources(cls, settings_cls, **kwargs):
+ return (
+ kwargs["env_settings"],
+ TomlConfigSettingsSource(settings_cls),
+ kwargs["init_settings"],
+ )
+
+ public_path: Path = Path("/public")
redis_url: str = "redis://localhost:6379"
upstream_url: str = "https://downloads.openwrt.org"
allow_defaults: bool = False
@@ -73,7 +84,6 @@ class Settings(BaseSettings):
max_defaults_length: int = 20480
repository_allow_list: list = []
base_container: str = "ghcr.io/openwrt/imagebuilder"
- container_socket_path: str = ""
container_identity: str = ""
branches: dict = {
"SNAPSHOT": {
@@ -89,12 +99,19 @@ class Settings(BaseSettings):
"22.03": release(19160),
"21.02": release(15812, enabled=True), # Enabled for now...
}
+ store_backend: str = "local" # "local" or "s3"
+ s3_endpoint: str = ""
+ s3_access_key: str = ""
+ s3_secret_key: str = ""
+ s3_bucket: str = "asu-store"
+ s3_region: str = ""
+ s3_public_url: str = "" # base URL for redirects, e.g. "https://cdn.example.com"
server_stats: str = ""
log_level: str = "INFO"
- squid_cache: bool = False
- build_ttl: str = "3h"
+ build_ttl: str = "7d"
+ build_ttl_unversioned: str = "24h"
build_defaults_ttl: str = "30m"
- build_failure_ttl: str = "10m"
+ build_failure_ttl: str = "1h"
max_pending_jobs: int = 200
job_timeout: str = "10m"
diff --git a/asu/main.py b/asu/main.py
index 8602bac4..ab410650 100644
--- a/asu/main.py
+++ b/asu/main.py
@@ -12,6 +12,7 @@
from asu import __version__
from asu.config import settings
from asu.routers import api, stats
+from asu.store import LocalStore, get_store
from asu.util import (
client_get,
get_branch,
@@ -48,16 +49,22 @@
@app.api_route("/store/{path:path}", methods=["GET", "HEAD"])
def store(path: str):
- path = (settings.public_path / "store" / path).resolve()
- if not path.is_file() or settings.public_path / "store" not in path.parents:
- raise HTTPException(status_code=404, detail="Not found")
-
- return FileResponse(
- path,
- media_type="application/octet-stream",
- filename=path.name, # adds Content-Disposition: attachment; filename="..."
- headers={"X-Content-Type-Options": "nosniff"},
- )
+ store_backend = get_store()
+
+ if isinstance(store_backend, LocalStore):
+ store_root = (settings.public_path / "store").resolve()
+ file_path = (store_root / path).resolve()
+ if not file_path.is_file() or store_root not in file_path.parents:
+ raise HTTPException(status_code=404, detail="Not found")
+
+ return FileResponse(
+ file_path,
+ media_type="application/octet-stream",
+ filename=file_path.name,
+ headers={"X-Content-Type-Options": "nosniff"},
+ )
+
+ return RedirectResponse(store_backend.get_url(path), status_code=302)
@app.get("/", response_class=HTMLResponse)
@@ -77,6 +84,17 @@ def index(request: Request):
)
+@app.get("/stats", response_class=HTMLResponse)
+def stats_page(request: Request):
+ return templates.TemplateResponse(
+ request=request,
+ name="stats.html",
+ context=dict(
+ branches=reversed(settings.branches),
+ ),
+ )
+
+
@app.get("/json/v1/{path:path}/index.json")
def json_v1_target_index(path: str) -> dict[str, Union[str, dict[str, str]]]:
base_path: str = f"{settings.upstream_url}/{path}"
diff --git a/asu/package_changes.py b/asu/package_changes.py
index cde549b1..905d7847 100644
--- a/asu/package_changes.py
+++ b/asu/package_changes.py
@@ -29,6 +29,13 @@ def _add_if_missing(package):
build_request.packages.append(package)
log.debug(f"Added {package} to packages")
+ def _remove_if_present(package):
+ if package in build_request.packages:
+ build_request.packages.remove(package)
+ log.debug(f"Removed {package} from packages")
+ return True
+ return False
+
# 23.05 specific changes
if build_request.version.startswith("23.05"):
# mediatek/mt7622 specific changes
@@ -55,8 +62,7 @@ def _add_if_missing(package):
if build_request.version.startswith("24.10"):
# `auc` no longer exists here
- if "auc" in build_request.packages:
- build_request.packages.remove("auc")
+ if _remove_if_present("auc"):
_add_if_missing("owut")
if build_request.profile in {"tplink_archer-c6-v2"}:
@@ -124,8 +130,10 @@ def _add_if_missing(package):
"solidrun_clearfog-pro",
}:
_add_if_missing("kmod-dsa-mv88e6xxx")
+ # Changes for https://github.com/openwrt/openwrt/commit/62bf0287326dcfab4596d5f4cad77cd9e7f8f03b
# Changes for https://github.com/openwrt/openwrt/commit/a18d95f35bd54ade908e8ec3158435859402552d
elif build_request.target == "lantiq/xrx200":
+ _add_if_missing("kmod-dsa-gswip")
if build_request.profile in {
"arcadyan_arv7519rw22",
"arcadyan_vgv7510kw22-brn",
@@ -162,8 +170,10 @@ def _add_if_missing(package):
}:
_add_if_missing("xrx200-rev1.1-phy11g-firmware")
_add_if_missing("xrx200-rev1.2-phy11g-firmware")
+ # Changes for https://github.com/openwrt/openwrt/commit/62bf0287326dcfab4596d5f4cad77cd9e7f8f03b
# Changes for https://github.com/openwrt/openwrt/commit/a18d95f35bd54ade908e8ec3158435859402552d
elif build_request.target == "lantiq/xrx200_legacy":
+ _add_if_missing("kmod-dsa-gswip")
if build_request.profile in {
"alphanetworks_asl56026",
"netgear_dm200",
@@ -197,6 +207,10 @@ def _add_if_missing(package):
}:
_add_if_missing("kmod-hci-uart")
+ if build_request.version == "SNAPSHOT": # Change "SNAPSHOT" to 26.x when needed.
+ # https://github.com/openwrt/openwrt/commit/5b61a50244ebc82096f5949de294ad69851e1fd6
+ _remove_if_present("kmod-nf-conntrack6")
+
# TODO: if we ever fully implement 'packages_versions', this needs rework
for version, packages in language_packs.items():
if build_request.version >= version: # Includes snapshots
@@ -205,3 +219,8 @@ def _add_if_missing(package):
if package.startswith(old):
lang = package.replace(old, "")
build_request.packages[i] = f"{new}{lang}"
+
+ # Clean out all the no longer present -en translations
+ for package in list(build_request.packages):
+ if package.startswith("luci-i18n-") and package.endswith("-en"):
+ _remove_if_present(package)
diff --git a/asu/repositories.py b/asu/repositories.py
new file mode 100644
index 00000000..7f1f63fd
--- /dev/null
+++ b/asu/repositories.py
@@ -0,0 +1,61 @@
+from urllib.parse import urlparse
+
+from asu.config import settings
+
+
+def is_repo_allowed(repo_url: str, allow_list: list[str]) -> bool:
+ """Check if a repository URL is allowed by the allow list.
+
+ Uses proper URL parsing to prevent subdomain and userinfo bypasses
+ that affect naive prefix matching.
+ """
+ if not allow_list:
+ return False
+ parsed = urlparse(repo_url)
+ for allowed in allow_list:
+ allowed_parsed = urlparse(allowed)
+ if (
+ parsed.scheme == allowed_parsed.scheme
+ and parsed.hostname == allowed_parsed.hostname
+ and parsed.path.startswith(allowed_parsed.path.rstrip("/") + "/")
+ ):
+ return True
+ return False
+
+
+def merge_repositories(
+ base_content: str, extra_repos: dict[str, str], apk_mode: bool
+) -> str:
+ """Append extra repositories to existing content.
+
+ For opkg (repositories.conf): entries are `src/gz <name> <url>`.
+ For apk (repositories): entries are plain URLs, one per line.
+ """
+ lines = [line for line in base_content.splitlines() if line.strip()]
+
+ for name, url in sorted(extra_repos.items()):
+ if apk_mode:
+ lines.append(url)
+ else:
+ lines.append(f"src/gz {name} {url}")
+
+ if not apk_mode:
+ if not any("src imagebuilder file:packages" in line for line in lines):
+ lines.append("src imagebuilder file:packages")
+ if not any("option check_signature" in line for line in lines):
+ lines.append("option check_signature")
+
+ return "\n".join(lines) + "\n"
+
+
+def validate_repos(repositories: dict[str, str]) -> dict[str, str]:
+ """Filter repositories against the allow list.
+
+ Repositories are already validated at the API level, but this
+ provides defense-in-depth for the build worker.
+ """
+ return {
+ name: url
+ for name, url in repositories.items()
+ if is_repo_allowed(url, settings.repository_allow_list)
+ }
diff --git a/asu/routers/api.py b/asu/routers/api.py
index b26c85de..b810176b 100644
--- a/asu/routers/api.py
+++ b/asu/routers/api.py
@@ -8,6 +8,7 @@
from asu.build import build
from asu.build_request import BuildRequest
from asu.config import settings
+from asu.repositories import is_repo_allowed
from asu.util import (
add_timestamp,
add_build_event,
@@ -88,6 +89,10 @@ def validate_request(
if build_request.defaults and not settings.allow_defaults:
return validation_failure("Handling `defaults` not enabled on server")
+ for url in build_request.repositories.values():
+ if not is_repo_allowed(url, settings.repository_allow_list):
+ return validation_failure(f"Repository not allowed: {url}")
+
if build_request.distro not in get_distros():
return validation_failure(f"Unsupported distro: {build_request.distro}")
@@ -217,9 +222,12 @@ def api_v1_build_post(
request_hash: str = get_request_hash(build_request)
job: Job = get_queue().fetch_job(request_hash)
status: int = 200
- result_ttl: str = settings.build_ttl
if build_request.defaults:
result_ttl = settings.build_defaults_ttl
+ elif build_request.packages_versions:
+ result_ttl = settings.build_ttl
+ else:
+ result_ttl = settings.build_ttl_unversioned
failure_ttl: str = settings.build_failure_ttl
if build_request.client:
diff --git a/asu/routers/stats.py b/asu/routers/stats.py
index c2185ffc..e77b17b0 100644
--- a/asu/routers/stats.py
+++ b/asu/routers/stats.py
@@ -1,8 +1,9 @@
from datetime import datetime as dt, timedelta, UTC
from fastapi import APIRouter
+from fastapi.responses import PlainTextResponse
-from asu.util import get_redis_ts
+from asu.util import error_log, get_queue, get_redis_ts
router = APIRouter()
@@ -26,6 +27,33 @@ def start_stop(duration, interval):
return start, stop, stamps, labels
+@router.get("/stats/summary")
+def get_stats_summary() -> dict:
+ """Return queue length and builds in last 24 hours."""
+ ts = get_redis_ts()
+ rc = ts.client
+
+ now = int(dt.now(UTC).timestamp() * 1000)
+ day_ago = now - DAY_MS
+
+ builds_24h = 0
+ key = "stats:build:successes"
+ if rc.exists(key):
+ result = ts.range(
+ key,
+ from_time=day_ago,
+ to_time=now,
+ aggregation_type="sum",
+ bucket_size_msec=DAY_MS,
+ )
+ builds_24h = int(sum(v for _, v in result))
+
+ return {
+ "queue_length": len(get_queue()),
+ "builds_24h": builds_24h,
+ }
+
+
@router.get("/builds-per-day")
def get_builds_per_day() -> dict:
"""
@@ -120,3 +148,55 @@ def sum_data(version, data):
for version in sorted(bucket)
],
}
+
+
+@router.get("/top-packages")
+def get_top_packages(branch: str = None, n: int = 30) -> dict:
+ """Return the most requested packages, optionally filtered by branch."""
+ n = min(n, 100)
+ interval = N_DAYS * DAY_MS
+
+ start, stop, stamps, labels = start_stop(N_DAYS, DAY_MS)
+
+ filters = ["stats=packages"]
+ if branch:
+ filters.append(f"branch={branch}")
+
+ result = get_redis_ts().mrange(
+ filters=filters,
+ with_labels=True,
+ from_time=start,
+ to_time=stop,
+ aggregation_type="sum",
+ bucket_size_msec=interval,
+ )
+
+ packages = {}
+ for row in result:
+ for data in row.values():
+ pkg = data[0].get("package", "unknown")
+ total = sum(v for _, v in data[1])
+ packages[pkg] = packages.get(pkg, 0) + total
+
+ sorted_packages = sorted(packages.items(), key=lambda x: x[1], reverse=True)[:n]
+
+ return {
+ "packages": [
+ {"name": name, "count": int(count)} for name, count in sorted_packages
+ ],
+ "branch": branch,
+ "days": N_DAYS,
+ }
+
+
+@router.get("/build-errors", response_class=PlainTextResponse)
+def get_build_errors(n: int = 100) -> str:
+ """Return a summary of recent build errors.
+
+ Args:
+ n: Maximum number of entries to return (default 100, max 500)
+
+ Returns:
+ Plain text summary of build errors
+ """
+ return error_log.get_summary(min(n, 500))
diff --git a/asu/static/style.css b/asu/static/style.css
index 01ff0689..b2b4aaa3 100644
--- a/asu/static/style.css
+++ b/asu/static/style.css
@@ -117,7 +117,7 @@ header > div {
color: #fff;
}
-header > div > img {
+header > div img {
height: 3em;
padding: 0.75em;
}
@@ -338,3 +338,134 @@ td {
margin: 2px;
width: 1fr;
}
+
+details {
+ margin-top: 1.5em;
+ border: 1px solid #ddd;
+ border-radius: 4px;
+ padding: 0.5em 1em;
+ background: #fafafa;
+}
+
+details summary {
+ cursor: pointer;
+ user-select: none;
+}
+
+details summary h2 {
+ display: inline;
+ margin: 0;
+ font-size: 1.2em;
+}
+
+details[open] summary {
+ margin-bottom: 0.5em;
+}
+
+.error-log {
+ background: #1a1a1a;
+ color: #c0c0c0;
+ padding: 1em;
+ border-radius: 4px;
+ font-family: monospace;
+ font-size: 0.8em;
+ line-height: 1.5;
+ max-width: 800px;
+ max-height: 400px;
+ overflow: auto;
+ white-space: pre-wrap;
+ word-wrap: break-word;
+}
+
+/* Dark theme */
+@media (prefers-color-scheme: dark) {
+ body {
+ background-color: #0d1117;
+ color: #c9d1d9;
+ }
+
+ a {
+ color: #58a6ff;
+ }
+
+ header {
+ background-color: #001a2e;
+ box-shadow: 0 0.125em 0.25em rgba(0, 0, 0, 0.4);
+ }
+
+ .container {
+ color: #c9d1d9;
+ }
+
+ h1, h2, h3 {
+ color: #e6edf3;
+ }
+
+ code {
+ background: #161b22;
+ color: #79c0ff;
+ padding: 0.15em 0.4em;
+ border-radius: 3px;
+ }
+
+ #versions {
+ background-color: #161b22;
+ color: #c9d1d9;
+ border-color: #30363d;
+ }
+
+ select {
+ background-color: #161b22;
+ color: #c9d1d9;
+ border: 1px solid #30363d;
+ }
+
+ .autocomplete > input {
+ background-color: #161b22;
+ color: #c9d1d9;
+ border-color: #30363d;
+ }
+
+ .autocomplete-items div {
+ background-color: #161b22;
+ border-bottom-color: #30363d;
+ }
+
+ .autocomplete-items div:hover {
+ background-color: #1c2128;
+ }
+
+ details {
+ background: #161b22;
+ border-color: #30363d;
+ }
+
+ table {
+ box-shadow: 0 0 0.5em rgba(0, 0, 0, 0.5);
+ }
+
+ th {
+ background: #001a2e;
+ }
+
+ tr {
+ background: #0d1117;
+ }
+
+ tr:hover {
+ background: #161b22;
+ }
+
+ tr:nth-child(2n + 1) {
+ background: #111820;
+ }
+
+ th, td {
+ border-bottom-color: #21262d;
+ }
+
+ .flex-item {
+ background-color: #161b22;
+ border-color: #30363d;
+ }
+}
diff --git a/asu/store.py b/asu/store.py
new file mode 100644
index 00000000..54d82996
--- /dev/null
+++ b/asu/store.py
@@ -0,0 +1,101 @@
+import logging
+import mimetypes
+from pathlib import Path
+from typing import Protocol
+
+import boto3
+
+from asu.config import settings
+
+log = logging.getLogger("rq.worker")
+
+
+class Store(Protocol):
+ def upload_file(self, local_path: Path, key: str) -> None: ...
+ def upload_dir(self, local_dir: Path, prefix: str) -> None: ...
+ def get_url(self, key: str) -> str: ...
+ def exists(self, key: str) -> bool: ...
+
+
+class LocalStore:
+ def __init__(self):
+ self.base = settings.public_path / "store"
+ self.base.mkdir(parents=True, exist_ok=True)
+
+ def upload_file(self, local_path: Path, key: str) -> None:
+ dest = self.base / key
+ dest.parent.mkdir(parents=True, exist_ok=True)
+ if local_path.resolve() != dest.resolve():
+ import shutil
+
+ shutil.copy2(local_path, dest)
+
+ def upload_dir(self, local_dir: Path, prefix: str) -> None:
+ for path in local_dir.rglob("*"):
+ if path.is_file():
+ key = f"{prefix}/{path.relative_to(local_dir)}"
+ self.upload_file(path, key)
+
+ def get_url(self, key: str) -> str:
+ return f"/store/{key}"
+
+ def exists(self, key: str) -> bool:
+ return (self.base / key).is_file()
+
+ def get_local_path(self, key: str) -> Path:
+ return self.base / key
+
+
+class S3Store:
+ def __init__(self):
+ kwargs = {"service_name": "s3"}
+ if settings.s3_endpoint:
+ kwargs["endpoint_url"] = settings.s3_endpoint
+ if settings.s3_access_key:
+ kwargs["aws_access_key_id"] = settings.s3_access_key
+ kwargs["aws_secret_access_key"] = settings.s3_secret_key
+ if settings.s3_region:
+ kwargs["region_name"] = settings.s3_region
+
+ self._client = boto3.client(**kwargs)
+ self._bucket = settings.s3_bucket
+
+ def upload_file(self, local_path: Path, key: str) -> None:
+ content_type = (
+ mimetypes.guess_type(str(local_path))[0] or "application/octet-stream"
+ )
+ self._client.upload_file(
+ str(local_path),
+ self._bucket,
+ f"store/{key}",
+ ExtraArgs={"ContentType": content_type},
+ )
+ log.debug(f"Uploaded {local_path} to s3://{self._bucket}/store/{key}")
+
+ def upload_dir(self, local_dir: Path, prefix: str) -> None:
+ for path in local_dir.rglob("*"):
+ if path.is_file():
+ key = f"{prefix}/{path.relative_to(local_dir)}"
+ self.upload_file(path, key)
+
+ def get_url(self, key: str) -> str:
+ if settings.s3_public_url:
+ return f"{settings.s3_public_url.rstrip('/')}/store/{key}"
+ return self._client.generate_presigned_url(
+ "get_object",
+ Params={"Bucket": self._bucket, "Key": f"store/{key}"},
+ ExpiresIn=3600,
+ )
+
+ def exists(self, key: str) -> bool:
+ try:
+ self._client.head_object(Bucket=self._bucket, Key=f"store/{key}")
+ return True
+ except self._client.exceptions.ClientError:
+ return False
+
+
+def get_store() -> Store:
+ if settings.store_backend == "s3":
+ return S3Store()
+ return LocalStore()
diff --git a/asu/templates/overview.html b/asu/templates/overview.html
index 2ac0843c..0c227d1c 100644
--- a/asu/templates/overview.html
+++ b/asu/templates/overview.html
@@ -72,21 +72,11 @@ About the Sysupgrade Server
{% if server_stats %}
-
-
Builds per Day (last 30 days)
-
-
-
-
Weekly build counts by branch (last 6 months)
-
-
-
-
+
+ Queue length: ... |
+ Builds (24h): ... |
+ Detailed statistics
+
{% endif %}
@@ -110,60 +100,45 @@ Weekly build counts by branch (last 6 months)
+
+
+ Build Errors
+ Recent build failures to help diagnose upstream imagebuilder and package issues.
+ Click to load...
+
-{% if server_stats %}
-
- loadVersions();
+{% if server_stats %}
+
{% endif %}
diff --git a/asu/templates/stats.html b/asu/templates/stats.html
new file mode 100644
index 00000000..f9916d69
--- /dev/null
+++ b/asu/templates/stats.html
@@ -0,0 +1,117 @@
+
+
+
+
+ OpenWrt Sysupgrade Server - Statistics
+
+
+
+
+
+

+
+
+
+
+
+
+
+
Builds per Day (last 30 days)
+
+
+
+
Weekly build counts by branch (last 6 months)
+
+
+
+
+
+
Top Packages (last 30 days)
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/asu/util.py b/asu/util.py
index 7f8e8f68..e961a640 100644
--- a/asu/util.py
+++ b/asu/util.py
@@ -4,9 +4,10 @@
import json
import logging
import struct
+from datetime import datetime, UTC
from os import getgid, getuid
from pathlib import Path
-from re import match
+from re import match, findall, sub, DOTALL, MULTILINE
from tarfile import TarFile
from io import BytesIO
from typing import Optional
@@ -179,6 +180,7 @@ def get_request_hash(build_request: BuildRequest) -> str:
str(build_request.rootfs_size_mb),
str(build_request.repository_keys),
str(build_request.repositories),
+ build_request.repositories_mode,
]
),
)
@@ -264,9 +266,16 @@ def get_container_version_tag(input_version: str) -> str:
return version
+def _find_podman_socket() -> str:
+ for path in ["./podman.sock", "/var/podman.sock"]:
+ if Path(path).exists():
+ return f"unix://{Path(path).resolve()}"
+ return "unix:///var/podman.sock"
+
+
def get_podman() -> PodmanClient:
return PodmanClient(
- base_url=f"unix://{settings.container_socket_path}",
+ base_url=_find_podman_socket(),
identity=settings.container_identity,
)
@@ -316,7 +325,7 @@ def run_cmd(
member.uid = uuid
member.gid = ugid
member.mode = 0o755 if member.isdir() else 0o644
- tar_file.extractall(copy[1])
+ tar_file.extractall(copy[1], filter="data")
return returncode, stdout, stderr
@@ -366,6 +375,82 @@ def check_manifest(
f"Impossible package selection: {package} version not as requested: "
f"{version} vs. {manifest[package]}"
)
+ return None
+
+
+def check_package_errors(stderr: str) -> str:
+ """
+ Note that this docstring is used as the test case, see tests/test_util.py
+
+ opkg error formats:
+
+ Case opkg-1
+ Collected errors:
+ * opkg_install_cmd: Cannot install package OPKG-MISSING.
+
+ Case opkg-2
+ Collected errors:
+ * check_conflicts_for: The following packages conflict with OPKG-CONFLICT-1:
+ * check_conflicts_for: OPKG-CONFLICT-2 *
+ * opkg_install_cmd: Cannot install package OPKG-CONFLICT-1.
+
+ Case opkg-3
+ Collected errors:
+ * check_data_file_clashes: Package OPKG-CONFLICT-3 wants to install file /some/file
+ But that file is already provided by package * OPKG-CONFLICT-4
+ * opkg_install_cmd: Cannot install package OPKG-CONFLICT-4.
+
+ apk error formats:
+
+ Case apk-1
+ ERROR: unable to select packages:
+ APK-MISSING (no such package):
+ required by: world[APK-MISSING]
+
+ Case apk-2
+ ERROR: unable to select packages:
+ APK-CONFLICT-1:
+ conflicts: APK-CONFLICT-2[nftables=1.1.6-r1]
+ satisfies: world[nftables-json]
+ blah[nftables]
+ APK-CONFLICT-2:
+ conflicts: APK-CONFLICT-1[nftables=1.1.6-r1]
+ satisfies: world[nftables-nojson]
+
+ Case apk-3
+ ERROR: APK-CONFLICT-3: trying to overwrite somefile owned by APK-CONFLICT-4.
+ """
+
+ # Grab the missing ones first, as that's easy.
+ missing = set(
+ findall(r"Cannot install package ([^ ]+)\.", stderr) # Case opkg-1
+ + findall(r" ([^ ]+) \(no such package\)", stderr) # Case apk-1
+ )
+
+ # Conflicts are grouped in apk, so need to be flattened.
+ # Case apk-2
+ conflicts = findall(r"\n +([^:\n]+):\n +conflicts: ([^[]+)", stderr, DOTALL)
+ conflicts = set(item for pair in conflicts for item in pair)
+
+ # Case opkg-2, opkg-3, apk-3
+ conflicts.update(
+ findall(r"check_data_file_clashes: Package ([^ ]+) wants to", stderr)
+ + findall(r"is already provided by package \* ([^ ]+)$", stderr, MULTILINE)
+ + findall(r"\* check_conflicts_for:.+ ([^ ]+)(?: \*|:)$", stderr, MULTILINE)
+ + findall(r"ERROR: ([^ ]+): trying to overwrite", stderr)
+ + findall(r"trying to overwrite .* owned by ([^ ]+)\.", stderr)
+ )
+
+ # opkg reports missing and conflicts with same message, so clean that up.
+ # If it's conflicting, remove it from missing...
+ missing.difference_update(conflicts)
+
+ pkg_list = ":" if missing or conflicts else ""
+ if missing:
+ pkg_list += " missing (" + ", ".join(sorted(missing)) + ")"
+ if conflicts:
+ pkg_list += " conflicts (" + ", ".join(sorted(conflicts)) + ")"
+ return f"Impossible package selection{pkg_list}"
def parse_packages_file(url: str) -> dict[str, str]:
@@ -575,3 +660,65 @@ def reload_profiles(app: FastAPI, version: str, target: str) -> bool:
}
return True
+
+
+class ErrorLog:
+ """Redis-backed error log for build failures.
+
+ Stores errors in a Redis list for access from any worker or server.
+ Entries are capped at MAX_ENTRIES to bound memory usage.
+
+ Log format is intentionally minimal and anonymized to protect user privacy:
+ timestamp version:target:profile error_message
+ """
+
+ REDIS_KEY = "build:errors"
+ MAX_ENTRIES = 5000
+
+ def log_build_error(self, build_request: BuildRequest, error_message: str) -> None:
+ timestamp = datetime.now(UTC).strftime("%Y-%m-%d %H:%M:%S")
+ clean_error = sub(r"[0-9a-f]{64}", r"[job-id]", error_message)
+ clean_error = " ".join(clean_error.split())[:200]
+ profile_info = (
+ f"{build_request.version}:{build_request.target}:{build_request.profile}"
+ )
+ entry = f"{timestamp} {profile_info} {clean_error}"
+
+ try:
+ rc = get_redis_client()
+ rc.lpush(self.REDIS_KEY, entry)
+ rc.ltrim(self.REDIS_KEY, 0, self.MAX_ENTRIES - 1)
+ except Exception:
+ log.warning(f"Failed to log build error to Redis: {entry}")
+
+ def get_entries(self, n_entries: int = 100) -> list[str]:
+ try:
+ rc = get_redis_client()
+ entries = rc.lrange(self.REDIS_KEY, 0, n_entries - 1)
+ return [e.decode() if isinstance(e, bytes) else e for e in entries]
+ except Exception:
+ return []
+
+ def get_summary(self, n_entries: int = 100) -> str:
+ entries = self.get_entries(n_entries)
+ if not entries:
+ return "No build errors recorded."
+
+ first_time = entries[-1].split(" ", 2)[0:2]
+ last_time = entries[0].split(" ", 2)[0:2]
+ first_ts = " ".join(first_time) if len(first_time) == 2 else "unknown"
+ last_ts = " ".join(last_time) if len(last_time) == 2 else "unknown"
+
+ lines = [
+ f"Build Errors: {len(entries)} entries",
+ f"Time range: {first_ts} to {last_ts}",
+ "",
+ "Recent errors:",
+ "-" * 60,
+ ]
+ lines.extend(entries)
+ return "\n".join(lines)
+
+
+# Module-level singleton instance
+error_log = ErrorLog()
diff --git a/misc/nginx-cache.conf b/misc/nginx-cache.conf
new file mode 100644
index 00000000..31186d7e
--- /dev/null
+++ b/misc/nginx-cache.conf
@@ -0,0 +1,47 @@
+proxy_cache_path /var/cache/nginx levels=1:2 keys_zone=packages:10m
+ max_size=50g inactive=7d use_temp_path=off;
+
+# Caching reverse proxy for OpenWrt package downloads.
+#
+# Accepts requests like:
+# http://cache/downloads.openwrt.org/releases/25.12.2/...
+# http://cache/raw.githubusercontent.com/libremesh/...
+#
+# Fetches from the upstream host via HTTPS, caches the response.
+#
+# Usage: set upstream_url = "http://cache" in asu.toml, then
+# the ImageBuilder repositories file URLs are rewritten from
+# https://downloads.openwrt.org/... to http://cache/downloads.openwrt.org/...
+
+server {
+ listen 80;
+ server_name _;
+
+ resolver 1.1.1.1 8.8.8.8 valid=300s;
+ resolver_timeout 5s;
+
+ location / {
+ # Extract host and path from the URI:
+ # /downloads.openwrt.org/releases/... -> host=downloads.openwrt.org path=/releases/...
+ set $upstream_host $uri;
+ if ($upstream_host ~ "^/([^/]+)(.*)$") {
+ set $upstream_host $1;
+ set $upstream_path $2;
+ }
+
+ proxy_pass https://$upstream_host$upstream_path$is_args$args;
+ proxy_ssl_server_name on;
+
+ proxy_set_header Host $upstream_host;
+ proxy_set_header Accept-Encoding "";
+
+ proxy_cache packages;
+ proxy_cache_valid 200 7d;
+ proxy_cache_valid 404 1m;
+ proxy_cache_use_stale error timeout updating;
+ proxy_cache_lock on;
+ proxy_cache_key $upstream_host$upstream_path;
+
+ add_header X-Cache-Status $upstream_cache_status;
+ }
+}
diff --git a/podman-compose.cache.yml b/podman-compose.cache.yml
new file mode 100644
index 00000000..c8fa56e7
--- /dev/null
+++ b/podman-compose.cache.yml
@@ -0,0 +1,9 @@
+services:
+ cache:
+ image: "docker.io/library/nginx:alpine"
+ restart: unless-stopped
+ volumes:
+ - ./misc/nginx-cache.conf:/etc/nginx/conf.d/default.conf:ro
+ - ./cache-data:/var/cache/nginx:rw
+ networks:
+ - asu-build
diff --git a/podman-compose.workers.yml b/podman-compose.workers.yml
new file mode 100644
index 00000000..18fb6d56
--- /dev/null
+++ b/podman-compose.workers.yml
@@ -0,0 +1,33 @@
+# Decentralized worker setup with S3 storage.
+#
+# Workers upload built images to S3 instead of requiring a shared filesystem
+# with the server. This allows workers to run on separate machines.
+#
+# Usage:
+# cp env.workers .env
+# podman-compose -f podman-compose.workers.yml up -d
+#
+# Required .env variables:
+# REDIS_URL - Redis connection string (must be reachable by workers)
+# STORE_BACKEND=s3
+# S3_ENDPOINT - S3-compatible endpoint URL
+# S3_ACCESS_KEY - S3 access key
+# S3_SECRET_KEY - S3 secret key
+# S3_BUCKET - S3 bucket name
+# S3_REGION - S3 region (use "auto" for Cloudflare R2)
+# S3_PUBLIC_URL - Public URL for serving images (optional)
+
+services:
+ worker:
+ image: "docker.io/openwrt/asu:latest"
+ build:
+ context: .
+ dockerfile: Containerfile
+ restart: unless-stopped
+ command: uv run rqworker --logging_level INFO
+ volumes:
+ - ./asu.toml:/app/asu.toml:ro
+ - ./podman.sock:/var/podman.sock:rw
+ - ./public:/public:rw
+ deploy:
+ replicas: ${WORKER_REPLICAS:-1}
diff --git a/podman-compose.yml b/podman-compose.yml
index 113ce173..c621326a 100644
--- a/podman-compose.yml
+++ b/podman-compose.yml
@@ -6,11 +6,11 @@ services:
dockerfile: Containerfile
restart: unless-stopped
command: uv run uvicorn --host 0.0.0.0 asu.main:app
- env_file: .env
environment:
REDIS_URL: "redis://redis:6379/0"
volumes:
- - $PUBLIC_PATH/store:$PUBLIC_PATH/store:ro
+ - ./asu.toml:/app/asu.toml:ro
+ - ./public/store:/public/store:ro
ports:
- "127.0.0.1:8000:8000"
depends_on:
@@ -23,12 +23,12 @@ services:
dockerfile: Containerfile
restart: unless-stopped
command: uv run rqworker --logging_level INFO
- env_file: .env
environment:
REDIS_URL: "redis://redis:6379/0"
volumes:
- - $PUBLIC_PATH:$PUBLIC_PATH:rw
- - $CONTAINER_SOCKET_PATH:$CONTAINER_SOCKET_PATH:rw
+ - ./asu.toml:/app/asu.toml:ro
+ - ./public:/public:rw
+ - ./podman.sock:/var/podman.sock:rw
depends_on:
- redis
@@ -37,45 +37,13 @@ services:
restart: unless-stopped
volumes:
- ./redis-data:/data/:rw
- ports:
- - "127.0.0.1:6379:6379"
- # Optionally add more workers
- # worker2:
- # image: "docker.io/openwrt/asu:latest"
- # restart: unless-stopped
- # command: rqworker --logging_level INFO
- # env_file: .env
- # environment:
- # REDIS_URL: "redis://redis:6379/0"
- # volumes:
- # - $PUBLIC_PATH:$PUBLIC_PATH:rw
- # - $CONTAINER_SOCKET_PATH:$CONTAINER_SOCKET_PATH:rw
- # depends_on:
- # - redis
- #
- # Optionally add a Squid cache container when using `SQUID_CACHE`
- # squid:
- # image: "docker.io/ubuntu/squid:latest"
- # restart: unless-stopped
- # ports:
- # - "127.0.0.1:3128:3128"
- # volumes:
- # - ".squid.conf:/etc/squid/conf.d/snippet.conf:ro"
- # - "./squid-data/:/var/spool/squid/:rw"
+ # Optional: caching proxy for upstream package downloads.
+ # Enable with: podman-compose -f podman-compose.yml -f podman-compose.cache.yml up -d
+ # and set upstream_url = "http://cache" in asu.toml.
- # Optionally add a Grafana container when using `SERVER_STATS`
- # grafana:
- # image: docker.io/grafana/grafana-oss
- # container_name: grafana
- # restart: unless-stopped
- # ports:
- # - "127.0.0.1:3000:3000"
- # depends_on:
- # - redis
- # environment:
- # GF_SERVER_DOMAIN: sysupgrade.openwrt.org
- # GF_SERVER_ROOT_URL: https://sysupgrade.openwrt.org/stats/
- # GF_SERVER_SERVE_FROM_SUB_PATH: "true"
- # volumes:
- # - ./grafana-data:/var/lib/grafana
+networks:
+ # Isolated network for build containers. Has internet access but
+ # cannot reach Redis or other compose services.
+ asu-build:
+ external: true
diff --git a/pyproject.toml b/pyproject.toml
index 4494809b..f8f3b96f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,3 +1,10 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build.targets.wheel]
+packages = ["asu"]
+
[project]
name = "asu"
version = "0.0.0"
@@ -17,14 +24,16 @@ dependencies = [
"uvicorn>=0.37.0",
"fastapi-cache2>=0.2.2",
"httpx>=0.28.1",
+ "podman-compose>=1.5.0",
+ "boto3>=1.42.88",
]
[project.optional-dependencies]
dev = [
- "pytest>=8.4.2",
+ "pytest>=9.0.3",
"ruff>=0.14.9",
"coverage>=7.13.0",
- "isort>=7.0.0",
+ "isort>=8.0.1",
"fakeredis>=2.32.0",
"pytest-httpserver>=1.1.3",
]
diff --git a/tests/ci/asu-cache.toml b/tests/ci/asu-cache.toml
new file mode 100644
index 00000000..8ae9d313
--- /dev/null
+++ b/tests/ci/asu-cache.toml
@@ -0,0 +1,2 @@
+upstream_url = "http://cache"
+log_level = "DEBUG"
diff --git a/tests/ci/openwrt-one-25.12.2.json b/tests/ci/openwrt-one-25.12.2.json
new file mode 100644
index 00000000..8140eabe
--- /dev/null
+++ b/tests/ci/openwrt-one-25.12.2.json
@@ -0,0 +1,49 @@
+{
+ "profile": "openwrt_one",
+ "target": "mediatek/filogic",
+ "packages": [
+ "base-files",
+ "ca-bundle",
+ "dnsmasq",
+ "dropbear",
+ "firewall4",
+ "fitblk",
+ "fstools",
+ "kmod-crypto-hw-safexcel",
+ "kmod-gpio-button-hotplug",
+ "kmod-leds-gpio",
+ "kmod-nft-offload",
+ "kmod-phy-aquantia",
+ "libc",
+ "libgcc",
+ "libustream-mbedtls",
+ "logd",
+ "mtd",
+ "netifd",
+ "nftables",
+ "odhcp6c",
+ "odhcpd-ipv6only",
+ "apk-mbedtls",
+ "ppp",
+ "ppp-mod-pppoe",
+ "procd-ujail",
+ "uboot-envtools",
+ "uci",
+ "uclient-fetch",
+ "urandom-seed",
+ "urngd",
+ "wpad-basic-mbedtls",
+ "kmod-mt7915e",
+ "kmod-mt7981-firmware",
+ "mt7981-wo-firmware",
+ "kmod-rtc-pcf8563",
+ "kmod-usb3",
+ "kmod-nvme",
+ "kmod-phy-airoha-en8811h",
+ "luci"
+ ],
+ "defaults": "",
+ "version": "25.12.2",
+ "diff_packages": true,
+ "client": "ci"
+}
diff --git a/tests/conftest.py b/tests/conftest.py
index df4ff57d..e82fdf89 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,5 +1,4 @@
-import shutil
-import tempfile
+import logging
from pathlib import Path
import pytest
@@ -9,6 +8,8 @@
from asu.config import settings
+logger = logging.getLogger(__name__)
+
def redis_load_mock_data(redis):
return
@@ -83,10 +84,8 @@ def pytest_collection_modifyitems(config, items):
@pytest.fixture
-def test_path():
- test_path = tempfile.mkdtemp(dir=Path.cwd() / "tests")
- yield test_path
- shutil.rmtree(test_path)
+def test_path(tmp_path):
+ return str(tmp_path)
@pytest.fixture
@@ -97,7 +96,11 @@ def mocked_redis_client(*args, **kwargs):
def mocked_redis_queue():
return Queue(connection=redis_server, is_async=settings.async_queue)
+ saved_upstream_url = settings.upstream_url
+ saved_repository_allow_list = settings.repository_allow_list
+
settings.public_path = Path(test_path) / "public"
+ settings.store_backend = "local"
settings.async_queue = False
settings.upstream_url = "http://localhost:8123"
settings.server_stats = "stats"
@@ -116,10 +119,23 @@ def mocked_redis_queue():
yield real_app
+ settings.upstream_url = saved_upstream_url
+ settings.repository_allow_list = saved_repository_allow_list
+
+
+class DebugTestClient(TestClient):
+ """TestClient that logs response body for error responses."""
+
+ def request(self, *args, **kwargs):
+ response = super().request(*args, **kwargs)
+ if response.status_code >= 400:
+ logger.error("Response %d: %s", response.status_code, response.text)
+ return response
+
@pytest.fixture
def client(app, upstream):
- yield TestClient(app)
+ yield DebugTestClient(app, raise_server_exceptions=True)
@pytest.fixture(scope="session")
diff --git a/tests/test_api.py b/tests/test_api.py
index fb62343c..f6c2fe0e 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -51,6 +51,7 @@ def test_api_build_inputs(client):
assert request["client"] is None
assert request["rootfs_size_mb"] is None
assert request["diff_packages"] is False
+ assert request["repositories_mode"] == "replace"
def test_api_build_version_code(client):
@@ -213,8 +214,8 @@ def test_api_build_request_hash(client):
profile="testprofile",
)
- case12hash = "8d8e0aa2fd95bb75dba4aff4279dd6f976a40ad17300927d54b8a9a9b0576306"
- case34hash = "6b1645013216da39ee09deae75b87b0636f3c50648b037750b0a80448ce5c7ca"
+ case12hash = "1c4a79c6b711a576996cf9a5e7046a4581008c4466574096266f0e6ea4208fbc"
+ case34hash = "c5a849e05b60611b465042594fc3489a44f7695c3d09e36433a577ee772ad7b7"
# Case 1 - diff_packages=True, first package ordering
json["diff_packages"] = True
@@ -435,21 +436,23 @@ def test_api_build_empty_packages_list(client):
@pytest.mark.slow
-def test_api_build_conflicting_packages(client):
- """Use real build to get proper context for conflicts."""
+def test_api_build_missing_package(app):
+ """Use real build to get proper error for missing packages."""
+ settings.upstream_url = "https://downloads.openwrt.org"
+ client = TestClient(app)
response = client.post(
"/api/v1/build",
json=dict(
- version="23.05.5",
+ version="25.12.2",
target="ath79/generic",
profile="8dev_carambola2",
- packages=["dnsmasq", "dnsmasq-full"],
+ packages=["this-package-does-not-exist"],
),
)
assert response.status_code == 500
data = response.json()
- assert data["detail"] == "Error: Impossible package selection"
+ assert "this-package-does-not-exist" in data["detail"]
def test_api_build_without_packages_list(client):
@@ -497,12 +500,13 @@ def test_api_build_empty_request(client):
@pytest.mark.slow
def test_api_build_real_x86(app):
+ settings.upstream_url = "https://downloads.openwrt.org"
client = TestClient(app)
response = client.post(
"/api/v1/build",
json=dict(
target="x86/64",
- version="23.05.5",
+ version="25.12.2",
packages=["tmux", "vim"],
profile="some_random_cpu_which_doesnt_exists_as_profile",
),
@@ -516,7 +520,7 @@ def test_api_build_real_x86(app):
"/api/v1/build",
json=dict(
target="x86/64",
- version="23.05.5",
+ version="25.12.2",
packages=["tmux", "vim"],
profile="some_random_cpu_which_doesnt_exists_as_profile",
filesystem="ext4",
@@ -530,12 +534,13 @@ def test_api_build_real_x86(app):
@pytest.mark.slow
def test_api_build_real_ath79(app):
+ settings.upstream_url = "https://downloads.openwrt.org"
client = TestClient(app)
response = client.post(
"/api/v1/build",
json=dict(
target="ath79/generic",
- version="23.05.5",
+ version="25.12.2",
packages=["tmux", "vim"],
profile="8dev,carambola2", # Test unsanitized profile.
),
@@ -549,7 +554,7 @@ def test_api_build_real_ath79(app):
"/api/v1/build",
json=dict(
target="ath79/generic",
- version="23.05.5",
+ version="25.12.2",
packages=["tmux", "vim"],
profile="8dev_carambola2",
filesystem="squashfs",
@@ -641,7 +646,7 @@ def test_api_build_bad_version(client):
response = client.post(
"/api/v1/build",
json=dict(
- version="19.07.2",
+ version="99.99.99",
target="testtarget/testsubtarget",
profile="testprofile",
packages=["test1", "test2"],
@@ -649,7 +654,7 @@ def test_api_build_bad_version(client):
)
assert response.status_code == 400
data = response.json()
- assert data["detail"] == "Unsupported version: 19.07.2"
+ assert data["detail"] == "Unsupported branch: 99.99.99"
def test_api_build_bad_profile(client):
@@ -719,7 +724,7 @@ def test_api_build_defaults_filled_allowed(app):
data = response.json()
assert (
data["request_hash"]
- == "9c8d0cd7d9ec208a233b954edb20c3c20b5c11103bb7f5f1ebface565f8c6720"
+ == "ba50558496f8fead41e8d5bc72afd1ad7d27bc053afb550a8bf6ee3bbcc64952"
)
@@ -758,3 +763,129 @@ def test_api_stats(client):
assert response.status_code == 200
data = response.json()
assert data["queue_length"] == 0
+
+
+@pytest.mark.slow
+def test_api_build_libremesh_apk(app):
+ """Build with LibreMesh apk repository (25.12.2, x86/64)."""
+ settings.upstream_url = "https://downloads.openwrt.org"
+ settings.repository_allow_list = ["https://raw.githubusercontent.com/libremesh/"]
+ client = TestClient(app)
+ response = client.post(
+ "/api/v1/build",
+ json=dict(
+ target="x86/64",
+ version="25.12.2",
+ profile="generic",
+ packages=["lime-system"],
+ repositories={
+ "libremesh": "https://raw.githubusercontent.com/libremesh/lime-feed/gh-pages/master/openwrt-25.12/x86_64/packages.adb",
+ },
+ repository_keys=[
+ "-----BEGIN PUBLIC KEY-----\n"
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEdFJZ2qVti49Ol8LJZYuxgOCLowBS\n"
+ "8bI86a7zqhSbs5yon3JON7Yee7CQOgqwPOX5eMALGOu8iFGAqIRx5YjfYA==\n"
+ "-----END PUBLIC KEY-----\n"
+ ],
+ repositories_mode="append",
+ ),
+ )
+
+ data = response.json()
+ assert response.status_code == 200, data.get("stderr", data.get("detail", ""))[
+ :2000
+ ]
+ assert "lime-system" in data["manifest"]
+
+
+@pytest.mark.slow
+def test_api_build_libremesh_opkg(app):
+ """Build with LibreMesh opkg repository (23.05.5, ath79)."""
+ settings.upstream_url = "https://downloads.openwrt.org"
+ settings.repository_allow_list = ["https://raw.githubusercontent.com/libremesh/"]
+ client = TestClient(app)
+ response = client.post(
+ "/api/v1/build",
+ json=dict(
+ target="ath79/generic",
+ version="23.05.5",
+ profile="8dev_carambola2",
+ packages=["lime-system"],
+ repositories={
+ "libremesh": "https://raw.githubusercontent.com/libremesh/lime-feed/gh-pages/2024.1",
+ },
+ repository_keys=[
+ "RWSnGzyChavSiyQ+vLk3x7F0NqcLa4kKyXCdriThMhO78ldHgxGljM/8",
+ ],
+ repositories_mode="append",
+ ),
+ )
+
+ data = response.json()
+ assert response.status_code == 200, data.get("stderr", data.get("detail", ""))[
+ :2000
+ ]
+ assert "lime-system" in data["manifest"]
+
+
+@pytest.mark.slow
+def test_api_build_freifunk_apk(app):
+ """Build with Freifunk Weimarnetz apk repository (25.12.2, ath79)."""
+ settings.upstream_url = "https://downloads.openwrt.org"
+ settings.repository_allow_list = ["https://buildbot.weimarnetz.de/"]
+ client = TestClient(app)
+ response = client.post(
+ "/api/v1/build",
+ json=dict(
+ target="ath79/generic",
+ version="25.12.2",
+ profile="8dev_carambola2",
+ packages=["weimarnetz-feed-apk"],
+ repositories={
+ "weimarnetz": "https://buildbot.weimarnetz.de/builds/brauhaus/packages/stable/25.12/ath79/generic/weimarnetz_packages/packages.adb",
+ },
+ repository_keys=[
+ "-----BEGIN PUBLIC KEY-----\n"
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzZWFJBl7JU/XlRXaU4duMoqnu/L1\n"
+ "aPZGMO349gtL2Wt3eo8fC2qcbnXV2FdcPXaySeY4RmbrlG1ehDonJfW7Jg==\n"
+ "-----END PUBLIC KEY-----\n"
+ ],
+ repositories_mode="append",
+ ),
+ )
+
+ data = response.json()
+ assert response.status_code == 200, data.get("stderr", data.get("detail", ""))[
+ :2000
+ ]
+ assert "weimarnetz-feed-apk" in data["manifest"]
+
+
+@pytest.mark.slow
+def test_api_build_freifunk_opkg(app):
+ """Build with Freifunk Weimarnetz opkg repository (24.10.6, ath79)."""
+ settings.upstream_url = "https://downloads.openwrt.org"
+ settings.repository_allow_list = ["https://buildbot.weimarnetz.de/"]
+ client = TestClient(app)
+ response = client.post(
+ "/api/v1/build",
+ json=dict(
+ target="ath79/generic",
+ version="24.10.6",
+ profile="8dev_carambola2",
+ packages=["weimarnetz-feed-opkg"],
+ repositories={
+ "weimarnetz": "https://buildbot.weimarnetz.de/builds/brauhaus/packages/stable/24.10/ath79/generic/weimarnetz_packages",
+ },
+ repository_keys=[
+ "RWRIR91gqalV7vnWiH8RjngeXUohKt0VMGPVHNYPVPX3Ala/k6tdjuWC",
+ ],
+ repositories_mode="append",
+ ),
+ )
+
+ data = response.json()
+ assert response.status_code == 200, data.get("stderr", data.get("detail", ""))[
+ :2000
+ ]
+ assert "weimarnetz-feed-opkg" in data["manifest"]
diff --git a/tests/test_build.py b/tests/test_build.py
new file mode 100644
index 00000000..edd9e41d
--- /dev/null
+++ b/tests/test_build.py
@@ -0,0 +1,56 @@
+from asu.repositories import merge_repositories
+
+
+def test_merge_opkg_appends():
+ base = "src/gz base https://example.com/base\noption check_signature\n"
+ merged = merge_repositories(
+ base,
+ {"custom": "https://example.com/custom"},
+ apk_mode=False,
+ )
+ assert "src/gz base https://example.com/base" in merged
+ assert "src/gz custom https://example.com/custom" in merged
+ assert "option check_signature" in merged
+ assert "src imagebuilder file:packages" in merged
+
+
+def test_merge_opkg_adds_required_lines():
+ merged = merge_repositories(
+ "",
+ {"custom": "https://example.com/custom"},
+ apk_mode=False,
+ )
+ assert "src imagebuilder file:packages" in merged
+ assert "option check_signature" in merged
+
+
+def test_merge_opkg_replace_mode():
+ merged = merge_repositories(
+ "",
+ {"foo": "https://example.com/foo", "bar": "https://example.com/bar"},
+ apk_mode=False,
+ )
+ assert "src/gz bar https://example.com/bar" in merged
+ assert "src/gz foo https://example.com/foo" in merged
+ assert "option check_signature" in merged
+
+
+def test_merge_apk_appends():
+ base = "https://example.com/a\nhttps://example.com/b\n"
+ merged = merge_repositories(
+ base,
+ {"x": "https://example.com/c"},
+ apk_mode=True,
+ )
+ assert "https://example.com/a" in merged
+ assert "https://example.com/b" in merged
+ assert "https://example.com/c" in merged
+
+
+def test_merge_apk_replace_mode():
+ merged = merge_repositories(
+ "",
+ {"x": "https://example.com/new"},
+ apk_mode=True,
+ )
+ assert "https://example.com/new" in merged
diff --git a/tests/test_build_inject.py b/tests/test_build_inject.py
new file mode 100644
index 00000000..f13366c6
--- /dev/null
+++ b/tests/test_build_inject.py
@@ -0,0 +1,176 @@
+"""Test build file injection logic (keys, repos, defaults).
+
+Tests verify that _make_tar produces correct archives and that
+inject_files constructs the right file trees for the container.
+"""
+
+import base64
+import tarfile
+from io import BytesIO
+from unittest.mock import MagicMock, patch
+
+from asu.build import _make_tar, inject_files
+from asu.build_request import BuildRequest
+
+
+def _extract_tar(data: bytes) -> dict[str, str]:
+ """Helper: extract tar bytes into {name: content} dict."""
+ result = {}
+ with tarfile.open(fileobj=BytesIO(data)) as tar:
+ for member in tar.getmembers():
+ if member.isfile():
+ result[member.name] = tar.extractfile(member).read().decode("utf-8")
+ return result
+
+
+def test_make_tar_single_file():
+ data = _make_tar({"hello.txt": "world"})
+ files = _extract_tar(data)
+ assert files == {"hello.txt": "world"}
+
+
+def test_make_tar_multiple_files():
+ data = _make_tar(
+ {
+ "a.txt": "aaa",
+ "sub/b.txt": "bbb",
+ }
+ )
+ files = _extract_tar(data)
+ assert files["a.txt"] == "aaa"
+ assert files["sub/b.txt"] == "bbb"
+
+
+def test_make_tar_binary_content():
+ data = _make_tar({"bin.dat": b"\x00\x01\x02"})
+ with tarfile.open(fileobj=BytesIO(data)) as tar:
+ content = tar.extractfile("bin.dat").read()
+ assert content == b"\x00\x01\x02"
+
+
+def test_make_tar_empty():
+ data = _make_tar({})
+ with tarfile.open(fileobj=BytesIO(data)) as tar:
+ assert tar.getmembers() == []
+
+
+def test_inject_files_no_extras():
+ """No keys, repos, or defaults — nothing should be injected."""
+ container = MagicMock()
+ request = BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ )
+ inject_files(container, request)
+ container.put_archive.assert_not_called()
+
+
+@patch("asu.build._detect_apk_mode", return_value=False)
+def test_inject_files_with_defaults(mock_detect):
+ container = MagicMock()
+ request = BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ defaults="echo hello",
+ )
+ inject_files(container, request)
+ container.put_archive.assert_called_once()
+
+ call_args = container.put_archive.call_args
+ assert call_args[0][0] == "/builder/"
+ files = _extract_tar(call_args[0][1])
+ assert "asu-files/etc/uci-defaults/99-asu-defaults" in files
+ assert files["asu-files/etc/uci-defaults/99-asu-defaults"] == "echo hello"
+
+
+@patch("asu.build._detect_apk_mode", return_value=False)
+def test_inject_files_with_repositories(mock_detect):
+ container = MagicMock()
+ request = BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repositories={},
+ )
+ inject_files(container, request)
+ container.put_archive.assert_not_called()
+
+
+def test_inject_files_with_usign_keys():
+ """usign keys go to /builder/keys/."""
+ container = MagicMock()
+ key_data = base64.b64encode(b"\x00" * 42).decode()
+ request = BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repository_keys=[key_data],
+ )
+ inject_files(container, request)
+ container.put_archive.assert_called_once()
+
+ call_args = container.put_archive.call_args
+ assert call_args[0][0] == "/builder/"
+ files = _extract_tar(call_args[0][1])
+ key_files = [f for f in files if f.startswith("keys/")]
+ assert len(key_files) == 1
+ assert key_data in files[key_files[0]]
+
+
+def test_inject_files_with_pem_keys():
+ """PEM keys also go to /builder/keys/."""
+ container = MagicMock()
+ pem_key = "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZI...\n-----END PUBLIC KEY-----\n"
+ request = BuildRequest(
+ version="25.12.2",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repository_keys=[pem_key],
+ )
+ inject_files(container, request)
+ container.put_archive.assert_called_once()
+
+ call_args = container.put_archive.call_args
+ assert call_args[0][0] == "/builder/"
+ files = _extract_tar(call_args[0][1])
+ pem_files = [f for f in files if f.endswith(".pem")]
+ assert len(pem_files) == 1
+ assert pem_key in files[pem_files[0]]
+
+
+def test_inject_files_mixed_keys():
+ """Both PEM and usign keys in one request go to /builder/keys/."""
+ container = MagicMock()
+ pem_key = "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZI...\n-----END PUBLIC KEY-----\n"
+ usign_key = base64.b64encode(b"\x00" * 42).decode()
+ request = BuildRequest(
+ version="25.12.2",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repository_keys=[pem_key, usign_key],
+ )
+ inject_files(container, request)
+ container.put_archive.assert_called_once()
+
+ call_args = container.put_archive.call_args
+ assert call_args[0][0] == "/builder/"
+ files = _extract_tar(call_args[0][1])
+ assert len(files) == 2
+
+
+@patch("asu.build._detect_apk_mode", return_value=False)
+def test_inject_files_defaults_and_keys(mock_detect):
+ """Multiple inject types should result in multiple put_archive calls."""
+ container = MagicMock()
+ key_data = base64.b64encode(b"\x00" * 42).decode()
+ request = BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ defaults="echo test",
+ repository_keys=[key_data],
+ )
+ inject_files(container, request)
+ assert container.put_archive.call_count == 2
diff --git a/tests/test_package_changes.py b/tests/test_package_changes.py
index 59c35141..d7f4d02a 100644
--- a/tests/test_package_changes.py
+++ b/tests/test_package_changes.py
@@ -45,6 +45,39 @@ def test_apply_package_changes_does_not_modify_input_dict():
assert build_request == original_req
+def test_apply_package_add_and_remove():
+ build_request = BuildRequest(
+ **{
+ "version": "24.10",
+ "target": "ath79/generic",
+ "profile": "buffalo_wzr-hp-g300nh-s",
+ "packages": ["auc"],
+ }
+ )
+ apply_package_changes(build_request)
+
+ assert "owut" in build_request.packages
+
+ build_request.version = "SNAPSHOT"
+ build_request.packages = [
+ "a",
+ "b",
+ "kmod-nf-conntrack",
+ "c",
+ "kmod-nf-conntrack6",
+ "d",
+ "e",
+ ]
+
+ assert len(build_request.packages) == 7
+
+ apply_package_changes(build_request)
+
+ assert len(build_request.packages) == 6
+ assert "kmod-nf-conntrack" in build_request.packages
+ assert "kmod-nf-conntrack6" not in build_request.packages
+
+
def test_apply_package_changes_release():
build_request = BuildRequest(
**{
@@ -83,17 +116,21 @@ def test_apply_package_changes_lang_packs():
"target": "mediatek/mt7622",
"profile": "foobar",
"packages": [
+ "luci-i18n-english-en", # Should be deleted
"luci-i18n-opkg-ko", # Should be replaced
"luci-i18n-xinetd-lt", # Should be untouched
+ "luci-i18n-opkg-en", # Should be deleted
"luci-i18n-opkg-zh-cn", # Should be replaced
],
}
)
- assert len(build_request.packages) == 3
- assert build_request.packages[0] == "luci-i18n-opkg-ko"
- assert build_request.packages[1] == "luci-i18n-xinetd-lt"
- assert build_request.packages[2] == "luci-i18n-opkg-zh-cn"
+ assert len(build_request.packages) == 5
+ assert build_request.packages[0] == "luci-i18n-english-en"
+ assert build_request.packages[1] == "luci-i18n-opkg-ko"
+ assert build_request.packages[2] == "luci-i18n-xinetd-lt"
+ assert build_request.packages[3] == "luci-i18n-opkg-en"
+ assert build_request.packages[4] == "luci-i18n-opkg-zh-cn"
apply_package_changes(build_request)
diff --git a/tests/test_security.py b/tests/test_security.py
new file mode 100644
index 00000000..1cd87f24
--- /dev/null
+++ b/tests/test_security.py
@@ -0,0 +1,186 @@
+"""Security regression tests for issues found in the 2026-02-06 audit."""
+
+import pytest
+
+from asu.build_request import BuildRequest
+from asu.config import settings
+
+
+def test_repo_name_rejects_newline():
+ """Repository name with embedded newline must be rejected by Pydantic."""
+ with pytest.raises(Exception):
+ BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repositories={"evil\nsrc/gz pwned http://x.com": "https://a.com/repo"},
+ )
+
+
+def test_repo_name_rejects_spaces():
+ """Repository name with spaces must be rejected."""
+ with pytest.raises(Exception):
+ BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repositories={"name with spaces": "https://a.com/repo"},
+ )
+
+
+def test_repo_name_rejects_slashes():
+ """Repository name with slashes must be rejected."""
+ with pytest.raises(Exception):
+ BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repositories={"src/gz": "https://a.com/repo"},
+ )
+
+
+def test_repo_name_accepts_valid():
+ """Valid repository names must be accepted."""
+ req = BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repositories={"custom-repo": "https://example.com/repo"},
+ )
+ assert "custom-repo" in req.repositories
+
+
+def test_repo_name_accepts_dots_underscores():
+ req = BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repositories={"my_repo.v2": "https://example.com/repo"},
+ )
+ assert "my_repo.v2" in req.repositories
+
+
+def test_repo_name_rejects_empty():
+ """Empty repository name must be rejected."""
+ with pytest.raises(Exception):
+ BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repositories={"": "https://a.com/repo"},
+ )
+
+
+def test_api_repo_name_newline_injection(client):
+ """Newline in repository name must be rejected at the API level."""
+ response = client.post(
+ "/api/v1/build",
+ json={
+ "version": "1.2.3",
+ "target": "testtarget/testsubtarget",
+ "profile": "testprofile",
+ "repositories": {
+ "legit\nsrc/gz pwned http://evil.com": "https://example.com/repo"
+ },
+ },
+ )
+ assert response.status_code == 422
+
+
+def test_repo_url_rejects_non_http():
+ """Repository URLs must start with http:// or https://."""
+ with pytest.raises(Exception):
+ BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repositories={"repo": "ftp://example.com/repo"},
+ )
+
+
+def test_repo_url_rejects_no_scheme():
+ with pytest.raises(Exception):
+ BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repositories={"repo": "example.com/repo"},
+ )
+
+
+def test_repo_url_accepts_https():
+ req = BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repositories={"repo": "https://example.com/packages"},
+ )
+ assert req.repositories["repo"] == "https://example.com/packages"
+
+
+def test_repo_url_accepts_http():
+ req = BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repositories={"repo": "http://example.com/packages"},
+ )
+ assert req.repositories["repo"] == "http://example.com/packages"
+
+
+def test_repositories_mode_accepts_append_and_replace():
+ append_request = BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repositories_mode="append",
+ )
+ replace_request = BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repositories_mode="replace",
+ )
+ assert append_request.repositories_mode == "append"
+ assert replace_request.repositories_mode == "replace"
+
+
+def test_repositories_mode_rejects_invalid_value():
+ with pytest.raises(Exception):
+ BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repositories_mode="invalid",
+ )
+
+
+def test_api_repositories_mode_rejects_invalid_value(client):
+ response = client.post(
+ "/api/v1/build",
+ json={
+ "version": "1.2.3",
+ "target": "testtarget/testsubtarget",
+ "profile": "testprofile",
+ "repositories_mode": "invalid",
+ },
+ )
+ assert response.status_code == 422
+
+
+def test_api_repo_not_in_allow_list(client):
+ """Repositories not in the allow list must be rejected at the API level."""
+ settings.repository_allow_list = ["https://allowed.example.com/"]
+ response = client.post(
+ "/api/v1/build",
+ json={
+ "version": "1.2.3",
+ "target": "testtarget/testsubtarget",
+ "profile": "testprofile",
+ "repositories": {
+ "evil": "https://evil.example.com/packages",
+ },
+ },
+ )
+ assert response.status_code == 400
+ assert "not allowed" in response.json()["detail"]
diff --git a/tests/test_stats.py b/tests/test_stats.py
index 81ceffe8..fcdc8287 100644
--- a/tests/test_stats.py
+++ b/tests/test_stats.py
@@ -1,6 +1,9 @@
import time
+
from fakeredis import FakeStrictRedis
+from asu.build_request import BuildRequest
+
build_config_1 = dict(
version="1.2.3",
target="testtarget/testsubtarget",
@@ -172,3 +175,86 @@ def test_stats_builds_by_version(client, redis_server: FakeStrictRedis):
data = response.json()
assert len(data["labels"]) == 26
assert len(data["datasets"][0]["data"]) == 26
+
+
+def test_build_error_log(client, redis_server):
+ """Test that build errors are logged to Redis."""
+ from asu.util import ErrorLog
+
+ error_log = ErrorLog()
+
+ # Clear any existing errors
+ redis_server.delete(ErrorLog.REDIS_KEY)
+
+ # Initially should have no errors
+ response = client.get("/api/v1/build-errors")
+ assert response.status_code == 200
+ assert "No build errors recorded" in response.text
+
+ # Log an error
+ build_request = BuildRequest(
+ distro="openwrt",
+ version="24.10-SNAPSHOT",
+ version_code="",
+ target="ath79/generic",
+ profile="tplink_tl-wdr4300-v1",
+ packages=["vim"],
+ )
+ error_log.log_build_error(build_request, "Test error message")
+
+ entries = error_log.get_entries()
+ assert len(entries) == 1
+ assert "24.10-SNAPSHOT:ath79/generic:tplink_tl-wdr4300-v1" in entries[0]
+ assert "Test error message" in entries[0]
+
+ # Log another — most recent should be first
+ error_log.log_build_error(build_request, "Second error")
+ entries = error_log.get_entries()
+ assert len(entries) == 2
+ assert "Second error" in entries[0]
+
+ # Test summary format
+ summary = error_log.get_summary()
+ assert "Build Errors: 2 entries" in summary
+ assert "Time range:" in summary
+
+ # Test sanitization of job hashes
+ error_log.log_build_error(
+ build_request,
+ "Internal Server Error (no container with ID "
+ "eee08b3b7b072f2ba82559c6e61da9b84e00cdbc35a4d99392fec36c0bf64356"
+ " found in database: no such container)",
+ )
+ entries = error_log.get_entries()
+ assert " ID [job-id] found " in entries[0]
+
+
+def test_build_error_log_api(client):
+ """Test the /api/v1/build-errors endpoint."""
+ response = client.get("/api/v1/build-errors")
+ assert response.status_code == 200
+ assert response.headers["content-type"] == "text/plain; charset=utf-8"
+
+ response = client.get("/api/v1/build-errors?n=50")
+ assert response.status_code == 200
+
+
+def test_build_error_log_respects_n_entries(client, redis_server):
+ """Test that get_entries respects n_entries limit."""
+ from asu.util import ErrorLog
+
+ error_log = ErrorLog()
+ redis_server.delete(ErrorLog.REDIS_KEY)
+
+ build_request = BuildRequest(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ )
+
+ for i in range(10):
+ error_log.log_build_error(build_request, f"Error {i}")
+
+ entries = error_log.get_entries(n_entries=3)
+ assert len(entries) == 3
+ assert "Error 9" in entries[0]
diff --git a/tests/test_store.py b/tests/test_store.py
index e777d545..54eb3d84 100644
--- a/tests/test_store.py
+++ b/tests/test_store.py
@@ -1,6 +1,8 @@
-from asu.config import settings
+import tempfile
+from pathlib import Path
-# store_path = settings.public_path / "store"
+from asu.config import settings
+from asu.store import LocalStore
def test_store_content_type_img(client):
@@ -34,3 +36,56 @@ def test_store_file_missing(client):
headers = response.headers
assert headers["Content-Type"] != "application/octet-stream"
+
+
+def test_local_store_upload_file():
+ with tempfile.TemporaryDirectory() as tmpdir:
+ settings.public_path = Path(tmpdir)
+ store = LocalStore()
+
+ src = Path(tmpdir) / "image.bin"
+ src.write_bytes(b"firmware data")
+
+ store.upload_file(src, "abc123/image.bin")
+
+ dest = Path(tmpdir) / "store" / "abc123" / "image.bin"
+ assert dest.is_file()
+ assert dest.read_bytes() == b"firmware data"
+
+
+def test_local_store_upload_dir():
+ with tempfile.TemporaryDirectory() as tmpdir:
+ settings.public_path = Path(tmpdir)
+ store = LocalStore()
+
+ build_dir = Path(tmpdir) / "build"
+ build_dir.mkdir()
+ (build_dir / "image.bin").write_bytes(b"fw1")
+ (build_dir / "profiles.json").write_text("{}")
+
+ store.upload_dir(build_dir, "abc123")
+
+ store_dir = Path(tmpdir) / "store" / "abc123"
+ assert (store_dir / "image.bin").read_bytes() == b"fw1"
+ assert (store_dir / "profiles.json").read_text() == "{}"
+
+
+def test_local_store_exists():
+ with tempfile.TemporaryDirectory() as tmpdir:
+ settings.public_path = Path(tmpdir)
+ store = LocalStore()
+
+ assert not store.exists("abc123/image.bin")
+
+ (Path(tmpdir) / "store" / "abc123").mkdir(parents=True)
+ (Path(tmpdir) / "store" / "abc123" / "image.bin").touch()
+
+ assert store.exists("abc123/image.bin")
+
+
+def test_local_store_get_url():
+ with tempfile.TemporaryDirectory() as tmpdir:
+ settings.public_path = Path(tmpdir)
+ store = LocalStore()
+
+ assert store.get_url("abc123/image.bin") == "/store/abc123/image.bin"
diff --git a/tests/test_util.py b/tests/test_util.py
index 21a78ceb..3625910d 100644
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -2,12 +2,16 @@
import tempfile
from pathlib import Path
+import pytest
+
from podman import PodmanClient
import asu.util
+from asu.repositories import is_repo_allowed
from asu.build_request import BuildRequest
from asu.util import (
check_manifest,
+ check_package_errors,
diff_packages,
fingerprint_pubkey_usign,
get_container_version_tag,
@@ -67,8 +71,21 @@ def test_get_request_hash():
assert (
get_request_hash(request)
- == "99ff721439cd696f7da259541a07d7bfc7eb6c45a844db532e0384b464e23f46"
+ == "525e19496bd8d47b9b63aa5fb2b2f5da467558893d39a42eb32210007fd57800"
+ )
+
+
+def test_get_request_hash_includes_repositories_mode():
+ base = dict(
+ version="1.2.3",
+ target="testtarget/testsubtarget",
+ profile="testprofile",
+ repositories={"custom": "https://example.com/repo"},
)
+ append_hash = get_request_hash(BuildRequest(**base, repositories_mode="append"))
+ replace_hash = get_request_hash(BuildRequest(**base, repositories_mode="replace"))
+
+ assert append_hash != replace_hash
def test_diff_packages():
@@ -180,8 +197,8 @@ class ResponseText:
assert packages == packages_without_abi
# Old opkg-style Packages format, but with v1 index.json
- asu.util.client_get = (
- lambda url: ResponseJson1() if "json" in url else ResponseText()
+ asu.util.client_get = lambda url: (
+ ResponseJson1() if "json" in url else ResponseText()
)
index = parse_packages_file("httpx://fake_url")
packages = index["packages"]
@@ -190,8 +207,8 @@ class ResponseText:
assert packages == packages_without_abi
# New apk-style without Packages, but old v1 index.json
- asu.util.client_get = (
- lambda url: ResponseJson1() if "json" in url else Response404()
+ asu.util.client_get = lambda url: (
+ ResponseJson1() if "json" in url else Response404()
)
index = parse_packages_file("httpx://fake_url")
packages = index["packages"]
@@ -311,6 +328,24 @@ def test_check_manifest():
)
+def test_check_package_errors():
+ assert check_package_errors("hello world") == "Impossible package selection"
+ assert (
+ check_package_errors(
+ " * opkg_install_cmd: Cannot install package OPKG-MISSING."
+ )
+ == "Impossible package selection: missing (OPKG-MISSING)"
+ )
+ assert (
+ check_package_errors(check_package_errors.__doc__)
+ == "Impossible package selection:"
+ " missing (APK-MISSING, OPKG-MISSING)"
+ " conflicts"
+ " (APK-CONFLICT-1, APK-CONFLICT-2, APK-CONFLICT-3, APK-CONFLICT-4,"
+ " OPKG-CONFLICT-1, OPKG-CONFLICT-2, OPKG-CONFLICT-3, OPKG-CONFLICT-4)"
+ )
+
+
def test_get_podman():
podman = get_podman()
assert isinstance(podman, PodmanClient)
@@ -336,6 +371,39 @@ def test_run_cmd():
assert "testtarget/testsubtarget" in stdout
+def test_run_cmd_rejects_tar_path_traversal(tmp_path, monkeypatch):
+ """Tar archives with path traversal members must be rejected (CVE-2007-4559).
+
+ The filter='data' argument to extractall() raises an error for entries
+ with absolute paths or parent directory references like '../../etc/passwd'.
+ """
+ import io
+ import tarfile
+ from unittest.mock import MagicMock
+
+ # Build a malicious tar archive with a path traversal entry
+ buf = io.BytesIO()
+ with tarfile.open(fileobj=buf, mode="w") as tar:
+ info = tarfile.TarInfo(name="../../etc/malicious")
+ info.size = 7
+ tar.addfile(info, io.BytesIO(b"pwned!\n"))
+ buf.seek(0)
+
+ # Mock a container that returns this malicious tar
+ mock_container = MagicMock()
+ mock_container.exec_run.return_value = (0, (b"ok", b""))
+ mock_container.get_archive.return_value = (iter([buf.getvalue()]), None)
+
+ dest = str(tmp_path / "output")
+ os.makedirs(dest)
+
+ with pytest.raises(Exception, match="is outside the destination"):
+ run_cmd(mock_container, ["echo"], copy=["/fake", dest])
+
+ # Verify the malicious file was NOT written
+ assert not (tmp_path / "etc" / "malicious").exists()
+
+
def test_parse_manifest_opkg():
manifest = parse_manifest("test - 1.0\ntest2 - 2.0\ntest3 - 3.0\ntest4 - 3.0\n")
@@ -347,6 +415,39 @@ def test_parse_manifest_opkg():
}
+def test_is_repo_allowed_empty_list():
+ assert is_repo_allowed("https://example.com/repo", []) is False
+
+
+def test_is_repo_allowed_valid():
+ allow = ["https://downloads.openwrt.org"]
+ assert is_repo_allowed("https://downloads.openwrt.org/releases/23.05", allow)
+
+
+def test_is_repo_allowed_subdomain_bypass():
+ """Attacker registers downloads.openwrt.org.evil.com"""
+ allow = ["https://downloads.openwrt.org"]
+ assert not is_repo_allowed("https://downloads.openwrt.org.evil.com/packages", allow)
+
+
+def test_is_repo_allowed_userinfo_bypass():
+ """Attacker uses URL userinfo to redirect"""
+ allow = ["https://downloads.openwrt.org"]
+ assert not is_repo_allowed("https://downloads.openwrt.org@evil.com/packages", allow)
+
+
+def test_is_repo_allowed_scheme_mismatch():
+ allow = ["https://downloads.openwrt.org"]
+ assert not is_repo_allowed("http://downloads.openwrt.org/releases", allow)
+
+
+def test_is_repo_allowed_exact_host_no_path():
+ """URL must have a path under the allowed prefix, not just the host"""
+ allow = ["https://downloads.openwrt.org/releases"]
+ assert not is_repo_allowed("https://downloads.openwrt.org/snapshots", allow)
+ assert is_repo_allowed("https://downloads.openwrt.org/releases/23.05", allow)
+
+
def test_parse_manifest_apk():
manifest = parse_manifest("test 1.0\ntest2 2.0\ntest3 3.0\ntest4 3.0\n")
diff --git a/uv.lock b/uv.lock
index 395d4d45..9f7f01ab 100644
--- a/uv.lock
+++ b/uv.lock
@@ -35,12 +35,14 @@ wheels = [
[[package]]
name = "asu"
version = "0.0.0"
-source = { virtual = "." }
+source = { editable = "." }
dependencies = [
+ { name = "boto3" },
{ name = "fastapi", extra = ["standard"] },
{ name = "fastapi-cache2" },
{ name = "httpx" },
{ name = "podman" },
+ { name = "podman-compose" },
{ name = "pydantic-settings" },
{ name = "pynacl" },
{ name = "redis" },
@@ -60,6 +62,7 @@ dev = [
[package.metadata]
requires-dist = [
+ { name = "boto3", specifier = ">=1.35.0" },
{ name = "coverage", marker = "extra == 'dev'", specifier = ">=7.13.0" },
{ name = "fakeredis", marker = "extra == 'dev'", specifier = ">=2.32.0" },
{ name = "fastapi", extras = ["standard"], specifier = ">=0.119.0" },
@@ -67,6 +70,7 @@ requires-dist = [
{ name = "httpx", specifier = ">=0.28.1" },
{ name = "isort", marker = "extra == 'dev'", specifier = ">=7.0.0" },
{ name = "podman", specifier = ">=5.6.0" },
+ { name = "podman-compose", specifier = ">=1.5.0" },
{ name = "pydantic-settings", specifier = ">=2.12.0" },
{ name = "pynacl", specifier = ">=1.6.0" },
{ name = "pytest", marker = "extra == 'dev'", specifier = ">=8.4.2" },
@@ -78,6 +82,34 @@ requires-dist = [
]
provides-extras = ["dev"]
+[[package]]
+name = "boto3"
+version = "1.42.78"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "botocore" },
+ { name = "jmespath" },
+ { name = "s3transfer" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a8/2b/ebdad075934cf6bb78bf81fe31d83339bcd804ad6c856f7341376cbc88b6/boto3-1.42.78.tar.gz", hash = "sha256:cef2ebdb9be5c0e96822f8d3941ac4b816c90a5737a7ffb901d664c808964b63", size = 112789, upload-time = "2026-03-27T19:28:07.58Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/57/bb/1f6dade1f1e86858bef7bd332bc8106c445f2dbabec7b32ab5d7d118c9b6/boto3-1.42.78-py3-none-any.whl", hash = "sha256:480a34a077484a5ca60124dfd150ba3ea6517fc89963a679e45b30c6db614d26", size = 140556, upload-time = "2026-03-27T19:28:06.125Z" },
+]
+
+[[package]]
+name = "botocore"
+version = "1.42.78"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "jmespath" },
+ { name = "python-dateutil" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/67/8e/cdb34c8ca71216d214e049ada2148ee08bcda12b1ac72af3a720dea300ff/botocore-1.42.78.tar.gz", hash = "sha256:61cbd49728e23f68cfd945406ab40044d49abed143362f7ffa4a4f4bd4311791", size = 15023592, upload-time = "2026-03-27T19:27:57.122Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/54/72/94bba1a375d45c685b00e051b56142359547837086a83861d76f6aec26f4/botocore-1.42.78-py3-none-any.whl", hash = "sha256:038ab63c7f898e8b5db58cb6a45e4da56c31dd984e7e995839a3540c735564ea", size = 14701729, upload-time = "2026-03-27T19:27:54.05Z" },
+]
+
[[package]]
name = "certifi"
version = "2026.1.4"
@@ -535,6 +567,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" },
]
+[[package]]
+name = "jmespath"
+version = "1.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d3/59/322338183ecda247fb5d1763a6cbe46eff7222eaeebafd9fa65d4bf5cb11/jmespath-1.1.0.tar.gz", hash = "sha256:472c87d80f36026ae83c6ddd0f1d05d4e510134ed462851fd5f754c8c3cbb88d", size = 27377, upload-time = "2026-01-22T16:35:26.279Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/14/2f/967ba146e6d58cf6a652da73885f52fc68001525b4197effc174321d70b4/jmespath-1.1.0-py3-none-any.whl", hash = "sha256:a5663118de4908c91729bea0acadca56526eb2698e83de10cd116ae0f4e97c64", size = 20419, upload-time = "2026-01-22T16:35:24.919Z" },
+]
+
[[package]]
name = "markdown-it-py"
version = "4.0.0"
@@ -662,6 +703,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0a/9e/8c62f05b104d9f00edbb4c298b152deceb393ea67f0288d89d1139d7a859/podman-5.6.0-py3-none-any.whl", hash = "sha256:967ff8ad8c6b851bc5da1a9410973882d80e235a9410b7d1e931ce0c3324fbe3", size = 88713, upload-time = "2025-09-05T09:42:38.405Z" },
]
+[[package]]
+name = "podman-compose"
+version = "1.5.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "python-dotenv" },
+ { name = "pyyaml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/24/91/b168a685ca6813ff9b467d76a7365a099aec16a1032b6edf39b0cd19f6c3/podman_compose-1.5.0.tar.gz", hash = "sha256:5cc09362852711ce5d27648e41cb5fd058ea5a75acbcdec2f8d0b0c114a18e8e", size = 47377, upload-time = "2025-07-07T14:18:09.633Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/41/4b/75ab5c151b9d170fdae0048a6f6528535aff848140c007f408af9ac555d6/podman_compose-1.5.0-py3-none-any.whl", hash = "sha256:f0b9d35f4da1b309172adf208a5cb7a882b532a834c2202666c1988b6f147546", size = 47129, upload-time = "2025-07-07T14:18:08.373Z" },
+]
+
[[package]]
name = "pycparser"
version = "2.23"
@@ -773,11 +827,11 @@ wheels = [
[[package]]
name = "pygments"
-version = "2.19.2"
+version = "2.20.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
+ { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" },
]
[[package]]
@@ -866,11 +920,11 @@ wheels = [
[[package]]
name = "python-multipart"
-version = "0.0.21"
+version = "0.0.22"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/78/96/804520d0850c7db98e5ccb70282e29208723f0964e88ffd9d0da2f52ea09/python_multipart-0.0.21.tar.gz", hash = "sha256:7137ebd4d3bbf70ea1622998f902b97a29434a9e8dc40eb203bbcf7c2a2cba92", size = 37196, upload-time = "2025-12-17T09:24:22.446Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/94/01/979e98d542a70714b0cb2b6728ed0b7c46792b695e3eaec3e20711271ca3/python_multipart-0.0.22.tar.gz", hash = "sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58", size = 37612, upload-time = "2026-01-25T10:15:56.219Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/aa/76/03af049af4dcee5d27442f71b6924f01f3efb5d2bd34f23fcd563f2cc5f5/python_multipart-0.0.21-py3-none-any.whl", hash = "sha256:cf7a6713e01c87aa35387f4774e812c4361150938d20d232800f75ffcf266090", size = 24541, upload-time = "2025-12-17T09:24:21.153Z" },
+ { url = "https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" },
]
[[package]]
@@ -929,7 +983,7 @@ wheels = [
[[package]]
name = "requests"
-version = "2.32.5"
+version = "2.33.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
@@ -937,9 +991,9 @@ dependencies = [
{ name = "idna" },
{ name = "urllib3" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/34/64/8860370b167a9721e8956ae116825caff829224fbca0ca6e7bf8ddef8430/requests-2.33.0.tar.gz", hash = "sha256:c7ebc5e8b0f21837386ad0e1c8fe8b829fa5f544d8df3b2253bff14ef29d7652", size = 134232, upload-time = "2026-03-25T15:10:41.586Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
+ { url = "https://files.pythonhosted.org/packages/56/5d/c814546c2333ceea4ba42262d8c4d55763003e767fa169adc693bd524478/requests-2.33.0-py3-none-any.whl", hash = "sha256:3324635456fa185245e24865e810cecec7b4caf933d7eb133dcde67d48cee69b", size = 65017, upload-time = "2026-03-25T15:10:40.382Z" },
]
[[package]]
@@ -1062,6 +1116,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/74/31/b0e29d572670dca3674eeee78e418f20bdf97fa8aa9ea71380885e175ca0/ruff-0.14.10-py3-none-win_arm64.whl", hash = "sha256:e51d046cf6dda98a4633b8a8a771451107413b0f07183b2bef03f075599e44e6", size = 13729839, upload-time = "2025-12-18T19:28:48.636Z" },
]
+[[package]]
+name = "s3transfer"
+version = "0.16.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "botocore" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/05/04/74127fc843314818edfa81b5540e26dd537353b123a4edc563109d8f17dd/s3transfer-0.16.0.tar.gz", hash = "sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920", size = 153827, upload-time = "2025-12-01T02:30:59.114Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fc/51/727abb13f44c1fcf6d145979e1535a35794db0f6e450a0cb46aa24732fe2/s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe", size = 86830, upload-time = "2025-12-01T02:30:57.729Z" },
+]
+
[[package]]
name = "sentry-sdk"
version = "2.48.0"
@@ -1161,11 +1227,11 @@ wheels = [
[[package]]
name = "urllib3"
-version = "2.6.2"
+version = "2.6.3"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/1e/24/a2a2ed9addd907787d7aa0355ba36a6cadf1768b934c652ea78acbd59dcd/urllib3-2.6.2.tar.gz", hash = "sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797", size = 432930, upload-time = "2025-12-11T15:56:40.252Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/6d/b9/4095b668ea3678bf6a0af005527f39de12fb026516fb3df17495a733b7f8/urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd", size = 131182, upload-time = "2025-12-11T15:56:38.584Z" },
+ { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" },
]
[[package]]
@@ -1297,12 +1363,12 @@ wheels = [
[[package]]
name = "werkzeug"
-version = "3.1.4"
+version = "3.1.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "markupsafe" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/45/ea/b0f8eeb287f8df9066e56e831c7824ac6bab645dd6c7a8f4b2d767944f9b/werkzeug-3.1.4.tar.gz", hash = "sha256:cd3cd98b1b92dc3b7b3995038826c68097dcb16f9baa63abe35f20eafeb9fe5e", size = 864687, upload-time = "2025-11-29T02:15:22.841Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/61/f1/ee81806690a87dab5f5653c1f146c92bc066d7f4cebc603ef88eb9e13957/werkzeug-3.1.6.tar.gz", hash = "sha256:210c6bede5a420a913956b4791a7f4d6843a43b6fcee4dfa08a65e93007d0d25", size = 864736, upload-time = "2026-02-19T15:17:18.884Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/2f/f9/9e082990c2585c744734f85bec79b5dae5df9c974ffee58fe421652c8e91/werkzeug-3.1.4-py3-none-any.whl", hash = "sha256:2ad50fb9ed09cc3af22c54698351027ace879a0b60a3b5edf5730b2f7d876905", size = 224960, upload-time = "2025-11-29T02:15:21.13Z" },
+ { url = "https://files.pythonhosted.org/packages/4d/ec/d58832f89ede95652fd01f4f24236af7d32b70cab2196dfcc2d2fd13c5c2/werkzeug-3.1.6-py3-none-any.whl", hash = "sha256:7ddf3357bb9564e407607f988f683d72038551200c704012bb9a4c523d42f131", size = 225166, upload-time = "2026-02-19T15:17:17.475Z" },
]