# ci(triage): fire on PR comments + send is_pr/pr context to routine (#735)
#
# Workflow file for this run:
# Workflow metadata and triggers.
# NOTE: `on` is a YAML 1.1 boolean-looking key; GitHub's loader handles it,
# but generic linters (yamllint `truthy`) may flag it — suppress there, not here.
name: CI
on:
  push:
    # `python-adcp-sdk-setup` — presumably a long-lived integration branch
    # that wants push CI too; confirm it is still active.
    branches: [main, python-adcp-sdk-setup]
  pull_request:
    branches: [main]
jobs:
  # Core test matrix: lint, type-check, and run the suite on every
  # supported CPython version. Versions are quoted strings so YAML does
  # not collapse "3.10" into the float 3.1.
  test:
    name: Test Python ${{ matrix.python-version }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.10", "3.11", "3.12", "3.13"]
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -e ".[dev]"
      # Single-command steps use plain scalars; multi-command steps keep
      # literal block style.
      - name: Run linter
        run: ruff check src/
      - name: Run type checker
        run: mypy src/adcp/
      - name: Run tests
        run: pytest tests/ -v --cov=src/adcp --cov-report=term-missing
pg-replay-store:
name: PgReplayStore tests (Postgres 16)
runs-on: ubuntu-latest
services:
postgres:
# CI-local ephemeral database. POSTGRES_HOST_AUTH_METHOD=trust
# avoids shipping any password literal (real or placeholder) in
# this workflow — GitHub's default CI network is already the
# trust boundary for this throwaway service.
image: postgres:16
env:
POSTGRES_HOST_AUTH_METHOD: trust
POSTGRES_DB: adcp_test
ports:
- 5432:5432
options: >-
--health-cmd pg_isready
--health-interval 5s
--health-timeout 5s
--health-retries 10
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.12
uses: actions/setup-python@v5
with:
python-version: "3.12"
- name: Install dependencies (with [pg] extra)
run: |
python -m pip install --upgrade pip
pip install -e ".[dev,pg]"
- name: Run PgReplayStore tests (unit + full-wire e2e)
env:
ADCP_PG_TEST_URL: postgresql://postgres@localhost:5432/adcp_test
run: |
pytest tests/conformance/signing/test_pg_replay_store.py \
tests/conformance/signing/test_pg_replay_store_e2e.py \
-v
conventional-commits:
name: Validate conventional commit format
runs-on: ubuntu-latest
if: github.event_name == 'pull_request'
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Validate PR commits
uses: amannn/action-semantic-pull-request@v5
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Validate individual commits
run: |
# Get the base branch
BASE_SHA=$(git merge-base origin/${{ github.base_ref }} HEAD)
# Check each commit since the base
echo "Validating commits since $BASE_SHA..."
git log --format="%H %s" $BASE_SHA..HEAD | while read sha message; do
# Skip merge commits (GitHub automatically creates these)
if echo "$message" | grep -qE '^Merge [0-9a-f]+ into [0-9a-f]+'; then
echo "⊙ Skipping merge commit: $sha"
continue
fi
# Check if message matches conventional commit format
if ! echo "$message" | grep -qE '^(feat|fix|docs|style|refactor|perf|test|build|ci|chore|revert)(\([^)]+\))?!?: .+'; then
echo "❌ Commit $sha does not follow Conventional Commits format:"
echo " $message"
echo ""
echo "Expected format: <type>[optional scope]: <description>"
echo "Types: feat, fix, docs, style, refactor, perf, test, build, ci, chore, revert"
echo ""
echo "Examples:"
echo " feat: add new feature"
echo " fix: resolve bug in parser"
echo " feat(api): add new endpoint"
echo " feat!: breaking change"
exit 1
else
echo "✓ $sha: $message"
fi
done
echo ""
echo "✅ All commits follow Conventional Commits format"
  # Smoke test: install the built wheel and import the symbols that real
  # downstream consumers reach for, so public-API breakage fails CI here
  # rather than in dependents.
  downstream-imports:
    name: Downstream import smoke (representative consumer symbols)
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.11
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"
      - name: Build and install wheel
        run: |
          python -m pip install --upgrade pip build
          python -m build --wheel --outdir dist/
          pip install dist/*.whl
      # Proxy for real downstream import sites (salesagent, creative agents,
      # signals agents). Any ImportError here means we broke the public API
      # surface without a migration pointer — failing CI is the goal.
      - name: Import representative public-API symbols
        run: |
          python - <<'PY'
          from adcp import (
              ADCPClient,
              AgentConfig,
              BrandReference,
              CpmPricingOption,
              CreateMediaBuyRequest,
              Error,
              GetProductsRequest,
              ListCreativesRequest,
              MediaBuyStatus,
              Package,
              PackageRequest,
              PublisherPropertiesAll,
              SyncCatalogsRequest,
          )
          from adcp.types import (
              AudioFormatAsset,
              BriefFormatAsset,
              CatalogFormatAsset,
              ContextObject,
              CreativeAsset,
              CssFormatAsset,
              DaastFormatAsset,
              HtmlFormatAsset,
              ImageFormatAsset,
              JavascriptFormatAsset,
              MarkdownFormatAsset,
              RepeatableAssetGroup,
              TargetingOverlay,
              TextFormatAsset,
              UrlFormatAsset,
              VastFormatAsset,
              VideoFormatAsset,
              WebhookFormatAsset,
          )
          # Removed-type shims: old import paths must raise a guided
          # ImportError pointing at the migration guide.
          import adcp
          for name in ("BrandManifest", "FormatCategory", "DeliverTo"):
              try:
                  getattr(adcp, name)
              except ImportError as exc:
                  assert "MIGRATION_v3_to_v4" in str(exc), (
                      f"{name} deprecation shim dropped migration pointer: {exc}"
                  )
              else:
                  raise AssertionError(
                      f"{name} import should raise ImportError with migration pointer"
                  )
          # The deep submodule path (some older import sites reach this far)
          # must also surface the migration pointer, not a bare ModuleNotFoundError.
          try:
              from adcp.types.generated_poc.enums.format_category import FormatCategory  # noqa: F401
          except ImportError as exc:
              assert "MIGRATION_v3_to_v4" in str(exc), exc
          else:
              raise AssertionError(
                  "format_category submodule should raise ImportError with migration pointer"
              )
          # "3.12.0" is presumably a known-bad placeholder that leaks in when
          # package metadata is missing — TODO confirm where it comes from.
          assert adcp.__version__ and adcp.__version__ != "3.12.0", (
              f"adcp.__version__={adcp.__version__!r} — expected real pkg metadata"
          )
          assert adcp.get_adcp_version(), "ADCP_VERSION file is empty"
          print(f"OK — adcp=={adcp.__version__}, spec={adcp.get_adcp_version()}")
          PY
schema-check:
name: Validate schemas are up-to-date
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -e ".[dev]"
- name: Check ADCP version
id: version-check
run: |
VERSION=$(cat src/adcp/ADCP_VERSION)
echo "ADCP_VERSION=$VERSION"
# Skip regeneration + drift check for pre-release tags (alpha/beta/rc)
# and for `latest`, which is a moving dev snapshot — the committed
# generated types are frozen against the bundle we last synced, and
# CI's fresh sync against today's `latest.tgz` is expected to drift.
if echo "$VERSION" | grep -qE '(alpha|beta|rc)' || [ "$VERSION" = "latest" ]; then
echo "is_prerelease=true" >> $GITHUB_OUTPUT
echo "Pre-release / latest version detected - will skip schema sync"
else
echo "is_prerelease=false" >> $GITHUB_OUTPUT
echo "Stable version - will sync schemas from upstream"
fi
# Stable upstream tags ship Sigstore sidecars; sync_schemas.py
# verifies the bundle via `cosign verify-blob` before extraction.
# Installs cosign from the official sigstore/cosign-installer action.
- name: Install cosign (for signature verification)
if: steps.version-check.outputs.is_prerelease != 'true'
uses: sigstore/cosign-installer@v3
- name: Download latest schemas
if: steps.version-check.outputs.is_prerelease != 'true'
run: python scripts/sync_schemas.py
- name: Fix schema references
if: steps.version-check.outputs.is_prerelease != 'true'
run: python scripts/fix_schema_refs.py
- name: Bundle schemas into package
if: steps.version-check.outputs.is_prerelease != 'true'
run: python scripts/bundle_schemas.py
- name: Generate models
if: steps.version-check.outputs.is_prerelease != 'true'
run: python scripts/generate_types.py
- name: Validate generated code syntax
run: |
echo "Validating generated code can be parsed..."
python -m py_compile src/adcp/types/_generated.py
echo "✓ Syntax validation passed"
- name: Validate generated code imports
run: |
echo "Validating generated code can be imported..."
python -c "from adcp.types import _generated as generated; print(f'✓ Successfully imported {len(dir(generated))} symbols')"
- name: Run code generation tests
run: |
echo "Running code generation test suite..."
pytest tests/test_code_generation.py -v --tb=short
- name: Check for schema drift
if: steps.version-check.outputs.is_prerelease != 'true'
run: |
# datamodel-codegen's numbered-variant class names
# (Pass1/Pass4, Status16/Status17, StatusFilter1/StatusFilter4,
# Type80, etc.) shift between regens because the generator
# walks the schema graph in filesystem-iteration order and
# APFS (macOS) vs. ext4 (Linux CI) sort differently. The
# numbers are an implementation detail; semantic aliases in
# ``src/adcp/types/aliases.py`` pin the names downstream
# actually uses.
#
# The real drift guarantees we need are enforced elsewhere:
# * ``tests/test_schemas_version_pin.py`` — ADCP_VERSION
# matches ``schemas/cache/index.json.adcp_version`` on
# every test run.
# * This job's "Validate generated code syntax/imports"
# steps above — the regenerated code compiles and imports.
# * ``tests/test_asset_aliases_stable.py`` — the semantic
# aliases still point at valid classes.
#
# We keep this step as a "regen runs without error on stable
# tags" smoke — but don't fail on line-level diff, because
# the non-determinism produces false positives that block
# release PRs for cosmetic churn.
if git diff --quiet src/adcp/types/_generated.py schemas/cache/; then
echo "✓ Schemas are up-to-date (no diff)"
else
echo "ℹ Regen produced cosmetic diff — see aliases.py for stable names"
echo " Numbered-variant class-name churn is expected; the semantic"
echo " alias tests and drift-version-pin test guard the real surface."
fi