# fix(source-salesforce): skip time-based slicing for full_refresh sync (#31450)
name: Publish Connectors
on:
  # Automatic trigger: any merge to master that touches a connector's metadata.yaml.
  push:
    branches:
      - master
    paths:
      - "airbyte-integrations/connectors/**/metadata.yaml"
  # Reusable-workflow trigger: callers (e.g. the pre-release workflow) pass inputs here.
  workflow_call:
    inputs:
      connectors:
        description: "Connectors to Publish. This should be a string of the form: --name=source-pokeapi --name=destination-postgres"
        default: "--name=source-pokeapi"
        type: string
      with-semver-suffix:
        description: |
          Semver suffix to apply ephemerally (not committed back to the repo):
          - default: Resolves to 'none' on push to master, 'preview' on manual trigger
          - none: Use exact version from metadata.yaml (for production releases)
          - preview: Append '-preview.{sha}' suffix (for development builds)
          - rc: Append '-rc1' suffix (for release candidates)
        default: default
        type: string
      publish-java-tars:
        description: "Whether to publish Java connector tar files."
        required: false
        default: false
        type: boolean
      gitref:
        description: "Git ref (branch or SHA) to build connectors from. Used by pre-release workflow to build from PR branches."
        required: false
        type: string
      dry-run:
        description: "Dry run mode: run all logic but skip actual uploads (Docker, GCS, registry). Useful for testing workflow changes."
        required: false
        default: false
        type: boolean
    outputs:
      docker-image-tag:
        description: "Docker image tag used when publishing. For single-connector callers only; multi-connector callers should not rely on this output."
        value: ${{ jobs.publish_connector_registry_entries.outputs.docker-image-tag }}
  # Manual trigger: same inputs as workflow_call, but with-semver-suffix is a choice menu.
  workflow_dispatch:
    inputs:
      connectors:
        description: "Connectors to Publish. This should be a string of the form: --name=source-pokeapi --name=destination-postgres"
        default: "--name=source-pokeapi"
        type: string
      with-semver-suffix:
        description: |
          Semver suffix to apply ephemerally (not committed back to the repo):
          - default: Resolves to 'none' on push to master, 'preview' on manual trigger
          - none: Use exact version from metadata.yaml (for production releases)
          - preview: Append '-preview.{sha}' suffix (for development builds)
          - rc: Append '-rc1' suffix (for release candidates)
        default: default
        type: choice
        options:
          - default
          - none
          - preview
          - rc
      publish-java-tars:
        description: "Whether to publish Java connector tar files."
        required: false
        default: false
        type: boolean
      dry-run:
        description: "Dry run mode: run all logic but skip actual uploads (Docker, GCS, registry). Useful for testing workflow changes."
        required: false
        default: false
        type: boolean
jobs:
  # Resolves the run-wide options (connector list, semver suffix, java-tar flag,
  # dry-run) once, so downstream matrix jobs all agree on them.
  publish_options:
    name: Resolve options for connector publishing
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout Airbyte
        # v4
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955
        with:
          ref: ${{ inputs.gitref || '' }}
          fetch-depth: 2 # Required so we can conduct a diff from the previous commit to understand what connectors have changed.
          submodules: true # Required for the enterprise repo since it uses a submodule that needs to exist for this workflow to run successfully.
      - name: List connectors to publish [manual]
        id: list-connectors-manual
        if: github.event_name == 'workflow_dispatch'
        shell: bash
        # When invoked manually, we run on the connectors specified in the input.
        run: echo connectors-to-publish=$(./poe-tasks/parse-connector-name-args.sh ${{ inputs.connectors }}) | tee -a $GITHUB_OUTPUT
      - name: List connectors to publish [On merge to master]
        id: list-connectors-master
        if: github.event_name == 'push'
        shell: bash
        # When merging to master, we run on connectors that have changed since the previous commit.
        run: echo connectors-to-publish=$(./poe-tasks/get-modified-connectors.sh --prev-commit --json) | tee -a $GITHUB_OUTPUT
      - name: Resolve semver suffix
        id: resolve-semver-suffix
        shell: bash
        run: |
          # Determine the semver suffix mode.
          # Priority: with-semver-suffix input > defaults
          # Note: On push events, inputs.* is undefined (empty string), so we treat empty as 'default'
          SEMVER_SUFFIX="${{ inputs.with-semver-suffix || 'default' }}"
          # Resolve 'default' based on trigger type
          if [[ "$SEMVER_SUFFIX" == "default" ]]; then
            if [[ "${{ github.event_name }}" == "push" ]]; then
              # On merge to master, use exact version (no suffix)
              SEMVER_SUFFIX="none"
            else
              # On manual trigger, use preview suffix
              SEMVER_SUFFIX="preview"
            fi
          fi
          echo "with-semver-suffix=$SEMVER_SUFFIX" | tee -a $GITHUB_OUTPUT
      - name: Resolve publish-java-tars
        id: resolve-publish-java-tars
        shell: bash
        run: |
          # If workflow is triggered on merge to master, we can assume
          # the workflow is running in the open-source repo and we always want to publish.
          if [[ "${{ inputs.publish-java-tars }}" == "" ]]; then
            echo "publish-java-tars=true" | tee -a $GITHUB_OUTPUT
          # if workflow is triggered manually, any other way, use the input publish-java-tars.
          else
            echo "publish-java-tars=${{ inputs.publish-java-tars }}" | tee -a $GITHUB_OUTPUT
          fi
    outputs:
      # Exactly one of the manual/master steps will run, so just OR them together.
      connectors-to-publish: ${{ steps.list-connectors-manual.outputs.connectors-to-publish || steps.list-connectors-master.outputs.connectors-to-publish }}
      with-semver-suffix: ${{ steps.resolve-semver-suffix.outputs.with-semver-suffix }}
      # publishing java tars is not optional if triggered by push to master (in the non-enterprise repo).
      publish-java-tars: ${{ steps.resolve-publish-java-tars.outputs.publish-java-tars }}
      # dry-run mode skips all uploads but runs all logic
      dry-run: ${{ inputs.dry-run || 'false' }}
publish_connectors:
name: Publish connectors
needs: [publish_options]
runs-on: ubuntu-24.04
strategy:
matrix: ${{ fromJson(needs.publish_options.outputs.connectors-to-publish) }}
max-parallel: 5
# Allow all jobs to run, even if one fails
fail-fast: false
steps:
- name: Authenticate as GitHub App
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
id: get-app-token
with:
owner: "airbytehq"
# Use dynamic repo name since this workflow is also called from airbyte-enterprise
repositories: ${{ github.event.repository.name }}
app-id: ${{ secrets.OCTAVIA_PUBLISH_BOT_APP_ID }}
private-key: ${{ secrets.OCTAVIA_PUBLISH_BOT_PRIVATE_KEY }}
- name: Checkout Airbyte
# v4
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955
with:
ref: ${{ inputs.gitref || '' }}
fetch-depth: 2 # Required so we can conduct a diff from the previous commit to understand what connectors have changed.
submodules: true # Required for the enterprise repo since it uses a submodule that needs to exist for this workflow to run successfully.
- name: Create docker buildx builder
id: create-buildx-builder
shell: bash
run: docker buildx create --use --driver=docker-container --name builder --platform linux/amd64,linux/arm64
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
with:
distribution: zulu
java-version: 21
cache: gradle
- name: Log in to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_PASSWORD }}
- name: Set up Python
# v5
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
with:
python-version: "3.11"
check-latest: true
update-environment: true
- name: Install and configure Poetry
# v1
uses: snok/install-poetry@76e04a911780d5b312d89783f7b1cd627778900a
with:
# There are a few uses of `poetry run --directory` in various scripts.
# If we upgrade to 2.x, those need to be switched to `--project`.
version: 1.8.5
- name: Install the latest version of uv
uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
with:
# We pass a token with dedicated GH rate limit
github-token: ${{ steps.get-app-token.outputs.token }}
- name: Install Poe
run: |
# Install Poe so we can run the connector tasks:
uv tool install poethepoet
- name: Get connector metadata
id: connector-metadata
working-directory: airbyte-integrations/connectors/${{ matrix.connector }}
run: |
set -euo pipefail
echo "connector-language=$(poe -qq get-language)" | tee -a $GITHUB_OUTPUT
echo "connector-version=$(poe -qq get-version)" | tee -a $GITHUB_OUTPUT
# We're intentionally not using the `google-github-actions/auth` action.
# The upload-connector-metadata step runs a script which handles auth manually.
# This is because we're writing files to multiple buckets, using different credentials
# for each bucket.
# (it's unclear whether that's actually necessary)
- name: Install gcloud
# v2.1.5
uses: google-github-actions/setup-gcloud@6a7c903a70c8625ed6700fa299f5ddb4ca6022e9
- name: Install metadata_service
run: poetry install --directory airbyte-ci/connectors/metadata_service/lib
- name: Validate connector metadata
id: validate-connector-metadata
shell: bash
run: ./poe-tasks/validate-connector-metadata.sh --name ${{ matrix.connector }}
- name: Publish to Python Registry
id: publish-python-registry
if: steps.connector-metadata.outputs.connector-language == 'python' && needs.publish_options.outputs.dry-run != 'true'
shell: bash
run: |
./poe-tasks/publish-python-registry.sh --name ${{ matrix.connector }} --with-semver-suffix ${{ needs.publish_options.outputs.with-semver-suffix }}
env:
PYTHON_REGISTRY_TOKEN: ${{ secrets.PYPI_TOKEN }}
- name: "[DRY-RUN] Skip Publish to Python Registry"
if: steps.connector-metadata.outputs.connector-language == 'python' && needs.publish_options.outputs.dry-run == 'true'
run: |
echo "DRY-RUN: Skipping Python Registry publish for ${{ matrix.connector }}"
- name: Upload Python Dependencies to GCS
id: upload-python-dependencies-master
if: steps.connector-metadata.outputs.connector-language == 'python' && needs.publish_options.outputs.dry-run != 'true'
shell: bash
env:
GCS_CREDENTIALS: ${{ secrets.METADATA_SERVICE_PROD_GCS_CREDENTIALS }}
run: |
./poe-tasks/upload-python-dependencies.sh \
--name ${{ matrix.connector }} \
--bucket prod-airbyte-cloud-connector-metadata-service \
--with-semver-suffix ${{ needs.publish_options.outputs.with-semver-suffix }}
- name: "[DRY-RUN] Skip Upload Python Dependencies to GCS"
if: steps.connector-metadata.outputs.connector-language == 'python' && needs.publish_options.outputs.dry-run == 'true'
run: |
echo "DRY-RUN: Skipping Python Dependencies upload for ${{ matrix.connector }}"
- name: Build and publish JVM connectors images
id: build-and-publish-JVM-connectors-images
if: steps.connector-metadata.outputs.connector-language == 'java' && needs.publish_options.outputs.dry-run != 'true'
shell: bash
run: |
./poe-tasks/build-and-publish-java-connectors-with-tag.sh --name ${{ matrix.connector }} --with-semver-suffix ${{ needs.publish_options.outputs.with-semver-suffix }} --publish
- name: "[DRY-RUN] Build JVM connectors images (no publish)"
id: build-JVM-connectors-images-dry-run
if: steps.connector-metadata.outputs.connector-language == 'java' && needs.publish_options.outputs.dry-run == 'true'
shell: bash
run: |
echo "DRY-RUN: Building JVM connector without publishing..."
./poe-tasks/build-and-publish-java-connectors-with-tag.sh --name ${{ matrix.connector }} --with-semver-suffix ${{ needs.publish_options.outputs.with-semver-suffix }}
- name: Publish JVM connectors tar file
id: publish-JVM-connectors-tar-file
if: steps.connector-metadata.outputs.connector-language == 'java' && needs.publish_options.outputs.publish-java-tars == 'true' && needs.publish_options.outputs.dry-run != 'true'
shell: bash
run: ./poe-tasks/upload-java-connector-tar-file.sh --name ${{ matrix.connector }} --with-semver-suffix ${{ needs.publish_options.outputs.with-semver-suffix }}
env:
GCS_CREDENTIALS: ${{ secrets.METADATA_SERVICE_DEV_GCS_CREDENTIALS }}
# we allow it to fail because we are testing this step. We should remove this once we are sure it works.
continue-on-error: true
- name: "[DRY-RUN] Skip Publish JVM connectors tar file"
if: steps.connector-metadata.outputs.connector-language == 'java' && needs.publish_options.outputs.publish-java-tars == 'true' && needs.publish_options.outputs.dry-run == 'true'
run: |
echo "DRY-RUN: Skipping JVM connector tar file upload for ${{ matrix.connector }}"
- name: Build and publish Python and Manifest-Only connectors images
id: build-and-publish-python-manifest-only-connectors-images
if: steps.connector-metadata.outputs.connector-language != 'java'
uses: ./.github/actions/connector-image-build-push
with:
connector-name: ${{ matrix.connector }}
with-semver-suffix: ${{ needs.publish_options.outputs.with-semver-suffix }}
dry-run: ${{ needs.publish_options.outputs.dry-run }}
docker-hub-username: ${{ secrets.DOCKER_HUB_USERNAME }}
docker-hub-password: ${{ secrets.DOCKER_HUB_PASSWORD }}
- name: Upload connector metadata
id: upload-connector-metadata
if: needs.publish_options.outputs.dry-run != 'true'
shell: bash
run: ./poe-tasks/upload-connector-metadata.sh --name ${{ matrix.connector }} --with-semver-suffix ${{ needs.publish_options.outputs.with-semver-suffix }}
env:
GCS_CREDENTIALS: ${{ secrets.METADATA_SERVICE_PROD_GCS_CREDENTIALS }}
SPEC_CACHE_GCS_CREDENTIALS: ${{ secrets.SPEC_CACHE_SERVICE_ACCOUNT_KEY_PUBLISH }}
METADATA_SERVICE_GCS_CREDENTIALS: ${{ secrets.METADATA_SERVICE_PROD_GCS_CREDENTIALS }}
- name: "[DRY-RUN] Skip Upload connector metadata"
if: needs.publish_options.outputs.dry-run == 'true'
run: |
echo "DRY-RUN: Skipping connector metadata upload for ${{ matrix.connector }}"
# The connector registries are generated from the spec cache,
# and in some cases, there are cross-connector dependencies.
# Specifically, connectors with a strict-encrypt variant need the strict-encrypt's spec cache
# to generate their registry entry.
# For example, generating destination-postgres's cloud.json will read from
# destination-postgres-strict-encrypt's spec cache.
# As a result, we need publish_connector_registry_entries to run after publish_connectors is fully completed.
# Once strict-encrypt connectors are all gone, we can merge these steps together
# (i.e. move the generate-XYZ-registry-entry steps to the bottom of publish_connectors).
publish_connector_registry_entries:
name: Publish connector registry entries
needs: [publish_options, publish_connectors]
runs-on: ubuntu-24.04
strategy:
matrix: ${{ fromJson(needs.publish_options.outputs.connectors-to-publish) }}
max-parallel: 5
# Allow all jobs to run, even if one fails
fail-fast: false
outputs:
docker-image-tag: ${{ steps.connector-metadata.outputs.docker-image-tag }}
steps:
- name: Authenticate as GitHub App
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
id: get-app-token
with:
owner: "airbytehq"
# Use dynamic repo name since this workflow is also called from airbyte-enterprise
repositories: ${{ github.event.repository.name }}
app-id: ${{ secrets.OCTAVIA_PUBLISH_BOT_APP_ID }}
private-key: ${{ secrets.OCTAVIA_PUBLISH_BOT_PRIVATE_KEY }}
- name: Checkout Airbyte
# v4
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955
with:
ref: ${{ inputs.gitref || '' }}
fetch-depth: 2 # Required so we can conduct a diff from the previous commit to understand what connectors have changed.
submodules: true # Required for the enterprise repo since it uses a submodule that needs to exist for this workflow to run successfully.
- name: Set up Python
# v5
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
with:
python-version: "3.11"
check-latest: true
update-environment: true
- name: Install and configure Poetry
# v1
uses: snok/install-poetry@76e04a911780d5b312d89783f7b1cd627778900a
with:
version: 1.8.5
- name: Install metadata_service
run: poetry install --directory airbyte-ci/connectors/metadata_service/lib
- name: Install the latest version of uv
uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
with:
# We pass a token with dedicated GH rate limit
github-token: ${{ steps.get-app-token.outputs.token }}
- name: Install Poe
run: |
# Install Poe so we can run the connector tasks:
uv tool install poethepoet
- name: Enable progressive rollout for RC builds
if: needs.publish_options.outputs.with-semver-suffix == 'rc'
working-directory: airbyte-integrations/connectors/${{ matrix.connector }}
run: |
echo "Enabling progressive rollout for RC build..."
echo "=== enableProgressiveRollout BEFORE update ==="
grep -n "enableProgressiveRollout" metadata.yaml || echo "(not found)"
# Ephemerally enable progressive rollout in metadata.yaml (not committed back)
yq -i '.data.releases.rolloutConfiguration.enableProgressiveRollout = true' metadata.yaml
echo "=== enableProgressiveRollout AFTER update ==="
grep -n "enableProgressiveRollout" metadata.yaml
echo "Progressive rollout enabled in metadata.yaml"
- name: Get connector metadata
id: connector-metadata
working-directory: airbyte-integrations/connectors/${{ matrix.connector }}
run: |
set -euo pipefail
echo "connector-language=$(poe -qq get-language)" | tee -a $GITHUB_OUTPUT
echo "connector-version=$(poe -qq get-version)" | tee -a $GITHUB_OUTPUT
CONNECTOR_VERSION=$(poe -qq get-version)
SEMVER_SUFFIX="${{ needs.publish_options.outputs.with-semver-suffix }}"
# Generate docker image tag based on semver suffix mode
case "$SEMVER_SUFFIX" in
preview)
hash=$(git rev-parse --short=7 HEAD)
echo "docker-image-tag=${CONNECTOR_VERSION}-preview.${hash}" | tee -a $GITHUB_OUTPUT
echo "release-type-flag=--pre-release" | tee -a $GITHUB_OUTPUT
;;
rc)
echo "docker-image-tag=${CONNECTOR_VERSION}-rc1" | tee -a $GITHUB_OUTPUT
echo "release-type-flag=--main-release" | tee -a $GITHUB_OUTPUT
;;
none|*)
echo "docker-image-tag=${CONNECTOR_VERSION}" | tee -a $GITHUB_OUTPUT
echo "release-type-flag=--main-release" | tee -a $GITHUB_OUTPUT
;;
esac
- name: Generate OSS Registry Entry
id: generate-oss-registry-entry
if: needs.publish_options.outputs.dry-run != 'true'
shell: bash
run: |
echo "Generating OSS registry entry for ${{ matrix.connector }}"
poetry run --directory airbyte-ci/connectors/metadata_service/lib \
metadata_service generate-registry-entry \
--bucket-name prod-airbyte-cloud-connector-metadata-service \
--metadata-file-path "airbyte-integrations/connectors/${{ matrix.connector }}/metadata.yaml" \
--registry-type oss \
--docker-image-tag ${{ steps.connector-metadata.outputs.docker-image-tag }} \
${{ steps.connector-metadata.outputs.release-type-flag }}
env:
GCS_CREDENTIALS: ${{ secrets.METADATA_SERVICE_PROD_GCS_CREDENTIALS }}
SLACK_TOKEN: ${{ secrets.SLACK_BOT_TOKEN_AIRBYTE_TEAM }}
- name: "[DRY-RUN] Skip Generate OSS Registry Entry"
if: needs.publish_options.outputs.dry-run == 'true'
run: |
echo "DRY-RUN: Skipping OSS registry entry generation for ${{ matrix.connector }}"
- name: Generate Cloud Registry Entry
id: generate-cloud-registry-entry
if: needs.publish_options.outputs.dry-run != 'true'
shell: bash
run: |
echo "Generating Cloud registry entry for ${{ matrix.connector }}"
poetry run --directory airbyte-ci/connectors/metadata_service/lib \
metadata_service generate-registry-entry \
--bucket-name prod-airbyte-cloud-connector-metadata-service \
--metadata-file-path "airbyte-integrations/connectors/${{ matrix.connector }}/metadata.yaml" \
--registry-type cloud \
--docker-image-tag ${{ steps.connector-metadata.outputs.docker-image-tag }} \
${{ steps.connector-metadata.outputs.release-type-flag }}
env:
GCS_CREDENTIALS: ${{ secrets.METADATA_SERVICE_PROD_GCS_CREDENTIALS }}
SLACK_TOKEN: ${{ secrets.SLACK_BOT_TOKEN_AIRBYTE_TEAM }}
- name: "[DRY-RUN] Skip Generate Cloud Registry Entry"
if: needs.publish_options.outputs.dry-run == 'true'
run: |
echo "DRY-RUN: Skipping Cloud registry entry generation for ${{ matrix.connector }}"
generate_connector_registry:
name: Generate connector registry
needs: [publish_options, publish_connector_registry_entries]
if: needs.publish_options.outputs.dry-run != 'true'
uses: ./.github/workflows/generate-connector-registries.yml
secrets: inherit
notify-failure-slack-channel:
name: "Notify Slack Channel on Publish Failures"
runs-on: ubuntu-24.04
needs:
- publish_connector_registry_entries
if: ${{ always() && contains(needs.*.result, 'failure') && github.ref == 'refs/heads/master' }}
steps:
- name: Authenticate as GitHub App
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
id: get-app-token
with:
owner: "airbytehq"
# Use dynamic repo name since this workflow is also called from airbyte-enterprise
repositories: ${{ github.event.repository.name }}
app-id: ${{ secrets.OCTAVIA_PUBLISH_BOT_APP_ID }}
private-key: ${{ secrets.OCTAVIA_PUBLISH_BOT_PRIVATE_KEY }}
- name: Checkout Airbyte
# v4
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955
with:
ref: ${{ inputs.gitref || '' }}
submodules: true # Required for the enterprise repo since it uses a submodule that needs to exist for this workflow to run successfully.
- name: Match GitHub User to Slack User
id: match-github-to-slack-user
uses: ./.github/actions/match-github-to-slack-user
env:
AIRBYTE_TEAM_BOT_SLACK_TOKEN: ${{ secrets.SLACK_AIRBYTE_TEAM_READ_USERS }}
GITHUB_API_TOKEN: ${{ steps.get-app-token.outputs.token }}
- name: Send publish failures to connector-publish-failures channel
id: slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0
with:
# This data can be any valid JSON from a previous step in the GitHub Action
payload: |
{
"channel": "#connector-publish-failures",
"username": "Connectors CI/CD Bot",
"text": "🚨 Publish workflow failed:\n ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} \n merged by ${{ github.actor }} (<@${{ steps.match-github-to-slack-user.outputs.slack_user_ids }}>). "
}
env:
SLACK_WEBHOOK_URL: ${{ secrets.PUBLISH_ON_MERGE_SLACK_WEBHOOK }}
notify-failure-pager-duty:
name: "Notify PagerDuty on Publish Failures"
runs-on: ubuntu-24.04
needs:
- publish_connector_registry_entries
if: ${{ always() && contains(needs.*.result, 'failure') && github.ref == 'refs/heads/master' }}
steps:
- name: Checkout Airbyte
# v4
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955
with:
ref: ${{ inputs.gitref || '' }}
submodules: true # Required for the enterprise repo since it uses a submodule that needs to exist for this workflow to run successfully.
- name: Notify PagerDuty
id: pager-duty
uses: ./.github/actions/send-pager-duty-event
with:
# Integration URL: https://airbyte.pagerduty.com/services/P5GNI5T/integrations/PGKH9JV
integration_key: ${{ secrets.PAGER_DUTY_PUBLISH_FAILURES_INTEGRATION_KEY }}
summary: "Publish workflow failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} merged by ${{ github.actor }}"
severity: "critical"
source: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"