diff --git a/.github/workflows/run_runner_test_harness.yml b/.github/workflows/run_runner_test_harness.yml index fa4a4e1..3c26be8 100644 --- a/.github/workflows/run_runner_test_harness.yml +++ b/.github/workflows/run_runner_test_harness.yml @@ -18,12 +18,12 @@ jobs: steps: - name: Checkout snowflake - uses: actions/checkout@v4 + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 with: ref: ${{ github.ref }} - name: Set up Docker - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@885d1462b80bc1c1c7f0b00334ad271f09369c55 # v2 - name: Build Docker Image run: | @@ -49,7 +49,7 @@ jobs: exit 1 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c # v4 with: python-version: '3.10' @@ -68,7 +68,7 @@ jobs: cat latest.log - name: Upload Log File as Artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 with: name: latest-log-${{ matrix.pgver }} path: latest.log diff --git a/.github/workflows/snowflake_regression_tests.yml b/.github/workflows/snowflake_regression_tests.yml index 1bb6558..3dcc9fe 100644 --- a/.github/workflows/snowflake_regression_tests.yml +++ b/.github/workflows/snowflake_regression_tests.yml @@ -20,12 +20,12 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 with: ref: ${{ github.ref }} - name: Set up Docker - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@885d1462b80bc1c1c7f0b00334ad271f09369c55 # v2 - name: Build Docker Image run: | @@ -269,7 +269,7 @@ jobs: - name: Upload Log File as Artifact if: success() || failure() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 with: name: latest-log-${{ matrix.pgver }} path: latest.log diff --git a/Dockerfile b/Dockerfile index cd7c036..7cbd83b 100644 --- 
a/Dockerfile +++ b/Dockerfile @@ -1,20 +1,18 @@ -ARG PGVER -FROM postgres:$PGVER-alpine +ARG PGVER=17 +FROM postgres:${PGVER}-alpine RUN apk add --no-cache \ make \ gcc \ musl-dev \ postgresql-dev \ - git \ - clang \ - llvm + git WORKDIR /home/postgres/snowflake COPY . /home/postgres/snowflake/ -RUN USE_PGXS=1 make && USE_PGXS=1 make install +RUN USE_PGXS=1 make with_llvm=no && USE_PGXS=1 make with_llvm=no install EXPOSE 5432 diff --git a/test/runner.py b/test/runner.py index b7d9c6e..509fe1f 100644 --- a/test/runner.py +++ b/test/runner.py @@ -191,8 +191,9 @@ def runTest(testName): try: # Build the psql command with appropriate switches/inputs/outputs, psql_command, actual_output_file, expected_output_file = build_psql_command(testName) - # Execute the psql command - result = subprocess.run(psql_command, shell=True, capture_output=True, text=True) + # Execute the psql command, redirecting output to file via Python (not shell) + with open(actual_output_file, 'w') as outf: + result = subprocess.run(psql_command, stdout=outf, stderr=subprocess.STDOUT, text=True) # Compare actual and expected output files, setting shallow=False compares content and not just timestamps/size if filecmp.cmp(actual_output_file, expected_output_file, shallow=False): result_status = "pass" @@ -325,16 +326,22 @@ def build_psql_command(sql_file): # Assume the expected output file is in the same location as the sql_file, but with a .out extension expected_output_file = sql_file.replace('.sql', '.out') - # Construct the full psql command using the database parameters and file paths - psql_command = f"{psql_command_path} -X -a -d {pgdb} -p {port} -h {pghost} < {sql_file} > {actual_output_file} 2>&1" - + # Construct the psql command as an argument list (no shell interpretation needed) + psql_command = [ + psql_command_path, "-X", "-a", + "-d", pgdb, + "-p", str(port), + "-h", pghost, + "-f", sql_file, + ] + if glDebug: print("sql_file: " + str(sql_file)) print("port: " + str(port)) print("actual 
output file: " + str(actual_output_file)) print("expected output file: " + str(expected_output_file)) print("psql_command: " + str(psql_command)) - + # Return the constructed psql command and the paths for the actual and expected output files return psql_command, actual_output_file, expected_output_file diff --git a/test/t/lib/18config.env b/test/t/lib/18config.env new file mode 100644 index 0000000..210935d --- /dev/null +++ b/test/t/lib/18config.env @@ -0,0 +1,46 @@ +# Use this file to set a group of values to environment variables; you can source this file to set all the values at once. +export EDGE_INSTALL_SCRIPT=install.py +export REPO=https://pgedge-devel.s3.amazonaws.com/REPO +export EDGE_REPO=$REPO/$EDGE_INSTALL_SCRIPT +export EDGE_HOST=127.0.0.1 + +# Use this environment variable to set the number of seconds that a timing-sensitive test +# sleeps before confirming a result has been replicated. +export EDGE_SLEEP=5 + +# Your setup scripts should start at the following port, and iterate through the setup for the number of nodes in +# EDGE_NODES. 
+ +export EDGE_START_PORT=5432 +export EDGE_NODES=2 + +# This is where the installation should happen: +export NC_DIR="nc" +export EDGE_HOME_DIR="$NC_DIR/pgedge" +export EDGE_CLUSTER="demo" +export EDGE_CLUSTER_DIR="$EDGE_HOME_DIR/cluster/$EDGE_CLUSTER" + +# These are the arguments associated with the cli setup: +export EDGE_USERNAME="lcusr" +export EDGE_PASSWORD="password" +export EDGE_DB="lcdb" +export EDGE_REPUSER=`whoami` + +# postgres version details +export EDGE_INST_VERSION=18 +export EDGE_COMPONENT="pg$EDGE_INST_VERSION" + +# Leave spock_ver empty if you want to use Spock's default version +# As of 1st August 2024, spock40 is the default pinned version + +# As of 10/23/24: Note that if the spock version is empty, cluster add-node will FAIL: +# It will return an error: ERROR: function spock.set_cluster_readonly() does not exist +export EDGE_SPOCK_VER="4.0.9" + +export EDGE_CLI="pgedge" + +# Path to store autoddl related actual outputs +export EDGE_ACTUAL_OUT_DIR="/tmp/auto_ddl/" + +# To ensure locale related outputs (such as monetary values) stay consistent +export LC_ALL="C.UTF-8" diff --git a/test/t/util_test.py b/test/t/util_test.py index dfede79..cf97274 100644 --- a/test/t/util_test.py +++ b/test/t/util_test.py @@ -1,4 +1,4 @@ -import sys, os, psycopg2, json, subprocess, shutil, re, csv, socket +import sys, os, psycopg2, json, subprocess, shutil, re, csv, socket, shlex from dotenv import load_dotenv from psycopg2 import sql @@ -85,7 +85,7 @@ def enable_autoddl(host, dbname, port, pw, usr): def run_cmd(msg, cmd, node_path): print(cmd) - result = subprocess.run(f"{node_path}/pgedge/pgedge {cmd}", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) + result = subprocess.run([f"{node_path}/pgedge/pgedge", *shlex.split(cmd)], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) # nosec B603 return result # ************************************************************************************************************** @@ -97,7 +97,7 @@ 
def run_cmd(msg, cmd, node_path): def run_nc_cmd(msg, cmd, node_path): print(cmd) - result = subprocess.run(f"{node_path}/pgedge {cmd}", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) + result = subprocess.run([f"{node_path}/pgedge", *shlex.split(cmd)], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) # nosec B603 return result # ************************************************************************************************************** @@ -358,9 +358,9 @@ def get_sqlite_connection(db_file): ## Execute a query on the SQLite database: -def execute_sqlite_query(conn): +def execute_sqlite_query(conn, query): cur = conn.cursor() - cur.execute(f"{query}") + cur.execute(query) rows = cur.fetchall() for row in rows: print(row)