# Source PR #43148 — "Fixes #25721: Handle Pinot JSON results with a custom type"

# Copyright 2021 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This workflow executes end-to-end (e2e) tests using Playwright with PostgreSQL as the database.
# For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path
name: Postgresql PR Playwright E2E Tests
on:
  merge_group:
  workflow_dispatch:
  pull_request_target:
    types:
      - labeled
      - opened
      - synchronize
      - reopened
      - ready_for_review
    paths-ignore:
      - ".github/**"
      - "openmetadata-dist/**"
      - "docker/**"
      # Re-include the compose files this workflow uses to spin up the stack.
      - "!docker/development/docker-compose.yml"
      - "!docker/development/docker-compose-postgres.yml"
      - "openmetadata-ui/src/main/resources/ui/playwright/doc-generator/**"
      - "openmetadata-ui/src/main/resources/ui/playwright/docs/**"
      - "openmetadata-ui/src/main/resources/ui/scripts/**"
# pull_request_target runs with repository-scoped credentials; keep the grant minimal.
permissions:
  contents: read
  pull-requests: write  # required to post/update the results comment on the PR
# One run per PR at a time; a newer push cancels the in-flight run for that PR.
concurrency:
  group: playwright-ci-pr-postgresql-${{ github.event.pull_request.number || github.run_id }}
  cancel-in-progress: true
jobs:
  # Build the OpenMetadata distribution once; every Playwright shard reuses the artifact.
  build:
    runs-on: ubuntu-latest
    if: ${{ !github.event.pull_request.draft }}
    steps:
      - name: Wait for the labeler
        uses: lewagon/wait-on-check-action@v1.3.4
        if: ${{ github.event_name == 'pull_request_target' }}
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          check-name: Team Label
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          wait-interval: 90
      # Gate pull_request_target runs behind the "safe to test" label so fork code
      # only runs after a maintainer has opted in.
      - name: Verify PR labels
        uses: jesusvasquez333/verify-pr-label-action@v1.4.0
        if: ${{ github.event_name == 'pull_request_target' }}
        with:
          github-token: "${{ secrets.GITHUB_TOKEN }}"
          valid-labels: "safe to test"
          pull-request-number: "${{ github.event.pull_request.number }}"
          disable-reviews: true
      - name: Checkout
        uses: actions/checkout@v4
        with:
          # merge_group events carry no pull_request payload; fall back to github.sha.
          ref: ${{ github.event_name == 'merge_group' && github.sha || github.event.pull_request.head.sha }}
      - name: Setup JDK 21
        uses: actions/setup-java@v4
        with:
          java-version: '21'
          distribution: 'temurin'
      - name: Cache Maven Dependencies
        uses: actions/cache@v4
        with:
          path: ~/.m2
          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-maven-
      - name: Install antlr cli
        run: sudo make install_antlr_cli
      - name: Build with Maven
        run: mvn -DskipTests clean package
      - name: Upload Maven build artifact
        uses: actions/upload-artifact@v4
        with:
          name: openmetadata-build
          path: openmetadata-dist/target/openmetadata-*.tar.gz
          retention-days: 1

  # Run the Playwright E2E suite against PostgreSQL, split across 6 shards:
  # shard 1 runs the DataAssetRules/Basic/SearchRBAC projects; shards 2-6 run
  # the common chromium tests 5-way sharded.
  playwright-ci-postgresql:
    needs: [build]
    runs-on: ubuntu-latest
    if: ${{ !cancelled() && needs.build.result == 'success' }}
    environment: test
    strategy:
      fail-fast: false
      matrix:
        shardIndex: [1, 2, 3, 4, 5, 6]
        shardTotal: [6]
    steps:
      - name: Free Disk Space (Ubuntu)
        uses: jlumbroso/free-disk-space@main
        with:
          tool-cache: false
          android: true
          dotnet: true
          haskell: true
          large-packages: false
          swap-storage: true
          docker-images: false
      - name: Checkout
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event_name == 'merge_group' && github.sha || github.event.pull_request.head.sha }}
      - name: Download Maven build artifact
        uses: actions/download-artifact@v4
        with:
          name: openmetadata-build
          path: openmetadata-dist/target
      - name: Setup Openmetadata Test Environment
        uses: ./.github/actions/setup-openmetadata-test-environment
        with:
          python-version: "3.10"
          args: "-d postgresql -s true"
          ingestion_dependency: "all"
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version-file: "openmetadata-ui/src/main/resources/ui/.nvmrc"
      - name: Install dependencies
        working-directory: openmetadata-ui/src/main/resources/ui/
        run: yarn --ignore-scripts --frozen-lockfile
      - name: Install Playwright Browsers
        run: npx playwright@1.57.0 install chromium --with-deps
      - name: Run Playwright tests
        id: run-tests
        working-directory: openmetadata-ui/src/main/resources/ui/
        # Fix: the last --project flag previously ended with a stray trailing "\",
        # which made the shell fold the "else" keyword into the npx command and
        # broke the if statement.
        run: |
          if [ "${{ matrix.shardIndex }}" -eq "1" ]; then
            echo "🔹 Running DataAssetRules-only tests on shard 1"
            # The testMatch pattern ensures only DataAssetRules*.spec.ts files run
            npx playwright test \
              --project=setup \
              --project=DataAssetRulesEnabled \
              --project=DataAssetRulesDisabled \
              --project=Basic \
              --project=SearchRBAC
          else
            # Shards 2-6 run common chromium tests equally distributed (5-way sharding)
            CHROMIUM_SHARD=$(( ${{ matrix.shardIndex }} - 1 ))
            echo "🔹 Running common chromium tests on shard ${CHROMIUM_SHARD}/5"
            npx playwright test \
              --project=chromium \
              --grep-invert @dataAssetRules \
              --shard=${CHROMIUM_SHARD}/5
          fi
        env:
          PLAYWRIGHT_IS_OSS: true
          PLAYWRIGHT_SNOWFLAKE_USERNAME: ${{ secrets.TEST_SNOWFLAKE_USERNAME }}
          PLAYWRIGHT_SNOWFLAKE_PASSWORD: ${{ secrets.TEST_SNOWFLAKE_PASSWORD }}
          PLAYWRIGHT_SNOWFLAKE_ACCOUNT: ${{ secrets.TEST_SNOWFLAKE_ACCOUNT }}
          PLAYWRIGHT_SNOWFLAKE_DATABASE: ${{ secrets.TEST_SNOWFLAKE_DATABASE }}
          PLAYWRIGHT_SNOWFLAKE_WAREHOUSE: ${{ secrets.TEST_SNOWFLAKE_WAREHOUSE }}
          PLAYWRIGHT_SNOWFLAKE_PASSPHRASE: ${{ secrets.TEST_SNOWFLAKE_PASSPHRASE }}
          # NOTE(review): no step with id 'cypress-project-id' exists in this job,
          # so this expression resolves to an empty string — confirm intent.
          PLAYWRIGHT_PROJECT_ID: ${{ steps.cypress-project-id.outputs.CYPRESS_PROJECT_ID }}
          PLAYWRIGHT_BQ_PRIVATE_KEY: ${{ secrets.TEST_BQ_PRIVATE_KEY }}
          PLAYWRIGHT_BQ_PROJECT_ID: ${{ secrets.PLAYWRIGHT_BQ_PROJECT_ID }}
          PLAYWRIGHT_BQ_PRIVATE_KEY_ID: ${{ secrets.TEST_BQ_PRIVATE_KEY_ID }}
          PLAYWRIGHT_BQ_PROJECT_ID_TAXONOMY: ${{ secrets.TEST_BQ_PROJECT_ID_TAXONOMY }}
          PLAYWRIGHT_BQ_CLIENT_EMAIL: ${{ secrets.TEST_BQ_CLIENT_EMAIL }}
          PLAYWRIGHT_BQ_CLIENT_ID: ${{ secrets.TEST_BQ_CLIENT_ID }}
          PLAYWRIGHT_REDSHIFT_HOST: ${{ secrets.E2E_REDSHIFT_HOST_PORT }}
          PLAYWRIGHT_REDSHIFT_USERNAME: ${{ secrets.E2E_REDSHIFT_USERNAME }}
          PLAYWRIGHT_REDSHIFT_PASSWORD: ${{ secrets.E2E_REDSHIFT_PASSWORD }}
          PLAYWRIGHT_REDSHIFT_DATABASE: ${{ secrets.TEST_REDSHIFT_DATABASE }}
          PLAYWRIGHT_METABASE_USERNAME: ${{ secrets.TEST_METABASE_USERNAME }}
          PLAYWRIGHT_METABASE_PASSWORD: ${{ secrets.TEST_METABASE_PASSWORD }}
          PLAYWRIGHT_METABASE_DB_SERVICE_NAME: ${{ secrets.TEST_METABASE_DB_SERVICE_NAME }}
          PLAYWRIGHT_METABASE_HOST_PORT: ${{ secrets.TEST_METABASE_HOST_PORT }}
          PLAYWRIGHT_SUPERSET_USERNAME: ${{ secrets.TEST_SUPERSET_USERNAME }}
          PLAYWRIGHT_SUPERSET_PASSWORD: ${{ secrets.TEST_SUPERSET_PASSWORD }}
          PLAYWRIGHT_SUPERSET_HOST_PORT: ${{ secrets.TEST_SUPERSET_HOST_PORT }}
          PLAYWRIGHT_KAFKA_BOOTSTRAP_SERVERS: ${{ secrets.TEST_KAFKA_BOOTSTRAP_SERVERS }}
          PLAYWRIGHT_KAFKA_SCHEMA_REGISTRY_URL: ${{ secrets.TEST_KAFKA_SCHEMA_REGISTRY_URL }}
          PLAYWRIGHT_GLUE_ACCESS_KEY: ${{ secrets.TEST_GLUE_ACCESS_KEY }}
          PLAYWRIGHT_GLUE_SECRET_KEY: ${{ secrets.TEST_GLUE_SECRET_KEY }}
          PLAYWRIGHT_GLUE_AWS_REGION: ${{ secrets.TEST_GLUE_AWS_REGION }}
          PLAYWRIGHT_GLUE_ENDPOINT: ${{ secrets.TEST_GLUE_ENDPOINT }}
          PLAYWRIGHT_GLUE_STORAGE_SERVICE: ${{ secrets.TEST_GLUE_STORAGE_SERVICE }}
          PLAYWRIGHT_MYSQL_USERNAME: ${{ secrets.TEST_MYSQL_USERNAME }}
          PLAYWRIGHT_MYSQL_PASSWORD: ${{ secrets.TEST_MYSQL_PASSWORD }}
          PLAYWRIGHT_MYSQL_HOST_PORT: ${{ secrets.TEST_MYSQL_HOST_PORT }}
          PLAYWRIGHT_MYSQL_DATABASE_SCHEMA: ${{ secrets.TEST_MYSQL_DATABASE_SCHEMA }}
          PLAYWRIGHT_POSTGRES_USERNAME: ${{ secrets.TEST_POSTGRES_USERNAME }}
          PLAYWRIGHT_POSTGRES_PASSWORD: ${{ secrets.TEST_POSTGRES_PASSWORD }}
          PLAYWRIGHT_POSTGRES_HOST_PORT: ${{ secrets.TEST_POSTGRES_HOST_PORT }}
          PLAYWRIGHT_POSTGRES_DATABASE: ${{ secrets.TEST_POSTGRES_DATABASE }}
          PLAYWRIGHT_AIRFLOW_HOST_PORT: ${{ secrets.TEST_AIRFLOW_HOST_PORT }}
          PLAYWRIGHT_ML_MODEL_TRACKING_URI: ${{ secrets.TEST_ML_MODEL_TRACKING_URI }}
          PLAYWRIGHT_ML_MODEL_REGISTRY_URI: ${{ secrets.TEST_ML_MODEL_REGISTRY_URI }}
          PLAYWRIGHT_S3_STORAGE_ACCESS_KEY_ID: ${{ secrets.TEST_S3_STORAGE_ACCESS_KEY_ID }}
          PLAYWRIGHT_S3_STORAGE_SECRET_ACCESS_KEY: ${{ secrets.TEST_S3_STORAGE_SECRET_ACCESS_KEY }}
          PLAYWRIGHT_S3_STORAGE_END_POINT_URL: ${{ secrets.TEST_S3_STORAGE_END_POINT_URL }}
          # Recommended: pass the GitHub token lets this action correctly
          # determine the unique run id necessary to re-run the checks
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - uses: actions/upload-artifact@v4
        if: ${{ !cancelled() }}
        with:
          name: playwright-report-${{ matrix.shardIndex }}
          path: openmetadata-ui/src/main/resources/ui/playwright/output/playwright-report
          retention-days: 5
      - name: Upload test results (screenshots, traces)
        uses: actions/upload-artifact@v4
        if: ${{ !cancelled() }}
        with:
          name: playwright-test-results-${{ matrix.shardIndex }}
          path: openmetadata-ui/src/main/resources/ui/playwright/output/test-results
          retention-days: 5
      - name: Upload results JSON for summary
        uses: actions/upload-artifact@v4
        if: ${{ !cancelled() }}
        with:
          name: playwright-results-json-${{ matrix.shardIndex }}
          path: openmetadata-ui/src/main/resources/ui/playwright/output/results.json
          retention-days: 1
          if-no-files-found: ignore
      - name: Clean Up
        run: |
          cd ./docker/development
          docker compose down --remove-orphans
          sudo rm -rf ${PWD}/docker-volume
playwright-summary:
if: ${{ !cancelled() && github.event_name == 'pull_request_target' }}
needs: playwright-ci-postgresql
runs-on: ubuntu-latest
steps:
- name: Download all results JSON
uses: actions/download-artifact@v4
with:
pattern: playwright-results-json-*
path: results
- name: Post consolidated PR comment
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const fs = require('fs');
const path = require('path');
const runId = '${{ github.run_id }}';
const repo = context.repo;
const prNumber = context.payload.pull_request?.number;
if (!prNumber) return;
const artifactUrl = `https://github.com/${repo.owner}/${repo.repo}/actions/runs/${runId}`;
const commentMarker = '<!-- playwright-summary -->';
// Collect results from all shards
const shardResults = [];
const resultsDir = 'results';
if (fs.existsSync(resultsDir)) {
for (const dir of fs.readdirSync(resultsDir).sort()) {
const jsonPath = path.join(resultsDir, dir, 'results.json');
if (!fs.existsSync(jsonPath)) continue;
const shardNum = dir.replace('playwright-results-json-', '');
const report = JSON.parse(fs.readFileSync(jsonPath, 'utf8'));
const allTests = [];
function collectTests(suite, filePath) {
const file = suite.file || filePath || '';
for (const spec of (suite.specs || [])) {
for (const test of (spec.tests || [])) {
const results = test.results || [];
const lastResult = results[results.length - 1] || {};
const firstResult = results[0] || {};
allTests.push({
title: spec.title,
file: file,
status: test.status,
retries: results.length - 1,
error: lastResult.error?.message || firstResult.error?.message || '',
});
}
}
for (const child of (suite.suites || [])) {
collectTests(child, file);
}
}
for (const suite of (report.suites || [])) {
collectTests(suite, '');
}
shardResults.push({
shard: shardNum,
genuine: allTests.filter(t => t.status === 'unexpected'),
flaky: allTests.filter(t => t.status === 'flaky'),
passed: allTests.filter(t => t.status === 'expected'),
skipped: allTests.filter(t => t.status === 'skipped'),
});
}
}
if (shardResults.length === 0) {
console.log('No shard results found');
return;
}
// Aggregate totals
const totalPassed = shardResults.reduce((s, r) => s + r.passed.length, 0);
const totalFailed = shardResults.reduce((s, r) => s + r.genuine.length, 0);
const totalFlaky = shardResults.reduce((s, r) => s + r.flaky.length, 0);
const totalSkipped = shardResults.reduce((s, r) => s + r.skipped.length, 0);
const lines = [commentMarker];
if (totalFailed > 0) {
lines.push(`## 🔴 Playwright Results — ${totalFailed} failure(s)${totalFlaky > 0 ? `, ${totalFlaky} flaky` : ''}`);
} else if (totalFlaky > 0) {
lines.push(`## 🟡 Playwright Results — all passed (${totalFlaky} flaky)`);
} else {
lines.push(`## ✅ Playwright Results — all ${totalPassed} tests passed`);
}
lines.push('');
lines.push(`✅ ${totalPassed} passed · ❌ ${totalFailed} failed · 🟡 ${totalFlaky} flaky · ⏭️ ${totalSkipped} skipped`);
lines.push('');
// Per-shard summary table
lines.push('| Shard | Passed | Failed | Flaky | Skipped |');
lines.push('|-------|--------|--------|-------|---------|');
for (const r of shardResults) {
const status = r.genuine.length > 0 ? '🔴' : r.flaky.length > 0 ? '🟡' : '✅';
lines.push(`| ${status} Shard ${r.shard} | ${r.passed.length} | ${r.genuine.length} | ${r.flaky.length} | ${r.skipped.length} |`);
}
lines.push('');
// Genuine failures detail
const allGenuine = shardResults.flatMap(r => r.genuine.map(t => ({ ...t, shard: r.shard })));
if (allGenuine.length > 0) {
lines.push('### Genuine Failures (failed on all attempts)');
lines.push('');
for (const t of allGenuine.slice(0, 30)) {
const shortFile = t.file.replace(/.*playwright\/e2e\//, '');
lines.push(`<details><summary>❌ <code>${shortFile}</code> › ${t.title} (shard ${t.shard})</summary>`);
lines.push('');
lines.push('```');
lines.push(t.error.substring(0, 1000));
lines.push('```');
lines.push('</details>');
lines.push('');
}
if (allGenuine.length > 30) {
lines.push(`... and ${allGenuine.length - 30} more failures`);
lines.push('');
}
}
// Flaky tests
const allFlaky = shardResults.flatMap(r => r.flaky.map(t => ({ ...t, shard: r.shard })));
if (allFlaky.length > 0) {
lines.push(`<details><summary>🟡 ${allFlaky.length} flaky test(s) (passed on retry)</summary>`);
lines.push('');
for (const t of allFlaky.slice(0, 30)) {
const shortFile = t.file.replace(/.*playwright\/e2e\//, '');
lines.push(`- \`${shortFile}\` › ${t.title} (shard ${t.shard}, ${t.retries} ${t.retries === 1 ? 'retry' : 'retries'})`);
}
if (allFlaky.length > 30) {
lines.push(`- ... and ${allFlaky.length - 30} more`);
}
lines.push('');
lines.push('</details>');
lines.push('');
}
lines.push(`📦 [Download artifacts](${artifactUrl})`);
lines.push('');
lines.push('<details><summary>How to debug locally</summary>');
lines.push('');
lines.push('```bash');
lines.push('# Download playwright-test-results-<shard> artifact and unzip');
lines.push('npx playwright show-trace path/to/trace.zip # view trace');
lines.push('```');
lines.push('</details>');
const body = lines.join('\n');
// Find existing consolidated comment
let existingComment = null;
for await (const response of github.paginate.iterator(
github.rest.issues.listComments, { ...repo, issue_number: prNumber, per_page: 100 }
)) {
const found = response.data.find(c =>
c.user?.login === 'github-actions[bot]' && c.body?.includes(commentMarker)
);
if (found) { existingComment = found; break; }
}
// Also clean up any old per-shard comments from previous runs
for await (const response of github.paginate.iterator(
github.rest.issues.listComments, { ...repo, issue_number: prNumber, per_page: 100 }
)) {
for (const c of response.data) {
if (c.user?.login === 'github-actions[bot]' && /Playwright Shard \d/.test(c.body || '') && !c.body?.includes(commentMarker)) {
await github.rest.issues.deleteComment({ ...repo, comment_id: c.id });
console.log(`Deleted old per-shard comment ${c.id}`);
}
}
}
if (existingComment) {
await github.rest.issues.updateComment({ ...repo, comment_id: existingComment.id, body });
} else {
await github.rest.issues.createComment({ ...repo, issue_number: prNumber, body });
}