diff --git a/actions/setup/js/check_runs_helpers.cjs b/actions/setup/js/check_runs_helpers.cjs new file mode 100644 index 00000000000..b121691de0b --- /dev/null +++ b/actions/setup/js/check_runs_helpers.cjs @@ -0,0 +1,83 @@ +// @ts-check + +/** + * Returns true for check runs that represent deployment environment gates rather + * than CI checks. + * @param {any} run + * @returns {boolean} + */ +function isDeploymentCheck(run) { + return run?.app?.slug === "github-deployments"; +} + +/** + * Select latest check run per name and apply standard filtering. + * @param {any[]} checkRuns + * @param {{ + * includeList?: string[]|null, + * excludeList?: string[]|null, + * excludedCheckRunIds?: Set, + * }} [options] + * @returns {{relevant: any[], deploymentCheckCount: number, currentRunFilterCount: number}} + */ +function selectLatestRelevantChecks(checkRuns, options = {}) { + const includeList = options.includeList || null; + const excludeList = options.excludeList || null; + const excludedCheckRunIds = options.excludedCheckRunIds || new Set(); + + /** @type {Map} */ + const latestByName = new Map(); + let deploymentCheckCount = 0; + let currentRunFilterCount = 0; + + for (const run of checkRuns) { + if (isDeploymentCheck(run)) { + deploymentCheckCount++; + continue; + } + if (excludedCheckRunIds.has(run.id)) { + currentRunFilterCount++; + continue; + } + const existing = latestByName.get(run.name); + if (!existing || new Date(run.started_at ?? 0) > new Date(existing.started_at ?? 0)) { + latestByName.set(run.name, run); + } + } + + const relevant = []; + for (const [name, run] of latestByName) { + if (includeList && includeList.length > 0 && !includeList.includes(name)) { + continue; + } + if (excludeList && excludeList.length > 0 && excludeList.includes(name)) { + continue; + } + relevant.push(run); + } + + return { relevant, deploymentCheckCount, currentRunFilterCount }; +} + +/** + * Computes failing checks with shared semantics. 
+ * @param {any[]} checkRuns + * @param {{allowPending?: boolean}} [options] + * @returns {any[]} + */ +function getFailingChecks(checkRuns, options = {}) { + const allowPending = options.allowPending === true; + const failedConclusions = new Set(["failure", "cancelled", "timed_out"]); + return checkRuns.filter(run => { + if (run.status === "completed") { + return run.conclusion != null && failedConclusions.has(run.conclusion); + } + return !allowPending; + }); +} + +module.exports = { + isDeploymentCheck, + selectLatestRelevantChecks, + getFailingChecks, +}; diff --git a/actions/setup/js/check_skip_if_check_failing.cjs b/actions/setup/js/check_skip_if_check_failing.cjs index 97d0ff5475c..9051488d426 100644 --- a/actions/setup/js/check_skip_if_check_failing.cjs +++ b/actions/setup/js/check_skip_if_check_failing.cjs @@ -5,6 +5,7 @@ const { getErrorMessage, isRateLimitError } = require("./error_helpers.cjs"); const { ERR_API } = require("./error_codes.cjs"); const { getBaseBranch } = require("./get_base_branch.cjs"); const { writeDenialSummary } = require("./pre_activation_summary.cjs"); +const { selectLatestRelevantChecks, getFailingChecks } = require("./check_runs_helpers.cjs"); /** * Determines the ref to check for CI status. @@ -52,22 +53,6 @@ function parseListEnv(envValue) { } } -/** - * Returns true for check runs that represent deployment environment gates rather - * than CI checks. These should be ignored by default so that a pending deployment - * approval does not falsely block the agentic workflow. - * - * Deployment gate checks are identified by the GitHub App that created them: - * - "github-deployments" – the built-in GitHub Deployments service - * - * @param {object} run - A check run object from the GitHub API - * @returns {boolean} - */ -function isDeploymentCheck(run) { - const slug = run.app?.slug; - return slug === "github-deployments"; -} - /** * Fetches the check run IDs for all jobs in the current workflow run. 
* These IDs are used to filter out the current workflow's own checks @@ -149,25 +134,11 @@ async function main() { // Filter to the latest run per check name (GitHub may have multiple runs per name). // Deployment gate checks and the current run's own checks are silently skipped here // so they never influence the gate. - /** @type {Map} */ - const latestByName = new Map(); - let deploymentCheckCount = 0; - let currentRunFilterCount = 0; - for (const run of checkRuns) { - if (isDeploymentCheck(run)) { - deploymentCheckCount++; - continue; - } - if (currentRunCheckRunIds.has(run.id)) { - currentRunFilterCount++; - continue; - } - const name = run.name; - const existing = latestByName.get(name); - if (!existing || new Date(run.started_at ?? 0) > new Date(existing.started_at ?? 0)) { - latestByName.set(name, run); - } - } + const { relevant, deploymentCheckCount, currentRunFilterCount } = selectLatestRelevantChecks(checkRuns, { + includeList, + excludeList, + excludedCheckRunIds: currentRunCheckRunIds, + }); if (deploymentCheckCount > 0) { core.info(`Skipping ${deploymentCheckCount} deployment gate check(s) (app: github-deployments)`); @@ -176,32 +147,9 @@ async function main() { core.info(`Skipping ${currentRunFilterCount} check run(s) from the current workflow run`); } - // Apply user-defined include/exclude filtering - const relevant = []; - for (const [name, run] of latestByName) { - if (includeList && includeList.length > 0 && !includeList.includes(name)) { - continue; - } - if (excludeList && excludeList.length > 0 && excludeList.includes(name)) { - continue; - } - relevant.push(run); - } - core.info(`Evaluating ${relevant.length} check run(s) after filtering`); - // A check is "failing" if it either: - // 1. Completed with a non-success conclusion (failure, cancelled, timed_out), OR - // 2. 
Is still pending/in-progress — unless allow-pending is set - const failedConclusions = new Set(["failure", "cancelled", "timed_out"]); - - const failingChecks = relevant.filter(run => { - if (run.status === "completed") { - return run.conclusion != null && failedConclusions.has(run.conclusion); - } - // Pending/queued/in_progress: treat as failing unless allow-pending is true - return !allowPending; - }); + const failingChecks = getFailingChecks(relevant, { allowPending }); if (failingChecks.length > 0) { const names = failingChecks.map(r => (r.status === "completed" ? `${r.name} (${r.conclusion})` : `${r.name} (${r.status})`)).join(", "); diff --git a/actions/setup/js/merge_pull_request.cjs b/actions/setup/js/merge_pull_request.cjs new file mode 100644 index 00000000000..85cb9f4fdc2 --- /dev/null +++ b/actions/setup/js/merge_pull_request.cjs @@ -0,0 +1,583 @@ +// @ts-check +/// + +const { createAuthenticatedGitHubClient } = require("./handler_auth.cjs"); +const { getErrorMessage } = require("./error_helpers.cjs"); +const { resolveTargetRepoConfig, resolveAndValidateRepo } = require("./repo_helpers.cjs"); +const { globPatternToRegex } = require("./glob_pattern_helpers.cjs"); +const { isStagedMode } = require("./safe_output_helpers.cjs"); +const { selectLatestRelevantChecks } = require("./check_runs_helpers.cjs"); +const { withRetry, isTransientError } = require("./error_recovery.cjs"); +const { normalizeBranchName } = require("./normalize_branch_name.cjs"); +const { resolveNumberFromTemporaryId } = require("./temporary_id.cjs"); +const MERGEABILITY_PENDING_ERROR = "pull request mergeability is still being computed"; + +/** + * @typedef {import('./types/handler-factory').HandlerFactoryFunction} HandlerFactoryFunction + */ + +/** + * @param {string[]} patterns + * @returns {RegExp[]} + */ +function compilePathGlobs(patterns) { + return patterns.map(p => globPatternToRegex(p, { pathMode: true, caseSensitive: true })); +} + +/** + * @param {any} githubClient + * @param 
{string} owner + * @param {string} repo + * @param {number} pullNumber + * @returns {Promise} + */ +async function getPullRequestWithMergeability(githubClient, owner, repo, pullNumber) { + core.info(`Fetching PR #${pullNumber} in ${owner}/${repo} with mergeability retry`); + return withRetry( + async () => { + const { data } = await githubClient.rest.pulls.get({ + owner, + repo, + pull_number: pullNumber, + }); + if (data && data.mergeable === null) { + throw new Error(MERGEABILITY_PENDING_ERROR); + } + return data; + }, + { + maxRetries: 3, + initialDelayMs: 1000, + shouldRetry: error => { + const msg = getErrorMessage(error).toLowerCase(); + return isTransientError(error) || msg === MERGEABILITY_PENDING_ERROR; + }, + }, + `fetch pull request #${pullNumber}` + ).catch(async error => { + try { + const fallback = await githubClient.rest.pulls.get({ + owner, + repo, + pull_number: pullNumber, + }); + if (fallback?.data) { + core.warning(`Mergeability remained unknown after retries for PR #${pullNumber}, continuing with latest state`); + return fallback.data; + } + } catch (fallbackError) { + throw new Error(`Failed to fetch pull request #${pullNumber} after retry and fallback attempts. Retry error: ${getErrorMessage(error)}. 
Fallback error: ${getErrorMessage(fallbackError)}`); + } + throw error; + }); +} + +/** + * @param {any} githubClient + * @param {string} owner + * @param {string} repo + * @param {number} pullNumber + * @returns {Promise<{reviewDecision: string|null, unresolvedThreadCount: number}>} + */ +async function getReviewSummary(githubClient, owner, repo, pullNumber) { + core.info(`Collecting review summary for PR #${pullNumber}`); + let unresolvedThreadCount = 0; + let reviewDecision = null; + let cursor = null; + let hasNextPage = true; + let page = 0; + while (hasNextPage) { + page++; + const result = await withRetry( + async () => + githubClient.graphql( + ` + query($owner: String!, $repo: String!, $number: Int!, $after: String) { + repository(owner: $owner, name: $repo) { + pullRequest(number: $number) { + reviewDecision + reviewThreads(first: 100, after: $after) { + pageInfo { hasNextPage endCursor } + nodes { isResolved } + } + } + } + } + `, + { owner, repo, number: pullNumber, after: cursor } + ), + { + maxRetries: 3, + initialDelayMs: 1000, + shouldRetry: error => isTransientError(error), + }, + `fetch review summary GraphQL page ${page} for PR #${pullNumber}` + ); + + const pr = result?.repository?.pullRequest; + if (!pr) { + core.warning(`No pull request data returned while reading review summary for PR #${pullNumber}`); + break; + } + reviewDecision = pr.reviewDecision || null; + const threads = pr.reviewThreads?.nodes || []; + core.info(`Review page ${page}: ${threads.length} thread(s)`); + unresolvedThreadCount += threads.filter(t => !t.isResolved).length; + hasNextPage = pr.reviewThreads?.pageInfo?.hasNextPage === true; + cursor = pr.reviewThreads?.pageInfo?.endCursor || null; + } + + core.info(`Review summary: decision=${reviewDecision || "null"}, unresolvedThreads=${unresolvedThreadCount}`); + return { reviewDecision, unresolvedThreadCount }; +} + +/** + * @param {any} githubClient + * @param {string} owner + * @param {string} repo + * @param {string} 
baseBranch + * @returns {Promise<{isProtected: boolean, isDefault: boolean, defaultBranch: string|null, requiredChecks: string[]}>} + */ +async function getBranchPolicy(githubClient, owner, repo, baseBranch) { + const baseBranchValidation = sanitizeBranchName(baseBranch, "target base"); + if (!baseBranchValidation.valid || !baseBranchValidation.value) { + throw new Error(`Invalid target base branch for policy evaluation: ${baseBranchValidation.error} (original: ${JSON.stringify(baseBranch)}, normalized: ${JSON.stringify(baseBranchValidation.normalized || "")})`); + } + const sanitizedBaseBranch = baseBranchValidation.value; + + core.info(`Checking target branch policy for ${owner}/${repo}@${sanitizedBaseBranch}`); + const [{ data: branch }, { data: repository }] = await Promise.all([ + githubClient.rest.repos.getBranch({ + owner, + repo, + branch: sanitizedBaseBranch, + }), + githubClient.rest.repos.get({ + owner, + repo, + }), + ]); + + const defaultBranchRaw = repository.default_branch; + const defaultBranchValidation = sanitizeBranchName(defaultBranchRaw, "default"); + const defaultBranch = defaultBranchValidation.valid ? defaultBranchValidation.value : defaultBranchRaw; + const isDefault = defaultBranch !== null && sanitizedBaseBranch === defaultBranch; + if (isDefault) { + core.info(`Target branch ${sanitizedBaseBranch} is the repository default branch`); + } + + const isProtected = branch?.protected === true; + if (isProtected) { + core.info(`Target branch ${sanitizedBaseBranch} is protected`); + return { isProtected: true, isDefault, defaultBranch, requiredChecks: [] }; + } + + try { + const { data } = await githubClient.rest.repos.getBranchProtection({ + owner, + repo, + branch: sanitizedBaseBranch, + }); + const contexts = Array.isArray(data?.required_status_checks?.contexts) ? data.required_status_checks.contexts : []; + const checks = Array.isArray(data?.required_status_checks?.checks) ? 
data.required_status_checks.checks.map(c => c?.context).filter(Boolean) : []; + core.info(`Branch protection checks for ${sanitizedBaseBranch}: ${[...new Set([...contexts, ...checks])].join(", ") || "(none)"}`); + return { isProtected: false, isDefault, defaultBranch, requiredChecks: [...new Set([...contexts, ...checks])] }; + } catch (error) { + if (error && typeof error === "object" && "status" in error && error.status === 404) { + core.info(`No branch protection rules found for ${sanitizedBaseBranch}`); + return { isProtected: false, isDefault, defaultBranch, requiredChecks: [] }; + } + core.error(`Failed to read branch protection for ${sanitizedBaseBranch}: ${getErrorMessage(error)}`); + throw error; + } +} + +/** + * @param {any} githubClient + * @param {string} owner + * @param {string} repo + * @param {string} headSha + * @param {string[]} requiredChecks + * @returns {Promise<{missing: string[], failing: Array<{name: string, status: string, conclusion: string|null}>}>} + */ +async function evaluateRequiredChecks(githubClient, owner, repo, headSha, requiredChecks) { + core.info(`Evaluating required checks on ${headSha}: ${requiredChecks.join(", ") || "(none)"}`); + if (requiredChecks.length === 0) { + return { missing: [], failing: [] }; + } + + const checkRuns = await githubClient.paginate(githubClient.rest.checks.listForRef, { + owner, + repo, + ref: headSha, + per_page: 100, + }); + + const { relevant } = selectLatestRelevantChecks(checkRuns, { includeList: requiredChecks }); + core.info(`Fetched ${checkRuns.length} check run(s), ${relevant.length} relevant latest check run(s)`); + const byName = new Map(relevant.map(run => [run.name, run])); + const missing = []; + const failing = []; + + for (const checkName of requiredChecks) { + const run = byName.get(checkName); + if (!run) { + core.warning(`Required check missing: ${checkName}`); + missing.push(checkName); + continue; + } + if (run.status !== "completed" || run.conclusion !== "success") { + 
core.warning(`Required check not passing: ${checkName} status=${run.status} conclusion=${run.conclusion || "null"}`); + failing.push({ name: checkName, status: run.status, conclusion: run.conclusion || null }); + } + } + + return { missing, failing }; +} + +/** + * @returns {number|undefined} + */ +function resolveContextPullNumber() { + if (context.payload?.pull_request?.number) { + return context.payload.pull_request.number; + } + if (context.payload?.issue?.pull_request && context.payload?.issue?.number) { + return context.payload.issue.number; + } + return undefined; +} + +/** + * @param {string|undefined|null} branchName + * @param {string} branchRole + * @returns {{valid: boolean, value?: string, error?: string, normalized?: string}} + */ +function sanitizeBranchName(branchName, branchRole) { + if (typeof branchName !== "string" || branchName.trim() === "") { + return { valid: false, error: `${branchRole} branch is missing` }; + } + + const normalized = normalizeBranchName(branchName); + if (typeof normalized !== "string" || normalized.trim() === "") { + return { + valid: false, + error: `${branchRole} branch is invalid after sanitization`, + normalized: String(normalized || ""), + }; + } + + if (normalized !== branchName) { + return { + valid: false, + error: `${branchRole} branch contains invalid characters`, + normalized, + }; + } + + return { valid: true, value: normalized }; +} + +/** + * @param {string[]} labels + * @param {string[]} allowedLabels + * @returns {string[]} + */ +function findAllowedLabelMatches(labels, allowedLabels) { + return labels.filter(label => allowedLabels.includes(label)); +} + +/** + * @param {any} message + * Message object containing pull_request_number (optional) + * @param {any} [resolvedTemporaryIds] + * Optional map of resolved temporary IDs from prior safe-output operations + * @returns {{success: true, pullNumber: number, fromTemporaryId: boolean} | {success: false, error: string}} + */ +function 
resolvePullRequestNumber(message, resolvedTemporaryIds) { + const pullNumberRaw = message?.pull_request_number; + if (pullNumberRaw !== undefined && pullNumberRaw !== null) { + const resolution = resolveNumberFromTemporaryId(pullNumberRaw, resolvedTemporaryIds); + if (resolution.errorMessage) { + return { success: false, error: resolution.errorMessage }; + } + if (resolution.resolved === null) { + return { success: false, error: "Failed to resolve pull_request_number" }; + } + return { success: true, pullNumber: resolution.resolved, fromTemporaryId: resolution.wasTemporaryId }; + } + + const contextPullNumber = resolveContextPullNumber(); + if (!contextPullNumber) { + return { success: false, error: "pull_request_number is required for merge_pull_request" }; + } + return { success: true, pullNumber: contextPullNumber, fromTemporaryId: false }; +} + +/** + * Handler factory for merge_pull_request. + * @type {HandlerFactoryFunction} + */ +async function main(config = {}) { + const githubClient = await createAuthenticatedGitHubClient(config); + const isStaged = isStagedMode(config); + const { defaultTargetRepo, allowedRepos } = resolveTargetRepoConfig(config); + const maxCount = Number(config.max || 1); + const requiredLabels = Array.isArray(config.required_labels) ? config.required_labels : []; + const allowedLabels = Array.isArray(config.allowed_labels) ? config.allowed_labels : []; + const allowedBranches = Array.isArray(config.allowed_branches) ? 
config.allowed_branches : []; + + const allowedBranchPatterns = compilePathGlobs(allowedBranches); + core.info(`merge_pull_request handler configured: max=${maxCount}, requiredLabels=${requiredLabels.length}, allowedLabels=${allowedLabels.length}, allowedBranches=${allowedBranches.length}, staged=${isStaged}`); + + let processedCount = 0; + + return async function handleMergePullRequest(message, resolvedTemporaryIds) { + core.info(`Processing merge_pull_request message: ${JSON.stringify({ pull_request_number: message?.pull_request_number, repo: message?.repo, merge_method: message?.merge_method })}`); + if (processedCount >= maxCount) { + core.warning(`Skipping merge_pull_request: max count of ${maxCount} reached`); + return { success: false, error: `Max count of ${maxCount} reached` }; + } + processedCount++; + + const repoResult = resolveAndValidateRepo(message, defaultTargetRepo, allowedRepos, "merge pull request"); + if (!repoResult.success) { + core.error(`Repository validation failed: ${repoResult.error}`); + return { success: false, error: repoResult.error }; + } + const { owner, repo } = repoResult.repoParts; + core.info(`Resolved target repository: ${owner}/${repo}`); + + const pullNumberResolution = resolvePullRequestNumber(message, resolvedTemporaryIds); + if (!pullNumberResolution.success) { + core.error(pullNumberResolution.error); + return { success: false, error: pullNumberResolution.error }; + } + const pullNumber = pullNumberResolution.pullNumber; + if (pullNumberResolution.fromTemporaryId) { + core.info(`Resolved temporary ID '${String(message?.pull_request_number)}' to pull request #${pullNumber}`); + } + core.info(`Target PR number: ${pullNumber}`); + + /** @type {Array<{code: string, message: string, details?: any}>} */ + const failureReasons = []; + + try { + const pr = await getPullRequestWithMergeability(githubClient, owner, repo, pullNumber); + if (!pr) { + core.error(`Pull request #${pullNumber} not found`); + return { success: false, 
error: `Pull request #${pullNumber} not found` }; + } + const sourceBranchValidation = sanitizeBranchName(pr.head?.ref, "source"); + if (!sourceBranchValidation.valid) { + failureReasons.push({ + code: "source_branch_invalid", + message: sourceBranchValidation.error || "source branch is invalid", + details: { source_branch: pr.head?.ref, normalized: sourceBranchValidation.normalized || null }, + }); + } + const sourceBranch = sourceBranchValidation.valid ? sourceBranchValidation.value : null; + + const baseBranchValidation = sanitizeBranchName(pr.base?.ref, "target base"); + if (!baseBranchValidation.valid) { + failureReasons.push({ + code: "target_base_branch_invalid", + message: baseBranchValidation.error || "target base branch is invalid", + details: { base_branch: pr.base?.ref, normalized: baseBranchValidation.normalized || null }, + }); + } + const baseBranch = baseBranchValidation.valid ? baseBranchValidation.value : null; + + core.info( + `PR state: merged=${pr.merged}, draft=${pr.draft}, mergeable=${pr.mergeable}, mergeable_state=${pr.mergeable_state || "unknown"}, head=${JSON.stringify(sourceBranch || pr.head?.ref || null)}, base=${JSON.stringify(baseBranch || pr.base?.ref || null)}` + ); + if (pr.merged) { + core.info(`PR #${pullNumber} is already merged, returning idempotent success`); + return { + success: true, + merged: true, + alreadyMerged: true, + pull_request_number: pr.number, + pull_request_url: pr.html_url, + checks_evaluated: [], + }; + } + + if (pr.draft) { + failureReasons.push({ code: "pr_is_draft", message: "Pull request is still in draft state" }); + } + if (pr.mergeable === false || pr.mergeable_state === "dirty") { + failureReasons.push({ code: "merge_conflicts", message: "Pull request has unresolved merge conflicts" }); + } + if (pr.mergeable !== true) { + failureReasons.push({ code: "not_mergeable", message: `Pull request is not mergeable (mergeable=${String(pr.mergeable)}, state=${pr.mergeable_state || "unknown"})` }); + } + + const 
labels = (pr.labels || []).map(l => l.name).filter(Boolean); + core.info(`PR labels (${labels.length}): ${labels.join(", ") || "(none)"}`); + const missingRequiredLabels = requiredLabels.filter(label => !labels.includes(label)); + if (missingRequiredLabels.length > 0) { + failureReasons.push({ + code: "missing_required_labels", + message: "Required labels are missing", + details: { missing: missingRequiredLabels, present: labels }, + }); + } + + if (allowedLabels.length > 0) { + const matchedLabels = findAllowedLabelMatches(labels, allowedLabels); + core.info(`Allowed label match count: ${matchedLabels.length}`); + if (matchedLabels.length === 0) { + failureReasons.push({ + code: "allowed_labels_no_match", + message: "No pull request label matches allowed-labels", + details: { present: labels, allowed_labels: allowedLabels }, + }); + } + } + + if (allowedBranchPatterns.length > 0 && sourceBranch && !allowedBranchPatterns.some(re => re.test(sourceBranch))) { + failureReasons.push({ + code: "branch_not_allowed", + message: `Source branch "${sourceBranch}" does not match allowed-branches`, + details: { source_branch: sourceBranch, patterns: allowedBranches }, + }); + } + if (allowedBranchPatterns.length > 0) { + core.info(`Allowed branch patterns: ${allowedBranches.join(", ")}`); + } + + /** @type {{isProtected: boolean, isDefault: boolean, defaultBranch: string|null, requiredChecks: string[]}} */ + let branchPolicy = { isProtected: false, isDefault: false, defaultBranch: null, requiredChecks: [] }; + if (baseBranch) { + branchPolicy = await getBranchPolicy(githubClient, owner, repo, baseBranch); + if (branchPolicy.isProtected) { + failureReasons.push({ + code: "target_branch_protected", + message: `Target branch "${baseBranch}" is protected`, + }); + } + if (branchPolicy.isDefault) { + failureReasons.push({ + code: "target_branch_default", + message: `Target branch "${baseBranch}" is the repository default branch`, + details: { default_branch: 
branchPolicy.defaultBranch }, + }); + } + } + + const checkSummary = await evaluateRequiredChecks(githubClient, owner, repo, pr.head.sha, branchPolicy.requiredChecks); + core.info(`Required check summary: missing=${checkSummary.missing.length}, failing=${checkSummary.failing.length}`); + if (checkSummary.missing.length > 0) { + failureReasons.push({ + code: "required_checks_missing", + message: "Required status checks are not completed", + details: { missing: checkSummary.missing }, + }); + } + if (checkSummary.failing.length > 0) { + failureReasons.push({ + code: "required_checks_failing", + message: "Required status checks are not passing", + details: { failing: checkSummary.failing }, + }); + } + + if ((pr.requested_reviewers || []).length > 0 || (pr.requested_teams || []).length > 0) { + failureReasons.push({ + code: "pending_reviewers", + message: "All assigned reviewers have not approved yet", + details: { + requested_reviewers: (pr.requested_reviewers || []).map(r => r.login), + requested_teams: (pr.requested_teams || []).map(t => t.slug), + }, + }); + } + + const reviewSummary = await getReviewSummary(githubClient, owner, repo, pullNumber); + if (reviewSummary.reviewDecision === "CHANGES_REQUESTED" || reviewSummary.reviewDecision === "REVIEW_REQUIRED") { + failureReasons.push({ + code: "blocking_review_state", + message: `Blocking review state remains active (${reviewSummary.reviewDecision})`, + }); + } + if (reviewSummary.unresolvedThreadCount > 0) { + failureReasons.push({ + code: "unresolved_review_threads", + message: "Pull request has unresolved review threads", + details: { unresolved_count: reviewSummary.unresolvedThreadCount }, + }); + } + + if (failureReasons.length > 0) { + core.warning(`merge_pull_request blocked with ${failureReasons.length} gate failure(s): ${failureReasons.map(r => r.code).join(", ")}`); + return { + success: false, + error: "merge_pull_request gate checks failed", + failure_reasons: failureReasons, + checks_evaluated: 
branchPolicy.requiredChecks, + }; + } + + if (isStaged) { + core.info(`Staged mode: merge for PR #${pullNumber} not executed`); + return { + success: true, + staged: true, + merged: false, + pull_request_number: pr.number, + pull_request_url: pr.html_url, + checks_evaluated: branchPolicy.requiredChecks, + }; + } + + const mergeResponse = await githubClient.rest.pulls.merge({ + owner, + repo, + pull_number: pullNumber, + merge_method: message.merge_method || "merge", + commit_title: message.commit_title, + commit_message: message.commit_message, + }); + + if (mergeResponse.data?.merged !== true) { + core.error(`Merge API returned merged=false for PR #${pullNumber}: ${mergeResponse.data?.message || "no message"}`); + return { + success: false, + error: mergeResponse.data?.message || "Merge API returned merged=false", + failure_reasons: [{ code: "merge_not_completed", message: mergeResponse.data?.message || "Merge was not completed" }], + checks_evaluated: branchPolicy.requiredChecks, + }; + } + + return { + success: true, + merged: true, + pull_request_number: pr.number, + pull_request_url: pr.html_url, + sha: mergeResponse.data?.sha, + message: mergeResponse.data?.message, + checks_evaluated: branchPolicy.requiredChecks, + }; + } catch (error) { + core.error(`merge_pull_request failed for PR #${pullNumber}: ${getErrorMessage(error)}`); + return { + success: false, + error: getErrorMessage(error), + failure_reasons: [{ code: "merge_operation_error", message: getErrorMessage(error) }], + }; + } + }; +} + +module.exports = { + main, + __testables: { + compilePathGlobs, + resolveContextPullNumber, + sanitizeBranchName, + getBranchPolicy, + findAllowedLabelMatches, + resolvePullRequestNumber, + }, +}; diff --git a/actions/setup/js/merge_pull_request.test.cjs b/actions/setup/js/merge_pull_request.test.cjs new file mode 100644 index 00000000000..611f742da8e --- /dev/null +++ b/actions/setup/js/merge_pull_request.test.cjs @@ -0,0 +1,127 @@ +import { describe, it, expect, 
beforeEach, afterEach, vi } from "vitest"; + +describe("merge_pull_request branch validation", () => { + beforeEach(() => { + global.core = { + info: vi.fn(), + warning: vi.fn(), + error: vi.fn(), + }; + }); + + afterEach(() => { + vi.resetModules(); + vi.clearAllMocks(); + delete global.core; + }); + + it("sanitizes and rejects invalid branch names", async () => { + const { __testables } = await import("./merge_pull_request.cjs"); + + const valid = __testables.sanitizeBranchName("feature/ok-branch", "source"); + expect(valid).toEqual({ valid: true, value: "feature/ok-branch" }); + + const invalid = __testables.sanitizeBranchName("feature/unsafe\nbranch", "source"); + expect(invalid.valid).toBe(false); + expect(invalid.error).toContain("contains invalid characters"); + }); + + it("marks protected base branch as protected", async () => { + const { __testables } = await import("./merge_pull_request.cjs"); + + const githubClient = { + rest: { + repos: { + getBranch: vi.fn().mockResolvedValue({ data: { protected: true } }), + get: vi.fn().mockResolvedValue({ data: { default_branch: "main" } }), + }, + }, + }; + + const policy = await __testables.getBranchPolicy(githubClient, "github", "gh-aw", "release/1.0"); + expect(policy.isProtected).toBe(true); + expect(policy.requiredChecks).toEqual([]); + }); + + it("detects repository default branch", async () => { + const { __testables } = await import("./merge_pull_request.cjs"); + + const githubClient = { + rest: { + repos: { + getBranch: vi.fn().mockResolvedValue({ + data: { + protected: false, + }, + }), + getBranchProtection: vi.fn().mockResolvedValue({ + data: { required_status_checks: { contexts: ["ci/test"] } }, + }), + get: vi.fn().mockResolvedValue({ data: { default_branch: "main" } }), + }, + }, + }; + + const policy = await __testables.getBranchPolicy(githubClient, "github", "gh-aw", "main"); + expect(policy.isDefault).toBe(true); + expect(policy.requiredChecks).toEqual(["ci/test"]); + }); + + it("does not mark 
non-default branches as default", async () => { + const { __testables } = await import("./merge_pull_request.cjs"); + + const githubClient = { + rest: { + repos: { + getBranch: vi.fn().mockResolvedValue({ data: { protected: false } }), + getBranchProtection: vi.fn().mockRejectedValue({ status: 404 }), + get: vi.fn().mockResolvedValue({ data: { default_branch: "main" } }), + }, + }, + }; + + const policy = await __testables.getBranchPolicy(githubClient, "github", "gh-aw", "feature-branch"); + expect(policy.isDefault).toBe(false); + }); + + it("rejects unsafe base branch names before branch policy lookup", async () => { + const { __testables } = await import("./merge_pull_request.cjs"); + + const githubClient = { + rest: { + repos: { + getBranch: vi.fn(), + get: vi.fn(), + }, + }, + }; + + await expect(__testables.getBranchPolicy(githubClient, "github", "gh-aw", "main;rm -rf /")).rejects.toThrow("Invalid target base branch for policy evaluation"); + expect(githubClient.rest.repos.getBranch).not.toHaveBeenCalled(); + }); + + it("matches allowed labels by exact value (no glob matching)", async () => { + const { __testables } = await import("./merge_pull_request.cjs"); + + expect(__testables.findAllowedLabelMatches(["release/v1", "automerge/pr-1"], ["release/*", "automerge/*"])).toEqual([]); + expect(__testables.findAllowedLabelMatches(["automerge", "release"], ["automerge", "deploy"])).toEqual(["automerge"]); + expect(__testables.findAllowedLabelMatches(["release/*", "automerge/*"], ["release/*", "automerge/*"])).toEqual(["release/*", "automerge/*"]); + expect(__testables.findAllowedLabelMatches([], ["automerge"])).toEqual([]); + expect(__testables.findAllowedLabelMatches(["AutoMerge"], ["automerge"])).toEqual([]); + }); + + it("resolves temporary ID for pull_request_number", async () => { + const { __testables } = await import("./merge_pull_request.cjs"); + const result = __testables.resolvePullRequestNumber({ pull_request_number: "aw_pr1" }, { aw_pr1: { number: 42 } 
}); + expect(result).toEqual({ success: true, pullNumber: 42, fromTemporaryId: true }); + }); + + it("fails on unresolved temporary ID for pull_request_number", async () => { + const { __testables } = await import("./merge_pull_request.cjs"); + const result = __testables.resolvePullRequestNumber({ pull_request_number: "aw_missing" }, {}); + expect(result.success).toBe(false); + if (!result.success) { + expect(result.error).toContain("Unresolved temporary ID"); + } + }); +}); diff --git a/actions/setup/js/safe_output_handler_manager.cjs b/actions/setup/js/safe_output_handler_manager.cjs index b5559147f4e..1aa5cd9c504 100644 --- a/actions/setup/js/safe_output_handler_manager.cjs +++ b/actions/setup/js/safe_output_handler_manager.cjs @@ -49,6 +49,7 @@ const HANDLER_MAP = { create_pull_request: "./create_pull_request.cjs", push_to_pull_request_branch: "./push_to_pull_request_branch.cjs", update_pull_request: "./update_pull_request.cjs", + merge_pull_request: "./merge_pull_request.cjs", close_pull_request: "./close_pull_request.cjs", mark_pull_request_as_ready_for_review: "./mark_pull_request_as_ready_for_review.cjs", hide_comment: "./hide_comment.cjs", diff --git a/actions/setup/js/safe_outputs_tools.json b/actions/setup/js/safe_outputs_tools.json index 51594c0c98a..71f164b92e5 100644 --- a/actions/setup/js/safe_outputs_tools.json +++ b/actions/setup/js/safe_outputs_tools.json @@ -807,6 +807,45 @@ "additionalProperties": false } }, + { + "name": "merge_pull_request", + "description": "Merge an existing pull request only after policy checks pass (status checks, approvals, resolved review threads, label/branch constraints, and mergeability gates). Use this when workflows require controlled merges instead of direct merge operations.", + "inputSchema": { + "type": "object", + "properties": { + "pull_request_number": { + "type": ["number", "string"], + "description": "Pull request number to merge. 
This is the numeric ID from the GitHub URL (e.g., 321 in github.com/owner/repo/pull/321). If omitted, uses the triggering pull request context." + }, + "merge_method": { + "type": "string", + "enum": ["merge", "squash", "rebase"], + "description": "Merge strategy to use: 'merge', 'squash', or 'rebase'. Defaults to 'merge'." + }, + "commit_title": { + "type": "string", + "description": "Optional custom commit title to use for the merge commit/squash commit." + }, + "commit_message": { + "type": "string", + "description": "Optional custom commit message body for the merge." + }, + "repo": { + "type": "string", + "description": "Target repository in 'owner/repo' format for cross-repository merge operations. If omitted, uses the configured default target repository." + }, + "secrecy": { + "type": "string", + "description": "Confidentiality level of the message content (e.g., \"public\", \"internal\", \"private\")." + }, + "integrity": { + "type": "string", + "description": "Trustworthiness level of the message source (e.g., \"low\", \"medium\", \"high\")." + } + }, + "additionalProperties": false + } + }, { "name": "push_to_pull_request_branch", "description": "Push committed changes to a pull request's branch. Use this to add follow-up commits to an existing PR, such as addressing review feedback or fixing issues. Changes must be committed locally before calling this tool.", diff --git a/actions/setup/js/types/safe-outputs-config.d.ts b/actions/setup/js/types/safe-outputs-config.d.ts index a48f34c97bc..4ff3a87b84e 100644 --- a/actions/setup/js/types/safe-outputs-config.d.ts +++ b/actions/setup/js/types/safe-outputs-config.d.ts @@ -206,6 +206,17 @@ interface PushToPullRequestBranchConfig extends SafeOutputConfig { "if-no-changes"?: string; } +/** + * Configuration for merging pull requests with policy checks. 
+ */ +interface MergePullRequestConfig extends SafeOutputConfig { + "required-labels"?: string[]; + "allowed-labels"?: string[]; + "allowed-branches"?: string[]; + "allowed-files"?: string[]; + "protected-files"?: string[]; +} + /** * Configuration for uploading assets */ @@ -335,6 +346,7 @@ type SpecificSafeOutputConfig = | AddReviewerConfig | UpdateIssueConfig | UpdatePullRequestConfig + | MergePullRequestConfig | PushToPullRequestBranchConfig | UploadAssetConfig | AssignMilestoneConfig @@ -371,6 +383,7 @@ export { AddReviewerConfig, UpdateIssueConfig, UpdatePullRequestConfig, + MergePullRequestConfig, PushToPullRequestBranchConfig, UploadAssetConfig, AssignMilestoneConfig, diff --git a/docs/adr/27193-gated-merge-pull-request-safe-output.md b/docs/adr/27193-gated-merge-pull-request-safe-output.md new file mode 100644 index 00000000000..d0243b91ed5 --- /dev/null +++ b/docs/adr/27193-gated-merge-pull-request-safe-output.md @@ -0,0 +1,97 @@ +# ADR-27193: Gated `merge-pull-request` Safe-Output with Policy-Driven Merge Enforcement + +**Date**: 2026-04-19 +**Status**: Draft +**Deciders**: pelikhan, Copilot + +--- + +## Part 1 — Narrative (Human-Friendly) + +### Context + +The gh-aw agentic workflow platform already supports a safe-output model in which agents can perform real side-effects (creating issues, posting comments, etc.) only through a compiler-validated, runtime-gated execution path. Until this change, there was no way for an agent to merge a pull request through the same safety layer. Merging is a high-consequence, irreversible action that must be gated on repository policy (CI status, review approval, label constraints, and branch restrictions) before it can be executed safely. The existing safe-output infrastructure — a Go compiler that validates frontmatter configuration and a Node.js runtime handler layer — already provides the extension point needed to add merge support without inventing a separate execution path. 
+ +### Decision + +We will add `merge-pull-request` as a new safe-output type that integrates with the existing compiler and runtime handler model rather than introducing a standalone merge action. The merge handler evaluates a sequenced set of policy gates — CI checks, review decision, unresolved threads, required/allowed labels, source-branch allow-list, default-branch protection, draft state, mergeability, and conflict state — and only proceeds when all gates pass. Configuration is expressed in workflow YAML frontmatter under `safe-outputs.merge-pull-request` using the same typed-config pattern already used by other safe-output types. + +### Alternatives Considered + +#### Alternative 1: Standalone Merge Action Outside the Safe-Output Model + +A dedicated GitHub Actions action or a separate Go command could have been written to perform gated merges independently of the safe-output layer. This would have been simpler to prototype but would have forked the security model: safe-outputs validate permissions at compile time, enforce `max` call budgets, and provide a single auditable execution path. A standalone action would duplicate that plumbing or omit it entirely, leaving merge calls outside the auditable boundary. + +#### Alternative 2: Thin Merge Wrapper With No Policy Gates + +The handler could have simply called the merge API and relied on external branch-protection rules configured in GitHub repository settings. This reduces code but shifts policy configuration to GitHub UI settings, making it invisible to code reviewers and hard to version-control. Policy gates expressed in workflow frontmatter are auditable, diffable, and scoped to the specific workflow rather than globally to the repo. + +#### Alternative 3: Separate Runtime Execution Path for High-Risk Operations + +Merge could have been treated as a distinct risk tier requiring its own runtime pipeline separate from lower-risk safe-output types. 
This would allow future independent evolution of merge-specific policy but introduces architectural fragmentation immediately without a concrete need. The existing model already supports configuration-driven per-type gates, so a separate pipeline is premature. + +### Consequences + +#### Positive +- Merge operations are now auditable through the same compiler + runtime path as all other safe-output types. +- Policy gates (labels, branches, CI, reviews, files) are version-controlled in workflow YAML frontmatter rather than scattered across GitHub repository UI settings. +- Shared `check_runs_helpers.cjs` eliminates logic duplication between merge gating and the existing `check_skip_if_check_failing` safe-output. +- `withRetry` wrapping of mergeability and GraphQL review-summary calls handles eventual-consistency delays from the GitHub API without requiring callers to manage retry logic. +- Idempotency: if the PR is already merged the handler returns success, making the operation safe to re-run. + +#### Negative +- The gate evaluation logic is complex (10+ sequential checks) and lives entirely in a single `.cjs` handler file; future contributors extending the gate list must understand the full sequencing. +- Retry-backed mergeability polling adds latency on every merge attempt, even when mergeability is immediately available. +- Adding a new safe-output type increases schema surface area in `main_workflow_schema.json` and both `safe_outputs_tools.json` catalogs, which must be kept in sync manually. + +#### Neutral +- The `contents:write` + `pull-requests:write` permission pair must be present in any workflow that uses `merge-pull-request`; this is enforced at compile time but requires authors to explicitly declare permissions. 
+- The W3C-style Safe Outputs specification (`docs/src/content/docs/reference/safe-outputs-specification.md`) was updated to include a formal `merge_pull_request` section, continuing the precedent of spec-first documentation for safe-output types. +- A Go spec-enforcement test (`safe_outputs_specification_merge_pull_request_test.go`) was added to prevent spec drift; this test must be updated if the type name or required policy statements change. + +--- + +## Part 2 — Normative Specification (RFC 2119) + +> The key words **MUST**, **MUST NOT**, **REQUIRED**, **SHALL**, **SHALL NOT**, **SHOULD**, **SHOULD NOT**, **RECOMMENDED**, **MAY**, and **OPTIONAL** in this section are to be interpreted as described in [RFC 2119](https://www.rfc-editor.org/rfc/rfc2119). + +### Safe-Output Model Integration + +1. The `merge-pull-request` capability **MUST** be implemented as a safe-output type within the existing compiler-plus-runtime-handler model and **MUST NOT** introduce a separate merge execution path outside that model. +2. Configuration for `merge-pull-request` **MUST** be expressed in workflow YAML frontmatter under the `safe-outputs.merge-pull-request` key, using the same typed-config parsing pattern used by other safe-output types. +3. The compiler **MUST** validate `merge-pull-request` configuration at compile time, including configured branch and label constraints (`allowed-branches`, `allowed-labels`, `required-labels`). +4. The runtime handler **MUST** be registered in the safe-output handler manager alongside all other safe-output handlers. + +### Policy Gate Evaluation + +1. Before invoking the merge API, the runtime handler **MUST** evaluate all of the following gates in order, and **MUST** abort with a descriptive error if any gate fails: + a. Draft state — the PR **MUST NOT** be a draft. + b. Mergeability — the PR **MUST** be in a mergeable state (not conflicting, not blocked). + c. 
CI checks — all required check runs **MUST** be passing; the handler **MUST** exclude deployment-environment check runs from this evaluation.
+ d. Review decision — the PR's review decision **MUST NOT** be `CHANGES_REQUESTED` or `REVIEW_REQUIRED`.
+ e. Unresolved review threads — the PR **MUST** have zero unresolved review threads.
+ f. Required labels — every label in `required-labels` **MUST** be present on the PR.
+ g. Allowed labels — when `allowed-labels` is configured, at least one PR label **MUST** exactly match a configured label name.
+ h. Allowed branches — when `allowed-branches` is configured, the PR source branch **MUST** match at least one configured glob pattern.
+ i. Default-branch protection — the PR target branch **MUST NOT** be the repository default branch.
+2. Gate evaluation **MUST** be idempotent: if the PR is already merged the handler **MUST** return a success response without attempting another merge.
+3. Mergeability retrieval **MUST** use retry logic to handle GitHub API eventual-consistency delays; implementations **SHOULD** retry at least 3 times with exponential back-off before reporting failure.
+
+### Shared Infrastructure
+
+1. Check-run filtering and deduplication logic **MUST** be implemented in a shared helper module (`check_runs_helpers.cjs`) and **MUST NOT** be duplicated in individual safe-output handlers.
+2. GraphQL calls used to retrieve review summary data **SHOULD** be wrapped with retry logic to tolerate transient API failures.
+
+### Schema and Permissions
+
+1. The `merge-pull-request` type **MUST** be declared in `main_workflow_schema.json` and in all `safe_outputs_tools.json` catalogs used by compiler and runtime.
+2. Any workflow using `merge-pull-request` **MUST** declare `contents: write` and `pull-requests: write` permissions; the compiler **MUST** enforce this at compile time.
+3. 
The W3C-style Safe Outputs specification **MUST** include a formal section documenting the `merge_pull_request` type, its policy gates, and its required permissions. + +### Conformance + +An implementation is considered conformant with this ADR if it satisfies all **MUST** and **MUST NOT** requirements above. Failure to meet any **MUST** or **MUST NOT** requirement constitutes non-conformance. + +--- + +*This is a DRAFT ADR generated by the [Design Decision Gate](https://github.com/github/gh-aw/actions/runs/24632957089) workflow. The PR author must review, complete, and finalize this document before the PR can merge.* diff --git a/docs/src/content/docs/guides/maintaining-repos.md b/docs/src/content/docs/guides/maintaining-repos.md index 8d882a88be0..6a4d43c939e 100644 --- a/docs/src/content/docs/guides/maintaining-repos.md +++ b/docs/src/content/docs/guides/maintaining-repos.md @@ -109,7 +109,7 @@ The available safe-outputs map directly to GitHub actions: | `comment-issue` | Post a comment on an issue | | `comment-pull-request` | Post a comment on a pull request | | `create-pull-request` | Open a new pull request | -| `merge-pull-request` | Merge a pull request | +| `merge-pull-request` | Merge a pull request (experimental) | | `close-issue` | Close an issue | | `create-issue` | Open a new issue | | `assign-issue` | Assign an issue to a user or team | diff --git a/docs/src/content/docs/reference/frontmatter-full.md b/docs/src/content/docs/reference/frontmatter-full.md index 8647effe0f4..6d073dba029 100644 --- a/docs/src/content/docs/reference/frontmatter-full.md +++ b/docs/src/content/docs/reference/frontmatter-full.md @@ -4575,6 +4575,54 @@ safe-outputs: # body updates enabled) update-pull-request: null + # Enable AI agents to merge pull requests under configured policy gates. 
+ # (optional) + # This field supports multiple formats (oneOf): + + # Option 1: Enable pull request merge with default policy configuration + merge-pull-request: null + + # Option 2: Configuration for controlled pull request merges. The merge is blocked + # unless all configured gates pass. + merge-pull-request: + # Maximum number of pull request merges to perform per run (default: 1). Supports + # integer or GitHub Actions expression (e.g. '${{ inputs.max }}'). + # (optional) + # This field supports multiple formats (oneOf): + + # Option 1: integer + max: 1 + + # Option 2: GitHub Actions expression that resolves to an integer at runtime + max: "example-value" + + # List of labels that must all be present on the pull request before merge is + # allowed. + # (optional) + required-labels: [] + # Array of strings + + # Exact pull request label names. At least one existing PR label must exactly + # match one of these values when configured. + # (optional) + allowed-labels: [] + # Array of strings + + # Glob patterns for allowed source branch names (pull request head ref). + # (optional) + allowed-branches: [] + # Array of strings + + # GitHub token to use for this specific output type. Overrides global github-token + # if specified. + # (optional) + github-token: "${{ secrets.GITHUB_TOKEN }}" + + # If true, evaluate merge gates and emit preview results without executing the + # merge API call. + # (optional) + staged: true + # Enable AI agents to push commits directly to pull request branches for automated # fixes or improvements. 
# (optional) diff --git a/docs/src/content/docs/reference/safe-outputs-specification.md b/docs/src/content/docs/reference/safe-outputs-specification.md index 9ce93b58a59..95bf20a8d63 100644 --- a/docs/src/content/docs/reference/safe-outputs-specification.md +++ b/docs/src/content/docs/reference/safe-outputs-specification.md @@ -7,9 +7,9 @@ sidebar: # Safe Outputs MCP Gateway Specification -**Version**: 1.16.0 +**Version**: 1.17.0 **Status**: Working Draft -**Publication Date**: 2026-04-06 +**Publication Date**: 2026-04-19 **Editor**: GitHub Agentic Workflows Team **This Version**: [safe-outputs-specification](/gh-aw/reference/safe-outputs-specification/) **Latest Published Version**: This document @@ -2760,6 +2760,84 @@ This section provides complete definitions for all remaining safe output types. --- +#### Type: merge_pull_request + +**Purpose**: Merge pull requests only when configured policy gates pass. + +**Default Max**: 1 +**Cross-Repository Support**: Yes +**Mandatory**: No + +**MCP Tool Schema**: + +```json +{ + "name": "merge_pull_request", + "description": "Merge an existing pull request only after policy checks pass (status checks, approvals, resolved review threads, label/branch constraints, and mergeability gates).", + "inputSchema": { + "type": "object", + "properties": { + "pull_request_number": { + "type": ["number", "string"], + "description": "Pull request number to merge. Supports numeric values or temporary IDs from prior safe-output operations. If omitted, uses triggering pull request context." + }, + "merge_method": { + "type": "string", + "enum": ["merge", "squash", "rebase"] + }, + "commit_title": {"type": "string"}, + "commit_message": {"type": "string"}, + "repo": { + "type": "string", + "description": "Target repository in owner/repo format." + } + }, + "additionalProperties": false + } +} +``` + +**Operational Semantics**: + +1. **Repository/PR Resolution**: Resolves target repository and pull request from context or explicit input. +2. 
**Mergeability Check**: Validates pull request is mergeable and not draft/conflicted. +3. **Policy Gates**: Enforces required checks, review decision, unresolved review thread gating, label constraints, and source branch constraints. +4. **Base Branch Protection**: Refuses merges when the target base branch is protected or is the repository default branch. +5. **Idempotency**: Returns success when the pull request is already merged. + +**Configuration Parameters**: + +- `max`: Operation limit (default: 1) +- `required-labels`: Labels that must exist on the pull request +- `allowed-labels`: Exact label names; at least one pull request label must exactly match when configured +- `allowed-branches`: Source branch glob patterns +- `target-repo`: Cross-repository target +- `allowed-repos`: Cross-repository allowlist +- `staged`: Staged mode override + +**Required Permissions**: + +*GitHub Actions Token*: + +- `contents: write` - Merge operation execution +- `pull-requests: write` - Pull request metadata and merge operations + +*GitHub App*: + +- `contents: write` - Merge operation execution +- `pull-requests: write` - Pull request metadata and merge operations +- `metadata: read` - Repository metadata (automatically granted) + +**Notes**: + +- Merge execution is blocked unless all configured gates pass. +- Merge to the repository default branch is always refused by this safe output type. +- `pull_request_number` may be a temporary ID that resolves to a pull request number from earlier safe-output operations. +- GraphQL mergeability and review-summary queries are retried with transient-error retry logic. +- Compiling a workflow with `merge-pull-request` emits: `Using experimental feature: merge-pull-request`. + +--- + #### Type: mark_pull_request_as_ready_for_review **Purpose**: Convert draft pull request to ready-for-review status. 
@@ -4715,6 +4793,12 @@ safe-outputs: ## Appendix F: Document History +**Version 1.17.0** (2026-04-19): + +- **Added**: `merge_pull_request` safe output type definition in Section 7.3, including schema, policy gate semantics, and required permissions +- **Documented**: Merge policy gates for checks, reviews, labels, branch constraints, file constraints, and base-branch restrictions +- **Updated**: Publication metadata to 1.17.0 + **Version 1.15.0** (2026-03-29): - **Added**: Section 11 "Cache Memory Integrity" specifying integrity-aware cache key format, git-backed branching, merge-down semantics, pre-agent setup, and post-agent commit requirements (CI1–CI12) diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json index 09c87aa5414..d54207180d8 100644 --- a/pkg/parser/schemas/main_workflow_schema.json +++ b/pkg/parser/schemas/main_workflow_schema.json @@ -4515,7 +4515,7 @@ }, "safe-outputs": { "type": "object", - "$comment": "Required if workflow creates or modifies GitHub resources. Operations requiring safe-outputs: autofix-code-scanning-alert, add-comment, add-labels, add-reviewer, assign-milestone, assign-to-agent, assign-to-user, close-discussion, close-issue, close-pull-request, create-agent-session, create-agent-task (deprecated, use create-agent-session), create-code-scanning-alert, create-discussion, create-issue, create-project, create-project-status-update, create-pull-request, create-pull-request-review-comment, dispatch-workflow, hide-comment, link-sub-issue, mark-pull-request-as-ready-for-review, missing-data, missing-tool, noop, push-to-pull-request-branch, remove-labels, reply-to-pull-request-review-comment, resolve-pull-request-review-thread, set-issue-type, submit-pull-request-review, threat-detection, unassign-from-user, update-discussion, update-issue, update-project, update-pull-request, update-release, upload-artifact, upload-asset. 
See documentation for complete details.", + "$comment": "Required if workflow creates or modifies GitHub resources. Operations requiring safe-outputs: autofix-code-scanning-alert, add-comment, add-labels, add-reviewer, assign-milestone, assign-to-agent, assign-to-user, close-discussion, close-issue, close-pull-request, create-agent-session, create-agent-task (deprecated, use create-agent-session), create-code-scanning-alert, create-discussion, create-issue, create-project, create-project-status-update, create-pull-request, create-pull-request-review-comment, dispatch-workflow, hide-comment, link-sub-issue, mark-pull-request-as-ready-for-review, merge-pull-request, missing-data, missing-tool, noop, push-to-pull-request-branch, remove-labels, reply-to-pull-request-review-comment, resolve-pull-request-review-thread, set-issue-type, submit-pull-request-review, threat-detection, unassign-from-user, update-discussion, update-issue, update-project, update-pull-request, update-release, upload-artifact, upload-asset. See documentation for complete details.", "description": "Safe output processing configuration that automatically creates GitHub issues, comments, and pull requests from AI workflow output without requiring write permissions in the main job", "examples": [ { @@ -7034,6 +7034,68 @@ ], "description": "Enable AI agents to edit and update existing pull request content, titles, labels, reviewers, and metadata." }, + "merge-pull-request": { + "oneOf": [ + { + "type": "null", + "description": "Enable pull request merge with default policy configuration" + }, + { + "type": "object", + "description": "Configuration for controlled pull request merges. The merge is blocked unless all configured gates pass.", + "properties": { + "max": { + "description": "Maximum number of pull request merges to perform per run (default: 1). Supports integer or GitHub Actions expression (e.g. 
'${{ inputs.max }}').", + "oneOf": [ + { + "type": "integer", + "minimum": 1, + "maximum": 10, + "default": 1 + }, + { + "type": "string", + "pattern": "^\\$\\{\\{.*\\}\\}$", + "description": "GitHub Actions expression that resolves to an integer at runtime" + } + ] + }, + "required-labels": { + "type": "array", + "items": { + "type": "string" + }, + "description": "List of labels that must all be present on the pull request before merge is allowed." + }, + "allowed-labels": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Exact pull request label names. At least one existing PR label must exactly match one of these values when configured." + }, + "allowed-branches": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Glob patterns for allowed source branch names (pull request head ref)." + }, + "github-token": { + "$ref": "#/$defs/github_token", + "description": "GitHub token to use for this specific output type. Overrides global github-token if specified." + }, + "staged": { + "type": "boolean", + "description": "If true, evaluate merge gates and emit preview results without executing the merge API call.", + "examples": [true, false] + } + }, + "additionalProperties": false + } + ], + "description": "Enable AI agents to merge pull requests under configured policy gates." 
+ }, "push-to-pull-request-branch": { "oneOf": [ { diff --git a/pkg/workflow/compiler.go b/pkg/workflow/compiler.go index b1d2107b480..a092f50542d 100644 --- a/pkg/workflow/compiler.go +++ b/pkg/workflow/compiler.go @@ -174,6 +174,12 @@ func (c *Compiler) validateWorkflowData(workflowData *WorkflowData, markdownPath return formatCompilerError(markdownPath, "error", err.Error(), err) } + // Validate safe-outputs merge-pull-request configuration + log.Printf("Validating safe-outputs merge-pull-request") + if err := validateSafeOutputsMergePullRequest(workflowData.SafeOutputs); err != nil { + return formatCompilerError(markdownPath, "error", err.Error(), err) + } + // Validate safe-job needs: declarations against known generated job IDs log.Printf("Validating safe-job needs declarations") if err := validateSafeJobNeeds(workflowData); err != nil { @@ -304,6 +310,12 @@ func (c *Compiler) validateWorkflowData(workflowData *WorkflowData, markdownPath c.IncrementWarningCount() } + // Emit experimental warning for merge-pull-request feature + if workflowData.SafeOutputs != nil && workflowData.SafeOutputs.MergePullRequest != nil { + fmt.Fprintln(os.Stderr, console.FormatWarningMessage("Using experimental feature: merge-pull-request")) + c.IncrementWarningCount() + } + // Inform users when this workflow is a redirect stub for updates. if workflowData.Redirect != "" { fmt.Fprintln(os.Stderr, formatCompilerMessage(markdownPath, "info", diff --git a/pkg/workflow/compiler_safe_outputs_handlers.go b/pkg/workflow/compiler_safe_outputs_handlers.go index 62f17b071f4..e1f15e20e16 100644 --- a/pkg/workflow/compiler_safe_outputs_handlers.go +++ b/pkg/workflow/compiler_safe_outputs_handlers.go @@ -447,6 +447,20 @@ var handlerRegistry = map[string]handlerBuilder{ AddIfTrue("staged", c.Staged). Build() }, + "merge_pull_request": func(cfg *SafeOutputsConfig) map[string]any { + if cfg.MergePullRequest == nil { + return nil + } + c := cfg.MergePullRequest + return newHandlerConfigBuilder(). 
+ AddTemplatableInt("max", c.Max). + AddStringSlice("required_labels", c.RequiredLabels). + AddStringSlice("allowed_labels", c.AllowedLabels). + AddStringSlice("allowed_branches", c.AllowedBranches). + AddIfNotEmpty("github-token", c.GitHubToken). + AddIfTrue("staged", c.Staged). + Build() + }, "close_pull_request": func(cfg *SafeOutputsConfig) map[string]any { if cfg.ClosePullRequests == nil { return nil diff --git a/pkg/workflow/compiler_types.go b/pkg/workflow/compiler_types.go index 579552333fe..dee9caa5e6d 100644 --- a/pkg/workflow/compiler_types.go +++ b/pkg/workflow/compiler_types.go @@ -548,6 +548,7 @@ type SafeOutputsConfig struct { UnassignFromUser *UnassignFromUserConfig `yaml:"unassign-from-user,omitempty"` // Remove assignees from issues UpdateIssues *UpdateIssuesConfig `yaml:"update-issue,omitempty"` UpdatePullRequests *UpdatePullRequestsConfig `yaml:"update-pull-request,omitempty"` // Update GitHub pull request title/body + MergePullRequest *MergePullRequestConfig `yaml:"merge-pull-request,omitempty"` // Merge pull requests under constrained policy checks PushToPullRequestBranch *PushToPullRequestBranchConfig `yaml:"push-to-pull-request-branch,omitempty"` UploadAssets *UploadAssetsConfig `yaml:"upload-asset,omitempty"` UploadArtifact *UploadArtifactConfig `yaml:"upload-artifact,omitempty"` // Upload files as run-scoped GitHub Actions artifacts diff --git a/pkg/workflow/imports.go b/pkg/workflow/imports.go index 02344407a91..7ec19cccd47 100644 --- a/pkg/workflow/imports.go +++ b/pkg/workflow/imports.go @@ -361,6 +361,8 @@ func hasSafeOutputType(config *SafeOutputsConfig, key string) bool { return config.UpdateIssues != nil case "update-pull-request": return config.UpdatePullRequests != nil + case "merge-pull-request": + return config.MergePullRequest != nil case "push-to-pull-request-branch": return config.PushToPullRequestBranch != nil case "upload-asset": @@ -506,6 +508,9 @@ func mergeSafeOutputConfig(result *SafeOutputsConfig, config 
map[string]any, c * if result.UpdatePullRequests == nil && importedConfig.UpdatePullRequests != nil { result.UpdatePullRequests = importedConfig.UpdatePullRequests } + if result.MergePullRequest == nil && importedConfig.MergePullRequest != nil { + result.MergePullRequest = importedConfig.MergePullRequest + } if result.PushToPullRequestBranch == nil && importedConfig.PushToPullRequestBranch != nil { result.PushToPullRequestBranch = importedConfig.PushToPullRequestBranch } else if result.PushToPullRequestBranch != nil && importedConfig.PushToPullRequestBranch != nil { diff --git a/pkg/workflow/js/safe_outputs_tools.json b/pkg/workflow/js/safe_outputs_tools.json index f8428359712..4d39b035fc4 100644 --- a/pkg/workflow/js/safe_outputs_tools.json +++ b/pkg/workflow/js/safe_outputs_tools.json @@ -962,6 +962,52 @@ "additionalProperties": false } }, + { + "name": "merge_pull_request", + "description": "Merge an existing pull request only after policy checks pass (status checks, approvals, resolved review threads, label/branch constraints, and mergeability gates). Use this when workflows require controlled merges instead of direct merge operations.", + "inputSchema": { + "type": "object", + "properties": { + "pull_request_number": { + "type": [ + "number", + "string" + ], + "description": "Pull request number to merge. This is the numeric ID from the GitHub URL (e.g., 321 in github.com/owner/repo/pull/321). If omitted, uses the triggering pull request context." + }, + "merge_method": { + "type": "string", + "enum": [ + "merge", + "squash", + "rebase" + ], + "description": "Merge strategy to use: 'merge', 'squash', or 'rebase'. Defaults to 'merge'." + }, + "commit_title": { + "type": "string", + "description": "Optional custom commit title to use for the merge commit/squash commit." + }, + "commit_message": { + "type": "string", + "description": "Optional custom commit message body for the merge." 
+ }, + "repo": { + "type": "string", + "description": "Target repository in 'owner/repo' format for cross-repository merge operations. If omitted, uses the configured default target repository." + }, + "secrecy": { + "type": "string", + "description": "Confidentiality level of the message content (e.g., \"public\", \"internal\", \"private\")." + }, + "integrity": { + "type": "string", + "description": "Trustworthiness level of the message source (e.g., \"low\", \"medium\", \"high\")." + } + }, + "additionalProperties": false + } + }, { "name": "push_to_pull_request_branch", "description": "Push committed changes to a pull request's branch. Use this to add follow-up commits to an existing PR, such as addressing review feedback or fixing issues. Changes must be committed locally before calling this tool.", diff --git a/pkg/workflow/merge_pull_request.go b/pkg/workflow/merge_pull_request.go new file mode 100644 index 00000000000..08612ddb371 --- /dev/null +++ b/pkg/workflow/merge_pull_request.go @@ -0,0 +1,34 @@ +package workflow + +import "github.com/github/gh-aw/pkg/logger" + +var mergePullRequestLog = logger.New("workflow:merge_pull_request") + +// MergePullRequestConfig holds configuration for merging pull requests with policy checks. +type MergePullRequestConfig struct { + BaseSafeOutputConfig `yaml:",inline"` + RequiredLabels []string `yaml:"required-labels,omitempty"` // Labels that must be present on the PR + AllowedLabels []string `yaml:"allowed-labels,omitempty"` // Exact label names; at least one PR label must match when configured + AllowedBranches []string `yaml:"allowed-branches,omitempty"` // Glob patterns for source branch names +} + +// parseMergePullRequestConfig handles merge-pull-request configuration. 
+func (c *Compiler) parseMergePullRequestConfig(outputMap map[string]any) *MergePullRequestConfig { + configData, exists := outputMap["merge-pull-request"] + if !exists { + return nil + } + + cfg := &MergePullRequestConfig{} + if configMap, ok := configData.(map[string]any); ok { + cfg.RequiredLabels = ParseStringArrayFromConfig(configMap, "required-labels", mergePullRequestLog) + cfg.AllowedLabels = ParseStringArrayFromConfig(configMap, "allowed-labels", mergePullRequestLog) + cfg.AllowedBranches = ParseStringArrayFromConfig(configMap, "allowed-branches", mergePullRequestLog) + c.parseBaseSafeOutputConfig(configMap, &cfg.BaseSafeOutputConfig, 1) + return cfg + } + + // merge-pull-request: null enables defaults + cfg.Max = defaultIntStr(1) + return cfg +} diff --git a/pkg/workflow/merge_pull_request_experimental_warning_test.go b/pkg/workflow/merge_pull_request_experimental_warning_test.go new file mode 100644 index 00000000000..27d5b9dc3ec --- /dev/null +++ b/pkg/workflow/merge_pull_request_experimental_warning_test.go @@ -0,0 +1,92 @@ +//go:build integration + +package workflow + +import ( + "bytes" + "io" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/github/gh-aw/pkg/testutil" +) + +func TestMergePullRequestExperimentalWarning(t *testing.T) { + tests := []struct { + name string + content string + expectWarning bool + }{ + { + name: "merge-pull-request enabled produces experimental warning", + content: `--- +on: workflow_dispatch +engine: copilot +safe-outputs: + merge-pull-request: +--- + +# Test Workflow +`, + expectWarning: true, + }, + { + name: "no merge-pull-request does not produce experimental warning", + content: `--- +on: workflow_dispatch +engine: copilot +--- + +# Test Workflow +`, + expectWarning: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tmpDir := testutil.TempDir(t, "merge-pull-request-experimental-warning-test") + + testFile := filepath.Join(tmpDir, "test-workflow.md") + if err := 
os.WriteFile(testFile, []byte(tt.content), 0644); err != nil { + t.Fatal(err) + } + + oldStderr := os.Stderr + r, w, _ := os.Pipe() + os.Stderr = w + + compiler := NewCompiler() + compiler.SetStrictMode(false) + err := compiler.CompileWorkflow(testFile) + + w.Close() + os.Stderr = oldStderr + var buf bytes.Buffer + io.Copy(&buf, r) + stderrOutput := buf.String() + + if err != nil { + t.Errorf("expected compilation to succeed but it failed: %v", err) + return + } + + expectedMessage := "Using experimental feature: merge-pull-request" + if tt.expectWarning { + if !strings.Contains(stderrOutput, expectedMessage) { + t.Errorf("expected warning containing %q, got stderr:\n%s", expectedMessage, stderrOutput) + } + if compiler.GetWarningCount() == 0 { + t.Error("expected warning count > 0 but got 0") + } + return + } + + if strings.Contains(stderrOutput, expectedMessage) { + t.Errorf("did not expect warning %q, but got stderr:\n%s", expectedMessage, stderrOutput) + } + }) + } +} diff --git a/pkg/workflow/safe_outputs_config.go b/pkg/workflow/safe_outputs_config.go index f96ee96e87f..1ac733fb14f 100644 --- a/pkg/workflow/safe_outputs_config.go +++ b/pkg/workflow/safe_outputs_config.go @@ -258,6 +258,12 @@ func (c *Compiler) extractSafeOutputsConfig(frontmatter map[string]any) *SafeOut config.UpdatePullRequests = updatePullRequestsConfig } + // Handle merge-pull-request + mergePullRequestConfig := c.parseMergePullRequestConfig(outputMap) + if mergePullRequestConfig != nil { + config.MergePullRequest = mergePullRequestConfig + } + // Handle push-to-pull-request-branch pushToBranchConfig := c.parsePushToPullRequestBranchConfig(outputMap) if pushToBranchConfig != nil { diff --git a/pkg/workflow/safe_outputs_import_test.go b/pkg/workflow/safe_outputs_import_test.go index 74e333a80df..1253b2ad149 100644 --- a/pkg/workflow/safe_outputs_import_test.go +++ b/pkg/workflow/safe_outputs_import_test.go @@ -412,6 +412,57 @@ imports: assert.Equal(t, strPtr("5"), 
workflowData.SafeOutputs.AddComments.Max) } +// TestSafeOutputsImportMergePullRequestType tests importing merge-pull-request from a shared workflow. +func TestSafeOutputsImportMergePullRequestType(t *testing.T) { + compiler := NewCompiler(WithVersion("1.0.0")) + + tmpDir := t.TempDir() + workflowsDir := filepath.Join(tmpDir, ".github", "workflows") + err := os.MkdirAll(workflowsDir, 0755) + require.NoError(t, err, "Failed to create workflows directory") + + sharedWorkflow := `--- +safe-outputs: + merge-pull-request: + required-labels: + - ready-to-merge +--- + +# Shared Merge Pull Request Configuration +` + + sharedFile := filepath.Join(workflowsDir, "shared-merge-pr.md") + err = os.WriteFile(sharedFile, []byte(sharedWorkflow), 0644) + require.NoError(t, err, "Failed to write shared file") + + mainWorkflow := `--- +on: pull_request +permissions: + contents: read +imports: + - ./shared-merge-pr.md +--- + +# Main Workflow +` + + mainFile := filepath.Join(workflowsDir, "main.md") + err = os.WriteFile(mainFile, []byte(mainWorkflow), 0644) + require.NoError(t, err, "Failed to write main file") + + oldDir, err := os.Getwd() + require.NoError(t, err, "Failed to get current directory") + err = os.Chdir(workflowsDir) + require.NoError(t, err, "Failed to change directory") + defer func() { _ = os.Chdir(oldDir) }() + + workflowData, err := compiler.ParseWorkflowFile("main.md") + require.NoError(t, err, "Failed to parse workflow") + require.NotNil(t, workflowData.SafeOutputs, "SafeOutputs should not be nil") + require.NotNil(t, workflowData.SafeOutputs.MergePullRequest, "MergePullRequest should be imported") + assert.Equal(t, []string{"ready-to-merge"}, workflowData.SafeOutputs.MergePullRequest.RequiredLabels) +} + // TestMergeSafeOutputsUnit tests the MergeSafeOutputs function directly func TestMergeSafeOutputsUnit(t *testing.T) { compiler := NewCompiler(WithVersion("1.0.0")) @@ -481,6 +532,15 @@ func TestMergeSafeOutputsUnit(t *testing.T) { expectError: false, expectedTypes: 
[]string{"create-issue", "add-comment"}, }, + { + name: "import merge-pull-request to empty config", + topConfig: nil, + importedJSON: []string{ + `{"merge-pull-request":{"required-labels":["ready-to-merge"]}}`, + }, + expectError: false, + expectedTypes: []string{"merge-pull-request"}, + }, } for _, tt := range tests { diff --git a/pkg/workflow/safe_outputs_permissions.go b/pkg/workflow/safe_outputs_permissions.go index 3fe88cbc7d7..07950ac4eb2 100644 --- a/pkg/workflow/safe_outputs_permissions.go +++ b/pkg/workflow/safe_outputs_permissions.go @@ -167,6 +167,10 @@ func ComputePermissionsForSafeOutputs(safeOutputs *SafeOutputsConfig) *Permissio permissions.Merge(NewPermissionsContentsReadPRWrite()) } } + if safeOutputs.MergePullRequest != nil && !isHandlerStaged(safeOutputs.Staged, safeOutputs.MergePullRequest.Staged) { + safeOutputsPermissionsLog.Print("Adding permissions for merge-pull-request") + permissions.Merge(NewPermissionsContentsWritePRWrite()) + } if safeOutputs.ClosePullRequests != nil && !isHandlerStaged(safeOutputs.Staged, safeOutputs.ClosePullRequests.Staged) { safeOutputsPermissionsLog.Print("Adding permissions for close-pull-request") permissions.Merge(NewPermissionsContentsReadPRWrite()) @@ -329,6 +333,8 @@ func SafeOutputsConfigFromKeys(keys []string) *SafeOutputsConfig { config.UpdateIssues = &UpdateIssuesConfig{} case "update-pull-request": config.UpdatePullRequests = &UpdatePullRequestsConfig{} + case "merge-pull-request": + config.MergePullRequest = &MergePullRequestConfig{} case "push-to-pull-request-branch": config.PushToPullRequestBranch = &PushToPullRequestBranchConfig{} case "upload-asset": diff --git a/pkg/workflow/safe_outputs_specification_merge_pull_request_test.go b/pkg/workflow/safe_outputs_specification_merge_pull_request_test.go new file mode 100644 index 00000000000..9fe09a74ed6 --- /dev/null +++ b/pkg/workflow/safe_outputs_specification_merge_pull_request_test.go @@ -0,0 +1,74 @@ +//go:build !integration + +package workflow + 
+import ( + "os" + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestSafeOutputsSpecificationDocumentsMergePullRequest(t *testing.T) { + specPath := findRepoFile(t, filepath.Join("docs", "src", "content", "docs", "reference", "safe-outputs-specification.md")) + specBytes, err := os.ReadFile(specPath) + require.NoError(t, err, "should read safe outputs specification") + + spec := string(specBytes) + section := extractSpecTypeSection(t, spec, "merge_pull_request") + + assert.Contains(t, section, "**Purpose**: Merge pull requests only when configured policy gates pass.", + "spec should define merge_pull_request purpose") + assert.Contains(t, section, "Base Branch Protection", + "spec should document base branch restrictions for merge_pull_request") + assert.Contains(t, section, "repository default branch", + "spec should explicitly refuse merge_pull_request to repository default branch") + assert.Contains(t, section, "`required-labels`", + "spec should document required-labels configuration for merge_pull_request") + assert.Contains(t, section, "`contents: write`", + "spec should document contents: write permission for merge_pull_request") + assert.Contains(t, section, "`pull-requests: write`", + "spec should document pull-requests: write permission for merge_pull_request") + assert.Contains(t, section, "temporary ID", + "spec should document temporary ID support for merge_pull_request pull_request_number") +} + +func extractSpecTypeSection(t *testing.T, spec, typeName string) string { + t.Helper() + + header := "#### Type: " + typeName + start := strings.Index(spec, header) + require.NotEqual(t, -1, start, "spec should include section header for %s", typeName) + + rest := spec[start+len(header):] + nextOffset := strings.Index(rest, "\n#### Type: ") + if nextOffset == -1 { + return spec[start:] + } + + return spec[start : start+len(header)+nextOffset] +} + +func findRepoFile(t 
*testing.T, relativePath string) string { + t.Helper() + + wd, err := os.Getwd() + require.NoError(t, err, "should get current working directory") + + dir := wd + for { + candidate := filepath.Join(dir, relativePath) + if _, err := os.Stat(candidate); err == nil { + return candidate + } + + parent := filepath.Dir(dir) + if parent == dir { + t.Fatalf("could not find %s from %s", relativePath, wd) + } + dir = parent + } +} diff --git a/pkg/workflow/safe_outputs_state.go b/pkg/workflow/safe_outputs_state.go index cb3def0c99a..70317dc848e 100644 --- a/pkg/workflow/safe_outputs_state.go +++ b/pkg/workflow/safe_outputs_state.go @@ -45,6 +45,7 @@ var safeOutputFieldMapping = map[string]string{ "UnassignFromUser": "unassign_from_user", "UpdateIssues": "update_issue", "UpdatePullRequests": "update_pull_request", + "MergePullRequest": "merge_pull_request", "PushToPullRequestBranch": "push_to_pull_request_branch", "UploadAssets": "upload_asset", "UploadArtifact": "upload_artifact", diff --git a/pkg/workflow/safe_outputs_validation.go b/pkg/workflow/safe_outputs_validation.go index ce05f5e7a14..059d3bdb98e 100644 --- a/pkg/workflow/safe_outputs_validation.go +++ b/pkg/workflow/safe_outputs_validation.go @@ -181,6 +181,52 @@ func validateTargetValue(configName, target string) error { var safeOutputsAllowWorkflowsValidationLog = newValidationLogger("safe_outputs_allow_workflows") +var safeOutputsMergePullRequestValidationLog = newValidationLogger("safe_outputs_merge_pull_request") + +// validateSafeOutputsMergePullRequest validates merge-pull-request policy configuration. 
+func validateSafeOutputsMergePullRequest(config *SafeOutputsConfig) error { + if config == nil || config.MergePullRequest == nil { + return nil + } + + c := config.MergePullRequest + safeOutputsMergePullRequestValidationLog.Print("Validating merge-pull-request policy fields") + + validateNonEmptyStringList := func(field string, values []string) error { + for i, value := range values { + if strings.TrimSpace(value) == "" { + return fmt.Errorf("safe-outputs.merge-pull-request.%s[%d] cannot be empty", field, i) + } + } + return nil + } + + validateRefGlobList := func(field string, patterns []string) error { + for i, pat := range patterns { + if errs := validateRefGlob(pat); len(errs) > 0 { + msgs := make([]string, 0, len(errs)) + for _, e := range errs { + msgs = append(msgs, e.Message) + } + return fmt.Errorf("invalid glob pattern %q in safe-outputs.merge-pull-request.%s[%d]: %s", pat, field, i, strings.Join(msgs, "; ")) + } + } + return nil + } + + if err := validateNonEmptyStringList("required-labels", c.RequiredLabels); err != nil { + return err + } + if err := validateNonEmptyStringList("allowed-labels", c.AllowedLabels); err != nil { + return err + } + if err := validateRefGlobList("allowed-branches", c.AllowedBranches); err != nil { + return err + } + + return nil +} + // validateSafeOutputsAllowWorkflows validates that allow-workflows: true requires // a GitHub App to be configured in safe-outputs.github-app. The workflows permission // is a GitHub App-only permission and cannot be granted via GITHUB_TOKEN. 
diff --git a/pkg/workflow/safe_outputs_validation_config.go b/pkg/workflow/safe_outputs_validation_config.go index 88751ca7ebb..0afa39bb733 100644 --- a/pkg/workflow/safe_outputs_validation_config.go +++ b/pkg/workflow/safe_outputs_validation_config.go @@ -165,6 +165,16 @@ var ValidationConfig = map[string]TypeValidationConfig{ "repo": {Type: "string", MaxLength: 256}, // Optional: target repository in format "owner/repo" }, }, + "merge_pull_request": { + DefaultMax: 1, + Fields: map[string]FieldValidation{ + "pull_request_number": {IssueOrPRNumber: true}, + "merge_method": {Type: "string", Enum: []string{"merge", "squash", "rebase"}}, + "commit_title": {Type: "string", Sanitize: true, MaxLength: 256}, + "commit_message": {Type: "string", Sanitize: true, MaxLength: MaxBodyLength}, + "repo": {Type: "string", MaxLength: 256}, // Optional: target repository in format "owner/repo" + }, + }, "push_to_pull_request_branch": { DefaultMax: 1, Fields: map[string]FieldValidation{ diff --git a/pkg/workflow/safe_outputs_validation_merge_pull_request_test.go b/pkg/workflow/safe_outputs_validation_merge_pull_request_test.go new file mode 100644 index 00000000000..3552b2ea6ec --- /dev/null +++ b/pkg/workflow/safe_outputs_validation_merge_pull_request_test.go @@ -0,0 +1,59 @@ +//go:build !integration + +package workflow + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestValidateSafeOutputsMergePullRequestLabelValidation(t *testing.T) { + tests := []struct { + name string + config *SafeOutputsConfig + wantErr string + }{ + { + name: "empty required-labels entry fails", + config: &SafeOutputsConfig{ + MergePullRequest: &MergePullRequestConfig{ + RequiredLabels: []string{"safe-to-merge", " "}, + }, + }, + wantErr: "safe-outputs.merge-pull-request.required-labels[1] cannot be empty", + }, + { + name: "empty allowed-labels entry fails", + config: &SafeOutputsConfig{ + MergePullRequest: &MergePullRequestConfig{ + 
AllowedLabels: []string{"release", ""}, + }, + }, + wantErr: "safe-outputs.merge-pull-request.allowed-labels[1] cannot be empty", + }, + { + name: "non-empty labels pass", + config: &SafeOutputsConfig{ + MergePullRequest: &MergePullRequestConfig{ + RequiredLabels: []string{"safe-to-merge"}, + AllowedLabels: []string{"release", "automerge"}, + }, + }, + wantErr: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := validateSafeOutputsMergePullRequest(tt.config) + if tt.wantErr == "" { + assert.NoError(t, err, "expected merge-pull-request label validation to pass") + return + } + require.Error(t, err, "expected merge-pull-request label validation to fail") + assert.Contains(t, err.Error(), tt.wantErr, "expected validation error to include field-specific message") + }) + } +}