Upstream Merge #354
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
name: Upstream Merge
on:
  # Poll upstream regularly; also allow manual and external triggers
  schedule:
    - cron: "*/30 * * * *"
  workflow_dispatch:
    inputs:
      upstream_repo:
        description: "Upstream repo (owner/name)"
        required: false
        default: "openai/codex"
      upstream_branch:
        description: "Upstream branch"
        required: false
        default: "main"
  repository_dispatch:
    types: [upstream-push]
concurrency:
  group: upstream-merge
  # Do not cancel in-flight runs; upstream merges can be long-running. Starting
  # a new run should not kill the previous one.
  cancel-in-progress: false
permissions:
  contents: write
  pull-requests: write
env:
  # `inputs.*` is empty for schedule/repository_dispatch events, so the `||`
  # fallbacks below supply the defaults for non-manual triggers.
  UPSTREAM_REPO: ${{ inputs.upstream_repo || 'openai/codex' }}
  UPSTREAM_BRANCH: ${{ inputs.upstream_branch || 'main' }}
  MERGE_BRANCH: upstream-merge
  # Controls whether we auto-close an existing upstream-merge PR when the run
  # skips due to no upstream changes and the branch is zero-diff vs base.
  # This used to be true when we reset the branch each run; now we carry
  # forward the branch for incremental merges, so default this to false.
  CLOSE_ZERO_DIFF_ON_SKIP: "false"
  # Pinned versions of the static tool binaries installed below.
  RG_VERSION: "14.1.0"
  JQ_VERSION: "1.7.1"
jobs:
  # Cheap gate: decides whether the expensive `merge` job needs to run at all,
  # and guards against overlapping workflow executions.
  precheck:
    name: Precheck (no-op gate)
    runs-on: ubuntu-latest
    timeout-minutes: 10
    outputs:
      skip_due_to_active: ${{ steps.active_guard.outputs.skip_due_to_active }}
      action: ${{ steps.check.outputs.action }}
      upstream_only: ${{ steps.check.outputs.upstream_only }}
      merge_only: ${{ steps.check.outputs.merge_only }}
      upstream_in_merge: ${{ steps.check.outputs.upstream_in_merge }}
    steps:
      # Skip this run entirely if another upstream-merge run is still executing.
      # (concurrency.cancel-in-progress is false, so runs can overlap in queue.)
      - name: Guard concurrent upstream-merge run
        id: active_guard
        uses: actions/github-script@v7
        with:
          github-token: ${{ secrets.CODE_GH_PAT || github.token }}
          script: |
            const { owner, repo } = context.repo;
            const workflowId = 'upstream-merge.yml';
            const resp = await github.rest.actions.listWorkflowRuns({
              owner,
              repo,
              workflow_id: workflowId,
              per_page: 20
            });
            // A run blocks us if it is still in progress and is not this run.
            // NOTE(review): runs with status 'queued' are not treated as
            // blocking here — confirm that is intended.
            const blocking = resp.data.workflow_runs.filter(run =>
              run.status === 'in_progress' && run.id !== context.runId
            );
            const skip = blocking.length > 0;
            core.setOutput('skip_due_to_active', skip ? 'true' : 'false');
            if (skip) {
              const latest = blocking[0];
              await core.summary
                .addHeading('Upstream Merge: Active Run Guard')
                .addRaw(`- blocking_runs: ${blocking.length}`, true)
                .addRaw(`- blocking_run: #${latest.run_number} (${latest.status})`, true)
                .addLink('Blocking run logs', latest.html_url)
                .write();
              core.notice(`Another Upstream Merge run (#${latest.run_number}) is still ${latest.status}; skipping remaining steps.`);
              core.info('Exiting early because another upstream-merge run is still active.');
            }
      - name: Check out repository (full history)
        if: steps.active_guard.outputs.skip_due_to_active != 'true'
        uses: actions/checkout@v4
        with:
          # Full history is required for the merge-base / rev-list checks below.
          fetch-depth: 0
          persist-credentials: false
      # Classify the run into one of three actions:
      #   merge    - upstream has commits we have not merged yet
      #   pr_only  - merge branch already carries work; only a PR is needed
      #   no_work  - nothing to do (or the OpenAI key is missing)
      - name: "Quick precheck: compute upstream/merge deltas"
        if: steps.active_guard.outputs.skip_due_to_active != 'true'
        id: check
        shell: bash
        env:
          UPSTREAM_REPO: ${{ env.UPSTREAM_REPO }}
          UPSTREAM_BRANCH: ${{ env.UPSTREAM_BRANCH }}
          DEFAULT_BRANCH: ${{ github.event.repository.default_branch || 'main' }}
          MERGE_BRANCH: ${{ env.MERGE_BRANCH }}
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
        run: |
          set -euo pipefail
          # Idempotent: ignore "remote already exists".
          git remote add upstream "https://github.com/${UPSTREAM_REPO}.git" 2>/dev/null || true
          git fetch --no-tags --prune origin "${DEFAULT_BRANCH}"
          HAS_MERGE_BRANCH=false
          if git ls-remote --exit-code --heads origin "${MERGE_BRANCH}" >/dev/null 2>&1; then
            git fetch --no-tags --prune origin "${MERGE_BRANCH}"
            HAS_MERGE_BRANCH=true
          fi
          git fetch --no-tags --prune upstream \
            "+refs/heads/${UPSTREAM_BRANCH}:refs/remotes/upstream/${UPSTREAM_BRANCH}"
          # Count exclusive commits to determine work type
          upstream_only=$(git rev-list --count "upstream/${UPSTREAM_BRANCH}" --not "origin/${DEFAULT_BRANCH}" || echo 0)
          merge_only=0
          if [ "$HAS_MERGE_BRANCH" = true ]; then
            merge_only=$(git rev-list --count "origin/${MERGE_BRANCH}" --not "origin/${DEFAULT_BRANCH}" || echo 0)
          fi
          # True when the merge branch already contains the upstream tip, i.e.
          # a prior run merged it and we only need to (re)raise the PR.
          upstream_in_merge=false
          if [ "$HAS_MERGE_BRANCH" = true ] && git merge-base --is-ancestor "upstream/${UPSTREAM_BRANCH}" "origin/${MERGE_BRANCH}"; then
            upstream_in_merge=true
          fi
          openai_api_key_present=false
          if [ -n "${OPENAI_API_KEY:-}" ]; then
            openai_api_key_present=true
          fi
          action=no_work
          action_reason=no_work
          if [ "$upstream_only" -gt 0 ] && [ "$upstream_in_merge" != true ]; then
            action=merge
            action_reason=upstream_has_new_commits
          elif [ "$merge_only" -gt 0 ] || [ "$upstream_in_merge" = true ]; then
            action=pr_only
            action_reason=merge_branch_needs_pr
          fi
          # A merge requires the agent, which requires the OpenAI key.
          if [ "$action" = merge ] && [ "$openai_api_key_present" != true ]; then
            action=no_work
            action_reason=missing_openai_api_key
          fi
          # tee: echo to the log as well as to $GITHUB_OUTPUT for debuggability.
          {
            echo "action=$action";
            echo "action_reason=$action_reason";
            echo "openai_api_key_present=$openai_api_key_present";
            echo "upstream_only=$upstream_only";
            echo "merge_only=$merge_only";
            echo "upstream_in_merge=$upstream_in_merge";
          } | tee -a "$GITHUB_OUTPUT"
          {
            echo "### Upstream Merge: Precheck";
            echo "- action: ${action}";
            echo "- reason: ${action_reason}";
            echo "- openai_api_key_present: ${openai_api_key_present}";
            echo "- upstream_only: ${upstream_only}";
            echo "- merge_only: ${merge_only}";
            echo "- upstream_in_merge: ${upstream_in_merge}";
          } >> "$GITHUB_STEP_SUMMARY"
| - name: Close stale upstream-merge PR on skip when zero diff (opt-in) | |
| if: steps.active_guard.outputs.skip_due_to_active != 'true' && steps.check.outputs.skip == 'true' && steps.check.outputs.mirror_on_skip == 'true' && env.CLOSE_ZERO_DIFF_ON_SKIP == 'true' | |
| uses: actions/github-script@v7 | |
| env: | |
| MERGE_BRANCH: ${{ env.MERGE_BRANCH }} | |
| DEFAULT_BRANCH: ${{ github.event.repository.default_branch || 'main' }} | |
| with: | |
| github-token: ${{ secrets.CODE_GH_PAT || github.token }} | |
| script: | | |
| const owner = context.repo.owner; | |
| const repo = context.repo.repo; | |
| const head = process.env.MERGE_BRANCH; | |
| const base = process.env.DEFAULT_BRANCH; | |
| const headRef = `${owner}:${head}`; | |
| const prs = await github.rest.pulls.list({ owner, repo, state: 'open', head: headRef }); | |
| if (!prs.data.length) { return; } | |
| let zeroDiff = false; | |
| try { | |
| const cmp = await github.rest.repos.compareCommitsWithBasehead({ owner, repo, basehead: `${base}...${head}` }); | |
| zeroDiff = (cmp.data.files || []).length === 0; | |
| } catch (e) { | |
| core.warning(`Compare failed (${base}...${head}): ${e.message}. Skipping.`); | |
| return; | |
| } | |
| if (zeroDiff) { | |
| const pr = prs.data[0]; | |
| await github.rest.issues.createComment({ owner, repo, issue_number: pr.number, body: 'Closing: upstream merge has no net file changes vs base.' }); | |
| await github.rest.pulls.update({ owner, repo, pull_number: pr.number, state: 'closed' }); | |
| core.notice(`Closed PR #${pr.number} due to zero diff (skip path).`); | |
| } | |
  # The expensive job: runs only when precheck decided action == 'merge'.
  merge:
    needs: [precheck]
    if: needs.precheck.outputs.skip_due_to_active != 'true' && needs.precheck.outputs.action == 'merge'
    runs-on: ubuntu-latest
    timeout-minutes: 45
    steps:
      - name: Check out repository (full history)
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          persist-credentials: false
      - name: Configure authenticated origin for pushes
        env:
          GH_TOKEN: ${{ secrets.CODE_GH_PAT || github.token }}
          REPO: ${{ github.repository }}
        run: |
          set -euo pipefail
          # Embed the token in the origin URL so later pushes authenticate.
          git remote set-url origin "https://x-access-token:${GH_TOKEN}@github.com/${REPO}.git"
      - name: Set git identity for commits
        run: |
          git config user.name "just-every-code"
          git config user.email "code@justevery.com"
          git config --global --add safe.directory "$GITHUB_WORKSPACE"
      # Re-verify there is work to do (the precheck snapshot may be stale by
      # the time this job is scheduled). Outputs drive all later `if:` gates.
      - name: "Quick no-op: ancestor check (default and merge branch)"
        id: check_upstream
        shell: bash
        env:
          UPSTREAM_REPO: ${{ env.UPSTREAM_REPO }}
          UPSTREAM_BRANCH: ${{ env.UPSTREAM_BRANCH }}
          DEFAULT_BRANCH: ${{ github.event.repository.default_branch || 'main' }}
          MERGE_BRANCH: ${{ env.MERGE_BRANCH }}
        run: |
          set -euo pipefail
          git remote add upstream "https://github.com/${UPSTREAM_REPO}.git" 2>/dev/null || true
          # Fetch exact refs with commit graph, no blobs, to make ancestor checks reliable and fast.
          # NOTE: Do not consult origin/upstream-merge here — it may contain a prior partial merge and
          # cause false positives. We primarily compare upstream vs the default branch, but if a dedicated
          # merge branch exists we also consider it for no-op (so we don't re-trigger while a PR is open).
          git fetch --no-tags --prune --filter=blob:none origin "${DEFAULT_BRANCH}"
          HAS_MERGE_BRANCH=false
          if git ls-remote --exit-code --heads origin "${MERGE_BRANCH}" >/dev/null 2>&1; then
            git fetch --no-tags --prune --filter=blob:none origin "${MERGE_BRANCH}"
            HAS_MERGE_BRANCH=true
          fi
          git fetch --no-tags --prune --filter=blob:none upstream \
            "+refs/heads/${UPSTREAM_BRANCH}:refs/remotes/upstream/${UPSTREAM_BRANCH}"
          # Evaluate ancestor relationships and expose results as step outputs
          if git merge-base --is-ancestor "upstream/${UPSTREAM_BRANCH}" "origin/${DEFAULT_BRANCH}"; then
            echo "skip=true" >> "$GITHUB_OUTPUT"
            echo "skip_reason=upstream_ancestor_of_default" >> "$GITHUB_OUTPUT"
            echo "mirror_on_skip=true" >> "$GITHUB_OUTPUT"
          elif [ "$HAS_MERGE_BRANCH" = true ] && git merge-base --is-ancestor "upstream/${UPSTREAM_BRANCH}" "origin/${MERGE_BRANCH}"; then
            echo "skip=true" >> "$GITHUB_OUTPUT"
            echo "skip_reason=upstream_ancestor_of_merge_branch" >> "$GITHUB_OUTPUT"
            echo "mirror_on_skip=false" >> "$GITHUB_OUTPUT"
          else
            echo "skip=false" >> "$GITHUB_OUTPUT"
            echo "skip_reason=upstream_has_new_commits" >> "$GITHUB_OUTPUT"
            echo "mirror_on_skip=false" >> "$GITHUB_OUTPUT"
          fi
      # Always record the quick-check verdict, even if earlier steps failed.
      - name: Summarize quick check
        if: always()
        run: |
          {
            echo "### Upstream Merge: Quick Check";
            echo "- skip: ${{ steps.check_upstream.outputs.skip || '' }}";
            echo "- reason: ${{ steps.check_upstream.outputs.skip_reason || 'n/a' }}";
            echo "- mirror_on_skip: ${{ steps.check_upstream.outputs.mirror_on_skip || 'false' }}";
          } >> "$GITHUB_STEP_SUMMARY"
| # Expensive setup only runs if we are not skipping. | |
| - name: Setup Rust toolchain (match repo) | |
| if: steps.check_upstream.outputs.skip != 'true' | |
| uses: dtolnay/rust-toolchain@master | |
| with: | |
| toolchain: 1.90.0 | |
| - name: Add local bin to PATH | |
| if: steps.check_upstream.outputs.skip != 'true' | |
| run: | | |
| mkdir -p "$HOME/.local/bin" | |
| echo "$HOME/.local/bin" >> "$GITHUB_PATH" | |
| - name: Set shared Cargo env (CARGO_HOME, CARGO_TARGET_DIR) | |
| if: steps.check_upstream.outputs.skip != 'true' | |
| run: | | |
| echo "CARGO_HOME=${RUNNER_TEMP}/cargo-home" >> "$GITHUB_ENV" | |
| echo "CARGO_TARGET_DIR=${GITHUB_WORKSPACE}/code-rs/target" >> "$GITHUB_ENV" | |
      # Cached static binaries for rg and jq avoid slow apt installs on every run.
      - name: Cache ripgrep binary
        if: steps.check_upstream.outputs.skip != 'true'
        uses: actions/cache@v4
        with:
          path: ~/.local/tools/rg-${{ env.RG_VERSION }}
          key: rg-${{ runner.os }}-${{ env.RG_VERSION }}
      - name: Setup ripgrep (cached)
        if: steps.check_upstream.outputs.skip != 'true'
        run: |
          set -euo pipefail
          # Short-circuit if rg is already on PATH (e.g. preinstalled on the runner).
          if command -v rg >/dev/null 2>&1; then rg --version; exit 0; fi
          mkdir -p "$HOME/.local/tools" "$HOME/.local/bin"
          if [ ! -x "$HOME/.local/tools/rg-${RG_VERSION}/rg" ]; then
            cd "$HOME/.local/tools"
            TARBALL="ripgrep-${RG_VERSION}-x86_64-unknown-linux-musl.tar.gz"
            URL="https://github.com/BurntSushi/ripgrep/releases/download/${RG_VERSION}/${TARBALL}"
            curl -sSL "$URL" -o "$TARBALL"
            tar -xzf "$TARBALL"
            rm -f "$TARBALL"
            mv "ripgrep-${RG_VERSION}-x86_64-unknown-linux-musl" "rg-${RG_VERSION}"
          fi
          install -m 0755 "$HOME/.local/tools/rg-${RG_VERSION}/rg" "$HOME/.local/bin/rg"
          rg --version
      - name: Cache jq binary
        if: steps.check_upstream.outputs.skip != 'true'
        uses: actions/cache@v4
        with:
          path: ~/.local/tools/jq-${{ env.JQ_VERSION }}
          key: jq-${{ runner.os }}-${{ env.JQ_VERSION }}
      - name: Setup jq (cached)
        if: steps.check_upstream.outputs.skip != 'true'
        run: |
          set -euo pipefail
          if command -v jq >/dev/null 2>&1; then jq --version; exit 0; fi
          mkdir -p "$HOME/.local/tools/jq-${JQ_VERSION}" "$HOME/.local/bin"
          URL="https://github.com/jqlang/jq/releases/download/jq-${JQ_VERSION}/jq-linux-amd64"
          curl -sSL "$URL" -o "$HOME/.local/tools/jq-${JQ_VERSION}/jq"
          chmod +x "$HOME/.local/tools/jq-${JQ_VERSION}/jq"
          install -m 0755 "$HOME/.local/tools/jq-${JQ_VERSION}/jq" "$HOME/.local/bin/jq"
          jq --version
      # Remove slow apt install; we now use cached static binaries for rg/jq
      - name: Cache Rust build (cargo + target)
        if: steps.check_upstream.outputs.skip != 'true'
        uses: Swatinem/rust-cache@v2
        with:
          # Our Rust workspace lives in codex-rs; cache its target dir
          workspaces: |
            codex-rs -> target
          save-if: true
          cache-on-failure: true
      - name: Setup sccache (GHA backend)
        if: steps.check_upstream.outputs.skip != 'true'
        uses: mozilla-actions/sccache-action@v0.0.9
        with:
          version: v0.10.0
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Enable sccache
        if: steps.check_upstream.outputs.skip != 'true'
        run: |
          echo 'SCCACHE_GHA_ENABLED=true' >> "$GITHUB_ENV"
          echo 'RUSTC_WRAPPER=sccache' >> "$GITHUB_ENV"
          echo 'SCCACHE_IDLE_TIMEOUT=1800' >> "$GITHUB_ENV"
          echo 'SCCACHE_CACHE_SIZE=5G' >> "$GITHUB_ENV"
      # Remove redundant direct caches; Swatinem/rust-cache covers target and cargo dirs via CARGO_HOME
      # Warm the build cache so the agent's own cargo invocations are fast.
      - name: Prime Rust build cache (fast local build)
        if: steps.check_upstream.outputs.skip != 'true'
        env:
          STRICT_CARGO_HOME: "1"
          CARGO_HOME_ENFORCED: ${{ env.CARGO_HOME }}
        run: |
          set -euo pipefail
          ./build-fast.sh
      - name: Setup Node.js
        if: steps.check_upstream.outputs.skip != 'true'
        uses: actions/setup-node@v4
        with:
          node-version: "20"
      - name: Cache npm (npx) downloads
        if: steps.check_upstream.outputs.skip != 'true'
        uses: actions/cache@v4
        with:
          path: |
            ~/.npm
          key: npm-cache-${{ runner.os }}-node20-${{ hashFiles('**/package-lock.json', '**/pnpm-lock.yaml', '**/yarn.lock') }}
          restore-keys: |
            npm-cache-${{ runner.os }}-node20-
      # The agent never sees the real API key: it talks to this local proxy,
      # which holds the key and forwards requests to OpenAI.
      - name: Start local OpenAI proxy (no key to agent; verbose logging)
        if: steps.check_upstream.outputs.skip != 'true'
        id: openai_proxy
        env:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
        run: |
          set -euo pipefail
          if [ -z "${OPENAI_API_KEY:-}" ]; then
            echo "OPENAI_API_KEY secret is required to start the proxy." >&2
            exit 1
          fi
          mkdir -p .github/auto
          # Background the proxy; its PID is exported for later steps.
          PORT=5055 LOG_DEST=stdout EXIT_ON_5XX=1 RESPONSES_BETA="responses=v1" node scripts/openai-proxy.js > .github/auto/openai-proxy.log 2>&1 &
          echo "pid=$!" >> "$GITHUB_OUTPUT"
          # Wait briefly for readiness (best-effort; up to ~6s of 0.2s polls)
          for i in {1..30}; do if nc -z 127.0.0.1 5055; then break; else sleep 0.2; fi; done || true
      - name: Print proxy startup log tail
        if: steps.check_upstream.outputs.skip != 'true'
        run: |
          set -euo pipefail
          echo '### openai-proxy.log (tail)' >> "$GITHUB_STEP_SUMMARY"
          { echo '```'; tail -n 80 .github/auto/openai-proxy.log || true; echo '```'; } >> "$GITHUB_STEP_SUMMARY"
      # Build the artifact files under .github/auto/ that the agent prompt
      # references: commit list, deleted/reintroduced paths, change histogram.
      - name: Prepare agent context (commit range, deleted paths, histogram)
        id: prep
        if: steps.check_upstream.outputs.skip != 'true'
        env:
          UPSTREAM_REPO: ${{ env.UPSTREAM_REPO }}
          UPSTREAM_BRANCH: ${{ env.UPSTREAM_BRANCH }}
          DEFAULT_BRANCH: ${{ github.event.repository.default_branch || 'main' }}
        run: |
          set -euo pipefail
          set -x
          mkdir -p .github/auto
          git remote add upstream "https://github.com/${UPSTREAM_REPO}.git" 2>/dev/null || true
          # Fetch with commit graph but no blobs for speed; ensure sufficient history for merge-base
          git fetch --no-tags origin --prune --filter=blob:none
          git fetch --no-tags upstream --prune --filter=blob:none "${UPSTREAM_BRANCH}"
          # RANGE holds two rev-list args; intentionally unquoted below so it word-splits.
          RANGE="upstream/${UPSTREAM_BRANCH} ^origin/${DEFAULT_BRANCH}"
          # Emit COMMITS.json: one object per upstream-only commit (oldest first).
          : > .github/auto/COMMITS.json
          echo '[' >> .github/auto/COMMITS.json
          first=1
          while read -r sha; do
            title=$(git log -1 --pretty=%s "$sha" | sed 's/"/\\"/g')
            date=$(git log -1 --pretty=%cI "$sha")
            files=$(git show --pretty=format: --name-only "$sha" | sed '/^$/d')
            files_json=$(printf '%s\n' "$files" | jq -Rcs 'split("\n") | map(select(length>0))')
            stats=$(git show --shortstat --oneline "$sha" | tail -n1)
            ins=$(printf '%s' "$stats" | sed -n 's/.* \([0-9]\+\) insertions\?.*/\1/p')
            del=$(printf '%s' "$stats" | sed -n 's/.* \([0-9]\+\) deletions\?.*/\1/p')
            ins=${ins:-0}; del=${del:-0}
            [ $first -eq 1 ] || echo ',' >> .github/auto/COMMITS.json
            first=0
            jq -n --arg sha "$sha" --arg title "$title" --arg date "$date" \
              --argjson files "$files_json" --argjson insertions "$ins" --argjson deletions "$del" \
              '{sha:$sha,title:$title,date:$date,files:$files,insertions:($insertions|tonumber),deletions:($deletions|tonumber)}' >> .github/auto/COMMITS.json
          done < <(git rev-list --reverse $RANGE)
          echo ']' >> .github/auto/COMMITS.json
          if jq -e 'length==0' .github/auto/COMMITS.json >/dev/null 2>&1; then
            echo "No upstream commits beyond default; context prepared (empty).";
          fi
          # Crate-level directory listings; comm -13 yields crates present upstream but not on default.
          git ls-tree -r --name-only "origin/${DEFAULT_BRANCH}" | awk -F'/' '/^codex-rs\//{print $1"/"$2}' | sort -u > .github/auto/DEFAULT_CRATES.txt
          git ls-tree -r --name-only "upstream/${UPSTREAM_BRANCH}" | awk -F'/' '/^codex-rs\//{print $1"/"$2}' | sort -u > .github/auto/UPSTREAM_CRATES.txt
          comm -13 .github/auto/DEFAULT_CRATES.txt .github/auto/UPSTREAM_CRATES.txt > .github/auto/DELETED_ON_DEFAULT.txt || true
          git diff --name-only "origin/${DEFAULT_BRANCH}..upstream/${UPSTREAM_BRANCH}" > .github/auto/DELTA_FILES.txt || true
          # Rough per-area change counts used to pick a merge strategy.
          awk 'BEGIN{tui=cli=core=docs=tests=other=0}
            /^codex-rs\/tui\//{tui++; next}
            /^codex-cli\//{cli++; next}
            /^codex-rs\/(core|common|protocol|exec|file-search)\//{core++; next}
            /^docs\//{docs++; next}
            /(^|\/)tests?\//{tests++; next}
            {other++}
            END{printf("tui=%d cli=%d core=%d docs=%d tests=%d other=%d\n",tui,cli,core,docs,tests,other)}' \
            .github/auto/DELTA_FILES.txt > .github/auto/CHANGE_HISTOGRAM.txt
          FILES_COUNT=$(wc -l < .github/auto/DELTA_FILES.txt | tr -d ' ')
          LOC_EST=$(git diff --shortstat "origin/${DEFAULT_BRANCH}..upstream/${UPSTREAM_BRANCH}" | awk '{for(i=1;i<=NF;i++){if($i=="insertions(+)")ins=$(i-1); if($i=="deletions(-)")del=$(i-1)} } END{print (ins?ins:0)+(del?del:0)}')
          # Large deltas get merged bucket-by-bucket instead of one shot.
          MERGE_MODE=one-shot
          if [ "${FILES_COUNT:-0}" -gt 800 ] || [ "${LOC_EST:-0}" -gt 15000 ]; then MERGE_MODE=by-bucket; fi
          echo "merge_mode=${MERGE_MODE}" >> "$GITHUB_OUTPUT"
          echo "files_count=${FILES_COUNT}" >> "$GITHUB_OUTPUT"
          echo "loc_est=${LOC_EST}" >> "$GITHUB_OUTPUT"
          git diff --stat "origin/${DEFAULT_BRANCH}..upstream/${UPSTREAM_BRANCH}" > .github/auto/DIFFSTAT.txt || true
          # Detect reintroduced paths: present in merge-base, absent on default, present on upstream
          MB=$(git merge-base "origin/${DEFAULT_BRANCH}" "upstream/${UPSTREAM_BRANCH}" 2>/dev/null || true)
          : > .github/auto/REINTRODUCED_PATHS.txt
          if [ -n "${MB:-}" ]; then
            while read -r status path; do
              [ "${status}" = "A" ] || continue
              if git ls-tree -r --name-only "$MB" -- "$path" >/dev/null 2>&1 && \
                 git ls-tree -r --name-only "$MB" -- "$path" | grep -q . && \
                 ! git ls-tree -r --name-only "origin/${DEFAULT_BRANCH}" -- "$path" | grep -q .; then
                echo "$path" >> .github/auto/REINTRODUCED_PATHS.txt
              fi
            done < <(git diff --name-status "origin/${DEFAULT_BRANCH}..upstream/${UPSTREAM_BRANCH}" || true)
          else
            echo "No merge-base between origin/${DEFAULT_BRANCH} and upstream/${UPSTREAM_BRANCH}; skipping reintroduced path detection." >> .github/auto/CHANGE_HISTOGRAM.txt
          fi
          if [ -z "${MB:-}" ]; then echo "no_merge_base=true" >> "$GITHUB_OUTPUT"; else echo "no_merge_base=false" >> "$GITHUB_OUTPUT"; fi
      # Prepare the local upstream-merge branch the agent will work on.
      - name: Carry forward merge branch (merge default into it)
        if: steps.check_upstream.outputs.skip != 'true'
        env:
          DEFAULT_BRANCH: ${{ github.event.repository.default_branch || 'main' }}
        run: |
          set -euo pipefail
          # Bring local refs up to date
          git fetch --no-tags --prune --filter=blob:none origin
          # If upstream-merge exists, base work on it and merge the latest default into it,
          # preferring existing upstream-merge resolutions on conflict (-X ours).
          if git ls-remote --exit-code --heads origin upstream-merge >/dev/null 2>&1; then
            git checkout -B upstream-merge origin/upstream-merge
            # Merge the latest default into upstream-merge to rebase the carry-forward state
            git merge --no-ff --no-edit -X ours "origin/${DEFAULT_BRANCH}" || true
            # Do not push yet; wait for the agent's merge so we do not clobber prior upstream commits on failure.
          else
            # First run: create the branch from the current default; agent step will publish it after a successful merge.
            git checkout -B upstream-merge "origin/${DEFAULT_BRANCH}"
          fi
      - name: Close accidental PRs targeting upstream-merge (we mirror default)
        if: always()
        uses: actions/github-script@v7
        env:
          MERGE_BRANCH: ${{ env.MERGE_BRANCH }}
          DEFAULT_BRANCH: ${{ github.event.repository.default_branch || 'main' }}
        with:
          github-token: ${{ secrets.CODE_GH_PAT || github.token }}
          script: |
            const owner = context.repo.owner;
            const repo = context.repo.repo;
            const base = process.env.MERGE_BRANCH; // upstream-merge
            // Close any open PR that targets upstream-merge; we mirror default directly.
            const prs = await github.rest.pulls.list({ owner, repo, state: 'open', base });
            for (const pr of prs.data) {
              await github.rest.issues.createComment({ owner, repo, issue_number: pr.number, body: 'Closing: this repo auto-mirrors the default branch into upstream-merge. Please target `main` instead.' });
              await github.rest.pulls.update({ owner, repo, pull_number: pr.number, state: 'closed' });
              core.notice(`Closed PR #${pr.number} targeting ${base}.`);
            }
      # Compose the agent goal prompt from the prepared artifacts, then run the
      # Code agent (via the local proxy, with a sanitized environment) to
      # perform the actual merge on the upstream-merge branch.
      - name: Run Code agent to perform upstream merge
        id: agent
        if: steps.check_upstream.outputs.skip != 'true'
        env:
          UPSTREAM_REPO: ${{ env.UPSTREAM_REPO }}
          UPSTREAM_BRANCH: ${{ env.UPSTREAM_BRANCH }}
          MERGE_BRANCH: ${{ env.MERGE_BRANCH }}
          DEFAULT_BRANCH: ${{ github.event.repository.default_branch || 'main' }}
          GH_TOKEN: ${{ secrets.CODE_GH_PAT || github.token }}
          MERGE_MODE: ${{ steps.prep.outputs.merge_mode || 'one-shot' }}
          OURS_GLOBS: |
            codex-rs/tui/**
            codex-cli/**
            .github/workflows/**
            AGENTS.md
            README.md
            CHANGELOG.md
        run: |
          set -euo pipefail
          # Preserve PATH/HOME for the sanitized `env -i` invocation below.
          SAFE_PATH="$PATH"; SAFE_HOME="$HOME"
          # Build the agent prompt safely without command substitution
          {
            printf 'Context\n- UPSTREAM_REPO=%s\n- UPSTREAM_BRANCH=%s\n- MERGE_BRANCH=%s\n- DEFAULT_BRANCH=%s\n\n' \
              "$UPSTREAM_REPO" "$UPSTREAM_BRANCH" "$MERGE_BRANCH" "$DEFAULT_BRANCH";
            echo 'Runtime'; echo '- ENV: github-actions'; echo "- MERGE_MODE=${MERGE_MODE}"; echo '';
            echo 'Goals';
            echo '- Keep our fork in sync with upstream by incorporating genuine improvements.';
            echo '- Do not overwrite our unique TUI and tooling approach unless clearly beneficial and compatible.';
            echo '- Make granular decisions commit-by-commit or by bucket; do not blanket-drop upstream changes without review.';
            echo '- Preserve our added functionality in core: model-driven browser tools, agent tools, screenshot handling, and version/UA semantics.';
            echo '';
            echo 'Fork Enhancements (initial, not exhaustive)';
            if [ -f docs/fork-enhancements.md ]; then
              sed -n '1,200p' docs/fork-enhancements.md;
            else
              echo '- (fork overview file missing)';
            fi
            echo '';
            echo 'Artifacts';
            echo '- .github/auto/COMMITS.json: upstream commits not in default (sha, title, files, stats).';
            echo '- .github/auto/DELETED_ON_DEFAULT.txt: crates/paths removed on our default; avoid re-introducing.';
            echo '- .github/auto/CHANGE_HISTOGRAM.txt: rough areas touched.';
            echo '- .github/auto/DELTA_FILES.txt and DIFFSTAT.txt: filenames and summary.';
            echo '- .github/auto/REINTRODUCED_PATHS.txt: candidate paths removed previously that upstream reintroduced.';
            echo '<policy>';
            # Emit a minimized JSON policy for clarity (drop empty sections)
            if command -v jq >/dev/null 2>&1; then
              jq 'del(.prefer_theirs_globs) | with_entries(select(.value|type != "array" or (.value|length>0)))' .github/merge-policy.json 2>/dev/null || cat .github/merge-policy.json || echo '{ }'
            else
              cat .github/merge-policy.json 2>/dev/null || echo '{ }'
            fi
            echo '</policy>';
            # Quoted heredoc: the task text below is emitted verbatim, no expansion.
            cat << 'EOP'
          <task>
          You are the maintainer bot. Perform an upstream merge using our repo policies and a selective reconciliation strategy.
          Steps:
          1) Ensure remote `upstream` points to the UPSTREAM_REPO in Context. Fetch origin and upstream.
          2) Write .github/auto/MERGE_PLAN.md summarizing strategy (one-shot, by-bucket, or per-commit) based on MERGE_MODE and artifacts.
          3) Use existing MERGE_BRANCH (prepared earlier). Do not reset or recreate it.
          4) Merge upstream/UPSTREAM_BRANCH into MERGE_BRANCH using `--no-commit`.
             - Use the <policy> JSON for prefer_ours_globs, prefer_theirs_globs, and purge_globs.
             - Default: adopt upstream outside prefer_ours_globs. In protected areas (prefer_ours_globs), keep ours unless you identify a clearly beneficial, compatible upstream change.
             - For files matching prefer_theirs_globs, lean towards upstream unless it breaks our build or documented behavior.
             - Explicit invariants to preserve in this fork (must not regress):
               • Tool families: any custom handlers with names starting with `browser_` or `agent_`, and `web_fetch` if present, must have corresponding tool schemas exposed by openai_tools (verify.sh enforces handler↔tool parity generically).
               • Exposure gating: do not drop the browser gating logic that controls when browser tools are advertised (adapt to upstream refactors without removing the behavior).
               • Screenshot UX: do not change screenshot queuing semantics across turns unless you update both producer/consumer paths to preserve UX; prefer preserving our pending queue + TUI updates.
               • Version/UA: keep codex_version::version() usage for UA/build and keep get_codex_user_agent_default() for MCP server user agent.
             - Do NOT blanket-delete new crates or reintroduced paths. Surface noteworthy cases in MERGE_REPORT.md and make a reasoned choice.
             - For any path listed in purge_globs or perma_removed_paths, ensure it remains deleted if upstream reintroduced it.
             - Review the upstream commit range (e.g., via `git rev-list upstream/UPSTREAM_BRANCH ^origin/DEFAULT_BRANCH`). Use repo context and the provided artifacts to make sensible, minimal decisions. Prefer preserving our local UX/branding and workflows; adopt upstream when it improves correctness, security, or compatibility. Record notable decisions in MERGE_REPORT.md.
          5) Resolve lockfile conflicts early: if `codex-rs/Cargo.lock` contains merge markers or becomes out of sync with the workspace manifests, regenerate it inside `codex-rs/` (e.g., `cargo update --workspace --locked`; fall back to `cargo update --workspace` if the locked run fails). Commit the regenerated lock as part of the merge and note the action in MERGE_REPORT.md. Prefer preserving our crate versions, but do not leave conflict markers in place.
          6) Compatibility (do not break callers):
             - Keep these public re-exports in codex-core: ModelClient, Prompt, ResponseEvent, ResponseStream.
               Removing them breaks downstream imports and will fail API tests.
             - Keep codex_core::models namespace as an alias to protocol models.
             - Do not remove ICU/sys-locale dependencies unless you confirm (via repo-wide search) they are unused across the workspace.
          7) Verify with scripts/upstream-merge/verify.sh. If it fails, fix minimally and re-run until it passes.
             - Note: verify.sh includes fork-specific guards for tool registration and UA/version; honor these when resolving conflicts.
          8) Stage and commit with a conventional message and short build status.
          9) Write .github/auto/MERGE_REPORT.md (Incorporated / Dropped / Other changes) summarizing choices.
          10) Push MERGE_BRANCH and prepare PR title/body.
          </task>
          <constraints>
          - Be minimal and surgical; do not refactor.
          - Keep diffs focused on merge and required fixes. Do not recreate locally removed theming/UX files.
          - Never rewrite git history outside the merge branch.
          - If Git reports no merge-base between origin/DEFAULT_BRANCH and upstream/UPSTREAM_BRANCH, you may use `--allow-unrelated-histories` to graft histories and proceed with all policies/guards.
          - If the initial `git merge --no-ff --no-commit upstream/UPSTREAM_BRANCH` fails with unrelated histories, re-run the merge in a separate command with `--allow-unrelated-histories` (do not chain with `||`).
          - Use only the provided GH_TOKEN for push; do not echo it.
          <tools>
          - To search for API usages before removing exports:
            rg -n "^(use\\s+codex_core::|codex_core::)(ModelClient|Prompt|Response(Event|Stream))\\b" codex-rs
          - To look for ICU/sys-locale usage across workspace:
            rg -n "\\b(sys_locale|icu_(decimal|locale_core))\\b" codex-rs
          - To compile API tests without running them:
            cargo check -p codex-core --tests --quiet
          </constraints>
          EOP
          } > .github/auto/AUTO_GOAL.md
          # env -i: run the agent with a minimal, explicit environment. The real
          # API key is never passed (OPENAI_API_KEY="x"); requests go through
          # the local proxy started earlier, which holds the real key.
          env -i PATH="$SAFE_PATH" HOME="$SAFE_HOME" \
            RUSTC_WRAPPER="sccache" SCCACHE_GHA_ENABLED="true" SCCACHE_IDLE_TIMEOUT="1800" SCCACHE_CACHE_SIZE="5G" \
            OPENAI_API_KEY="x" \
            OPENAI_BASE_URL="http://127.0.0.1:5055/v1" \
            OPENAI_API_BASE="http://127.0.0.1:5055/v1" \
            GH_TOKEN="$GH_TOKEN" \
            npm_config_cache="$SAFE_HOME/.npm" \
            npx -y @just-every/code@latest \
              auto \
              --goal-file .github/auto/AUTO_GOAL.md \
              --max-attempts 3 \
              --cd "$GITHUB_WORKSPACE" \
              --skip-git-repo-check \
              -s workspace-write \
              -c sandbox_workspace_write.allow_git_writes=true \
              -c sandbox_workspace_write.network_access=true \
              -c shell_environment_policy.r#set.CARGO_HOME="${RUNNER_TEMP}/cargo-home" \
              -c shell_environment_policy.r#set.CARGO_TARGET_DIR="${GITHUB_WORKSPACE}/codex-rs/target" \
              -c shell_environment_policy.r#set.RUSTC_WRAPPER="sccache" \
              -c shell_environment_policy.r#set.SCCACHE_GHA_ENABLED="true" \
              -c shell_environment_policy.r#set.SCCACHE_IDLE_TIMEOUT="1800" \
              -c shell_environment_policy.r#set.SCCACHE_CACHE_SIZE="5G" \
              -c shell_environment_policy.r#set.KEEP_ENV="1" \
              --json-report .github/auto/AGENT_REPORT.json \
            | tee .github/auto/AGENT_STDOUT.txt
| - name: Assert agent success (fail on streaming/errors) | |
| if: steps.check_upstream.outputs.skip != 'true' | |
| run: | | |
| set -euo pipefail | |
| echo "Checking agent output for fatal errors..." | |
| if rg -n "^\\[.*\\] ERROR: (stream error|server error|exceeded retry limit)" .github/auto/AGENT_STDOUT.txt >/dev/null 2>&1; then | |
| echo "Agent reported a fatal error (stream/server). Failing job." >&2 | |
| rg -n "^\\[.*\\] ERROR: (stream error|server error|exceeded retry limit)" .github/auto/AGENT_STDOUT.txt || true | |
| exit 1 | |
| fi | |
| # Also flag 5xx responses from the proxy logs if present | |
| if [ -s .github/auto/openai-proxy.log ]; then | |
| if rg -n '"phase":"response_head".*"status":5\\d\\d' .github/auto/openai-proxy.log >/dev/null 2>&1; then | |
| echo "Proxy observed 5xx from upstream during agent run. Failing job." >&2 | |
| rg -n '"phase":"response_head".*"status":5\\d\\d' .github/auto/openai-proxy.log | tail -n 5 || true | |
| exit 1 | |
| fi | |
| if rg -n '"phase":"upstream_error"' .github/auto/openai-proxy.log >/dev/null 2>&1; then | |
| echo "Proxy upstream_error entries found. Failing job." >&2 | |
| rg -n '"phase":"upstream_error"' .github/auto/openai-proxy.log | tail -n 10 || true | |
| exit 1 | |
| fi | |
| fi | |
      # Preserve the OpenAI proxy log as a run artifact for post-mortem
      # debugging of the agent's upstream traffic.
      - name: Upload artifact - openai-proxy.log
        if: steps.check_upstream.outputs.skip != 'true'
        uses: actions/upload-artifact@v4
        with:
          name: openai-proxy.log
          if-no-files-found: warn
          path: .github/auto/openai-proxy.log
      # Run the repo's verification script and publish a pass/fail flag for
      # the remediation loop. The script itself never fails this step
      # (|| true); the decision is derived solely from VERIFY.json.
      - name: Post-merge verify (build-fast + API compile)
        id: verify
        if: steps.check_upstream.outputs.skip != 'true'
        working-directory: .
        run: |
          set -euo pipefail
          mkdir -p "$GITHUB_WORKSPACE/.github/auto"
          # Verification is advisory here; failures are remediated downstream.
          bash scripts/upstream-merge/verify.sh || true
          cat .github/auto/VERIFY.json || true
          # failed=false only when both build_fast and api_check report "ok".
          if jq -e '.build_fast=="ok" and .api_check=="ok"' .github/auto/VERIFY.json >/dev/null 2>&1; then echo "failed=false" >> "$GITHUB_OUTPUT"; else echo "failed=true" >> "$GITHUB_OUTPUT"; fi
      # Bounded retry loop: when post-merge verification failed, feed the Code
      # agent a prompt (run context + tails of the verify logs + a fixed task
      # description) and re-run verify.sh after each attempt. Exits 0 on the
      # first successful verification; errors out after MAX_ATTEMPTS.
      - name: Remediate verification failures with Code agent (loop)
        id: remediation_loop
        if: steps.check_upstream.outputs.skip != 'true' && steps.verify.outputs.failed == 'true'
        env:
          GH_TOKEN: ${{ secrets.CODE_GH_PAT || github.token }}
          DEFAULT_BRANCH: ${{ github.event.repository.default_branch || 'main' }}
          MERGE_BRANCH: ${{ env.MERGE_BRANCH }}
          MAX_REMEDIATION_ATTEMPTS: ${{ env.MAX_REMEDIATION_ATTEMPTS || '3' }}
        run: |
          set -euo pipefail
          # Snapshot PATH/HOME now: the agent runs under `env -i` with a
          # minimal, explicitly allow-listed environment.
          SAFE_PATH="$PATH"; SAFE_HOME="$HOME"
          : > "$GITHUB_WORKSPACE/.github/auto/AGENT_REMEDIATION_STDOUT.txt"
          MAX_ATTEMPTS=${MAX_REMEDIATION_ATTEMPTS:-3}
          attempt=1
          while [ "$attempt" -le "$MAX_ATTEMPTS" ]; do
            echo "::notice::Remediation attempt $attempt of $MAX_ATTEMPTS"
            # Compose the prompt on stdout and pipe the whole group into the
            # agent. The heredoc is quoted ('PROMPT') so ${MERGE_BRANCH} etc.
            # inside it are passed literally for the agent to interpret.
            {
              echo 'Context';
              echo "- MERGE_BRANCH=${MERGE_BRANCH}";
              echo "- DEFAULT_BRANCH=${DEFAULT_BRANCH}";
              echo "- ATTEMPT=${attempt}/${MAX_ATTEMPTS}";
              echo '';
              echo 'Previous verify logs (tail):';
              echo '---8<---';
              tail -n 200 "$GITHUB_WORKSPACE/.github/auto/VERIFY_build-fast.log" 2>/dev/null || true
              tail -n 200 "$GITHUB_WORKSPACE/.github/auto/VERIFY_api-check.log" 2>/dev/null || true
              tail -n 200 "$GITHUB_WORKSPACE/.github/auto/VERIFY_guards.log" 2>/dev/null || true
              echo '---8<---';
              echo '';
              cat << 'PROMPT'
          You are acting as a maintainer to remediate compile-only API test failures after an upstream merge.
          Tasks (minimal and surgical):
          - Restore or add back missing public re-exports in codex-core that cause `cargo check --tests` to fail (e.g., ModelClient, Prompt, ResponseEvent, ResponseStream), unless you can conclusively update all workspace imports safely.
          - Prefer adding compatibility aliases over large refactors.
          - Do not remove or rename crates. Do not drop ICU/sys-locale unless unused repo-wide.
          - If verification failed due to `codex-rs/Cargo.lock` parse errors or merge markers, regenerate the lock inside `codex-rs/` (try `cargo update --workspace --locked`; fall back to `cargo update --workspace` if necessary) and include the updated file in the fix.
          - Run scripts/upstream-merge/verify.sh and iterate until it returns success.
          - Update .github/auto/MERGE_REPORT.md with a short note under "Other changes" describing the compatibility adjustments.
          - Push to ${MERGE_BRANCH}.
          Constraints:
          - Keep changes minimal and focused on making tests compile.
          - Do not modify TUI/CLI user-visible branding.
          - No history rewriting; only commit on ${MERGE_BRANCH}.
          - Treat warnings as failures during the build-fast step.
          PROMPT
            } | env -i PATH="$SAFE_PATH" HOME="$SAFE_HOME" \
              OPENAI_API_KEY="x" \
              OPENAI_BASE_URL="http://127.0.0.1:5055/v1" \
              OPENAI_API_BASE="http://127.0.0.1:5055/v1" \
              GH_TOKEN="$GH_TOKEN" \
              RUSTC_WRAPPER="sccache" SCCACHE_GHA_ENABLED="true" SCCACHE_IDLE_TIMEOUT="1800" SCCACHE_CACHE_SIZE="5G" \
              npm_config_cache="$SAFE_HOME/.npm" \
              npx -y @just-every/code@latest \
              exec \
              -s workspace-write \
              -c sandbox_workspace_write.allow_git_writes=true \
              -c sandbox_workspace_write.network_access=true \
              -c shell_environment_policy.r#set.CARGO_HOME="${RUNNER_TEMP}/cargo-home" \
              -c shell_environment_policy.r#set.CARGO_TARGET_DIR="${GITHUB_WORKSPACE}/codex-rs/target" \
              -c shell_environment_policy.r#set.RUSTC_WRAPPER="sccache" \
              -c shell_environment_policy.r#set.SCCACHE_GHA_ENABLED="true" \
              -c shell_environment_policy.r#set.SCCACHE_IDLE_TIMEOUT="1800" \
              -c shell_environment_policy.r#set.SCCACHE_CACHE_SIZE="5G" \
              -c shell_environment_policy.r#set.KEEP_ENV="1" \
              --cd "$GITHUB_WORKSPACE" \
              --skip-git-repo-check \
              - | tee -a "$GITHUB_WORKSPACE/.github/auto/AGENT_REMEDIATION_STDOUT.txt"
            # Re-verify after the agent's changes; success ends the loop.
            if bash scripts/upstream-merge/verify.sh; then
              echo "remediated=true" >> "$GITHUB_OUTPUT"
              exit 0
            fi
            attempt=$((attempt + 1))
            if [ "$attempt" -gt "$MAX_ATTEMPTS" ]; then
              echo "::error::Verification still failing after ${MAX_ATTEMPTS} remediation attempts" >&2
              exit 1
            fi
          done
      # Upload verification logs even when earlier steps failed (always()),
      # but only for runs that actually attempted a merge.
      - name: Upload artifact - VERIFY logs
        if: steps.check_upstream.outputs.skip != 'true' && always()
        uses: actions/upload-artifact@v4
        with:
          name: VERIFY
          if-no-files-found: warn
          path: |
            .github/auto/VERIFY.json
            .github/auto/VERIFY_build-fast.log
            .github/auto/VERIFY_api-check.log
            .github/auto/VERIFY_guards.log
            .github/auto/AGENT_REMEDIATION_STDOUT.txt
      # Scrub the merge branch of files disallowed by local policy: committed
      # merge artifacts, upstream marketing images, stray cargo cache dirs,
      # plus anything matched by purge_globs/perma_removed_paths from
      # .github/merge-policy.json. Commits and pushes only if something was
      # actually removed.
      - name: Enforce policy removals on merge branch (images + caches + purge_globs)
        if: steps.check_upstream.outputs.skip != 'true'
        env:
          GH_TOKEN: ${{ secrets.CODE_GH_PAT || github.token }}
          REPO: ${{ github.repository }}
          DEFAULT_BRANCH: ${{ github.event.repository.default_branch || 'main' }}
        run: |
          set -euo pipefail
          # NOTE(review): OWNER/REPO_N are computed but not used below —
          # presumably a leftover from an earlier API-based variant; confirm.
          OWNER=${REPO%%/*}
          REPO_N=${REPO##*/}
          # Only proceed if branch exists
          if ! git ls-remote --exit-code --heads origin upstream-merge >/dev/null 2>&1; then
            echo "upstream-merge branch not found; skipping policy cleanup"; exit 0
          fi
          # Work inside a throwaway worktree so the main checkout is untouched.
          wt=.wt-upstream-merge-clean
          rm -rf "$wt" && git worktree add -f "$wt" origin/upstream-merge
          pushd "$wt" >/dev/null
          removed=false
          # Remove any accidentally committed merge artifacts from repo root
          for f in MERGE_PLAN.md MERGE_REPORT.md; do
            if git ls-files -- "$f" | grep -q .; then git rm -f -- "$f"; removed=true; fi
          done
          # Remove any accidentally committed artifacts under .github/auto/** (these should be uploaded, not tracked)
          auto_tracked=$(git ls-files -- '.github/auto/**' || true)
          if [ -n "$auto_tracked" ]; then echo "$auto_tracked" | xargs -r git rm -f --; removed=true; fi
          # Remove any tracked upstream images disallowed by local policy
          for p in .github/codex-cli-*.png .github/codex-cli-*.jpg .github/codex-cli-*.jpeg .github/codex-cli-*.webp; do
            files=$(git ls-files -- "$p" || true)
            if [ -n "$files" ]; then echo "$files" | xargs -r git rm -f --; removed=true; fi
          done
          # Belt-and-suspenders: drop any accidentally committed cargo cache dirs
          # Cover both repo-root and nested workspace (e.g., codex-rs/.cargo-home)
          for d in .cargo-home .cargo2 codex-rs/.cargo-home codex-rs/.cargo2; do
            files=$(git ls-files -- "$d/**" || true)
            if [ -n "$files" ]; then echo "$files" | xargs -r git rm -f --; removed=true; fi
          done
          # Do NOT drop new upstream crates automatically. Let the agent decide with context.
          # Apply purge_globs and perma_removed_paths from merge-policy.json when present
          if command -v jq >/dev/null 2>&1 && [ -f ".github/merge-policy.json" ]; then
            mapfile -t purges < <(jq -r '.purge_globs[]? // empty' .github/merge-policy.json 2>/dev/null || true)
            mapfile -t perma < <(jq -r '.perma_removed_paths[]? // empty' .github/merge-policy.json 2>/dev/null || true)
            for pat in "${purges[@]}" "${perma[@]}"; do
              [ -n "${pat:-}" ] || continue
              files=$(git ls-files -- "$pat" || true)
              if [ -n "$files" ]; then echo "$files" | xargs -r git rm -f --; removed=true; fi
            done
          fi
          # Do NOT automatically remove reintroduced paths; the agent reviews and decides.
          if [ "$removed" = true ]; then
            git -c user.email="github-actions[bot]@users.noreply.github.com" -c user.name="github-actions[bot]" \
              commit -m "chore(merge): enforce policy removals (images + cargo caches)"
            git push origin HEAD:upstream-merge
          fi
          popd >/dev/null
          git worktree remove -f "$wt"
| - name: Branding report (guide-only) and perma-deleted guard | |
| if: steps.check_upstream.outputs.skip != 'true' | |
| env: | |
| DEFAULT_BRANCH: ${{ github.event.repository.default_branch || 'main' }} | |
| run: | | |
| set -euo pipefail | |
| # Guide-only branding report: detect user-visible 'Codex' strings under TUI/CLI affected by this merge. | |
| mkdir -p .github/auto | |
| : > .github/auto/VERIFY_branding.log | |
| changed_files=$(git diff --name-only "origin/${DEFAULT_BRANCH}..origin/upstream-merge" -- 'codex-rs/tui/**' 'codex-cli/**' | tr '\n' ' ' || true) | |
| if [ -n "${changed_files:-}" ]; then | |
| echo "[branding] scanning changed TUI/CLI files for user-visible 'Codex' strings..." | tee -a .github/auto/VERIFY_branding.log | |
| git diff -U0 --no-color "origin/${DEFAULT_BRANCH}..origin/upstream-merge" -- $changed_files \ | |
| | grep -E '^\+' \ | |
| | grep -E '"[^"]*Codex[^"]*"|'\''[^'\''']*Codex[^'\''']*'\''|`[^`]*Codex[^`]*`' \ | |
| | grep -Evi '(codex-rs|codex-[a-z0-9_-]+|https?://|Cargo|crate|package|workspace)' \ | |
| | sed 's/^/+ /' | tee -a .github/auto/VERIFY_branding.log || true | |
| echo "[branding] Note: guidance only. No changes applied or committed." | tee -a .github/auto/VERIFY_branding.log | |
| else | |
| echo "[branding] no TUI/CLI files changed vs origin/${DEFAULT_BRANCH}." | tee -a .github/auto/VERIFY_branding.log | |
| fi | |
| # docs/** changes are allowed; we may still prefer ours but do not hard-fail here. | |
| if git diff --name-only "origin/${DEFAULT_BRANCH}..origin/upstream-merge" -- 'docs/**' | grep -q .; then | |
| echo "::notice::docs/** changed on upstream-merge; ensuring agent reviews and preserves local branding where needed." >&2 | |
| fi | |
| # Perma-deleted guard: ensure none of perma_removed_paths exist on branch | |
| if command -v jq >/dev/null 2>&1 && [ -f ".github/merge-policy.json" ]; then | |
| mapfile -t perma < <(jq -r '.perma_removed_paths[]? // empty' .github/merge-policy.json 2>/dev/null || true) | |
| if [ ${#perma[@]} -gt 0 ]; then | |
| wt=.wt-guard | |
| rm -rf "$wt" && git worktree add -f "$wt" origin/upstream-merge >/dev/null | |
| pushd "$wt" >/dev/null | |
| for pat in "${perma[@]}"; do | |
| [ -n "${pat:-}" ] || continue | |
| if git ls-files -- "$pat" | grep -q .; then | |
| echo "Perma-deleted guard: files present matching '$pat'" >&2 | |
| git ls-files -- "$pat" >&2 || true | |
| exit 1 | |
| fi | |
| done | |
| popd >/dev/null | |
| git worktree remove -f "$wt" >/dev/null | |
| fi | |
| fi | |
| # Perma-deleted guard: ensure none of perma_removed_paths exist on branch | |
| if command -v jq >/dev/null 2>&1 && [ -f ".github/merge-policy.json" ]; then | |
| mapfile -t perma < <(jq -r '.perma_removed_paths[]? // empty' .github/merge-policy.json 2>/dev/null || true) | |
| if [ ${#perma[@]} -gt 0 ]; then | |
| wt=.wt-guard | |
| rm -rf "$wt" && git worktree add -f "$wt" origin/upstream-merge >/dev/null | |
| pushd "$wt" >/dev/null | |
| for pat in "${perma[@]}"; do | |
| [ -n "${pat:-}" ] || continue | |
| if git ls-files -- "$pat" | grep -q .; then | |
| echo "Perma-deleted guard: files present matching '$pat'" >&2 | |
| git ls-files -- "$pat" >&2 || true | |
| exit 1 | |
| fi | |
| done | |
| popd >/dev/null | |
| git worktree remove -f "$wt" >/dev/null | |
| fi | |
| fi | |
| - name: TUI invariants guard (strict stream ordering keys) | |
| if: steps.check_upstream.outputs.skip != 'true' | |
| env: | |
| DEFAULT_BRANCH: ${{ github.event.repository.default_branch || 'main' }} | |
| run: | | |
| set -euo pipefail | |
| # Only run if upstream delta touched TUI | |
| if ! grep -q '^codex-rs/tui/' .github/auto/DELTA_FILES.txt 2>/dev/null; then | |
| echo "No TUI changes in upstream delta; skipping invariants guard."; exit 0 | |
| fi | |
| # Verify critical ordering identifiers exist somewhere under codex-rs/tui/ | |
| failed=0 | |
| for token in request_ordinal output_index sequence_number; do | |
| if ! git ls-tree -r --name-only origin/upstream-merge -- 'codex-rs/tui/**' | while read -r f; do git show "origin/upstream-merge:$f" || true; done | grep -q "$token"; then | |
| echo "::warning::Invariant token '$token' not found under codex-rs/tui on upstream-merge (non-blocking)." >&2 | |
| fi | |
| done | |
| - name: Summarize run | |
| if: steps.check_upstream.outputs.skip != 'true' | |
| env: | |
| MERGE_MODE: ${{ steps.prep.outputs.merge_mode || 'one-shot' }} | |
| NO_MERGE_BASE: ${{ steps.prep.outputs.no_merge_base || 'false' }} | |
| run: | | |
| set -euo pipefail | |
| echo "### Upstream Merge Summary" >> "$GITHUB_STEP_SUMMARY" | |
| echo "- Mode: ${MERGE_MODE}" >> "$GITHUB_STEP_SUMMARY" | |
| echo "- Files changed upstream: ${{ steps.prep.outputs.files_count }} (est. LOC: ${{ steps.prep.outputs.loc_est }})" >> "$GITHUB_STEP_SUMMARY" | |
| echo "- Branch: upstream-merge" >> "$GITHUB_STEP_SUMMARY" | |
| echo "- Artifacts: COMMITS.json, MERGE_PLAN.md, MERGE_REPORT.md, DIFFSTAT.txt" >> "$GITHUB_STEP_SUMMARY" | |
| if [ "${NO_MERGE_BASE}" = "true" ]; then echo "- Note: upstream/default have no merge-base (unrelated histories)." >> "$GITHUB_STEP_SUMMARY"; fi | |
| # Preview small artifacts inline for quick debugging | |
| for f in COMMITS.json CHANGE_HISTOGRAM.txt DIFFSTAT.txt REINTRODUCED_PATHS.txt; do | |
| p=".github/auto/$f"; [ -s "$p" ] || continue; echo "\n#### $f" >> "$GITHUB_STEP_SUMMARY"; echo '\n```' >> "$GITHUB_STEP_SUMMARY"; sed -n '1,120p' "$p" >> "$GITHUB_STEP_SUMMARY"; echo '```' >> "$GITHUB_STEP_SUMMARY"; done | |
      # Upload each artifact separately so they appear as individual items in
      # the run's artifact list (a single multi-path upload would bundle them).
      - name: Upload artifact - COMMITS.json
        if: steps.check_upstream.outputs.skip != 'true'
        uses: actions/upload-artifact@v4
        with:
          name: COMMITS.json
          if-no-files-found: warn
          path: .github/auto/COMMITS.json
      - name: Upload artifact - DELTA_FILES.txt
        if: steps.check_upstream.outputs.skip != 'true'
        uses: actions/upload-artifact@v4
        with:
          name: DELTA_FILES.txt
          if-no-files-found: warn
          path: .github/auto/DELTA_FILES.txt
      - name: Upload artifact - DIFFSTAT.txt
        if: steps.check_upstream.outputs.skip != 'true'
        uses: actions/upload-artifact@v4
        with:
          name: DIFFSTAT.txt
          if-no-files-found: warn
          path: .github/auto/DIFFSTAT.txt
      - name: Upload artifact - CHANGE_HISTOGRAM.txt
        if: steps.check_upstream.outputs.skip != 'true'
        uses: actions/upload-artifact@v4
        with:
          name: CHANGE_HISTOGRAM.txt
          if-no-files-found: warn
          path: .github/auto/CHANGE_HISTOGRAM.txt
      - name: Upload artifact - DELETED_ON_DEFAULT.txt
        if: steps.check_upstream.outputs.skip != 'true'
        uses: actions/upload-artifact@v4
        with:
          name: DELETED_ON_DEFAULT.txt
          if-no-files-found: warn
          path: .github/auto/DELETED_ON_DEFAULT.txt
      - name: Upload artifact - REINTRODUCED_PATHS.txt
        if: steps.check_upstream.outputs.skip != 'true'
        uses: actions/upload-artifact@v4
        with:
          name: REINTRODUCED_PATHS.txt
          if-no-files-found: warn
          path: .github/auto/REINTRODUCED_PATHS.txt
      - name: Upload artifact - MERGE_PLAN.md
        if: steps.check_upstream.outputs.skip != 'true'
        uses: actions/upload-artifact@v4
        with:
          name: MERGE_PLAN.md
          if-no-files-found: warn
          path: .github/auto/MERGE_PLAN.md
      - name: Upload artifact - MERGE_REPORT.md
        if: steps.check_upstream.outputs.skip != 'true'
        uses: actions/upload-artifact@v4
        with:
          name: MERGE_REPORT.md
          if-no-files-found: warn
          path: .github/auto/MERGE_REPORT.md
      # Create or refresh the upstream-merge PR. Title/body prefer the files
      # the agent wrote (.github/auto/PR_TITLE.txt / PR_BODY.md), with
      # sensible fallbacks when absent.
      # NOTE(review): unlike sibling steps this one carries no
      # `steps.check_upstream.outputs.skip` guard — presumably intentional so
      # an already-pushed branch still gets a PR; confirm.
      - name: Open or update PR (use agent-supplied title/body if present)
        uses: actions/github-script@v7
        env:
          MERGE_BRANCH: ${{ env.MERGE_BRANCH }}
          UPSTREAM_REPO: ${{ env.UPSTREAM_REPO }}
          UPSTREAM_BRANCH: ${{ env.UPSTREAM_BRANCH }}
          DEFAULT_BRANCH: ${{ github.event.repository.default_branch || 'main' }}
        with:
          github-token: ${{ secrets.CODE_GH_PAT || github.token }}
          script: |
            const fs = require('fs');
            // Read a file's trimmed contents, falling back to dflt when the
            // file is missing or empty.
            function readOrDefault(p, dflt) { try { const t = fs.readFileSync(p,'utf8').trim(); return t || dflt; } catch { return dflt; } }
            const owner = context.repo.owner;
            const repo = context.repo.repo;
            const head = process.env.MERGE_BRANCH;
            const base = process.env.DEFAULT_BRANCH;
            const dfltTitle = `Upstream merge: ${process.env.UPSTREAM_REPO}@${process.env.UPSTREAM_BRANCH} into ${base}`;
            const title = readOrDefault('.github/auto/PR_TITLE.txt', dfltTitle);
            let body = readOrDefault('.github/auto/PR_BODY.md', '');
            if (!body) {
              body = `This PR merges ${process.env.UPSTREAM_REPO}@${process.env.UPSTREAM_BRANCH} into ${base}.`;
            }
            // Ensure the branch exists on origin before creating/updating a PR.
            const ref = `heads/${head}`;
            try {
              await github.rest.git.getRef({ owner, repo, ref });
            } catch (e) {
              core.notice(`Branch '${head}' not found on origin; skipping PR creation.`);
              return;
            }
            // Skip PR creation/update when there are no net file changes vs
            // base. NOTE(review): this step only returns early — it does NOT
            // close an existing zero-diff PR; that appears to be governed by
            // CLOSE_ZERO_DIFF_ON_SKIP elsewhere — confirm.
            let zeroDiff = false;
            try {
              const cmp = await github.rest.repos.compareCommitsWithBasehead({ owner, repo, basehead: `${base}...${head}` });
              const files = cmp.data.files || [];
              // Consider zero-diff if no files changed. This handles merges that add commits but no file deltas.
              zeroDiff = files.length === 0;
            } catch (e) {
              core.warning(`Compare failed (${base}...${head}): ${e.message}. Proceeding to PR creation.`);
            }
            const headRef = `${owner}:${head}`;
            const prs = await github.rest.pulls.list({ owner, repo, state: 'open', head: headRef });
            if (zeroDiff) { core.notice('Zero diff vs base; skipping PR creation.'); return; }
            if (prs.data.length) {
              const pr = prs.data[0];
              await github.rest.pulls.update({ owner, repo, pull_number: pr.number, title, body });
              core.notice(`Updated PR #${pr.number}`);
            } else {
              const pr = await github.rest.pulls.create({ owner, repo, title, head: headRef, base, body });
              core.notice(`Created PR #${pr.data.number}`);
            }
| pr: | |
| name: Open/Update PR (no merge work) | |
| needs: [precheck] | |
| if: needs.precheck.outputs.skip_due_to_active != 'true' && needs.precheck.outputs.action == 'pr_only' | |
| runs-on: ubuntu-latest | |
| timeout-minutes: 10 | |
| steps: | |
| - name: Check out repository (full history) | |
| uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 | |
| persist-credentials: false | |
| - name: Open or update PR (use agent-supplied title/body if present) | |
| uses: actions/github-script@v7 | |
| env: | |
| MERGE_BRANCH: ${{ env.MERGE_BRANCH }} | |
| UPSTREAM_REPO: ${{ env.UPSTREAM_REPO }} | |
| UPSTREAM_BRANCH: ${{ env.UPSTREAM_BRANCH }} | |
| DEFAULT_BRANCH: ${{ github.event.repository.default_branch || 'main' }} | |
| with: | |
| github-token: ${{ secrets.CODE_GH_PAT || github.token }} | |
| script: | | |
| const fs = require('fs'); | |
| function readOrDefault(p, dflt) { try { const t = fs.readFileSync(p,'utf8').trim(); return t || dflt; } catch { return dflt; } } | |
| const owner = context.repo.owner; | |
| const repo = context.repo.repo; | |
| const head = process.env.MERGE_BRANCH; | |
| const base = process.env.DEFAULT_BRANCH; | |
| const dfltTitle = `Upstream merge: ${process.env.UPSTREAM_REPO}@${process.env.UPSTREAM_BRANCH} into ${base}`; | |
| const title = readOrDefault('.github/auto/PR_TITLE.txt', dfltTitle); | |
| let body = readOrDefault('.github/auto/PR_BODY.md', ''); | |
| if (!body) { body = `This PR merges ${process.env.UPSTREAM_REPO}@${process.env.UPSTREAM_BRANCH} into ${base}.`; } | |
| const ref = `heads/${head}`; | |
| try { await github.rest.git.getRef({ owner, repo, ref }); } catch (e) { | |
| core.notice(`Branch '${head}' not found on origin; skipping PR creation.`); return; | |
| } | |
| let zeroDiff = false; | |
| try { | |
| const cmp = await github.rest.repos.compareCommitsWithBasehead({ owner, repo, basehead: `${base}...${head}` }); | |
| zeroDiff = (cmp.data.files || []).length === 0; | |
| } catch (e) { | |
| core.warning(`Compare failed (${base}...${head}): ${e.message}. Proceeding to PR creation.`); | |
| } | |
| if (zeroDiff) { core.notice('Zero diff vs base; skipping PR creation.'); return; } | |
| const headRef = `${owner}:${head}`; | |
| const prs = await github.rest.pulls.list({ owner, repo, state: 'open', head: headRef }); | |
| if (prs.data.length) { | |
| const pr = prs.data[0]; | |
| await github.rest.pulls.update({ owner, repo, pull_number: pr.number, title, body }); | |
| core.notice(`Updated PR #${pr.number}`); | |
| } else { | |
| const pr = await github.rest.pulls.create({ owner, repo, title, head: headRef, base, body }); | |
| core.notice(`Created PR #${pr.data.number}`); | |
| } |